| file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 class values) |
---|---|---|---|---|
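The rows that follow appear to be fill-in-the-middle (FIM) training samples: each source file is split into a prefix, a held-out middle, and a suffix, and fim_type records how the split was chosen (identifier_name, identifier_body, conditional_block, or random_line_split). A minimal sketch of that row shape, assuming the original file is simply prefix + middle + suffix (the interface and function names below are illustrative, not part of the dataset):

```typescript
// Shape of one dataset row, mirroring the column schema above.
interface FimRow {
  file_name: string;
  prefix: string;
  suffix: string;
  middle: string;
  fim_type: 'identifier_name' | 'random_line_split' | 'conditional_block' | 'identifier_body';
}

// Reassemble the original source file from a FIM row (assumption: no extra separators).
// e.g. in the first row below, prefix ends at "fn", middle is "fact", suffix starts "(n: uint)".
function reconstructFile(row: FimRow): string {
  return row.prefix + row.middle + row.suffix;
}
```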
extern-call.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc;
mod rustrt {
use std::libc;
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
fact(data - 1u) * data
}
}
fn
|
(n: uint) -> uint {
unsafe {
debug!("n = %?", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
let result = fact(10u);
debug!("result = %?", result);
assert_eq!(result, 3628800u);
}
|
fact
|
identifier_name
|
extern-call.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc;
mod rustrt {
use std::libc;
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
|
} else {
fact(data - 1u) * data
}
}
fn fact(n: uint) -> uint {
unsafe {
debug!("n = %?", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
let result = fact(10u);
debug!("result = %?", result);
assert_eq!(result, 3628800u);
}
|
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
|
random_line_split
|
DBPoolManager.ts
|
export declare function require(name: string): any
let pg = require("pg")
import { CannotCreateInstanceError, SqlExecFailError, TableNotFoundError } from "../define/Error"
import Table, {Record} from "../model/db/table/Table"
import Column from "../model/db/column/Column"
/**
* DBPoolManager
*/
export default class DBPoolManager {
private static instance: DBPoolManager
private pool: any
private client: any
private static DB_CONF = {
user: "root",
database: "open_ishinomaki",
password: "KsJaA4uQ",
host: "localhost",
port: 5432,
max: 10,
idleTimeoutMillis: 30000
}
constructor() {
if (DBPoolManager.instance) {
throw new CannotCreateInstanceError(DBPoolManager.name)
}
this.pool = new pg.Pool(DBPoolManager.DB_CONF)
}
/**
* @return {Promise} resolve(instance), reject(error)
*/
public static getInstance(): Promise<DBPoolManager>
|
/**
* @param {String} psql 実行psqlテキスト
* @param {Array} varray 実行psqlテキストに付随する変数配列
* @return {Promise} resolve(result), reject(error)
*/
public exec(psql: string, varray?: any[]): Promise<any> {
console.log("exec-psql: " + psql)
return new Promise((resolve, reject) => {
this.client.query(psql, varray, (error, result) => {
if (error) {
reject(new SqlExecFailError(error))
return
}
resolve(result)
})
})
}
}
export function escape(value: any): any {
if (value instanceof Array) {
return value.map((value1: any) => {
return escape(value1)
})
} else if (value instanceof Object) {
return Object.keys(value).reduce((prev: any, key: string) => {
prev[key] = escape(value[key])
return prev
}, {})
} else if (value == null || typeof value != String.name.toLowerCase()) {
return value
}
return value.replace(/'/g, "''")
}
|
{
if (this.instance == null) {
this.instance = new DBPoolManager()
}
return new Promise((resolve, reject) => {
if (this.instance.client) {
resolve(this.instance)
return
}
this.instance.pool.connect((error, client, done) => {
if (error) {
reject(error)
return
}
this.instance.client = client
resolve(this.instance)
})
})
}
|
identifier_body
|
DBPoolManager.ts
|
export declare function require(name: string): any
let pg = require("pg")
import { CannotCreateInstanceError, SqlExecFailError, TableNotFoundError } from "../define/Error"
import Table, {Record} from "../model/db/table/Table"
import Column from "../model/db/column/Column"
/**
* DBPoolManager
*/
export default class DBPoolManager {
private static instance: DBPoolManager
private pool: any
private client: any
private static DB_CONF = {
user: "root",
database: "open_ishinomaki",
password: "KsJaA4uQ",
host: "localhost",
port: 5432,
max: 10,
idleTimeoutMillis: 30000
}
constructor() {
if (DBPoolManager.instance) {
throw new CannotCreateInstanceError(DBPoolManager.name)
}
this.pool = new pg.Pool(DBPoolManager.DB_CONF)
}
/**
* @return {Promise} resolve(instance), reject(error)
*/
public static getInstance(): Promise<DBPoolManager> {
if (this.instance == null) {
this.instance = new DBPoolManager()
}
return new Promise((resolve, reject) => {
if (this.instance.client) {
resolve(this.instance)
return
}
this.instance.pool.connect((error, client, done) => {
if (error) {
reject(error)
return
}
this.instance.client = client
resolve(this.instance)
})
})
}
/**
* @param {String} psql 実行psqlテキスト
* @param {Array} varray 実行psqlテキストに付随する変数配列
* @return {Promise} resolve(result), reject(error)
*/
public exec(psql: string, varray?: any[]): Promise<any> {
console.log("exec-psql: " + psql)
return new Promise((resolve, reject) => {
this.client.query(psql, varray, (error, result) => {
if (error) {
reject(new SqlExecFailError(error))
return
}
resolve(result)
})
})
}
}
export function escape(value: any): any {
if (value instanceof Array) {
return value.map((value1: any) =
|
return Object.keys(value).reduce((prev: any, key: string) => {
prev[key] = escape(value[key])
return prev
}, {})
} else if (value == null || typeof value != String.name.toLowerCase()) {
return value
}
return value.replace(/'/g, "''")
}
|
> {
return escape(value1)
})
} else if (value instanceof Object) {
|
conditional_block
|
DBPoolManager.ts
|
export declare function require(name: string): any
let pg = require("pg")
import { CannotCreateInstanceError, SqlExecFailError, TableNotFoundError } from "../define/Error"
import Table, {Record} from "../model/db/table/Table"
import Column from "../model/db/column/Column"
/**
* DBPoolManager
*/
export default class
|
{
private static instance: DBPoolManager
private pool: any
private client: any
private static DB_CONF = {
user: "root",
database: "open_ishinomaki",
password: "KsJaA4uQ",
host: "localhost",
port: 5432,
max: 10,
idleTimeoutMillis: 30000
}
constructor() {
if (DBPoolManager.instance) {
throw new CannotCreateInstanceError(DBPoolManager.name)
}
this.pool = new pg.Pool(DBPoolManager.DB_CONF)
}
/**
* @return {Promise} resolve(instance), reject(error)
*/
public static getInstance(): Promise<DBPoolManager> {
if (this.instance == null) {
this.instance = new DBPoolManager()
}
return new Promise((resolve, reject) => {
if (this.instance.client) {
resolve(this.instance)
return
}
this.instance.pool.connect((error, client, done) => {
if (error) {
reject(error)
return
}
this.instance.client = client
resolve(this.instance)
})
})
}
/**
* @param {String} psql 実行psqlテキスト
* @param {Array} varray 実行psqlテキストに付随する変数配列
* @return {Promise} resolve(result), reject(error)
*/
public exec(psql: string, varray?: any[]): Promise<any> {
console.log("exec-psql: " + psql)
return new Promise((resolve, reject) => {
this.client.query(psql, varray, (error, result) => {
if (error) {
reject(new SqlExecFailError(error))
return
}
resolve(result)
})
})
}
}
export function escape(value: any): any {
if (value instanceof Array) {
return value.map((value1: any) => {
return escape(value1)
})
} else if (value instanceof Object) {
return Object.keys(value).reduce((prev: any, key: string) => {
prev[key] = escape(value[key])
return prev
}, {})
} else if (value == null || typeof value != String.name.toLowerCase()) {
return value
}
return value.replace(/'/g, "''")
}
|
DBPoolManager
|
identifier_name
|
DBPoolManager.ts
|
export declare function require(name: string): any
let pg = require("pg")
import { CannotCreateInstanceError, SqlExecFailError, TableNotFoundError } from "../define/Error"
import Table, {Record} from "../model/db/table/Table"
import Column from "../model/db/column/Column"
/**
* DBPoolManager
*/
export default class DBPoolManager {
private static instance: DBPoolManager
private pool: any
private client: any
private static DB_CONF = {
user: "root",
database: "open_ishinomaki",
password: "KsJaA4uQ",
host: "localhost",
port: 5432,
max: 10,
idleTimeoutMillis: 30000
}
constructor() {
if (DBPoolManager.instance) {
throw new CannotCreateInstanceError(DBPoolManager.name)
}
this.pool = new pg.Pool(DBPoolManager.DB_CONF)
}
/**
* @return {Promise} resolve(instance), reject(error)
*/
|
return new Promise((resolve, reject) => {
if (this.instance.client) {
resolve(this.instance)
return
}
this.instance.pool.connect((error, client, done) => {
if (error) {
reject(error)
return
}
this.instance.client = client
resolve(this.instance)
})
})
}
/**
* @param {String} psql 実行psqlテキスト
* @param {Array} varray 実行psqlテキストに付随する変数配列
* @return {Promise} resolve(result), reject(error)
*/
public exec(psql: string, varray?: any[]): Promise<any> {
console.log("exec-psql: " + psql)
return new Promise((resolve, reject) => {
this.client.query(psql, varray, (error, result) => {
if (error) {
reject(new SqlExecFailError(error))
return
}
resolve(result)
})
})
}
}
export function escape(value: any): any {
if (value instanceof Array) {
return value.map((value1: any) => {
return escape(value1)
})
} else if (value instanceof Object) {
return Object.keys(value).reduce((prev: any, key: string) => {
prev[key] = escape(value[key])
return prev
}, {})
} else if (value == null || typeof value != String.name.toLowerCase()) {
return value
}
return value.replace(/'/g, "''")
}
|
public static getInstance(): Promise<DBPoolManager> {
if (this.instance == null) {
this.instance = new DBPoolManager()
}
|
random_line_split
|
locale.js
|
/**
* Retrieves the devices location.
* @see extend
*
* @author Joseph Fehrman
* @since 07/09/2016
* @return Promise chain representing coordinates.
*/
|
this.longitude = longitude;
this.latitude = latitude;
this.options = options;
}
this.geo_options = {
enableHighAccuracy: true,
maximumAge : 30000,
timeout : 27000
};
extend(geo_options, options);
return new Promise(function(resolve, reject){
// Evaluate if the browser supports GPS location.
if(navigator.geolocation){
// Get the current location.
navigator.geolocation.getCurrentPosition(locationSuccess, locationFailed, geo_options);
}else{
// Evaluate that the browser does not support GPS location show the following error message.
reject(Error("Can not locate device's location. Browser does not support GPS locations."));
alert("Could not locate device's location.");
}
/**
* Private function for successful geolocation queries.
*/
function locationSuccess(position){
resolve(new Location(position.coords.latitude, position.coords.longitude, geo_options));
}
/**
* Private function for failed geolocation queries.
*/
function locationFailed(error){
console.error("Location error " + error.code + ": " + error.message);
alert("Could not locate device's location.")
}
});
}
|
function locale(options){
/**
* Private object designed to house coordinates.
*/
var Location = function(latitude, longitude , options){
|
random_line_split
|
locale.js
|
/**
* Retrieves the devices location.
* @see extend
*
* @author Joseph Fehrman
* @since 07/09/2016
* @return Promise chain representing coordinates.
*/
function locale(options){
/**
* Private object designed to house coordinates.
*/
var Location = function(latitude, longitude , options){
this.longitude = longitude;
this.latitude = latitude;
this.options = options;
}
this.geo_options = {
enableHighAccuracy: true,
maximumAge : 30000,
timeout : 27000
};
extend(geo_options, options);
return new Promise(function(resolve, reject){
// Evaluate if the browser supports GPS location.
if(navigator.geolocation)
|
else{
// Evaluate that the browser does not support GPS location show the following error message.
reject(Error("Can not locate device's location. Browser does not support GPS locations."));
alert("Could not locate device's location.");
}
/**
* Private function for successful geolocation queries.
*/
function locationSuccess(position){
resolve(new Location(position.coords.latitude, position.coords.longitude, geo_options));
}
/**
* Private function for failed geolocation queries.
*/
function locationFailed(error){
console.error("Location error " + error.code + ": " + error.message);
alert("Could not locate device's location.")
}
});
}
|
{
// Get the current location.
navigator.geolocation.getCurrentPosition(locationSuccess, locationFailed, geo_options);
}
|
conditional_block
|
locale.js
|
/**
* Retrieves the devices location.
* @see extend
*
* @author Joseph Fehrman
* @since 07/09/2016
* @return Promise chain representing coordinates.
*/
function locale(options){
/**
* Private object designed to house coordinates.
*/
var Location = function(latitude, longitude , options){
this.longitude = longitude;
this.latitude = latitude;
this.options = options;
}
this.geo_options = {
enableHighAccuracy: true,
maximumAge : 30000,
timeout : 27000
};
extend(geo_options, options);
return new Promise(function(resolve, reject){
// Evaluate if the browser supports GPS location.
if(navigator.geolocation){
// Get the current location.
navigator.geolocation.getCurrentPosition(locationSuccess, locationFailed, geo_options);
}else{
// Evaluate that the browser does not support GPS location show the following error message.
reject(Error("Can not locate device's location. Browser does not support GPS locations."));
alert("Could not locate device's location.");
}
/**
* Private function for successful geolocation queries.
*/
function locationSuccess(position){
resolve(new Location(position.coords.latitude, position.coords.longitude, geo_options));
}
/**
* Private function for failed geolocation queries.
*/
function locationFailed(error)
|
});
}
|
{
console.error("Location error " + error.code + ": " + error.message);
alert("Could not locate device's location.")
}
|
identifier_body
|
locale.js
|
/**
* Retrieves the devices location.
* @see extend
*
* @author Joseph Fehrman
* @since 07/09/2016
* @return Promise chain representing coordinates.
*/
function
|
(options){
/**
* Private object designed to house coordinates.
*/
var Location = function(latitude, longitude , options){
this.longitude = longitude;
this.latitude = latitude;
this.options = options;
}
this.geo_options = {
enableHighAccuracy: true,
maximumAge : 30000,
timeout : 27000
};
extend(geo_options, options);
return new Promise(function(resolve, reject){
// Evaluate if the browser supports GPS location.
if(navigator.geolocation){
// Get the current location.
navigator.geolocation.getCurrentPosition(locationSuccess, locationFailed, geo_options);
}else{
// Evaluate that the browser does not support GPS location show the following error message.
reject(Error("Can not locate device's location. Browser does not support GPS locations."));
alert("Could not locate device's location.");
}
/**
* Private function for successful geolocation queries.
*/
function locationSuccess(position){
resolve(new Location(position.coords.latitude, position.coords.longitude, geo_options));
}
/**
* Private function for failed geolocation queries.
*/
function locationFailed(error){
console.error("Location error " + error.code + ": " + error.message);
alert("Could not locate device's location.")
}
});
}
|
locale
|
identifier_name
|
AboutDialog.js
|
/*
* Copyright (C) Zing contributors.
*
* This file is a part of the Zing project. It is distributed under the GPL3
* or later license. See the LICENSE file for a copy of the license and the
* AUTHORS file for copyright and authorship information.
*/
import React from 'react';
import { showModal } from './Modal';
import { t, tct } from 'utils/i18n';
import './AboutDialog.css';
export function showAboutDialog() {
showModal({
title: t('About this translation server...'),
children: <AboutDialogContent />,
className: 'about-dialog-component',
});
}
const AboutDialogContent = React.createClass({
render()
|
;
|
{
return (
<div>
<div className="side-column">
<img src={s('images/logo.svg')} />
</div>
<div className="main-content">
<h1>Zing</h1>
<p>
{tct(
'This server is powered by %(zing)s — ' +
'online translation software developed by %(evernoteLink)s ' +
'and based on open-source %(pootleLink)s project.',
{
zing: <strong>Zing</strong>,
evernoteLink: <a href="https://evernote.com/">Evernote</a>,
pootleLink: <a href="http://pootle.translatehouse.org/">Pootle</a>,
}
)}
</p>
<p>
{tct('Source code and bug tracker: %(githubLink)s', {
githubLink: <a href="https://github.com/evernote/zing">GitHub</a>,
})}
</p>
<p className="copyright">
{t('© %(year)s Zing Contributors', { year: 2016 })}
<br />
{t('© 2016 Pootle Contributors')}
</p>
</div>
</div>
);
},
})
|
identifier_body
|
AboutDialog.js
|
/*
* Copyright (C) Zing contributors.
*
* This file is a part of the Zing project. It is distributed under the GPL3
* or later license. See the LICENSE file for a copy of the license and the
* AUTHORS file for copyright and authorship information.
*/
import React from 'react';
import { showModal } from './Modal';
import { t, tct } from 'utils/i18n';
import './AboutDialog.css';
export function showAboutDialog() {
showModal({
title: t('About this translation server...'),
children: <AboutDialogContent />,
className: 'about-dialog-component',
});
}
const AboutDialogContent = React.createClass({
render() {
return (
<div>
<div className="side-column">
<img src={s('images/logo.svg')} />
</div>
<div className="main-content">
<h1>Zing</h1>
<p>
{tct(
'This server is powered by %(zing)s — ' +
'online translation software developed by %(evernoteLink)s ' +
'and based on open-source %(pootleLink)s project.',
{
zing: <strong>Zing</strong>,
evernoteLink: <a href="https://evernote.com/">Evernote</a>,
pootleLink: <a href="http://pootle.translatehouse.org/">Pootle</a>,
}
)}
</p>
<p>
{tct('Source code and bug tracker: %(githubLink)s', {
githubLink: <a href="https://github.com/evernote/zing">GitHub</a>,
})}
</p>
<p className="copyright">
{t('© %(year)s Zing Contributors', { year: 2016 })}
|
{t('© 2016 Pootle Contributors')}
</p>
</div>
</div>
);
},
});
|
<br />
|
random_line_split
|
AboutDialog.js
|
/*
* Copyright (C) Zing contributors.
*
* This file is a part of the Zing project. It is distributed under the GPL3
* or later license. See the LICENSE file for a copy of the license and the
* AUTHORS file for copyright and authorship information.
*/
import React from 'react';
import { showModal } from './Modal';
import { t, tct } from 'utils/i18n';
import './AboutDialog.css';
export function
|
() {
showModal({
title: t('About this translation server...'),
children: <AboutDialogContent />,
className: 'about-dialog-component',
});
}
const AboutDialogContent = React.createClass({
render() {
return (
<div>
<div className="side-column">
<img src={s('images/logo.svg')} />
</div>
<div className="main-content">
<h1>Zing</h1>
<p>
{tct(
'This server is powered by %(zing)s — ' +
'online translation software developed by %(evernoteLink)s ' +
'and based on open-source %(pootleLink)s project.',
{
zing: <strong>Zing</strong>,
evernoteLink: <a href="https://evernote.com/">Evernote</a>,
pootleLink: <a href="http://pootle.translatehouse.org/">Pootle</a>,
}
)}
</p>
<p>
{tct('Source code and bug tracker: %(githubLink)s', {
githubLink: <a href="https://github.com/evernote/zing">GitHub</a>,
})}
</p>
<p className="copyright">
{t('© %(year)s Zing Contributors', { year: 2016 })}
<br />
{t('© 2016 Pootle Contributors')}
</p>
</div>
</div>
);
},
});
|
showAboutDialog
|
identifier_name
|
useLokiSyntaxAndLabels.ts
|
import { useState, useEffect } from 'react';
import Prism, { Grammar } from 'prismjs';
import { AbsoluteTimeRange } from '@grafana/data';
import LokiLanguageProvider from 'app/plugins/datasource/loki/language_provider';
import { useLokiLabels } from 'app/plugins/datasource/loki/components/useLokiLabels';
import { useRefMounted } from 'app/core/hooks/useRefMounted';
const PRISM_SYNTAX = 'promql';
/**
* Initialise the language provider. Returns a languageProviderInitialized boolean cause there does not seem other way
* to know if the provider is already initialised or not. By the initialisation it modifies the provided
* languageProvider directly.
*/
const useInitLanguageProvider = (languageProvider: LokiLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const mounted = useRefMounted();
const [languageProviderInitialized, setLanguageProviderInitialized] = useState(false);
// Async
const initializeLanguageProvider = async () => {
languageProvider.initialRange = absoluteRange;
await languageProvider.start();
if (mounted.current) {
setLanguageProviderInitialized(true);
}
};
useEffect(() => {
initializeLanguageProvider();
}, []);
return languageProviderInitialized;
};
/**
* Returns syntax from languageProvider and initialises global Prism syntax. Waits until languageProvider itself is
* initialised (outside of this hook).
*/
const useLokiSyntax = (languageProvider: LokiLanguageProvider, languageProviderInitialized: boolean) => {
// State
const [syntax, setSyntax] = useState<Grammar | null>(null);
// Effects
useEffect(() => {
if (languageProviderInitialized)
|
}, [languageProviderInitialized, languageProvider]);
return {
isSyntaxReady: !!syntax,
syntax,
};
};
/**
* Initializes given language provider, exposes Loki syntax and enables loading label option values
*/
export const useLokiSyntaxAndLabels = (languageProvider: LokiLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const languageProviderInitialized = useInitLanguageProvider(languageProvider, absoluteRange);
const { logLabelOptions, refreshLabels, setActiveOption } = useLokiLabels(
languageProvider,
languageProviderInitialized,
absoluteRange
);
const { isSyntaxReady, syntax } = useLokiSyntax(languageProvider, languageProviderInitialized);
return {
isSyntaxReady,
syntax,
logLabelOptions,
setActiveOption,
refreshLabels,
};
};
|
{
const syntax = languageProvider.getSyntax();
Prism.languages[PRISM_SYNTAX] = syntax;
setSyntax(syntax);
}
|
conditional_block
|
useLokiSyntaxAndLabels.ts
|
import { useState, useEffect } from 'react';
import Prism, { Grammar } from 'prismjs';
import { AbsoluteTimeRange } from '@grafana/data';
import LokiLanguageProvider from 'app/plugins/datasource/loki/language_provider';
import { useLokiLabels } from 'app/plugins/datasource/loki/components/useLokiLabels';
import { useRefMounted } from 'app/core/hooks/useRefMounted';
const PRISM_SYNTAX = 'promql';
/**
* Initialise the language provider. Returns a languageProviderInitialized boolean cause there does not seem other way
* to know if the provider is already initialised or not. By the initialisation it modifies the provided
* languageProvider directly.
*/
const useInitLanguageProvider = (languageProvider: LokiLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const mounted = useRefMounted();
const [languageProviderInitialized, setLanguageProviderInitialized] = useState(false);
// Async
const initializeLanguageProvider = async () => {
languageProvider.initialRange = absoluteRange;
await languageProvider.start();
if (mounted.current) {
setLanguageProviderInitialized(true);
}
};
useEffect(() => {
initializeLanguageProvider();
}, []);
return languageProviderInitialized;
};
/**
* Returns syntax from languageProvider and initialises global Prism syntax. Waits until languageProvider itself is
* initialised (outside of this hook).
*/
const useLokiSyntax = (languageProvider: LokiLanguageProvider, languageProviderInitialized: boolean) => {
// State
const [syntax, setSyntax] = useState<Grammar | null>(null);
// Effects
useEffect(() => {
if (languageProviderInitialized) {
const syntax = languageProvider.getSyntax();
Prism.languages[PRISM_SYNTAX] = syntax;
setSyntax(syntax);
}
}, [languageProviderInitialized, languageProvider]);
return {
isSyntaxReady: !!syntax,
syntax,
};
};
/**
* Initializes given language provider, exposes Loki syntax and enables loading label option values
*/
export const useLokiSyntaxAndLabels = (languageProvider: LokiLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const languageProviderInitialized = useInitLanguageProvider(languageProvider, absoluteRange);
|
);
const { isSyntaxReady, syntax } = useLokiSyntax(languageProvider, languageProviderInitialized);
return {
isSyntaxReady,
syntax,
logLabelOptions,
setActiveOption,
refreshLabels,
};
};
|
const { logLabelOptions, refreshLabels, setActiveOption } = useLokiLabels(
languageProvider,
languageProviderInitialized,
absoluteRange
|
random_line_split
|
deploy.js
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
* Copyright (c) 2014, Joyent, Inc.
*/
/*
* lib/deploy.js: common functions for deploying instances of Manta zones
*/
var assert = require('assert-plus');
var async = require('async');
var fs = require('fs');
var node_uuid = require('node-uuid');
var path = require('path');
var util = require('util');
var vasync = require('vasync');
var sprintf = util.format;
var common = require('./common');
var mod_ssl = require('./ssl');
var EventEmitter = require('events').EventEmitter;
var VError = require('verror').VError;
exports.deploy = deploy;
exports.createDeployer = createDeployer;
/*
* Storage zone deployments cannot be done concurrently, so we funnel all
* storage zone deployments through a vasync Queue with concurrency 1. This is
* global to the process. Obviously, even that's not sufficient when there are
* multiple processes involved, but it helps in the important case of using
* manta-adm to deploy multiple storage zones. See MANTA-2185 for details.
*/
var dStorageQueue = vasync.queue(
function (func, callback) { func(callback); }, 1);
/*
* Deploy a new instance of a Manta service. This is a one-shot method that
* creates a Deployer and then deploys a zone. If you're deploying more than
* one zone, you're better off creating your own deployer and then calling
* "deploy" as many times as you want. Arguments:
*
* options an object with optional properties:
*
* networks array of network names (strings) that this zone should
* be provisioned with
*
* server_uuid server uuid (string) on which to provision this zone
*
* svcname the friendly name of the service to be deployed
* (e.g., "nameservice", "loadbalancer", "moray", etc.)
*
* log a bunyan logger
*
* callback invoked upon completion as callback([err])
*/
function deploy(options, svcname, ilog, callback)
{
var deployer = createDeployer(ilog);
deployer.on('error', function (err) { callback(err); });
deployer.on('ready', function () {
deployer.deploy(options, svcname, callback);
});
}
/*
* Creates a new Deployer, which can be used to deploy several Manta zones.
* This operation initializes connections to various SDC services and emits
* "ready" when ready, or "error" if something goes wrong.
*/
function createDeployer(log)
{
return (new Deployer(log));
}
/*
* A single Deployer instance basically just keeps its own connections to
* various SDC services and a cached copy of the "Manta" and "SDC" applications.
* For consumers that want to deploy several zones, this is more efficient than
* reinitializing those connections each time.
*/
function Deployer(ilog)
{
var self = this;
self.log = ilog;
EventEmitter.call(this);
async.waterfall([
function initClients(cb) {
var log = self.log;
log.info('initing sdc clients');
common.initSdcClients.call(self, cb);
},
function getPoseidon(cb) {
var log = self.log;
log.info('getting poseidon user');
getUser(self, 'poseidon', function (err, user) {
self.poseidon = user;
return (cb(err));
});
},
function loadSdcApplication(cb) {
var sapi = self.SAPI;
var log = self.log;
var search_opts = { 'name': 'sdc' };
log.info('finding "sdc" application');
sapi.listApplications(search_opts,
function (err, apps) {
if (err) {
log.error(err,
'failed to list applications');
return (cb(err));
}
if (apps.length === 0) {
var msg = 'application "sdc" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.sdc_app = apps[0];
return (cb(null));
});
},
function getMantaApplication(cb) {
var log = self.log;
log.info('finding "manta" application');
common.getMantaApplication.call(self,
self.poseidon.uuid, function (err, app) {
if (err)
return (cb(err));
if (!app) {
var msg =
'application "manta" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.manta_app = app;
return (cb());
});
},
function getMantaServices(cb) {
var log, params;
log = self.log;
params = {
'include_master': true,
'application_uuid': self.manta_app['uuid']
};
log.info(params,
'fetching "manta" application services');
self.SAPI.listServices(params, function (err, svcs) {
if (err) {
cb(err);
return;
}
self.services = svcs;
cb();
});
},
function checkShardConfigs(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var missing = [];
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.MARLIN_SHARD]) != 'string' ||
md[common.MARLIN_SHARD].length === 0) {
missing.push(common.MARLIN_SHARD);
}
if (typeof (md[common.STORAGE_SHARD]) != 'string' ||
md[common.STORAGE_SHARD].length === 0) {
missing.push(common.STORAGE_SHARD);
}
if (!Array.isArray(md[common.INDEX_SHARDS]) ||
md[common.INDEX_SHARDS].length === 0) {
missing.push(common.INDEX_SHARDS);
}
if (missing.length === 0) {
setImmediate(cb);
return;
}
message = 'cannot deploy zones before shards have ' +
'been configured (see manta-shardadm)\n';
message += 'details: metadata properties missing or ' +
'not valid: ' + missing.join(', ');
err = new Error(message);
log.error(err);
setImmediate(cb, err);
},
function checkHashRingConfig(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.HASH_RING_IMAGE]) != 'string' ||
md[common.HASH_RING_IMAGE].length === 0 ||
typeof (md[common.HASH_RING_IMGAPI_SERVICE]) !=
'string' ||
md[common.HASH_RING_IMGAPI_SERVICE].length === 0) {
message = 'cannot deploy zones before hash ' +
'ring topology has been created ' +
'(see manta-create-topology.sh)';
err = new Error(message);
log.error(err);
setImmediate(cb, err);
} else {
setImmediate(cb);
}
}
], function (err) {
if (err)
self.emit('error', err);
else
self.emit('ready');
});
}
util.inherits(Deployer, EventEmitter);
Deployer.prototype.close = function (cb)
{
common.finiSdcClients.call(this, cb);
};
/*
* Actually deploy a Manta service zone for service "svcname". For argument
* details, see deploy() above.
*/
Deployer.prototype.deploy = function (options, svcname, callback)
{
var self, allservices;
self = {};
for (var k in this)
self[k] = this[k];
self.options = options;
self.zone_uuid = node_uuid.v4();
self.svcname = svcname;
allservices = this.services;
async.waterfall([
function getMantaService(cb) {
var log = self.log;
var svcs = allservices.filter(
function (s) { return (s['name'] == svcname); });
if (svcs.length < 1) {
var t = 'Service "%s" not found. ' +
'Did you run manta-init? If so, ' +
'is it a valid service?';
var message = sprintf(t, self.svcname);
var e = new Error(message);
e.message = message;
log.error(message);
return (cb(e));
}
self.service = svcs[0];
log.debug({ svc: self.service },
'found %s service', self.svcname);
return (cb(null));
},
function ensureZk(cb) {
var app = self.manta_app;
var log = self.log;
if (self.svcname === 'nameservice') {
return (cb(null));
}
log.info('ensuring ZK servers have been deployed');
if (!app.metadata || !app.metadata['ZK_SERVERS'] ||
app.metadata['ZK_SERVERS'].length < 1) {
var message = 'zk servers missing or empty ' +
'in the manta application. Has the ' +
'nameservice been deployed yet?';
log.error({
zkServers: app.metadata['ZK_SERVERS']
}, message);
var e = new Error(message);
e.message = message;
return (cb(e));
}
return (cb(null));
},
function generateSSLCertificate(cb) {
var log = self.log;
var sapi = self.SAPI;
var app = self.manta_app;
var svc = self.service;
if (svc.name !== 'loadbalancer') {
log.info('service "%s" doesn\'t need an ' +
'SSL certificate', svc.name);
return (cb(null));
}
if (svc.metadata['SSL_CERTIFICATE']) {
log.info('SSL certificate already present');
return (cb(null));
}
log.info('generating an ssl certificate');
var file = sprintf('/tmp/cert.%d', process.pid);
var svc_name = app.metadata['MANTA_SERVICE'];
async.waterfall([
function (subcb) {
mod_ssl.generateCertificate.call(self,
file, svc_name, subcb);
},
function (subcb) {
fs.readFile(file, 'ascii',
function (err, contents) {
if (err) {
log.error(err,
'failed to ' +
'read SSL cert');
} else {
log.debug(
'read SSL cert');
}
fs.unlink(file, function (_) {
return (subcb(
err, contents));
});
});
},
function (cert, subcb) {
assert.string(cert, 'cert');
assert.func(subcb, 'subcb');
var opts = {};
opts.metadata = {};
opts.metadata['SSL_CERTIFICATE'] = cert;
sapi.updateService(svc.uuid, opts,
function (err) {
if (err) {
log.error(err,
'failed to ' +
'save SSL cert');
return (subcb(err));
}
log.debug('saved SSL cert');
return (subcb(null));
});
}
], cb);
},
function reserveIP(cb) {
if (self.svcname !== 'nameservice')
return (cb(null, {}));
// XXX I can really do this after it's deployed, no need
// to reserve before provisioning.
var log = self.log;
log.info('reserving nic');
reserveAndGetNic(self, 'manta', self.zone_uuid,
self.poseidon.uuid,
function (err, nic) {
self.nic = nic;
cb(err, nic);
});
},
function updateZKServers(nic, cb) {
var sapi = self.SAPI;
var log = self.log;
if (self.svcname !== 'nameservice')
return (cb(null));
log.info('updating the list of zk servers in the ' +
'sapi manta application');
assert.object(nic, 'nic');
assert.string(nic.ip, 'nic.ip');
var metadata = self.manta_app.metadata;
if (!metadata)
metadata = {};
if (!metadata.ZK_SERVERS)
metadata.ZK_SERVERS = [];
metadata.ZK_SERVERS.push({
host: nic.ip,
port: 2181
});
var len = metadata.ZK_SERVERS.length;
metadata.ZK_SERVERS[len - 1].num = len;
for (var ii = 0; ii < len - 1; ii++)
delete metadata.ZK_SERVERS[ii].last;
metadata.ZK_SERVERS[len - 1].last = true;
var opts = {};
opts.metadata = metadata;
sapi.updateApplication(self.manta_app.uuid, opts,
function (err, app) {
if (!err)
self.manta_app = app;
return (cb(err));
});
},
function ensureComputeId(cb) {
if (self.svcname !== 'storage') {
return (cb(null));
}
var log = self.log;
var serverUuid;
log.debug('Ensuring that the server has a compute id');
function getComputeId() {
log.debug({
serverUuid: serverUuid
}, 'server uuid for looking up compute id');
var m = 'Error getting compute id';
common.getOrCreateComputeId.call(
self, serverUuid, function (err, cid) {
if (err) {
return (cb(err));
}
if (!cid) {
var e = new Error(m);
e.message = m;
return (cb(e));
}
log.debug({
computeId: cid
}, 'found compute id');
return (cb(null));
});
}
if (self.options.server_uuid) {
serverUuid = self.options.server_uuid;
getComputeId();
} else {
common.findServerUuid.call(
self, function (err, id) {
if (err) {
return (cb(err));
}
serverUuid = id;
getComputeId();
});
}
},
function deployMantaInstance(cb) {
createInstance.call(null, self,
self.manta_app, self.service, function (err, inst) {
if (err)
return (cb(err));
self.instance = inst;
return (cb(null));
});
},
function configureMarlinComputeZone(cb) {
var cnapi = self.CNAPI;
var vmapi = self.VMAPI;
var log = self.log;
var params = { 'uuid': self.instance.uuid };
if (self.svcname !== 'marlin')
return (cb(null));
log.info('configuring compute zone, ' +
'getting vmapi object');
vmapi.getVm(params, function (err, vm) {
if (err) {
log.error(err, 'failed to get zone ' +
'"%s" after instance created',
params.uuid);
return (cb(err));
}
var server = vm.server_uuid;
var script = sprintf(
'%s/tools/mrdeploycompute %s',
common.MARLIN_DIR, params.uuid);
log.info({
server: server,
script: script
}, 'running script to configure compute zone');
cnapi.commandExecute(server, script,
function (suberr) {
if (suberr) {
log.error(suberr, 'failed to ' +
'configure compute zone %s',
params.uuid);
return (cb(err));
}
log.info('configured compute zone %s',
params.uuid);
return (cb(null));
});
});
}
], function (err) {
callback(err, self.zone_uuid);
});
};
/*
* Undeploy a SAPI instance.
*/
Deployer.prototype.undeploy = function (instance, callback)
{
var self = this;
var svcname, cnid;
async.waterfall([
function getInstanceType(cb) {
self.log.info('fetching SAPI instance', instance);
self.SAPI.getInstance(instance, function (err, inst) {
var svcs;
if (!err) {
svcs = self.services.filter(
function (s) {
return (s['uuid'] ==
inst['service_uuid']);
});
if (svcs.length === 0) {
err = new VError(
'zone "%s" has ' +
'unexpected service "%s"',
instance,
inst['service_uuid']);
} else {
svcname = svcs[0]['name'];
}
}
cb(err);
});
},
function getVmInfo(cb) {
var params = { 'uuid': instance };
self.log.info(params, 'fetching VMAPI details');
self.VMAPI.getVm(params, function (err, vm) {
if (err) {
cb(new VError(err,
'failed to get "%s" from VMAPI',
instance));
return;
}
cnid = vm['server_uuid'];
cb();
});
},
function rmMarlinZone(cb) {
if (svcname != 'marlin') {
cb();
return;
}
var log = self.log;
var scriptpath = sprintf('%s/tools/mrzoneremove %s',
common.MARLIN_DIR, instance);
log.info({
'server': cnid,
'script': scriptpath
}, 'running script to remove compute zone');
self.CNAPI.commandExecute(cnid, scriptpath,
function (err) {
if (err) {
err = new VError(err,
'failed to remove compute zone ' +
'"%s"', instance);
log.error(err);
cb(err);
return;
}
log.info('removed compute zone "%s"', instance);
cb();
});
},
function sapiDelete(cb) {
self.log.info('deleting SAPI instance', instance);
self.SAPI.deleteInstance(instance, cb);
}
], function (err) {
self.log.info('undeploy complete', instance);
callback(err);
});
};
/*
* Reprovision a SAPI instance.
*/
Deployer.prototype.reprovision = function (instance, image_uuid, callback)
{
this.SAPI.reprovisionInstance(instance, image_uuid, callback);
};
// -- User management
function getUser(self, login, cb) {
var ufds = self.UFDS;
var log = self.log;
assert.string(login, 'login');
ufds.getUser(login, function (err, ret) {
if (err)
log.error(err, 'failed to get %s', login);
return (cb(err, ret));
});
}
// -- Network management
function reserveAndGetNic(self, name, zone_uuid, owner_uuid, cb) {
var log = self.log;
var napi = self.NAPI;
assert.string(name, 'name');
assert.string(zone_uuid, 'zone_uuid');
assert.string(owner_uuid, 'owner_uuid');
var opts = {
belongs_to_uuid: zone_uuid,
owner_uuid: owner_uuid,
belongs_to_type: 'zone'
};
log.info({ opts: opts }, 'provisioning NIC');
async.waterfall([
function (subcb) {
napi.listNetworks({ name: name },
function (err, networks) {
if (err) {
log.error(err,
'failed to list networks');
return (subcb(err));
}
log.debug({ network: networks[0] },
'found network %s', name);
return (subcb(null, networks[0].uuid));
});
},
function (network_uuid, subcb) {
napi.provisionNic(network_uuid, opts,
function (err, nic) {
if (err) {
log.error(err,
'failed to provision NIC');
return (cb(err));
}
log.info({ nic: nic }, 'provisioned NIC');
return (subcb(null, nic));
});
}
], cb);
}
// -- SAPI functions
function createInstance(self, app, svc, cb)
|
/*
* Given a list of SAPI instances for storage nodes, return an unused Manta
* storage id. If we're at all unsure, we return an error rather than
* potentially returning a conflicting name.
*/
function pickNextStorageId(instances, svcname)
{
var max, inst, instname, numpart;
var i, p, n;
var err = null;
max = 0;
for (i = 0; i < instances.length; i++) {
inst = instances[i];
instname = inst.metadata.MANTA_STORAGE_ID;
if (typeof (instname) != 'string') {
err = new VError('instance "%s": missing or ' +
'invalid MANTA_STORAGE_ID metadata', inst.uuid);
break;
}
p = instname.indexOf('.' + svcname);
if (p == -1 || p === 0) {
err = new VError('instance "%s": instance name ' +
'("%s") does not contain expected suffix (".%s")',
inst.uuid, instname, svcname);
break;
}
numpart = instname.substr(0, p);
n = parseInt(numpart, 10);
if (isNaN(n) || n < 1) {
err = new VError('instance "%s": instance name ' +
'("%s") does not start with a positive integer',
inst.uuid, instname);
break;
}
max = Math.max(max, n);
}
if (err !== null) {
return (new VError(err,
'failed to allocate MANTA_STORAGE_ID'));
}
return (sprintf('%d.%s', max + 1, svcname));
}
|
{
var sapi = self.SAPI;
var log = self.log;
assert.string(self.config.datacenter_name,
'self.config.datacenter_name');
assert.object(app, 'app');
assert.object(app.metadata, 'app.metadata');
assert.string(app.metadata.REGION, 'app.metadata.REGION');
assert.string(app.metadata.DNS_DOMAIN, 'app.metadata.DNS_DOMAIN');
assert.object(svc, 'svc');
assert.string(svc.name, 'svc.name');
assert.string(svc.uuid, 'svc.uuid');
var inst_uuid = self.zone_uuid ? self.zone_uuid : node_uuid.v4();
var params = {};
/*
* Traditionally we've used numeric shards (e.g. 1.moray, 2.moray, etc.)
* but there's no reason they have to be numbers. We could have
* 1-marlin.moray, marlin.moray, or anything similar.
*/
var shard = '1';
if (self.options.shard)
shard = self.options.shard;
/*
* The root of all service hostnames is formed from the application's
* region and DNS domain.
*/
var service_root = sprintf('%s.%s',
app.metadata.REGION, app.metadata.DNS_DOMAIN);
var service_name = sprintf('%s.%s', self.svcname, service_root);
params.alias = service_name + '-' + inst_uuid.substr(0, 8);
/*
* Prefix with the shard for things that are shardable...
*/
if (['postgres', 'moray'].indexOf(self.svcname) !== -1) {
params.alias = shard + '.' + params.alias;
}
params.tags = {};
params.tags.manta_role = svc.name;
if (self.options.server_uuid)
params.server_uuid = self.options.server_uuid;
if (self.options.image_uuid)
params.image_uuid = self.options.image_uuid;
if (self.options.networks) {
var networks = [];
self.options.networks.forEach(function (token) {
networks.push({ uuid: token });
});
params.networks = networks;
}
var metadata = {};
metadata.DATACENTER = self.config.datacenter_name;
metadata.SERVICE_NAME = service_name;
metadata.SHARD = shard;
if (self.svcname === 'nameservice') {
var len = 1;
if (app.metadata.ZK_SERVERS)
len = app.metadata.ZK_SERVERS.length;
metadata.ZK_ID = len;
}
if (self.svcname === 'postgres') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
metadata.MANATEE_SHARD_PATH = sprintf('/manatee/%s',
metadata.SERVICE_NAME);
}
if (self.svcname === 'moray') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
}
if (self.svcname === 'storage') {
metadata.SERVICE_NAME = sprintf('stor.%s', service_root);
}
if (self.svcname === 'webapi' || self.svcname === 'loadbalancer')
metadata.SERVICE_NAME = app.metadata['MANTA_SERVICE'];
if (self.svcname === 'marlin')
params.tags.manta_role = 'compute';
/*
* This zone should get its configuration the local (i.e. same
* datacenter) SAPI instance, as well as use the local UFDS instance.
*/
var config = self.config;
metadata['SAPI_URL'] = config.sapi.url;
metadata['UFDS_URL'] = config.ufds.url;
metadata['UFDS_ROOT_DN'] = config.ufds.bindDN;
metadata['UFDS_ROOT_PW'] = config.ufds.bindPassword;
metadata['SDC_NAMESERVERS'] = self.sdc_app.metadata.ZK_SERVERS;
var queuecb;
async.waterfall([
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
log.debug('putting "storage" zone provision for ' +
'"%s" into the queue', inst_uuid);
dStorageQueue.push(function (_queuecb) {
/*
* When we reach here, we're the only "storage"
* zone deployment that's going on right now.
* Save the queue callback so that we can invoke
* it when we finish deploying to free up the
* queue for someone else.
*/
queuecb = _queuecb;
log.debug('dequeueing "storage" zone ' +
'provision for "%s"', inst_uuid);
subcb();
});
},
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
/*
* The manta_storage_id should be the next available
* number.
*/
var opts = {};
opts.service_uuid = svc.uuid;
opts.include_master = true;
log.info('finding next manta_storage_id');
sapi.listInstances(opts, function (err, insts) {
if (err) {
log.error(err, 'failed to list ' +
'storage instances');
return (subcb(err));
}
/*
* Find the highest-numbered storage id and pick
* the next one.
*/
var mStorageId = pickNextStorageId(
insts, metadata.SERVICE_NAME);
if (mStorageId instanceof Error) {
log.error(err);
return (subcb(err));
}
metadata.MANTA_STORAGE_ID = mStorageId;
params.tags.manta_storage_id = mStorageId;
subcb();
});
},
function (subcb) {
log.info('locating user script');
var file = sprintf('%s/../scripts/user-script.sh',
path.dirname(__filename));
file = path.resolve(file);
fs.readFile(file, 'ascii', function (err, contents) {
if (err && err['code'] == 'ENOENT') {
log.debug('no user script');
} else if (err) {
log.error(err,
'failed to read user script');
return (subcb(err));
} else {
metadata['user-script'] = contents;
log.debug('read user script from %s',
file);
}
return (subcb(null));
});
},
function (subcb) {
var opts = {};
opts.params = params;
opts.metadata = metadata;
opts.uuid = inst_uuid;
opts.master = true;
log.info({ opts: opts }, 'creating instance');
sapi.createInstance(svc.uuid, opts,
function (err, inst) {
if (err) {
log.error(err, 'failed to create ' +
'instance');
return (subcb(err));
}
log.info({ inst: inst }, 'created instance');
return (subcb(null, inst));
});
}
], function () {
if (queuecb) {
log.debug('done with "storage" zone ' +
'provision for "%s"', inst_uuid);
setTimeout(queuecb, 0);
}
cb.apply(null, Array.prototype.slice.call(arguments));
});
}
|
identifier_body
|
deploy.js
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
* Copyright (c) 2014, Joyent, Inc.
*/
/*
* lib/deploy.js: common functions for deploying instances of Manta zones
*/
var assert = require('assert-plus');
var async = require('async');
var fs = require('fs');
var node_uuid = require('node-uuid');
var path = require('path');
var util = require('util');
var vasync = require('vasync');
var sprintf = util.format;
var common = require('./common');
var mod_ssl = require('./ssl');
var EventEmitter = require('events').EventEmitter;
var VError = require('verror').VError;
exports.deploy = deploy;
exports.createDeployer = createDeployer;
/*
* Storage zone deployments cannot be done concurrently, so we funnel all
* storage zone deployments through a vasync Queue with concurrency 1. This is
* global to the process. Obviously, even that's not sufficient when there are
* multiple processes involved, but it helps in the important case of using
* manta-adm to deploy multiple storage zones. See MANTA-2185 for details.
*/
var dStorageQueue = vasync.queue(
function (func, callback) { func(callback); }, 1);
/*
* Deploy a new instance of a Manta service. This is a one-shot method that
* creates a Deployer and then deploys a zone. If you're deploying more than
* one zone, you're better off creating your own deployer and then calling
* "deploy" as many times as you want. Arguments:
*
* options an object with optional properties:
*
* networks array of network names (strings) that this zone should
* be provisioned with
*
* server_uuid server uuid (string) on which to provision this zone
*
* svcname the friendly name of the service to be deployed
* (e.g., "nameservice", "loadbalancer", "moray", etc.)
*
* log a bunyan logger
*
* callback invoked upon completion as callback([err])
*/
function deploy(options, svcname, ilog, callback)
{
var deployer = createDeployer(ilog);
deployer.on('error', function (err) { callback(err); });
deployer.on('ready', function () {
deployer.deploy(options, svcname, callback);
});
}
/*
* Creates a new Deployer, which can be used to deploy several Manta zones.
* This operation initializes connections to various SDC services and emits
* "ready" when ready, or "error" if something goes wrong.
*/
function createDeployer(log)
{
return (new Deployer(log));
}
/*
* A single Deployer instance basically just keeps its own connections to
* various SDC services and a cached copy of the "Manta" and "SDC" applications.
* For consumers that want to deploy several zones, this is more efficient than
* reinitializing those connections each time.
*/
function Deployer(ilog)
{
var self = this;
self.log = ilog;
EventEmitter.call(this);
async.waterfall([
function initClients(cb) {
var log = self.log;
log.info('initing sdc clients');
common.initSdcClients.call(self, cb);
},
function getPoseidon(cb) {
var log = self.log;
log.info('getting poseidon user');
getUser(self, 'poseidon', function (err, user) {
self.poseidon = user;
return (cb(err));
});
},
function loadSdcApplication(cb) {
var sapi = self.SAPI;
var log = self.log;
var search_opts = { 'name': 'sdc' };
log.info('finding "sdc" application');
sapi.listApplications(search_opts,
function (err, apps) {
if (err) {
log.error(err,
'failed to list applications');
return (cb(err));
}
if (apps.length === 0) {
var msg = 'application "sdc" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.sdc_app = apps[0];
return (cb(null));
});
},
function getMantaApplication(cb) {
var log = self.log;
log.info('finding "manta" application');
common.getMantaApplication.call(self,
self.poseidon.uuid, function (err, app) {
if (err)
return (cb(err));
if (!app) {
var msg =
'application "manta" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.manta_app = app;
return (cb());
});
},
function getMantaServices(cb) {
var log, params;
log = self.log;
params = {
'include_master': true,
'application_uuid': self.manta_app['uuid']
};
log.info(params,
'fetching "manta" application services');
self.SAPI.listServices(params, function (err, svcs) {
if (err) {
cb(err);
return;
}
self.services = svcs;
cb();
});
},
function checkShardConfigs(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var missing = [];
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.MARLIN_SHARD]) != 'string' ||
md[common.MARLIN_SHARD].length === 0) {
missing.push(common.MARLIN_SHARD);
}
if (typeof (md[common.STORAGE_SHARD]) != 'string' ||
md[common.STORAGE_SHARD].length === 0) {
missing.push(common.STORAGE_SHARD);
}
if (!Array.isArray(md[common.INDEX_SHARDS]) ||
md[common.INDEX_SHARDS].length === 0) {
missing.push(common.INDEX_SHARDS);
}
if (missing.length === 0) {
setImmediate(cb);
return;
}
message = 'cannot deploy zones before shards have ' +
'been configured (see manta-shardadm)\n';
message += 'details: metadata properties missing or ' +
'not valid: ' + missing.join(', ');
err = new Error(message);
log.error(err);
setImmediate(cb, err);
},
function checkHashRingConfig(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.HASH_RING_IMAGE]) != 'string' ||
md[common.HASH_RING_IMAGE].length === 0 ||
typeof (md[common.HASH_RING_IMGAPI_SERVICE]) !=
'string' ||
md[common.HASH_RING_IMGAPI_SERVICE].length === 0) {
message = 'cannot deploy zones before hash ' +
'ring topology has been created ' +
'(see manta-create-topology.sh)';
err = new Error(message);
log.error(err);
setImmediate(cb, err);
} else {
setImmediate(cb);
}
}
], function (err) {
if (err)
self.emit('error', err);
else
self.emit('ready');
});
}
util.inherits(Deployer, EventEmitter);
Deployer.prototype.close = function (cb)
{
common.finiSdcClients.call(this, cb);
};
/*
* Actually deploy a Manta service zone for service "svcname". For argument
* details, see deploy() above.
*/
Deployer.prototype.deploy = function (options, svcname, callback)
{
var self, allservices;
self = {};
for (var k in this)
self[k] = this[k];
self.options = options;
self.zone_uuid = node_uuid.v4();
self.svcname = svcname;
allservices = this.services;
async.waterfall([
function getMantaService(cb) {
var log = self.log;
var svcs = allservices.filter(
function (s) { return (s['name'] == svcname); });
if (svcs.length < 1) {
var t = 'Service "%s" not found. ' +
'Did you run manta-init? If so, ' +
'is it a valid service?';
var message = sprintf(t, self.svcname);
var e = new Error(message);
e.message = message;
log.error(message);
return (cb(e));
}
|
self.service = svcs[0];
log.debug({ svc: self.service },
'found %s service', self.svcname);
return (cb(null));
},
function ensureZk(cb) {
var app = self.manta_app;
var log = self.log;
if (self.svcname === 'nameservice') {
return (cb(null));
}
log.info('ensuring ZK servers have been deployed');
if (!app.metadata || !app.metadata['ZK_SERVERS'] ||
app.metadata['ZK_SERVERS'].length < 1) {
var message = 'zk servers missing or empty ' +
'in the manta application. Has the ' +
'nameservice been deployed yet?';
log.error({
zkServers: app.metadata['ZK_SERVERS']
}, message);
var e = new Error(message);
e.message = message;
return (cb(e));
}
return (cb(null));
},
function generateSSLCertificate(cb) {
var log = self.log;
var sapi = self.SAPI;
var app = self.manta_app;
var svc = self.service;
if (svc.name !== 'loadbalancer') {
log.info('service "%s" doesn\'t need an ' +
'SSL certificate', svc.name);
return (cb(null));
}
if (svc.metadata['SSL_CERTIFICATE']) {
log.info('SSL certificate already present');
return (cb(null));
}
log.info('generating an ssl certificate');
var file = sprintf('/tmp/cert.%d', process.pid);
var svc_name = app.metadata['MANTA_SERVICE'];
async.waterfall([
function (subcb) {
mod_ssl.generateCertificate.call(self,
file, svc_name, subcb);
},
function (subcb) {
fs.readFile(file, 'ascii',
function (err, contents) {
if (err) {
log.error(err,
'failed to ' +
'read SSL cert');
} else {
log.debug(
'read SSL cert');
}
fs.unlink(file, function (_) {
return (subcb(
err, contents));
});
});
},
function (cert, subcb) {
assert.string(cert, 'cert');
assert.func(subcb, 'subcb');
var opts = {};
opts.metadata = {};
opts.metadata['SSL_CERTIFICATE'] = cert;
sapi.updateService(svc.uuid, opts,
function (err) {
if (err) {
log.error(err,
'failed to ' +
'save SSL cert');
return (subcb(err));
}
log.debug('saved SSL cert');
return (subcb(null));
});
}
], cb);
},
function reserveIP(cb) {
if (self.svcname !== 'nameservice')
return (cb(null, {}));
// XXX I can really do this after it's deployed, no need
// to reserve before provisioning.
var log = self.log;
log.info('reserving nic');
reserveAndGetNic(self, 'manta', self.zone_uuid,
self.poseidon.uuid,
function (err, nic) {
self.nic = nic;
cb(err, nic);
});
},
function updateZKServers(nic, cb) {
var sapi = self.SAPI;
var log = self.log;
if (self.svcname !== 'nameservice')
return (cb(null));
log.info('updating the list of zk servers in the ' +
'sapi manta application');
assert.object(nic, 'nic');
assert.string(nic.ip, 'nic.ip');
var metadata = self.manta_app.metadata;
if (!metadata)
metadata = {};
if (!metadata.ZK_SERVERS)
metadata.ZK_SERVERS = [];
metadata.ZK_SERVERS.push({
host: nic.ip,
port: 2181
});
var len = metadata.ZK_SERVERS.length;
metadata.ZK_SERVERS[len - 1].num = len;
for (var ii = 0; ii < len - 1; ii++)
delete metadata.ZK_SERVERS[ii].last;
metadata.ZK_SERVERS[len - 1].last = true;
var opts = {};
opts.metadata = metadata;
sapi.updateApplication(self.manta_app.uuid, opts,
function (err, app) {
if (!err)
self.manta_app = app;
return (cb(err));
});
},
function ensureComputeId(cb) {
if (self.svcname !== 'storage') {
return (cb(null));
}
var log = self.log;
var serverUuid;
log.debug('Ensuring that the server has a compute id');
function getComputeId() {
log.debug({
serverUuid: serverUuid
}, 'server uuid for looking up compute id');
var m = 'Error getting compute id';
common.getOrCreateComputeId.call(
self, serverUuid, function (err, cid) {
if (err) {
return (cb(err));
}
if (!cid) {
var e = new Error(m);
e.message = m;
return (cb(e));
}
log.debug({
computeId: cid
}, 'found compute id');
return (cb(null));
});
}
if (self.options.server_uuid) {
serverUuid = self.options.server_uuid;
getComputeId();
} else {
common.findServerUuid.call(
self, function (err, id) {
if (err) {
return (cb(err));
}
serverUuid = id;
getComputeId();
});
}
},
function deployMantaInstance(cb) {
createInstance.call(null, self,
self.manta_app, self.service, function (err, inst) {
if (err)
return (cb(err));
self.instance = inst;
return (cb(null));
});
},
function configureMarlinComputeZone(cb) {
var cnapi = self.CNAPI;
var vmapi = self.VMAPI;
var log = self.log;
var params = { 'uuid': self.instance.uuid };
if (self.svcname !== 'marlin')
return (cb(null));
log.info('configuring compute zone, ' +
'getting vmapi object');
vmapi.getVm(params, function (err, vm) {
if (err) {
log.error(err, 'failed to get zone ' +
'"%s" after instance created',
params.uuid);
return (cb(err));
}
var server = vm.server_uuid;
var script = sprintf(
'%s/tools/mrdeploycompute %s',
common.MARLIN_DIR, params.uuid);
log.info({
server: server,
script: script
}, 'running script to configure compute zone');
cnapi.commandExecute(server, script,
function (suberr) {
if (suberr) {
log.error(suberr, 'failed to ' +
'configure compute zone %s',
params.uuid);
				return (cb(suberr));
}
log.info('configured compute zone %s',
params.uuid);
return (cb(null));
});
});
}
], function (err) {
callback(err, self.zone_uuid);
});
};
/*
* Undeploy a SAPI instance.
*/
Deployer.prototype.undeploy = function (instance, callback)
{
var self = this;
var svcname, cnid;
async.waterfall([
function getInstanceType(cb) {
self.log.info('fetching SAPI instance', instance);
self.SAPI.getInstance(instance, function (err, inst) {
var svcs;
if (!err) {
svcs = self.services.filter(
function (s) {
return (s['uuid'] ==
inst['service_uuid']);
});
if (svcs.length === 0) {
err = new VError(
'zone "%s" has ' +
'unexpected service "%s"',
instance,
inst['service_uuid']);
} else {
svcname = svcs[0]['name'];
}
}
cb(err);
});
},
function getVmInfo(cb) {
var params = { 'uuid': instance };
self.log.info(params, 'fetching VMAPI details');
self.VMAPI.getVm(params, function (err, vm) {
if (err) {
cb(new VError(err,
'failed to get "%s" from VMAPI',
instance));
return;
}
cnid = vm['server_uuid'];
cb();
});
},
function rmMarlinZone(cb) {
if (svcname != 'marlin') {
cb();
return;
}
var log = self.log;
var scriptpath = sprintf('%s/tools/mrzoneremove %s',
common.MARLIN_DIR, instance);
log.info({
'server': cnid,
'script': scriptpath
}, 'running script to remove compute zone');
self.CNAPI.commandExecute(cnid, scriptpath,
function (err) {
if (err) {
err = new VError(err,
'failed to remove compute zone ' +
'"%s"', instance);
log.error(err);
cb(err);
return;
}
log.info('removed compute zone "%s"', instance);
cb();
});
},
function sapiDelete(cb) {
self.log.info('deleting SAPI instance', instance);
self.SAPI.deleteInstance(instance, cb);
}
], function (err) {
self.log.info('undeploy complete', instance);
callback(err);
});
};
/*
* Reprovision a SAPI instance.
*/
Deployer.prototype.reprovision = function (instance, image_uuid, callback)
{
this.SAPI.reprovisionInstance(instance, image_uuid, callback);
};
// -- User management
function getUser(self, login, cb) {
var ufds = self.UFDS;
var log = self.log;
assert.string(login, 'login');
ufds.getUser(login, function (err, ret) {
if (err)
log.error(err, 'failed to get %s', login);
return (cb(err, ret));
});
}
// -- Network management
function reserveAndGetNic(self, name, zone_uuid, owner_uuid, cb) {
var log = self.log;
var napi = self.NAPI;
assert.string(name, 'name');
assert.string(zone_uuid, 'zone_uuid');
assert.string(owner_uuid, 'owner_uuid');
var opts = {
belongs_to_uuid: zone_uuid,
owner_uuid: owner_uuid,
belongs_to_type: 'zone'
};
log.info({ opts: opts }, 'provisioning NIC');
async.waterfall([
function (subcb) {
napi.listNetworks({ name: name },
function (err, networks) {
if (err) {
log.error(err,
'failed to list networks');
return (subcb(err));
}
log.debug({ network: networks[0] },
'found network %s', name);
return (subcb(null, networks[0].uuid));
});
},
function (network_uuid, subcb) {
napi.provisionNic(network_uuid, opts,
function (err, nic) {
if (err) {
log.error(err,
'failed to provision NIC');
return (cb(err));
}
log.info({ nic: nic }, 'provisioned NIC');
return (subcb(null, nic));
});
}
], cb);
}
// -- SAPI functions
function createInstance(self, app, svc, cb) {
var sapi = self.SAPI;
var log = self.log;
assert.string(self.config.datacenter_name,
'self.config.datacenter_name');
assert.object(app, 'app');
assert.object(app.metadata, 'app.metadata');
assert.string(app.metadata.REGION, 'app.metadata.REGION');
assert.string(app.metadata.DNS_DOMAIN, 'app.metadata.DNS_DOMAIN');
assert.object(svc, 'svc');
assert.string(svc.name, 'svc.name');
assert.string(svc.uuid, 'svc.uuid');
var inst_uuid = self.zone_uuid ? self.zone_uuid : node_uuid.v4();
var params = {};
/*
* Traditionally we've used numeric shards (e.g. 1.moray, 2.moray, etc.)
* but there's no reason they have to be numbers. We could have
* 1-marlin.moray, marlin.moray, or anything similar.
*/
var shard = '1';
if (self.options.shard)
shard = self.options.shard;
/*
* The root of all service hostnames is formed from the application's
* region and DNS domain.
*/
var service_root = sprintf('%s.%s',
app.metadata.REGION, app.metadata.DNS_DOMAIN);
var service_name = sprintf('%s.%s', self.svcname, service_root);
params.alias = service_name + '-' + inst_uuid.substr(0, 8);
/*
* Prefix with the shard for things that are shardable...
*/
if (['postgres', 'moray'].indexOf(self.svcname) !== -1) {
params.alias = shard + '.' + params.alias;
}
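	/*
	 * Illustrative sketch (not part of the original source): with the
	 * hypothetical values REGION "us-east", DNS_DOMAIN "example.com",
	 * svcname "moray", shard "2" and an instance uuid beginning with
	 * "deadbeef", the lines above would produce:
	 *
	 *     service_name = 'moray.us-east.example.com'
	 *     params.alias  = '2.moray.us-east.example.com-deadbeef'
	 */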
params.tags = {};
params.tags.manta_role = svc.name;
if (self.options.server_uuid)
params.server_uuid = self.options.server_uuid;
if (self.options.image_uuid)
params.image_uuid = self.options.image_uuid;
if (self.options.networks) {
var networks = [];
self.options.networks.forEach(function (token) {
networks.push({ uuid: token });
});
params.networks = networks;
}
var metadata = {};
metadata.DATACENTER = self.config.datacenter_name;
metadata.SERVICE_NAME = service_name;
metadata.SHARD = shard;
if (self.svcname === 'nameservice') {
var len = 1;
if (app.metadata.ZK_SERVERS)
len = app.metadata.ZK_SERVERS.length;
metadata.ZK_ID = len;
}
if (self.svcname === 'postgres') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
metadata.MANATEE_SHARD_PATH = sprintf('/manatee/%s',
metadata.SERVICE_NAME);
}
if (self.svcname === 'moray') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
}
if (self.svcname === 'storage') {
metadata.SERVICE_NAME = sprintf('stor.%s', service_root);
}
if (self.svcname === 'webapi' || self.svcname === 'loadbalancer')
metadata.SERVICE_NAME = app.metadata['MANTA_SERVICE'];
if (self.svcname === 'marlin')
params.tags.manta_role = 'compute';
/*
	 * This zone should get its configuration from the local (i.e. same
* datacenter) SAPI instance, as well as use the local UFDS instance.
*/
var config = self.config;
metadata['SAPI_URL'] = config.sapi.url;
metadata['UFDS_URL'] = config.ufds.url;
metadata['UFDS_ROOT_DN'] = config.ufds.bindDN;
metadata['UFDS_ROOT_PW'] = config.ufds.bindPassword;
metadata['SDC_NAMESERVERS'] = self.sdc_app.metadata.ZK_SERVERS;
var queuecb;
async.waterfall([
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
log.debug('putting "storage" zone provision for ' +
'"%s" into the queue', inst_uuid);
dStorageQueue.push(function (_queuecb) {
/*
* When we reach here, we're the only "storage"
* zone deployment that's going on right now.
* Save the queue callback so that we can invoke
* it when we finish deploying to free up the
* queue for someone else.
*/
queuecb = _queuecb;
log.debug('dequeueing "storage" zone ' +
'provision for "%s"', inst_uuid);
subcb();
});
},
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
/*
* The manta_storage_id should be the next available
* number.
*/
var opts = {};
opts.service_uuid = svc.uuid;
opts.include_master = true;
log.info('finding next manta_storage_id');
sapi.listInstances(opts, function (err, insts) {
if (err) {
log.error(err, 'failed to list ' +
'storage instances');
return (subcb(err));
}
/*
* Find the highest-numbered storage id and pick
* the next one.
*/
var mStorageId = pickNextStorageId(
insts, metadata.SERVICE_NAME);
if (mStorageId instanceof Error) {
					log.error(mStorageId);
					return (subcb(mStorageId));
}
metadata.MANTA_STORAGE_ID = mStorageId;
params.tags.manta_storage_id = mStorageId;
subcb();
});
},
function (subcb) {
log.info('locating user script');
var file = sprintf('%s/../scripts/user-script.sh',
path.dirname(__filename));
file = path.resolve(file);
fs.readFile(file, 'ascii', function (err, contents) {
if (err && err['code'] == 'ENOENT') {
log.debug('no user script');
} else if (err) {
log.error(err,
'failed to read user script');
return (subcb(err));
} else {
metadata['user-script'] = contents;
log.debug('read user script from %s',
file);
}
return (subcb(null));
});
},
function (subcb) {
var opts = {};
opts.params = params;
opts.metadata = metadata;
opts.uuid = inst_uuid;
opts.master = true;
log.info({ opts: opts }, 'creating instance');
sapi.createInstance(svc.uuid, opts,
function (err, inst) {
if (err) {
log.error(err, 'failed to create ' +
'instance');
return (subcb(err));
}
log.info({ inst: inst }, 'created instance');
return (subcb(null, inst));
});
}
], function () {
if (queuecb) {
log.debug('done with "storage" zone ' +
'provision for "%s"', inst_uuid);
setTimeout(queuecb, 0);
}
cb.apply(null, Array.prototype.slice.call(arguments));
});
}
/*
* Given a list of SAPI instances for storage nodes, return an unused Manta
* storage id. If we're at all unsure, we return an error rather than
* potentially returning a conflicting name.
*/
function pickNextStorageId(instances, svcname)
{
var max, inst, instname, numpart;
var i, p, n;
var err = null;
max = 0;
for (i = 0; i < instances.length; i++) {
inst = instances[i];
instname = inst.metadata.MANTA_STORAGE_ID;
if (typeof (instname) != 'string') {
err = new VError('instance "%s": missing or ' +
'invalid MANTA_STORAGE_ID metadata', inst.uuid);
break;
}
p = instname.indexOf('.' + svcname);
if (p == -1 || p === 0) {
err = new VError('instance "%s": instance name ' +
'("%s") does not contain expected suffix (".%s")',
inst.uuid, instname, svcname);
break;
}
numpart = instname.substr(0, p);
n = parseInt(numpart, 10);
if (isNaN(n) || n < 1) {
err = new VError('instance "%s": instance name ' +
'("%s") does not start with a positive integer',
inst.uuid, instname);
break;
}
max = Math.max(max, n);
}
if (err !== null) {
return (new VError(err,
'failed to allocate MANTA_STORAGE_ID'));
}
return (sprintf('%d.%s', max + 1, svcname));
}
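/*
 * Illustrative sketch (not part of the original source): given instances
 * whose MANTA_STORAGE_ID values are '1.stor.us-east.example.com' and
 * '3.stor.us-east.example.com', with svcname 'stor.us-east.example.com',
 * the loop above finds max = 3 and the function returns
 * '4.stor.us-east.example.com'.  Any missing or malformed MANTA_STORAGE_ID
 * causes a VError to be returned instead of a new id.
 */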
|
random_line_split
|
|
deploy.js
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
* Copyright (c) 2014, Joyent, Inc.
*/
/*
* lib/deploy.js: common functions for deploying instances of Manta zones
*/
var assert = require('assert-plus');
var async = require('async');
var fs = require('fs');
var node_uuid = require('node-uuid');
var path = require('path');
var util = require('util');
var vasync = require('vasync');
var sprintf = util.format;
var common = require('./common');
var mod_ssl = require('./ssl');
var EventEmitter = require('events').EventEmitter;
var VError = require('verror').VError;
exports.deploy = deploy;
exports.createDeployer = createDeployer;
/*
* Storage zone deployments cannot be done concurrently, so we funnel all
* storage zone deployments through a vasync Queue with concurrency 1. This is
* global to the process. Obviously, even that's not sufficient when there are
* multiple processes involved, but it helps in the important case of using
* manta-adm to deploy multiple storage zones. See MANTA-2185 for details.
*/
var dStorageQueue = vasync.queue(
function (func, callback) { func(callback); }, 1);
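/*
 * Illustrative sketch (not part of the original source): with concurrency 1,
 * a task pushed onto dStorageQueue only starts after the previous task has
 * invoked its callback, e.g.:
 *
 *     dStorageQueue.push(function (qcb) { deployStorageZoneA(qcb); });
 *     dStorageQueue.push(function (qcb) { deployStorageZoneB(qcb); });
 *     // deployStorageZoneB does not begin until deployStorageZoneA
 *     // calls qcb.
 *
 * deployStorageZoneA/B are hypothetical names used only for illustration.
 */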
/*
* Deploy a new instance of a Manta service. This is a one-shot method that
* creates a Deployer and then deploys a zone. If you're deploying more than
* one zone, you're better off creating your own deployer and then calling
* "deploy" as many times as you want. Arguments:
*
* options an object with optional properties:
*
* networks array of network names (strings) that this zone should
* be provisioned with
*
* server_uuid server uuid (string) on which to provision this zone
*
* svcname the friendly name of the service to be deployed
* (e.g., "nameservice", "loadbalancer", "moray", etc.)
*
* log a bunyan logger
*
* callback invoked upon completion as callback([err])
*/
function deploy(options, svcname, ilog, callback)
{
var deployer = createDeployer(ilog);
deployer.on('error', function (err) { callback(err); });
deployer.on('ready', function () {
deployer.deploy(options, svcname, callback);
});
}
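/*
 * Illustrative sketch (not part of the original source): a one-shot
 * deployment of a "storage" zone might look like the following; the option
 * values and the logger configuration are hypothetical.
 *
 *     var bunyan = require('bunyan');
 *     var log = bunyan.createLogger({ name: 'manta-deploy' });
 *     deploy({
 *         'networks': [ 'manta' ],
 *         'server_uuid': '564dfa8d-0000-0000-0000-hypothetical'
 *     }, 'storage', log, function (err) {
 *         if (err)
 *             log.error(err, 'deployment failed');
 *     });
 */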
/*
* Creates a new Deployer, which can be used to deploy several Manta zones.
* This operation initializes connections to various SDC services and emits
* "ready" when ready, or "error" if something goes wrong.
*/
function createDeployer(log)
{
return (new Deployer(log));
}
/*
* A single Deployer instance basically just keeps its own connections to
* various SDC services and a cached copy of the "Manta" and "SDC" applications.
* For consumers that want to deploy several zones, this is more efficient than
* reinitializing those connections each time.
*/
function Deployer(ilog)
{
var self = this;
self.log = ilog;
EventEmitter.call(this);
async.waterfall([
function initClients(cb) {
var log = self.log;
log.info('initing sdc clients');
common.initSdcClients.call(self, cb);
},
function getPoseidon(cb) {
var log = self.log;
log.info('getting poseidon user');
getUser(self, 'poseidon', function (err, user) {
self.poseidon = user;
return (cb(err));
});
},
function loadSdcApplication(cb) {
var sapi = self.SAPI;
var log = self.log;
var search_opts = { 'name': 'sdc' };
log.info('finding "sdc" application');
sapi.listApplications(search_opts,
function (err, apps) {
if (err) {
log.error(err,
'failed to list applications');
return (cb(err));
}
if (apps.length === 0) {
var msg = 'application "sdc" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.sdc_app = apps[0];
return (cb(null));
});
},
function getMantaApplication(cb) {
var log = self.log;
log.info('finding "manta" application');
common.getMantaApplication.call(self,
self.poseidon.uuid, function (err, app) {
if (err)
return (cb(err));
if (!app) {
var msg =
'application "manta" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.manta_app = app;
return (cb());
});
},
function getMantaServices(cb) {
var log, params;
log = self.log;
params = {
'include_master': true,
'application_uuid': self.manta_app['uuid']
};
log.info(params,
'fetching "manta" application services');
self.SAPI.listServices(params, function (err, svcs) {
if (err) {
cb(err);
return;
}
self.services = svcs;
cb();
});
},
function checkShardConfigs(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var missing = [];
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.MARLIN_SHARD]) != 'string' ||
md[common.MARLIN_SHARD].length === 0) {
missing.push(common.MARLIN_SHARD);
}
if (typeof (md[common.STORAGE_SHARD]) != 'string' ||
md[common.STORAGE_SHARD].length === 0) {
missing.push(common.STORAGE_SHARD);
}
if (!Array.isArray(md[common.INDEX_SHARDS]) ||
md[common.INDEX_SHARDS].length === 0) {
missing.push(common.INDEX_SHARDS);
}
if (missing.length === 0) {
setImmediate(cb);
return;
}
message = 'cannot deploy zones before shards have ' +
'been configured (see manta-shardadm)\n';
message += 'details: metadata properties missing or ' +
'not valid: ' + missing.join(', ');
err = new Error(message);
log.error(err);
setImmediate(cb, err);
},
function checkHashRingConfig(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var message, err;
			log.info('checking hash ring configuration parameters');
if (typeof (md[common.HASH_RING_IMAGE]) != 'string' ||
md[common.HASH_RING_IMAGE].length === 0 ||
typeof (md[common.HASH_RING_IMGAPI_SERVICE]) !=
'string' ||
md[common.HASH_RING_IMGAPI_SERVICE].length === 0) {
message = 'cannot deploy zones before hash ' +
'ring topology has been created ' +
'(see manta-create-topology.sh)';
err = new Error(message);
log.error(err);
setImmediate(cb, err);
} else {
setImmediate(cb);
}
}
], function (err) {
if (err)
self.emit('error', err);
else
self.emit('ready');
});
}
util.inherits(Deployer, EventEmitter);
Deployer.prototype.close = function (cb)
{
common.finiSdcClients.call(this, cb);
};
/*
* Actually deploy a Manta service zone for service "svcname". For argument
* details, see deploy() above.
*/
Deployer.prototype.deploy = function (options, svcname, callback)
{
var self, allservices;
self = {};
for (var k in this)
self[k] = this[k];
self.options = options;
self.zone_uuid = node_uuid.v4();
self.svcname = svcname;
allservices = this.services;
async.waterfall([
function getMantaService(cb) {
var log = self.log;
var svcs = allservices.filter(
function (s) { return (s['name'] == svcname); });
if (svcs.length < 1) {
var t = 'Service "%s" not found. ' +
'Did you run manta-init? If so, ' +
'is it a valid service?';
var message = sprintf(t, self.svcname);
var e = new Error(message);
e.message = message;
log.error(message);
return (cb(e));
}
self.service = svcs[0];
log.debug({ svc: self.service },
'found %s service', self.svcname);
return (cb(null));
},
function ensureZk(cb) {
var app = self.manta_app;
var log = self.log;
if (self.svcname === 'nameservice') {
return (cb(null));
}
log.info('ensuring ZK servers have been deployed');
if (!app.metadata || !app.metadata['ZK_SERVERS'] ||
app.metadata['ZK_SERVERS'].length < 1) {
var message = 'zk servers missing or empty ' +
'in the manta application. Has the ' +
'nameservice been deployed yet?';
log.error({
zkServers: app.metadata['ZK_SERVERS']
}, message);
var e = new Error(message);
e.message = message;
return (cb(e));
}
return (cb(null));
},
function generateSSLCertificate(cb) {
var log = self.log;
var sapi = self.SAPI;
var app = self.manta_app;
var svc = self.service;
if (svc.name !== 'loadbalancer') {
log.info('service "%s" doesn\'t need an ' +
'SSL certificate', svc.name);
return (cb(null));
}
if (svc.metadata['SSL_CERTIFICATE']) {
log.info('SSL certificate already present');
return (cb(null));
}
log.info('generating an ssl certificate');
var file = sprintf('/tmp/cert.%d', process.pid);
var svc_name = app.metadata['MANTA_SERVICE'];
async.waterfall([
function (subcb) {
mod_ssl.generateCertificate.call(self,
file, svc_name, subcb);
},
function (subcb) {
fs.readFile(file, 'ascii',
function (err, contents) {
if (err) {
log.error(err,
'failed to ' +
'read SSL cert');
} else {
log.debug(
'read SSL cert');
}
fs.unlink(file, function (_) {
return (subcb(
err, contents));
});
});
},
function (cert, subcb) {
assert.string(cert, 'cert');
assert.func(subcb, 'subcb');
var opts = {};
opts.metadata = {};
opts.metadata['SSL_CERTIFICATE'] = cert;
sapi.updateService(svc.uuid, opts,
function (err) {
if (err) {
log.error(err,
'failed to ' +
'save SSL cert');
return (subcb(err));
}
log.debug('saved SSL cert');
return (subcb(null));
});
}
], cb);
},
function reserveIP(cb) {
if (self.svcname !== 'nameservice')
return (cb(null, {}));
// XXX I can really do this after it's deployed, no need
// to reserve before provisioning.
var log = self.log;
log.info('reserving nic');
reserveAndGetNic(self, 'manta', self.zone_uuid,
self.poseidon.uuid,
function (err, nic) {
self.nic = nic;
cb(err, nic);
});
},
function updateZKServers(nic, cb) {
var sapi = self.SAPI;
var log = self.log;
if (self.svcname !== 'nameservice')
return (cb(null));
log.info('updating the list of zk servers in the ' +
'sapi manta application');
assert.object(nic, 'nic');
assert.string(nic.ip, 'nic.ip');
var metadata = self.manta_app.metadata;
if (!metadata)
metadata = {};
if (!metadata.ZK_SERVERS)
metadata.ZK_SERVERS = [];
metadata.ZK_SERVERS.push({
host: nic.ip,
port: 2181
});
var len = metadata.ZK_SERVERS.length;
metadata.ZK_SERVERS[len - 1].num = len;
for (var ii = 0; ii < len - 1; ii++)
delete metadata.ZK_SERVERS[ii].last;
metadata.ZK_SERVERS[len - 1].last = true;
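			/*
			 * Illustrative sketch (not part of the original
			 * source): after the second nameservice zone is
			 * deployed, ZK_SERVERS would look roughly like:
			 *
			 *     [ { host: '10.0.0.1', port: 2181, num: 1 },
			 *       { host: '10.0.0.2', port: 2181, num: 2,
			 *         last: true } ]
			 *
			 * The IP addresses are hypothetical; only the final
			 * entry carries the "last" flag.
			 */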
var opts = {};
opts.metadata = metadata;
sapi.updateApplication(self.manta_app.uuid, opts,
function (err, app) {
if (!err)
self.manta_app = app;
return (cb(err));
});
},
function ensureComputeId(cb) {
if (self.svcname !== 'storage') {
return (cb(null));
}
var log = self.log;
var serverUuid;
log.debug('Ensuring that the server has a compute id');
function getComputeId() {
log.debug({
serverUuid: serverUuid
}, 'server uuid for looking up compute id');
var m = 'Error getting compute id';
common.getOrCreateComputeId.call(
self, serverUuid, function (err, cid) {
if (err) {
return (cb(err));
}
if (!cid) {
var e = new Error(m);
e.message = m;
return (cb(e));
}
log.debug({
computeId: cid
}, 'found compute id');
return (cb(null));
});
}
if (self.options.server_uuid) {
serverUuid = self.options.server_uuid;
getComputeId();
} else {
common.findServerUuid.call(
self, function (err, id) {
if (err) {
return (cb(err));
}
serverUuid = id;
getComputeId();
});
}
},
function deployMantaInstance(cb) {
createInstance.call(null, self,
self.manta_app, self.service, function (err, inst) {
if (err)
return (cb(err));
self.instance = inst;
return (cb(null));
});
},
function configureMarlinComputeZone(cb) {
var cnapi = self.CNAPI;
var vmapi = self.VMAPI;
var log = self.log;
var params = { 'uuid': self.instance.uuid };
if (self.svcname !== 'marlin')
return (cb(null));
log.info('configuring compute zone, ' +
'getting vmapi object');
vmapi.getVm(params, function (err, vm) {
if (err) {
log.error(err, 'failed to get zone ' +
'"%s" after instance created',
params.uuid);
return (cb(err));
}
var server = vm.server_uuid;
var script = sprintf(
'%s/tools/mrdeploycompute %s',
common.MARLIN_DIR, params.uuid);
log.info({
server: server,
script: script
}, 'running script to configure compute zone');
cnapi.commandExecute(server, script,
function (suberr) {
if (suberr) {
log.error(suberr, 'failed to ' +
'configure compute zone %s',
params.uuid);
				return (cb(suberr));
}
log.info('configured compute zone %s',
params.uuid);
return (cb(null));
});
});
}
], function (err) {
callback(err, self.zone_uuid);
});
};
/*
* Undeploy a SAPI instance.
*/
Deployer.prototype.undeploy = function (instance, callback)
{
var self = this;
var svcname, cnid;
async.waterfall([
function getInstanceType(cb) {
self.log.info('fetching SAPI instance', instance);
self.SAPI.getInstance(instance, function (err, inst) {
var svcs;
if (!err) {
svcs = self.services.filter(
function (s) {
return (s['uuid'] ==
inst['service_uuid']);
});
if (svcs.length === 0) {
err = new VError(
'zone "%s" has ' +
'unexpected service "%s"',
instance,
inst['service_uuid']);
} else {
svcname = svcs[0]['name'];
}
}
cb(err);
});
},
function getVmInfo(cb) {
var params = { 'uuid': instance };
self.log.info(params, 'fetching VMAPI details');
self.VMAPI.getVm(params, function (err, vm) {
if (err) {
cb(new VError(err,
'failed to get "%s" from VMAPI',
instance));
return;
}
cnid = vm['server_uuid'];
cb();
});
},
function rmMarlinZone(cb) {
if (svcname != 'marlin') {
cb();
return;
}
var log = self.log;
var scriptpath = sprintf('%s/tools/mrzoneremove %s',
common.MARLIN_DIR, instance);
log.info({
'server': cnid,
'script': scriptpath
}, 'running script to remove compute zone');
self.CNAPI.commandExecute(cnid, scriptpath,
function (err) {
if (err) {
err = new VError(err,
'failed to remove compute zone ' +
'"%s"', instance);
log.error(err);
cb(err);
return;
}
log.info('removed compute zone "%s"', instance);
cb();
});
},
function sapiDelete(cb) {
self.log.info('deleting SAPI instance', instance);
self.SAPI.deleteInstance(instance, cb);
}
], function (err) {
self.log.info('undeploy complete', instance);
callback(err);
});
};
/*
* Reprovision a SAPI instance.
*/
Deployer.prototype.reprovision = function (instance, image_uuid, callback)
{
this.SAPI.reprovisionInstance(instance, image_uuid, callback);
};
// -- User management
function getUser(self, login, cb) {
var ufds = self.UFDS;
var log = self.log;
assert.string(login, 'login');
ufds.getUser(login, function (err, ret) {
if (err)
log.error(err, 'failed to get %s', login);
return (cb(err, ret));
});
}
// -- Network management
function reserveAndGetNic(self, name, zone_uuid, owner_uuid, cb) {
var log = self.log;
var napi = self.NAPI;
assert.string(name, 'name');
assert.string(zone_uuid, 'zone_uuid');
assert.string(owner_uuid, 'owner_uuid');
var opts = {
belongs_to_uuid: zone_uuid,
owner_uuid: owner_uuid,
belongs_to_type: 'zone'
};
log.info({ opts: opts }, 'provisioning NIC');
async.waterfall([
function (subcb) {
napi.listNetworks({ name: name },
function (err, networks) {
if (err) {
log.error(err,
'failed to list networks');
return (subcb(err));
}
log.debug({ network: networks[0] },
'found network %s', name);
return (subcb(null, networks[0].uuid));
});
},
function (network_uuid, subcb) {
napi.provisionNic(network_uuid, opts,
function (err, nic) {
if (err) {
log.error(err,
'failed to provision NIC');
return (cb(err));
}
log.info({ nic: nic }, 'provisioned NIC');
return (subcb(null, nic));
});
}
], cb);
}
// -- SAPI functions
function createInstance(self, app, svc, cb) {
var sapi = self.SAPI;
var log = self.log;
assert.string(self.config.datacenter_name,
'self.config.datacenter_name');
assert.object(app, 'app');
assert.object(app.metadata, 'app.metadata');
assert.string(app.metadata.REGION, 'app.metadata.REGION');
assert.string(app.metadata.DNS_DOMAIN, 'app.metadata.DNS_DOMAIN');
assert.object(svc, 'svc');
assert.string(svc.name, 'svc.name');
assert.string(svc.uuid, 'svc.uuid');
var inst_uuid = self.zone_uuid ? self.zone_uuid : node_uuid.v4();
var params = {};
/*
* Traditionally we've used numeric shards (e.g. 1.moray, 2.moray, etc.)
* but there's no reason they have to be numbers. We could have
* 1-marlin.moray, marlin.moray, or anything similar.
*/
var shard = '1';
if (self.options.shard)
shard = self.options.shard;
/*
* The root of all service hostnames is formed from the application's
* region and DNS domain.
*/
var service_root = sprintf('%s.%s',
app.metadata.REGION, app.metadata.DNS_DOMAIN);
var service_name = sprintf('%s.%s', self.svcname, service_root);
params.alias = service_name + '-' + inst_uuid.substr(0, 8);
/*
* Prefix with the shard for things that are shardable...
*/
if (['postgres', 'moray'].indexOf(self.svcname) !== -1) {
params.alias = shard + '.' + params.alias;
}
params.tags = {};
params.tags.manta_role = svc.name;
if (self.options.server_uuid)
params.server_uuid = self.options.server_uuid;
if (self.options.image_uuid)
params.image_uuid = self.options.image_uuid;
if (self.options.networks) {
var networks = [];
self.options.networks.forEach(function (token) {
networks.push({ uuid: token });
});
params.networks = networks;
}
var metadata = {};
metadata.DATACENTER = self.config.datacenter_name;
metadata.SERVICE_NAME = service_name;
metadata.SHARD = shard;
if (self.svcname === 'nameservice') {
var len = 1;
if (app.metadata.ZK_SERVERS)
len = app.metadata.ZK_SERVERS.length;
metadata.ZK_ID = len;
}
if (self.svcname === 'postgres') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
metadata.MANATEE_SHARD_PATH = sprintf('/manatee/%s',
metadata.SERVICE_NAME);
}
if (self.svcname === 'moray') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
}
if (self.svcname === 'storage')
|
if (self.svcname === 'webapi' || self.svcname === 'loadbalancer')
metadata.SERVICE_NAME = app.metadata['MANTA_SERVICE'];
if (self.svcname === 'marlin')
params.tags.manta_role = 'compute';
/*
	 * This zone should get its configuration from the local (i.e. same
* datacenter) SAPI instance, as well as use the local UFDS instance.
*/
var config = self.config;
metadata['SAPI_URL'] = config.sapi.url;
metadata['UFDS_URL'] = config.ufds.url;
metadata['UFDS_ROOT_DN'] = config.ufds.bindDN;
metadata['UFDS_ROOT_PW'] = config.ufds.bindPassword;
metadata['SDC_NAMESERVERS'] = self.sdc_app.metadata.ZK_SERVERS;
var queuecb;
async.waterfall([
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
log.debug('putting "storage" zone provision for ' +
'"%s" into the queue', inst_uuid);
dStorageQueue.push(function (_queuecb) {
/*
* When we reach here, we're the only "storage"
* zone deployment that's going on right now.
* Save the queue callback so that we can invoke
* it when we finish deploying to free up the
* queue for someone else.
*/
queuecb = _queuecb;
log.debug('dequeueing "storage" zone ' +
'provision for "%s"', inst_uuid);
subcb();
});
},
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
/*
* The manta_storage_id should be the next available
* number.
*/
var opts = {};
opts.service_uuid = svc.uuid;
opts.include_master = true;
log.info('finding next manta_storage_id');
sapi.listInstances(opts, function (err, insts) {
if (err) {
log.error(err, 'failed to list ' +
'storage instances');
return (subcb(err));
}
/*
* Find the highest-numbered storage id and pick
* the next one.
*/
var mStorageId = pickNextStorageId(
insts, metadata.SERVICE_NAME);
if (mStorageId instanceof Error) {
					log.error(mStorageId);
					return (subcb(mStorageId));
}
metadata.MANTA_STORAGE_ID = mStorageId;
params.tags.manta_storage_id = mStorageId;
subcb();
});
},
function (subcb) {
log.info('locating user script');
var file = sprintf('%s/../scripts/user-script.sh',
path.dirname(__filename));
file = path.resolve(file);
fs.readFile(file, 'ascii', function (err, contents) {
if (err && err['code'] == 'ENOENT') {
log.debug('no user script');
} else if (err) {
log.error(err,
'failed to read user script');
return (subcb(err));
} else {
metadata['user-script'] = contents;
log.debug('read user script from %s',
file);
}
return (subcb(null));
});
},
function (subcb) {
var opts = {};
opts.params = params;
opts.metadata = metadata;
opts.uuid = inst_uuid;
opts.master = true;
log.info({ opts: opts }, 'creating instance');
sapi.createInstance(svc.uuid, opts,
function (err, inst) {
if (err) {
log.error(err, 'failed to create ' +
'instance');
return (subcb(err));
}
log.info({ inst: inst }, 'created instance');
return (subcb(null, inst));
});
}
], function () {
if (queuecb) {
log.debug('done with "storage" zone ' +
'provision for "%s"', inst_uuid);
setTimeout(queuecb, 0);
}
cb.apply(null, Array.prototype.slice.call(arguments));
});
}
/*
* Given a list of SAPI instances for storage nodes, return an unused Manta
* storage id. If we're at all unsure, we return an error rather than
* potentially returning a conflicting name.
*/
function pickNextStorageId(instances, svcname)
{
var max, inst, instname, numpart;
var i, p, n;
var err = null;
max = 0;
for (i = 0; i < instances.length; i++) {
inst = instances[i];
instname = inst.metadata.MANTA_STORAGE_ID;
if (typeof (instname) != 'string') {
err = new VError('instance "%s": missing or ' +
'invalid MANTA_STORAGE_ID metadata', inst.uuid);
break;
}
p = instname.indexOf('.' + svcname);
if (p == -1 || p === 0) {
err = new VError('instance "%s": instance name ' +
'("%s") does not contain expected suffix (".%s")',
inst.uuid, instname, svcname);
break;
}
numpart = instname.substr(0, p);
n = parseInt(numpart, 10);
if (isNaN(n) || n < 1) {
err = new VError('instance "%s": instance name ' +
'("%s") does not start with a positive integer',
inst.uuid, instname);
break;
}
max = Math.max(max, n);
}
if (err !== null) {
return (new VError(err,
'failed to allocate MANTA_STORAGE_ID'));
}
return (sprintf('%d.%s', max + 1, svcname));
}
|
{
metadata.SERVICE_NAME = sprintf('stor.%s', service_root);
}
|
conditional_block
|
deploy.js
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
* Copyright (c) 2014, Joyent, Inc.
*/
/*
* lib/deploy.js: common functions for deploying instances of Manta zones
*/
var assert = require('assert-plus');
var async = require('async');
var fs = require('fs');
var node_uuid = require('node-uuid');
var path = require('path');
var util = require('util');
var vasync = require('vasync');
var sprintf = util.format;
var common = require('./common');
var mod_ssl = require('./ssl');
var EventEmitter = require('events').EventEmitter;
var VError = require('verror').VError;
exports.deploy = deploy;
exports.createDeployer = createDeployer;
/*
* Storage zone deployments cannot be done concurrently, so we funnel all
* storage zone deployments through a vasync Queue with concurrency 1. This is
* global to the process. Obviously, even that's not sufficient when there are
* multiple processes involved, but it helps in the important case of using
* manta-adm to deploy multiple storage zones. See MANTA-2185 for details.
*/
var dStorageQueue = vasync.queue(
function (func, callback) { func(callback); }, 1);
/*
* Deploy a new instance of a Manta service. This is a one-shot method that
* creates a Deployer and then deploys a zone. If you're deploying more than
* one zone, you're better off creating your own deployer and then calling
* "deploy" as many times as you want. Arguments:
*
* options an object with optional properties:
*
* networks array of network names (strings) that this zone should
* be provisioned with
*
* server_uuid server uuid (string) on which to provision this zone
*
* svcname the friendly name of the service to be deployed
* (e.g., "nameservice", "loadbalancer", "moray", etc.)
*
* log a bunyan logger
*
* callback invoked upon completion as callback([err])
*/
function deploy(options, svcname, ilog, callback)
{
var deployer = createDeployer(ilog);
deployer.on('error', function (err) { callback(err); });
deployer.on('ready', function () {
deployer.deploy(options, svcname, callback);
});
}
/*
* Creates a new Deployer, which can be used to deploy several Manta zones.
* This operation initializes connections to various SDC services and emits
* "ready" when ready, or "error" if something goes wrong.
*/
function createDeployer(log)
{
return (new Deployer(log));
}
/*
* A single Deployer instance basically just keeps its own connections to
* various SDC services and a cached copy of the "Manta" and "SDC" applications.
* For consumers that want to deploy several zones, this is more efficient than
* reinitializing those connections each time.
*/
function Deployer(ilog)
{
var self = this;
self.log = ilog;
EventEmitter.call(this);
async.waterfall([
function initClients(cb) {
var log = self.log;
log.info('initing sdc clients');
common.initSdcClients.call(self, cb);
},
function getPoseidon(cb) {
var log = self.log;
log.info('getting poseidon user');
getUser(self, 'poseidon', function (err, user) {
self.poseidon = user;
return (cb(err));
});
},
function loadSdcApplication(cb) {
var sapi = self.SAPI;
var log = self.log;
var search_opts = { 'name': 'sdc' };
log.info('finding "sdc" application');
sapi.listApplications(search_opts,
function (err, apps) {
if (err) {
log.error(err,
'failed to list applications');
return (cb(err));
}
if (apps.length === 0) {
var msg = 'application "sdc" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.sdc_app = apps[0];
return (cb(null));
});
},
function getMantaApplication(cb) {
var log = self.log;
log.info('finding "manta" application');
common.getMantaApplication.call(self,
self.poseidon.uuid, function (err, app) {
if (err)
return (cb(err));
if (!app) {
var msg =
'application "manta" not found';
log.error(msg);
return (cb(new Error(msg)));
}
self.manta_app = app;
return (cb());
});
},
function getMantaServices(cb) {
var log, params;
log = self.log;
params = {
'include_master': true,
'application_uuid': self.manta_app['uuid']
};
log.info(params,
'fetching "manta" application services');
self.SAPI.listServices(params, function (err, svcs) {
if (err) {
cb(err);
return;
}
self.services = svcs;
cb();
});
},
function checkShardConfigs(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var missing = [];
var message, err;
log.info('checking shard configuration parameters');
if (typeof (md[common.MARLIN_SHARD]) != 'string' ||
md[common.MARLIN_SHARD].length === 0) {
missing.push(common.MARLIN_SHARD);
}
if (typeof (md[common.STORAGE_SHARD]) != 'string' ||
md[common.STORAGE_SHARD].length === 0) {
missing.push(common.STORAGE_SHARD);
}
if (!Array.isArray(md[common.INDEX_SHARDS]) ||
md[common.INDEX_SHARDS].length === 0) {
missing.push(common.INDEX_SHARDS);
}
if (missing.length === 0) {
setImmediate(cb);
return;
}
message = 'cannot deploy zones before shards have ' +
'been configured (see manta-shardadm)\n';
message += 'details: metadata properties missing or ' +
'not valid: ' + missing.join(', ');
err = new Error(message);
log.error(err);
setImmediate(cb, err);
},
function checkHashRingConfig(cb) {
var log = self.log;
var app = self.manta_app;
var md = app.metadata;
var message, err;
			log.info('checking hash ring configuration parameters');
if (typeof (md[common.HASH_RING_IMAGE]) != 'string' ||
md[common.HASH_RING_IMAGE].length === 0 ||
typeof (md[common.HASH_RING_IMGAPI_SERVICE]) !=
'string' ||
md[common.HASH_RING_IMGAPI_SERVICE].length === 0) {
message = 'cannot deploy zones before hash ' +
'ring topology has been created ' +
'(see manta-create-topology.sh)';
err = new Error(message);
log.error(err);
setImmediate(cb, err);
} else {
setImmediate(cb);
}
}
], function (err) {
if (err)
self.emit('error', err);
else
self.emit('ready');
});
}
util.inherits(Deployer, EventEmitter);
Deployer.prototype.close = function (cb)
{
common.finiSdcClients.call(this, cb);
};
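/*
 * Illustrative sketch (not part of the original source): deploying several
 * zones with a single Deployer and then closing it, as the deploy() comment
 * above recommends.  Service names and the logger are hypothetical and error
 * handling is elided.
 *
 *     var deployer = createDeployer(log);
 *     deployer.on('ready', function () {
 *         deployer.deploy({}, 'nameservice', function (err, zone_uuid) {
 *             deployer.deploy({}, 'moray', function (err2) {
 *                 deployer.close(function () {});
 *             });
 *         });
 *     });
 */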
/*
* Actually deploy a Manta service zone for service "svcname". For argument
* details, see deploy() above.
*/
Deployer.prototype.deploy = function (options, svcname, callback)
{
var self, allservices;
self = {};
for (var k in this)
self[k] = this[k];
self.options = options;
self.zone_uuid = node_uuid.v4();
self.svcname = svcname;
allservices = this.services;
async.waterfall([
function getMantaService(cb) {
var log = self.log;
var svcs = allservices.filter(
function (s) { return (s['name'] == svcname); });
if (svcs.length < 1) {
var t = 'Service "%s" not found. ' +
'Did you run manta-init? If so, ' +
'is it a valid service?';
var message = sprintf(t, self.svcname);
var e = new Error(message);
e.message = message;
log.error(message);
return (cb(e));
}
self.service = svcs[0];
log.debug({ svc: self.service },
'found %s service', self.svcname);
return (cb(null));
},
function ensureZk(cb) {
var app = self.manta_app;
var log = self.log;
if (self.svcname === 'nameservice') {
return (cb(null));
}
log.info('ensuring ZK servers have been deployed');
if (!app.metadata || !app.metadata['ZK_SERVERS'] ||
app.metadata['ZK_SERVERS'].length < 1) {
var message = 'zk servers missing or empty ' +
'in the manta application. Has the ' +
'nameservice been deployed yet?';
log.error({
zkServers: app.metadata['ZK_SERVERS']
}, message);
var e = new Error(message);
e.message = message;
return (cb(e));
}
return (cb(null));
},
function generateSSLCertificate(cb) {
var log = self.log;
var sapi = self.SAPI;
var app = self.manta_app;
var svc = self.service;
if (svc.name !== 'loadbalancer') {
log.info('service "%s" doesn\'t need an ' +
'SSL certificate', svc.name);
return (cb(null));
}
if (svc.metadata['SSL_CERTIFICATE']) {
log.info('SSL certificate already present');
return (cb(null));
}
log.info('generating an ssl certificate');
var file = sprintf('/tmp/cert.%d', process.pid);
var svc_name = app.metadata['MANTA_SERVICE'];
async.waterfall([
function (subcb) {
mod_ssl.generateCertificate.call(self,
file, svc_name, subcb);
},
function (subcb) {
fs.readFile(file, 'ascii',
function (err, contents) {
if (err) {
log.error(err,
'failed to ' +
'read SSL cert');
} else {
log.debug(
'read SSL cert');
}
fs.unlink(file, function (_) {
return (subcb(
err, contents));
});
});
},
function (cert, subcb) {
assert.string(cert, 'cert');
assert.func(subcb, 'subcb');
var opts = {};
opts.metadata = {};
opts.metadata['SSL_CERTIFICATE'] = cert;
sapi.updateService(svc.uuid, opts,
function (err) {
if (err) {
log.error(err,
'failed to ' +
'save SSL cert');
return (subcb(err));
}
log.debug('saved SSL cert');
return (subcb(null));
});
}
], cb);
},
function reserveIP(cb) {
if (self.svcname !== 'nameservice')
return (cb(null, {}));
// XXX I can really do this after it's deployed, no need
// to reserve before provisioning.
var log = self.log;
log.info('reserving nic');
reserveAndGetNic(self, 'manta', self.zone_uuid,
self.poseidon.uuid,
function (err, nic) {
self.nic = nic;
cb(err, nic);
});
},
function updateZKServers(nic, cb) {
var sapi = self.SAPI;
var log = self.log;
if (self.svcname !== 'nameservice')
return (cb(null));
log.info('updating the list of zk servers in the ' +
'sapi manta application');
assert.object(nic, 'nic');
assert.string(nic.ip, 'nic.ip');
var metadata = self.manta_app.metadata;
if (!metadata)
metadata = {};
if (!metadata.ZK_SERVERS)
metadata.ZK_SERVERS = [];
metadata.ZK_SERVERS.push({
host: nic.ip,
port: 2181
});
var len = metadata.ZK_SERVERS.length;
metadata.ZK_SERVERS[len - 1].num = len;
for (var ii = 0; ii < len - 1; ii++)
delete metadata.ZK_SERVERS[ii].last;
metadata.ZK_SERVERS[len - 1].last = true;
var opts = {};
opts.metadata = metadata;
sapi.updateApplication(self.manta_app.uuid, opts,
function (err, app) {
if (!err)
self.manta_app = app;
return (cb(err));
});
},
function ensureComputeId(cb) {
if (self.svcname !== 'storage') {
return (cb(null));
}
var log = self.log;
var serverUuid;
log.debug('Ensuring that the server has a compute id');
function
|
() {
log.debug({
serverUuid: serverUuid
}, 'server uuid for looking up compute id');
var m = 'Error getting compute id';
common.getOrCreateComputeId.call(
self, serverUuid, function (err, cid) {
if (err) {
return (cb(err));
}
if (!cid) {
var e = new Error(m);
e.message = m;
return (cb(e));
}
log.debug({
computeId: cid
}, 'found compute id');
return (cb(null));
});
}
if (self.options.server_uuid) {
serverUuid = self.options.server_uuid;
getComputeId();
} else {
common.findServerUuid.call(
self, function (err, id) {
if (err) {
return (cb(err));
}
serverUuid = id;
getComputeId();
});
}
},
function deployMantaInstance(cb) {
createInstance.call(null, self,
self.manta_app, self.service, function (err, inst) {
if (err)
return (cb(err));
self.instance = inst;
return (cb(null));
});
},
function configureMarlinComputeZone(cb) {
var cnapi = self.CNAPI;
var vmapi = self.VMAPI;
var log = self.log;
var params = { 'uuid': self.instance.uuid };
if (self.svcname !== 'marlin')
return (cb(null));
log.info('configuring compute zone, ' +
'getting vmapi object');
vmapi.getVm(params, function (err, vm) {
if (err) {
log.error(err, 'failed to get zone ' +
'"%s" after instance created',
params.uuid);
return (cb(err));
}
var server = vm.server_uuid;
var script = sprintf(
'%s/tools/mrdeploycompute %s',
common.MARLIN_DIR, params.uuid);
log.info({
server: server,
script: script
}, 'running script to configure compute zone');
cnapi.commandExecute(server, script,
function (suberr) {
if (suberr) {
log.error(suberr, 'failed to ' +
'configure compute zone %s',
params.uuid);
				return (cb(suberr));
}
log.info('configured compute zone %s',
params.uuid);
return (cb(null));
});
});
}
], function (err) {
callback(err, self.zone_uuid);
});
};
/*
* Undeploy a SAPI instance.
*/
Deployer.prototype.undeploy = function (instance, callback)
{
var self = this;
var svcname, cnid;
async.waterfall([
function getInstanceType(cb) {
self.log.info('fetching SAPI instance', instance);
self.SAPI.getInstance(instance, function (err, inst) {
var svcs;
if (!err) {
svcs = self.services.filter(
function (s) {
return (s['uuid'] ==
inst['service_uuid']);
});
if (svcs.length === 0) {
err = new VError(
'zone "%s" has ' +
'unexpected service "%s"',
instance,
inst['service_uuid']);
} else {
svcname = svcs[0]['name'];
}
}
cb(err);
});
},
function getVmInfo(cb) {
var params = { 'uuid': instance };
self.log.info(params, 'fetching VMAPI details');
self.VMAPI.getVm(params, function (err, vm) {
if (err) {
cb(new VError(err,
'failed to get "%s" from VMAPI',
instance));
return;
}
cnid = vm['server_uuid'];
cb();
});
},
function rmMarlinZone(cb) {
if (svcname != 'marlin') {
cb();
return;
}
var log = self.log;
var scriptpath = sprintf('%s/tools/mrzoneremove %s',
common.MARLIN_DIR, instance);
log.info({
'server': cnid,
'script': scriptpath
}, 'running script to remove compute zone');
self.CNAPI.commandExecute(cnid, scriptpath,
function (err) {
if (err) {
err = new VError(err,
'failed to remove compute zone ' +
'"%s"', instance);
log.error(err);
cb(err);
return;
}
log.info('removed compute zone "%s"', instance);
cb();
});
},
function sapiDelete(cb) {
self.log.info('deleting SAPI instance', instance);
self.SAPI.deleteInstance(instance, cb);
}
], function (err) {
self.log.info('undeploy complete', instance);
callback(err);
});
};
/*
* Reprovision a SAPI instance.
*/
Deployer.prototype.reprovision = function (instance, image_uuid, callback)
{
this.SAPI.reprovisionInstance(instance, image_uuid, callback);
};
// -- User management
function getUser(self, login, cb) {
var ufds = self.UFDS;
var log = self.log;
assert.string(login, 'login');
ufds.getUser(login, function (err, ret) {
if (err)
log.error(err, 'failed to get %s', login);
return (cb(err, ret));
});
}
// -- Network management
function reserveAndGetNic(self, name, zone_uuid, owner_uuid, cb) {
var log = self.log;
var napi = self.NAPI;
assert.string(name, 'name');
assert.string(zone_uuid, 'zone_uuid');
assert.string(owner_uuid, 'owner_uuid');
var opts = {
belongs_to_uuid: zone_uuid,
owner_uuid: owner_uuid,
belongs_to_type: 'zone'
};
log.info({ opts: opts }, 'provisioning NIC');
async.waterfall([
function (subcb) {
napi.listNetworks({ name: name },
function (err, networks) {
if (err) {
log.error(err,
'failed to list networks');
return (subcb(err));
}
log.debug({ network: networks[0] },
'found network %s', name);
return (subcb(null, networks[0].uuid));
});
},
function (network_uuid, subcb) {
napi.provisionNic(network_uuid, opts,
function (err, nic) {
if (err) {
log.error(err,
'failed to provision NIC');
return (cb(err));
}
log.info({ nic: nic }, 'provisioned NIC');
return (subcb(null, nic));
});
}
], cb);
}
// -- SAPI functions
function createInstance(self, app, svc, cb) {
var sapi = self.SAPI;
var log = self.log;
assert.string(self.config.datacenter_name,
'self.config.datacenter_name');
assert.object(app, 'app');
assert.object(app.metadata, 'app.metadata');
assert.string(app.metadata.REGION, 'app.metadata.REGION');
assert.string(app.metadata.DNS_DOMAIN, 'app.metadata.DNS_DOMAIN');
assert.object(svc, 'svc');
assert.string(svc.name, 'svc.name');
assert.string(svc.uuid, 'svc.uuid');
var inst_uuid = self.zone_uuid ? self.zone_uuid : node_uuid.v4();
var params = {};
/*
* Traditionally we've used numeric shards (e.g. 1.moray, 2.moray, etc.)
* but there's no reason they have to be numbers. We could have
* 1-marlin.moray, marlin.moray, or anything similar.
*/
var shard = '1';
if (self.options.shard)
shard = self.options.shard;
/*
* The root of all service hostnames is formed from the application's
* region and DNS domain.
*/
var service_root = sprintf('%s.%s',
app.metadata.REGION, app.metadata.DNS_DOMAIN);
var service_name = sprintf('%s.%s', self.svcname, service_root);
params.alias = service_name + '-' + inst_uuid.substr(0, 8);
/*
* Prefix with the shard for things that are shardable...
*/
if (['postgres', 'moray'].indexOf(self.svcname) !== -1) {
params.alias = shard + '.' + params.alias;
}
params.tags = {};
params.tags.manta_role = svc.name;
if (self.options.server_uuid)
params.server_uuid = self.options.server_uuid;
if (self.options.image_uuid)
params.image_uuid = self.options.image_uuid;
if (self.options.networks) {
var networks = [];
self.options.networks.forEach(function (token) {
networks.push({ uuid: token });
});
params.networks = networks;
}
var metadata = {};
metadata.DATACENTER = self.config.datacenter_name;
metadata.SERVICE_NAME = service_name;
metadata.SHARD = shard;
if (self.svcname === 'nameservice') {
var len = 1;
if (app.metadata.ZK_SERVERS)
len = app.metadata.ZK_SERVERS.length;
metadata.ZK_ID = len;
}
if (self.svcname === 'postgres') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
metadata.MANATEE_SHARD_PATH = sprintf('/manatee/%s',
metadata.SERVICE_NAME);
}
if (self.svcname === 'moray') {
metadata.SERVICE_NAME = sprintf('%s.moray.%s',
shard, service_root);
}
if (self.svcname === 'storage') {
metadata.SERVICE_NAME = sprintf('stor.%s', service_root);
}
if (self.svcname === 'webapi' || self.svcname === 'loadbalancer')
metadata.SERVICE_NAME = app.metadata['MANTA_SERVICE'];
if (self.svcname === 'marlin')
params.tags.manta_role = 'compute';
/*
	 * This zone should get its configuration from the local (i.e. same
* datacenter) SAPI instance, as well as use the local UFDS instance.
*/
var config = self.config;
metadata['SAPI_URL'] = config.sapi.url;
metadata['UFDS_URL'] = config.ufds.url;
metadata['UFDS_ROOT_DN'] = config.ufds.bindDN;
metadata['UFDS_ROOT_PW'] = config.ufds.bindPassword;
metadata['SDC_NAMESERVERS'] = self.sdc_app.metadata.ZK_SERVERS;
var queuecb;
async.waterfall([
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
log.debug('putting "storage" zone provision for ' +
'"%s" into the queue', inst_uuid);
dStorageQueue.push(function (_queuecb) {
/*
* When we reach here, we're the only "storage"
* zone deployment that's going on right now.
* Save the queue callback so that we can invoke
* it when we finish deploying to free up the
* queue for someone else.
*/
queuecb = _queuecb;
log.debug('dequeueing "storage" zone ' +
'provision for "%s"', inst_uuid);
subcb();
});
},
function (subcb) {
if (svc.name !== 'storage')
return (subcb(null));
/*
* The manta_storage_id should be the next available
* number.
*/
var opts = {};
opts.service_uuid = svc.uuid;
opts.include_master = true;
log.info('finding next manta_storage_id');
sapi.listInstances(opts, function (err, insts) {
if (err) {
log.error(err, 'failed to list ' +
'storage instances');
return (subcb(err));
}
/*
* Find the highest-numbered storage id and pick
* the next one.
*/
var mStorageId = pickNextStorageId(
insts, metadata.SERVICE_NAME);
if (mStorageId instanceof Error) {
					log.error(mStorageId);
					return (subcb(mStorageId));
}
metadata.MANTA_STORAGE_ID = mStorageId;
params.tags.manta_storage_id = mStorageId;
subcb();
});
},
function (subcb) {
log.info('locating user script');
var file = sprintf('%s/../scripts/user-script.sh',
path.dirname(__filename));
file = path.resolve(file);
fs.readFile(file, 'ascii', function (err, contents) {
if (err && err['code'] == 'ENOENT') {
log.debug('no user script');
} else if (err) {
log.error(err,
'failed to read user script');
return (subcb(err));
} else {
metadata['user-script'] = contents;
log.debug('read user script from %s',
file);
}
return (subcb(null));
});
},
function (subcb) {
var opts = {};
opts.params = params;
opts.metadata = metadata;
opts.uuid = inst_uuid;
opts.master = true;
log.info({ opts: opts }, 'creating instance');
sapi.createInstance(svc.uuid, opts,
function (err, inst) {
if (err) {
log.error(err, 'failed to create ' +
'instance');
return (subcb(err));
}
log.info({ inst: inst }, 'created instance');
return (subcb(null, inst));
});
}
], function () {
if (queuecb) {
log.debug('done with "storage" zone ' +
'provision for "%s"', inst_uuid);
setTimeout(queuecb, 0);
}
cb.apply(null, Array.prototype.slice.call(arguments));
});
}
/*
* Given a list of SAPI instances for storage nodes, return an unused Manta
* storage id. If we're at all unsure, we return an error rather than
* potentially returning a conflicting name.
*/
function pickNextStorageId(instances, svcname)
{
var max, inst, instname, numpart;
var i, p, n;
var err = null;
max = 0;
for (i = 0; i < instances.length; i++) {
inst = instances[i];
instname = inst.metadata.MANTA_STORAGE_ID;
if (typeof (instname) != 'string') {
err = new VError('instance "%s": missing or ' +
'invalid MANTA_STORAGE_ID metadata', inst.uuid);
break;
}
p = instname.indexOf('.' + svcname);
if (p == -1 || p === 0) {
err = new VError('instance "%s": instance name ' +
'("%s") does not contain expected suffix (".%s")',
inst.uuid, instname, svcname);
break;
}
numpart = instname.substr(0, p);
n = parseInt(numpart, 10);
if (isNaN(n) || n < 1) {
err = new VError('instance "%s": instance name ' +
'("%s") does not start with a positive integer',
inst.uuid, instname);
break;
}
max = Math.max(max, n);
}
if (err !== null) {
return (new VError(err,
'failed to allocate MANTA_STORAGE_ID'));
}
return (sprintf('%d.%s', max + 1, svcname));
}
|
getComputeId
|
identifier_name
|
_changedsince.py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Person/_ChangedSince.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._changedsincebase import ChangedSinceBase
#-------------------------------------------------------------------------
#
# ChangedSince
#
#-------------------------------------------------------------------------
class
|
(ChangedSinceBase):
"""Rule that checks for persons changed since a specific time."""
labels = [ _('Changed after:'), _('but before:') ]
name = _('Persons changed after <date time>')
description = _("Matches person records changed after a specified "
"date-time (yyyy-mm-dd hh:mm:ss) or in the range, if a second "
"date-time is given.")
|
ChangedSince
|
identifier_name
|
_changedsince.py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Person/_ChangedSince.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
|
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._changedsincebase import ChangedSinceBase
#-------------------------------------------------------------------------
#
# ChangedSince
#
#-------------------------------------------------------------------------
class ChangedSince(ChangedSinceBase):
"""Rule that checks for persons changed since a specific time."""
labels = [ _('Changed after:'), _('but before:') ]
name = _('Persons changed after <date time>')
description = _("Matches person records changed after a specified "
"date-time (yyyy-mm-dd hh:mm:ss) or in the range, if a second "
"date-time is given.")
|
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
|
random_line_split
|
_changedsince.py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Person/_ChangedSince.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._changedsincebase import ChangedSinceBase
#-------------------------------------------------------------------------
#
# ChangedSince
#
#-------------------------------------------------------------------------
class ChangedSince(ChangedSinceBase):
|
"""Rule that checks for persons changed since a specific time."""
labels = [ _('Changed after:'), _('but before:') ]
name = _('Persons changed after <date time>')
description = _("Matches person records changed after a specified "
"date-time (yyyy-mm-dd hh:mm:ss) or in the range, if a second "
"date-time is given.")
|
identifier_body
|
|
redux-helpers.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.provider = undefined;
exports.shallowEqual = shallowEqual;
exports.observeStore = observeStore;
exports.configureStore = configureStore;
var _redux = require('redux');
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || _redux.compose;
function shallowEqual(objA, objB) {
if (objA === objB) return true;
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) return false;
// Test for A's keys different from B.
var hasOwn = Object.prototype.hasOwnProperty;
for (var i = 0; i < keysA.length; i++) {
if (!hasOwn.call(objB, keysA[i]) || objA[keysA[i]] !== objB[keysA[i]]) {
return false;
}
}
return true;
}
function observeStore(store, currState, select, onChange) {
if (typeof onChange !== 'function') return null;
var currentState = currState || {};
function handleChange() {
var nextState = select(store.getState());
if (!shallowEqual(currentState, nextState)) {
var previousState = currentState;
currentState = nextState;
onChange(currentState, previousState);
}
}
var unsubscribe = store.subscribe(handleChange);
handleChange();
return unsubscribe;
}
var provider = exports.provider = {
set store(store) {
this._store = store;
},
get store() {
return this._store;
}
};
function configureStore(reducer, preloadedState)
|
//# sourceMappingURL=redux-helpers.js.map
|
{
var middleware = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
return (0, _redux.createStore)(reducer, preloadedState, composeEnhancers(_redux.applyMiddleware.apply(undefined, _toConsumableArray(middleware))));
}
|
identifier_body
|
redux-helpers.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.provider = undefined;
exports.shallowEqual = shallowEqual;
exports.observeStore = observeStore;
exports.configureStore = configureStore;
var _redux = require('redux');
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || _redux.compose;
function shallowEqual(objA, objB) {
if (objA === objB) return true;
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) return false;
// Test for A's keys different from B.
var hasOwn = Object.prototype.hasOwnProperty;
for (var i = 0; i < keysA.length; i++) {
if (!hasOwn.call(objB, keysA[i]) || objA[keysA[i]] !== objB[keysA[i]]) {
return false;
}
}
return true;
}
function observeStore(store, currState, select, onChange) {
if (typeof onChange !== 'function') return null;
var currentState = currState || {};
function handleChange() {
var nextState = select(store.getState());
if (!shallowEqual(currentState, nextState)) {
var previousState = currentState;
currentState = nextState;
onChange(currentState, previousState);
}
}
var unsubscribe = store.subscribe(handleChange);
handleChange();
return unsubscribe;
|
}
var provider = exports.provider = {
set store(store) {
this._store = store;
},
get store() {
return this._store;
}
};
function configureStore(reducer, preloadedState) {
var middleware = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
return (0, _redux.createStore)(reducer, preloadedState, composeEnhancers(_redux.applyMiddleware.apply(undefined, _toConsumableArray(middleware))));
}
//# sourceMappingURL=redux-helpers.js.map
|
random_line_split
|
|
redux-helpers.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.provider = undefined;
exports.shallowEqual = shallowEqual;
exports.observeStore = observeStore;
exports.configureStore = configureStore;
var _redux = require('redux');
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || _redux.compose;
function shallowEqual(objA, objB) {
if (objA === objB) return true;
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) return false;
// Test for A's keys different from B.
var hasOwn = Object.prototype.hasOwnProperty;
for (var i = 0; i < keysA.length; i++) {
if (!hasOwn.call(objB, keysA[i]) || objA[keysA[i]] !== objB[keysA[i]]) {
return false;
}
}
return true;
}
function observeStore(store, currState, select, onChange) {
if (typeof onChange !== 'function') return null;
var currentState = currState || {};
function handleChange() {
var nextState = select(store.getState());
if (!shallowEqual(currentState, nextState))
|
}
var unsubscribe = store.subscribe(handleChange);
handleChange();
return unsubscribe;
}
var provider = exports.provider = {
set store(store) {
this._store = store;
},
get store() {
return this._store;
}
};
function configureStore(reducer, preloadedState) {
var middleware = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
return (0, _redux.createStore)(reducer, preloadedState, composeEnhancers(_redux.applyMiddleware.apply(undefined, _toConsumableArray(middleware))));
}
//# sourceMappingURL=redux-helpers.js.map
|
{
var previousState = currentState;
currentState = nextState;
onChange(currentState, previousState);
}
|
conditional_block
|
redux-helpers.js
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.provider = undefined;
exports.shallowEqual = shallowEqual;
exports.observeStore = observeStore;
exports.configureStore = configureStore;
var _redux = require('redux');
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || _redux.compose;
function shallowEqual(objA, objB) {
if (objA === objB) return true;
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) return false;
// Test for A's keys different from B.
var hasOwn = Object.prototype.hasOwnProperty;
for (var i = 0; i < keysA.length; i++) {
if (!hasOwn.call(objB, keysA[i]) || objA[keysA[i]] !== objB[keysA[i]]) {
return false;
}
}
return true;
}
function observeStore(store, currState, select, onChange) {
if (typeof onChange !== 'function') return null;
var currentState = currState || {};
function
|
() {
var nextState = select(store.getState());
if (!shallowEqual(currentState, nextState)) {
var previousState = currentState;
currentState = nextState;
onChange(currentState, previousState);
}
}
var unsubscribe = store.subscribe(handleChange);
handleChange();
return unsubscribe;
}
var provider = exports.provider = {
set store(store) {
this._store = store;
},
get store() {
return this._store;
}
};
function configureStore(reducer, preloadedState) {
var middleware = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
return (0, _redux.createStore)(reducer, preloadedState, composeEnhancers(_redux.applyMiddleware.apply(undefined, _toConsumableArray(middleware))));
}
//# sourceMappingURL=redux-helpers.js.map
|
handleChange
|
identifier_name
|
ssh_session.py
|
#!/usr/bin/env python
#
# Eric S. Raymond
#
# Greatly modified by Nigel W. Moriarty
# April 2003
#
from pexpect import *
import os, sys
import getpass
import time
class ssh_session:
"Session with extra state including the password to be used."
def __init__(self, user, host, password=None, verbose=0):
self.user = user
self.host = host
self.verbose = verbose
self.password = password
self.keys = [
'authenticity',
'assword:',
'@@@@@@@@@@@@',
'Command not found.',
EOF,
]
self.f = open('ssh.out','w')
def __repr__(self):
outl = 'class :'+self.__class__.__name__
for attr in self.__dict__:
if attr == 'password':
outl += '\n\t'+attr+' : '+'*'*len(self.password)
else:
outl += '\n\t'+attr+' : '+str(getattr(self, attr))
return outl
def __exec(self, command):
"Execute a command on the remote host. Return the output."
child = spawn(command,
#timeout=10,
)
if self.verbose:
sys.stderr.write("-> " + command + "\n")
seen = child.expect(self.keys)
self.f.write(str(child.before) + str(child.after)+'\n')
if seen == 0:
child.sendline('yes')
seen = child.expect(self.keys)
if seen == 1:
if not self.password:
self.password = getpass.getpass('Remote password: ')
child.sendline(self.password)
child.readline()
time.sleep(5)
# Added to allow the background running of remote process
if not child.isalive():
seen = child.expect(self.keys)
if seen == 2:
lines = child.readlines()
self.f.writelines(lines)
if self.verbose:
|
sys.stderr.write("<- " + child.before + "|\n")
try:
self.f.write(str(child.before) + str(child.after)+'\n')
except:
pass
self.f.close()
return child.before
def ssh(self, command):
return self.__exec("ssh -l %s %s \"%s\"" \
% (self.user,self.host,command))
def scp(self, src, dst):
return self.__exec("scp %s %s@%s:%s" \
% (src, self.user, self.host, dst))
def exists(self, file):
"Retrieve file permissions of specified remote file."
seen = self.ssh("/bin/ls -ld %s" % file)
if seen.find("No such file") > -1:
return None # File doesn't exist
else:
return seen.split()[0] # Return permission field of listing.
|
random_line_split
|
|
ssh_session.py
|
#!/usr/bin/env python
#
# Eric S. Raymond
#
# Greatly modified by Nigel W. Moriarty
# April 2003
#
from pexpect import *
import os, sys
import getpass
import time
class ssh_session:
"Session with extra state including the password to be used."
def __init__(self, user, host, password=None, verbose=0):
self.user = user
self.host = host
self.verbose = verbose
self.password = password
self.keys = [
'authenticity',
'assword:',
'@@@@@@@@@@@@',
'Command not found.',
EOF,
]
self.f = open('ssh.out','w')
def __repr__(self):
outl = 'class :'+self.__class__.__name__
for attr in self.__dict__:
if attr == 'password':
outl += '\n\t'+attr+' : '+'*'*len(self.password)
else:
outl += '\n\t'+attr+' : '+str(getattr(self, attr))
return outl
def __exec(self, command):
"Execute a command on the remote host. Return the output."
child = spawn(command,
#timeout=10,
)
if self.verbose:
sys.stderr.write("-> " + command + "\n")
seen = child.expect(self.keys)
self.f.write(str(child.before) + str(child.after)+'\n')
if seen == 0:
child.sendline('yes')
seen = child.expect(self.keys)
if seen == 1:
if not self.password:
self.password = getpass.getpass('Remote password: ')
child.sendline(self.password)
child.readline()
time.sleep(5)
# Added to allow the background running of remote process
if not child.isalive():
seen = child.expect(self.keys)
if seen == 2:
lines = child.readlines()
self.f.writelines(lines)
if self.verbose:
|
try:
self.f.write(str(child.before) + str(child.after)+'\n')
except:
pass
self.f.close()
return child.before
def ssh(self, command):
return self.__exec("ssh -l %s %s \"%s\"" \
% (self.user,self.host,command))
def scp(self, src, dst):
return self.__exec("scp %s %s@%s:%s" \
% (src, self.user, self.host, dst))
def exists(self, file):
"Retrieve file permissions of specified remote file."
seen = self.ssh("/bin/ls -ld %s" % file)
if seen.find("No such file") > -1:
return None # File doesn't exist
else:
return seen.split()[0] # Return permission field of listing.
|
sys.stderr.write("<- " + child.before + "|\n")
|
conditional_block
|
ssh_session.py
|
#!/usr/bin/env python
#
# Eric S. Raymond
#
# Greatly modified by Nigel W. Moriarty
# April 2003
#
from pexpect import *
import os, sys
import getpass
import time
class ssh_session:
"Session with extra state including the password to be used."
def __init__(self, user, host, password=None, verbose=0):
self.user = user
self.host = host
self.verbose = verbose
self.password = password
self.keys = [
'authenticity',
'assword:',
'@@@@@@@@@@@@',
'Command not found.',
EOF,
]
self.f = open('ssh.out','w')
def __repr__(self):
outl = 'class :'+self.__class__.__name__
for attr in self.__dict__:
if attr == 'password':
outl += '\n\t'+attr+' : '+'*'*len(self.password)
else:
outl += '\n\t'+attr+' : '+str(getattr(self, attr))
return outl
def __exec(self, command):
"Execute a command on the remote host. Return the output."
child = spawn(command,
#timeout=10,
)
if self.verbose:
sys.stderr.write("-> " + command + "\n")
seen = child.expect(self.keys)
self.f.write(str(child.before) + str(child.after)+'\n')
if seen == 0:
child.sendline('yes')
seen = child.expect(self.keys)
if seen == 1:
if not self.password:
self.password = getpass.getpass('Remote password: ')
child.sendline(self.password)
child.readline()
time.sleep(5)
# Added to allow the background running of remote process
if not child.isalive():
seen = child.expect(self.keys)
if seen == 2:
lines = child.readlines()
self.f.writelines(lines)
if self.verbose:
sys.stderr.write("<- " + child.before + "|\n")
try:
self.f.write(str(child.before) + str(child.after)+'\n')
except:
pass
self.f.close()
return child.before
def ssh(self, command):
return self.__exec("ssh -l %s %s \"%s\"" \
% (self.user,self.host,command))
def scp(self, src, dst):
return self.__exec("scp %s %s@%s:%s" \
% (src, self.user, self.host, dst))
def
|
(self, file):
"Retrieve file permissions of specified remote file."
seen = self.ssh("/bin/ls -ld %s" % file)
if seen.find("No such file") > -1:
return None # File doesn't exist
else:
return seen.split()[0] # Return permission field of listing.
|
exists
|
identifier_name
|
ssh_session.py
|
#!/usr/bin/env python
#
# Eric S. Raymond
#
# Greatly modified by Nigel W. Moriarty
# April 2003
#
from pexpect import *
import os, sys
import getpass
import time
class ssh_session:
"Session with extra state including the password to be used."
def __init__(self, user, host, password=None, verbose=0):
|
def __repr__(self):
outl = 'class :'+self.__class__.__name__
for attr in self.__dict__:
if attr == 'password':
outl += '\n\t'+attr+' : '+'*'*len(self.password)
else:
outl += '\n\t'+attr+' : '+str(getattr(self, attr))
return outl
def __exec(self, command):
"Execute a command on the remote host. Return the output."
child = spawn(command,
#timeout=10,
)
if self.verbose:
sys.stderr.write("-> " + command + "\n")
seen = child.expect(self.keys)
self.f.write(str(child.before) + str(child.after)+'\n')
if seen == 0:
child.sendline('yes')
seen = child.expect(self.keys)
if seen == 1:
if not self.password:
self.password = getpass.getpass('Remote password: ')
child.sendline(self.password)
child.readline()
time.sleep(5)
# Added to allow the background running of remote process
if not child.isalive():
seen = child.expect(self.keys)
if seen == 2:
lines = child.readlines()
self.f.writelines(lines)
if self.verbose:
sys.stderr.write("<- " + child.before + "|\n")
try:
self.f.write(str(child.before) + str(child.after)+'\n')
except:
pass
self.f.close()
return child.before
def ssh(self, command):
return self.__exec("ssh -l %s %s \"%s\"" \
% (self.user,self.host,command))
def scp(self, src, dst):
return self.__exec("scp %s %s@%s:%s" \
% (src, self.user, self.host, dst))
def exists(self, file):
"Retrieve file permissions of specified remote file."
seen = self.ssh("/bin/ls -ld %s" % file)
if seen.find("No such file") > -1:
return None # File doesn't exist
else:
return seen.split()[0] # Return permission field of listing.
|
self.user = user
self.host = host
self.verbose = verbose
self.password = password
self.keys = [
'authenticity',
'assword:',
'@@@@@@@@@@@@',
'Command not found.',
EOF,
]
self.f = open('ssh.out','w')
|
identifier_body
|
teamtreehouse.py
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <[email protected]>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
"
|
"" A <Platform> object for Teamtreehouse"""
def __init__(self):
self.platformName = "Teamtreehouse"
self.tags = ["social", "news"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://teamtreehouse.com/" + "<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Regular expressions that a query must match to be processed by this platform
self.validQuery = {}
# The regular expression '.+' will match any query.
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Sorry, we can't find the page you are looking for</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be fed when running the program.
self.foundFields = {}
|
identifier_body
|
|
teamtreehouse.py
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <[email protected]>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
""" A <Platform> object for Teamtreehouse"""
def __init__(self):
self.platformName = "Teamtreehouse"
self.tags = ["social", "news"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://teamtreehouse.com/" + "<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Regular expressions that a query must match to be processed by this platform
self.validQuery = {}
# The regular expression '.+' will match any query.
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Sorry, we can't find the page you are looking for</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
|
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be fed when running the program.
self.foundFields = {}
|
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
|
random_line_split
|
teamtreehouse.py
|
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <[email protected]>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
""" A <Platform> object for Teamtreehouse"""
def _
|
self):
self.platformName = "Teamtreehouse"
self.tags = ["social", "news"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://teamtreehouse.com/" + "<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Regular expressions that a query must match to be processed by this platform
self.validQuery = {}
# The regular expression '.+' will match any query.
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Sorry, we can't find the page you are looking for</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be fed when running the program.
self.foundFields = {}
|
_init__(
|
identifier_name
|
test_behave.py
|
#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
|
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.wait()  # wait for the command to finish so the exit code is populated
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
so that we always cover the requirements/expectations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_exit_code(context, exit_code):  # renamed so it does not shadow the package-state step implementation
exit_code = int(exit_code)
assert context.cmd_rc == exit_code
|
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
|
identifier_body
|
test_behave.py
|
#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def
|
(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.wait()  # wait for the command to finish so the exit code is populated
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
so that we always cover the requirements/expectations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_exit_code(context, exit_code):  # renamed so it does not shadow the package-state step implementation
exit_code = int(exit_code)
assert context.cmd_rc == exit_code
|
when_action_package
|
identifier_name
|
test_behave.py
|
#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
|
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.wait()  # wait for the command to finish so the exit code is populated
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
so that we always cover the requirements/expectations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_exit_code(context, exit_code):  # renamed so it does not shadow the package-state step implementation
exit_code = int(exit_code)
assert context.cmd_rc == exit_code
|
random_line_split
|
|
test_behave.py
|
#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
context.cmd_rc = cmd_output.wait()  # wait for the command to finish so the exit code is populated
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
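    """ Check the resulting state of each package against the expected state """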
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
|
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
    so that we always cover the requirements/expectations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_command_exit_code(context, exit_code):
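    """ Compare the exit code of the last executed command with the expected value """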
exit_code = int(exit_code)
assert context.cmd_rc == exit_code
|
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
|
conditional_block
|
preferences.py
|
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
class Preferences(QDialog):
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
        except ValueError:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
|
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))
qconnect(self.form.media_log.clicked, self.on_media_log)
self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def updateBackup(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio())
self._recording_drivers = [
RecordingDriver.QtAudioInput,
RecordingDriver.PyAudio,
]
# The plan is to phase out PyAudio soon, so will hold off on
# making this string translatable for now.
self.form.recording_driver.addItems(
[
f"Voice recording driver: {driver.value}"
for driver in self._recording_drivers
]
)
self.form.recording_driver.setCurrentIndex(
self._recording_drivers.index(self.mw.pm.recording_driver())
)
def updateOptions(self) -> None:
restart_required = False
self.prof["pastePNG"] = self.form.pastePNG.isChecked()
self.prof["pasteInvert"] = self.form.pasteInvert.isChecked()
newScale = self.form.uiScale.value() / 100
if newScale != self.mw.pm.uiScale():
self.mw.pm.setUiScale(newScale)
restart_required = True
self.prof["showPlayButtons"] = self.form.showPlayButtons.isChecked()
if self.mw.pm.night_mode() != self.form.nightMode.isChecked():
self.mw.pm.set_night_mode(not self.mw.pm.night_mode())
restart_required = True
self.mw.pm.set_interrupt_audio(self.form.interrupt_audio.isChecked())
new_audio_driver = self._recording_drivers[
self.form.recording_driver.currentIndex()
]
if self.mw.pm.recording_driver() != new_audio_driver:
self.mw.pm.set_recording_driver(new_audio_driver)
if new_audio_driver == RecordingDriver.PyAudio:
showInfo(
"""\
The PyAudio driver will likely be removed in a future update. If you find it works better \
for you than the default driver, please let us know on the Anki forums."""
)
if restart_required:
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
|
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
|
identifier_body
|
preferences.py
|
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
class Preferences(QDialog):
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
        except ValueError:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))
qconnect(self.form.media_log.clicked, self.on_media_log)
self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def updateBackup(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio())
self._recording_drivers = [
RecordingDriver.QtAudioInput,
RecordingDriver.PyAudio,
]
# The plan is to phase out PyAudio soon, so will hold off on
# making this string translatable for now.
self.form.recording_driver.addItems(
[
f"Voice recording driver: {driver.value}"
for driver in self._recording_drivers
]
)
self.form.recording_driver.setCurrentIndex(
self._recording_drivers.index(self.mw.pm.recording_driver())
)
def updateOptions(self) -> None:
restart_required = False
self.prof["pastePNG"] = self.form.pastePNG.isChecked()
self.prof["pasteInvert"] = self.form.pasteInvert.isChecked()
newScale = self.form.uiScale.value() / 100
if newScale != self.mw.pm.uiScale():
self.mw.pm.setUiScale(newScale)
restart_required = True
self.prof["showPlayButtons"] = self.form.showPlayButtons.isChecked()
if self.mw.pm.night_mode() != self.form.nightMode.isChecked():
self.mw.pm.set_night_mode(not self.mw.pm.night_mode())
restart_required = True
self.mw.pm.set_interrupt_audio(self.form.interrupt_audio.isChecked())
new_audio_driver = self._recording_drivers[
self.form.recording_driver.currentIndex()
]
if self.mw.pm.recording_driver() != new_audio_driver:
|
if restart_required:
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
|
self.mw.pm.set_recording_driver(new_audio_driver)
if new_audio_driver == RecordingDriver.PyAudio:
showInfo(
"""\
The PyAudio driver will likely be removed in a future update. If you find it works better \
for you than the default driver, please let us know on the Anki forums."""
)
|
conditional_block
|
preferences.py
|
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
class Preferences(QDialog):
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
        except ValueError:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))
qconnect(self.form.media_log.clicked, self.on_media_log)
self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def
|
(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio())
self._recording_drivers = [
RecordingDriver.QtAudioInput,
RecordingDriver.PyAudio,
]
# The plan is to phase out PyAudio soon, so will hold off on
# making this string translatable for now.
self.form.recording_driver.addItems(
[
f"Voice recording driver: {driver.value}"
for driver in self._recording_drivers
]
)
self.form.recording_driver.setCurrentIndex(
self._recording_drivers.index(self.mw.pm.recording_driver())
)
def updateOptions(self) -> None:
restart_required = False
self.prof["pastePNG"] = self.form.pastePNG.isChecked()
self.prof["pasteInvert"] = self.form.pasteInvert.isChecked()
newScale = self.form.uiScale.value() / 100
if newScale != self.mw.pm.uiScale():
self.mw.pm.setUiScale(newScale)
restart_required = True
self.prof["showPlayButtons"] = self.form.showPlayButtons.isChecked()
if self.mw.pm.night_mode() != self.form.nightMode.isChecked():
self.mw.pm.set_night_mode(not self.mw.pm.night_mode())
restart_required = True
self.mw.pm.set_interrupt_audio(self.form.interrupt_audio.isChecked())
new_audio_driver = self._recording_drivers[
self.form.recording_driver.currentIndex()
]
if self.mw.pm.recording_driver() != new_audio_driver:
self.mw.pm.set_recording_driver(new_audio_driver)
if new_audio_driver == RecordingDriver.PyAudio:
showInfo(
"""\
The PyAudio driver will likely be removed in a future update. If you find it works better \
for you than the default driver, please let us know on the Anki forums."""
)
if restart_required:
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
|
updateBackup
|
identifier_name
|
preferences.py
|
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
|
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
        except ValueError:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))
qconnect(self.form.media_log.clicked, self.on_media_log)
self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def updateBackup(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio())
self._recording_drivers = [
RecordingDriver.QtAudioInput,
RecordingDriver.PyAudio,
]
# The plan is to phase out PyAudio soon, so will hold off on
# making this string translatable for now.
self.form.recording_driver.addItems(
[
f"Voice recording driver: {driver.value}"
for driver in self._recording_drivers
]
)
self.form.recording_driver.setCurrentIndex(
self._recording_drivers.index(self.mw.pm.recording_driver())
)
def updateOptions(self) -> None:
restart_required = False
self.prof["pastePNG"] = self.form.pastePNG.isChecked()
self.prof["pasteInvert"] = self.form.pasteInvert.isChecked()
newScale = self.form.uiScale.value() / 100
if newScale != self.mw.pm.uiScale():
self.mw.pm.setUiScale(newScale)
restart_required = True
self.prof["showPlayButtons"] = self.form.showPlayButtons.isChecked()
if self.mw.pm.night_mode() != self.form.nightMode.isChecked():
self.mw.pm.set_night_mode(not self.mw.pm.night_mode())
restart_required = True
self.mw.pm.set_interrupt_audio(self.form.interrupt_audio.isChecked())
new_audio_driver = self._recording_drivers[
self.form.recording_driver.currentIndex()
]
if self.mw.pm.recording_driver() != new_audio_driver:
self.mw.pm.set_recording_driver(new_audio_driver)
if new_audio_driver == RecordingDriver.PyAudio:
showInfo(
"""\
The PyAudio driver will likely be removed in a future update. If you find it works better \
for you than the default driver, please let us know on the Anki forums."""
)
if restart_required:
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
|
class Preferences(QDialog):
|
random_line_split
|
TracePageHeader.tsx
|
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as React from 'react';
import { get as _get, maxBy as _maxBy, values as _values } from 'lodash';
import MdKeyboardArrowRight from 'react-icons/lib/md/keyboard-arrow-right';
import { css } from '@emotion/css';
import cx from 'classnames';
import { dateTimeFormat, GrafanaTheme2, TimeZone } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import SpanGraph from './SpanGraph';
import TracePageSearchBar from './TracePageSearchBar';
import { autoColor, TUpdateViewRangeTimeFunction, ViewRange, ViewRangeTimeUpdate } from '..';
import LabeledList from '../common/LabeledList';
import TraceName from '../common/TraceName';
import { getTraceName } from '../model/trace-viewer';
import { Trace } from '../types/trace';
import { formatDuration } from '../utils/date';
import { getTraceLinks } from '../model/link-patterns';
import ExternalLinks from '../common/ExternalLinks';
import { uTxMuted } from '../uberUtilityStyles';
const getStyles = (theme: GrafanaTheme2) => {
return {
TracePageHeader: css`
label: TracePageHeader;
& > :first-child {
border-bottom: 1px solid ${autoColor(theme, '#e8e8e8')};
}
& > :nth-child(2) {
background-color: ${autoColor(theme, '#eee')};
border-bottom: 1px solid ${autoColor(theme, '#e4e4e4')};
}
& > :last-child {
border-bottom: 1px solid ${autoColor(theme, '#ccc')};
}
`,
TracePageHeaderTitleRow: css`
label: TracePageHeaderTitleRow;
align-items: center;
display: flex;
`,
TracePageHeaderBack: css`
label: TracePageHeaderBack;
align-items: center;
align-self: stretch;
background-color: #fafafa;
border-bottom: 1px solid #ddd;
border-right: 1px solid #ddd;
color: inherit;
display: flex;
font-size: 1.4rem;
padding: 0 1rem;
margin-bottom: -1px;
&:hover {
background-color: #f0f0f0;
border-color: #ccc;
}
`,
TracePageHeaderTitleLink: css`
label: TracePageHeaderTitleLink;
align-items: center;
display: flex;
flex: 1;
&:hover * {
text-decoration: underline;
}
&:hover > *,
&:hover small {
text-decoration: none;
}
`,
TracePageHeaderDetailToggle: css`
label: TracePageHeaderDetailToggle;
font-size: 2.5rem;
transition: transform 0.07s ease-out;
`,
TracePageHeaderDetailToggleExpanded: css`
label: TracePageHeaderDetailToggleExpanded;
transform: rotate(90deg);
`,
TracePageHeaderTitle: css`
label: TracePageHeaderTitle;
color: inherit;
flex: 1;
font-size: 1.7em;
line-height: 1em;
margin: 0 0 0 0.5em;
padding-bottom: 0.5em;
`,
TracePageHeaderTitleCollapsible: css`
label: TracePageHeaderTitleCollapsible;
margin-left: 0;
`,
TracePageHeaderOverviewItems: css`
label: TracePageHeaderOverviewItems;
border-bottom: 1px solid #e4e4e4;
padding: 0.25rem 0.5rem !important;
`,
TracePageHeaderOverviewItemValueDetail: cx(
css`
label: TracePageHeaderOverviewItemValueDetail;
color: #aaa;
`,
'trace-item-value-detail'
),
TracePageHeaderOverviewItemValue: css`
label: TracePageHeaderOverviewItemValue;
&:hover > .trace-item-value-detail {
color: unset;
}
`,
TracePageHeaderArchiveIcon: css`
label: TracePageHeaderArchiveIcon;
font-size: 1.78em;
margin-right: 0.15em;
`,
TracePageHeaderTraceId: css`
label: TracePageHeaderTraceId;
white-space: nowrap;
`,
};
};
type TracePageHeaderEmbedProps = {
canCollapse: boolean;
clearSearch: () => void;
focusUiFindMatches: () => void;
hideMap: boolean;
hideSummary: boolean;
nextResult: () => void;
onSlimViewClicked: () => void;
onTraceGraphViewClicked: () => void;
prevResult: () => void;
resultCount: number;
slimView: boolean;
trace: Trace;
updateNextViewRangeTime: (update: ViewRangeTimeUpdate) => void;
updateViewRangeTime: TUpdateViewRangeTimeFunction;
viewRange: ViewRange;
searchValue: string;
onSearchValueChange: (value: string) => void;
timeZone: TimeZone;
};
export const HEADER_ITEMS = [
{
key: 'timestamp',
label: 'Trace Start:',
renderer(trace: Trace, timeZone: TimeZone, styles: ReturnType<typeof getStyles>) {
// Convert date from micro to milli seconds
const dateStr = dateTimeFormat(trace.startTime / 1000, { timeZone, defaultWithMS: true });
const match = dateStr.match(/^(.+)(:\d\d\.\d+)$/);
return match ? (
<span className={styles.TracePageHeaderOverviewItemValue}>
{match[1]}
<span className={styles.TracePageHeaderOverviewItemValueDetail}>{match[2]}</span>
</span>
) : (
dateStr
);
},
},
{
key: 'duration',
label: 'Duration:',
renderer: (trace: Trace) => formatDuration(trace.duration),
},
{
key: 'service-count',
label: 'Services:',
renderer: (trace: Trace) => new Set(_values(trace.processes).map((p) => p.serviceName)).size,
},
{
key: 'depth',
label: 'Depth:',
renderer: (trace: Trace) => _get(_maxBy(trace.spans, 'depth'), 'depth', 0) + 1,
},
{
key: 'span-count',
label: 'Total Spans:',
renderer: (trace: Trace) => trace.spans.length,
},
];
export default function TracePageHeader(props: TracePageHeaderEmbedProps) {
const {
canCollapse,
clearSearch,
focusUiFindMatches,
hideMap,
hideSummary,
nextResult,
onSlimViewClicked,
prevResult,
resultCount,
slimView,
trace,
updateNextViewRangeTime,
updateViewRangeTime,
viewRange,
searchValue,
onSearchValueChange,
timeZone,
} = props;
const styles = useStyles2(getStyles);
const links = React.useMemo(() => {
if (!trace) {
return [];
}
return getTraceLinks(trace);
}, [trace]);
if (!trace)
|
const summaryItems =
!hideSummary &&
!slimView &&
HEADER_ITEMS.map((item) => {
const { renderer, ...rest } = item;
return { ...rest, value: renderer(trace, timeZone, styles) };
});
const title = (
<h1 className={cx(styles.TracePageHeaderTitle, canCollapse && styles.TracePageHeaderTitleCollapsible)}>
<TraceName traceName={getTraceName(trace.spans)} />{' '}
<small className={cx(styles.TracePageHeaderTraceId, uTxMuted)}>{trace.traceID}</small>
</h1>
);
return (
<header className={styles.TracePageHeader}>
<div className={styles.TracePageHeaderTitleRow}>
{links && links.length > 0 && <ExternalLinks links={links} className={styles.TracePageHeaderBack} />}
{canCollapse ? (
<a
className={styles.TracePageHeaderTitleLink}
onClick={onSlimViewClicked}
role="switch"
aria-checked={!slimView}
>
<MdKeyboardArrowRight
className={cx(
styles.TracePageHeaderDetailToggle,
!slimView && styles.TracePageHeaderDetailToggleExpanded
)}
/>
{title}
</a>
) : (
title
)}
<TracePageSearchBar
clearSearch={clearSearch}
focusUiFindMatches={focusUiFindMatches}
nextResult={nextResult}
prevResult={prevResult}
resultCount={resultCount}
// TODO: we can change this when we have scroll to span functionality
navigable={false}
searchValue={searchValue}
onSearchValueChange={onSearchValueChange}
/>
</div>
{summaryItems && <LabeledList className={styles.TracePageHeaderOverviewItems} items={summaryItems} />}
{!hideMap && !slimView && (
<SpanGraph
trace={trace}
viewRange={viewRange}
updateNextViewRangeTime={updateNextViewRangeTime}
updateViewRangeTime={updateViewRangeTime}
/>
)}
</header>
);
}
|
{
return null;
}
|
conditional_block
|
TracePageHeader.tsx
|
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as React from 'react';
import { get as _get, maxBy as _maxBy, values as _values } from 'lodash';
import MdKeyboardArrowRight from 'react-icons/lib/md/keyboard-arrow-right';
import { css } from '@emotion/css';
import cx from 'classnames';
import { dateTimeFormat, GrafanaTheme2, TimeZone } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import SpanGraph from './SpanGraph';
import TracePageSearchBar from './TracePageSearchBar';
import { autoColor, TUpdateViewRangeTimeFunction, ViewRange, ViewRangeTimeUpdate } from '..';
import LabeledList from '../common/LabeledList';
import TraceName from '../common/TraceName';
import { getTraceName } from '../model/trace-viewer';
import { Trace } from '../types/trace';
import { formatDuration } from '../utils/date';
import { getTraceLinks } from '../model/link-patterns';
import ExternalLinks from '../common/ExternalLinks';
import { uTxMuted } from '../uberUtilityStyles';
const getStyles = (theme: GrafanaTheme2) => {
return {
TracePageHeader: css`
label: TracePageHeader;
& > :first-child {
border-bottom: 1px solid ${autoColor(theme, '#e8e8e8')};
}
& > :nth-child(2) {
background-color: ${autoColor(theme, '#eee')};
border-bottom: 1px solid ${autoColor(theme, '#e4e4e4')};
}
& > :last-child {
border-bottom: 1px solid ${autoColor(theme, '#ccc')};
}
`,
TracePageHeaderTitleRow: css`
label: TracePageHeaderTitleRow;
align-items: center;
display: flex;
`,
TracePageHeaderBack: css`
label: TracePageHeaderBack;
align-items: center;
align-self: stretch;
background-color: #fafafa;
border-bottom: 1px solid #ddd;
border-right: 1px solid #ddd;
color: inherit;
display: flex;
font-size: 1.4rem;
padding: 0 1rem;
margin-bottom: -1px;
&:hover {
background-color: #f0f0f0;
border-color: #ccc;
}
`,
TracePageHeaderTitleLink: css`
label: TracePageHeaderTitleLink;
align-items: center;
display: flex;
flex: 1;
&:hover * {
text-decoration: underline;
}
&:hover > *,
&:hover small {
text-decoration: none;
}
`,
TracePageHeaderDetailToggle: css`
label: TracePageHeaderDetailToggle;
font-size: 2.5rem;
transition: transform 0.07s ease-out;
`,
TracePageHeaderDetailToggleExpanded: css`
label: TracePageHeaderDetailToggleExpanded;
transform: rotate(90deg);
`,
TracePageHeaderTitle: css`
label: TracePageHeaderTitle;
color: inherit;
flex: 1;
font-size: 1.7em;
line-height: 1em;
margin: 0 0 0 0.5em;
padding-bottom: 0.5em;
`,
TracePageHeaderTitleCollapsible: css`
label: TracePageHeaderTitleCollapsible;
margin-left: 0;
`,
TracePageHeaderOverviewItems: css`
label: TracePageHeaderOverviewItems;
border-bottom: 1px solid #e4e4e4;
padding: 0.25rem 0.5rem !important;
`,
TracePageHeaderOverviewItemValueDetail: cx(
css`
label: TracePageHeaderOverviewItemValueDetail;
color: #aaa;
`,
'trace-item-value-detail'
),
TracePageHeaderOverviewItemValue: css`
label: TracePageHeaderOverviewItemValue;
&:hover > .trace-item-value-detail {
color: unset;
}
`,
TracePageHeaderArchiveIcon: css`
label: TracePageHeaderArchiveIcon;
font-size: 1.78em;
margin-right: 0.15em;
`,
TracePageHeaderTraceId: css`
label: TracePageHeaderTraceId;
white-space: nowrap;
`,
};
};
type TracePageHeaderEmbedProps = {
canCollapse: boolean;
clearSearch: () => void;
focusUiFindMatches: () => void;
hideMap: boolean;
hideSummary: boolean;
nextResult: () => void;
onSlimViewClicked: () => void;
onTraceGraphViewClicked: () => void;
prevResult: () => void;
resultCount: number;
slimView: boolean;
trace: Trace;
updateNextViewRangeTime: (update: ViewRangeTimeUpdate) => void;
updateViewRangeTime: TUpdateViewRangeTimeFunction;
viewRange: ViewRange;
searchValue: string;
onSearchValueChange: (value: string) => void;
timeZone: TimeZone;
};
export const HEADER_ITEMS = [
{
key: 'timestamp',
label: 'Trace Start:',
|
(trace: Trace, timeZone: TimeZone, styles: ReturnType<typeof getStyles>) {
// Convert date from micro to milli seconds
const dateStr = dateTimeFormat(trace.startTime / 1000, { timeZone, defaultWithMS: true });
const match = dateStr.match(/^(.+)(:\d\d\.\d+)$/);
return match ? (
<span className={styles.TracePageHeaderOverviewItemValue}>
{match[1]}
<span className={styles.TracePageHeaderOverviewItemValueDetail}>{match[2]}</span>
</span>
) : (
dateStr
);
},
},
{
key: 'duration',
label: 'Duration:',
renderer: (trace: Trace) => formatDuration(trace.duration),
},
{
key: 'service-count',
label: 'Services:',
renderer: (trace: Trace) => new Set(_values(trace.processes).map((p) => p.serviceName)).size,
},
{
key: 'depth',
label: 'Depth:',
renderer: (trace: Trace) => _get(_maxBy(trace.spans, 'depth'), 'depth', 0) + 1,
},
{
key: 'span-count',
label: 'Total Spans:',
renderer: (trace: Trace) => trace.spans.length,
},
];
export default function TracePageHeader(props: TracePageHeaderEmbedProps) {
const {
canCollapse,
clearSearch,
focusUiFindMatches,
hideMap,
hideSummary,
nextResult,
onSlimViewClicked,
prevResult,
resultCount,
slimView,
trace,
updateNextViewRangeTime,
updateViewRangeTime,
viewRange,
searchValue,
onSearchValueChange,
timeZone,
} = props;
const styles = useStyles2(getStyles);
const links = React.useMemo(() => {
if (!trace) {
return [];
}
return getTraceLinks(trace);
}, [trace]);
if (!trace) {
return null;
}
const summaryItems =
!hideSummary &&
!slimView &&
HEADER_ITEMS.map((item) => {
const { renderer, ...rest } = item;
return { ...rest, value: renderer(trace, timeZone, styles) };
});
const title = (
<h1 className={cx(styles.TracePageHeaderTitle, canCollapse && styles.TracePageHeaderTitleCollapsible)}>
<TraceName traceName={getTraceName(trace.spans)} />{' '}
<small className={cx(styles.TracePageHeaderTraceId, uTxMuted)}>{trace.traceID}</small>
</h1>
);
return (
<header className={styles.TracePageHeader}>
<div className={styles.TracePageHeaderTitleRow}>
{links && links.length > 0 && <ExternalLinks links={links} className={styles.TracePageHeaderBack} />}
{canCollapse ? (
<a
className={styles.TracePageHeaderTitleLink}
onClick={onSlimViewClicked}
role="switch"
aria-checked={!slimView}
>
<MdKeyboardArrowRight
className={cx(
styles.TracePageHeaderDetailToggle,
!slimView && styles.TracePageHeaderDetailToggleExpanded
)}
/>
{title}
</a>
) : (
title
)}
<TracePageSearchBar
clearSearch={clearSearch}
focusUiFindMatches={focusUiFindMatches}
nextResult={nextResult}
prevResult={prevResult}
resultCount={resultCount}
// TODO: we can change this when we have scroll to span functionality
navigable={false}
searchValue={searchValue}
onSearchValueChange={onSearchValueChange}
/>
</div>
{summaryItems && <LabeledList className={styles.TracePageHeaderOverviewItems} items={summaryItems} />}
{!hideMap && !slimView && (
<SpanGraph
trace={trace}
viewRange={viewRange}
updateNextViewRangeTime={updateNextViewRangeTime}
updateViewRangeTime={updateViewRangeTime}
/>
)}
</header>
);
}
|
renderer
|
identifier_name
|
TracePageHeader.tsx
|
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as React from 'react';
import { get as _get, maxBy as _maxBy, values as _values } from 'lodash';
import MdKeyboardArrowRight from 'react-icons/lib/md/keyboard-arrow-right';
import { css } from '@emotion/css';
import cx from 'classnames';
import { dateTimeFormat, GrafanaTheme2, TimeZone } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import SpanGraph from './SpanGraph';
import TracePageSearchBar from './TracePageSearchBar';
import { autoColor, TUpdateViewRangeTimeFunction, ViewRange, ViewRangeTimeUpdate } from '..';
import LabeledList from '../common/LabeledList';
import TraceName from '../common/TraceName';
import { getTraceName } from '../model/trace-viewer';
import { Trace } from '../types/trace';
import { formatDuration } from '../utils/date';
import { getTraceLinks } from '../model/link-patterns';
import ExternalLinks from '../common/ExternalLinks';
import { uTxMuted } from '../uberUtilityStyles';
const getStyles = (theme: GrafanaTheme2) => {
return {
TracePageHeader: css`
label: TracePageHeader;
& > :first-child {
border-bottom: 1px solid ${autoColor(theme, '#e8e8e8')};
}
& > :nth-child(2) {
background-color: ${autoColor(theme, '#eee')};
border-bottom: 1px solid ${autoColor(theme, '#e4e4e4')};
}
& > :last-child {
border-bottom: 1px solid ${autoColor(theme, '#ccc')};
}
`,
TracePageHeaderTitleRow: css`
label: TracePageHeaderTitleRow;
align-items: center;
display: flex;
`,
TracePageHeaderBack: css`
label: TracePageHeaderBack;
align-items: center;
align-self: stretch;
background-color: #fafafa;
border-bottom: 1px solid #ddd;
border-right: 1px solid #ddd;
color: inherit;
display: flex;
font-size: 1.4rem;
padding: 0 1rem;
margin-bottom: -1px;
&:hover {
background-color: #f0f0f0;
border-color: #ccc;
}
`,
TracePageHeaderTitleLink: css`
label: TracePageHeaderTitleLink;
align-items: center;
display: flex;
flex: 1;
&:hover * {
text-decoration: underline;
}
&:hover > *,
&:hover small {
text-decoration: none;
}
`,
TracePageHeaderDetailToggle: css`
label: TracePageHeaderDetailToggle;
font-size: 2.5rem;
transition: transform 0.07s ease-out;
`,
TracePageHeaderDetailToggleExpanded: css`
label: TracePageHeaderDetailToggleExpanded;
transform: rotate(90deg);
`,
TracePageHeaderTitle: css`
label: TracePageHeaderTitle;
color: inherit;
flex: 1;
font-size: 1.7em;
line-height: 1em;
margin: 0 0 0 0.5em;
padding-bottom: 0.5em;
|
TracePageHeaderOverviewItems: css`
label: TracePageHeaderOverviewItems;
border-bottom: 1px solid #e4e4e4;
padding: 0.25rem 0.5rem !important;
`,
TracePageHeaderOverviewItemValueDetail: cx(
css`
label: TracePageHeaderOverviewItemValueDetail;
color: #aaa;
`,
'trace-item-value-detail'
),
TracePageHeaderOverviewItemValue: css`
label: TracePageHeaderOverviewItemValue;
&:hover > .trace-item-value-detail {
color: unset;
}
`,
TracePageHeaderArchiveIcon: css`
label: TracePageHeaderArchiveIcon;
font-size: 1.78em;
margin-right: 0.15em;
`,
TracePageHeaderTraceId: css`
label: TracePageHeaderTraceId;
white-space: nowrap;
`,
};
};
type TracePageHeaderEmbedProps = {
canCollapse: boolean;
clearSearch: () => void;
focusUiFindMatches: () => void;
hideMap: boolean;
hideSummary: boolean;
nextResult: () => void;
onSlimViewClicked: () => void;
onTraceGraphViewClicked: () => void;
prevResult: () => void;
resultCount: number;
slimView: boolean;
trace: Trace;
updateNextViewRangeTime: (update: ViewRangeTimeUpdate) => void;
updateViewRangeTime: TUpdateViewRangeTimeFunction;
viewRange: ViewRange;
searchValue: string;
onSearchValueChange: (value: string) => void;
timeZone: TimeZone;
};
export const HEADER_ITEMS = [
{
key: 'timestamp',
label: 'Trace Start:',
renderer(trace: Trace, timeZone: TimeZone, styles: ReturnType<typeof getStyles>) {
// Convert date from micro to milli seconds
const dateStr = dateTimeFormat(trace.startTime / 1000, { timeZone, defaultWithMS: true });
const match = dateStr.match(/^(.+)(:\d\d\.\d+)$/);
return match ? (
<span className={styles.TracePageHeaderOverviewItemValue}>
{match[1]}
<span className={styles.TracePageHeaderOverviewItemValueDetail}>{match[2]}</span>
</span>
) : (
dateStr
);
},
},
{
key: 'duration',
label: 'Duration:',
renderer: (trace: Trace) => formatDuration(trace.duration),
},
{
key: 'service-count',
label: 'Services:',
renderer: (trace: Trace) => new Set(_values(trace.processes).map((p) => p.serviceName)).size,
},
{
key: 'depth',
label: 'Depth:',
renderer: (trace: Trace) => _get(_maxBy(trace.spans, 'depth'), 'depth', 0) + 1,
},
{
key: 'span-count',
label: 'Total Spans:',
renderer: (trace: Trace) => trace.spans.length,
},
];
export default function TracePageHeader(props: TracePageHeaderEmbedProps) {
const {
canCollapse,
clearSearch,
focusUiFindMatches,
hideMap,
hideSummary,
nextResult,
onSlimViewClicked,
prevResult,
resultCount,
slimView,
trace,
updateNextViewRangeTime,
updateViewRangeTime,
viewRange,
searchValue,
onSearchValueChange,
timeZone,
} = props;
const styles = useStyles2(getStyles);
const links = React.useMemo(() => {
if (!trace) {
return [];
}
return getTraceLinks(trace);
}, [trace]);
if (!trace) {
return null;
}
const summaryItems =
!hideSummary &&
!slimView &&
HEADER_ITEMS.map((item) => {
const { renderer, ...rest } = item;
return { ...rest, value: renderer(trace, timeZone, styles) };
});
const title = (
<h1 className={cx(styles.TracePageHeaderTitle, canCollapse && styles.TracePageHeaderTitleCollapsible)}>
<TraceName traceName={getTraceName(trace.spans)} />{' '}
<small className={cx(styles.TracePageHeaderTraceId, uTxMuted)}>{trace.traceID}</small>
</h1>
);
return (
<header className={styles.TracePageHeader}>
<div className={styles.TracePageHeaderTitleRow}>
{links && links.length > 0 && <ExternalLinks links={links} className={styles.TracePageHeaderBack} />}
{canCollapse ? (
<a
className={styles.TracePageHeaderTitleLink}
onClick={onSlimViewClicked}
role="switch"
aria-checked={!slimView}
>
<MdKeyboardArrowRight
className={cx(
styles.TracePageHeaderDetailToggle,
!slimView && styles.TracePageHeaderDetailToggleExpanded
)}
/>
{title}
</a>
) : (
title
)}
<TracePageSearchBar
clearSearch={clearSearch}
focusUiFindMatches={focusUiFindMatches}
nextResult={nextResult}
prevResult={prevResult}
resultCount={resultCount}
// TODO: we can change this when we have scroll to span functionality
navigable={false}
searchValue={searchValue}
onSearchValueChange={onSearchValueChange}
/>
</div>
{summaryItems && <LabeledList className={styles.TracePageHeaderOverviewItems} items={summaryItems} />}
{!hideMap && !slimView && (
<SpanGraph
trace={trace}
viewRange={viewRange}
updateNextViewRangeTime={updateNextViewRangeTime}
updateViewRangeTime={updateViewRangeTime}
/>
)}
</header>
);
}
|
`,
TracePageHeaderTitleCollapsible: css`
label: TracePageHeaderTitleCollapsible;
margin-left: 0;
`,
|
random_line_split
|
TracePageHeader.tsx
|
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import * as React from 'react';
import { get as _get, maxBy as _maxBy, values as _values } from 'lodash';
import MdKeyboardArrowRight from 'react-icons/lib/md/keyboard-arrow-right';
import { css } from '@emotion/css';
import cx from 'classnames';
import { dateTimeFormat, GrafanaTheme2, TimeZone } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import SpanGraph from './SpanGraph';
import TracePageSearchBar from './TracePageSearchBar';
import { autoColor, TUpdateViewRangeTimeFunction, ViewRange, ViewRangeTimeUpdate } from '..';
import LabeledList from '../common/LabeledList';
import TraceName from '../common/TraceName';
import { getTraceName } from '../model/trace-viewer';
import { Trace } from '../types/trace';
import { formatDuration } from '../utils/date';
import { getTraceLinks } from '../model/link-patterns';
import ExternalLinks from '../common/ExternalLinks';
import { uTxMuted } from '../uberUtilityStyles';
const getStyles = (theme: GrafanaTheme2) => {
return {
TracePageHeader: css`
label: TracePageHeader;
& > :first-child {
border-bottom: 1px solid ${autoColor(theme, '#e8e8e8')};
}
& > :nth-child(2) {
background-color: ${autoColor(theme, '#eee')};
border-bottom: 1px solid ${autoColor(theme, '#e4e4e4')};
}
& > :last-child {
border-bottom: 1px solid ${autoColor(theme, '#ccc')};
}
`,
TracePageHeaderTitleRow: css`
label: TracePageHeaderTitleRow;
align-items: center;
display: flex;
`,
TracePageHeaderBack: css`
label: TracePageHeaderBack;
align-items: center;
align-self: stretch;
background-color: #fafafa;
border-bottom: 1px solid #ddd;
border-right: 1px solid #ddd;
color: inherit;
display: flex;
font-size: 1.4rem;
padding: 0 1rem;
margin-bottom: -1px;
&:hover {
background-color: #f0f0f0;
border-color: #ccc;
}
`,
TracePageHeaderTitleLink: css`
label: TracePageHeaderTitleLink;
align-items: center;
display: flex;
flex: 1;
&:hover * {
text-decoration: underline;
}
&:hover > *,
&:hover small {
text-decoration: none;
}
`,
TracePageHeaderDetailToggle: css`
label: TracePageHeaderDetailToggle;
font-size: 2.5rem;
transition: transform 0.07s ease-out;
`,
TracePageHeaderDetailToggleExpanded: css`
label: TracePageHeaderDetailToggleExpanded;
transform: rotate(90deg);
`,
TracePageHeaderTitle: css`
label: TracePageHeaderTitle;
color: inherit;
flex: 1;
font-size: 1.7em;
line-height: 1em;
margin: 0 0 0 0.5em;
padding-bottom: 0.5em;
`,
TracePageHeaderTitleCollapsible: css`
label: TracePageHeaderTitleCollapsible;
margin-left: 0;
`,
TracePageHeaderOverviewItems: css`
label: TracePageHeaderOverviewItems;
border-bottom: 1px solid #e4e4e4;
padding: 0.25rem 0.5rem !important;
`,
TracePageHeaderOverviewItemValueDetail: cx(
css`
label: TracePageHeaderOverviewItemValueDetail;
color: #aaa;
`,
'trace-item-value-detail'
),
TracePageHeaderOverviewItemValue: css`
label: TracePageHeaderOverviewItemValue;
&:hover > .trace-item-value-detail {
color: unset;
}
`,
TracePageHeaderArchiveIcon: css`
label: TracePageHeaderArchiveIcon;
font-size: 1.78em;
margin-right: 0.15em;
`,
TracePageHeaderTraceId: css`
label: TracePageHeaderTraceId;
white-space: nowrap;
`,
};
};
type TracePageHeaderEmbedProps = {
canCollapse: boolean;
clearSearch: () => void;
focusUiFindMatches: () => void;
hideMap: boolean;
hideSummary: boolean;
nextResult: () => void;
onSlimViewClicked: () => void;
onTraceGraphViewClicked: () => void;
prevResult: () => void;
resultCount: number;
slimView: boolean;
trace: Trace;
updateNextViewRangeTime: (update: ViewRangeTimeUpdate) => void;
updateViewRangeTime: TUpdateViewRangeTimeFunction;
viewRange: ViewRange;
searchValue: string;
onSearchValueChange: (value: string) => void;
timeZone: TimeZone;
};
export const HEADER_ITEMS = [
{
key: 'timestamp',
label: 'Trace Start:',
renderer(trace: Trace, timeZone: TimeZone, styles: ReturnType<typeof getStyles>)
|
,
},
{
key: 'duration',
label: 'Duration:',
renderer: (trace: Trace) => formatDuration(trace.duration),
},
{
key: 'service-count',
label: 'Services:',
renderer: (trace: Trace) => new Set(_values(trace.processes).map((p) => p.serviceName)).size,
},
{
key: 'depth',
label: 'Depth:',
renderer: (trace: Trace) => _get(_maxBy(trace.spans, 'depth'), 'depth', 0) + 1,
},
{
key: 'span-count',
label: 'Total Spans:',
renderer: (trace: Trace) => trace.spans.length,
},
];
export default function TracePageHeader(props: TracePageHeaderEmbedProps) {
const {
canCollapse,
clearSearch,
focusUiFindMatches,
hideMap,
hideSummary,
nextResult,
onSlimViewClicked,
prevResult,
resultCount,
slimView,
trace,
updateNextViewRangeTime,
updateViewRangeTime,
viewRange,
searchValue,
onSearchValueChange,
timeZone,
} = props;
const styles = useStyles2(getStyles);
const links = React.useMemo(() => {
if (!trace) {
return [];
}
return getTraceLinks(trace);
}, [trace]);
if (!trace) {
return null;
}
const summaryItems =
!hideSummary &&
!slimView &&
HEADER_ITEMS.map((item) => {
const { renderer, ...rest } = item;
return { ...rest, value: renderer(trace, timeZone, styles) };
});
const title = (
<h1 className={cx(styles.TracePageHeaderTitle, canCollapse && styles.TracePageHeaderTitleCollapsible)}>
<TraceName traceName={getTraceName(trace.spans)} />{' '}
<small className={cx(styles.TracePageHeaderTraceId, uTxMuted)}>{trace.traceID}</small>
</h1>
);
return (
<header className={styles.TracePageHeader}>
<div className={styles.TracePageHeaderTitleRow}>
{links && links.length > 0 && <ExternalLinks links={links} className={styles.TracePageHeaderBack} />}
{canCollapse ? (
<a
className={styles.TracePageHeaderTitleLink}
onClick={onSlimViewClicked}
role="switch"
aria-checked={!slimView}
>
<MdKeyboardArrowRight
className={cx(
styles.TracePageHeaderDetailToggle,
!slimView && styles.TracePageHeaderDetailToggleExpanded
)}
/>
{title}
</a>
) : (
title
)}
<TracePageSearchBar
clearSearch={clearSearch}
focusUiFindMatches={focusUiFindMatches}
nextResult={nextResult}
prevResult={prevResult}
resultCount={resultCount}
// TODO: we can change this when we have scroll to span functionality
navigable={false}
searchValue={searchValue}
onSearchValueChange={onSearchValueChange}
/>
</div>
{summaryItems && <LabeledList className={styles.TracePageHeaderOverviewItems} items={summaryItems} />}
{!hideMap && !slimView && (
<SpanGraph
trace={trace}
viewRange={viewRange}
updateNextViewRangeTime={updateNextViewRangeTime}
updateViewRangeTime={updateViewRangeTime}
/>
)}
</header>
);
}
|
{
// Convert date from micro to milli seconds
const dateStr = dateTimeFormat(trace.startTime / 1000, { timeZone, defaultWithMS: true });
const match = dateStr.match(/^(.+)(:\d\d\.\d+)$/);
return match ? (
<span className={styles.TracePageHeaderOverviewItemValue}>
{match[1]}
<span className={styles.TracePageHeaderOverviewItemValueDetail}>{match[2]}</span>
</span>
) : (
dateStr
);
}
|
identifier_body
|
__init__.py
|
##############################################################################
#
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""
Consider the following example::
>>> from structuredtext.stng import structurize
>>> from structuredtext.document import DocumentWithImages
>>> from structuredtext.html import HTMLWithImages
>>> from structuredtext.docbook import DocBook
We first need to structurize the string and make a full-blown
document out of it:
>>> struct = structurize(structured_string)
>>> doc = DocumentWithImages()(struct)
Now feed it to some output generator, in this case HTML or DocBook:
>>> output = HTMLWithImages()(doc, level=1)
>>> output = DocBook()(doc, level=1)
$Id: __init__.py 67724 2006-04-28 16:52:39Z jim $
"""
__docformat__ = 'restructuredtext'
import re
from zope.structuredtext import stng, document, html
from string import letters
def stx2html(aStructuredString, level=1, header=1):
|
def stx2htmlWithReferences(text, level=1, header=1):
text = re.sub(
r'[\000\n]\.\. \[([0-9_%s-]+)\]' % letters,
r'\n <a name="\1">[\1]</a>',
text)
text = re.sub(
r'([\000- ,])\[(?P<ref>[0-9_%s-]+)\]([\000- ,.:])' % letters,
r'\1<a href="#\2">[\2]</a>\3',
text)
text = re.sub(
r'([\000- ,])\[([^]]+)\.html\]([\000- ,.:])',
r'\1<a href="\2.html">[\2]</a>\3',
text)
return stx2html(text, level=level, header=header)
|
st = stng.structurize(aStructuredString)
doc = document.DocumentWithImages()(st)
return html.HTMLWithImages()(doc, header=header, level=level)
|
identifier_body
|
__init__.py
|
##############################################################################
#
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""
Consider the following example::
>>> from structuredtext.stng import structurize
>>> from structuredtext.document import DocumentWithImages
>>> from structuredtext.html import HTMLWithImages
>>> from structuredtext.docbook import DocBook
We first need to structurize the string and make a full-blown
document out of it:
>>> struct = structurize(structured_string)
>>> doc = DocumentWithImages()(struct)
Now feed it to some output generator, in this case HTML or DocBook:
>>> output = HTMLWithImages()(doc, level=1)
>>> output = DocBook()(doc, level=1)
$Id: __init__.py 67724 2006-04-28 16:52:39Z jim $
"""
|
__docformat__ = 'restructuredtext'
import re
from zope.structuredtext import stng, document, html
from string import letters
def stx2html(aStructuredString, level=1, header=1):
st = stng.structurize(aStructuredString)
doc = document.DocumentWithImages()(st)
return html.HTMLWithImages()(doc, header=header, level=level)
def stx2htmlWithReferences(text, level=1, header=1):
text = re.sub(
r'[\000\n]\.\. \[([0-9_%s-]+)\]' % letters,
r'\n <a name="\1">[\1]</a>',
text)
text = re.sub(
r'([\000- ,])\[(?P<ref>[0-9_%s-]+)\]([\000- ,.:])' % letters,
r'\1<a href="#\2">[\2]</a>\3',
text)
text = re.sub(
r'([\000- ,])\[([^]]+)\.html\]([\000- ,.:])',
r'\1<a href="\2.html">[\2]</a>\3',
text)
return stx2html(text, level=level, header=header)
|
random_line_split
|
|
__init__.py
|
##############################################################################
#
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""
Consider the following example::
>>> from structuredtext.stng import structurize
>>> from structuredtext.document import DocumentWithImages
>>> from structuredtext.html import HTMLWithImages
>>> from structuredtext.docbook import DocBook
We first need to structurize the string and make a full-blown
document out of it:
>>> struct = structurize(structured_string)
>>> doc = DocumentWithImages()(struct)
Now feed it to some output generator, in this case HTML or DocBook:
>>> output = HTMLWithImages()(doc, level=1)
>>> output = DocBook()(doc, level=1)
$Id: __init__.py 67724 2006-04-28 16:52:39Z jim $
"""
__docformat__ = 'restructuredtext'
import re
from zope.structuredtext import stng, document, html
from string import letters
def
|
(aStructuredString, level=1, header=1):
st = stng.structurize(aStructuredString)
doc = document.DocumentWithImages()(st)
return html.HTMLWithImages()(doc, header=header, level=level)
def stx2htmlWithReferences(text, level=1, header=1):
text = re.sub(
r'[\000\n]\.\. \[([0-9_%s-]+)\]' % letters,
r'\n <a name="\1">[\1]</a>',
text)
text = re.sub(
r'([\000- ,])\[(?P<ref>[0-9_%s-]+)\]([\000- ,.:])' % letters,
r'\1<a href="#\2">[\2]</a>\3',
text)
text = re.sub(
r'([\000- ,])\[([^]]+)\.html\]([\000- ,.:])',
r'\1<a href="\2.html">[\2]</a>\3',
text)
return stx2html(text, level=level, header=header)
|
stx2html
|
identifier_name
|
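A minimal usage sketch for the two convenience wrappers defined in the __init__.py records above (stx2html and stx2htmlWithReferences). It assumes this __init__.py is the zope.structuredtext package itself and that it runs under Python 2 (the file imports letters from the string module, which Python 3 removed); the sample input text is illustrative only.

# Illustrative sketch only; the sample text and the import path are assumptions.
from zope.structuredtext import stx2html, stx2htmlWithReferences

sample = """Example Title

  A *structured text* paragraph that cites a footnote [1].

.. [1] The footnote body.
"""

# Plain conversion: structurize -> DocumentWithImages -> HTMLWithImages.
print(stx2html(sample, level=1))

# Same conversion, but [n]-style citations are first rewritten into anchors and links.
print(stx2htmlWithReferences(sample, level=1))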
encoding.py
|
import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}
def
|
(encoding):
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
def get_encoding(page):
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
text = re.sub(b'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc
|
fix_charset
|
identifier_name
|
encoding.py
|
import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}
def fix_charset(encoding):
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
def get_encoding(page):
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
|
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
text = re.sub(b'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc
|
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
|
conditional_block
|
encoding.py
|
import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}
def fix_charset(encoding):
|
def get_encoding(page):
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
text = re.sub(b'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc
|
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
|
identifier_body
|
encoding.py
|
import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
|
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}
def fix_charset(encoding):
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
def get_encoding(page):
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
text = re.sub(b'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc
|
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
|
random_line_split
|
budget_schema.js
|
module.exports = `
type SimpleBudgetDetail {
account_type: String,
account_name: String,
fund_name: String,
department_name: String,
division_name: String,
costcenter_name: String,
function_name: String,
charcode_name: String,
organization_name: String,
category_name: String,
budget_section_name: String,
object_name: String,
year: Int,
budget: Float,
actual: Float,
full_account_id: String,
org_id: String,
obj_id: String,
fund_id: String,
dept_id: String,
|
budget_section_id: String,
proj_id: String,
is_proposed: String
use_actual: String
}
type SimpleBudgetSummary {
account_type: String,
category_name: String,
year: Int,
total_budget: Float,
total_actual: Float
use_actual: String
}
type BudgetCashFlow {
account_type: String,
category_name: String,
category_id: String,
dept_id: String,
department_name: String,
fund_id: String,
fund_name: String,
budget: Float,
year: Int
}
type BudgetParameters {
start_year: Int
end_year: Int
in_budget_season: Boolean
}
`;
|
div_id: String,
cost_id: String,
func_id: String,
charcode: String,
category_id: String,
|
random_line_split
|
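To make the shape of these GraphQL types concrete, a hedged client-side sketch follows. The endpoint URL and the root field name budgetParameters are hypothetical: the schema excerpt above only declares object types, not the query root that would expose them.

# Illustrative sketch only; the endpoint and root field are hypothetical.
import requests

query = """
{
  budgetParameters {
    start_year
    end_year
    in_budget_season
  }
}
"""

resp = requests.post("https://example.org/graphql", json={"query": query})
print(resp.json())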
retinanet_segmentation_main.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training script for RetinaNet segmentation model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
import dataloader
import retinanet_segmentation_model
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib import training as contrib_training
# Cloud TPU Cluster Resolvers
flags.DEFINE_string(
'tpu', default=None,
help='The Cloud TPU to use for training. This should be either the name '
'used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 '
'url.')
flags.DEFINE_string(
'gcp_project', default=None,
help='Project name for the Cloud TPU-enabled project. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
flags.DEFINE_string(
'tpu_zone', default=None,
help='GCE zone where the Cloud TPU is located in. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
# Model specific parameters
flags.DEFINE_bool('use_tpu', True, 'Use TPUs rather than CPUs')
flags.DEFINE_string('model_dir', None, 'Location of model_dir')
flags.DEFINE_string('resnet_checkpoint', None,
'Location of the ResNet50 checkpoint to use for model '
'initialization.')
flags.DEFINE_string('hparams', '',
'Comma separated k=v pairs of hyperparameters.')
flags.DEFINE_integer(
'num_shards', default=8, help='Number of shards (TPU cores)')
flags.DEFINE_integer('train_batch_size', 64, 'training batch size')
flags.DEFINE_integer('eval_batch_size', 8, 'evaluation batch size')
flags.DEFINE_integer('eval_samples', 1449, 'The number of samples for '
'evaluation.')
flags.DEFINE_integer(
'iterations_per_loop', 100, 'Number of iterations per TPU training loop')
flags.DEFINE_string(
'training_file_pattern', None,
'Glob for training data files (e.g., Pascal VOC train set)')
flags.DEFINE_string(
'validation_file_pattern', None,
'Glob for evaluation tfrecords (e.g., Pascal VOC validation set)')
flags.DEFINE_integer('num_examples_per_epoch', 10582,
'Number of examples in one epoch')
flags.DEFINE_integer('num_epochs', 45, 'Number of epochs for training')
flags.DEFINE_string('mode', 'train_and_eval',
                    'Mode to run: train, eval, or train_and_eval '
                    '(default: train_and_eval)')
flags.DEFINE_bool('eval_after_training', False, 'Run one eval after the '
'training finishes.')
# For Eval mode
flags.DEFINE_integer('min_eval_interval', 180,
'Minimum seconds between evaluations.')
flags.DEFINE_integer(
'eval_timeout', None,
'Maximum seconds between checkpoints before evaluation terminates.')
FLAGS = flags.FLAGS
def main(argv):
del argv # Unused.
if FLAGS.use_tpu:
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
tpu_grpc_url = tpu_cluster_resolver.get_master()
tf.Session.reset(tpu_grpc_url)
if FLAGS.mode in ('train',
'train_and_eval') and FLAGS.training_file_pattern is None:
raise RuntimeError('You must specify --training_file_pattern for training.')
if FLAGS.mode in ('eval', 'train_and_eval'):
if FLAGS.validation_file_pattern is None:
raise RuntimeError('You must specify'
'--validation_file_pattern for evaluation.')
# Parse hparams
hparams = retinanet_segmentation_model.default_hparams()
hparams.parse(FLAGS.hparams)
params = dict(
hparams.values(),
num_shards=FLAGS.num_shards,
num_examples_per_epoch=FLAGS.num_examples_per_epoch,
use_tpu=FLAGS.use_tpu,
resnet_checkpoint=FLAGS.resnet_checkpoint,
mode=FLAGS.mode,
)
run_config = contrib_tpu.RunConfig(
cluster=tpu_cluster_resolver,
evaluation_master='',
model_dir=FLAGS.model_dir,
keep_checkpoint_max=3,
log_step_count_steps=FLAGS.iterations_per_loop,
session_config=tf.ConfigProto(
allow_soft_placement=True, log_device_placement=False),
tpu_config=contrib_tpu.TPUConfig(
FLAGS.iterations_per_loop,
FLAGS.num_shards,
per_host_input_for_training=(
contrib_tpu.InputPipelineConfig.PER_HOST_V2)))
model_fn = retinanet_segmentation_model.segmentation_model_fn
# TPU Estimator
eval_params = dict(
params,
use_tpu=FLAGS.use_tpu,
input_rand_hflip=False,
resnet_checkpoint=None,
is_training_bn=False,
)
if FLAGS.mode == 'train':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
max_steps=int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size),
)
if FLAGS.eval_after_training:
# Run evaluation on CPU after training finishes.
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
elif FLAGS.mode == 'eval':
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
def terminate_eval():
|
# Run evaluation when there's a new checkpoint
for ckpt in contrib_training.checkpoints_iterator(
FLAGS.model_dir,
min_interval_secs=FLAGS.min_eval_interval,
timeout=FLAGS.eval_timeout,
timeout_fn=terminate_eval):
tf.logging.info('Starting to evaluate.')
try:
        # Note that if eval_samples is not evenly divisible by eval_batch_size,
        # the remainder will be dropped, resulting in different evaluation
        # performance than validating on the full set.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
# Terminate eval job when final checkpoint is reached
current_step = int(os.path.basename(ckpt).split('-')[1])
total_step = int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size)
if current_step >= total_step:
tf.logging.info('Evaluation finished after training step %d' %
current_step)
break
except tf.errors.NotFoundError:
# Since the coordinator is on a different job than the TPU worker,
# sometimes the TPU worker does not finish initializing until long after
# the CPU job tells it to start evaluating. In this case, the checkpoint
# file could have been deleted already.
tf.logging.info('Checkpoint %s no longer exists, skipping checkpoint' %
ckpt)
elif FLAGS.mode == 'train_and_eval':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
for cycle in range(0, FLAGS.num_epochs):
tf.logging.info('Starting training cycle, epoch: %d.' % cycle)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
steps=int(FLAGS.num_examples_per_epoch / FLAGS.train_batch_size))
tf.logging.info('Starting evaluation cycle, epoch: {:d}.'.format(
cycle + 1))
# Run evaluation after training finishes.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Evaluation results: %s' % eval_results)
else:
tf.logging.info('Mode not found.')
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
app.run(main)
|
tf.logging.info('Terminating eval after %d seconds of no checkpoints' %
FLAGS.eval_timeout)
return True
|
identifier_body
|
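One detail of the retinanet_segmentation_main.py records worth spelling out is how max_steps is derived in the train branch: epochs times examples per epoch, divided by the global batch size. With the default flag values shown above, the arithmetic works out as in this worked example, which is not part of the script itself.

# Worked example of the max_steps arithmetic, using the script's default flag values.
num_epochs = 45
num_examples_per_epoch = 10582
train_batch_size = 64

max_steps = int((num_epochs * num_examples_per_epoch) / train_batch_size)
print(max_steps)  # 7440 total steps, i.e. roughly 165 steps per epoch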
retinanet_segmentation_main.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training script for RetinaNet segmentation model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
import dataloader
import retinanet_segmentation_model
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib import training as contrib_training
# Cloud TPU Cluster Resolvers
flags.DEFINE_string(
'tpu', default=None,
help='The Cloud TPU to use for training. This should be either the name '
'used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 '
'url.')
flags.DEFINE_string(
'gcp_project', default=None,
help='Project name for the Cloud TPU-enabled project. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
flags.DEFINE_string(
'tpu_zone', default=None,
help='GCE zone where the Cloud TPU is located in. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
# Model specific paramenters
flags.DEFINE_bool('use_tpu', True, 'Use TPUs rather than CPUs')
flags.DEFINE_string('model_dir', None, 'Location of model_dir')
flags.DEFINE_string('resnet_checkpoint', None,
'Location of the ResNet50 checkpoint to use for model '
'initialization.')
flags.DEFINE_string('hparams', '',
'Comma separated k=v pairs of hyperparameters.')
flags.DEFINE_integer(
'num_shards', default=8, help='Number of shards (TPU cores)')
flags.DEFINE_integer('train_batch_size', 64, 'training batch size')
flags.DEFINE_integer('eval_batch_size', 8, 'evaluation batch size')
flags.DEFINE_integer('eval_samples', 1449, 'The number of samples for '
'evaluation.')
flags.DEFINE_integer(
'iterations_per_loop', 100, 'Number of iterations per TPU training loop')
flags.DEFINE_string(
'training_file_pattern', None,
'Glob for training data files (e.g., Pascal VOC train set)')
flags.DEFINE_string(
'validation_file_pattern', None,
'Glob for evaluation tfrecords (e.g., Pascal VOC validation set)')
flags.DEFINE_integer('num_examples_per_epoch', 10582,
'Number of examples in one epoch')
flags.DEFINE_integer('num_epochs', 45, 'Number of epochs for training')
flags.DEFINE_string('mode', 'train_and_eval',
                    'Mode to run: train, eval, or train_and_eval '
                    '(default: train_and_eval)')
flags.DEFINE_bool('eval_after_training', False, 'Run one eval after the '
'training finishes.')
# For Eval mode
flags.DEFINE_integer('min_eval_interval', 180,
'Minimum seconds between evaluations.')
flags.DEFINE_integer(
'eval_timeout', None,
'Maximum seconds between checkpoints before evaluation terminates.')
FLAGS = flags.FLAGS
def main(argv):
del argv # Unused.
if FLAGS.use_tpu:
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
tpu_grpc_url = tpu_cluster_resolver.get_master()
tf.Session.reset(tpu_grpc_url)
if FLAGS.mode in ('train',
'train_and_eval') and FLAGS.training_file_pattern is None:
raise RuntimeError('You must specify --training_file_pattern for training.')
if FLAGS.mode in ('eval', 'train_and_eval'):
if FLAGS.validation_file_pattern is None:
raise RuntimeError('You must specify'
'--validation_file_pattern for evaluation.')
# Parse hparams
hparams = retinanet_segmentation_model.default_hparams()
hparams.parse(FLAGS.hparams)
params = dict(
hparams.values(),
num_shards=FLAGS.num_shards,
num_examples_per_epoch=FLAGS.num_examples_per_epoch,
use_tpu=FLAGS.use_tpu,
resnet_checkpoint=FLAGS.resnet_checkpoint,
mode=FLAGS.mode,
)
run_config = contrib_tpu.RunConfig(
cluster=tpu_cluster_resolver,
evaluation_master='',
model_dir=FLAGS.model_dir,
keep_checkpoint_max=3,
log_step_count_steps=FLAGS.iterations_per_loop,
session_config=tf.ConfigProto(
allow_soft_placement=True, log_device_placement=False),
tpu_config=contrib_tpu.TPUConfig(
FLAGS.iterations_per_loop,
FLAGS.num_shards,
per_host_input_for_training=(
contrib_tpu.InputPipelineConfig.PER_HOST_V2)))
model_fn = retinanet_segmentation_model.segmentation_model_fn
# TPU Estimator
eval_params = dict(
params,
use_tpu=FLAGS.use_tpu,
input_rand_hflip=False,
resnet_checkpoint=None,
is_training_bn=False,
)
if FLAGS.mode == 'train':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
max_steps=int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size),
)
if FLAGS.eval_after_training:
# Run evaluation on CPU after training finishes.
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
elif FLAGS.mode == 'eval':
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
def terminate_eval():
tf.logging.info('Terminating eval after %d seconds of no checkpoints' %
FLAGS.eval_timeout)
return True
# Run evaluation when there's a new checkpoint
for ckpt in contrib_training.checkpoints_iterator(
FLAGS.model_dir,
min_interval_secs=FLAGS.min_eval_interval,
timeout=FLAGS.eval_timeout,
timeout_fn=terminate_eval):
|
elif FLAGS.mode == 'train_and_eval':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
for cycle in range(0, FLAGS.num_epochs):
tf.logging.info('Starting training cycle, epoch: %d.' % cycle)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
steps=int(FLAGS.num_examples_per_epoch / FLAGS.train_batch_size))
tf.logging.info('Starting evaluation cycle, epoch: {:d}.'.format(
cycle + 1))
# Run evaluation after training finishes.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Evaluation results: %s' % eval_results)
else:
tf.logging.info('Mode not found.')
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
app.run(main)
|
tf.logging.info('Starting to evaluate.')
try:
        # Note that if eval_samples is not evenly divisible by eval_batch_size,
        # the remainder will be dropped, resulting in different evaluation
        # performance than validating on the full set.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
# Terminate eval job when final checkpoint is reached
current_step = int(os.path.basename(ckpt).split('-')[1])
total_step = int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size)
if current_step >= total_step:
tf.logging.info('Evaluation finished after training step %d' %
current_step)
break
except tf.errors.NotFoundError:
# Since the coordinator is on a different job than the TPU worker,
# sometimes the TPU worker does not finish initializing until long after
# the CPU job tells it to start evaluating. In this case, the checkpoint
# file could have been deleted already.
tf.logging.info('Checkpoint %s no longer exists, skipping checkpoint' %
ckpt)
|
conditional_block
|
retinanet_segmentation_main.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training script for RetinaNet segmentation model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
import dataloader
import retinanet_segmentation_model
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib import training as contrib_training
# Cloud TPU Cluster Resolvers
flags.DEFINE_string(
'tpu', default=None,
help='The Cloud TPU to use for training. This should be either the name '
'used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 '
'url.')
flags.DEFINE_string(
'gcp_project', default=None,
help='Project name for the Cloud TPU-enabled project. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
flags.DEFINE_string(
'tpu_zone', default=None,
help='GCE zone where the Cloud TPU is located in. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
# Model specific parameters
flags.DEFINE_bool('use_tpu', True, 'Use TPUs rather than CPUs')
flags.DEFINE_string('model_dir', None, 'Location of model_dir')
flags.DEFINE_string('resnet_checkpoint', None,
'Location of the ResNet50 checkpoint to use for model '
'initialization.')
flags.DEFINE_string('hparams', '',
'Comma separated k=v pairs of hyperparameters.')
flags.DEFINE_integer(
'num_shards', default=8, help='Number of shards (TPU cores)')
flags.DEFINE_integer('train_batch_size', 64, 'training batch size')
flags.DEFINE_integer('eval_batch_size', 8, 'evaluation batch size')
flags.DEFINE_integer('eval_samples', 1449, 'The number of samples for '
'evaluation.')
flags.DEFINE_integer(
'iterations_per_loop', 100, 'Number of iterations per TPU training loop')
flags.DEFINE_string(
'training_file_pattern', None,
'Glob for training data files (e.g., Pascal VOC train set)')
flags.DEFINE_string(
'validation_file_pattern', None,
'Glob for evaluation tfrecords (e.g., Pascal VOC validation set)')
flags.DEFINE_integer('num_examples_per_epoch', 10582,
'Number of examples in one epoch')
flags.DEFINE_integer('num_epochs', 45, 'Number of epochs for training')
flags.DEFINE_string('mode', 'train_and_eval',
                    'Mode to run: train, eval, or train_and_eval '
                    '(default: train_and_eval)')
flags.DEFINE_bool('eval_after_training', False, 'Run one eval after the '
'training finishes.')
# For Eval mode
flags.DEFINE_integer('min_eval_interval', 180,
'Minimum seconds between evaluations.')
flags.DEFINE_integer(
'eval_timeout', None,
'Maximum seconds between checkpoints before evaluation terminates.')
FLAGS = flags.FLAGS
def
|
(argv):
del argv # Unused.
if FLAGS.use_tpu:
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
tpu_grpc_url = tpu_cluster_resolver.get_master()
tf.Session.reset(tpu_grpc_url)
if FLAGS.mode in ('train',
'train_and_eval') and FLAGS.training_file_pattern is None:
raise RuntimeError('You must specify --training_file_pattern for training.')
if FLAGS.mode in ('eval', 'train_and_eval'):
if FLAGS.validation_file_pattern is None:
raise RuntimeError('You must specify'
'--validation_file_pattern for evaluation.')
# Parse hparams
hparams = retinanet_segmentation_model.default_hparams()
hparams.parse(FLAGS.hparams)
params = dict(
hparams.values(),
num_shards=FLAGS.num_shards,
num_examples_per_epoch=FLAGS.num_examples_per_epoch,
use_tpu=FLAGS.use_tpu,
resnet_checkpoint=FLAGS.resnet_checkpoint,
mode=FLAGS.mode,
)
run_config = contrib_tpu.RunConfig(
cluster=tpu_cluster_resolver,
evaluation_master='',
model_dir=FLAGS.model_dir,
keep_checkpoint_max=3,
log_step_count_steps=FLAGS.iterations_per_loop,
session_config=tf.ConfigProto(
allow_soft_placement=True, log_device_placement=False),
tpu_config=contrib_tpu.TPUConfig(
FLAGS.iterations_per_loop,
FLAGS.num_shards,
per_host_input_for_training=(
contrib_tpu.InputPipelineConfig.PER_HOST_V2)))
model_fn = retinanet_segmentation_model.segmentation_model_fn
# TPU Estimator
eval_params = dict(
params,
use_tpu=FLAGS.use_tpu,
input_rand_hflip=False,
resnet_checkpoint=None,
is_training_bn=False,
)
if FLAGS.mode == 'train':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
max_steps=int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size),
)
if FLAGS.eval_after_training:
# Run evaluation on CPU after training finishes.
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
elif FLAGS.mode == 'eval':
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
def terminate_eval():
tf.logging.info('Terminating eval after %d seconds of no checkpoints' %
FLAGS.eval_timeout)
return True
# Run evaluation when there's a new checkpoint
for ckpt in contrib_training.checkpoints_iterator(
FLAGS.model_dir,
min_interval_secs=FLAGS.min_eval_interval,
timeout=FLAGS.eval_timeout,
timeout_fn=terminate_eval):
tf.logging.info('Starting to evaluate.')
try:
        # Note that if eval_samples is not evenly divisible by eval_batch_size,
        # the remainder will be dropped, resulting in different evaluation
        # performance than validating on the full set.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
# Terminate eval job when final checkpoint is reached
current_step = int(os.path.basename(ckpt).split('-')[1])
total_step = int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size)
if current_step >= total_step:
tf.logging.info('Evaluation finished after training step %d' %
current_step)
break
except tf.errors.NotFoundError:
# Since the coordinator is on a different job than the TPU worker,
# sometimes the TPU worker does not finish initializing until long after
# the CPU job tells it to start evaluating. In this case, the checkpoint
# file could have been deleted already.
tf.logging.info('Checkpoint %s no longer exists, skipping checkpoint' %
ckpt)
elif FLAGS.mode == 'train_and_eval':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
for cycle in range(0, FLAGS.num_epochs):
tf.logging.info('Starting training cycle, epoch: %d.' % cycle)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
steps=int(FLAGS.num_examples_per_epoch / FLAGS.train_batch_size))
tf.logging.info('Starting evaluation cycle, epoch: {:d}.'.format(
cycle + 1))
# Run evaluation after training finishes.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Evaluation results: %s' % eval_results)
else:
tf.logging.info('Mode not found.')
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
app.run(main)
|
main
|
identifier_name
|
retinanet_segmentation_main.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training script for RetinaNet segmentation model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
import dataloader
import retinanet_segmentation_model
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib import training as contrib_training
|
'used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 '
'url.')
flags.DEFINE_string(
'gcp_project', default=None,
help='Project name for the Cloud TPU-enabled project. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
flags.DEFINE_string(
'tpu_zone', default=None,
help='GCE zone where the Cloud TPU is located in. If not specified, we '
'will attempt to automatically detect the GCE project from metadata.')
# Model specific parameters
flags.DEFINE_bool('use_tpu', True, 'Use TPUs rather than CPUs')
flags.DEFINE_string('model_dir', None, 'Location of model_dir')
flags.DEFINE_string('resnet_checkpoint', None,
'Location of the ResNet50 checkpoint to use for model '
'initialization.')
flags.DEFINE_string('hparams', '',
'Comma separated k=v pairs of hyperparameters.')
flags.DEFINE_integer(
'num_shards', default=8, help='Number of shards (TPU cores)')
flags.DEFINE_integer('train_batch_size', 64, 'training batch size')
flags.DEFINE_integer('eval_batch_size', 8, 'evaluation batch size')
flags.DEFINE_integer('eval_samples', 1449, 'The number of samples for '
'evaluation.')
flags.DEFINE_integer(
'iterations_per_loop', 100, 'Number of iterations per TPU training loop')
flags.DEFINE_string(
'training_file_pattern', None,
'Glob for training data files (e.g., Pascal VOC train set)')
flags.DEFINE_string(
'validation_file_pattern', None,
'Glob for evaluation tfrecords (e.g., Pascal VOC validation set)')
flags.DEFINE_integer('num_examples_per_epoch', 10582,
'Number of examples in one epoch')
flags.DEFINE_integer('num_epochs', 45, 'Number of epochs for training')
flags.DEFINE_string('mode', 'train_and_eval',
                    'Mode to run: train, eval, or train_and_eval '
                    '(default: train_and_eval)')
flags.DEFINE_bool('eval_after_training', False, 'Run one eval after the '
'training finishes.')
# For Eval mode
flags.DEFINE_integer('min_eval_interval', 180,
'Minimum seconds between evaluations.')
flags.DEFINE_integer(
'eval_timeout', None,
'Maximum seconds between checkpoints before evaluation terminates.')
FLAGS = flags.FLAGS
def main(argv):
del argv # Unused.
if FLAGS.use_tpu:
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
tpu_grpc_url = tpu_cluster_resolver.get_master()
tf.Session.reset(tpu_grpc_url)
if FLAGS.mode in ('train',
'train_and_eval') and FLAGS.training_file_pattern is None:
raise RuntimeError('You must specify --training_file_pattern for training.')
if FLAGS.mode in ('eval', 'train_and_eval'):
if FLAGS.validation_file_pattern is None:
raise RuntimeError('You must specify'
'--validation_file_pattern for evaluation.')
# Parse hparams
hparams = retinanet_segmentation_model.default_hparams()
hparams.parse(FLAGS.hparams)
params = dict(
hparams.values(),
num_shards=FLAGS.num_shards,
num_examples_per_epoch=FLAGS.num_examples_per_epoch,
use_tpu=FLAGS.use_tpu,
resnet_checkpoint=FLAGS.resnet_checkpoint,
mode=FLAGS.mode,
)
run_config = contrib_tpu.RunConfig(
cluster=tpu_cluster_resolver,
evaluation_master='',
model_dir=FLAGS.model_dir,
keep_checkpoint_max=3,
log_step_count_steps=FLAGS.iterations_per_loop,
session_config=tf.ConfigProto(
allow_soft_placement=True, log_device_placement=False),
tpu_config=contrib_tpu.TPUConfig(
FLAGS.iterations_per_loop,
FLAGS.num_shards,
per_host_input_for_training=(
contrib_tpu.InputPipelineConfig.PER_HOST_V2)))
model_fn = retinanet_segmentation_model.segmentation_model_fn
# TPU Estimator
eval_params = dict(
params,
use_tpu=FLAGS.use_tpu,
input_rand_hflip=False,
resnet_checkpoint=None,
is_training_bn=False,
)
if FLAGS.mode == 'train':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
max_steps=int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size),
)
if FLAGS.eval_after_training:
# Run evaluation on CPU after training finishes.
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
elif FLAGS.mode == 'eval':
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
def terminate_eval():
tf.logging.info('Terminating eval after %d seconds of no checkpoints' %
FLAGS.eval_timeout)
return True
# Run evaluation when there's a new checkpoint
for ckpt in contrib_training.checkpoints_iterator(
FLAGS.model_dir,
min_interval_secs=FLAGS.min_eval_interval,
timeout=FLAGS.eval_timeout,
timeout_fn=terminate_eval):
tf.logging.info('Starting to evaluate.')
try:
        # Note that if eval_samples is not evenly divisible by eval_batch_size,
        # the remainder will be dropped, resulting in different evaluation
        # performance than validating on the full set.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Eval results: %s' % eval_results)
# Terminate eval job when final checkpoint is reached
current_step = int(os.path.basename(ckpt).split('-')[1])
total_step = int((FLAGS.num_epochs * FLAGS.num_examples_per_epoch) /
FLAGS.train_batch_size)
if current_step >= total_step:
tf.logging.info('Evaluation finished after training step %d' %
current_step)
break
except tf.errors.NotFoundError:
# Since the coordinator is on a different job than the TPU worker,
# sometimes the TPU worker does not finish initializing until long after
# the CPU job tells it to start evaluating. In this case, the checkpoint
# file could have been deleted already.
tf.logging.info('Checkpoint %s no longer exists, skipping checkpoint' %
ckpt)
elif FLAGS.mode == 'train_and_eval':
train_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
config=run_config,
params=params)
eval_estimator = contrib_tpu.TPUEstimator(
model_fn=retinanet_segmentation_model.segmentation_model_fn,
use_tpu=FLAGS.use_tpu,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
config=run_config,
params=eval_params)
for cycle in range(0, FLAGS.num_epochs):
tf.logging.info('Starting training cycle, epoch: %d.' % cycle)
train_estimator.train(
input_fn=dataloader.SegmentationInputReader(
FLAGS.training_file_pattern, is_training=True),
steps=int(FLAGS.num_examples_per_epoch / FLAGS.train_batch_size))
tf.logging.info('Starting evaluation cycle, epoch: {:d}.'.format(
cycle + 1))
# Run evaluation after training finishes.
eval_results = eval_estimator.evaluate(
input_fn=dataloader.SegmentationInputReader(
FLAGS.validation_file_pattern, is_training=False),
steps=FLAGS.eval_samples//FLAGS.eval_batch_size)
tf.logging.info('Evaluation results: %s' % eval_results)
else:
tf.logging.info('Mode not found.')
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
app.run(main)
|
# Cloud TPU Cluster Resolvers
flags.DEFINE_string(
'tpu', default=None,
help='The Cloud TPU to use for training. This should be either the name '
|
random_line_split
|
document.rs
|
use common::{ApiError, Body, Credentials, Query, discovery_api};
use hyper::method::Method::{Delete, Get, Post};
use serde_json::Value;
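// Thin wrappers around the Watson Discovery documents API: `detail` issues a
// GET, `delete` a DELETE, and `create` a POST, optionally targeting an
// existing document id and configuration.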
pub fn detail(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Get, &path, Query::None, &Body::None)?)
}
pub fn delete(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Delete, &path, Query::None, &Body::None)?)
}
pub fn create(creds: &Credentials,
env_id: &str,
collection_id: &str,
configuration_id: Option<&str>,
document_id: Option<&str>,
filename: &str)
-> Result<Value, ApiError> {
let path = match document_id {
Some(id) => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents/" + id
}
None => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents"
}
};
|
};
Ok(discovery_api(creds, Post, &path, q, &Body::Filename(filename))?)
}
|
let q = match configuration_id {
Some(id) => Query::Config(id.to_string()),
None => Query::None,
|
random_line_split
|
document.rs
|
use common::{ApiError, Body, Credentials, Query, discovery_api};
use hyper::method::Method::{Delete, Get, Post};
use serde_json::Value;
pub fn
|
(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Get, &path, Query::None, &Body::None)?)
}
pub fn delete(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Delete, &path, Query::None, &Body::None)?)
}
pub fn create(creds: &Credentials,
env_id: &str,
collection_id: &str,
configuration_id: Option<&str>,
document_id: Option<&str>,
filename: &str)
-> Result<Value, ApiError> {
let path = match document_id {
Some(id) => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents/" + id
}
None => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents"
}
};
let q = match configuration_id {
Some(id) => Query::Config(id.to_string()),
None => Query::None,
};
Ok(discovery_api(creds, Post, &path, q, &Body::Filename(filename))?)
}
|
detail
|
identifier_name
|
document.rs
|
use common::{ApiError, Body, Credentials, Query, discovery_api};
use hyper::method::Method::{Delete, Get, Post};
use serde_json::Value;
pub fn detail(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Get, &path, Query::None, &Body::None)?)
}
pub fn delete(creds: &Credentials,
env_id: &str,
collection_id: &str,
document_id: &str)
-> Result<Value, ApiError> {
let path = "/v1/environments/".to_string() + env_id + "/collections/" +
collection_id +
"/documents/" + document_id;
Ok(discovery_api(creds, Delete, &path, Query::None, &Body::None)?)
}
pub fn create(creds: &Credentials,
env_id: &str,
collection_id: &str,
configuration_id: Option<&str>,
document_id: Option<&str>,
filename: &str)
-> Result<Value, ApiError>
|
{
let path = match document_id {
Some(id) => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents/" + id
}
None => {
"/v1/environments/".to_string() + env_id + "/collections/" +
collection_id + "/documents"
}
};
let q = match configuration_id {
Some(id) => Query::Config(id.to_string()),
None => Query::None,
};
Ok(discovery_api(creds, Post, &path, q, &Body::Filename(filename))?)
}
|
identifier_body
|
|
configuration.component.ts
|
import { Store, select } from '@ngrx/store';
import { Component, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { ActivationEnd, Router } from '@angular/router';
import { Observable, Subject } from 'rxjs';
import { filter, takeUntil, map } from 'rxjs/operators';
import { MatButtonModule } from '@angular/material/button';
import { ActionIncrement } from '../main.actions';
import { routeAnimations, TitleService } from '@app/core';
//import { tick } from '@angular/core';
import {
State as BaseSettingsState,
selectSettings,
SettingsState
} from '@app/settings';
import { selectMain } from '../main.selectors';
import { MainState } from '../main.state';
import { State as BaseMainState } from '../main.state';
import { selectAuth } from '@app/core/auth/auth.selectors';
interface State extends BaseSettingsState, BaseMainState {}
@Component({
selector: 'configuration',
templateUrl: './configuration.component.html',
//styleUrls: ['./main.component.scss'],
animations: [routeAnimations]
})
export class ConfigurationComponent implements OnInit, OnDestroy {
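  // Emits and completes in ngOnDestroy so every takeUntil(this.unsubscribe$)
  // pipe below is torn down together with the component.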
private unsubscribe$: Subject<void> = new Subject<void>();
private isAuthenticated$: Observable<boolean>;
main: MainState;
constructor(
private store: Store<State>,
private router: Router,
private titleService: TitleService,
private translate: TranslateService
) {}
ngOnInit(): void {
this.translate.setDefaultLang('en');
this.subscribeToMain();
//this.subscribeToSettings();
this.subscribeToRouterEvents();
this.isAuthenticated$ = this.store.pipe(
select(selectAuth),
map(auth => auth.isAuthenticated)
);
}
ngOnDestroy(): void {
this.unsubscribe$.next();
|
this.unsubscribe$.complete();
}
private subscribeToRouterEvents() {
this.titleService.setTitle(
this.router.routerState.snapshot.root,
this.translate
);
this.router.events
.pipe(
filter(event => event instanceof ActivationEnd),
map((event: ActivationEnd) => event.snapshot),
takeUntil(this.unsubscribe$)
)
.subscribe(snapshot =>
this.titleService.setTitle(snapshot, this.translate)
);
}
private subscribeToMain() {
this.store
.pipe(
select(selectMain),
takeUntil(this.unsubscribe$)
)
.subscribe((main: MainState) => {
this.main = main;
//this.setCount(main);
});
}
private setCount(main: MainState) {
console.log(main);
//this.count = main.count;
}
//@ViewChild('increment') increment;
increment($event) {
    console.log('increment');
console.log($event);
console.log(this);
this.store.dispatch(new ActionIncrement({incCount: 2}));
//this.increment.focus();
}
incrementAsync($event) {
    console.log('incrementAsync');
console.log($event);
//this.increment.focus();
//tick(1000);
this.delay(10000).then( () =>
//setTimeout( () => { this.router.navigate(['/']); }, 5000);
this.store.dispatch(new ActionIncrement({incCount: 2})));
}
async delay(ms: number) {
await new Promise(resolve => setTimeout(()=>resolve(), ms)).then(()=>console.log("fired"));
}
}
|
random_line_split
|
|
configuration.component.ts
|
import { Store, select } from '@ngrx/store';
import { Component, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { ActivationEnd, Router } from '@angular/router';
import { Observable, Subject } from 'rxjs';
import { filter, takeUntil, map } from 'rxjs/operators';
import { MatButtonModule } from '@angular/material/button';
import { ActionIncrement } from '../main.actions';
import { routeAnimations, TitleService } from '@app/core';
//import { tick } from '@angular/core';
import {
State as BaseSettingsState,
selectSettings,
SettingsState
} from '@app/settings';
import { selectMain } from '../main.selectors';
import { MainState } from '../main.state';
import { State as BaseMainState } from '../main.state';
import { selectAuth } from '@app/core/auth/auth.selectors';
interface State extends BaseSettingsState, BaseMainState {}
@Component({
selector: 'configuration',
templateUrl: './configuration.component.html',
//styleUrls: ['./main.component.scss'],
animations: [routeAnimations]
})
export class ConfigurationComponent implements OnInit, OnDestroy {
private unsubscribe$: Subject<void> = new Subject<void>();
private isAuthenticated$: Observable<boolean>;
main: MainState;
constructor(
private store: Store<State>,
private router: Router,
private titleService: TitleService,
private translate: TranslateService
) {}
ngOnInit(): void {
this.translate.setDefaultLang('en');
this.subscribeToMain();
//this.subscribeToSettings();
this.subscribeToRouterEvents();
this.isAuthenticated$ = this.store.pipe(
select(selectAuth),
map(auth => auth.isAuthenticated)
);
}
ngOnDestroy(): void {
this.unsubscribe$.next();
this.unsubscribe$.complete();
}
private subscribeToRouterEvents() {
this.titleService.setTitle(
this.router.routerState.snapshot.root,
this.translate
);
this.router.events
.pipe(
filter(event => event instanceof ActivationEnd),
map((event: ActivationEnd) => event.snapshot),
takeUntil(this.unsubscribe$)
)
.subscribe(snapshot =>
this.titleService.setTitle(snapshot, this.translate)
);
}
private subscribeToMain() {
this.store
.pipe(
select(selectMain),
takeUntil(this.unsubscribe$)
)
.subscribe((main: MainState) => {
this.main = main;
//this.setCount(main);
});
}
private setCount(main: MainState)
|
//@ViewChild('increment') increment;
increment($event) {
    console.log('increment');
console.log($event);
console.log(this);
this.store.dispatch(new ActionIncrement({incCount: 2}));
//this.increment.focus();
}
incrementAsync($event) {
    console.log('incrementAsync');
console.log($event);
//this.increment.focus();
//tick(1000);
this.delay(10000).then( () =>
//setTimeout( () => { this.router.navigate(['/']); }, 5000);
this.store.dispatch(new ActionIncrement({incCount: 2})));
}
async delay(ms: number) {
await new Promise(resolve => setTimeout(()=>resolve(), ms)).then(()=>console.log("fired"));
}
}
|
{
console.log(main);
//this.count = main.count;
}
|
identifier_body
|
configuration.component.ts
|
import { Store, select } from '@ngrx/store';
import { Component, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { ActivationEnd, Router } from '@angular/router';
import { Observable, Subject } from 'rxjs';
import { filter, takeUntil, map } from 'rxjs/operators';
import { MatButtonModule } from '@angular/material/button';
import { ActionIncrement } from '../main.actions';
import { routeAnimations, TitleService } from '@app/core';
//import { tick } from '@angular/core';
import {
State as BaseSettingsState,
selectSettings,
SettingsState
} from '@app/settings';
import { selectMain } from '../main.selectors';
import { MainState } from '../main.state';
import { State as BaseMainState } from '../main.state';
import { selectAuth } from '@app/core/auth/auth.selectors';
interface State extends BaseSettingsState, BaseMainState {}
@Component({
selector: 'configuration',
templateUrl: './configuration.component.html',
//styleUrls: ['./main.component.scss'],
animations: [routeAnimations]
})
export class ConfigurationComponent implements OnInit, OnDestroy {
private unsubscribe$: Subject<void> = new Subject<void>();
private isAuthenticated$: Observable<boolean>;
main: MainState;
constructor(
private store: Store<State>,
private router: Router,
private titleService: TitleService,
private translate: TranslateService
) {}
ngOnInit(): void {
this.translate.setDefaultLang('en');
this.subscribeToMain();
//this.subscribeToSettings();
this.subscribeToRouterEvents();
this.isAuthenticated$ = this.store.pipe(
select(selectAuth),
map(auth => auth.isAuthenticated)
);
}
ngOnDestroy(): void {
this.unsubscribe$.next();
this.unsubscribe$.complete();
}
private subscribeToRouterEvents() {
this.titleService.setTitle(
this.router.routerState.snapshot.root,
this.translate
);
this.router.events
.pipe(
filter(event => event instanceof ActivationEnd),
map((event: ActivationEnd) => event.snapshot),
takeUntil(this.unsubscribe$)
)
.subscribe(snapshot =>
this.titleService.setTitle(snapshot, this.translate)
);
}
private subscribeToMain() {
this.store
.pipe(
select(selectMain),
takeUntil(this.unsubscribe$)
)
.subscribe((main: MainState) => {
this.main = main;
//this.setCount(main);
});
}
private
|
(main: MainState) {
console.log(main);
//this.count = main.count;
}
//@ViewChild('increment') increment;
increment($event) {
    console.log('increment');
console.log($event);
console.log(this);
this.store.dispatch(new ActionIncrement({incCount: 2}));
//this.increment.focus();
}
incrementAsync($event) {
    console.log('incrementAsync');
console.log($event);
//this.increment.focus();
//tick(1000);
this.delay(10000).then( () =>
//setTimeout( () => { this.router.navigate(['/']); }, 5000);
this.store.dispatch(new ActionIncrement({incCount: 2})));
}
async delay(ms: number) {
await new Promise(resolve => setTimeout(()=>resolve(), ms)).then(()=>console.log("fired"));
}
}
|
setCount
|
identifier_name
|
call_borders.py
|
#!/usr/bin/env python
"""
Generate BED files with TAD borders of particular width.
TAD border of width 2r between two TADs is defined as a region consisting of
r bp to the left and r bp to the right of the point separating the two TADs.
Usage:
call_borders.py (-f <TADs_filename> | -d <input_directory>) -w <border_width> [-n <track_name_for_all_borders> -o <output_directory>]
Options:
-h --help Show this screen.
--version Show version.
-f <TADs_filename> Name of a BED file with TAD coordinates.
-d <input_directory> Name of a directory with BED files containing TAD coordinates.
-w <border_width> Border width (in bp).
-n <track_name_for_all_borders> A name for a track with all borders. Default: All_borders_<border_width>.
-o <output_directory> Output directory.
"""
import sys
print
modules = ["docopt", "os"]
exit_flag = False
for module in modules:
try:
__import__(module)
except ImportError:
exit_flag = True
sys.stderr.write("Error: Python module " + module + " is not installed.\n")
if exit_flag:
sys.stderr.write("You can install these modules with a command: pip install <module>\n")
sys.stderr.write("(Administrator privileges may be required.)\n")
sys.exit(1)
from docopt import docopt
from os.path import exists
from os.path import join
from os.path import splitext
from os.path import basename
from os.path import isdir
from os.path import isfile
from os import listdir
from os import makedirs
def call_borders(filename, output_directory, half_width):
name, ext = splitext(filename)
file_basename = basename(name)
if output_directory == '':
file_name = name
else:
file_name = file_basename
output_filename = join(output_directory, file_name + '_borders_' + str(border_width) + '.bed')
prev_chrom_name = ''
with open(filename, 'r') as src, open(output_filename, 'w') as dst:
left_coord = ''
i = -1
for line in src:
i += 1
line_list = line.rstrip('\n').split('\t')
if len(line_list) == 1:
dst.write(line) # copy track line
i -= 1
continue
if left_coord == '': # for the first line
left_coord = int(line_list[2]) - half_width
continue
chrom_name = line_list[0]
if chrom_name != prev_chrom_name and prev_chrom_name != '':
|
right_coord = int(line_list[1]) + half_width
border_name = chrom_name + '.border.' + str(i)
score = 0 # Just to fill in the field
strand = '.' # Just to fill in the field
color = '0,255,0' # green
border_line = chrom_name + '\t' + str(left_coord) + '\t' + str(right_coord) + '\t' + \
border_name + '\t' + str(score) + '\t' + strand + '\t' + \
str(left_coord) + '\t' + str(right_coord) + '\t' + color
dst.write(border_line + '\n')
left_coord = int(line_list[2]) - half_width
prev_chrom_name = chrom_name
if __name__ == '__main__':
arguments = docopt(__doc__, version='call_borders 0.3')
if arguments["-f"] != None:
filename = arguments["-f"]
if not exists(filename):
print "Error: Can't find BED file with TAD coordinates: no such file '" + \
filename + "'. Exit.\n"
sys.exit(1)
if not isfile(filename):
print "Error: BED file with TAD coordinates must be a regular file. " + \
"Something else given. Exit.\n"
sys.exit(1)
input_directory = None
else:
filename = None
input_directory = arguments["-d"].rstrip('/')
if not exists(input_directory):
print "Error: Can't find input directory: no such directory '" + \
input_directory + "'. Exit.\n"
sys.exit(1)
if not isdir(input_directory):
print "Error: Input directory must be a directory:). Something else given. Exit.\n"
sys.exit(1)
try:
border_width = int(arguments["-w"])
except ValueError:
print "Error: Border width must be an integer greater than 0. Exit.\n"
sys.exit(1)
    if border_width <= 0:
        print "Error: Border width must be an integer greater than 0. Exit.\n"
        sys.exit(1)
half_width = border_width / 2
if arguments["-n"] != None:
track_name = arguments["-n"]
else:
track_name = "All_borders_" + str(border_width)
if arguments["-o"] != None:
output_directory = arguments["-o"].rstrip('/')
elif input_directory != None:
output_directory = input_directory + '_borders_' + str(border_width)
else:
output_directory = ''
if output_directory != '' and (not exists(output_directory)):
makedirs(output_directory)
if filename != None:
call_borders(filename, output_directory, half_width)
else:
file_list = listdir(input_directory)
for file in file_list:
full_path = join(input_directory, file)
if isdir(full_path):
continue
call_borders(full_path, output_directory, half_width)
# merge BED files for individual chromosomes in one BED file
filename_list = listdir(output_directory)
genome_bed_filename = join(output_directory, 'All_borders_' + str(border_width) + '.bed')
with open(genome_bed_filename, 'w') as dst:
track_line = 'track name="' + track_name + '" visibility=1 itemRgb="On"'
dst.write(track_line + '\n')
for filename in sorted(filename_list):
                with open(join(output_directory, filename), 'r') as src:
for i, line in enumerate(src):
if i == 0:
continue
dst.write(line)
|
left_coord = int(line_list[2]) - half_width
i = 0
prev_chrom_name = chrom_name
continue
|
conditional_block
|
call_borders.py
|
#!/usr/bin/env python
"""
Generate BED files with TAD borders of particular width.
TAD border of width 2r between two TADs is defined as a region consisting of
r bp to the left and r bp to the right of the point separating the two TADs.
Usage:
call_borders.py (-f <TADs_filename> | -d <input_directory>) -w <border_width> [-n <track_name_for_all_borders> -o <output_directory>]
Options:
-h --help Show this screen.
--version Show version.
-f <TADs_filename> Name of a BED file with TAD coordinates.
-d <input_directory> Name of a directory with BED files containing TAD coordinates.
-w <border_width> Border width (in bp).
-n <track_name_for_all_borders> A name for a track with all borders. Default: All_borders_<border_width>.
-o <output_directory> Output directory.
"""
import sys
print
modules = ["docopt", "os"]
exit_flag = False
for module in modules:
try:
__import__(module)
except ImportError:
exit_flag = True
sys.stderr.write("Error: Python module " + module + " is not installed.\n")
if exit_flag:
sys.stderr.write("You can install these modules with a command: pip install <module>\n")
sys.stderr.write("(Administrator privileges may be required.)\n")
sys.exit(1)
from docopt import docopt
from os.path import exists
from os.path import join
from os.path import splitext
from os.path import basename
from os.path import isdir
from os.path import isfile
from os import listdir
from os import makedirs
def
|
(filename, output_directory, half_width):
name, ext = splitext(filename)
file_basename = basename(name)
if output_directory == '':
file_name = name
else:
file_name = file_basename
output_filename = join(output_directory, file_name + '_borders_' + str(border_width) + '.bed')
prev_chrom_name = ''
with open(filename, 'r') as src, open(output_filename, 'w') as dst:
left_coord = ''
i = -1
for line in src:
i += 1
line_list = line.rstrip('\n').split('\t')
if len(line_list) == 1:
dst.write(line) # copy track line
i -= 1
continue
if left_coord == '': # for the first line
left_coord = int(line_list[2]) - half_width
continue
chrom_name = line_list[0]
if chrom_name != prev_chrom_name and prev_chrom_name != '':
left_coord = int(line_list[2]) - half_width
i = 0
prev_chrom_name = chrom_name
continue
right_coord = int(line_list[1]) + half_width
border_name = chrom_name + '.border.' + str(i)
score = 0 # Just to fill in the field
strand = '.' # Just to fill in the field
color = '0,255,0' # green
border_line = chrom_name + '\t' + str(left_coord) + '\t' + str(right_coord) + '\t' + \
border_name + '\t' + str(score) + '\t' + strand + '\t' + \
str(left_coord) + '\t' + str(right_coord) + '\t' + color
dst.write(border_line + '\n')
left_coord = int(line_list[2]) - half_width
prev_chrom_name = chrom_name
if __name__ == '__main__':
arguments = docopt(__doc__, version='call_borders 0.3')
if arguments["-f"] != None:
filename = arguments["-f"]
if not exists(filename):
print "Error: Can't find BED file with TAD coordinates: no such file '" + \
filename + "'. Exit.\n"
sys.exit(1)
if not isfile(filename):
print "Error: BED file with TAD coordinates must be a regular file. " + \
"Something else given. Exit.\n"
sys.exit(1)
input_directory = None
else:
filename = None
input_directory = arguments["-d"].rstrip('/')
if not exists(input_directory):
print "Error: Can't find input directory: no such directory '" + \
input_directory + "'. Exit.\n"
sys.exit(1)
if not isdir(input_directory):
print "Error: Input directory must be a directory:). Something else given. Exit.\n"
sys.exit(1)
try:
border_width = int(arguments["-w"])
except ValueError:
print "Error: Border width must be an integer greater than 0. Exit.\n"
sys.exit(1)
    if border_width <= 0:
        print "Error: Border width must be an integer greater than 0. Exit.\n"
        sys.exit(1)
half_width = border_width / 2
if arguments["-n"] != None:
track_name = arguments["-n"]
else:
track_name = "All_borders_" + str(border_width)
if arguments["-o"] != None:
output_directory = arguments["-o"].rstrip('/')
elif input_directory != None:
output_directory = input_directory + '_borders_' + str(border_width)
else:
output_directory = ''
if output_directory != '' and (not exists(output_directory)):
makedirs(output_directory)
if filename != None:
call_borders(filename, output_directory, half_width)
else:
file_list = listdir(input_directory)
for file in file_list:
full_path = join(input_directory, file)
if isdir(full_path):
continue
call_borders(full_path, output_directory, half_width)
# merge BED files for individual chromosomes in one BED file
filename_list = listdir(output_directory)
genome_bed_filename = join(output_directory, 'All_borders_' + str(border_width) + '.bed')
with open(genome_bed_filename, 'w') as dst:
track_line = 'track name="' + track_name + '" visibility=1 itemRgb="On"'
dst.write(track_line + '\n')
for filename in sorted(filename_list):
                with open(join(output_directory, filename), 'r') as src:
for i, line in enumerate(src):
if i == 0:
continue
dst.write(line)
|
call_borders
|
identifier_name
|
call_borders.py
|
#!/usr/bin/env python
"""
Generate BED files with TAD borders of particular width.
TAD border of width 2r between two TADs is defined as a region consisting of
r bp to the left and r bp to the right of the point separating the two TADs.
Usage:
call_borders.py (-f <TADs_filename> | -d <input_directory>) -w <border_width> [-n <track_name_for_all_borders> -o <output_directory>]
Options:
-h --help Show this screen.
--version Show version.
-f <TADs_filename> Name of a BED file with TAD coordinates.
-d <input_directory> Name of a directory with BED files containing TAD coordinates.
-w <border_width> Border width (in bp).
-n <track_name_for_all_borders> A name for a track with all borders. Default: All_borders_<border_width>.
-o <output_directory> Output directory.
"""
import sys
print
modules = ["docopt", "os"]
exit_flag = False
for module in modules:
try:
__import__(module)
except ImportError:
exit_flag = True
sys.stderr.write("Error: Python module " + module + " is not installed.\n")
if exit_flag:
sys.stderr.write("You can install these modules with a command: pip install <module>\n")
sys.stderr.write("(Administrator privileges may be required.)\n")
sys.exit(1)
from docopt import docopt
from os.path import exists
from os.path import join
from os.path import splitext
from os.path import basename
from os.path import isdir
from os.path import isfile
from os import listdir
from os import makedirs
def call_borders(filename, output_directory, half_width):
|
if __name__ == '__main__':
arguments = docopt(__doc__, version='call_borders 0.3')
if arguments["-f"] != None:
filename = arguments["-f"]
if not exists(filename):
print "Error: Can't find BED file with TAD coordinates: no such file '" + \
filename + "'. Exit.\n"
sys.exit(1)
if not isfile(filename):
print "Error: BED file with TAD coordinates must be a regular file. " + \
"Something else given. Exit.\n"
sys.exit(1)
input_directory = None
else:
filename = None
input_directory = arguments["-d"].rstrip('/')
if not exists(input_directory):
print "Error: Can't find input directory: no such directory '" + \
input_directory + "'. Exit.\n"
sys.exit(1)
if not isdir(input_directory):
print "Error: Input directory must be a directory:). Something else given. Exit.\n"
sys.exit(1)
try:
border_width = int(arguments["-w"])
except ValueError:
print "Error: Border width must be an integer greater than 0. Exit.\n"
sys.exit(1)
    if border_width <= 0:
        print "Error: Border width must be an integer greater than 0. Exit.\n"
        sys.exit(1)
half_width = border_width / 2
if arguments["-n"] != None:
track_name = arguments["-n"]
else:
track_name = "All_borders_" + str(border_width)
if arguments["-o"] != None:
output_directory = arguments["-o"].rstrip('/')
elif input_directory != None:
output_directory = input_directory + '_borders_' + str(border_width)
else:
output_directory = ''
if output_directory != '' and (not exists(output_directory)):
makedirs(output_directory)
if filename != None:
call_borders(filename, output_directory, half_width)
else:
file_list = listdir(input_directory)
for file in file_list:
full_path = join(input_directory, file)
if isdir(full_path):
continue
call_borders(full_path, output_directory, half_width)
# merge BED files for individual chromosomes in one BED file
filename_list = listdir(output_directory)
genome_bed_filename = join(output_directory, 'All_borders_' + str(border_width) + '.bed')
with open(genome_bed_filename, 'w') as dst:
track_line = 'track name="' + track_name + '" visibility=1 itemRgb="On"'
dst.write(track_line + '\n')
for filename in sorted(filename_list):
                with open(join(output_directory, filename), 'r') as src:
for i, line in enumerate(src):
if i == 0:
continue
dst.write(line)
|
name, ext = splitext(filename)
file_basename = basename(name)
if output_directory == '':
file_name = name
else:
file_name = file_basename
output_filename = join(output_directory, file_name + '_borders_' + str(border_width) + '.bed')
prev_chrom_name = ''
with open(filename, 'r') as src, open(output_filename, 'w') as dst:
left_coord = ''
i = -1
for line in src:
i += 1
line_list = line.rstrip('\n').split('\t')
if len(line_list) == 1:
dst.write(line) # copy track line
i -= 1
continue
if left_coord == '': # for the first line
left_coord = int(line_list[2]) - half_width
continue
chrom_name = line_list[0]
if chrom_name != prev_chrom_name and prev_chrom_name != '':
left_coord = int(line_list[2]) - half_width
i = 0
prev_chrom_name = chrom_name
continue
right_coord = int(line_list[1]) + half_width
border_name = chrom_name + '.border.' + str(i)
score = 0 # Just to fill in the field
strand = '.' # Just to fill in the field
color = '0,255,0' # green
border_line = chrom_name + '\t' + str(left_coord) + '\t' + str(right_coord) + '\t' + \
border_name + '\t' + str(score) + '\t' + strand + '\t' + \
str(left_coord) + '\t' + str(right_coord) + '\t' + color
dst.write(border_line + '\n')
left_coord = int(line_list[2]) - half_width
prev_chrom_name = chrom_name
|
identifier_body
|
call_borders.py
|
#!/usr/bin/env python
"""
Generate BED files with TAD borders of particular width.
TAD border of width 2r between two TADs is defined as a region consisting of
r bp to the left and r bp to the right of the point separating the two TADs.
Usage:
call_borders.py (-f <TADs_filename> | -d <input_directory>) -w <border_width> [-n <track_name_for_all_borders> -o <output_directory>]
Options:
-h --help Show this screen.
--version Show version.
-f <TADs_filename> Name of a BED file with TAD coordinates.
-d <input_directory> Name of a directory with BED files containing TAD coordinates.
-w <border_width> Border width (in bp).
-n <track_name_for_all_borders> A name for a track with all borders. Default: All_borders_<border_width>.
-o <output_directory> Output directory.
"""
import sys
print
modules = ["docopt", "os"]
exit_flag = False
for module in modules:
try:
__import__(module)
except ImportError:
exit_flag = True
sys.stderr.write("Error: Python module " + module + " is not installed.\n")
if exit_flag:
sys.stderr.write("You can install these modules with a command: pip install <module>\n")
sys.stderr.write("(Administrator privileges may be required.)\n")
sys.exit(1)
from docopt import docopt
from os.path import exists
from os.path import join
from os.path import splitext
from os.path import basename
from os.path import isdir
from os.path import isfile
from os import listdir
from os import makedirs
def call_borders(filename, output_directory, half_width):
name, ext = splitext(filename)
file_basename = basename(name)
if output_directory == '':
file_name = name
else:
file_name = file_basename
output_filename = join(output_directory, file_name + '_borders_' + str(border_width) + '.bed')
prev_chrom_name = ''
with open(filename, 'r') as src, open(output_filename, 'w') as dst:
left_coord = ''
i = -1
for line in src:
i += 1
line_list = line.rstrip('\n').split('\t')
if len(line_list) == 1:
dst.write(line) # copy track line
i -= 1
continue
if left_coord == '': # for the first line
left_coord = int(line_list[2]) - half_width
continue
chrom_name = line_list[0]
if chrom_name != prev_chrom_name and prev_chrom_name != '':
left_coord = int(line_list[2]) - half_width
i = 0
prev_chrom_name = chrom_name
continue
right_coord = int(line_list[1]) + half_width
border_name = chrom_name + '.border.' + str(i)
score = 0 # Just to fill in the field
strand = '.' # Just to fill in the field
color = '0,255,0' # green
border_line = chrom_name + '\t' + str(left_coord) + '\t' + str(right_coord) + '\t' + \
border_name + '\t' + str(score) + '\t' + strand + '\t' + \
str(left_coord) + '\t' + str(right_coord) + '\t' + color
dst.write(border_line + '\n')
left_coord = int(line_list[2]) - half_width
prev_chrom_name = chrom_name
if __name__ == '__main__':
arguments = docopt(__doc__, version='call_borders 0.3')
if arguments["-f"] != None:
filename = arguments["-f"]
if not exists(filename):
print "Error: Can't find BED file with TAD coordinates: no such file '" + \
filename + "'. Exit.\n"
sys.exit(1)
if not isfile(filename):
print "Error: BED file with TAD coordinates must be a regular file. " + \
"Something else given. Exit.\n"
sys.exit(1)
input_directory = None
else:
filename = None
input_directory = arguments["-d"].rstrip('/')
if not exists(input_directory):
print "Error: Can't find input directory: no such directory '" + \
input_directory + "'. Exit.\n"
sys.exit(1)
|
try:
border_width = int(arguments["-w"])
except ValueError:
print "Error: Border width must be an integer greater than 0. Exit.\n"
sys.exit(1)
    if border_width <= 0:
        print "Error: Border width must be an integer greater than 0. Exit.\n"
        sys.exit(1)
half_width = border_width / 2
if arguments["-n"] != None:
track_name = arguments["-n"]
else:
track_name = "All_borders_" + str(border_width)
if arguments["-o"] != None:
output_directory = arguments["-o"].rstrip('/')
elif input_directory != None:
output_directory = input_directory + '_borders_' + str(border_width)
else:
output_directory = ''
if output_directory != '' and (not exists(output_directory)):
makedirs(output_directory)
if filename != None:
call_borders(filename, output_directory, half_width)
else:
file_list = listdir(input_directory)
for file in file_list:
full_path = join(input_directory, file)
if isdir(full_path):
continue
call_borders(full_path, output_directory, half_width)
# merge BED files for individual chromosomes in one BED file
filename_list = listdir(output_directory)
genome_bed_filename = join(output_directory, 'All_borders_' + str(border_width) + '.bed')
with open(genome_bed_filename, 'w') as dst:
track_line = 'track name="' + track_name + '" visibility=1 itemRgb="On"'
dst.write(track_line + '\n')
for filename in sorted(filename_list):
                with open(join(output_directory, filename), 'r') as src:
for i, line in enumerate(src):
if i == 0:
continue
dst.write(line)
|
if not isdir(input_directory):
print "Error: Input directory must be a directory:). Something else given. Exit.\n"
sys.exit(1)
|
random_line_split
|
colorSchemeSettingsTab.component.ts
|
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import deepEqual from 'deep-equal'
import { Component, Inject, Input, ChangeDetectionStrategy, ChangeDetectorRef, HostBinding } from '@angular/core'
import { ConfigService, PlatformService, TranslateService } from 'tabby-core'
import { TerminalColorSchemeProvider } from '../api/colorSchemeProvider'
import { TerminalColorScheme } from '../api/interfaces'
/** @hidden */
@Component({
template: require('./colorSchemeSettingsTab.component.pug'),
styles: [require('./colorSchemeSettingsTab.component.scss')],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ColorSchemeSettingsTabComponent {
@Input() stockColorSchemes: TerminalColorScheme[] = []
@Input() customColorSchemes: TerminalColorScheme[] = []
@Input() allColorSchemes: TerminalColorScheme[] = []
@Input() filter = ''
@Input() editing = false
colorIndexes = [...new Array(16).keys()]
currentStockScheme: TerminalColorScheme|null = null
currentCustomScheme: TerminalColorScheme|null = null
@HostBinding('class.content-box') true
constructor (
@Inject(TerminalColorSchemeProvider) private colorSchemeProviders: TerminalColorSchemeProvider[],
private changeDetector: ChangeDetectorRef,
private platform: PlatformService,
private translate: TranslateService,
public config: ConfigService,
) { }
async ngOnInit () {
this.stockColorSchemes = (await Promise.all(this.config.enabledServices(this.colorSchemeProviders).map(x => x.getSchemes()))).reduce((a, b) => a.concat(b))
this.stockColorSchemes.sort((a, b) => a.name.localeCompare(b.name))
this.customColorSchemes = this.config.store.terminal.customColorSchemes
this.changeDetector.markForCheck()
this.update()
}
ngOnChanges () {
this.update()
}
selectScheme (scheme: TerminalColorScheme)
|
update () {
this.currentCustomScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.customColorSchemes)
this.currentStockScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.stockColorSchemes)
this.allColorSchemes = this.customColorSchemes.concat(this.stockColorSchemes)
this.changeDetector.markForCheck()
}
editScheme () {
this.editing = true
}
saveScheme () {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== this.config.store.terminal.colorScheme.name)
this.customColorSchemes.push(this.config.store.terminal.colorScheme)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.cancelEditing()
this.update()
}
cancelEditing () {
this.editing = false
}
async deleteScheme (scheme: TerminalColorScheme) {
if ((await this.platform.showMessageBox(
{
type: 'warning',
message: this.translate.instant('Delete "{name}"?', scheme),
buttons: [
this.translate.instant('Delete'),
this.translate.instant('Keep'),
],
defaultId: 1,
cancelId: 1,
}
)).response === 0) {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== scheme.name)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.update()
}
}
getCurrentSchemeName () {
return (this.currentCustomScheme ?? this.currentStockScheme)?.name ?? 'Custom'
}
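    // Maps the currently stored scheme back to a named stock/custom scheme by
    // structural (deep) equality.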
findMatchingScheme (scheme: TerminalColorScheme, schemes: TerminalColorScheme[]) {
return schemes.find(x => deepEqual(x, scheme)) ?? null
}
colorsTrackBy (index) {
return index
}
}
|
{
this.config.store.terminal.colorScheme = { ...scheme }
this.config.save()
this.cancelEditing()
this.update()
}
|
identifier_body
|
colorSchemeSettingsTab.component.ts
|
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import deepEqual from 'deep-equal'
import { Component, Inject, Input, ChangeDetectionStrategy, ChangeDetectorRef, HostBinding } from '@angular/core'
import { ConfigService, PlatformService, TranslateService } from 'tabby-core'
import { TerminalColorSchemeProvider } from '../api/colorSchemeProvider'
import { TerminalColorScheme } from '../api/interfaces'
/** @hidden */
@Component({
template: require('./colorSchemeSettingsTab.component.pug'),
styles: [require('./colorSchemeSettingsTab.component.scss')],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ColorSchemeSettingsTabComponent {
@Input() stockColorSchemes: TerminalColorScheme[] = []
@Input() customColorSchemes: TerminalColorScheme[] = []
@Input() allColorSchemes: TerminalColorScheme[] = []
@Input() filter = ''
@Input() editing = false
colorIndexes = [...new Array(16).keys()]
currentStockScheme: TerminalColorScheme|null = null
currentCustomScheme: TerminalColorScheme|null = null
@HostBinding('class.content-box') true
constructor (
@Inject(TerminalColorSchemeProvider) private colorSchemeProviders: TerminalColorSchemeProvider[],
private changeDetector: ChangeDetectorRef,
private platform: PlatformService,
private translate: TranslateService,
public config: ConfigService,
) { }
async ngOnInit () {
this.stockColorSchemes = (await Promise.all(this.config.enabledServices(this.colorSchemeProviders).map(x => x.getSchemes()))).reduce((a, b) => a.concat(b))
this.stockColorSchemes.sort((a, b) => a.name.localeCompare(b.name))
this.customColorSchemes = this.config.store.terminal.customColorSchemes
this.changeDetector.markForCheck()
this.update()
}
ngOnChanges () {
this.update()
}
selectScheme (scheme: TerminalColorScheme) {
this.config.store.terminal.colorScheme = { ...scheme }
this.config.save()
this.cancelEditing()
this.update()
}
update () {
this.currentCustomScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.customColorSchemes)
this.currentStockScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.stockColorSchemes)
this.allColorSchemes = this.customColorSchemes.concat(this.stockColorSchemes)
this.changeDetector.markForCheck()
}
|
() {
this.editing = true
}
saveScheme () {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== this.config.store.terminal.colorScheme.name)
this.customColorSchemes.push(this.config.store.terminal.colorScheme)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.cancelEditing()
this.update()
}
cancelEditing () {
this.editing = false
}
async deleteScheme (scheme: TerminalColorScheme) {
if ((await this.platform.showMessageBox(
{
type: 'warning',
message: this.translate.instant('Delete "{name}"?', scheme),
buttons: [
this.translate.instant('Delete'),
this.translate.instant('Keep'),
],
defaultId: 1,
cancelId: 1,
}
)).response === 0) {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== scheme.name)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.update()
}
}
getCurrentSchemeName () {
return (this.currentCustomScheme ?? this.currentStockScheme)?.name ?? 'Custom'
}
findMatchingScheme (scheme: TerminalColorScheme, schemes: TerminalColorScheme[]) {
return schemes.find(x => deepEqual(x, scheme)) ?? null
}
colorsTrackBy (index) {
return index
}
}
|
editScheme
|
identifier_name
|
colorSchemeSettingsTab.component.ts
|
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import deepEqual from 'deep-equal'
import { Component, Inject, Input, ChangeDetectionStrategy, ChangeDetectorRef, HostBinding } from '@angular/core'
import { ConfigService, PlatformService, TranslateService } from 'tabby-core'
import { TerminalColorSchemeProvider } from '../api/colorSchemeProvider'
import { TerminalColorScheme } from '../api/interfaces'
|
})
export class ColorSchemeSettingsTabComponent {
@Input() stockColorSchemes: TerminalColorScheme[] = []
@Input() customColorSchemes: TerminalColorScheme[] = []
@Input() allColorSchemes: TerminalColorScheme[] = []
@Input() filter = ''
@Input() editing = false
colorIndexes = [...new Array(16).keys()]
currentStockScheme: TerminalColorScheme|null = null
currentCustomScheme: TerminalColorScheme|null = null
@HostBinding('class.content-box') true
constructor (
@Inject(TerminalColorSchemeProvider) private colorSchemeProviders: TerminalColorSchemeProvider[],
private changeDetector: ChangeDetectorRef,
private platform: PlatformService,
private translate: TranslateService,
public config: ConfigService,
) { }
async ngOnInit () {
this.stockColorSchemes = (await Promise.all(this.config.enabledServices(this.colorSchemeProviders).map(x => x.getSchemes()))).reduce((a, b) => a.concat(b))
this.stockColorSchemes.sort((a, b) => a.name.localeCompare(b.name))
this.customColorSchemes = this.config.store.terminal.customColorSchemes
this.changeDetector.markForCheck()
this.update()
}
ngOnChanges () {
this.update()
}
selectScheme (scheme: TerminalColorScheme) {
this.config.store.terminal.colorScheme = { ...scheme }
this.config.save()
this.cancelEditing()
this.update()
}
update () {
this.currentCustomScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.customColorSchemes)
this.currentStockScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.stockColorSchemes)
this.allColorSchemes = this.customColorSchemes.concat(this.stockColorSchemes)
this.changeDetector.markForCheck()
}
editScheme () {
this.editing = true
}
saveScheme () {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== this.config.store.terminal.colorScheme.name)
this.customColorSchemes.push(this.config.store.terminal.colorScheme)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.cancelEditing()
this.update()
}
cancelEditing () {
this.editing = false
}
async deleteScheme (scheme: TerminalColorScheme) {
if ((await this.platform.showMessageBox(
{
type: 'warning',
message: this.translate.instant('Delete "{name}"?', scheme),
buttons: [
this.translate.instant('Delete'),
this.translate.instant('Keep'),
],
defaultId: 1,
cancelId: 1,
}
)).response === 0) {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== scheme.name)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.update()
}
}
getCurrentSchemeName () {
return (this.currentCustomScheme ?? this.currentStockScheme)?.name ?? 'Custom'
}
findMatchingScheme (scheme: TerminalColorScheme, schemes: TerminalColorScheme[]) {
return schemes.find(x => deepEqual(x, scheme)) ?? null
}
colorsTrackBy (index) {
return index
}
}
|
/** @hidden */
@Component({
template: require('./colorSchemeSettingsTab.component.pug'),
styles: [require('./colorSchemeSettingsTab.component.scss')],
changeDetection: ChangeDetectionStrategy.OnPush,
|
random_line_split
|
colorSchemeSettingsTab.component.ts
|
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import deepEqual from 'deep-equal'
import { Component, Inject, Input, ChangeDetectionStrategy, ChangeDetectorRef, HostBinding } from '@angular/core'
import { ConfigService, PlatformService, TranslateService } from 'tabby-core'
import { TerminalColorSchemeProvider } from '../api/colorSchemeProvider'
import { TerminalColorScheme } from '../api/interfaces'
/** @hidden */
@Component({
template: require('./colorSchemeSettingsTab.component.pug'),
styles: [require('./colorSchemeSettingsTab.component.scss')],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ColorSchemeSettingsTabComponent {
@Input() stockColorSchemes: TerminalColorScheme[] = []
@Input() customColorSchemes: TerminalColorScheme[] = []
@Input() allColorSchemes: TerminalColorScheme[] = []
@Input() filter = ''
@Input() editing = false
colorIndexes = [...new Array(16).keys()]
currentStockScheme: TerminalColorScheme|null = null
currentCustomScheme: TerminalColorScheme|null = null
@HostBinding('class.content-box') true
constructor (
@Inject(TerminalColorSchemeProvider) private colorSchemeProviders: TerminalColorSchemeProvider[],
private changeDetector: ChangeDetectorRef,
private platform: PlatformService,
private translate: TranslateService,
public config: ConfigService,
) { }
async ngOnInit () {
this.stockColorSchemes = (await Promise.all(this.config.enabledServices(this.colorSchemeProviders).map(x => x.getSchemes()))).reduce((a, b) => a.concat(b))
this.stockColorSchemes.sort((a, b) => a.name.localeCompare(b.name))
this.customColorSchemes = this.config.store.terminal.customColorSchemes
this.changeDetector.markForCheck()
this.update()
}
ngOnChanges () {
this.update()
}
selectScheme (scheme: TerminalColorScheme) {
this.config.store.terminal.colorScheme = { ...scheme }
this.config.save()
this.cancelEditing()
this.update()
}
update () {
this.currentCustomScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.customColorSchemes)
this.currentStockScheme = this.findMatchingScheme(this.config.store.terminal.colorScheme, this.stockColorSchemes)
this.allColorSchemes = this.customColorSchemes.concat(this.stockColorSchemes)
this.changeDetector.markForCheck()
}
editScheme () {
this.editing = true
}
saveScheme () {
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== this.config.store.terminal.colorScheme.name)
this.customColorSchemes.push(this.config.store.terminal.colorScheme)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.cancelEditing()
this.update()
}
cancelEditing () {
this.editing = false
}
async deleteScheme (scheme: TerminalColorScheme) {
if ((await this.platform.showMessageBox(
{
type: 'warning',
message: this.translate.instant('Delete "{name}"?', scheme),
buttons: [
this.translate.instant('Delete'),
this.translate.instant('Keep'),
],
defaultId: 1,
cancelId: 1,
}
)).response === 0)
|
}
getCurrentSchemeName () {
return (this.currentCustomScheme ?? this.currentStockScheme)?.name ?? 'Custom'
}
findMatchingScheme (scheme: TerminalColorScheme, schemes: TerminalColorScheme[]) {
return schemes.find(x => deepEqual(x, scheme)) ?? null
}
colorsTrackBy (index) {
return index
}
}
|
{
this.customColorSchemes = this.customColorSchemes.filter(x => x.name !== scheme.name)
this.config.store.terminal.customColorSchemes = this.customColorSchemes
this.config.save()
this.update()
}
|
conditional_block
|
0011_auto_20201109_1100.py
|
# Generated by Django 2.2.11 on 2020-11-09 17:00
import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class
|
(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('daphne_context', '0010_userinformation_mycroft_connection'),
]
operations = [
migrations.RemoveField(
model_name='userinformation',
name='mycroft_session',
),
migrations.CreateModel(
name='MycroftUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
Migration
|
identifier_name
|
0011_auto_20201109_1100.py
|
# Generated by Django 2.2.11 on 2020-11-09 17:00
import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
|
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('daphne_context', '0010_userinformation_mycroft_connection'),
]
operations = [
migrations.RemoveField(
model_name='userinformation',
name='mycroft_session',
),
migrations.CreateModel(
name='MycroftUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
identifier_body
|
|
0011_auto_20201109_1100.py
|
# Generated by Django 2.2.11 on 2020-11-09 17:00
import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
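# Replaces the mycroft_session field on UserInformation with a dedicated
# MycroftUser model linked one-to-one to the auth user.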
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('daphne_context', '0010_userinformation_mycroft_connection'),
]
operations = [
migrations.RemoveField(
model_name='userinformation',
name='mycroft_session',
),
migrations.CreateModel(
name='MycroftUser',
|
),
]
|
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
|
random_line_split
|
asarUtil.ts
|
import { AsyncTaskManager, log } from "builder-util"
import { FileCopier, Filter, MAX_FILE_REQUESTS } from "builder-util/out/fs"
import { symlink, createReadStream, createWriteStream, Stats } from "fs"
import { writeFile, readFile, mkdir } from "fs/promises"
import * as path from "path"
import { AsarOptions } from "../options/PlatformSpecificBuildOptions"
import { Packager } from "../packager"
import { PlatformPackager } from "../platformPackager"
import { getDestinationPath, ResolvedFileSet } from "../util/appFileCopier"
import { AsarFilesystem, Node } from "./asar"
import { hashFile, hashFileContents } from "./integrity"
import { detectUnpackedDirs } from "./unpackDetector"
// eslint-disable-next-line @typescript-eslint/no-var-requires
const pickle = require("chromium-pickle-js")
/** @internal */
export class AsarPackager {
private readonly fs = new AsarFilesystem(this.src)
private readonly outFile: string
private readonly unpackedDest: string
constructor(private readonly src: string, private readonly destination: string, private readonly options: AsarOptions, private readonly unpackPattern: Filter | null) {
this.outFile = path.join(destination, "app.asar")
this.unpackedDest = `${this.outFile}.unpacked`
}
// sort files to minimize file change (i.e. asar file is not changed dramatically on small change)
async pack(fileSets: Array<ResolvedFileSet>, packager: PlatformPackager<any>) {
if (this.options.ordering != null) {
      // ordering doesn't support transformed files, but ordering is a rarely used feature, so wait for a user report before fixing it
await order(fileSets[0].files, this.options.ordering, fileSets[0].src)
}
await mkdir(path.dirname(this.outFile), { recursive: true })
const unpackedFileIndexMap = new Map<ResolvedFileSet, Set<number>>()
for (const fileSet of fileSets) {
unpackedFileIndexMap.set(fileSet, await this.createPackageFromFiles(fileSet, packager.info))
}
await this.writeAsarFile(fileSets, unpackedFileIndexMap)
}
private async createPackageFromFiles(fileSet: ResolvedFileSet, packager: Packager) {
const metadata = fileSet.metadata
// search auto unpacked dir
const unpackedDirs = new Set<string>()
const rootForAppFilesWithoutAsar = path.join(this.destination, "app")
if (this.options.smartUnpack !== false) {
await detectUnpackedDirs(fileSet, unpackedDirs, this.unpackedDest, rootForAppFilesWithoutAsar)
}
const dirToCreateForUnpackedFiles = new Set<string>(unpackedDirs)
const correctDirNodeUnpackedFlag = async (filePathInArchive: string, dirNode: Node) => {
for (const dir of unpackedDirs) {
if (filePathInArchive.length > dir.length + 2 && filePathInArchive[dir.length] === path.sep && filePathInArchive.startsWith(dir)) {
dirNode.unpacked = true
unpackedDirs.add(filePathInArchive)
          // not all dirs are marked as unpacked after the first iteration, because a node module dir may only be
          // marked as unpacked once its content has been processed
          // e.g. node-notifier/example/advanced.js is processed first, but the module is only marked as unpacked
          // when vendor/terminal-notifier.app is processed
await mkdir(path.join(this.unpackedDest, filePathInArchive), { recursive: true })
break
}
}
}
const transformedFiles = fileSet.transformedFiles
const taskManager = new AsyncTaskManager(packager.cancellationToken)
const fileCopier = new FileCopier()
let currentDirNode: Node | null = null
let currentDirPath: string | null = null
const unpackedFileIndexSet = new Set<number>()
for (let i = 0, n = fileSet.files.length; i < n; i++) {
const file = fileSet.files[i]
const stat = metadata.get(file)
if (stat == null) {
continue
}
const pathInArchive = path.relative(rootForAppFilesWithoutAsar, getDestinationPath(file, fileSet))
if (stat.isSymbolicLink()) {
const s = stat as any
this.fs.getOrCreateNode(pathInArchive).link = s.relativeLink
s.pathInArchive = pathInArchive
unpackedFileIndexSet.add(i)
continue
}
let fileParent = path.dirname(pathInArchive)
|
if (currentDirPath !== fileParent) {
if (fileParent.startsWith("..")) {
throw new Error(`Internal error: path must not start with "..": ${fileParent}`)
}
currentDirPath = fileParent
currentDirNode = this.fs.getOrCreateNode(fileParent)
// do not check for root
if (fileParent !== "" && !currentDirNode.unpacked) {
if (unpackedDirs.has(fileParent)) {
currentDirNode.unpacked = true
} else {
await correctDirNodeUnpackedFlag(fileParent, currentDirNode)
}
}
}
const dirNode = currentDirNode!
const newData = transformedFiles == null ? undefined : transformedFiles.get(i)
const isUnpacked = dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat))
const integrity = newData === undefined ? await hashFile(file) : hashFileContents(newData)
this.fs.addFileNode(file, dirNode, newData == undefined ? stat.size : Buffer.byteLength(newData), isUnpacked, stat, integrity)
if (isUnpacked) {
if (!dirNode.unpacked && !dirToCreateForUnpackedFiles.has(fileParent)) {
dirToCreateForUnpackedFiles.add(fileParent)
await mkdir(path.join(this.unpackedDest, fileParent), { recursive: true })
}
const unpackedFile = path.join(this.unpackedDest, pathInArchive)
taskManager.addTask(copyFileOrData(fileCopier, newData, file, unpackedFile, stat))
if (taskManager.tasks.length > MAX_FILE_REQUESTS) {
await taskManager.awaitTasks()
}
unpackedFileIndexSet.add(i)
}
}
if (taskManager.tasks.length > 0) {
await taskManager.awaitTasks()
}
return unpackedFileIndexSet
}
private writeAsarFile(fileSets: Array<ResolvedFileSet>, unpackedFileIndexMap: Map<ResolvedFileSet, Set<number>>): Promise<any> {
return new Promise((resolve, reject) => {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(this.fs.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const writeStream = createWriteStream(this.outFile)
writeStream.on("error", reject)
writeStream.on("close", resolve)
writeStream.write(sizeBuf)
let fileSetIndex = 0
let files = fileSets[0].files
let metadata = fileSets[0].metadata
let transformedFiles = fileSets[0].transformedFiles
let unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[0])!
const w = (index: number) => {
while (true) {
if (index >= files.length) {
if (++fileSetIndex >= fileSets.length) {
writeStream.end()
return
} else {
files = fileSets[fileSetIndex].files
metadata = fileSets[fileSetIndex].metadata
transformedFiles = fileSets[fileSetIndex].transformedFiles
unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[fileSetIndex])!
index = 0
}
}
if (!unpackedFileIndexSet.has(index)) {
break
} else {
const stat = metadata.get(files[index])
if (stat != null && stat.isSymbolicLink()) {
symlink((stat as any).linkRelativeToFile, path.join(this.unpackedDest, (stat as any).pathInArchive), () => w(index + 1))
return
}
}
index++
}
const data = transformedFiles == null ? null : transformedFiles.get(index)
const file = files[index]
if (data !== null && data !== undefined) {
writeStream.write(data, () => w(index + 1))
return
}
// https://github.com/yarnpkg/yarn/pull/3539
const stat = metadata.get(file)
if (stat != null && stat.size < 2 * 1024 * 1024) {
readFile(file)
.then(it => {
writeStream.write(it, () => w(index + 1))
})
.catch(e => reject(`Cannot read file ${file}: ${e.stack || e}`))
} else {
const readStream = createReadStream(file)
readStream.on("error", reject)
readStream.once("end", () => w(index + 1))
readStream.on("open", () => {
readStream.pipe(writeStream, {
end: false,
})
})
}
}
writeStream.write(headerBuf, () => w(0))
})
}
}
async function order(filenames: Array<string>, orderingFile: string, src: string) {
const orderingFiles = (await readFile(orderingFile, "utf8")).split("\n").map(line => {
if (line.indexOf(":") !== -1) {
line = line.split(":").pop()!
}
line = line.trim()
if (line[0] === "/") {
line = line.slice(1)
}
return line
})
const ordering: Array<string> = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
for (const pathComponent of pathComponents) {
ordering.push(path.join(src, pathComponent))
}
}
const sortedFiles: Array<string> = []
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!sortedFiles.includes(file) && filenames.includes(file)) {
sortedFiles.push(file)
}
}
for (const file of filenames) {
if (!sortedFiles.includes(file)) {
sortedFiles.push(file)
missing += 1
}
}
log.info({ coverage: ((total - missing) / total) * 100 }, "ordering files in ASAR archive")
return sortedFiles
}
function copyFileOrData(fileCopier: FileCopier, data: string | Buffer | undefined | null, source: string, destination: string, stats: Stats) {
if (data == null) {
return fileCopier.copy(source, destination, stats)
} else {
return writeFile(destination, data)
}
}
|
if (fileParent === ".") {
fileParent = ""
}
|
random_line_split
|
asarUtil.ts
|
import { AsyncTaskManager, log } from "builder-util"
import { FileCopier, Filter, MAX_FILE_REQUESTS } from "builder-util/out/fs"
import { symlink, createReadStream, createWriteStream, Stats } from "fs"
import { writeFile, readFile, mkdir } from "fs/promises"
import * as path from "path"
import { AsarOptions } from "../options/PlatformSpecificBuildOptions"
import { Packager } from "../packager"
import { PlatformPackager } from "../platformPackager"
import { getDestinationPath, ResolvedFileSet } from "../util/appFileCopier"
import { AsarFilesystem, Node } from "./asar"
import { hashFile, hashFileContents } from "./integrity"
import { detectUnpackedDirs } from "./unpackDetector"
// eslint-disable-next-line @typescript-eslint/no-var-requires
const pickle = require("chromium-pickle-js")
/** @internal */
export class AsarPackager {
private readonly fs = new AsarFilesystem(this.src)
private readonly outFile: string
private readonly unpackedDest: string
constructor(private readonly src: string, private readonly destination: string, private readonly options: AsarOptions, private readonly unpackPattern: Filter | null) {
this.outFile = path.join(destination, "app.asar")
this.unpackedDest = `${this.outFile}.unpacked`
}
// sort files to minimize file change (i.e. asar file is not changed dramatically on small change)
async pack(fileSets: Array<ResolvedFileSet>, packager: PlatformPackager<any>) {
if (this.options.ordering != null) {
// ordering doesn't support transformed files, but ordering is rarely used functionality - wait for a user report before fixing it
await order(fileSets[0].files, this.options.ordering, fileSets[0].src)
}
await mkdir(path.dirname(this.outFile), { recursive: true })
const unpackedFileIndexMap = new Map<ResolvedFileSet, Set<number>>()
for (const fileSet of fileSets) {
unpackedFileIndexMap.set(fileSet, await this.createPackageFromFiles(fileSet, packager.info))
}
await this.writeAsarFile(fileSets, unpackedFileIndexMap)
}
private async createPackageFromFiles(fileSet: ResolvedFileSet, packager: Packager) {
const metadata = fileSet.metadata
// search for dirs to auto-unpack
const unpackedDirs = new Set<string>()
const rootForAppFilesWithoutAsar = path.join(this.destination, "app")
if (this.options.smartUnpack !== false) {
await detectUnpackedDirs(fileSet, unpackedDirs, this.unpackedDest, rootForAppFilesWithoutAsar)
}
const dirToCreateForUnpackedFiles = new Set<string>(unpackedDirs)
const correctDirNodeUnpackedFlag = async (filePathInArchive: string, dirNode: Node) => {
for (const dir of unpackedDirs) {
if (filePathInArchive.length > dir.length + 2 && filePathInArchive[dir.length] === path.sep && filePathInArchive.startsWith(dir)) {
dirNode.unpacked = true
unpackedDirs.add(filePathInArchive)
// not all dirs are marked as unpacked after the first iteration - a node module dir can only be marked as unpacked after its content has been processed
// e.g. node-notifier/example/advanced.js is processed first, but the module is only marked as unpacked once vendor/terminal-notifier.app is processed
await mkdir(path.join(this.unpackedDest, filePathInArchive), { recursive: true })
break
}
}
}
const transformedFiles = fileSet.transformedFiles
const taskManager = new AsyncTaskManager(packager.cancellationToken)
const fileCopier = new FileCopier()
let currentDirNode: Node | null = null
let currentDirPath: string | null = null
const unpackedFileIndexSet = new Set<number>()
for (let i = 0, n = fileSet.files.length; i < n; i++) {
const file = fileSet.files[i]
const stat = metadata.get(file)
if (stat == null) {
continue
}
const pathInArchive = path.relative(rootForAppFilesWithoutAsar, getDestinationPath(file, fileSet))
if (stat.isSymbolicLink()) {
const s = stat as any
this.fs.getOrCreateNode(pathInArchive).link = s.relativeLink
s.pathInArchive = pathInArchive
unpackedFileIndexSet.add(i)
continue
}
let fileParent = path.dirname(pathInArchive)
if (fileParent === ".") {
fileParent = ""
}
if (currentDirPath !== fileParent) {
if (fileParent.startsWith("..")) {
throw new Error(`Internal error: path must not start with "..": ${fileParent}`)
}
currentDirPath = fileParent
currentDirNode = this.fs.getOrCreateNode(fileParent)
// do not check for root
if (fileParent !== "" && !currentDirNode.unpacked) {
if (unpackedDirs.has(fileParent)) {
currentDirNode.unpacked = true
} else {
await correctDirNodeUnpackedFlag(fileParent, currentDirNode)
}
}
}
const dirNode = currentDirNode!
const newData = transformedFiles == null ? undefined : transformedFiles.get(i)
const isUnpacked = dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat))
const integrity = newData === undefined ? await hashFile(file) : hashFileContents(newData)
this.fs.addFileNode(file, dirNode, newData == undefined ? stat.size : Buffer.byteLength(newData), isUnpacked, stat, integrity)
if (isUnpacked) {
if (!dirNode.unpacked && !dirToCreateForUnpackedFiles.has(fileParent)) {
dirToCreateForUnpackedFiles.add(fileParent)
await mkdir(path.join(this.unpackedDest, fileParent), { recursive: true })
}
const unpackedFile = path.join(this.unpackedDest, pathInArchive)
taskManager.addTask(copyFileOrData(fileCopier, newData, file, unpackedFile, stat))
if (taskManager.tasks.length > MAX_FILE_REQUESTS) {
await taskManager.awaitTasks()
}
unpackedFileIndexSet.add(i)
}
}
if (taskManager.tasks.length > 0) {
await taskManager.awaitTasks()
}
return unpackedFileIndexSet
}
private writeAsarFile(fileSets: Array<ResolvedFileSet>, unpackedFileIndexMap: Map<ResolvedFileSet, Set<number>>): Promise<any> {
return new Promise((resolve, reject) => {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(this.fs.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const writeStream = createWriteStream(this.outFile)
writeStream.on("error", reject)
writeStream.on("close", resolve)
writeStream.write(sizeBuf)
let fileSetIndex = 0
let files = fileSets[0].files
let metadata = fileSets[0].metadata
let transformedFiles = fileSets[0].transformedFiles
let unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[0])!
const w = (index: number) => {
while (true) {
if (index >= files.length) {
if (++fileSetIndex >= fileSets.length) {
writeStream.end()
return
} else {
files = fileSets[fileSetIndex].files
metadata = fileSets[fileSetIndex].metadata
transformedFiles = fileSets[fileSetIndex].transformedFiles
unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[fileSetIndex])!
index = 0
}
}
if (!unpackedFileIndexSet.has(index)) {
break
} else {
const stat = metadata.get(files[index])
if (stat != null && stat.isSymbolicLink()) {
symlink((stat as any).linkRelativeToFile, path.join(this.unpackedDest, (stat as any).pathInArchive), () => w(index + 1))
return
}
}
index++
}
const data = transformedFiles == null ? null : transformedFiles.get(index)
const file = files[index]
if (data !== null && data !== undefined) {
writeStream.write(data, () => w(index + 1))
return
}
// https://github.com/yarnpkg/yarn/pull/3539
const stat = metadata.get(file)
if (stat != null && stat.size < 2 * 1024 * 1024) {
readFile(file)
.then(it => {
writeStream.write(it, () => w(index + 1))
})
.catch(e => reject(`Cannot read file ${file}: ${e.stack || e}`))
} else {
const readStream = createReadStream(file)
readStream.on("error", reject)
readStream.once("end", () => w(index + 1))
readStream.on("open", () => {
readStream.pipe(writeStream, {
end: false,
})
})
}
}
writeStream.write(headerBuf, () => w(0))
})
}
}
async function
|
(filenames: Array<string>, orderingFile: string, src: string) {
const orderingFiles = (await readFile(orderingFile, "utf8")).split("\n").map(line => {
if (line.indexOf(":") !== -1) {
line = line.split(":").pop()!
}
line = line.trim()
if (line[0] === "/") {
line = line.slice(1)
}
return line
})
const ordering: Array<string> = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
for (const pathComponent of pathComponents) {
ordering.push(path.join(src, pathComponent))
}
}
const sortedFiles: Array<string> = []
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!sortedFiles.includes(file) && filenames.includes(file)) {
sortedFiles.push(file)
}
}
for (const file of filenames) {
if (!sortedFiles.includes(file)) {
sortedFiles.push(file)
missing += 1
}
}
log.info({ coverage: ((total - missing) / total) * 100 }, "ordering files in ASAR archive")
return sortedFiles
}
function copyFileOrData(fileCopier: FileCopier, data: string | Buffer | undefined | null, source: string, destination: string, stats: Stats) {
if (data == null) {
return fileCopier.copy(source, destination, stats)
} else {
return writeFile(destination, data)
}
}
|
order
|
identifier_name
|
asarUtil.ts
|
import { AsyncTaskManager, log } from "builder-util"
import { FileCopier, Filter, MAX_FILE_REQUESTS } from "builder-util/out/fs"
import { symlink, createReadStream, createWriteStream, Stats } from "fs"
import { writeFile, readFile, mkdir } from "fs/promises"
import * as path from "path"
import { AsarOptions } from "../options/PlatformSpecificBuildOptions"
import { Packager } from "../packager"
import { PlatformPackager } from "../platformPackager"
import { getDestinationPath, ResolvedFileSet } from "../util/appFileCopier"
import { AsarFilesystem, Node } from "./asar"
import { hashFile, hashFileContents } from "./integrity"
import { detectUnpackedDirs } from "./unpackDetector"
// eslint-disable-next-line @typescript-eslint/no-var-requires
const pickle = require("chromium-pickle-js")
/** @internal */
export class AsarPackager {
private readonly fs = new AsarFilesystem(this.src)
private readonly outFile: string
private readonly unpackedDest: string
constructor(private readonly src: string, private readonly destination: string, private readonly options: AsarOptions, private readonly unpackPattern: Filter | null) {
this.outFile = path.join(destination, "app.asar")
this.unpackedDest = `${this.outFile}.unpacked`
}
// sort files to minimize file change (i.e. asar file is not changed dramatically on small change)
async pack(fileSets: Array<ResolvedFileSet>, packager: PlatformPackager<any>) {
if (this.options.ordering != null) {
// ordering doesn't support transformed files, but ordering is rarely used functionality - wait for a user report before fixing it
await order(fileSets[0].files, this.options.ordering, fileSets[0].src)
}
await mkdir(path.dirname(this.outFile), { recursive: true })
const unpackedFileIndexMap = new Map<ResolvedFileSet, Set<number>>()
for (const fileSet of fileSets) {
unpackedFileIndexMap.set(fileSet, await this.createPackageFromFiles(fileSet, packager.info))
}
await this.writeAsarFile(fileSets, unpackedFileIndexMap)
}
private async createPackageFromFiles(fileSet: ResolvedFileSet, packager: Packager) {
const metadata = fileSet.metadata
// search for dirs to auto-unpack
const unpackedDirs = new Set<string>()
const rootForAppFilesWithoutAsar = path.join(this.destination, "app")
if (this.options.smartUnpack !== false) {
await detectUnpackedDirs(fileSet, unpackedDirs, this.unpackedDest, rootForAppFilesWithoutAsar)
}
const dirToCreateForUnpackedFiles = new Set<string>(unpackedDirs)
const correctDirNodeUnpackedFlag = async (filePathInArchive: string, dirNode: Node) => {
for (const dir of unpackedDirs) {
if (filePathInArchive.length > dir.length + 2 && filePathInArchive[dir.length] === path.sep && filePathInArchive.startsWith(dir)) {
dirNode.unpacked = true
unpackedDirs.add(filePathInArchive)
// not all dirs are marked as unpacked after the first iteration - a node module dir can only be marked as unpacked after its content has been processed
// e.g. node-notifier/example/advanced.js is processed first, but the module is only marked as unpacked once vendor/terminal-notifier.app is processed
await mkdir(path.join(this.unpackedDest, filePathInArchive), { recursive: true })
break
}
}
}
const transformedFiles = fileSet.transformedFiles
const taskManager = new AsyncTaskManager(packager.cancellationToken)
const fileCopier = new FileCopier()
let currentDirNode: Node | null = null
let currentDirPath: string | null = null
const unpackedFileIndexSet = new Set<number>()
for (let i = 0, n = fileSet.files.length; i < n; i++) {
const file = fileSet.files[i]
const stat = metadata.get(file)
if (stat == null) {
continue
}
const pathInArchive = path.relative(rootForAppFilesWithoutAsar, getDestinationPath(file, fileSet))
if (stat.isSymbolicLink()) {
const s = stat as any
this.fs.getOrCreateNode(pathInArchive).link = s.relativeLink
s.pathInArchive = pathInArchive
unpackedFileIndexSet.add(i)
continue
}
let fileParent = path.dirname(pathInArchive)
if (fileParent === ".") {
fileParent = ""
}
if (currentDirPath !== fileParent) {
if (fileParent.startsWith("..")) {
throw new Error(`Internal error: path must not start with "..": ${fileParent}`)
}
currentDirPath = fileParent
currentDirNode = this.fs.getOrCreateNode(fileParent)
// do not check for root
if (fileParent !== "" && !currentDirNode.unpacked) {
if (unpackedDirs.has(fileParent))
|
else {
await correctDirNodeUnpackedFlag(fileParent, currentDirNode)
}
}
}
const dirNode = currentDirNode!
const newData = transformedFiles == null ? undefined : transformedFiles.get(i)
const isUnpacked = dirNode.unpacked || (this.unpackPattern != null && this.unpackPattern(file, stat))
const integrity = newData === undefined ? await hashFile(file) : hashFileContents(newData)
this.fs.addFileNode(file, dirNode, newData == undefined ? stat.size : Buffer.byteLength(newData), isUnpacked, stat, integrity)
if (isUnpacked) {
if (!dirNode.unpacked && !dirToCreateForUnpackedFiles.has(fileParent)) {
dirToCreateForUnpackedFiles.add(fileParent)
await mkdir(path.join(this.unpackedDest, fileParent), { recursive: true })
}
const unpackedFile = path.join(this.unpackedDest, pathInArchive)
taskManager.addTask(copyFileOrData(fileCopier, newData, file, unpackedFile, stat))
if (taskManager.tasks.length > MAX_FILE_REQUESTS) {
await taskManager.awaitTasks()
}
unpackedFileIndexSet.add(i)
}
}
if (taskManager.tasks.length > 0) {
await taskManager.awaitTasks()
}
return unpackedFileIndexSet
}
private writeAsarFile(fileSets: Array<ResolvedFileSet>, unpackedFileIndexMap: Map<ResolvedFileSet, Set<number>>): Promise<any> {
return new Promise((resolve, reject) => {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(this.fs.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const writeStream = createWriteStream(this.outFile)
writeStream.on("error", reject)
writeStream.on("close", resolve)
writeStream.write(sizeBuf)
let fileSetIndex = 0
let files = fileSets[0].files
let metadata = fileSets[0].metadata
let transformedFiles = fileSets[0].transformedFiles
let unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[0])!
const w = (index: number) => {
while (true) {
if (index >= files.length) {
if (++fileSetIndex >= fileSets.length) {
writeStream.end()
return
} else {
files = fileSets[fileSetIndex].files
metadata = fileSets[fileSetIndex].metadata
transformedFiles = fileSets[fileSetIndex].transformedFiles
unpackedFileIndexSet = unpackedFileIndexMap.get(fileSets[fileSetIndex])!
index = 0
}
}
if (!unpackedFileIndexSet.has(index)) {
break
} else {
const stat = metadata.get(files[index])
if (stat != null && stat.isSymbolicLink()) {
symlink((stat as any).linkRelativeToFile, path.join(this.unpackedDest, (stat as any).pathInArchive), () => w(index + 1))
return
}
}
index++
}
const data = transformedFiles == null ? null : transformedFiles.get(index)
const file = files[index]
if (data !== null && data !== undefined) {
writeStream.write(data, () => w(index + 1))
return
}
// https://github.com/yarnpkg/yarn/pull/3539
const stat = metadata.get(file)
if (stat != null && stat.size < 2 * 1024 * 1024) {
readFile(file)
.then(it => {
writeStream.write(it, () => w(index + 1))
})
.catch(e => reject(`Cannot read file ${file}: ${e.stack || e}`))
} else {
const readStream = createReadStream(file)
readStream.on("error", reject)
readStream.once("end", () => w(index + 1))
readStream.on("open", () => {
readStream.pipe(writeStream, {
end: false,
})
})
}
}
writeStream.write(headerBuf, () => w(0))
})
}
}
async function order(filenames: Array<string>, orderingFile: string, src: string) {
const orderingFiles = (await readFile(orderingFile, "utf8")).split("\n").map(line => {
if (line.indexOf(":") !== -1) {
line = line.split(":").pop()!
}
line = line.trim()
if (line[0] === "/") {
line = line.slice(1)
}
return line
})
const ordering: Array<string> = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
for (const pathComponent of pathComponents) {
ordering.push(path.join(src, pathComponent))
}
}
const sortedFiles: Array<string> = []
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!sortedFiles.includes(file) && filenames.includes(file)) {
sortedFiles.push(file)
}
}
for (const file of filenames) {
if (!sortedFiles.includes(file)) {
sortedFiles.push(file)
missing += 1
}
}
log.info({ coverage: ((total - missing) / total) * 100 }, "ordering files in ASAR archive")
return sortedFiles
}
function copyFileOrData(fileCopier: FileCopier, data: string | Buffer | undefined | null, source: string, destination: string, stats: Stats) {
if (data == null) {
return fileCopier.copy(source, destination, stats)
} else {
return writeFile(destination, data)
}
}
|
{
currentDirNode.unpacked = true
}
|
conditional_block
|
lpushx.spec.ts
|
import Store from '../../Store';
import Expires from '../../Expires';
import { lpush } from '../lpush';
import { lpushx } from '../lpushx';
describe('Test lpushx command', () => {
it('should prepend values to list', () => {
const redis = new MockRedis();
redis.set('mylist', []);
expect((<any>redis).lpushx('mylist', 'v1')).toBe(1);
expect((<any>redis).lpushx('mylist1', 'v2')).toBe(0);
expect(redis.get('mylist1')).toBeNull();
});
});
class MockRedis {
private data: Store;
constructor() {
this.data = new Store(new Expires());
(<any>this)['lpush'] = lpush.bind(this);
(<any>this)['lpushx'] = lpushx.bind(this);
}
get(key: string) {
return this.data.get(key) || null;
}
|
(key: string, value: any) {
return this.data.set(key, value);
}
}
|
set
|
identifier_name
|
lpushx.spec.ts
|
import Store from '../../Store';
import Expires from '../../Expires';
import { lpush } from '../lpush';
import { lpushx } from '../lpushx';
describe('Test lpushx command', () => {
it('should prepend values to list', () => {
const redis = new MockRedis();
redis.set('mylist', []);
expect((<any>redis).lpushx('mylist', 'v1')).toBe(1);
expect((<any>redis).lpushx('mylist1', 'v2')).toBe(0);
expect(redis.get('mylist1')).toBeNull();
});
});
class MockRedis {
private data: Store;
constructor() {
this.data = new Store(new Expires());
(<any>this)['lpush'] = lpush.bind(this);
(<any>this)['lpushx'] = lpushx.bind(this);
}
get(key: string)
|
set(key: string, value: any) {
return this.data.set(key, value);
}
}
|
{
return this.data.get(key) || null;
}
|
identifier_body
|
lpushx.spec.ts
|
import Store from '../../Store';
import Expires from '../../Expires';
import { lpush } from '../lpush';
import { lpushx } from '../lpushx';
describe('Test lpushx command', () => {
it('should prepend values to list', () => {
const redis = new MockRedis();
redis.set('mylist', []);
expect((<any>redis).lpushx('mylist', 'v1')).toBe(1);
expect((<any>redis).lpushx('mylist1', 'v2')).toBe(0);
expect(redis.get('mylist1')).toBeNull();
|
});
class MockRedis {
private data: Store;
constructor() {
this.data = new Store(new Expires());
(<any>this)['lpush'] = lpush.bind(this);
(<any>this)['lpushx'] = lpushx.bind(this);
}
get(key: string) {
return this.data.get(key) || null;
}
set(key: string, value: any) {
return this.data.set(key, value);
}
}
|
});
|
random_line_split
|
ui.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic values for UI properties.
use std::fmt::{self, Write};
use style_traits::cursor::CursorKind;
use style_traits::{CssWriter, ToCss};
/// A generic value for the `cursor` property.
///
/// https://drafts.csswg.org/css-ui/#cursor
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct Cursor<Image> {
/// The parsed images for the cursor.
pub images: Box<[Image]>,
/// The kind of the cursor [default | help | ...].
pub keyword: CursorKind,
}
impl<Image> Cursor<Image> {
/// Set `cursor` to `auto`
#[inline]
pub fn auto() -> Self {
Self {
images: vec![].into_boxed_slice(),
keyword: CursorKind::Auto,
}
}
}
impl<Image: ToCss> ToCss for Cursor<Image> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
for image in &*self.images {
image.to_css(dest)?;
dest.write_str(", ")?;
}
self.keyword.to_css(dest)
}
}
/// A generic value for item of `image cursors`.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct CursorImage<ImageUrl, Number> {
/// The url to parse images from.
pub url: ImageUrl,
/// The <x> and <y> coordinates.
pub hotspot: Option<(Number, Number)>,
}
impl<ImageUrl: ToCss, Number: ToCss> ToCss for CursorImage<ImageUrl, Number> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.url.to_css(dest)?;
if let Some((ref x, ref y)) = self.hotspot
|
Ok(())
}
}
/// A generic value for `scrollbar-color` property.
///
/// https://drafts.csswg.org/css-scrollbars-1/#scrollbar-color
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
pub enum ScrollbarColor<Color> {
/// `auto`
Auto,
/// `<color>{2}`
Colors {
/// First `<color>`, for color of the scrollbar thumb.
thumb: Color,
/// Second `<color>`, for color of the scrollbar track.
track: Color,
},
}
impl<Color> Default for ScrollbarColor<Color> {
#[inline]
fn default() -> Self {
ScrollbarColor::Auto
}
}
|
{
dest.write_str(" ")?;
x.to_css(dest)?;
dest.write_str(" ")?;
y.to_css(dest)?;
}
|
conditional_block
|
ui.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic values for UI properties.
use std::fmt::{self, Write};
use style_traits::cursor::CursorKind;
use style_traits::{CssWriter, ToCss};
/// A generic value for the `cursor` property.
///
/// https://drafts.csswg.org/css-ui/#cursor
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct Cursor<Image> {
/// The parsed images for the cursor.
pub images: Box<[Image]>,
/// The kind of the cursor [default | help | ...].
pub keyword: CursorKind,
}
impl<Image> Cursor<Image> {
/// Set `cursor` to `auto`
#[inline]
pub fn auto() -> Self {
Self {
images: vec![].into_boxed_slice(),
keyword: CursorKind::Auto,
}
}
}
impl<Image: ToCss> ToCss for Cursor<Image> {
fn
|
<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
for image in &*self.images {
image.to_css(dest)?;
dest.write_str(", ")?;
}
self.keyword.to_css(dest)
}
}
/// A generic value for item of `image cursors`.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct CursorImage<ImageUrl, Number> {
/// The url to parse images from.
pub url: ImageUrl,
/// The <x> and <y> coordinates.
pub hotspot: Option<(Number, Number)>,
}
impl<ImageUrl: ToCss, Number: ToCss> ToCss for CursorImage<ImageUrl, Number> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.url.to_css(dest)?;
if let Some((ref x, ref y)) = self.hotspot {
dest.write_str(" ")?;
x.to_css(dest)?;
dest.write_str(" ")?;
y.to_css(dest)?;
}
Ok(())
}
}
/// A generic value for `scrollbar-color` property.
///
/// https://drafts.csswg.org/css-scrollbars-1/#scrollbar-color
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
pub enum ScrollbarColor<Color> {
/// `auto`
Auto,
/// `<color>{2}`
Colors {
/// First `<color>`, for color of the scrollbar thumb.
thumb: Color,
/// Second `<color>`, for color of the scrollbar track.
track: Color,
},
}
impl<Color> Default for ScrollbarColor<Color> {
#[inline]
fn default() -> Self {
ScrollbarColor::Auto
}
}
|
to_css
|
identifier_name
|
ui.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic values for UI properties.
use std::fmt::{self, Write};
use style_traits::cursor::CursorKind;
use style_traits::{CssWriter, ToCss};
/// A generic value for the `cursor` property.
///
/// https://drafts.csswg.org/css-ui/#cursor
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct Cursor<Image> {
/// The parsed images for the cursor.
pub images: Box<[Image]>,
/// The kind of the cursor [default | help | ...].
pub keyword: CursorKind,
}
impl<Image> Cursor<Image> {
/// Set `cursor` to `auto`
#[inline]
pub fn auto() -> Self {
Self {
images: vec![].into_boxed_slice(),
keyword: CursorKind::Auto,
}
}
}
impl<Image: ToCss> ToCss for Cursor<Image> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
for image in &*self.images {
image.to_css(dest)?;
dest.write_str(", ")?;
}
self.keyword.to_css(dest)
}
}
/// A generic value for item of `image cursors`.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct CursorImage<ImageUrl, Number> {
/// The url to parse images from.
pub url: ImageUrl,
/// The <x> and <y> coordinates.
pub hotspot: Option<(Number, Number)>,
}
impl<ImageUrl: ToCss, Number: ToCss> ToCss for CursorImage<ImageUrl, Number> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.url.to_css(dest)?;
if let Some((ref x, ref y)) = self.hotspot {
dest.write_str(" ")?;
x.to_css(dest)?;
dest.write_str(" ")?;
y.to_css(dest)?;
}
Ok(())
}
}
/// A generic value for `scrollbar-color` property.
///
/// https://drafts.csswg.org/css-scrollbars-1/#scrollbar-color
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
|
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
pub enum ScrollbarColor<Color> {
/// `auto`
Auto,
/// `<color>{2}`
Colors {
/// First `<color>`, for color of the scrollbar thumb.
thumb: Color,
/// Second `<color>`, for color of the scrollbar track.
track: Color,
},
}
impl<Color> Default for ScrollbarColor<Color> {
#[inline]
fn default() -> Self {
ScrollbarColor::Auto
}
}
|
SpecifiedValueInfo,
|
random_line_split
|
styled.rs
|
use super::*;
use ascii_canvas::AsciiView;
use std::fmt::{Debug, Error, Formatter};
use style::Style;
pub struct Styled {
style: Style,
content: Box<Content>,
}
impl Styled {
pub fn new(style: Style, content: Box<Content>) -> Self {
Styled {
style: style,
content: content,
}
}
}
impl Content for Styled {
fn min_width(&self) -> usize
|
fn emit(&self, view: &mut AsciiView) {
self.content.emit(&mut view.styled(self.style))
}
fn into_wrap_items(self: Box<Self>, wrap_items: &mut Vec<Box<Content>>) {
let style = self.style;
super::into_wrap_items_map(self.content, wrap_items, |item| Styled::new(style, item))
}
}
impl Debug for Styled {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
fmt.debug_struct("Styled")
.field("content", &self.content)
.finish()
}
}
|
{
self.content.min_width()
}
|
identifier_body
|
styled.rs
|
use super::*;
use ascii_canvas::AsciiView;
use std::fmt::{Debug, Error, Formatter};
use style::Style;
pub struct Styled {
style: Style,
content: Box<Content>,
|
pub fn new(style: Style, content: Box<Content>) -> Self {
Styled {
style: style,
content: content,
}
}
}
impl Content for Styled {
fn min_width(&self) -> usize {
self.content.min_width()
}
fn emit(&self, view: &mut AsciiView) {
self.content.emit(&mut view.styled(self.style))
}
fn into_wrap_items(self: Box<Self>, wrap_items: &mut Vec<Box<Content>>) {
let style = self.style;
super::into_wrap_items_map(self.content, wrap_items, |item| Styled::new(style, item))
}
}
impl Debug for Styled {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
fmt.debug_struct("Styled")
.field("content", &self.content)
.finish()
}
}
|
}
impl Styled {
|
random_line_split
|
styled.rs
|
use super::*;
use ascii_canvas::AsciiView;
use std::fmt::{Debug, Error, Formatter};
use style::Style;
pub struct Styled {
style: Style,
content: Box<Content>,
}
impl Styled {
pub fn new(style: Style, content: Box<Content>) -> Self {
Styled {
style: style,
content: content,
}
}
}
impl Content for Styled {
fn min_width(&self) -> usize {
self.content.min_width()
}
fn emit(&self, view: &mut AsciiView) {
self.content.emit(&mut view.styled(self.style))
}
fn into_wrap_items(self: Box<Self>, wrap_items: &mut Vec<Box<Content>>) {
let style = self.style;
super::into_wrap_items_map(self.content, wrap_items, |item| Styled::new(style, item))
}
}
impl Debug for Styled {
fn
|
(&self, fmt: &mut Formatter) -> Result<(), Error> {
fmt.debug_struct("Styled")
.field("content", &self.content)
.finish()
}
}
|
fmt
|
identifier_name
|
const-impl.rs
|
#![feature(adt_const_params)]
#![crate_name = "foo"]
#[derive(PartialEq, Eq)]
pub enum Order {
Sorted,
Unsorted,
}
// @has foo/struct.VSet.html '//pre[@class="rust struct"]' 'pub struct VSet<T, const ORDER: Order>'
// @has foo/struct.VSet.html '//div[@id="impl-Send"]/h3[@class="code-header in-band"]' 'impl<T, const ORDER: Order> Send for VSet<T, ORDER>'
// @has foo/struct.VSet.html '//div[@id="impl-Sync"]/h3[@class="code-header in-band"]' 'impl<T, const ORDER: Order> Sync for VSet<T, ORDER>'
pub struct VSet<T, const ORDER: Order> {
inner: Vec<T>,
}
// @has foo/struct.VSet.html '//div[@id="impl"]/h3[@class="code-header in-band"]' 'impl<T> VSet<T, {Order::Sorted}>'
impl <T> VSet<T, {Order::Sorted}> {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
}
// @has foo/struct.VSet.html '//div[@id="impl-1"]/h3[@class="code-header in-band"]' 'impl<T> VSet<T, {Order::Unsorted}>'
impl <T> VSet<T, {Order::Unsorted}> {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
}
pub struct
|
<const S: &'static str>;
// @has foo/struct.Escape.html '//div[@id="impl"]/h3[@class="code-header in-band"]' 'impl Escape<{ r#"<script>alert("Escape");</script>"# }>'
impl Escape<{ r#"<script>alert("Escape");</script>"# }> {
pub fn f() {}
}
|
Escape
|
identifier_name
|
const-impl.rs
|
#![feature(adt_const_params)]
#![crate_name = "foo"]
#[derive(PartialEq, Eq)]
pub enum Order {
Sorted,
Unsorted,
}
// @has foo/struct.VSet.html '//pre[@class="rust struct"]' 'pub struct VSet<T, const ORDER: Order>'
// @has foo/struct.VSet.html '//div[@id="impl-Send"]/h3[@class="code-header in-band"]' 'impl<T, const ORDER: Order> Send for VSet<T, ORDER>'
// @has foo/struct.VSet.html '//div[@id="impl-Sync"]/h3[@class="code-header in-band"]' 'impl<T, const ORDER: Order> Sync for VSet<T, ORDER>'
pub struct VSet<T, const ORDER: Order> {
inner: Vec<T>,
}
// @has foo/struct.VSet.html '//div[@id="impl"]/h3[@class="code-header in-band"]' 'impl<T> VSet<T, {Order::Sorted}>'
|
}
// @has foo/struct.VSet.html '//div[@id="impl-1"]/h3[@class="code-header in-band"]' 'impl<T> VSet<T, {Order::Unsorted}>'
impl <T> VSet<T, {Order::Unsorted}> {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
}
pub struct Escape<const S: &'static str>;
// @has foo/struct.Escape.html '//div[@id="impl"]/h3[@class="code-header in-band"]' 'impl Escape<{ r#"<script>alert("Escape");</script>"# }>'
impl Escape<{ r#"<script>alert("Escape");</script>"# }> {
pub fn f() {}
}
|
impl <T> VSet<T, {Order::Sorted}> {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
|
random_line_split
|
dock-spawn-tests.ts
|
/// <reference path="dock-spawn.d.ts" />
var dockManagerDiv = document.createElement('div'),
panelDiv1 = document.createElement('div'),
panelDiv2 = document.createElement('div'),
panelDiv3 = document.createElement('div');
document.body.appendChild(dockManagerDiv);
var dockManager = new dockspawn.DockManager(dockManagerDiv);
dockManager.initialize();
var panelContainer1 = new dockspawn.PanelContainer(panelDiv1, dockManager),
panelContainer2 = new dockspawn.PanelContainer(panelDiv2, dockManager),
panelContainer3 = new dockspawn.PanelContainer(panelDiv3, dockManager);
|
var documentNode = dockManager.context.model.documentManagerNode;
var panelNode1 = dockManager.dockLeft(documentNode, panelContainer1, 0.33),
panelNode2 = dockManager.dockRight(documentNode, panelContainer2, 0.33),
panelNode3 = dockManager.dockFill(documentNode, panelContainer3);
|
random_line_split
|
|
instr_shufps.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn shufps_1() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Literal8(82)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 251, 82], OperandSize::Dword)
}
#[test]
fn
|
() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Eight, 909533252, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(46)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 188, 243, 68, 96, 54, 54, 46], OperandSize::Dword)
}
#[test]
fn shufps_3() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(66)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 241, 66], OperandSize::Qword)
}
#[test]
fn shufps_4() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Indirect(RBX, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 51, 6], OperandSize::Qword)
}
|
shufps_2
|
identifier_name
|
instr_shufps.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn shufps_1() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Literal8(82)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 251, 82], OperandSize::Dword)
}
#[test]
fn shufps_2()
|
#[test]
fn shufps_3() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(66)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 241, 66], OperandSize::Qword)
}
#[test]
fn shufps_4() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Indirect(RBX, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 51, 6], OperandSize::Qword)
}
|
{
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Eight, 909533252, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(46)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 188, 243, 68, 96, 54, 54, 46], OperandSize::Dword)
}
|
identifier_body
|
instr_shufps.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn shufps_1() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Literal8(82)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 251, 82], OperandSize::Dword)
}
|
#[test]
fn shufps_3() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(66)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 241, 66], OperandSize::Qword)
}
#[test]
fn shufps_4() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM6)), operand2: Some(Indirect(RBX, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 51, 6], OperandSize::Qword)
}
|
#[test]
fn shufps_2() {
run_test(&Instruction { mnemonic: Mnemonic::SHUFPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Eight, 909533252, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(46)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 198, 188, 243, 68, 96, 54, 54, 46], OperandSize::Dword)
}
|
random_line_split
|
TorrentProvider.py
|
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from datetime import datetime
from feedparser.util import FeedParserDict
from hachoir_parser import createParser
import sickbeard
from sickbeard import logger
from sickbeard.classes import Proper, TorrentSearchResult
from sickbeard.common import Quality
from sickbeard.db import DBConnection
from sickrage.helper.common import try_int
from sickrage.helper.exceptions import ex
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.show.Show import Show
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.ratio = None
self.provider_type = GenericProvider.TORRENT
def find_propers(self, search_date=None):
|
def is_active(self):
return bool(sickbeard.USE_TORRENTS) and self.is_enabled()
@property
def _custom_trackers(self):
if not (sickbeard.TRACKERS_LIST and self.public):
return ''
return '&tr=' + '&tr='.join({x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()})
def _get_result(self, episodes):
return TorrentSearchResult(episodes)
def _get_size(self, item):
if isinstance(item, dict):
size = item.get('size', -1)
elif isinstance(item, (list, tuple)) and len(item) > 2:
size = item[2]
else:
size = -1
# Make sure we didn't select seeds/leechers by accident
if not size or size < 1024 * 1024:
size = -1
return try_int(size, -1)
def _get_storage_dir(self):
return sickbeard.TORRENT_DIR
def _get_title_and_url(self, item):
if isinstance(item, (dict, FeedParserDict)):
download_url = item.get('url', '')
title = item.get('title', '')
if not download_url:
download_url = item.get('link', '')
elif isinstance(item, (list, tuple)) and len(item) > 1:
download_url = item[1]
title = item[0]
else:
download_url = ''
title = ''
if title.endswith('DIAMOND'):
logger.log('Skipping DIAMOND release for mass fake releases.')
download_url = title = 'FAKERELEASE'
if download_url:
download_url = download_url.replace('&', '&')
if title:
title = title.replace(' ', '.')
return title, download_url
def _verify_download(self, file_name=None):
try:
parser = createParser(file_name)
if parser:
# pylint: disable=protected-access
# Access to a protected member of a client class
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except Exception:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log('Failed to validate torrent file: {0}'.format(ex(e)), logger.DEBUG)
logger.log('Result is not a valid torrent file', logger.DEBUG)
return False
def seed_ratio(self):
return self.ratio
|
results = []
db = DBConnection()
placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
sql_results = db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
' FROM tv_episodes AS e'
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND e.status IN (' + placeholder + ') and e.is_proper = 0'
)
for result in sql_results or []:
show = Show.find(sickbeard.showList, int(result[b'showid']))
if show:
episode = show.getEpisode(result[b'season'], result[b'episode'])
for term in self.proper_strings:
search_strings = self._get_episode_search_strings(episode, add_string=term)
for item in self.search(search_strings[0]):
title, url = self._get_title_and_url(item)
results.append(Proper(title, url, datetime.today(), show))
return results
|
identifier_body
|
TorrentProvider.py
|
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from datetime import datetime
from feedparser.util import FeedParserDict
from hachoir_parser import createParser
import sickbeard
from sickbeard import logger
from sickbeard.classes import Proper, TorrentSearchResult
from sickbeard.common import Quality
from sickbeard.db import DBConnection
from sickrage.helper.common import try_int
from sickrage.helper.exceptions import ex
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.show.Show import Show
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.ratio = None
self.provider_type = GenericProvider.TORRENT
def find_propers(self, search_date=None):
results = []
db = DBConnection()
placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
sql_results = db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
' FROM tv_episodes AS e'
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND e.status IN (' + placeholder + ') and e.is_proper = 0'
)
for result in sql_results or []:
show = Show.find(sickbeard.showList, int(result[b'showid']))
if show:
episode = show.getEpisode(result[b'season'], result[b'episode'])
for term in self.proper_strings:
search_strings = self._get_episode_search_strings(episode, add_string=term)
for item in self.search(search_strings[0]):
title, url = self._get_title_and_url(item)
results.append(Proper(title, url, datetime.today(), show))
return results
def is_active(self):
return bool(sickbeard.USE_TORRENTS) and self.is_enabled()
@property
def _custom_trackers(self):
if not (sickbeard.TRACKERS_LIST and self.public):
return ''
return '&tr=' + '&tr='.join({x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()})
def _get_result(self, episodes):
return TorrentSearchResult(episodes)
def _get_size(self, item):
if isinstance(item, dict):
size = item.get('size', -1)
elif isinstance(item, (list, tuple)) and len(item) > 2:
size = item[2]
else:
size = -1
# Make sure we didn't select seeds/leechers by accident
if not size or size < 1024 * 1024:
size = -1
return try_int(size, -1)
def _get_storage_dir(self):
return sickbeard.TORRENT_DIR
def _get_title_and_url(self, item):
if isinstance(item, (dict, FeedParserDict)):
download_url = item.get('url', '')
title = item.get('title', '')
|
download_url = item[1]
title = item[0]
else:
download_url = ''
title = ''
if title.endswith('DIAMOND'):
logger.log('Skipping DIAMOND release for mass fake releases.')
download_url = title = 'FAKERELEASE'
if download_url:
download_url = download_url.replace('&', '&')
if title:
title = title.replace(' ', '.')
return title, download_url
def _verify_download(self, file_name=None):
try:
parser = createParser(file_name)
if parser:
# pylint: disable=protected-access
# Access to a protected member of a client class
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except Exception:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log('Failed to validate torrent file: {0}'.format(ex(e)), logger.DEBUG)
logger.log('Result is not a valid torrent file', logger.DEBUG)
return False
def seed_ratio(self):
return self.ratio
|
if not download_url:
download_url = item.get('link', '')
elif isinstance(item, (list, tuple)) and len(item) > 1:
|
random_line_split
|
TorrentProvider.py
|
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from datetime import datetime
from feedparser.util import FeedParserDict
from hachoir_parser import createParser
import sickbeard
from sickbeard import logger
from sickbeard.classes import Proper, TorrentSearchResult
from sickbeard.common import Quality
from sickbeard.db import DBConnection
from sickrage.helper.common import try_int
from sickrage.helper.exceptions import ex
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.show.Show import Show
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.ratio = None
self.provider_type = GenericProvider.TORRENT
def find_propers(self, search_date=None):
results = []
db = DBConnection()
placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
sql_results = db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
' FROM tv_episodes AS e'
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND e.status IN (' + placeholder + ') and e.is_proper = 0'
)
for result in sql_results or []:
show = Show.find(sickbeard.showList, int(result[b'showid']))
if show:
episode = show.getEpisode(result[b'season'], result[b'episode'])
for term in self.proper_strings:
search_strings = self._get_episode_search_strings(episode, add_string=term)
for item in self.search(search_strings[0]):
title, url = self._get_title_and_url(item)
results.append(Proper(title, url, datetime.today(), show))
return results
def is_active(self):
return bool(sickbeard.USE_TORRENTS) and self.is_enabled()
@property
def _custom_trackers(self):
if not (sickbeard.TRACKERS_LIST and self.public):
return ''
return '&tr=' + '&tr='.join({x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()})
def _get_result(self, episodes):
return TorrentSearchResult(episodes)
def _get_size(self, item):
if isinstance(item, dict):
size = item.get('size', -1)
elif isinstance(item, (list, tuple)) and len(item) > 2:
size = item[2]
else:
size = -1
# Make sure we didn't select seeds/leechers by accident
if not size or size < 1024 * 1024:
size = -1
return try_int(size, -1)
def _get_storage_dir(self):
return sickbeard.TORRENT_DIR
def _get_title_and_url(self, item):
if isinstance(item, (dict, FeedParserDict)):
download_url = item.get('url', '')
title = item.get('title', '')
if not download_url:
download_url = item.get('link', '')
elif isinstance(item, (list, tuple)) and len(item) > 1:
download_url = item[1]
title = item[0]
else:
download_url = ''
title = ''
if title.endswith('DIAMOND'):
|
if download_url:
download_url = download_url.replace('&', '&')
if title:
title = title.replace(' ', '.')
return title, download_url
def _verify_download(self, file_name=None):
try:
parser = createParser(file_name)
if parser:
# pylint: disable=protected-access
# Access to a protected member of a client class
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except Exception:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log('Failed to validate torrent file: {0}'.format(ex(e)), logger.DEBUG)
logger.log('Result is not a valid torrent file', logger.DEBUG)
return False
def seed_ratio(self):
return self.ratio
|
logger.log('Skipping DIAMOND release for mass fake releases.')
download_url = title = 'FAKERELEASE'
|
conditional_block
|
TorrentProvider.py
|
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from datetime import datetime
from feedparser.util import FeedParserDict
from hachoir_parser import createParser
import sickbeard
from sickbeard import logger
from sickbeard.classes import Proper, TorrentSearchResult
from sickbeard.common import Quality
from sickbeard.db import DBConnection
from sickrage.helper.common import try_int
from sickrage.helper.exceptions import ex
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.show.Show import Show
class TorrentProvider(GenericProvider):
def __init__(self, name):
GenericProvider.__init__(self, name)
self.ratio = None
self.provider_type = GenericProvider.TORRENT
def find_propers(self, search_date=None):
results = []
db = DBConnection()
placeholder = ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST])
sql_results = db.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate'
' FROM tv_episodes AS e'
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)'
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND e.status IN (' + placeholder + ') and e.is_proper = 0'
)
for result in sql_results or []:
show = Show.find(sickbeard.showList, int(result[b'showid']))
if show:
episode = show.getEpisode(result[b'season'], result[b'episode'])
for term in self.proper_strings:
search_strings = self._get_episode_search_strings(episode, add_string=term)
for item in self.search(search_strings[0]):
title, url = self._get_title_and_url(item)
results.append(Proper(title, url, datetime.today(), show))
return results
def is_active(self):
return bool(sickbeard.USE_TORRENTS) and self.is_enabled()
@property
def _custom_trackers(self):
if not (sickbeard.TRACKERS_LIST and self.public):
return ''
return '&tr=' + '&tr='.join({x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()})
def _get_result(self, episodes):
return TorrentSearchResult(episodes)
def _get_size(self, item):
if isinstance(item, dict):
size = item.get('size', -1)
elif isinstance(item, (list, tuple)) and len(item) > 2:
size = item[2]
else:
size = -1
# Make sure we didn't select seeds/leechers by accident
if not size or size < 1024 * 1024:
size = -1
return try_int(size, -1)
def _get_storage_dir(self):
return sickbeard.TORRENT_DIR
def _get_title_and_url(self, item):
if isinstance(item, (dict, FeedParserDict)):
download_url = item.get('url', '')
title = item.get('title', '')
if not download_url:
download_url = item.get('link', '')
elif isinstance(item, (list, tuple)) and len(item) > 1:
download_url = item[1]
title = item[0]
else:
download_url = ''
title = ''
if title.endswith('DIAMOND'):
logger.log('Skipping DIAMOND release for mass fake releases.')
download_url = title = 'FAKERELEASE'
if download_url:
download_url = download_url.replace('&amp;', '&')
if title:
title = title.replace(' ', '.')
return title, download_url
def
|
(self, file_name=None):
try:
parser = createParser(file_name)
if parser:
# pylint: disable=protected-access
# Access to a protected member of a client class
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except Exception:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log('Failed to validate torrent file: {0}'.format(ex(e)), logger.DEBUG)
logger.log('Result is not a valid torrent file', logger.DEBUG)
return False
def seed_ratio(self):
return self.ratio
|
_verify_download
|
identifier_name
|
web.module.ts
|
// angular
import { NgModule } from '@angular/core';
import { APP_BASE_HREF } from '@angular/common';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule } from '@angular/router';
import { Http } from '@angular/http';
// libs
import { StoreModule } from '@ngrx/store';
import { EffectsModule } from '@ngrx/effects';
import { StoreDevtoolsModule } from '@ngrx/store-devtools';
import { TranslateLoader } from '@ngx-translate/core';
// Mutua Modules and Components Loading Configuration
import { MutuaExportedComponents, MutuaExportedRoutes, MutuaExportedModules, MutuaAppComponent } from './app/shared/mutua/mutua.html.loading.config';
// app
//import { APP_COMPONENTS, AppComponent } from './app/components/index';
//import { routes } from './app/components/app.routes';
// feature modules
import { WindowService, StorageService, ConsoleService, createConsoleTarget, provideConsoleTarget, LogTarget, LogLevel, ConsoleTarget } from './app/modules/core/services/index';
import { CoreModule, Config } from './app/modules/core/index';
import { AnalyticsModule } from './app/modules/analytics/index';
import { MultilingualModule, Languages, translateLoaderFactory, MultilingualEffects } from './app/modules/i18n/index';
import { SampleModule, SampleEffects } from './app/modules/sample/index';
import { AppReducer } from './app/modules/ngrx/index';
// config
Config.PLATFORM_TARGET = Config.PLATFORMS.WEB;
if (String('<%= BUILD_TYPE %>') === 'dev') {
// only output console logging in dev mode
Config.DEBUG.LEVEL_4 = true;
}
let routerModule = RouterModule.forRoot(MutuaExportedRoutes);
if (String('<%= TARGET_DESKTOP %>') === 'true') {
Config.PLATFORM_TARGET = Config.PLATFORMS.DESKTOP;
// desktop (electron) must use hash
routerModule = RouterModule.forRoot(MutuaExportedRoutes, { useHash: true });
}
declare var window, console, localStorage;
// For AoT compilation to work:
export function win() {
return window;
}
export function storage() {
return localStorage;
}
export function cons() {
return console;
}
export function consoleLogTarget(consoleService: ConsoleService) {
return new ConsoleTarget(consoleService, { minLogLevel: LogLevel.Debug });
}
let DEV_IMPORTS: any[] = [];
if (String('<%= BUILD_TYPE %>') === 'dev') {
DEV_IMPORTS = [
...DEV_IMPORTS,
StoreDevtoolsModule.instrumentOnlyWithExtension()
];
}
@NgModule({
imports: [
...MutuaExportedModules,
BrowserModule,
CoreModule.forRoot([
{ provide: WindowService, useFactory: (win) },
{ provide: StorageService, useFactory: (storage) },
{ provide: ConsoleService, useFactory: (cons) },
{ provide: LogTarget, useFactory: (consoleLogTarget), deps: [ConsoleService], multi: true }
]),
routerModule,
AnalyticsModule,
MultilingualModule.forRoot([{
provide: TranslateLoader,
deps: [Http],
useFactory: (translateLoaderFactory)
}]),
SampleModule,
// configure app state
StoreModule.provideStore(AppReducer),
EffectsModule.run(MultilingualEffects),
EffectsModule.run(SampleEffects),
// dev environment only imports
DEV_IMPORTS,
],
declarations: [
...MutuaExportedComponents,
],
providers: [
{
|
useValue: '<%= APP_BASE %>'
},
// override with supported languages
{
provide: Languages,
useValue: Config.GET_SUPPORTED_LANGUAGES()
}
],
bootstrap: [MutuaAppComponent]
})
export class WebModule { }
|
provide: APP_BASE_HREF,
|
random_line_split
|
web.module.ts
|
// angular
import { NgModule } from '@angular/core';
import { APP_BASE_HREF } from '@angular/common';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule } from '@angular/router';
import { Http } from '@angular/http';
// libs
import { StoreModule } from '@ngrx/store';
import { EffectsModule } from '@ngrx/effects';
import { StoreDevtoolsModule } from '@ngrx/store-devtools';
import { TranslateLoader } from '@ngx-translate/core';
// Mutua Modules and Components Loading Configuration
import { MutuaExportedComponents, MutuaExportedRoutes, MutuaExportedModules, MutuaAppComponent } from './app/shared/mutua/mutua.html.loading.config';
// app
//import { APP_COMPONENTS, AppComponent } from './app/components/index';
//import { routes } from './app/components/app.routes';
// feature modules
import { WindowService, StorageService, ConsoleService, createConsoleTarget, provideConsoleTarget, LogTarget, LogLevel, ConsoleTarget } from './app/modules/core/services/index';
import { CoreModule, Config } from './app/modules/core/index';
import { AnalyticsModule } from './app/modules/analytics/index';
import { MultilingualModule, Languages, translateLoaderFactory, MultilingualEffects } from './app/modules/i18n/index';
import { SampleModule, SampleEffects } from './app/modules/sample/index';
import { AppReducer } from './app/modules/ngrx/index';
// config
Config.PLATFORM_TARGET = Config.PLATFORMS.WEB;
if (String('<%= BUILD_TYPE %>') === 'dev') {
// only output console logging in dev mode
Config.DEBUG.LEVEL_4 = true;
}
let routerModule = RouterModule.forRoot(MutuaExportedRoutes);
if (String('<%= TARGET_DESKTOP %>') === 'true') {
Config.PLATFORM_TARGET = Config.PLATFORMS.DESKTOP;
// desktop (electron) must use hash
routerModule = RouterModule.forRoot(MutuaExportedRoutes, { useHash: true });
}
declare var window, console, localStorage;
// For AoT compilation to work:
export function
|
() {
return window;
}
export function storage() {
return localStorage;
}
export function cons() {
return console;
}
export function consoleLogTarget(consoleService: ConsoleService) {
return new ConsoleTarget(consoleService, { minLogLevel: LogLevel.Debug });
}
let DEV_IMPORTS: any[] = [];
if (String('<%= BUILD_TYPE %>') === 'dev') {
DEV_IMPORTS = [
...DEV_IMPORTS,
StoreDevtoolsModule.instrumentOnlyWithExtension()
];
}
@NgModule({
imports: [
...MutuaExportedModules,
BrowserModule,
CoreModule.forRoot([
{ provide: WindowService, useFactory: (win) },
{ provide: StorageService, useFactory: (storage) },
{ provide: ConsoleService, useFactory: (cons) },
{ provide: LogTarget, useFactory: (consoleLogTarget), deps: [ConsoleService], multi: true }
]),
routerModule,
AnalyticsModule,
MultilingualModule.forRoot([{
provide: TranslateLoader,
deps: [Http],
useFactory: (translateLoaderFactory)
}]),
SampleModule,
// configure app state
StoreModule.provideStore(AppReducer),
EffectsModule.run(MultilingualEffects),
EffectsModule.run(SampleEffects),
// dev environment only imports
DEV_IMPORTS,
],
declarations: [
...MutuaExportedComponents,
],
providers: [
{
provide: APP_BASE_HREF,
useValue: '<%= APP_BASE %>'
},
// override with supported languages
{
provide: Languages,
useValue: Config.GET_SUPPORTED_LANGUAGES()
}
],
bootstrap: [MutuaAppComponent]
})
export class WebModule { }
|
win
|
identifier_name
|
web.module.ts
|
// angular
import { NgModule } from '@angular/core';
import { APP_BASE_HREF } from '@angular/common';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule } from '@angular/router';
import { Http } from '@angular/http';
// libs
import { StoreModule } from '@ngrx/store';
import { EffectsModule } from '@ngrx/effects';
import { StoreDevtoolsModule } from '@ngrx/store-devtools';
import { TranslateLoader } from '@ngx-translate/core';
// Mutua Modules and Components Loading Configuration
import { MutuaExportedComponents, MutuaExportedRoutes, MutuaExportedModules, MutuaAppComponent } from './app/shared/mutua/mutua.html.loading.config';
// app
//import { APP_COMPONENTS, AppComponent } from './app/components/index';
//import { routes } from './app/components/app.routes';
// feature modules
import { WindowService, StorageService, ConsoleService, createConsoleTarget, provideConsoleTarget, LogTarget, LogLevel, ConsoleTarget } from './app/modules/core/services/index';
import { CoreModule, Config } from './app/modules/core/index';
import { AnalyticsModule } from './app/modules/analytics/index';
import { MultilingualModule, Languages, translateLoaderFactory, MultilingualEffects } from './app/modules/i18n/index';
import { SampleModule, SampleEffects } from './app/modules/sample/index';
import { AppReducer } from './app/modules/ngrx/index';
// config
Config.PLATFORM_TARGET = Config.PLATFORMS.WEB;
if (String('<%= BUILD_TYPE %>') === 'dev') {
// only output console logging in dev mode
Config.DEBUG.LEVEL_4 = true;
}
let routerModule = RouterModule.forRoot(MutuaExportedRoutes);
if (String('<%= TARGET_DESKTOP %>') === 'true')
|
declare var window, console, localStorage;
// For AoT compilation to work:
export function win() {
return window;
}
export function storage() {
return localStorage;
}
export function cons() {
return console;
}
export function consoleLogTarget(consoleService: ConsoleService) {
return new ConsoleTarget(consoleService, { minLogLevel: LogLevel.Debug });
}
let DEV_IMPORTS: any[] = [];
if (String('<%= BUILD_TYPE %>') === 'dev') {
DEV_IMPORTS = [
...DEV_IMPORTS,
StoreDevtoolsModule.instrumentOnlyWithExtension()
];
}
@NgModule({
imports: [
...MutuaExportedModules,
BrowserModule,
CoreModule.forRoot([
{ provide: WindowService, useFactory: (win) },
{ provide: StorageService, useFactory: (storage) },
{ provide: ConsoleService, useFactory: (cons) },
{ provide: LogTarget, useFactory: (consoleLogTarget), deps: [ConsoleService], multi: true }
]),
routerModule,
AnalyticsModule,
MultilingualModule.forRoot([{
provide: TranslateLoader,
deps: [Http],
useFactory: (translateLoaderFactory)
}]),
SampleModule,
// configure app state
StoreModule.provideStore(AppReducer),
EffectsModule.run(MultilingualEffects),
EffectsModule.run(SampleEffects),
// dev environment only imports
DEV_IMPORTS,
],
declarations: [
...MutuaExportedComponents,
],
providers: [
{
provide: APP_BASE_HREF,
useValue: '<%= APP_BASE %>'
},
// override with supported languages
{
provide: Languages,
useValue: Config.GET_SUPPORTED_LANGUAGES()
}
],
bootstrap: [MutuaAppComponent]
})
export class WebModule { }
|
{
Config.PLATFORM_TARGET = Config.PLATFORMS.DESKTOP;
// desktop (electron) must use hash
routerModule = RouterModule.forRoot(MutuaExportedRoutes, { useHash: true });
}
|
conditional_block
|
web.module.ts
|
// angular
import { NgModule } from '@angular/core';
import { APP_BASE_HREF } from '@angular/common';
import { BrowserModule } from '@angular/platform-browser';
import { RouterModule } from '@angular/router';
import { Http } from '@angular/http';
// libs
import { StoreModule } from '@ngrx/store';
import { EffectsModule } from '@ngrx/effects';
import { StoreDevtoolsModule } from '@ngrx/store-devtools';
import { TranslateLoader } from '@ngx-translate/core';
// Mutua Modules and Components Loading Configuration
import { MutuaExportedComponents, MutuaExportedRoutes, MutuaExportedModules, MutuaAppComponent } from './app/shared/mutua/mutua.html.loading.config';
// app
//import { APP_COMPONENTS, AppComponent } from './app/components/index';
//import { routes } from './app/components/app.routes';
// feature modules
import { WindowService, StorageService, ConsoleService, createConsoleTarget, provideConsoleTarget, LogTarget, LogLevel, ConsoleTarget } from './app/modules/core/services/index';
import { CoreModule, Config } from './app/modules/core/index';
import { AnalyticsModule } from './app/modules/analytics/index';
import { MultilingualModule, Languages, translateLoaderFactory, MultilingualEffects } from './app/modules/i18n/index';
import { SampleModule, SampleEffects } from './app/modules/sample/index';
import { AppReducer } from './app/modules/ngrx/index';
// config
Config.PLATFORM_TARGET = Config.PLATFORMS.WEB;
if (String('<%= BUILD_TYPE %>') === 'dev') {
// only output console logging in dev mode
Config.DEBUG.LEVEL_4 = true;
}
let routerModule = RouterModule.forRoot(MutuaExportedRoutes);
if (String('<%= TARGET_DESKTOP %>') === 'true') {
Config.PLATFORM_TARGET = Config.PLATFORMS.DESKTOP;
// desktop (electron) must use hash
routerModule = RouterModule.forRoot(MutuaExportedRoutes, { useHash: true });
}
declare var window, console, localStorage;
// For AoT compilation to work:
export function win() {
return window;
}
export function storage()
|
export function cons() {
return console;
}
export function consoleLogTarget(consoleService: ConsoleService) {
return new ConsoleTarget(consoleService, { minLogLevel: LogLevel.Debug });
}
let DEV_IMPORTS: any[] = [];
if (String('<%= BUILD_TYPE %>') === 'dev') {
DEV_IMPORTS = [
...DEV_IMPORTS,
StoreDevtoolsModule.instrumentOnlyWithExtension()
];
}
@NgModule({
imports: [
...MutuaExportedModules,
BrowserModule,
CoreModule.forRoot([
{ provide: WindowService, useFactory: (win) },
{ provide: StorageService, useFactory: (storage) },
{ provide: ConsoleService, useFactory: (cons) },
{ provide: LogTarget, useFactory: (consoleLogTarget), deps: [ConsoleService], multi: true }
]),
routerModule,
AnalyticsModule,
MultilingualModule.forRoot([{
provide: TranslateLoader,
deps: [Http],
useFactory: (translateLoaderFactory)
}]),
SampleModule,
// configure app state
StoreModule.provideStore(AppReducer),
EffectsModule.run(MultilingualEffects),
EffectsModule.run(SampleEffects),
// dev environment only imports
DEV_IMPORTS,
],
declarations: [
...MutuaExportedComponents,
],
providers: [
{
provide: APP_BASE_HREF,
useValue: '<%= APP_BASE %>'
},
// override with supported languages
{
provide: Languages,
useValue: Config.GET_SUPPORTED_LANGUAGES()
}
],
bootstrap: [MutuaAppComponent]
})
export class WebModule { }
|
{
return localStorage;
}
|
identifier_body
|