Fill-in-the-middle (FIM) code dataset. Each row splits one source file into a
prefix, a held-out middle span, and a suffix; the `fim_type` column records how
the middle span was chosen.

| Column | Type | Notes |
|---|---|---|
| file_name | string | 4 to 140 characters |
| prefix | string | 0 to 39k characters |
| suffix | string | 0 to 36.1k characters |
| middle | string | 0 to 29.4k characters |
| fim_type | string | 4 classes: identifier_name, identifier_body, conditional_block, random_line_split |

The sections below show the source files contained in the sample rows.
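
Concatenating `prefix + middle + suffix` reproduces the original file, and the
same three fields can be packed into a training prompt. A minimal sketch of
consuming one row (the sentinel token names are illustrative assumptions, not
part of this dataset):

```python
# Minimal sketch of consuming one dataset row for FIM training.
# The sentinel tokens are assumptions; models trained with PSM-style
# formatting each define their own special tokens.
FIM_PREFIX = "<fim_prefix>"
FIM_SUFFIX = "<fim_suffix>"
FIM_MIDDLE = "<fim_middle>"

def reassemble(row):
    # prefix + middle + suffix reproduces the original source file.
    return row["prefix"] + row["middle"] + row["suffix"]

def to_psm_prompt(row):
    # Prefix-Suffix-Middle ordering: the model sees prefix and suffix,
    # then learns to generate the held-out middle span.
    return (FIM_PREFIX + row["prefix"] +
            FIM_SUFFIX + row["suffix"] +
            FIM_MIDDLE + row["middle"])

row = {
    "file_name": "example.py",
    "prefix": "def add(a, b):\n    ",
    "middle": "return a + b",
    "suffix": "\n\nprint(add(1, 2))\n",
    "fim_type": "random_line_split",
}
assert reassemble(row) == "def add(a, b):\n    return a + b\n\nprint(add(1, 2))\n"
```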
authenticator.js:

// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/**
 * @fileoverview A UI component to authenticate to Chrome. The component hosts
 * IdP web pages in a webview. A client who is interested in monitoring
 * authentication events should pass a listener object of type
 * cr.login.GaiaAuthHost.Listener as defined in this file. After initialization,
 * call {@code load} to start the authentication flow.
 */
cr.define('cr.login', function() {
  'use strict';

  // TODO(rogerta): should use gaia URL from GaiaUrls::gaia_url() instead
  // of hardcoding the prod URL here. As is, this does not work with staging
  // environments.
  var IDP_ORIGIN = 'https://accounts.google.com/';
  var IDP_PATH = 'ServiceLogin?skipvpage=true&sarp=1&rm=hide';
  var CONTINUE_URL =
      'chrome-extension://mfffpogegjflfpflabcdkioaeobkgjik/success.html';
  var SIGN_IN_HEADER = 'google-accounts-signin';
  var EMBEDDED_FORM_HEADER = 'google-accounts-embedded';
  var SAML_HEADER = 'google-accounts-saml';
  var LOCATION_HEADER = 'location';

  /**
   * The source URL parameter for the constrained signin flow.
   */
  var CONSTRAINED_FLOW_SOURCE = 'chrome';

  /**
   * Enum for the authorization mode, must match AuthMode defined in
   * chrome/browser/ui/webui/inline_login_ui.cc.
   * @enum {number}
   */
  var AuthMode = {
    DEFAULT: 0,
    OFFLINE: 1,
    DESKTOP: 2
  };

  /**
   * Enum for the authorization type.
   * @enum {number}
   */
  var AuthFlow = {
    DEFAULT: 0,
    SAML: 1
  };

  /**
   * Initializes the authenticator component.
   * @param {webview|string} webview The webview element or its ID to host IdP
   *     web pages.
   * @constructor
   */
  function Authenticator(webview) {
    this.webview_ = typeof webview == 'string' ? $(webview) : webview;
    assert(this.webview_);

    this.email_ = null;
    this.password_ = null;
    this.gaiaId_ = null;
    this.sessionIndex_ = null;
    this.chooseWhatToSync_ = false;
    this.skipForNow_ = false;
    this.authFlow_ = AuthFlow.DEFAULT;
    this.loaded_ = false;
    this.idpOrigin_ = null;
    this.continueUrl_ = null;
    this.continueUrlWithoutParams_ = null;
    this.initialFrameUrl_ = null;
    this.reloadUrl_ = null;
    this.trusted_ = true;
  }

  // TODO(guohui,xiyuan): no need to inherit EventTarget once we deprecate the
  // old event-based signin flow.
  Authenticator.prototype = Object.create(cr.EventTarget.prototype);

  /**
   * Loads the authenticator component with the given parameters.
   * @param {AuthMode} authMode Authorization mode.
   * @param {Object} data Parameters for the authorization flow.
   */
  Authenticator.prototype.load = function(authMode, data) {
    this.idpOrigin_ = data.gaiaUrl || IDP_ORIGIN;
    this.continueUrl_ = data.continueUrl || CONTINUE_URL;
    this.continueUrlWithoutParams_ =
        this.continueUrl_.substring(0, this.continueUrl_.indexOf('?')) ||
        this.continueUrl_;
    this.isConstrainedWindow_ = data.constrained == '1';
    this.initialFrameUrl_ = this.constructInitialFrameUrl_(data);
    this.reloadUrl_ = data.frameUrl || this.initialFrameUrl_;
    this.authFlow_ = AuthFlow.DEFAULT;

    this.webview_.src = this.reloadUrl_;
    this.webview_.addEventListener(
        'newwindow', this.onNewWindow_.bind(this));
    this.webview_.addEventListener(
        'loadstop', this.onLoadStop_.bind(this));
    this.webview_.request.onCompleted.addListener(
        this.onRequestCompleted_.bind(this),
        {urls: ['*://*/*', this.continueUrlWithoutParams_ + '*'],
         types: ['main_frame']},
        ['responseHeaders']);
    this.webview_.request.onHeadersReceived.addListener(
        this.onHeadersReceived_.bind(this),
        {urls: [this.idpOrigin_ + '*'], types: ['main_frame']},
        ['responseHeaders']);
    window.addEventListener(
        'message', this.onMessageFromWebview_.bind(this), false);
    window.addEventListener(
        'focus', this.onFocus_.bind(this), false);
    window.addEventListener(
        'popstate', this.onPopState_.bind(this), false);
  };

  /**
   * Reloads the authenticator component.
   */
  Authenticator.prototype.reload = function() {
    this.webview_.src = this.reloadUrl_;
    this.authFlow_ = AuthFlow.DEFAULT;
  };

  Authenticator.prototype.constructInitialFrameUrl_ = function(data) {
    var url = this.idpOrigin_ + (data.gaiaPath || IDP_PATH);
    url = appendParam(url, 'continue', this.continueUrl_);
    url = appendParam(url, 'service', data.service);
    if (data.hl)
      url = appendParam(url, 'hl', data.hl);
    if (data.email)
      url = appendParam(url, 'Email', data.email);
    if (this.isConstrainedWindow_)
      url = appendParam(url, 'source', CONSTRAINED_FLOW_SOURCE);
    return url;
  };

  /**
   * Invoked when a main frame request in the webview has completed.
   * @private
   */
  Authenticator.prototype.onRequestCompleted_ = function(details) {
    var currentUrl = details.url;
    if (currentUrl.lastIndexOf(this.continueUrlWithoutParams_, 0) == 0) {
      if (currentUrl.indexOf('ntp=1') >= 0)
        this.skipForNow_ = true;
      this.onAuthCompleted_();
      return;
    }
    if (currentUrl.indexOf('https') != 0)
      this.trusted_ = false;
    if (this.isConstrainedWindow_) {
      var isEmbeddedPage = false;
      if (this.idpOrigin_ && currentUrl.lastIndexOf(this.idpOrigin_) == 0) {
        var headers = details.responseHeaders;
        for (var i = 0; headers && i < headers.length; ++i) {
          if (headers[i].name.toLowerCase() == EMBEDDED_FORM_HEADER) {
            isEmbeddedPage = true;
            break;
          }
        }
      }
      if (!isEmbeddedPage) {
        this.dispatchEvent(new CustomEvent('resize', {detail: currentUrl}));
        return;
      }
    }
    this.updateHistoryState_(currentUrl);
    // Posts a message to IdP pages to initiate communication.
    if (currentUrl.lastIndexOf(this.idpOrigin_) == 0)
      this.webview_.contentWindow.postMessage({}, currentUrl);
  };

  /**
   * Manually updates the history. Invoked upon completion of a webview
   * navigation.
   * @param {string} url Request URL.
   * @private
   */
  Authenticator.prototype.updateHistoryState_ = function(url) {
    if (history.state && history.state.url != url)
      history.pushState({url: url}, '');
    else
      history.replaceState({url: url}, '');
  };

  /**
   * Invoked when the sign-in page takes focus.
   * @param {object} e The focus event being triggered.
   * @private
   */
  Authenticator.prototype.onFocus_ = function(e) {
    this.webview_.focus();
  };

  /**
   * Invoked when the history state is changed.
   * @param {object} e The popstate event being triggered.
   * @private
   */
  Authenticator.prototype.onPopState_ = function(e) {
    var state = e.state;
    if (state && state.url)
      this.webview_.src = state.url;
  };

  /**
   * Invoked when headers are received in the main frame of the webview. It
   * 1) reads the authenticated user info from a signin header,
   * 2) signals the start of a SAML flow upon receiving a SAML header.
   * @private
   */
  Authenticator.prototype.onHeadersReceived_ = function(details) {
    var headers = details.responseHeaders;
    for (var i = 0; headers && i < headers.length; ++i) {
      var header = headers[i];
      var headerName = header.name.toLowerCase();
      if (headerName == SIGN_IN_HEADER) {
        var headerValues = header.value.toLowerCase().split(',');
        var signinDetails = {};
        headerValues.forEach(function(e) {
          var pair = e.split('=');
          signinDetails[pair[0].trim()] = pair[1].trim();
        });
        // Removes the surrounding double quotes.
        var email = signinDetails['email'].slice(1, -1);
        if (this.email_ != email) {
          this.email_ = email;
          // Clears the scraped password if the email has changed.
          this.password_ = null;
        }
        this.gaiaId_ = signinDetails['obfuscatedid'].slice(1, -1);
        this.sessionIndex_ = signinDetails['sessionindex'];
      } else if (headerName == SAML_HEADER) {
        this.authFlow_ = AuthFlow.SAML;
      } else if (headerName == LOCATION_HEADER) {
        // If the "choose what to sync" checkbox was clicked, then the continue
        // URL will contain a source=3 field.
        var location = decodeURIComponent(header.value);
        this.chooseWhatToSync_ = !!location.match(/(\?|&)source=3($|&)/);
      }
    }
  };

  /**
   * Invoked when an HTML5 message is received from the webview element.
   * @param {object} e Payload of the received HTML5 message.
   * @private
   */
  Authenticator.prototype.onMessageFromWebview_ = function(e) {
    // The event origin does not have a trailing slash.
    if (e.origin != this.idpOrigin_.substring(0, this.idpOrigin_.length - 1)) {
      return;
    }
    var msg = e.data;
    if (msg.method == 'attemptLogin') {
      this.email_ = msg.email;
      this.password_ = msg.password;
      this.chooseWhatToSync_ = msg.chooseWhatToSync;
    }
  };

  /**
   * Invoked to process authentication completion.
   * @private
   */
  Authenticator.prototype.onAuthCompleted_ = function() {
    if (!this.email_ && !this.skipForNow_) {
      this.webview_.src = this.initialFrameUrl_;
      return;
    }
    this.dispatchEvent(
        new CustomEvent('authCompleted',
                        {detail: {email: this.email_,
                                  gaiaId: this.gaiaId_,
                                  password: this.password_,
                                  usingSAML: this.authFlow_ == AuthFlow.SAML,
                                  chooseWhatToSync: this.chooseWhatToSync_,
                                  skipForNow: this.skipForNow_,
                                  sessionIndex: this.sessionIndex_ || '',
                                  trusted: this.trusted_}}));
  };

  /**
   * Invoked when the webview attempts to open a new window.
   * @private
   */
  Authenticator.prototype.onNewWindow_ = function(e) {
    this.dispatchEvent(new CustomEvent('newWindow', {detail: e}));
  };

  /**
   * Invoked when the webview finishes loading a page.
   * @private
   */
  Authenticator.prototype.onLoadStop_ = function(e) {
    if (!this.loaded_) {
      this.loaded_ = true;
      this.webview_.focus();
      this.dispatchEvent(new Event('ready'));
    }
  };

  Authenticator.AuthFlow = AuthFlow;
  Authenticator.AuthMode = AuthMode;

  return {
    // TODO(guohui, xiyuan): Rename GaiaAuthHost to Authenticator once the old
    // iframe-based flow is deprecated.
    GaiaAuthHost: Authenticator
  };
});
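
The `google-accounts-signin` header handling above lower-cases the header
value, splits it on commas into `key=value` pairs, and strips the surrounding
quotes from `email` and `obfuscatedid`. A quick illustration of that parsing
logic, in Python for brevity (the header value is a made-up example):

```python
def parse_signin_header(value):
    # Mirrors onHeadersReceived_: lower-case, split on commas, then on '='.
    # partition('=') splits on the first '=' only, which is slightly more
    # robust than the JS split('=') it mirrors.
    details = {}
    for pair in value.lower().split(','):
        key, _, val = pair.partition('=')
        details[key.strip()] = val.strip()
    return details

hdr = 'email="[email protected]", obfuscatedid="12345", sessionindex=0'
d = parse_signin_header(hdr)
assert d['email'].strip('"') == '[email protected]'   # the JS uses slice(1, -1)
assert d['obfuscatedid'].strip('"') == '12345'
assert d['sessionindex'] == '0'
```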
fields.py:

import json
from wtforms.fields import TextAreaField
from shapely.geometry import shape, mapping
from .widgets import LeafletWidget
from sqlalchemy import func
import geoalchemy2
# from types import NoneType
# from .. import db  (how do you get db.session in a Field?)


class JSONField(TextAreaField):
    def _value(self):
        if self.raw_data:
            return self.raw_data[0]
        if self.data:
            return self.data
        return ""

    def process_formdata(self, valuelist):
        if valuelist:
            value = valuelist[0]
            if not value:
                self.data = None
                return
            try:
                self.data = self.from_json(value)
            except ValueError:
                self.data = None
                raise ValueError(self.gettext('Invalid JSON'))

    def to_json(self, obj):
        return json.dumps(obj)

    def from_json(self, data):
        return json.loads(data)


class GeoJSONField(JSONField):
    widget = LeafletWidget()

    def __init__(self, label=None, validators=None, geometry_type="GEOMETRY",
                 srid=-1, session=None, **kwargs):
        super(GeoJSONField, self).__init__(label, validators, **kwargs)
        self.web_srid = 4326
        self.srid = srid
        if self.srid == -1:
            self.transform_srid = self.web_srid
        else:
            self.transform_srid = self.srid
        self.geometry_type = geometry_type.upper()
        self.session = session

    def _value(self):
        if self.raw_data:
            return self.raw_data[0]
        if isinstance(self.data, geoalchemy2.elements.WKBElement):
            if self.srid == -1:
                self.data = self.session.scalar(func.ST_AsGeoJson(self.data))
            else:
                self.data = self.session.scalar(
                    func.ST_AsGeoJson(func.ST_Transform(self.data, self.web_srid)))
        return super(GeoJSONField, self)._value()

    def process_formdata(self, valuelist):
        super(GeoJSONField, self).process_formdata(valuelist)
        if str(self.data) == '':
            self.data = None
        if self.data is not None:
            web_shape = self.session.scalar(
                func.ST_AsText(func.ST_Transform(
                    func.ST_GeomFromText(shape(self.data).wkt, self.web_srid),
                    self.transform_srid)))
            self.data = 'SRID=' + str(self.srid) + ';' + str(web_shape)
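
A minimal usage sketch for `GeoJSONField`. It assumes a Flask-style app with a
SQLAlchemy session available as `db.session` and the Leaflet widget assets
loaded; the form, field, and model names are illustrative, not part of this
module:

```python
# Hypothetical form using the fields above; names are assumptions.
from wtforms import Form, StringField
# from myapp import db  # assumed SQLAlchemy handle providing db.session

class PlaceForm(Form):
    name = StringField('Name')
    # On submit, stores an EWKT string like 'SRID=4326;POINT(2.35 48.85)';
    # when rendering, existing WKB geometry is converted back to GeoJSON
    # for the Leaflet widget to display.
    geom = GeoJSONField('Geometry', geometry_type='POINT',
                        srid=4326, session=db.session)
```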
base.py | """Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
try:
from secret import *
except:
pass
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# Add SITE_ROOT to lookup application (wsgi)
path.append(SITE_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('olivier', '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
normpath(join(SITE_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
'cms.context_processors.media',
'sekizai.context_processors.sekizai',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
normpath(join(SITE_ROOT, 'templates')),
)
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
# Default Django middleware.
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.doc.XViewMiddleware',
'django.middleware.common.CommonMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
'cms.middleware.language.LanguageCookieMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'django.contrib.admin',
# 'django.contrib.admindocs',
'django.contrib.markup',
)
THIRD_PARTY_APPS = (
# Database migration helpers:
'south',
# Django CMS
'cms',
'cms.stacks',
'menus',
'mptt',
'menus',
'sekizai',
'django_countries',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'resume',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'wsgi.application'
########## END WSGI CONFIGURATION
######### DJANGO CMS
CMS_PERMISSION = True
CMS_PUBLIC_FOR = "all"
LANGUAGES = [
('fr', 'French'),
('en', 'English'),
]
CMS_LANGUAGES = {
'default': {
'fallbacks': ['fr', 'en', ],
'redirect_on_fallback':True,
'public': True,
'hide_untranslated': False,
}
}
CMS_TEMPLATES = (
('layouts/classic.html', 'Classic'),
('layouts/classic_home.html', 'Classic Home'), | ('layouts/classic_2columns.html', 'Classic 2 columns'),
)
######### END DJANGO CMS | random_line_split |
|
testImport.py:

# encoding: utf-8
import sys
sys.path.insert(0, "../src")


def urlopen(*args, **kwargs):
    # Only parse one arg: the url
    return Urls[args[0]]

# Provide a simple hashtable to contain the content of the urls and
# provide a mock object similar to what will be returned from the
# real urlopen() function calls
from io import StringIO
from time import time
import re
from nose.tools import with_setup


class MockUrlContent(StringIO):
    def __init__(self, content):
        super(MockUrlContent, self).__init__(content)
        self.headers = {
            'last-modified': time()
        }

    def close(self):
        pass

scheme_re = re.compile(r'file:(/+)?')


class MockUrlCache(dict):
    def __setitem__(self, name, content):
        super(MockUrlCache, self).__setitem__(name, MockUrlContent(content))

    def __getitem__(self, name):
        if name in self:
            return super(MockUrlCache, self).__getitem__(name)
        # Strip off 'file:[///]' from url
        elif name.startswith('file:'):
            try:
                name = scheme_re.sub('', name)
                return super(MockUrlCache, self).__getitem__(name)
            except KeyError:
                # Fall through
                pass
        # urlopen raises ValueError if unable to load content (not KeyError)
        raise ValueError("{0}: Cannot find file content".format(name))

Urls = MockUrlCache()


def clear_configs():
    pass


@with_setup(clear_configs)
def testImportContent():
    "Can import content from a file"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["file:file.txt"] = "Content embedded in a file"
    Urls["config.xml"] = \
        u"""<?xml version="1.0" encoding="utf-8"?>
        <config>
            <constants>
                <string key="import" src="file:file.txt"/>
            </constants>
        </config>
        """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("import") == "Content embedded in a file"


@with_setup(clear_configs)
def testImportConfig():
    "Can import another config file"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["config2.xml"] = \
        """<?xml version="1.0"?>
        <config>
            <constants>
                <string key="key22">This was imported from config2.xml</string>
            </constants>
        </config>
        """
    Urls["config.xml"] = \
        u"""<?xml version="1.0" encoding="utf-8"?>
        <config>
            <constants namespace="import" src="file:config2.xml"/>
            <constants>
                <string key="imported">%(import:key22)</string>
            </constants>
        </config>
        """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("imported") == "This was imported from config2.xml"


@with_setup(clear_configs)
def testCircularImport():
    "Properly detect circular importing"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["config2.xml"] = \
        """<?xml version="1.0"?>
        <config>
            <constants namespace="circular" src="file:config.xml"/>
            <constants>
                <string key="key22">This was imported from config2.xml</string>
                <string key="foreign">
                    Namespace changed in %(circular:key4.import)
                </string>
            </constants>
        </config>
        """
    Urls["config.xml"] = \
        """<?xml version="1.0"?>
        <config>
            <constants namespace="import" src="file:config2.xml"/>
            <constants>
                <section key="key4">
                    <string key="key5">value2</string>
                    <string key="import">%(import:key22)</string>
                </section>
            </constants>
        </config>
        """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("import:foreign") == \
        "Namespace changed in This was imported from config2.xml"


@with_setup(clear_configs)
def testRelativeImport():
    """Transfer leading absolute or relative path to the location of
    documents imported"""
    from xmlconfig import getConfig
    Urls["../config/config2.xml"] = \
        """<?xml version="1.0"?>
        <config>
            <constants>
                <string key="key22">This was imported from config2.xml</string>
            </constants>
        </config>
        """
    Urls["../config/config.xml"] = \
        """<?xml version="1.0" encoding="utf-8"?>
        <config>
            <constants namespace="import" src="file:config2.xml"/>
            <constants>
                <string key="imported">%(import:key22)</string>
            </constants>
        </config>
        """
    conf = getConfig()
    conf.load("../config/config.xml")
    assert conf.get("imported") == "This was imported from config2.xml"
cast_sign_loss.rs:

use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::{method_chain_args, sext};
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};

use super::CAST_SIGN_LOSS;

pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
    if should_lint(cx, cast_op, cast_from, cast_to) {
        span_lint(
            cx,
            CAST_SIGN_LOSS,
            expr.span,
            &format!(
                "casting `{}` to `{}` may lose the sign of the value",
                cast_from, cast_to
            ),
        );
    }
}

fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
    match (cast_from.is_integral(), cast_to.is_integral()) {
        (true, true) => {
            if !cast_from.is_signed() || cast_to.is_signed() {
                return false;
            }

            // Don't lint for positive constants.
            let const_val = constant(cx, cx.typeck_results(), cast_op);
            if_chain! {
                if let Some((Constant::Int(n), _)) = const_val;
                if let ty::Int(ity) = *cast_from.kind();
                if sext(cx.tcx, n, ity) >= 0;
                then {
                    return false;
                }
            }

            // Don't lint for the result of methods that always return non-negative values.
            if let ExprKind::MethodCall(path, _, _, _) = cast_op.kind {
                let mut method_name = path.ident.name.as_str();
                let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];

                if_chain! {
                    if method_name == "unwrap";
                    if let Some(arglist) = method_chain_args(cast_op, &["unwrap"]);
                    if let ExprKind::MethodCall(inner_path, _, _, _) = &arglist[0][0].kind;
                    then {
                        method_name = inner_path.ident.name.as_str();
                    }
                }

                if allowed_methods.iter().any(|&name| method_name == name) {
                    return false;
                }
            }

            true
        },
        (false, true) => !cast_to.is_signed(),
        (_, _) => false,
    }
}
structgruel_1_1rt__sched__param.js:

var structgruel_1_1rt__sched__param =
[
    [ "rt_sched_param", "structgruel_1_1rt__sched__param.html#a7592c8e69e3a07cc8ac0b0bf9339df70", null ],
    [ "rt_sched_param", "structgruel_1_1rt__sched__param.html#a5257e726503db28d31aa4f780ddead47", null ],
    [ "policy", "structgruel_1_1rt__sched__param.html#af42388ca3382b9f646228f9a391f1dbc", null ],
    [ "priority", "structgruel_1_1rt__sched__param.html#ae3b1bc20d91fa2e614aa7bb483866be6", null ]
];
utils.py | import functools
import os
import re
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from subprocess import TimeoutExpired
from catkin_tools.commands.catkin import main as catkin_main
TESTS_DIR = os.path.dirname(__file__)
MOCK_DIR = os.path.join(TESTS_DIR, 'mock_resources')
def catkin_success(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
if ret != 0:
import traceback
traceback.print_exc()
finally:
os.environ = orig_environ
return ret == 0
def catkin_failure(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
finally:
os.environ = orig_environ
return ret != 0
class AssertRaisesContext(object):
def __init__(self, expected, expected_regex=None):
self.expected = expected
self.expected_regex = expected_regex
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if self.expected is None:
if exc_type is None:
return True
else:
raise
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise AssertionError("{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
raise
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
expected_regex = re.compile(expected_regex)
if not expected_regex.search(str(exc_value)):
raise AssertionError("'{0}' does not match '{1}'".format(expected_regex.pattern, str(exc_value)))
return True
class redirected_stdio(object):
def __enter__(self):
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
self.out = StringIO()
self.err = StringIO()
sys.stdout = self.out
sys.stderr = self.err
return self.out, self.err
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
print(self.out.getvalue())
class temporary_directory(object):
def __init__(self, prefix=''):
self.prefix = prefix
self.delete = False
def __enter__(self):
self.original_cwd = os.getcwd()
self.temp_path = tempfile.mkdtemp(prefix=self.prefix)
os.chdir(self.temp_path)
return self.temp_path
def __exit__(self, exc_type, exc_value, traceback):
if self.delete and self.temp_path and os.path.exists(self.temp_path):
print('Deleting temporary testing directory: %s' % self.temp_path)
shutil.rmtree(self.temp_path)
if self.original_cwd and os.path.exists(self.original_cwd):
os.chdir(self.original_cwd)
def in_temporary_directory(f):
@functools.wraps(f)
def decorated(*args, **kwds):
with temporary_directory() as directory: | return f(*args, **kwds)
decorated.__name__ = f.__name__
return decorated
def run(args, **kwargs):
"""
Call to Popen, returns (errcode, stdout, stderr)
"""
print("run:", args)
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
cwd=kwargs.get('cwd', os.getcwd()))
print("P==", p.__dict__)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout, stderr)
def assert_cmd_success(cmd, **kwargs):
"""
Asserts that running a command returns zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, **kwargs)
print("<<<", str(out))
assert r == 0, "cmd failed with result %s:\n %s " % (r, str(cmd))
return out
def assert_cmd_failure(cmd, **kwargs):
"""
Asserts that running a command returns non-zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, withexitstatus=True, **kwargs)
print("<<<", str(out))
assert 0 != r, "cmd succeeded, but it should fail: %s result=%u\noutput=\n%s" % (cmd, r, out)
return out
def assert_files_exist(prefix, files):
"""
Assert that all files exist in the prefix.
"""
for f in files:
p = os.path.join(prefix, f)
print("Checking for", p)
assert os.path.exists(p), "%s doesn't exist" % p | from inspect import getargspec
# If it takes directory of kwargs and kwds does already have
# directory, inject it
if 'directory' not in kwds and 'directory' in getargspec(f)[0]:
kwds['directory'] = directory | random_line_split |
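A minimal usage sketch for the test helpers above (a hypothetical test, assuming the file is importable as `utils`). Note that `inspect.getargspec`, which `in_temporary_directory` relies on, was removed in Python 3.11, so `inspect.getfullargspec` would be needed on newer interpreters:

```python
import os
# Hypothetical import path; adjust to wherever utils.py lives in the repo.
from utils import in_temporary_directory, assert_files_exist

@in_temporary_directory
def test_writes_marker(directory):
    # The decorator created a temp dir, chdir'd into it, and injected
    # `directory` because the parameter is declared in the signature.
    with open('marker.txt', 'w') as fh:
        fh.write('hello')
    assert_files_exist(directory, ['marker.txt'])

test_writes_marker()
```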
utils.py | import functools
import os
import re
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from subprocess import TimeoutExpired
from catkin_tools.commands.catkin import main as catkin_main
TESTS_DIR = os.path.dirname(__file__)
MOCK_DIR = os.path.join(TESTS_DIR, 'mock_resources')
def catkin_success(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
if ret != 0:
import traceback
traceback.print_exc()
finally:
os.environ = orig_environ
return ret == 0
def catkin_failure(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
finally:
os.environ = orig_environ
return ret != 0
class AssertRaisesContext(object):
def __init__(self, expected, expected_regex=None):
self.expected = expected
self.expected_regex = expected_regex
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if self.expected is None:
if exc_type is None:
return True
else:
raise
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise AssertionError("{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
raise
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
expected_regex = re.compile(expected_regex)
if not expected_regex.search(str(exc_value)):
raise AssertionError("'{0}' does not match '{1}'".format(expected_regex.pattern, str(exc_value)))
return True
class redirected_stdio(object):
def __enter__(self):
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
self.out = StringIO()
self.err = StringIO()
sys.stdout = self.out
sys.stderr = self.err
return self.out, self.err
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
print(self.out.getvalue())
class | (object):
def __init__(self, prefix=''):
self.prefix = prefix
self.delete = False
def __enter__(self):
self.original_cwd = os.getcwd()
self.temp_path = tempfile.mkdtemp(prefix=self.prefix)
os.chdir(self.temp_path)
return self.temp_path
def __exit__(self, exc_type, exc_value, traceback):
if self.delete and self.temp_path and os.path.exists(self.temp_path):
print('Deleting temporary testing directory: %s' % self.temp_path)
shutil.rmtree(self.temp_path)
if self.original_cwd and os.path.exists(self.original_cwd):
os.chdir(self.original_cwd)
def in_temporary_directory(f):
@functools.wraps(f)
def decorated(*args, **kwds):
with temporary_directory() as directory:
from inspect import getargspec
# If it takes directory of kwargs and kwds does already have
# directory, inject it
if 'directory' not in kwds and 'directory' in getargspec(f)[0]:
kwds['directory'] = directory
return f(*args, **kwds)
decorated.__name__ = f.__name__
return decorated
def run(args, **kwargs):
"""
Call to Popen, returns (errcode, stdout, stderr)
"""
print("run:", args)
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
cwd=kwargs.get('cwd', os.getcwd()))
print("P==", p.__dict__)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout, stderr)
def assert_cmd_success(cmd, **kwargs):
"""
Asserts that running a command returns zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, **kwargs)
print("<<<", str(out))
assert r == 0, "cmd failed with result %s:\n %s " % (r, str(cmd))
return out
def assert_cmd_failure(cmd, **kwargs):
"""
Asserts that running a command returns non-zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, withexitstatus=True, **kwargs)
print("<<<", str(out))
assert 0 != r, "cmd succeeded, but it should fail: %s result=%u\noutput=\n%s" % (cmd, r, out)
return out
def assert_files_exist(prefix, files):
"""
Assert that all files exist in the prefix.
"""
for f in files:
p = os.path.join(prefix, f)
print("Checking for", p)
assert os.path.exists(p), "%s doesn't exist" % p
| temporary_directory | identifier_name |
utils.py | import functools
import os
import re
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from subprocess import TimeoutExpired
from catkin_tools.commands.catkin import main as catkin_main
TESTS_DIR = os.path.dirname(__file__)
MOCK_DIR = os.path.join(TESTS_DIR, 'mock_resources')
def catkin_success(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
if ret != 0:
import traceback
traceback.print_exc()
finally:
os.environ = orig_environ
return ret == 0
def catkin_failure(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
finally:
os.environ = orig_environ
return ret != 0
class AssertRaisesContext(object):
def __init__(self, expected, expected_regex=None):
self.expected = expected
self.expected_regex = expected_regex
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if self.expected is None:
if exc_type is None:
|
else:
raise
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise AssertionError("{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
raise
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
expected_regex = re.compile(expected_regex)
if not expected_regex.search(str(exc_value)):
raise AssertionError("'{0}' does not match '{1}'".format(expected_regex.pattern, str(exc_value)))
return True
class redirected_stdio(object):
def __enter__(self):
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
self.out = StringIO()
self.err = StringIO()
sys.stdout = self.out
sys.stderr = self.err
return self.out, self.err
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
print(self.out.getvalue())
class temporary_directory(object):
def __init__(self, prefix=''):
self.prefix = prefix
self.delete = False
def __enter__(self):
self.original_cwd = os.getcwd()
self.temp_path = tempfile.mkdtemp(prefix=self.prefix)
os.chdir(self.temp_path)
return self.temp_path
def __exit__(self, exc_type, exc_value, traceback):
if self.delete and self.temp_path and os.path.exists(self.temp_path):
print('Deleting temporary testing directory: %s' % self.temp_path)
shutil.rmtree(self.temp_path)
if self.original_cwd and os.path.exists(self.original_cwd):
os.chdir(self.original_cwd)
def in_temporary_directory(f):
@functools.wraps(f)
def decorated(*args, **kwds):
with temporary_directory() as directory:
from inspect import getargspec
# If it takes directory of kwargs and kwds does already have
# directory, inject it
if 'directory' not in kwds and 'directory' in getargspec(f)[0]:
kwds['directory'] = directory
return f(*args, **kwds)
decorated.__name__ = f.__name__
return decorated
def run(args, **kwargs):
"""
Call to Popen, returns (errcode, stdout, stderr)
"""
print("run:", args)
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
cwd=kwargs.get('cwd', os.getcwd()))
print("P==", p.__dict__)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout, stderr)
def assert_cmd_success(cmd, **kwargs):
"""
Asserts that running a command returns zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, **kwargs)
print("<<<", str(out))
assert r == 0, "cmd failed with result %s:\n %s " % (r, str(cmd))
return out
def assert_cmd_failure(cmd, **kwargs):
"""
Asserts that running a command returns non-zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, withexitstatus=True, **kwargs)
print("<<<", str(out))
assert 0 != r, "cmd succeeded, but it should fail: %s result=%u\noutput=\n%s" % (cmd, r, out)
return out
def assert_files_exist(prefix, files):
"""
Assert that all files exist in the prefix.
"""
for f in files:
p = os.path.join(prefix, f)
print("Checking for", p)
assert os.path.exists(p), "%s doesn't exist" % p
| return True | conditional_block |
utils.py | import functools
import os
import re
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from subprocess import TimeoutExpired
from catkin_tools.commands.catkin import main as catkin_main
TESTS_DIR = os.path.dirname(__file__)
MOCK_DIR = os.path.join(TESTS_DIR, 'mock_resources')
def catkin_success(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
if ret != 0:
import traceback
traceback.print_exc()
finally:
os.environ = orig_environ
return ret == 0
def catkin_failure(args, env={}):
orig_environ = dict(os.environ)
try:
os.environ.update(env)
catkin_main(args)
except SystemExit as exc:
ret = exc.code
finally:
os.environ = orig_environ
return ret != 0
class AssertRaisesContext(object):
def __init__(self, expected, expected_regex=None):
self.expected = expected
self.expected_regex = expected_regex
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if self.expected is None:
if exc_type is None:
return True
else:
raise
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise AssertionError("{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
raise
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
expected_regex = re.compile(expected_regex)
if not expected_regex.search(str(exc_value)):
raise AssertionError("'{0}' does not match '{1}'".format(expected_regex.pattern, str(exc_value)))
return True
class redirected_stdio(object):
def __enter__(self):
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
self.out = StringIO()
self.err = StringIO()
sys.stdout = self.out
sys.stderr = self.err
return self.out, self.err
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
print(self.out.getvalue())
class temporary_directory(object):
def __init__(self, prefix=''):
self.prefix = prefix
self.delete = False
def __enter__(self):
self.original_cwd = os.getcwd()
self.temp_path = tempfile.mkdtemp(prefix=self.prefix)
os.chdir(self.temp_path)
return self.temp_path
def __exit__(self, exc_type, exc_value, traceback):
|
def in_temporary_directory(f):
@functools.wraps(f)
def decorated(*args, **kwds):
with temporary_directory() as directory:
from inspect import getargspec
# If it takes directory of kwargs and kwds does already have
# directory, inject it
if 'directory' not in kwds and 'directory' in getargspec(f)[0]:
kwds['directory'] = directory
return f(*args, **kwds)
decorated.__name__ = f.__name__
return decorated
def run(args, **kwargs):
"""
Call to Popen, returns (errcode, stdout, stderr)
"""
print("run:", args)
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
cwd=kwargs.get('cwd', os.getcwd()))
print("P==", p.__dict__)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout, stderr)
def assert_cmd_success(cmd, **kwargs):
"""
Asserts that running a command returns zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, **kwargs)
print("<<<", str(out))
assert r == 0, "cmd failed with result %s:\n %s " % (r, str(cmd))
return out
def assert_cmd_failure(cmd, **kwargs):
"""
Asserts that running a command returns non-zero.
returns: stdout
"""
print(">>>", cmd, kwargs)
(r, out, err) = run(cmd, withexitstatus=True, **kwargs)
print("<<<", str(out))
assert 0 != r, "cmd succeeded, but it should fail: %s result=%u\noutput=\n%s" % (cmd, r, out)
return out
def assert_files_exist(prefix, files):
"""
Assert that all files exist in the prefix.
"""
for f in files:
p = os.path.join(prefix, f)
print("Checking for", p)
assert os.path.exists(p), "%s doesn't exist" % p
| if self.delete and self.temp_path and os.path.exists(self.temp_path):
print('Deleting temporary testing directory: %s' % self.temp_path)
shutil.rmtree(self.temp_path)
if self.original_cwd and os.path.exists(self.original_cwd):
os.chdir(self.original_cwd) | identifier_body |
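One more usage sketch, for the two context managers above that the earlier example did not cover (`AssertRaisesContext` and `redirected_stdio`); again a hypothetical test under the same `utils` import assumption:

```python
from utils import AssertRaisesContext, redirected_stdio

def divide(a, b):
    return a / b

# Assert that a specific exception (with a matching message) is raised.
with AssertRaisesContext(ZeroDivisionError, expected_regex='division'):
    divide(1, 0)

# Capture stdout/stderr produced inside the block; note that __exit__
# echoes the captured stdout after the block finishes.
with redirected_stdio() as (out, err):
    print('hello from the test')
assert 'hello from the test' in out.getvalue()
```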
symbol_query_spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {StaticSymbol} from '@angular/compiler';
import {AngularCompilerOptions, CompilerHost} from '@angular/compiler-cli';
import {EmittingCompilerHost, MockAotCompilerHost, MockCompilerHost, MockData, MockDirectory, MockMetadataBundlerHost, arrayToMockDir, arrayToMockMap, isSource, settings, setup, toMockFileArray} from '@angular/compiler/test/aot/test_util';
import * as ts from 'typescript';
import {Symbol, SymbolQuery, SymbolTable} from '../../src/diagnostics/symbols';
import {getSymbolQuery} from '../../src/diagnostics/typescript_symbols';
import {Directory} from '../mocks';
import {DiagnosticContext, MockLanguageServiceHost} from './mocks';
function | (): SymbolTable {
return {
size: 0,
get(key: string) { return undefined; },
has(key: string) { return false; },
values(): Symbol[]{return [];}
};
}
describe('symbol query', () => {
let program: ts.Program;
let checker: ts.TypeChecker;
let sourceFile: ts.SourceFile;
let query: SymbolQuery;
let context: DiagnosticContext;
beforeEach(() => {
const registry = ts.createDocumentRegistry(false, '/src');
const host = new MockLanguageServiceHost(
['/quickstart/app/app.component.ts'], QUICKSTART, '/quickstart');
const service = ts.createLanguageService(host, registry);
program = service.getProgram();
checker = program.getTypeChecker();
sourceFile = program.getSourceFile('/quickstart/app/app.component.ts');
const options: AngularCompilerOptions = Object.create(host.getCompilationSettings());
options.genDir = '/dist';
options.basePath = '/quickstart';
const aotHost = new CompilerHost(program, options, host, {verboseInvalidExpression: true});
context = new DiagnosticContext(service, program, checker, aotHost);
query = getSymbolQuery(program, checker, sourceFile, emptyPipes);
});
it('should be able to get undefined for an unknown symbol', () => {
const unknownType = context.getStaticSymbol('/unknown/file.ts', 'UnknownType');
const symbol = query.getTypeSymbol(unknownType);
expect(symbol).toBeUndefined();
});
});
function appComponentSource(template: string): string {
return `
import {Component} from '@angular/core';
export interface Person {
name: string;
address: Address;
}
export interface Address {
street: string;
city: string;
state: string;
zip: string;
}
@Component({
template: '${template}'
})
export class AppComponent {
name = 'Angular';
person: Person;
people: Person[];
maybePerson?: Person;
getName(): string { return this.name; }
getPerson(): Person { return this.person; }
getMaybePerson(): Person | undefined { this.maybePerson; }
}
`;
}
const QUICKSTART: Directory = {
quickstart: {
app: {
'app.component.ts': appComponentSource('<h1>Hello {{name}}</h1>'),
'app.module.ts': `
import { NgModule } from '@angular/core';
import { toString } from './utils';
import { AppComponent } from './app.component';
@NgModule({
declarations: [ AppComponent ],
bootstrap: [ AppComponent ]
})
export class AppModule { }
`
}
}
};
| emptyPipes | identifier_name |
symbol_query_spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {StaticSymbol} from '@angular/compiler';
import {AngularCompilerOptions, CompilerHost} from '@angular/compiler-cli';
import {EmittingCompilerHost, MockAotCompilerHost, MockCompilerHost, MockData, MockDirectory, MockMetadataBundlerHost, arrayToMockDir, arrayToMockMap, isSource, settings, setup, toMockFileArray} from '@angular/compiler/test/aot/test_util';
import * as ts from 'typescript';
import {Symbol, SymbolQuery, SymbolTable} from '../../src/diagnostics/symbols';
import {getSymbolQuery} from '../../src/diagnostics/typescript_symbols';
import {Directory} from '../mocks';
import {DiagnosticContext, MockLanguageServiceHost} from './mocks';
function emptyPipes(): SymbolTable {
return {
size: 0,
get(key: string) { return undefined; },
has(key: string) { return false; },
values(): Symbol[]{return [];}
};
}
describe('symbol query', () => {
let program: ts.Program;
let checker: ts.TypeChecker;
let sourceFile: ts.SourceFile;
let query: SymbolQuery;
let context: DiagnosticContext;
beforeEach(() => {
const registry = ts.createDocumentRegistry(false, '/src');
const host = new MockLanguageServiceHost(
['/quickstart/app/app.component.ts'], QUICKSTART, '/quickstart');
const service = ts.createLanguageService(host, registry);
program = service.getProgram();
checker = program.getTypeChecker();
sourceFile = program.getSourceFile('/quickstart/app/app.component.ts');
const options: AngularCompilerOptions = Object.create(host.getCompilationSettings());
options.genDir = '/dist';
options.basePath = '/quickstart';
const aotHost = new CompilerHost(program, options, host, {verboseInvalidExpression: true});
context = new DiagnosticContext(service, program, checker, aotHost);
query = getSymbolQuery(program, checker, sourceFile, emptyPipes);
});
it('should be able to get undefined for an unknown symbol', () => {
const unknownType = context.getStaticSymbol('/unknown/file.ts', 'UnknownType');
const symbol = query.getTypeSymbol(unknownType);
expect(symbol).toBeUndefined();
});
});
function appComponentSource(template: string): string {
return `
import {Component} from '@angular/core';
export interface Person {
name: string;
address: Address;
}
export interface Address {
street: string;
city: string;
state: string;
zip: string;
}
@Component({
template: '${template}' | export class AppComponent {
name = 'Angular';
person: Person;
people: Person[];
maybePerson?: Person;
getName(): string { return this.name; }
getPerson(): Person { return this.person; }
getMaybePerson(): Person | undefined { this.maybePerson; }
}
`;
}
const QUICKSTART: Directory = {
quickstart: {
app: {
'app.component.ts': appComponentSource('<h1>Hello {{name}}</h1>'),
'app.module.ts': `
import { NgModule } from '@angular/core';
import { toString } from './utils';
import { AppComponent } from './app.component';
@NgModule({
declarations: [ AppComponent ],
bootstrap: [ AppComponent ]
})
export class AppModule { }
`
}
}
}; | }) | random_line_split |
symbol_query_spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {StaticSymbol} from '@angular/compiler';
import {AngularCompilerOptions, CompilerHost} from '@angular/compiler-cli';
import {EmittingCompilerHost, MockAotCompilerHost, MockCompilerHost, MockData, MockDirectory, MockMetadataBundlerHost, arrayToMockDir, arrayToMockMap, isSource, settings, setup, toMockFileArray} from '@angular/compiler/test/aot/test_util';
import * as ts from 'typescript';
import {Symbol, SymbolQuery, SymbolTable} from '../../src/diagnostics/symbols';
import {getSymbolQuery} from '../../src/diagnostics/typescript_symbols';
import {Directory} from '../mocks';
import {DiagnosticContext, MockLanguageServiceHost} from './mocks';
function emptyPipes(): SymbolTable {
return {
size: 0,
get(key: string) | ,
has(key: string) { return false; },
values(): Symbol[]{return [];}
};
}
describe('symbol query', () => {
let program: ts.Program;
let checker: ts.TypeChecker;
let sourceFile: ts.SourceFile;
let query: SymbolQuery;
let context: DiagnosticContext;
beforeEach(() => {
const registry = ts.createDocumentRegistry(false, '/src');
const host = new MockLanguageServiceHost(
['/quickstart/app/app.component.ts'], QUICKSTART, '/quickstart');
const service = ts.createLanguageService(host, registry);
program = service.getProgram();
checker = program.getTypeChecker();
sourceFile = program.getSourceFile('/quickstart/app/app.component.ts');
const options: AngularCompilerOptions = Object.create(host.getCompilationSettings());
options.genDir = '/dist';
options.basePath = '/quickstart';
const aotHost = new CompilerHost(program, options, host, {verboseInvalidExpression: true});
context = new DiagnosticContext(service, program, checker, aotHost);
query = getSymbolQuery(program, checker, sourceFile, emptyPipes);
});
it('should be able to get undefined for an unknown symbol', () => {
const unknownType = context.getStaticSymbol('/unknown/file.ts', 'UnknownType');
const symbol = query.getTypeSymbol(unknownType);
expect(symbol).toBeUndefined();
});
});
function appComponentSource(template: string): string {
return `
import {Component} from '@angular/core';
export interface Person {
name: string;
address: Address;
}
export interface Address {
street: string;
city: string;
state: string;
zip: string;
}
@Component({
template: '${template}'
})
export class AppComponent {
name = 'Angular';
person: Person;
people: Person[];
maybePerson?: Person;
getName(): string { return this.name; }
getPerson(): Person { return this.person; }
getMaybePerson(): Person | undefined { this.maybePerson; }
}
`;
}
const QUICKSTART: Directory = {
quickstart: {
app: {
'app.component.ts': appComponentSource('<h1>Hello {{name}}</h1>'),
'app.module.ts': `
import { NgModule } from '@angular/core';
import { toString } from './utils';
import { AppComponent } from './app.component';
@NgModule({
declarations: [ AppComponent ],
bootstrap: [ AppComponent ]
})
export class AppModule { }
`
}
}
};
| { return undefined; } | identifier_body |
app.module.ts | /// <reference path="../globals.d.ts" />
import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { BrowserModule } from '@angular/platform-browser';
import { HttpModule } from '@angular/http';
import {
AppComponent,
HeaderComponent,
IndexComponent,
CardListComponent,
CardListItemComponent,
CardCollapsedComponent,
CardExpandedComponent,
LazyScrollDirective,
RuneSpriteComponent,
RuneSearchComponent,
RuneLoginComponent,
RuneCollectionComponent
} from './components';
import {
SelectedCardStore,
ScrollPositionStore,
CardListStore,
CollectionStore,
ViewStore,
SetStore,
CurrentUserStore
} from './stores';
import {
DispatcherService,
AuthTokenService,
CardSetService,
CardService,
CollectionService,
SetService,
UserService,
StoreInitializerService,
UrlService
} from './services';
@NgModule({
imports: [
BrowserModule,
HttpModule,
RouterModule.forRoot([
{ path: 'login', component: RuneLoginComponent },
{ path: '', component: RuneCollectionComponent }
])
],
declarations: [
AppComponent,
HeaderComponent,
IndexComponent,
CardListComponent,
CardListItemComponent,
CardCollapsedComponent,
CardExpandedComponent,
LazyScrollDirective,
RuneSpriteComponent,
RuneSearchComponent,
RuneCollectionComponent,
RuneLoginComponent
],
providers: [
CardSetService,
CardService, | CollectionService,
SetService,
UserService,
AuthTokenService,
StoreInitializerService,
CardListStore,
CollectionStore,
CurrentUserStore,
DispatcherService,
ScrollPositionStore,
SelectedCardStore,
SetStore,
ViewStore,
UrlService
],
bootstrap: [ AppComponent ]
})
export class AppModule { } | random_line_split |
|
app.module.ts | /// <reference path="../globals.d.ts" />
import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { BrowserModule } from '@angular/platform-browser';
import { HttpModule } from '@angular/http';
import {
AppComponent,
HeaderComponent,
IndexComponent,
CardListComponent,
CardListItemComponent,
CardCollapsedComponent,
CardExpandedComponent,
LazyScrollDirective,
RuneSpriteComponent,
RuneSearchComponent,
RuneLoginComponent,
RuneCollectionComponent
} from './components';
import {
SelectedCardStore,
ScrollPositionStore,
CardListStore,
CollectionStore,
ViewStore,
SetStore,
CurrentUserStore
} from './stores';
import {
DispatcherService,
AuthTokenService,
CardSetService,
CardService,
CollectionService,
SetService,
UserService,
StoreInitializerService,
UrlService
} from './services';
@NgModule({
imports: [
BrowserModule,
HttpModule,
RouterModule.forRoot([
{ path: 'login', component: RuneLoginComponent },
{ path: '', component: RuneCollectionComponent }
])
],
declarations: [
AppComponent,
HeaderComponent,
IndexComponent,
CardListComponent,
CardListItemComponent,
CardCollapsedComponent,
CardExpandedComponent,
LazyScrollDirective,
RuneSpriteComponent,
RuneSearchComponent,
RuneCollectionComponent,
RuneLoginComponent
],
providers: [
CardSetService,
CardService,
CollectionService,
SetService,
UserService,
AuthTokenService,
StoreInitializerService,
CardListStore,
CollectionStore,
CurrentUserStore,
DispatcherService,
ScrollPositionStore,
SelectedCardStore,
SetStore,
ViewStore,
UrlService
],
bootstrap: [ AppComponent ]
})
export class | { }
| AppModule | identifier_name |
sampler_no_sparsity.py | import numpy as np
import pypolyagamma as pypolyagamma
def calculate_D(S):
N = S.shape[1]
D = np.empty((N, N))
for i in range(N):
for j in range(N):
D[i, j] = np.dot(S[1:, i].T, S[:-1, j])
return D * 0.5
def calculate_C_w(S, w_i):
w_mat = np.diag(w_i)
return np.dot(S.T, np.dot(w_mat, S))
def sample_w_i(S, J_i):
"""
:param S: observation matrix
:param J_i: neuron i's couplings
:return: samples for w_i from a polyagamma distribution
"""
ppg = pypolyagamma.PyPolyaGamma(np.random.randint(2 ** 16))
T = S.shape[0]
A = np.ones(T)
w_i = np.zeros(T)
ppg.pgdrawv(A, np.dot(S, J_i), w_i)
return w_i
def sample_J_i(S, C, D_i, w_i, sigma_J):
N = S.shape[1]
cov_mat = (1. / sigma_J) * np.identity(N)
cov = np.linalg.inv(C + cov_mat)
mean = np.dot(cov, D_i)
J_i = np.random.multivariate_normal(mean, cov)
return J_i
def sample_neuron(samp_num, burnin, sigma_J, S, D_i, ro, thin=0, save_all=True):
""" This function uses the Gibbs sampler to sample from w, gamma and J
:param samp_num: Number of samples to be drawn
:param burnin: Number of samples to burn in
:param sigma_J: variance of the J slab
:param S: Neurons' activity matrix. Including S0. (T + 1) x N
:param C: observation correlation matrix. N x N
:param D_i: time delay correlations of neuron i. N
:return: samp_num samples (each one of length K (time_steps)) from the posterior distribution for w,x,z.
"""
T, N = S.shape
# actual number of samples needed with thinning and burn-in
if (thin != 0):
N_s = samp_num * thin + burnin
else:
N_s = samp_num + burnin
samples_w_i = np.zeros((N_s, T), dtype=np.float32)
samples_J_i = np.zeros((N_s, N), dtype=np.float32)
J_i = np.random.normal(0, sigma_J, N)
for i in range(N_s):
w_i = sample_w_i(S, J_i)
C_w_i = calculate_C_w(S, w_i)
J_i = sample_J_i(S, C_w_i, D_i, w_i, sigma_J)
samples_w_i[i, :] = w_i
samples_J_i[i, :] = J_i
if thin == 0:
|
else:
return samples_w_i[burnin:N_s:thin, :], samples_J_i[burnin:N_s:thin, :]
| return samples_w_i[burnin:, :], samples_J_i[burnin:, :] | conditional_block |
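The two Gibbs steps above implement the Pólya-Gamma augmentation for logistic likelihoods (Polson, Scott & Windle, 2013). In the code's notation — `S` the spike matrix, `D_i` the delayed correlations, `C` the output of `calculate_C_w` — the conditionals being drawn are, as far as I read the code:

```latex
\omega_t \mid J_i \;\sim\; \mathrm{PG}\!\left(1,\; s_t^{\top} J_i\right),
\qquad
J_i \mid \omega \;\sim\; \mathcal{N}\!\left(\Sigma D_i,\; \Sigma\right),
\qquad
\Sigma \;=\; \left(S^{\top}\operatorname{diag}(\omega)\,S + \sigma_J^{-1} I\right)^{-1},
```

which matches `sample_w_i` (`pgdrawv` with shape parameter 1) and `sample_J_i` (`cov = inv(C + (1/sigma_J) I)`, `mean = cov · D_i`).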
sampler_no_sparsity.py | import numpy as np
import pypolyagamma as pypolyagamma
def | (S):
N = S.shape[1]
D = np.empty((N, N))
for i in range(N):
for j in range(N):
D[i, j] = np.dot(S[1:, i].T, S[:-1, j])
return D * 0.5
def calculate_C_w(S, w_i):
w_mat = np.diag(w_i)
return np.dot(S.T, np.dot(w_mat, S))
def sample_w_i(S, J_i):
"""
:param S: observation matrix
:param J_i: neuron i's couplings
:return: samples for w_i from a polyagamma distribution
"""
ppg = pypolyagamma.PyPolyaGamma(np.random.randint(2 ** 16))
T = S.shape[0]
A = np.ones(T)
w_i = np.zeros(T)
ppg.pgdrawv(A, np.dot(S, J_i), w_i)
return w_i
def sample_J_i(S, C, D_i, w_i, sigma_J):
N = S.shape[1]
cov_mat = (1. / sigma_J) * np.identity(N)
cov = np.linalg.inv(C + cov_mat)
mean = np.dot(cov, D_i)
J_i = np.random.multivariate_normal(mean, cov)
return J_i
def sample_neuron(samp_num, burnin, sigma_J, S, D_i, ro, thin=0, save_all=True):
""" This function uses the Gibbs sampler to sample from w, gamma and J
:param samp_num: Number of samples to be drawn
:param burnin: Number of samples to burn in
:param sigma_J: variance of the J slab
:param S: Neurons' activity matrix. Including S0. (T + 1) x N
:param C: observation correlation matrix. N x N
:param D_i: time delay correlations of neuron i. N
:return: samp_num samples (each one of length K (time_steps)) from the posterior distribution for w,x,z.
"""
T, N = S.shape
# actual number of samples needed with thinning and burn-in
if (thin != 0):
N_s = samp_num * thin + burnin
else:
N_s = samp_num + burnin
samples_w_i = np.zeros((N_s, T), dtype=np.float32)
samples_J_i = np.zeros((N_s, N), dtype=np.float32)
J_i = np.random.normal(0, sigma_J, N)
for i in range(N_s):
w_i = sample_w_i(S, J_i)
C_w_i = calculate_C_w(S, w_i)
J_i = sample_J_i(S, C_w_i, D_i, w_i, sigma_J)
samples_w_i[i, :] = w_i
samples_J_i[i, :] = J_i
if thin == 0:
return samples_w_i[burnin:, :], samples_J_i[burnin:, :]
else:
return samples_w_i[burnin:N_s:thin, :], samples_J_i[burnin:N_s:thin, :]
| calculate_D | identifier_name |
sampler_no_sparsity.py | import numpy as np
import pypolyagamma as pypolyagamma
def calculate_D(S):
N = S.shape[1]
D = np.empty((N, N))
for i in range(N):
for j in range(N):
D[i, j] = np.dot(S[1:, i].T, S[:-1, j])
return D * 0.5
def calculate_C_w(S, w_i):
w_mat = np.diag(w_i)
return np.dot(S.T, np.dot(w_mat, S))
def sample_w_i(S, J_i):
"""
:param S: observation matrix
:param J_i: neuron i's couplings
:return: samples for w_i from a polyagamma distribution
"""
ppg = pypolyagamma.PyPolyaGamma(np.random.randint(2 ** 16))
T = S.shape[0]
A = np.ones(T)
w_i = np.zeros(T)
ppg.pgdrawv(A, np.dot(S, J_i), w_i)
return w_i
def sample_J_i(S, C, D_i, w_i, sigma_J):
N = S.shape[1]
cov_mat = (1. / sigma_J) * np.identity(N)
cov = np.linalg.inv(C + cov_mat)
mean = np.dot(cov, D_i)
J_i = np.random.multivariate_normal(mean, cov)
return J_i
def sample_neuron(samp_num, burnin, sigma_J, S, D_i, ro, thin=0, save_all=True):
| """ This function uses the Gibbs sampler to sample from w, gamma and J
:param samp_num: Number of samples to be drawn
:param burnin: Number of samples to burn in
:param sigma_J: variance of the J slab
:param S: Neurons' activity matrix. Including S0. (T + 1) x N
:param C: observation correlation matrix. N x N
:param D_i: time delay correlations of neuron i. N
:return: samp_num samples (each one of length K (time_steps)) from the posterior distribution for w,x,z.
"""
T, N = S.shape
# actual number of samples needed with thinning and burn-in
if (thin != 0):
N_s = samp_num * thin + burnin
else:
N_s = samp_num + burnin
samples_w_i = np.zeros((N_s, T), dtype=np.float32)
samples_J_i = np.zeros((N_s, N), dtype=np.float32)
J_i = np.random.normal(0, sigma_J, N)
for i in range(N_s):
w_i = sample_w_i(S, J_i)
C_w_i = calculate_C_w(S, w_i)
J_i = sample_J_i(S, C_w_i, D_i, w_i, sigma_J)
samples_w_i[i, :] = w_i
samples_J_i[i, :] = J_i
if thin == 0:
return samples_w_i[burnin:, :], samples_J_i[burnin:, :]
else:
return samples_w_i[burnin:N_s:thin, :], samples_J_i[burnin:N_s:thin, :] | identifier_body |
|
sampler_no_sparsity.py | import numpy as np
import pypolyagamma as pypolyagamma
def calculate_D(S):
N = S.shape[1]
D = np.empty((N, N))
for i in range(N):
for j in range(N):
D[i, j] = np.dot(S[1:, i].T, S[:-1, j])
return D * 0.5 |
def calculate_C_w(S, w_i):
w_mat = np.diag(w_i)
return np.dot(S.T, np.dot(w_mat, S))
def sample_w_i(S, J_i):
"""
:param S: observation matrix
:param J_i: neuron i's couplings
:return: samples for w_i from a polyagamma distribution
"""
ppg = pypolyagamma.PyPolyaGamma(np.random.randint(2 ** 16))
T = S.shape[0]
A = np.ones(T)
w_i = np.zeros(T)
ppg.pgdrawv(A, np.dot(S, J_i), w_i)
return w_i
def sample_J_i(S, C, D_i, w_i, sigma_J):
N = S.shape[1]
cov_mat = (1. / sigma_J) * np.identity(N)
cov = np.linalg.inv(C + cov_mat)
mean = np.dot(cov, D_i)
J_i = np.random.multivariate_normal(mean, cov)
return J_i
def sample_neuron(samp_num, burnin, sigma_J, S, D_i, ro, thin=0, save_all=True):
""" This function uses the Gibbs sampler to sample from w, gamma and J
:param samp_num: Number of samples to be drawn
:param burnin: Number of samples to burn in
:param sigma_J: variance of the J slab
:param S: Neurons' activity matrix. Including S0. (T + 1) x N
:param C: observation correlation matrix. N x N
:param D_i: time delay correlations of neuron i. N
:return: samp_num samples (each one of length K (time_steps)) from the posterior distribution for w,x,z.
"""
T, N = S.shape
# actual number of samples needed with thinning and burn-in
if (thin != 0):
N_s = samp_num * thin + burnin
else:
N_s = samp_num + burnin
samples_w_i = np.zeros((N_s, T), dtype=np.float32)
samples_J_i = np.zeros((N_s, N), dtype=np.float32)
J_i = np.random.normal(0, sigma_J, N)
for i in range(N_s):
w_i = sample_w_i(S, J_i)
C_w_i = calculate_C_w(S, w_i)
J_i = sample_J_i(S, C_w_i, D_i, w_i, sigma_J)
samples_w_i[i, :] = w_i
samples_J_i[i, :] = J_i
if thin == 0:
return samples_w_i[burnin:, :], samples_J_i[burnin:, :]
else:
return samples_w_i[burnin:N_s:thin, :], samples_J_i[burnin:N_s:thin, :] | random_line_split |
|
get_started.py | # Copyright 2014-2018 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""A simple example to get started with SPDHG [CERS2017]. The example at hand
solves the ROF denoising problem.
Reference
---------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
"""
from __future__ import division, print_function
import odl
import odl.contrib.solvers.spdhg as spdhg
import odl.contrib.datasets.images as images
import numpy as np
# set ground truth and data
image_gray = images.building(gray=True)
X = odl.uniform_discr([0, 0], image_gray.shape, image_gray.shape)
groundtruth = X.element(image_gray)
data = odl.phantom.white_noise(X, mean=groundtruth, stddev=0.1, seed=1807)
# set parameter
alpha = .12 # regularisation parameter
nepoch = 100
# set functionals and operator
A = odl.BroadcastOperator(*[odl.PartialDerivative(X, d, pad_mode='symmetric')
for d in [0, 1]])
f = odl.solvers.SeparableSum(*[odl.solvers.L1Norm(Yi) for Yi in A.range])
g = 1 / (2 * alpha) * odl.solvers.L2NormSquared(X).translated(data)
# set sampling
n = 2 # number of subsets
prob = [1 / n] * n # probability that a subset gets selected
S = [[0], [1]] # all possible subsets to select from
def fun_select(k): # subset selection function
|
# set parameters for algorithm
Ai_norm = [2, 2]
gamma = 0.99
sigma = [gamma / a for a in Ai_norm]
tau = gamma / (n * max(Ai_norm))
# callback for output during the iterations
cb = (odl.solvers.CallbackPrintIteration(fmt='iter:{:4d}', step=n, end=', ') &
odl.solvers.CallbackPrintTiming(fmt='time: {:5.2f} s', cumulative=True,
step=n))
# initialise variable and run algorithm
x = X.zero()
niter = 2 * nepoch
spdhg.spdhg(x, f, g, A, tau, sigma, niter, prob=prob, fun_select=fun_select,
callback=cb)
# show data and output
data.show()
x.show()
| return S[int(np.random.choice(n, 1, p=prob))] | identifier_body |
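Written out, the problem the script above solves (my transcription of `A`, `f` and `g`) is anisotropic ROF denoising in the form SPDHG expects, a sum of n = 2 dual blocks plus a strongly convex data term:

```latex
\min_{x}\;\; \sum_{i=1}^{2} \bigl\| \partial_i x \bigr\|_{1}
\;+\; \frac{1}{2\alpha}\,\bigl\| x - b \bigr\|_{2}^{2},
```

where the ∂_i are the two `PartialDerivative` operators and b is the noisy `data`. Each iteration picks one block i with probability 1/2 (`fun_select`) and updates only that dual variable, with step sizes σ_i = γ/‖A_i‖ and τ = γ/(n·maxᵢ‖A_i‖) as set in the code.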
get_started.py | # Copyright 2014-2018 The ODL contributors
#
# This file is part of ODL. | # This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""A simple example to get started with SPDHG [CERS2017]. The example at hand
solves the ROF denoising problem.
Reference
---------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
"""
from __future__ import division, print_function
import odl
import odl.contrib.solvers.spdhg as spdhg
import odl.contrib.datasets.images as images
import numpy as np
# set ground truth and data
image_gray = images.building(gray=True)
X = odl.uniform_discr([0, 0], image_gray.shape, image_gray.shape)
groundtruth = X.element(image_gray)
data = odl.phantom.white_noise(X, mean=groundtruth, stddev=0.1, seed=1807)
# set parameter
alpha = .12 # regularisation parameter
nepoch = 100
# set functionals and operator
A = odl.BroadcastOperator(*[odl.PartialDerivative(X, d, pad_mode='symmetric')
for d in [0, 1]])
f = odl.solvers.SeparableSum(*[odl.solvers.L1Norm(Yi) for Yi in A.range])
g = 1 / (2 * alpha) * odl.solvers.L2NormSquared(X).translated(data)
# set sampling
n = 2 # number of subsets
prob = [1 / n] * n # probability that a subset gets selected
S = [[0], [1]] # all possible subsets to select from
def fun_select(k): # subset selection function
return S[int(np.random.choice(n, 1, p=prob))]
# set parameters for algorithm
Ai_norm = [2, 2]
gamma = 0.99
sigma = [gamma / a for a in Ai_norm]
tau = gamma / (n * max(Ai_norm))
# callback for output during the iterations
cb = (odl.solvers.CallbackPrintIteration(fmt='iter:{:4d}', step=n, end=', ') &
odl.solvers.CallbackPrintTiming(fmt='time: {:5.2f} s', cumulative=True,
step=n))
# initialise variable and run algorithm
x = X.zero()
niter = 2 * nepoch
spdhg.spdhg(x, f, g, A, tau, sigma, niter, prob=prob, fun_select=fun_select,
callback=cb)
# show data and output
data.show()
x.show() | # | random_line_split |
get_started.py | # Copyright 2014-2018 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""A simple example to get started with SPDHG [CERS2017]. The example at hand
solves the ROF denoising problem.
Reference
---------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
"""
from __future__ import division, print_function
import odl
import odl.contrib.solvers.spdhg as spdhg
import odl.contrib.datasets.images as images
import numpy as np
# set ground truth and data
image_gray = images.building(gray=True)
X = odl.uniform_discr([0, 0], image_gray.shape, image_gray.shape)
groundtruth = X.element(image_gray)
data = odl.phantom.white_noise(X, mean=groundtruth, stddev=0.1, seed=1807)
# set parameter
alpha = .12 # regularisation parameter
nepoch = 100
# set functionals and operator
A = odl.BroadcastOperator(*[odl.PartialDerivative(X, d, pad_mode='symmetric')
for d in [0, 1]])
f = odl.solvers.SeparableSum(*[odl.solvers.L1Norm(Yi) for Yi in A.range])
g = 1 / (2 * alpha) * odl.solvers.L2NormSquared(X).translated(data)
# set sampling
n = 2 # number of subsets
prob = [1 / n] * n # probability that a subset gets selected
S = [[0], [1]] # all possible subsets to select from
def | (k): # subset selection function
return S[int(np.random.choice(n, 1, p=prob))]
# set parameters for algorithm
Ai_norm = [2, 2]
gamma = 0.99
sigma = [gamma / a for a in Ai_norm]
tau = gamma / (n * max(Ai_norm))
# callback for output during the iterations
cb = (odl.solvers.CallbackPrintIteration(fmt='iter:{:4d}', step=n, end=', ') &
odl.solvers.CallbackPrintTiming(fmt='time: {:5.2f} s', cumulative=True,
step=n))
# initialise variable and run algorithm
x = X.zero()
niter = 2 * nepoch
spdhg.spdhg(x, f, g, A, tau, sigma, niter, prob=prob, fun_select=fun_select,
callback=cb)
# show data and output
data.show()
x.show()
| fun_select | identifier_name |
category_tags.py | from django import template
from django.shortcuts import render_to_response, redirect, get_object_or_404
# from product.models import Slide
register = template.Library()
# @register.inclusion_tag('slides/slides.html')
# def get_main_slides():
# slides = Slide.objects.filter(published_main=1).order_by('ordering')
# return {'slides': slides}
# @register.inclusion_tag('comments/comments.html')
# def comments(paket, item_model, item_id):
# from comments.models import Comments
# nodes = Comments.objects.filter(paket=paket, item_model=item_model,item_id=item_id, published=1)
# return {'nodes':nodes, 'paket':paket, 'item_model':item_model, 'item_id':item_id}
# @register.filter(name='suit_conf')
# def suit_conf(name):
# value = get_config(name)
# return mark_safe(value) if isinstance(value, str) else value
# @register.tag
# def suit_date(parser, token):
# return NowNode(get_config('HEADER_DATE_FORMAT'))
# @register.tag
# def suit_time(parser, token):
# return NowNode(get_config('HEADER_TIME_FORMAT'))
# @register.filter
# def field_contents_foreign_linked(admin_field):
# """Return the .contents attribute of the admin_field, and if it
# is a foreign key, wrap it in a link to the admin page for that
# object.
# Use by replacing '{{ field.contents }}' in an admin template (e.g.
# fieldset.html) with '{{ field|field_contents_foreign_linked }}'.
# """
# fieldname = admin_field.field['field']
# displayed = admin_field.contents() | # .linked_readonly_fields:
# return displayed
# try:
# fieldtype, attr, value = lookup_field(fieldname, obj,
# admin_field.model_admin)
# except ObjectDoesNotExist:
# fieldtype = None
# if isinstance(fieldtype, ForeignKey):
# try:
# url = admin_url(value)
# except NoReverseMatch:
# url = None
# if url:
# displayed = "<a href='%s'>%s</a>" % (url, displayed)
# return mark_safe(displayed)
# @register.filter
# def admin_url(obj):
# info = (obj._meta.app_label, obj._meta.module_name)
# return reverse("admin:%s_%s_change" % info, args=[obj.pk])
# @register.simple_tag
# def suit_bc(*args):
# return utils.value_by_version(args)
# @register.assignment_tag
# def suit_bc_value(*args):
# return utils.value_by_version(args) | # obj = admin_field.form.instance
# if not hasattr(admin_field.model_admin,
# 'linked_readonly_fields') or fieldname not in admin_field \
# .model_admin \ | random_line_split |
history.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HistoryBinding;
use dom::bindings::codegen::Bindings::HistoryBinding::HistoryMethods;
use dom::bindings::codegen::Bindings::LocationBinding::LocationMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::window::Window;
use ipc_channel::ipc;
use msg::constellation_msg::TraversalDirection;
use script_traits::ScriptMsg as ConstellationMsg;
// https://html.spec.whatwg.org/multipage/#the-history-interface
#[dom_struct]
pub struct History {
reflector_: Reflector,
window: JS<Window>,
}
impl History {
pub fn new_inherited(window: &Window) -> History {
History {
reflector_: Reflector::new(),
window: JS::from_ref(&window),
}
}
pub fn new(window: &Window) -> Root<History> |
}
impl History {
fn traverse_history(&self, direction: TraversalDirection) {
let pipeline = self.window.pipeline_id();
let msg = ConstellationMsg::TraverseHistory(Some(pipeline), direction);
let _ = self.window.constellation_chan().send(msg);
}
}
impl HistoryMethods for History {
// https://html.spec.whatwg.org/multipage/#dom-history-length
fn Length(&self) -> u32 {
let pipeline = self.window.pipeline_id();
let (sender, recv) = ipc::channel().expect("Failed to create channel to send jsh length.");
let msg = ConstellationMsg::JointSessionHistoryLength(pipeline, sender);
let _ = self.window.constellation_chan().send(msg);
recv.recv().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-history-go
fn Go(&self, delta: i32) {
let direction = if delta > 0 {
TraversalDirection::Forward(delta as usize)
} else if delta < 0 {
TraversalDirection::Back(-delta as usize)
} else {
self.window.Location().Reload();
return;
};
self.traverse_history(direction);
}
// https://html.spec.whatwg.org/multipage/#dom-history-back
fn Back(&self) {
self.traverse_history(TraversalDirection::Back(1));
}
// https://html.spec.whatwg.org/multipage/#dom-history-forward
fn Forward(&self) {
self.traverse_history(TraversalDirection::Forward(1));
}
}
| {
reflect_dom_object(box History::new_inherited(window),
GlobalRef::Window(window),
HistoryBinding::Wrap)
} | identifier_body |
history.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HistoryBinding;
use dom::bindings::codegen::Bindings::HistoryBinding::HistoryMethods;
use dom::bindings::codegen::Bindings::LocationBinding::LocationMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::window::Window;
use ipc_channel::ipc;
use msg::constellation_msg::TraversalDirection;
use script_traits::ScriptMsg as ConstellationMsg;
// https://html.spec.whatwg.org/multipage/#the-history-interface
#[dom_struct]
pub struct History {
reflector_: Reflector,
window: JS<Window>,
}
impl History {
pub fn new_inherited(window: &Window) -> History {
History {
reflector_: Reflector::new(),
window: JS::from_ref(&window),
}
}
pub fn new(window: &Window) -> Root<History> {
reflect_dom_object(box History::new_inherited(window),
GlobalRef::Window(window),
HistoryBinding::Wrap)
}
}
impl History {
fn traverse_history(&self, direction: TraversalDirection) {
let pipeline = self.window.pipeline_id();
let msg = ConstellationMsg::TraverseHistory(Some(pipeline), direction);
let _ = self.window.constellation_chan().send(msg);
}
}
impl HistoryMethods for History {
// https://html.spec.whatwg.org/multipage/#dom-history-length
fn Length(&self) -> u32 {
let pipeline = self.window.pipeline_id();
let (sender, recv) = ipc::channel().expect("Failed to create channel to send jsh length.");
let msg = ConstellationMsg::JointSessionHistoryLength(pipeline, sender);
let _ = self.window.constellation_chan().send(msg);
recv.recv().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-history-go
fn Go(&self, delta: i32) {
let direction = if delta > 0 {
TraversalDirection::Forward(delta as usize)
} else if delta < 0 | else {
self.window.Location().Reload();
return;
};
self.traverse_history(direction);
}
// https://html.spec.whatwg.org/multipage/#dom-history-back
fn Back(&self) {
self.traverse_history(TraversalDirection::Back(1));
}
// https://html.spec.whatwg.org/multipage/#dom-history-forward
fn Forward(&self) {
self.traverse_history(TraversalDirection::Forward(1));
}
}
| {
TraversalDirection::Back(-delta as usize)
} | conditional_block |
history.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HistoryBinding;
use dom::bindings::codegen::Bindings::HistoryBinding::HistoryMethods;
use dom::bindings::codegen::Bindings::LocationBinding::LocationMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::window::Window;
use ipc_channel::ipc;
use msg::constellation_msg::TraversalDirection;
use script_traits::ScriptMsg as ConstellationMsg;
// https://html.spec.whatwg.org/multipage/#the-history-interface
#[dom_struct]
pub struct History {
reflector_: Reflector,
window: JS<Window>,
}
impl History {
pub fn new_inherited(window: &Window) -> History {
History {
reflector_: Reflector::new(),
window: JS::from_ref(&window),
}
}
pub fn new(window: &Window) -> Root<History> {
reflect_dom_object(box History::new_inherited(window),
GlobalRef::Window(window),
HistoryBinding::Wrap)
}
}
impl History {
fn traverse_history(&self, direction: TraversalDirection) {
let pipeline = self.window.pipeline_id();
let msg = ConstellationMsg::TraverseHistory(Some(pipeline), direction);
let _ = self.window.constellation_chan().send(msg); | }
impl HistoryMethods for History {
// https://html.spec.whatwg.org/multipage/#dom-history-length
fn Length(&self) -> u32 {
let pipeline = self.window.pipeline_id();
let (sender, recv) = ipc::channel().expect("Failed to create channel to send jsh length.");
let msg = ConstellationMsg::JointSessionHistoryLength(pipeline, sender);
let _ = self.window.constellation_chan().send(msg);
recv.recv().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-history-go
fn Go(&self, delta: i32) {
let direction = if delta > 0 {
TraversalDirection::Forward(delta as usize)
} else if delta < 0 {
TraversalDirection::Back(-delta as usize)
} else {
self.window.Location().Reload();
return;
};
self.traverse_history(direction);
}
// https://html.spec.whatwg.org/multipage/#dom-history-back
fn Back(&self) {
self.traverse_history(TraversalDirection::Back(1));
}
// https://html.spec.whatwg.org/multipage/#dom-history-forward
fn Forward(&self) {
self.traverse_history(TraversalDirection::Forward(1));
}
} | } | random_line_split |
history.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HistoryBinding;
use dom::bindings::codegen::Bindings::HistoryBinding::HistoryMethods;
use dom::bindings::codegen::Bindings::LocationBinding::LocationMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::window::Window;
use ipc_channel::ipc;
use msg::constellation_msg::TraversalDirection;
use script_traits::ScriptMsg as ConstellationMsg;
// https://html.spec.whatwg.org/multipage/#the-history-interface
#[dom_struct]
pub struct | {
reflector_: Reflector,
window: JS<Window>,
}
impl History {
pub fn new_inherited(window: &Window) -> History {
History {
reflector_: Reflector::new(),
window: JS::from_ref(&window),
}
}
pub fn new(window: &Window) -> Root<History> {
reflect_dom_object(box History::new_inherited(window),
GlobalRef::Window(window),
HistoryBinding::Wrap)
}
}
impl History {
fn traverse_history(&self, direction: TraversalDirection) {
let pipeline = self.window.pipeline_id();
let msg = ConstellationMsg::TraverseHistory(Some(pipeline), direction);
let _ = self.window.constellation_chan().send(msg);
}
}
impl HistoryMethods for History {
// https://html.spec.whatwg.org/multipage/#dom-history-length
fn Length(&self) -> u32 {
let pipeline = self.window.pipeline_id();
let (sender, recv) = ipc::channel().expect("Failed to create channel to send jsh length.");
let msg = ConstellationMsg::JointSessionHistoryLength(pipeline, sender);
let _ = self.window.constellation_chan().send(msg);
recv.recv().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-history-go
fn Go(&self, delta: i32) {
let direction = if delta > 0 {
TraversalDirection::Forward(delta as usize)
} else if delta < 0 {
TraversalDirection::Back(-delta as usize)
} else {
self.window.Location().Reload();
return;
};
self.traverse_history(direction);
}
// https://html.spec.whatwg.org/multipage/#dom-history-back
fn Back(&self) {
self.traverse_history(TraversalDirection::Back(1));
}
// https://html.spec.whatwg.org/multipage/#dom-history-forward
fn Forward(&self) {
self.traverse_history(TraversalDirection::Forward(1));
}
}
| History | identifier_name |
SnapshotListTable.test.tsx | import { locationService } from '@grafana/runtime';
import { getSnapshots } from './SnapshotListTable';
jest.mock('@grafana/runtime', () => ({
...(jest.requireActual('@grafana/runtime') as unknown as object),
getBackendSrv: () => ({
get: jest.fn().mockResolvedValue([
{
name: 'Snap 1',
key: 'JRXqfKihKZek70FM6Xaq502NxH7OyyEs',
external: true,
externalUrl: 'https://www.externalSnapshotUrl.com',
},
{
id: 3,
name: 'Snap 2',
key: 'RziRfhlBDTjwyYGoHAjnWyrMNQ1zUg3j', | ]),
}),
}));
describe('getSnapshots', () => {
(global as any).window = Object.create(window);
Object.defineProperty(window, 'location', {
value: {
href: 'http://localhost:3000/grafana/dashboard/snapshots',
},
writable: true,
});
locationService.push('/dashboard/snapshots');
test('returns correct snapshot urls', async () => {
const results = await getSnapshots();
expect(results).toMatchInlineSnapshot(`
Array [
Object {
"external": true,
"externalUrl": "https://www.externalSnapshotUrl.com",
"key": "JRXqfKihKZek70FM6Xaq502NxH7OyyEs",
"name": "Snap 1",
"url": "/dashboard/snapshot/JRXqfKihKZek70FM6Xaq502NxH7OyyEs",
},
Object {
"external": false,
"externalUrl": "",
"id": 3,
"key": "RziRfhlBDTjwyYGoHAjnWyrMNQ1zUg3j",
"name": "Snap 2",
"url": "/dashboard/snapshot/RziRfhlBDTjwyYGoHAjnWyrMNQ1zUg3j",
},
]
`);
});
}); | external: false,
externalUrl: '',
}, | random_line_split |
merkletree.rs | use services::ledger::merkletree::tree::{ Tree, LeavesIterator, LeavesIntoIterator, TreeLeafData };
use services::ledger::merkletree::proof::{ Proof, Lemma };
use utils::crypto::hash::{Hash, HASH_OUTPUT_LEN};
use errors::crypto::CryptoError;
/// A Merkle tree is a binary tree, with values of type `T` at the leaves,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns an empty tree (not an error) if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> Result<Self, CryptoError> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while !cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1; | }
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
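// Worked example (hypothetical leaves a, b, c): the loop above first pairs
// (a, b) into H(a,b) and carries c up unpaired, then pairs H(a,b) with c to
// form the root H(H(a,b), c). This gives height == 2, count == 3, and
// nodes_count == 2, since nodes_count tracks internal nodes only.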
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASH_OUTPUT_LEN*2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> Result<Option<Proof>, CryptoError> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
} |
cur = next; | random_line_split |
merkletree.rs | use services::ledger::merkletree::tree::{ Tree, LeavesIterator, LeavesIntoIterator, TreeLeafData };
use services::ledger::merkletree::proof::{ Proof, Lemma };
use utils::crypto::hash::{Hash, HASH_OUTPUT_LEN};
use errors::crypto::CryptoError;
/// A Merkle tree is a binary tree, with values of type `T` at the leaves,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns an empty tree (not an error) if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> Result<Self, CryptoError> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while !cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASH_OUTPUT_LEN*2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> Result<Option<Proof>, CryptoError> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
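// Illustrative flow (hypothetical leaf value; the exact Proof layout lives in
// merkletree::proof): a verifier needs only the proof plus a trusted root.
//
// if let Some(proof) = tree.gen_proof(leaf)? {
// // ship `proof` to a peer that holds only tree.root_hash()
// }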
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn | (self) -> Self::IntoIter {
self.root.iter()
}
}
| into_iter | identifier_name |
merkletree.rs | use services::ledger::merkletree::tree::{ Tree, LeavesIterator, LeavesIntoIterator, TreeLeafData };
use services::ledger::merkletree::proof::{ Proof, Lemma };
use utils::crypto::hash::{Hash, HASH_OUTPUT_LEN};
use errors::crypto::CryptoError;
/// A Merkle tree is a binary tree, with values of type `T` at the leaves,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns an empty tree (not an error) if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> Result<Self, CryptoError> {
if values.is_empty() |
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while !cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASH_OUTPUT_LEN*2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
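// e.g. root bytes [0x0f, 0xa0] render as "0fa0"; the with_capacity call is
// exact because each byte contributes two hex characters.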
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize {
self.count
}
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> Result<Option<Proof>, CryptoError> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
}
| {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
} | conditional_block |
merkletree.rs | use services::ledger::merkletree::tree::{ Tree, LeavesIterator, LeavesIntoIterator, TreeLeafData };
use services::ledger::merkletree::proof::{ Proof, Lemma };
use utils::crypto::hash::{Hash, HASH_OUTPUT_LEN};
use errors::crypto::CryptoError;
/// A Merkle tree is a binary tree, with values of type `T` at the leaves,
/// and where every internal node holds the hash of the concatenation of the hashes of its children nodes.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MerkleTree {
/// The root of the inner binary tree
pub root: Tree,
/// The height of the tree
pub height: usize,
/// The number of leaf nodes in the tree
pub count: usize,
/// The number of nodes in the tree
pub nodes_count: usize
}
impl MerkleTree {
/// Constructs a Merkle Tree from a vector of data blocks.
/// Returns an empty tree (not an error) if `values` is empty.
pub fn from_vec(values: Vec<TreeLeafData>) -> Result<Self, CryptoError> {
if values.is_empty() {
return Ok(MerkleTree {
root: Tree::empty(Hash::hash_empty()?),
height: 0,
count: 0,
nodes_count: 0
});
}
let count = values.len();
let mut nodes_count = 0;
let mut height = 0;
let mut cur = Vec::with_capacity(count);
for v in values {
let leaf = Tree::new_leaf(v)?;
cur.push(leaf);
}
while cur.len() > 1 {
let mut next = Vec::new();
while !cur.is_empty() {
if cur.len() == 1 {
next.push(cur.remove(0));
}
else {
let left = cur.remove(0);
let right = cur.remove(0);
let combined_hash = Hash::hash_nodes(
left.hash(),
right.hash()
)?;
let node = Tree::Node {
hash: combined_hash.to_vec(),
left: Box::new(left),
right: Box::new(right)
};
next.push(node);
nodes_count+=1;
}
}
height += 1;
cur = next;
}
debug_assert!(cur.len() == 1);
let root = cur.remove(0);
Ok(MerkleTree {
root: root,
height: height,
count: count,
nodes_count: nodes_count
})
}
/// Returns the root hash of Merkle tree
pub fn root_hash(&self) -> &Vec<u8> {
self.root.hash()
}
/// Returns the hex root hash of Merkle tree
pub fn root_hash_hex(&self) -> String {
let rh = self.root.hash();
let mut ret:String = String::with_capacity(HASH_OUTPUT_LEN*2);
for i in rh {
ret.push_str(&format!("{:02x}", i));
}
return ret;
}
/// Returns the height of Merkle tree
pub fn height(&self) -> usize {
self.height
}
/// Returns the number of leaves in the Merkle tree
pub fn count(&self) -> usize |
/// Returns whether the Merkle tree is empty or not
pub fn is_empty(&self) -> bool {
self.count() == 0
}
/// Generate an inclusion proof for the given value.
/// Returns `None` if the given value is not found in the tree.
pub fn gen_proof(&self, value: TreeLeafData) -> Result<Option<Proof>, CryptoError> {
let root_hash = self.root_hash().clone();
let leaf_hash = Hash::hash_leaf(&value)?;
Ok(Lemma::new(&self.root, leaf_hash.to_vec().as_slice()).map(|lemma|
Proof::new(root_hash, lemma, value)
))
}
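// Note: Lemma::new records one lemma per level on the path from the root to
// the matching leaf, so a proof grows with tree height (roughly log2(count)),
// not with the number of leaves.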
/// Creates an `Iterator` over the values contained in this Merkle tree.
pub fn iter(&self) -> LeavesIterator {
self.root.iter()
}
}
impl IntoIterator for MerkleTree {
type Item = TreeLeafData;
type IntoIter = LeavesIntoIterator;
/// Creates a consuming iterator, that is, one that moves each value out of the Merkle tree.
/// The tree cannot be used after calling this.
fn into_iter(self) -> Self::IntoIter {
self.root.into_iter()
}
}
impl <'a> IntoIterator for &'a MerkleTree {
type Item = &'a TreeLeafData;
type IntoIter = LeavesIterator<'a>;
/// Creates a borrowing `Iterator` over the values contained in this Merkle tree.
fn into_iter(self) -> Self::IntoIter {
self.root.iter()
}
}
| {
self.count
} | identifier_body |
panic_in_result_fn_debug_assertions.rs | #![warn(clippy::panic_in_result_fn)]
#![allow(clippy::unnecessary_wraps)]
// debug_assert should never trigger the `panic_in_result_fn` lint
struct A;
impl A {
fn | (x: i32) -> Result<bool, String> {
debug_assert!(x == 5, "wrong argument");
Ok(true)
}
fn result_with_debug_assert_eq(x: i32) -> Result<bool, String> {
debug_assert_eq!(x, 5);
Ok(true)
}
fn result_with_debug_assert_ne(x: i32) -> Result<bool, String> {
debug_assert_ne!(x, 1);
Ok(true)
}
fn other_with_debug_assert_with_message(x: i32) {
debug_assert!(x == 5, "wrong argument");
}
fn other_with_debug_assert_eq(x: i32) {
debug_assert_eq!(x, 5);
}
fn other_with_debug_assert_ne(x: i32) {
debug_assert_ne!(x, 1);
}
fn result_without_banned_functions() -> Result<bool, String> {
let debug_assert = "debug_assert!";
println!("No {}", debug_assert);
Ok(true)
}
}
fn main() {}
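// Rationale (editorial sketch, not part of the fixture): debug_assert! and its
// _eq/_ne variants compile away in release builds, so they cannot make a
// Result-returning API panic in production; the lint skips them on purpose.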
| result_with_debug_assert_with_message | identifier_name |
panic_in_result_fn_debug_assertions.rs | #![warn(clippy::panic_in_result_fn)]
#![allow(clippy::unnecessary_wraps)]
// debug_assert should never trigger the `panic_in_result_fn` lint
struct A;
impl A {
fn result_with_debug_assert_with_message(x: i32) -> Result<bool, String> {
debug_assert!(x == 5, "wrong argument");
Ok(true)
}
fn result_with_debug_assert_eq(x: i32) -> Result<bool, String> {
debug_assert_eq!(x, 5);
Ok(true)
}
fn result_with_debug_assert_ne(x: i32) -> Result<bool, String> {
debug_assert_ne!(x, 1);
Ok(true)
}
fn other_with_debug_assert_with_message(x: i32) {
debug_assert!(x == 5, "wrong argument");
}
fn other_with_debug_assert_eq(x: i32) {
debug_assert_eq!(x, 5);
}
fn other_with_debug_assert_ne(x: i32) {
debug_assert_ne!(x, 1);
}
fn result_without_banned_functions() -> Result<bool, String> {
let debug_assert = "debug_assert!";
println!("No {}", debug_assert);
Ok(true)
}
}
fn main() | {} | identifier_body |
|
panic_in_result_fn_debug_assertions.rs | #![warn(clippy::panic_in_result_fn)]
#![allow(clippy::unnecessary_wraps)] | // debug_assert should never trigger the `panic_in_result_fn` lint
struct A;
impl A {
fn result_with_debug_assert_with_message(x: i32) -> Result<bool, String> {
debug_assert!(x == 5, "wrong argument");
Ok(true)
}
fn result_with_debug_assert_eq(x: i32) -> Result<bool, String> {
debug_assert_eq!(x, 5);
Ok(true)
}
fn result_with_debug_assert_ne(x: i32) -> Result<bool, String> {
debug_assert_ne!(x, 1);
Ok(true)
}
fn other_with_debug_assert_with_message(x: i32) {
debug_assert!(x == 5, "wrong argument");
}
fn other_with_debug_assert_eq(x: i32) {
debug_assert_eq!(x, 5);
}
fn other_with_debug_assert_ne(x: i32) {
debug_assert_ne!(x, 1);
}
fn result_without_banned_functions() -> Result<bool, String> {
let debug_assert = "debug_assert!";
println!("No {}", debug_assert);
Ok(true)
}
}
fn main() {} | random_line_split |
|
newpct.py | # coding=utf-8
# Author: CristianBB
# Greetings to Mr. Pine-apple
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import re
from requests.compat import urljoin
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class newpctProvider(TorrentProvider):
def __init__(self):
|
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
"""
Search query:
http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All
q => Show name
category_ = Category 'Shows' (767)
idioma_ = Language Spanish (1), All
bus_de_ = Date from (All, mes, semana, ayer, hoy)
"""
results = []
# Only search if user conditions are true
lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
search_params = {
'l': 'doSearch',
'q': '',
'category_': 'All',
'idioma_': 1,
'bus_de_': 'All'
}
for mode in search_strings:
items = []
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
if self.onlyspasearch:
search_params['idioma_'] = 1
else:
search_params['idioma_'] = 'All'
# Only search if user conditions are true
if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
logger.log('Show info is not spanish, skipping provider search', logger.DEBUG)
continue
search_params['bus_de_'] = 'All' if mode != 'RSS' else 'semana'
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {0}'.format
(search_string.decode('utf-8')), logger.DEBUG)
search_params['q'] = search_string
data = self.get_url(self.urls['search'], params=search_params, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', id='categoryTable')
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one Release is found
if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
continue
# 'Fecha', 'Título', 'Tamaño', ''
# Date, Title, Size
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]
for row in torrent_rows[1:-1]:
try:
cells = row('td')
torrent_row = row.find('a')
download_url = torrent_row.get('href', '')
title = self._processTitle(torrent_row.get('title', ''), download_url)
if not all([title, download_url]):
continue
# Provider does not provide seeders/leechers
seeders = 1
leechers = 0
#2 is the 'Tamaño' column.
torrent_size = cells[2].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log('Found result: {0}'.format(title), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError):
continue
results += items
return results
def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments
"""
returns='content' is used when trying to access torrent info (for calling the torrent client). We must first
parse the show sheet page to extract the real .torrent file URL.
"""
trickery = kwargs.pop('returns', '')
if trickery == 'content':
kwargs['returns'] = 'text'
data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, **kwargs)
url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
url = urljoin(self.url, url.rsplit('=', 1)[-1])
kwargs['returns'] = trickery
return super(newpctProvider, self).get_url(url, post_data=post_data, params=params,
timeout=timeout, **kwargs)
def download_result(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self.login():
return False
urls, filename = self._make_url(result)
for url in urls:
# Search results don't return torrent files directly; they return show sheets, so we must parse the show sheet to reach the torrent.
data = self.get_url(url, returns='text')
url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
if url_torrent.startswith('http'):
self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
logger.log('Downloading a result from {0}'.format(url))
if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
logger.log('Saved result to {0}'.format(filename), logger.INFO)
return True
else:
logger.log('Could not download {0}'.format(url), logger.WARNING)
helpers.remove_file_failed(filename)
if urls:
logger.log('Failed to download any results', logger.WARNING)
return False
@staticmethod
def _processTitle(title, url):
# Remove 'Mas informacion sobre ' literal from title
title = title[22:]
title = re.sub(r'[ ]{2,}', ' ', title, flags=re.I)
# Quality - Use re module to avoid case sensitive problems with replace
title = re.sub(r'\[HDTV 1080p?[^\[]*]', '1080p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[ALTA DEFINICION 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.I)
title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.I)
title = re.sub(r'\[BluRay 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 720p?[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[MicroHD 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BLuRay[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BRrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BDrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
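# Worked example of the substitutions above (hypothetical provider title):
# in: "Mas informacion sobre Foo - Temporada 2 [HDTV 720p][Cap.201]"
# out: "Foo - Temporada 2 720p HDTV x264[Cap.201]-NEWPCT" (suffix added below)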
#detect hdtv/bluray by url
#hdtv 1080p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-610/hdtv-1080p-ac3-5-1/
#hdtv 720p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-26/hdtv-720p-ac3-5-1/
#hdtv example url: http://www.newpct.com/descargar-serie/foo/capitulo-214/hdtv/
#bluray compilation example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-11/bluray-1080p/
title_hdtv = re.search(r'HDTV', title, flags=re.I)
title_720p = re.search(r'720p', title, flags=re.I)
title_1080p = re.search(r'1080p', title, flags=re.I)
title_x264 = re.search(r'x264', title, flags=re.I)
title_bluray = re.search(r'bluray', title, flags=re.I)
title_serie_hd = re.search(r'descargar\-seriehd', title, flags=re.I)
url_hdtv = re.search(r'HDTV', url, flags=re.I)
url_720p = re.search(r'720p', url, flags=re.I)
url_1080p = re.search(r'1080p', url, flags=re.I)
url_bluray = re.search(r'bluray', url, flags=re.I)
if not title_hdtv and url_hdtv:
title += ' HDTV'
if not title_x264:
title += ' x264'
if not title_bluray and url_bluray:
title += ' BluRay'
if not title_x264:
title += ' x264'
if not title_1080p and url_1080p:
title += ' 1080p'
title_1080p = True
if not title_720p and url_720p:
title += ' 720p'
title_720p = True
if not (title_720p or title_1080p) and title_serie_hd:
title += ' 720p'
# Language
title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
if re.search(r'\[V.O.[^\[]*]', title, flags=re.I):
title += '-NEWPCTVO'
else:
title += '-NEWPCT'
return title.strip()
provider = newpctProvider()
| TorrentProvider.__init__(self, 'Newpct')
self.onlyspasearch = None
self.url = 'http://www.newpct.com'
self.urls = {'search': urljoin(self.url, 'index.php')}
self.cache = tvcache.TVCache(self, min_time=20) | identifier_body |
newpct.py | # coding=utf-8
# Author: CristianBB
# Greetings to Mr. Pine-apple
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import re
from requests.compat import urljoin
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class newpctProvider(TorrentProvider):
def __init__(self):
TorrentProvider.__init__(self, 'Newpct')
self.onlyspasearch = None
self.url = 'http://www.newpct.com'
self.urls = {'search': urljoin(self.url, 'index.php')}
self.cache = tvcache.TVCache(self, min_time=20)
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
"""
Search query:
http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All
q => Show name
category_ = Category 'Shows' (767)
idioma_ = Language Spanish (1), All
bus_de_ = Date from (All, mes, semana, ayer, hoy)
"""
results = []
# Only search if user conditions are true
lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
search_params = {
'l': 'doSearch',
'q': '',
'category_': 'All',
'idioma_': 1,
'bus_de_': 'All'
}
for mode in search_strings:
items = []
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
if self.onlyspasearch:
search_params['idioma_'] = 1
else:
search_params['idioma_'] = 'All'
# Only search if user conditions are true
if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
logger.log('Show info is not spanish, skipping provider search', logger.DEBUG)
continue
search_params['bus_de_'] = 'All' if mode != 'RSS' else 'semana'
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {0}'.format
(search_string.decode('utf-8')), logger.DEBUG)
search_params['q'] = search_string
data = self.get_url(self.urls['search'], params=search_params, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', id='categoryTable')
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one Release is found
if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
continue
# 'Fecha', 'Título', 'Tamaño', ''
# Date, Title, Size
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]
for row in torrent_rows[1:-1]:
try:
cells = row('td')
torrent_row = row.find('a')
download_url = torrent_row.get('href', '')
title = self._processTitle(torrent_row.get('title', ''), download_url)
if not all([title, download_url]):
continue
# Provider does not provide seeders/leechers
seeders = 1
leechers = 0
#2 is the 'Tamaño' column.
torrent_size = cells[2].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log('Found result: {0}'.format(title), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError):
continue
results += items
return results
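# Illustrative call shape for the method above (hypothetical values):
# provider.search({'Episode': ['Fringe S02E04']}, ep_obj=episode)
# -> [{'title': ..., 'link': ..., 'size': ..., 'seeders': 1, 'leechers': 0, 'hash': ''}]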
def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments
"""
returns='content' is used when trying to access torrent info (for calling the torrent client). We must first
parse the show sheet page to extract the real .torrent file URL.
"""
trickery = kwargs.pop('returns', '')
if trickery == 'content':
kwargs['returns'] = 'text'
data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, **kwargs)
url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
url = urljoin(self.url, url.rsplit('=', 1)[-1])
kwargs['returns'] = trickery
return super(newpctProvider, self).get_url(url, post_data=post_data, params=params,
timeout=timeout, **kwargs)
def download_result(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self.login():
return False
urls, filename = self._make_url(result)
for url in urls:
# Search results don't return torrent files directly; they return show sheets, so we must parse the show sheet to reach the torrent.
data = self.get_url(url, returns='text')
url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
if url_torrent.startswith('http'):
self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
logger.log('Downloading a result from {0}'.format(url))
if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
log | else:
logger.log('Could not download {0}'.format(url), logger.WARNING)
helpers.remove_file_failed(filename)
if urls:
logger.log('Failed to download any results', logger.WARNING)
return False
@staticmethod
def _processTitle(title, url):
# Remove 'Mas informacion sobre ' literal from title
title = title[22:]
title = re.sub(r'[ ]{2,}', ' ', title, flags=re.I)
# Quality - Use re module to avoid case sensitive problems with replace
title = re.sub(r'\[HDTV 1080p?[^\[]*]', '1080p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[ALTA DEFINICION 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.I)
title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.I)
title = re.sub(r'\[BluRay 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 720p?[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[MicroHD 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BLuRay[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BRrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BDrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
#detect hdtv/bluray by url
#hdtv 1080p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-610/hdtv-1080p-ac3-5-1/
#hdtv 720p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-26/hdtv-720p-ac3-5-1/
#hdtv example url: http://www.newpct.com/descargar-serie/foo/capitulo-214/hdtv/
#bluray compilation example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-11/bluray-1080p/
title_hdtv = re.search(r'HDTV', title, flags=re.I)
title_720p = re.search(r'720p', title, flags=re.I)
title_1080p = re.search(r'1080p', title, flags=re.I)
title_x264 = re.search(r'x264', title, flags=re.I)
title_bluray = re.search(r'bluray', title, flags=re.I)
title_serie_hd = re.search(r'descargar\-seriehd', title, flags=re.I)
url_hdtv = re.search(r'HDTV', url, flags=re.I)
url_720p = re.search(r'720p', url, flags=re.I)
url_1080p = re.search(r'1080p', url, flags=re.I)
url_bluray = re.search(r'bluray', url, flags=re.I)
if not title_hdtv and url_hdtv:
title += ' HDTV'
if not title_x264:
title += ' x264'
if not title_bluray and url_bluray:
title += ' BluRay'
if not title_x264:
title += ' x264'
if not title_1080p and url_1080p:
title += ' 1080p'
title_1080p = True
if not title_720p and url_720p:
title += ' 720p'
title_720p = True
if not (title_720p or title_1080p) and title_serie_hd:
title += ' 720p'
# Language
title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
if re.search(r'\[V.O.[^\[]*]', title, flags=re.I):
title += '-NEWPCTVO'
else:
title += '-NEWPCT'
return title.strip()
provider = newpctProvider()
| ger.log('Saved result to {0}'.format(filename), logger.INFO)
return True
| conditional_block |
newpct.py | # coding=utf-8
# Author: CristianBB
# Greetings to Mr. Pine-apple
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import re
from requests.compat import urljoin
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class newpctProvider(TorrentProvider):
def __init__(self):
TorrentProvider.__init__(self, 'Newpct')
self.onlyspasearch = None
self.url = 'http://www.newpct.com'
self.urls = {'search': urljoin(self.url, 'index.php')}
self.cache = tvcache.TVCache(self, min_time=20)
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
"""
Search query:
http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All
q => Show name
category_ = Category 'Shows' (767)
idioma_ = Language Spanish (1), All
bus_de_ = Date from (All, mes, semana, ayer, hoy)
"""
results = []
# Only search if user conditions are true
lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
search_params = {
'l': 'doSearch',
'q': '',
'category_': 'All',
'idioma_': 1,
'bus_de_': 'All'
}
for mode in search_strings:
items = []
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
if self.onlyspasearch:
search_params['idioma_'] = 1
else:
search_params['idioma_'] = 'All'
# Only search if user conditions are true
if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
logger.log('Show info is not spanish, skipping provider search', logger.DEBUG)
continue
search_params['bus_de_'] = 'All' if mode != 'RSS' else 'semana'
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {0}'.format
(search_string.decode('utf-8')), logger.DEBUG)
search_params['q'] = search_string
data = self.get_url(self.urls['search'], params=search_params, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', id='categoryTable')
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one Release is found
if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
continue
# 'Fecha', 'Título', 'Tamaño', ''
# Date, Title, Size
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]
for row in torrent_rows[1:-1]:
try:
cells = row('td')
torrent_row = row.find('a')
download_url = torrent_row.get('href', '')
title = self._processTitle(torrent_row.get('title', ''), download_url)
if not all([title, download_url]):
continue
# Provider does not provide seeders/leechers
seeders = 1
leechers = 0
#2 is the 'Tamaño' column.
torrent_size = cells[2].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log('Found result: {0}'.format(title), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError):
continue
results += items
return results
def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments
"""
returns='content' is used when trying to access torrent info (for calling the torrent client). We must first
parse the show sheet page to extract the real .torrent file URL.
"""
trickery = kwargs.pop('returns', '')
if trickery == 'content':
kwargs['returns'] = 'text'
data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, **kwargs)
url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
url = urljoin(self.url, url.rsplit('=', 1)[-1])
kwargs['returns'] = trickery
return super(newpctProvider, self).get_url(url, post_data=post_data, params=params,
timeout=timeout, **kwargs)
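# The 'returns' swap above makes this a two-step fetch: first pull the show
# sheet as text, regex out the real tumejorserie.com .torrent link, then
# re-issue the request for that link with the caller's original 'returns'.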
def download_result(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self.login():
return False
urls, filename = self._make_url(result)
for url in urls:
# Search results don't return torrent files directly; they return show sheets, so we must parse the show sheet to reach the torrent.
data = self.get_url(url, returns='text')
url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
if url_torrent.startswith('http'):
self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
logger.log('Downloading a result from {0}'.format(url))
if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
logger.log('Saved result to {0}'.format(filename), logger.INFO)
return True
else:
logger.log('Could not download {0}'.format(url), logger.WARNING)
helpers.remove_file_failed(filename)
if urls:
logger.log('Failed to download any results', logger.WARNING)
return False
@staticmethod
def _processTitle(title, url):
# Remove 'Mas informacion sobre ' literal from title
title = title[22:]
title = re.sub(r'[ ]{2,}', ' ', title, flags=re.I)
# Quality - Use re module to avoid case sensitive problems with replace
title = re.sub(r'\[HDTV 1080p?[^\[]*]', '1080p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[ALTA DEFINICION 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.I)
title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.I)
title = re.sub(r'\[BluRay 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 720p?[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[MicroHD 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BLuRay[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BRrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BDrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
#detect hdtv/bluray by url
#hdtv 1080p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-610/hdtv-1080p-ac3-5-1/
#hdtv 720p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-26/hdtv-720p-ac3-5-1/
#hdtv example url: http://www.newpct.com/descargar-serie/foo/capitulo-214/hdtv/
#bluray compilation example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-11/bluray-1080p/
title_hdtv = re.search(r'HDTV', title, flags=re.I)
title_720p = re.search(r'720p', title, flags=re.I)
title_1080p = re.search(r'1080p', title, flags=re.I)
title_x264 = re.search(r'x264', title, flags=re.I)
title_bluray = re.search(r'bluray', title, flags=re.I)
title_serie_hd = re.search(r'descargar\-seriehd', title, flags=re.I)
url_hdtv = re.search(r'HDTV', url, flags=re.I)
url_720p = re.search(r'720p', url, flags=re.I)
url_1080p = re.search(r'1080p', url, flags=re.I)
url_bluray = re.search(r'bluray', url, flags=re.I)
if not title_hdtv and url_hdtv:
title += ' HDTV'
if not title_x264:
title += ' x264'
if not title_bluray and url_bluray:
title += ' BluRay'
if not title_x264:
title += ' x264'
if not title_1080p and url_1080p:
title += ' 1080p'
title_1080p = True
if not title_720p and url_720p:
title += ' 720p'
title_720p = True
if not (title_720p or title_1080p) and title_serie_hd: | title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
if re.search(r'\[V.O.[^\[]*]', title, flags=re.I):
title += '-NEWPCTVO'
else:
title += '-NEWPCT'
return title.strip()
provider = newpctProvider() | title += ' 720p'
# Language | random_line_split |
newpct.py | # coding=utf-8
# Author: CristianBB
# Greetings to Mr. Pine-apple
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import re
from requests.compat import urljoin
from sickbeard import helpers, logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class newpctProvider(TorrentProvider):
def __init__(self):
TorrentProvider.__init__(self, 'Newpct')
self.onlyspasearch = None
self.url = 'http://www.newpct.com'
self.urls = {'search': urljoin(self.url, 'index.php')}
self.cache = tvcache.TVCache(self, min_time=20)
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
"""
Search query:
http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All
q => Show name
category_ = Category 'Shows' (767)
idioma_ = Language Spanish (1), All
bus_de_ = Date from (All, mes, semana, ayer, hoy)
"""
results = []
# Only search if user conditions are true
lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang
search_params = {
'l': 'doSearch',
'q': '',
'category_': 'All',
'idioma_': 1,
'bus_de_': 'All'
}
for mode in search_strings:
items = []
logger.log('Search Mode: {0}'.format(mode), logger.DEBUG)
if self.onlyspasearch:
search_params['idioma_'] = 1
else:
search_params['idioma_'] = 'All'
# Only search if user conditions are true
if self.onlyspasearch and lang_info != 'es' and mode != 'RSS':
logger.log('Show info is not spanish, skipping provider search', logger.DEBUG)
continue
search_params['bus_de_'] = 'All' if mode != 'RSS' else 'semana'
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {0}'.format
(search_string.decode('utf-8')), logger.DEBUG)
search_params['q'] = search_string
data = self.get_url(self.urls['search'], params=search_params, returns='text')
if not data:
continue
with BS4Parser(data, 'html5lib') as html:
torrent_table = html.find('table', id='categoryTable')
torrent_rows = torrent_table('tr') if torrent_table else []
# Continue only if at least one Release is found
if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination
logger.log('Data returned from provider does not contain any torrents', logger.DEBUG)
continue
# 'Fecha', 'Título', 'Tamaño', ''
# Date, Title, Size
labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]
for row in torrent_rows[1:-1]:
try:
cells = row('td')
torrent_row = row.find('a')
download_url = torrent_row.get('href', '')
title = self._processTitle(torrent_row.get('title', ''), download_url)
if not all([title, download_url]):
continue
# Provider does not provide seeders/leechers
seeders = 1
leechers = 0
#2 is the 'Tamaño' column.
torrent_size = cells[2].get_text(strip=True)
size = convert_size(torrent_size) or -1
item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
if mode != 'RSS':
logger.log('Found result: {0}'.format(title), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError):
continue
results += items
return results
def get_url(self, url, post_data=None, params=None, timeout=30, **kwargs): # pylint: disable=too-many-arguments
"""
returns='content' is used when trying to access torrent info (for calling the torrent client). We must first
parse the show sheet page to extract the real .torrent file URL.
"""
trickery = kwargs.pop('returns', '')
if trickery == 'content':
kwargs['returns'] = 'text'
data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, **kwargs)
url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
url = urljoin(self.url, url.rsplit('=', 1)[-1])
kwargs['returns'] = trickery
return super(newpctProvider, self).get_url(url, post_data=post_data, params=params,
timeout=timeout, **kwargs)
def dow | lf, result):
"""
Save the result to disk.
"""
# check for auth
if not self.login():
return False
urls, filename = self._make_url(result)
for url in urls:
# Search results don't return torrent files directly; they return show sheets, so we must parse the show sheet to reach the torrent.
data = self.get_url(url, returns='text')
url_torrent = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group()
if url_torrent.startswith('http'):
self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})
logger.log('Downloading a result from {0}'.format(url))
if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
logger.log('Saved result to {0}'.format(filename), logger.INFO)
return True
else:
logger.log('Could not download {0}'.format(url), logger.WARNING)
helpers.remove_file_failed(filename)
if urls:
logger.log('Failed to download any results', logger.WARNING)
return False
@staticmethod
def _processTitle(title, url):
# Remove 'Mas informacion sobre ' literal from title
title = title[22:]
title = re.sub(r'[ ]{2,}', ' ', title, flags=re.I)
# Quality - Use re module to avoid case sensitive problems with replace
title = re.sub(r'\[HDTV 1080p?[^\[]*]', '1080p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[ALTA DEFINICION 720p?[^\[]*]', '720p HDTV x264', title, flags=re.I)
title = re.sub(r'\[HDTV]', 'HDTV x264', title, flags=re.I)
title = re.sub(r'\[DVD[^\[]*]', 'DVDrip x264', title, flags=re.I)
title = re.sub(r'\[BluRay 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay Rip 720p?[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BluRay MicroHD[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[MicroHD 1080p?[^\[]*]', '1080p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BLuRay[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BRrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
title = re.sub(r'\[BDrip[^\[]*]', '720p BluRay x264', title, flags=re.I)
#detect hdtv/bluray by url
#hdtv 1080p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-610/hdtv-1080p-ac3-5-1/
#hdtv 720p example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-26/hdtv-720p-ac3-5-1/
#hdtv example url: http://www.newpct.com/descargar-serie/foo/capitulo-214/hdtv/
#bluray compilation example url: http://www.newpct.com/descargar-seriehd/foo/capitulo-11/bluray-1080p/
title_hdtv = re.search(r'HDTV', title, flags=re.I)
title_720p = re.search(r'720p', title, flags=re.I)
title_1080p = re.search(r'1080p', title, flags=re.I)
title_x264 = re.search(r'x264', title, flags=re.I)
title_bluray = re.search(r'bluray', title, flags=re.I)
title_serie_hd = re.search(r'descargar\-seriehd', title, flags=re.I)
url_hdtv = re.search(r'HDTV', url, flags=re.I)
url_720p = re.search(r'720p', url, flags=re.I)
url_1080p = re.search(r'1080p', url, flags=re.I)
url_bluray = re.search(r'bluray', url, flags=re.I)
if not title_hdtv and url_hdtv:
title += ' HDTV'
if not title_x264:
title += ' x264'
if not title_bluray and url_bluray:
title += ' BluRay'
if not title_x264:
title += ' x264'
if not title_1080p and url_1080p:
title += ' 1080p'
title_1080p = True
if not title_720p and url_720p:
title += ' 720p'
title_720p = True
if not (title_720p or title_1080p) and title_serie_hd:
title += ' 720p'
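# Example of the URL fallback above (hypothetical): a bare "Foo [HDTV]" title
# fetched from .../capitulo-26/hdtv-720p-ac3-5-1/ ends up as
# "Foo HDTV x264 720p", since the quality tag only appears in the URL.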
# Language
title = re.sub(r'\[Spanish[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Castellano[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
title = re.sub(r'\[AC3 5\.1 Español[^\[]*]', 'SPANISH AUDIO', title, flags=re.I)
if re.search(r'\[V.O.[^\[]*]', title, flags=re.I):
title += '-NEWPCTVO'
else:
title += '-NEWPCT'
return title.strip()
provider = newpctProvider()
| nload_result(se | identifier_name |
gulpfile.js | /**
*
* Web Starter Kit
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*
*/
| 'use strict';
// Include Gulp & Tools We'll Use
var gulp = require('gulp');
var $ = require('gulp-load-plugins')();
var del = require('del');
var runSequence = require('run-sequence');
var browserSync = require('browser-sync');
var reload = browserSync.reload;
// Clean Output Directory
gulp.task('clean', del.bind(null, ['./index.js', './assertRank.js', './specs.js']));
gulp.task('es6', ['clean'], function () {
return gulp.src(['./src/**/*.js'])
// .pipe($.sourcemaps.init({loadMaps: true}))
.pipe($['6to5']()).on('error', console.error.bind(console))
// .pipe($.sourcemaps.write())
.pipe(gulp.dest('.'))
.pipe($.size({title: 'es6'}))
})
gulp.task('browserify', ['es6'], function () {
return gulp.src(['./specs.js'])
.pipe($.browserify({debug: false}))
.pipe(gulp.dest('.'))
.pipe($.size({title: 'browserify'}))
})
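// Pipeline note: 'es6' transpiles src/ with 6to5 (the pre-Babel toolchain)
// into the repo root, and 'browserify' then bundles the transpiled specs.js
// in place before 'serve' watches and reloads.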
// Watch Files For Changes & Reload
gulp.task('serve', ['browserify'], function () {
browserSync({
notify: false, browser: 'skip', ghostMode: false,
// Customize the BrowserSync console logging prefix
logPrefix: 'WSK',
port: 3010,
// Run as an https by uncommenting 'https: true'
// Note: this uses an unsigned certificate which on first access
// will present a certificate warning in the browser.
// https: true,
server: ['.', 'src']
});
gulp.watch(['gulpfile.js'], process.exit)
gulp.watch(['./src/**/*.{js,html}'], ['browserify', reload]);
});
gulp.task('default', ['es6'])
// Load custom tasks from the `tasks` directory
// try { require('require-dir')('tasks'); } catch (err) { console.error(err); } | random_line_split |
|
htmltabledatacellelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLTableDataCellElementBinding;
use dom::document::AbstractDocument;
use dom::element::HTMLTableDataCellElementTypeId;
use dom::htmltablecellelement::HTMLTableCellElement;
use dom::node::{AbstractNode, Node};
pub struct HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement,
}
impl HTMLTableDataCellElement {
pub fn | (localName: ~str, document: AbstractDocument) -> HTMLTableDataCellElement {
HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement::new_inherited(HTMLTableDataCellElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode {
let element = HTMLTableDataCellElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLTableDataCellElementBinding::Wrap)
}
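// Construction sketch (hypothetical caller, e.g. the HTML parser):
//
// let td = HTMLTableDataCellElement::new(~"td", document);
//
// new() boxes the element behind @mut and wires up its JS reflector via
// Node::reflect_node.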
}
| new_inherited | identifier_name |
htmltabledatacellelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLTableDataCellElementBinding;
use dom::document::AbstractDocument;
use dom::element::HTMLTableDataCellElementTypeId;
use dom::htmltablecellelement::HTMLTableCellElement;
use dom::node::{AbstractNode, Node};
pub struct HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement,
}
impl HTMLTableDataCellElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLTableDataCellElement |
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode {
let element = HTMLTableDataCellElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLTableDataCellElementBinding::Wrap)
}
}
| {
HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement::new_inherited(HTMLTableDataCellElementTypeId, localName, document)
}
} | identifier_body |
htmltabledatacellelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLTableDataCellElementBinding;
use dom::document::AbstractDocument;
use dom::element::HTMLTableDataCellElementTypeId;
use dom::htmltablecellelement::HTMLTableCellElement;
use dom::node::{AbstractNode, Node};
pub struct HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement, | pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLTableDataCellElement {
HTMLTableDataCellElement {
htmltablecellelement: HTMLTableCellElement::new_inherited(HTMLTableDataCellElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode {
let element = HTMLTableDataCellElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLTableDataCellElementBinding::Wrap)
}
} | }
impl HTMLTableDataCellElement { | random_line_split |
log_formatting.py | # -*- coding: utf-8 -*-
import sys
import string
from datetime import datetime,timedelta
import calendar
import csv
import re
# Open files (fp is the log file given as an argument, wfp is the csv file to write to)
fp = open(sys.argv[1],'r')
# If the log rotates during the day, both /var/log/kern.log and /var/log/kern.log.1 may need to be read
wfp = open('/path/to/program/csv_data/formatted.csv', 'a')
writer = csv.writer(wfp, lineterminator='\n')
# Compute yesterday's date
yesterday = datetime.now() + timedelta(days=-1)
print "下記の日時のログ整形データをformatted.csvに書き出します"
print yesterday.strftime('%Y %b %d %H:%M:%S')
# Create the id counter and the list variable used for output rows
i = 0
w = [0] * 7
# Build the csv header
#w[0] = "id"
#w[1] = "weekday" | #w[6] = "spt"
# Write one row to the file
#writer.writerow(w)
# Read the log file until EOF
for line in fp.readlines():
# Select forwarding packets that leave the internal network
if line.find("FORWARD_F IN=eth1") >= 0:
# Remove '[' because a space after the '[' in the kernel: number breaks the split below
line = line.replace('[','')
line = line.replace(' DF ',' ')
# Split the line and keep only non-empty tokens
l = filter(lambda x: len(x)>0, re.split(r" ", line))
# Output only log entries matching yesterday's date
if l[0] == yesterday.strftime('%b') and int(l[1], 10) == int(yesterday.strftime('%d'), 10):
# print l
# id
w[0] = i
# Yesterday's weekday (Mon:0, Tue:1, Wed:2, Thu:3, Fri:4, Sat:5, Sun:6)
w[1] = yesterday.weekday()
# Time (hour only)
w[2] = int(l[2][:2], 10)
# Source MAC address
w[3] = l[9][4:]
# Destination IP address
w[4] = l[11][4:]
# Protocol
w[5] = l[17][6:]
# Destination port number
# If the protocol is ICMP, set the destination port number to 0
if l[17][6:] == "ICMP":
l[19] = 0
w[6] = l[19]
else:
w[6] = l[19][4:]
i += 1
# Write one row to the file
writer.writerow(w)
# Close the files
fp.close()
wfp.close() | #w[2] = "hour"
#w[3] = "smacaddr"
#w[4] = "dipaddr"
#w[5] = "proto" | random_line_split |
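# Editor's walkthrough (illustrative only; the log line below is a made-up
# sample, not from a real capture): a kern.log entry such as
#   Mar 14 09:23:41 gw kernel: [1234.5] FORWARD_F IN=eth1 OUT=eth0
#   MAC=00:11:22:33:44:55:... SRC=192.168.1.10 DST=93.184.216.34
#   PROTO=TCP SPT=51515 DPT=443 ...
# is split on whitespace, so l[0]='Mar', l[1]='14', l[2]='09:23:41', and the
# MAC/destination/protocol/port fields are picked out by the fixed indices
# 9/11/17/19. The resulting CSV row would then look roughly like:
#   0,<weekday>,9,00:11:22:33:44:55:...,93.184.216.34,TCP,443
# Note the indices assume this exact field layout; extra iptables LOG fields
# would shift them.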
log_formatting.py | # -*- coding: utf-8 -*-
import sys
import string
from datetime import datetime,timedelta
import calendar
import csv
import re
# Open files (fp is the log file given as an argument, wfp is the csv file to write to)
fp = open(sys.argv[1],'r')
# If the log rotates during the day, both /var/log/kern.log and /var/log/kern.log.1 may need to be read
wfp = open('/path/to/program/csv_data/formatted.csv', 'a')
writer = csv.writer(wfp, lineterminator='\n')
# Compute yesterday's date
yesterday = datetime.now() + timedelta(days=-1)
print "下記の日時のログ整形データをformatted.csvに書き出します"
print yesterday.strftime('%Y %b %d %H:%M:%S')
# Create the id counter and the list variable used for output rows
i = 0
w = [0] * 7
# Build the csv header
#w[0] = "id"
#w[1] = "weekday"
#w[2] = "hour"
#w[3] = "smacaddr"
#w[4] = "dipaddr"
#w[5] = "proto"
#w[6] = "spt"
# Write one row to the file
#writer.writerow(w)
# Read the log file until EOF
for line in fp.readlines():
# Select forwarding packets that leave the internal network
if line.find("FORWARD_F IN=eth1") >= 0:
# Remove '[' because a space after the '[' in the kernel: number breaks the split below
line = line.replace('[','')
line = line.replace(' DF ',' ')
# Split the line and keep only non-empty tokens
l = filter(lambda x: len(x)>0, re.split(r" ", line))
# Output only log entries matching yesterday's date
if l[0] == yesterday.strftime('%b') an | d int(l[1], 10) == int(yesterday.strftime('%d'), 10):
# print l
# id
w[0] = i
# Yesterday's weekday (Mon:0, Tue:1, Wed:2, Thu:3, Fri:4, Sat:5, Sun:6)
w[1] = yesterday.weekday()
# Time (hour only)
w[2] = int(l[2][:2], 10)
# Source MAC address
w[3] = l[9][4:]
# Destination IP address
w[4] = l[11][4:]
# Protocol
w[5] = l[17][6:]
# Destination port number
# If the protocol is ICMP, set the destination port number to 0
if l[17][6:] == "ICMP":
l[19] = 0
w[6] = l[19]
else:
w[6] = l[19][4:]
i += 1
# Write one row to the file
writer.writerow(w)
# Close the files
fp.close()
wfp.close()
| conditional_block |
|
registry_test.ts | /**
* @license
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {create, Finalizable, Registry} from './registry';
import '../test/common-test-setup-karma.js';
class Foo implements Finalizable {
constructor(private readonly final: string[]) {}
finalize() {
this.final.push('Foo');
}
}
class Bar implements Finalizable {
constructor(private readonly final: string[], _foo?: Foo) {}
| () {
this.final.push('Bar');
}
}
interface DemoContext {
foo: Foo;
bar: Bar;
}
suite('Registry', () => {
setup(() => {});
test('It finalizes correctly', () => {
const final: string[] = [];
const demoRegistry: Registry<DemoContext> = {
foo: (_ctx: Partial<DemoContext>) => new Foo(final),
bar: (ctx: Partial<DemoContext>) => new Bar(final, ctx.foo),
};
const demoContext: DemoContext & Finalizable =
create<DemoContext>(demoRegistry);
demoContext.finalize();
assert.deepEqual(final, ['Foo', 'Bar']);
});
});
| finalize | identifier_name |
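// Editor's sketch: the `create`/`Registry` helpers imported from './registry'
// are not shown in this file; a minimal implementation consistent with the
// test above might look like this (illustration only, not the real module).
export interface Finalizable {
  finalize(): void;
}
export type Registry<T> = {[P in keyof T]: (ctx: Partial<T>) => T[P] & Finalizable};
export function create<T>(registry: Registry<T>): T & Finalizable {
  const context: Partial<T> = {};
  const created: Finalizable[] = [];
  for (const key of Object.keys(registry) as Array<keyof T>) {
    const service = registry[key](context);
    context[key] = service; // later factories can depend on earlier entries
    created.push(service);
  }
  // Finalize in creation order, which yields ['Foo', 'Bar'] in the test above.
  return {...(context as T), finalize: () => created.forEach(s => s.finalize())};
}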
registry_test.ts | /**
* @license
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {create, Finalizable, Registry} from './registry';
import '../test/common-test-setup-karma.js';
class Foo implements Finalizable {
constructor(private readonly final: string[]) {}
finalize() {
this.final.push('Foo'); |
class Bar implements Finalizable {
constructor(private readonly final: string[], _foo?: Foo) {}
finalize() {
this.final.push('Bar');
}
}
interface DemoContext {
foo: Foo;
bar: Bar;
}
suite('Registry', () => {
setup(() => {});
test('It finalizes correctly', () => {
const final: string[] = [];
const demoRegistry: Registry<DemoContext> = {
foo: (_ctx: Partial<DemoContext>) => new Foo(final),
bar: (ctx: Partial<DemoContext>) => new Bar(final, ctx.foo),
};
const demoContext: DemoContext & Finalizable =
create<DemoContext>(demoRegistry);
demoContext.finalize();
assert.deepEqual(final, ['Foo', 'Bar']);
});
}); | }
} | random_line_split |
test.ts | import './polyfills.ts';
import 'zone.js/dist/long-stack-trace-zone';
import 'zone.js/dist/jasmine-patch';
import 'zone.js/dist/async-test';
import 'zone.js/dist/fake-async-test';
import 'zone.js/dist/sync-test';
// Unfortunately there's no typing for the `__karma__` variable. Just declare it as any.
declare var __karma__: any;
// Prevent Karma from running prematurely.
__karma__.loaded = function () {};
Promise.all([
System.import('@angular/core/testing'),
System.import('@angular/platform-browser-dynamic/testing')
])
// First, initialize the Angular testing environment.
.then(([testing, testingBrowser]) => {
testing.getTestBed().initTestEnvironment(
testingBrowser.BrowserDynamicTestingModule,
testingBrowser.platformBrowserDynamicTesting()
);
}) | // And load the modules.
.then(context => context.keys().map(context))
// Finally, start Karma to run the tests.
.then(__karma__.start, __karma__.error); | // Then we find all the tests.
.then(() => require.context('./', true, /\.spec\.ts/)) | random_line_split |
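// Editor's sketch: the promise chain above, rewritten with async/await to make
// the required ordering explicit (test environment first, then spec discovery
// and loading, then Karma start). Equivalent illustration only; `System`,
// `require.context` and `__karma__` come from the surrounding harness.
async function bootstrapTests(): Promise<void> {
  const [testing, testingBrowser] = await Promise.all([
    System.import('@angular/core/testing'),
    System.import('@angular/platform-browser-dynamic/testing'),
  ]);
  testing.getTestBed().initTestEnvironment(
    testingBrowser.BrowserDynamicTestingModule,
    testingBrowser.platformBrowserDynamicTesting());
  const context = require.context('./', true, /\.spec\.ts/);
  context.keys().map(context); // load every matching spec module
}
bootstrapTests().then(__karma__.start, __karma__.error);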
web_animations_player.d.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { AnimationPlayer } from '@angular/animations';
import { DOMAnimation } from './dom_animation';
export declare class | implements AnimationPlayer {
element: any;
keyframes: {
[key: string]: string | number;
}[];
options: {
[key: string]: string | number;
};
private previousPlayers;
private _onDoneFns;
private _onStartFns;
private _onDestroyFns;
private _player;
private _duration;
private _delay;
private _initialized;
private _finished;
private _started;
private _destroyed;
private _finalKeyframe;
time: number;
parentPlayer: AnimationPlayer | null;
previousStyles: {
[styleName: string]: string | number;
};
currentSnapshot: {
[styleName: string]: string | number;
};
constructor(element: any, keyframes: {
[key: string]: string | number;
}[], options: {
[key: string]: string | number;
}, previousPlayers?: WebAnimationsPlayer[]);
private _onFinish();
init(): void;
private _buildPlayer();
private _preparePlayerBeforeStart();
readonly domPlayer: DOMAnimation;
onStart(fn: () => void): void;
onDone(fn: () => void): void;
onDestroy(fn: () => void): void;
play(): void;
pause(): void;
finish(): void;
reset(): void;
private _resetDomPlayerState();
restart(): void;
hasStarted(): boolean;
destroy(): void;
setPosition(p: number): void;
getPosition(): number;
readonly totalTime: number;
beforeDestroy(): void;
}
| WebAnimationsPlayer | identifier_name |
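// Editor's sketch of how the declared API above might be driven; in practice
// this player is created by Angular's animation engine rather than app code,
// and the element/keyframes below are made up for illustration.
declare const WebAnimationsPlayer: {
  new (element: any, keyframes: {[key: string]: string | number}[],
       options: {[key: string]: string | number}): any;
};
const player = new WebAnimationsPlayer(
    document.querySelector('.box'),       // element to animate
    [{opacity: 0}, {opacity: 1}],         // keyframes
    {duration: 300, easing: 'ease-in'});  // timing options
player.onStart(() => console.log('animation started'));
player.onDone(() => console.log('animation finished'));
player.init();  // initialize, then start playback
player.play();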
web_animations_player.d.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { AnimationPlayer } from '@angular/animations';
import { DOMAnimation } from './dom_animation';
export declare class WebAnimationsPlayer implements AnimationPlayer {
element: any;
keyframes: {
[key: string]: string | number;
}[];
options: {
[key: string]: string | number;
};
private previousPlayers;
private _onDoneFns;
private _onStartFns;
private _onDestroyFns;
private _player;
private _duration;
private _delay;
private _initialized;
private _finished;
private _started;
private _destroyed;
private _finalKeyframe;
time: number;
parentPlayer: AnimationPlayer | null;
previousStyles: {
[styleName: string]: string | number;
};
currentSnapshot: {
[styleName: string]: string | number;
};
constructor(element: any, keyframes: {
[key: string]: string | number; | [key: string]: string | number;
}, previousPlayers?: WebAnimationsPlayer[]);
private _onFinish();
init(): void;
private _buildPlayer();
private _preparePlayerBeforeStart();
readonly domPlayer: DOMAnimation;
onStart(fn: () => void): void;
onDone(fn: () => void): void;
onDestroy(fn: () => void): void;
play(): void;
pause(): void;
finish(): void;
reset(): void;
private _resetDomPlayerState();
restart(): void;
hasStarted(): boolean;
destroy(): void;
setPosition(p: number): void;
getPosition(): number;
readonly totalTime: number;
beforeDestroy(): void;
} | }[], options: { | random_line_split |
sockjs.js | class SockJS {
constructor(url, whitelist, options, mockOptions) {
this.nextLoginState = true;
this.url = url;
this.whitelist = whitelist;
this.options = options;
SockJS.mockInstances.push(this);
SockJS.currentMockInstance = this;
let fn = () => {
if (typeof this.onopen === 'function') {
this.onopen();
}
};
if (mockOptions != null && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
window.setTimeout(fn, 1);
}
}
log(...args) {
var log = window.console;
if (SockJS.currentMockInstance && SockJS.currentMockInstance.$log) {
log = SockJS.currentMockInstance.$log;
}
log.debug(...args);
}
close(mockOptions) {
this.log("[MOCK] SockJS.close()");
let fn = () => {
if (typeof this.onclose === 'function') {
this.onclose();
}
};
if (mockOptions && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
fn();
}
}
send(event) {
let message = this._unwrapFromEvent(event);
if (message.type !== 'send') |
this.log(`[MOCK] SockJS.send(${event})`);
if (message.replyAddress) {
this.log(`[MOCK] Sending reply to ${message.replyAddress}`);
var mockReply = message.body.mockReply || {data: 'reply'};
var reply = this._wrapToEvent(message.replyAddress, mockReply, undefined, mockReply.type);
this.onmessage(reply);
}
}
_unwrapFromEvent(event) {
return JSON.parse(event);
}
_wrapToEvent(address, body, replyAddress, type) {
return {
data : JSON.stringify({
type: type,
address : address,
message : body,
replyAddress : replyAddress
})
};
}
_buildLoginReplyAsSuccess(username, password) {
this.sessionId = "SESSION" + (Math.round(1000000 * Math.random()));
return {
status : 'ok',
sessionID : this.sessionId
};
}
_buildLoginReplyAsFail(username, password) {
return {
status : 'fail'
};
}
onmessage() {
console.warn('No SockJS.onmessage() defined!');
}
}
SockJS.mockInstances = [];
SockJS.currentMockInstance = null;
module.exports = SockJS;
| {
return;
} | conditional_block |
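// Editor's sketch of how a test might drive the mock above. The address names
// and reply payload are invented; only the framing (type/address/replyAddress/
// body.mockReply) mirrors what send() and onmessage() expect.
const sock = new SockJS('http://localhost/eventbus', null, {});
sock.onmessage = (event) => {
  // The mock echoes body.mockReply back as the `message` field of the reply.
  console.log('reply:', JSON.parse(event.data).message);
};
sock.onopen = () => {
  sock.send(JSON.stringify({
    type: 'send',
    address: 'demo.address',
    replyAddress: 'reply.1',
    body: {mockReply: {type: 'ok', data: 'pong'}},
  }));
};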
sockjs.js | class SockJS {
constructor(url, whitelist, options, mockOptions) {
this.nextLoginState = true;
this.url = url;
this.whitelist = whitelist;
this.options = options;
SockJS.mockInstances.push(this);
SockJS.currentMockInstance = this;
let fn = () => {
if (typeof this.onopen === 'function') {
this.onopen();
}
};
if (mockOptions != null && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
window.setTimeout(fn, 1);
}
}
log(...args) {
var log = window.console;
if (SockJS.currentMockInstance && SockJS.currentMockInstance.$log) {
log = SockJS.currentMockInstance.$log;
}
log.debug(...args);
}
close(mockOptions) {
this.log("[MOCK] SockJS.close()");
let fn = () => {
if (typeof this.onclose === 'function') {
this.onclose();
}
};
if (mockOptions && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
fn();
}
}
send(event) {
let message = this._unwrapFromEvent(event);
if (message.type !== 'send') {
return;
}
this.log(`[MOCK] SockJS.send(${event})`);
if (message.replyAddress) {
this.log(`[MOCK] Sending reply to ${message.replyAddress}`);
var mockReply = message.body.mockReply || {data: 'reply'};
var reply = this._wrapToEvent(message.replyAddress, mockReply, undefined, mockReply.type);
this.onmessage(reply);
}
}
_unwrapFromEvent(event) {
return JSON.parse(event);
}
_wrapToEvent(address, body, replyAddress, type) {
return {
data : JSON.stringify({
type: type,
address : address,
message : body, | };
}
_buildLoginReplyAsSuccess(username, password) {
this.sessionId = "SESSION" + (Math.round(1000000 * Math.random()));
return {
status : 'ok',
sessionID : this.sessionId
};
}
_buildLoginReplyAsFail(username, password) {
return {
status : 'fail'
};
}
onmessage() {
console.warn('No SockJS.onmessage() defined!');
}
}
SockJS.mockInstances = [];
SockJS.currentMockInstance = null;
module.exports = SockJS; | replyAddress : replyAddress
}) | random_line_split |
sockjs.js | class SockJS {
constructor(url, whitelist, options, mockOptions) {
this.nextLoginState = true;
this.url = url;
this.whitelist = whitelist;
this.options = options;
SockJS.mockInstances.push(this);
SockJS.currentMockInstance = this;
let fn = () => {
if (typeof this.onopen === 'function') {
this.onopen();
}
};
if (mockOptions != null && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
window.setTimeout(fn, 1);
}
}
log(...args) {
var log = window.console;
if (SockJS.currentMockInstance && SockJS.currentMockInstance.$log) {
log = SockJS.currentMockInstance.$log;
}
log.debug(...args);
}
close(mockOptions) {
this.log("[MOCK] SockJS.close()");
let fn = () => {
if (typeof this.onclose === 'function') {
this.onclose();
}
};
if (mockOptions && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
fn();
}
}
send(event) {
let message = this._unwrapFromEvent(event);
if (message.type !== 'send') {
return;
}
this.log(`[MOCK] SockJS.send(${event})`);
if (message.replyAddress) {
this.log(`[MOCK] Sending reply to ${message.replyAddress}`);
var mockReply = message.body.mockReply || {data: 'reply'};
var reply = this._wrapToEvent(message.replyAddress, mockReply, undefined, mockReply.type);
this.onmessage(reply);
}
}
_unwrapFromEvent(event) {
return JSON.parse(event);
}
_wrapToEvent(address, body, replyAddress, type) {
return {
data : JSON.stringify({
type: type,
address : address,
message : body,
replyAddress : replyAddress
})
};
}
_buildLoginReplyAsSuccess(username, password) {
this.sessionId = "SESSION" + (Math.round(1000000 * Math.random()));
return {
status : 'ok',
sessionID : this.sessionId
};
}
_buildLoginReplyAsFail(username, password) |
onmessage() {
console.warn('No SockJS.onmessage() defined!');
}
}
SockJS.mockInstances = [];
SockJS.currentMockInstance = null;
module.exports = SockJS;
| {
return {
status : 'fail'
};
} | identifier_body |
sockjs.js | class SockJS {
constructor(url, whitelist, options, mockOptions) {
this.nextLoginState = true;
this.url = url;
this.whitelist = whitelist;
this.options = options;
SockJS.mockInstances.push(this);
SockJS.currentMockInstance = this;
let fn = () => {
if (typeof this.onopen === 'function') {
this.onopen();
}
};
if (mockOptions != null && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
window.setTimeout(fn, 1);
}
}
log(...args) {
var log = window.console;
if (SockJS.currentMockInstance && SockJS.currentMockInstance.$log) {
log = SockJS.currentMockInstance.$log;
}
log.debug(...args);
}
close(mockOptions) {
this.log("[MOCK] SockJS.close()");
let fn = () => {
if (typeof this.onclose === 'function') {
this.onclose();
}
};
if (mockOptions && mockOptions.timeout) {
window.setTimeout(fn, mockOptions.timeout);
} else {
fn();
}
}
send(event) {
let message = this._unwrapFromEvent(event);
if (message.type !== 'send') {
return;
}
this.log(`[MOCK] SockJS.send(${event})`);
if (message.replyAddress) {
this.log(`[MOCK] Sending reply to ${message.replyAddress}`);
var mockReply = message.body.mockReply || {data: 'reply'};
var reply = this._wrapToEvent(message.replyAddress, mockReply, undefined, mockReply.type);
this.onmessage(reply);
}
}
_unwrapFromEvent(event) {
return JSON.parse(event);
}
_wrapToEvent(address, body, replyAddress, type) {
return {
data : JSON.stringify({
type: type,
address : address,
message : body,
replyAddress : replyAddress
})
};
}
_buildLoginReplyAsSuccess(username, password) {
this.sessionId = "SESSION" + (Math.round(1000000 * Math.random()));
return {
status : 'ok',
sessionID : this.sessionId
};
}
_buildLoginReplyAsFail(username, password) {
return {
status : 'fail'
};
}
| () {
console.warn('No SockJS.onmessage() defined!');
}
}
SockJS.mockInstances = [];
SockJS.currentMockInstance = null;
module.exports = SockJS;
| onmessage | identifier_name |
EffectMenu.ts | import * as $ from "jquery";
import { HTMLRenderer } from "./HTMLRenderer";
import { Effect } from "../Effects/Effect";
import { RubEns } from "../RubEns";
import { ParametersFieldPopup, ParametersFieldPopupParameters } from "./ParametersFieldPopup";
/**
* UI element representing the effect menu.
*
* It displays a list of effect buttons, whose effects are defined elsewhere,
* which can be applied on click.
*/
export class | extends HTMLRenderer {
protected rootNodeId = "effect_menu";
protected rootNodeType = "ul";
/**
* Related app instance.
*/
private app: RubEns;
/**
* Set of effects, to display in the UI.
*/
private effects: Effect[];
/**
* Event handler callback meant to be called when a click occurs on an effect.
*/
private effectClickEventHandler = {
eventTypes: ["click"],
selector: ".effect_button",
callback: (event) => this.onEffectClick(event)
};
/**
* Instantiates and initializes a new EffectMenu object.
* @param {JQuery} parentNode Parent node owning current instance.
* @param {RubEns} app Related app instance.
* @return {EffectMenu} Fresh instance of EffectMenu.
*
* @author Camille Gobert
*/
constructor (parentNode: JQuery, app: RubEns) {
super(parentNode);
this.effects = [];
this.app = app;
this.createRootNode();
this.updateRootNode();
this.app.eventManager.registerEventHandler(this.effectClickEventHandler);
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentCreated"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", false);
}
});
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentClosed"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", true);
}
});
}
/**
* Empty the root node, and create and append an effect node for each effect.
*
* @author Camille Gobert
*/
updateRootNode () {
this.rootNode.empty();
for (let effect of this.effects) {
let effectNode = EffectMenu.createEffectNode(effect);
this.rootNode.append(effectNode);
}
}
/**
* Update the list of effects, and update the root node afterwards.
* @param {Effect[]} effects List of [[Effect]] instances.
*
* @author Camille Gobert
*/
setEffects (effects: Effect[]) {
this.effects = effects;
this.updateRootNode();
}
/**
* Static method for creating an effect node, i.e. a node representing an effect in the UI,
* able to respond to a click by applying the related effect, opening a popup to let the user
* set the effect parameters if required.
*
* Note that this method assumes every effect has a different class (name),
* as it uses it to identify every effect button in the UI.
* @param {Effect} effect Effect to represent with an effect node.
* @return {JQuery} Fresh effect node.
*
* @author Camille Gobert
*/
private static createEffectNode (effect: Effect) {
let effectName = effect.name;
let effectClassName = effect.constructor.name;
let effectButton = $("<button>");
effectButton.html(effectName);
effectButton.attr("type", "button");
effectButton.attr("data-effect-classname", effectClassName);
effectButton.attr("title", effectName);
effectButton.addClass("effect_button");
// An effect is initially disabled
effectButton.prop("disabled", true);
let effectNode = $("<li>");
effectNode.append(effectButton);
return effectNode;
}
/**
* Method which must be called when a click occurs on an effect node.
* @param {Event} event Related click event.
*
* @author Camille Gobert
*/
private onEffectClick (event: Event) {
// Get the clicked effect instance
let effectClassName = $(event.target).attr("data-effect-classname");
let effect = this.effects.find((t) => t.constructor.name === effectClassName);
if (! effect) {
console.log("Error: effect " + effectClassName + " could not be loaded.");
return;
}
// If there is no effect parameter, immediately apply it
let effectParameters = effect.parameters;
if (Object.keys(effectParameters).length === 0) {
effect.apply();
return;
}
// Otherwise, open a popup to allow the user to configure the effect parameters,
// before applying it (or cancelling)
let parametersToDisplay = Object.keys(effectParameters)
.map(key => { return effectParameters[key] });
let popupParameters = new ParametersFieldPopupParameters();
let popup = new ParametersFieldPopup(this.rootNode, this.app, popupParameters,
parametersToDisplay, effect.name);
popup.onParameterChangesApplied = _ => {
effect.apply();
};
popup.show();
}
}
| EffectMenu | identifier_name |
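// Editor's sketch of the Effect contract this menu appears to rely on; the
// real interface lives in ../Effects/Effect and may differ in detail.
// NB: the menu also keys buttons by the effect's constructor name.
interface EffectContractSketch {
  name: string;                          // used for the button label and tooltip
  parameters: {[key: string]: object};   // empty map => apply() runs immediately
  apply(): void;                         // applies the effect to the document
}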
EffectMenu.ts | import * as $ from "jquery";
import { HTMLRenderer } from "./HTMLRenderer";
import { Effect } from "../Effects/Effect";
import { RubEns } from "../RubEns";
import { ParametersFieldPopup, ParametersFieldPopupParameters } from "./ParametersFieldPopup";
/**
* UI element representing the effect menu.
*
* It displays a list of effect buttons, whose effects are defined elsewhere,
* which can be applied on click.
*/
export class EffectMenu extends HTMLRenderer {
protected rootNodeId = "effect_menu";
protected rootNodeType = "ul";
/**
* Related app instance.
*/
private app: RubEns;
/**
* Set of effects, to display in the UI.
*/
private effects: Effect[];
/**
* Event handler callback meant to be called when a click occurs on an effect.
*/
private effectClickEventHandler = {
eventTypes: ["click"],
selector: ".effect_button",
callback: (event) => this.onEffectClick(event)
};
/**
* Instantiates and initializes a new EffectMenu object.
* @param {JQuery} parentNode Parent node owning current instance.
* @param {RubEns} app Related app instance.
* @return {EffectMenu} Fresh instance of EffectMenu.
*
* @author Camille Gobert
*/
constructor (parentNode: JQuery, app: RubEns) {
super(parentNode);
this.effects = [];
this.app = app;
this.createRootNode();
this.updateRootNode();
this.app.eventManager.registerEventHandler(this.effectClickEventHandler);
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentCreated"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", false);
}
});
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentClosed"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", true);
}
});
}
/**
* Empty the root node, and create and append an effect node for each effect.
*
* @author Camille Gobert
*/
updateRootNode () {
this.rootNode.empty();
for (let effect of this.effects) {
let effectNode = EffectMenu.createEffectNode(effect);
this.rootNode.append(effectNode);
}
}
/**
* Update the list of effects, and update the root node afterwards.
* @param {Effect[]} effects List of [[Effect]] instances.
*
* @author Camille Gobert
*/
setEffects (effects: Effect[]) {
this.effects = effects;
this.updateRootNode();
}
/**
* Static method for creating an effect node, i.e. a node representing an effect in the UI,
* able to respond to a click by applying the related effect, opening a popup to let the user
* set the effect parameters if required.
*
* Note that this method assumes every effect has a different class (name),
* as it uses it to identify every effect button in the UI.
* @param {Effect} effect Effect to represent with an effect node.
* @return {JQuery} Fresh effect node.
*
* @author Camille Gobert
*/
private static createEffectNode (effect: Effect) {
let effectName = effect.name;
let effectClassName = effect.constructor.name;
let effectButton = $("<button>");
effectButton.html(effectName);
effectButton.attr("type", "button");
effectButton.attr("data-effect-classname", effectClassName);
effectButton.attr("title", effectName);
effectButton.addClass("effect_button");
// An effect is initially disabled
effectButton.prop("disabled", true);
let effectNode = $("<li>");
effectNode.append(effectButton);
return effectNode;
}
/**
* Method which must be called when a click occurs on an effect node.
* @param {Event} event Related click event.
*
* @author Camille Gobert
*/
private onEffectClick (event: Event) {
// Get the clicked effect instance
let effectClassName = $(event.target).attr("data-effect-classname");
let effect = this.effects.find((t) => t.constructor.name === effectClassName);
if (! effect) |
// If there is no effect parameter, immediately apply it
let effectParameters = effect.parameters;
if (Object.keys(effectParameters).length === 0) {
effect.apply();
return;
}
// Otherwise, open a popup to allow the user to configure the effect parameters,
// before applying it (or cancelling)
let parametersToDisplay = Object.keys(effectParameters)
.map(key => { return effectParameters[key] });
let popupParameters = new ParametersFieldPopupParameters();
let popup = new ParametersFieldPopup(this.rootNode, this.app, popupParameters,
parametersToDisplay, effect.name);
popup.onParameterChangesApplied = _ => {
effect.apply();
};
popup.show();
}
}
| {
console.log("Error: effect " + effectClassName + " could not be loaded.");
return;
} | conditional_block |
EffectMenu.ts | import * as $ from "jquery";
import { HTMLRenderer } from "./HTMLRenderer";
import { Effect } from "../Effects/Effect";
import { RubEns } from "../RubEns";
import { ParametersFieldPopup, ParametersFieldPopupParameters } from "./ParametersFieldPopup";
/**
* UI element representing the effect menu.
*
* It displays a list of effect buttons, whose effects are defined elsewhere,
* which can be applied on click.
*/
export class EffectMenu extends HTMLRenderer {
protected rootNodeId = "effect_menu";
protected rootNodeType = "ul";
/**
* Related app instance.
*/
private app: RubEns;
/**
* Set of effects, to display in the UI.
*/
private effects: Effect[];
/**
* Event handler callback meant to be called when a click occurs on an effect.
*/
private effectClickEventHandler = {
eventTypes: ["click"],
selector: ".effect_button",
callback: (event) => this.onEffectClick(event)
};
/**
* Instantiates and initializes a new EffectMenu object.
* @param {JQuery} parentNode Parent node owning current instance.
* @param {RubEns} app Related app instance.
* @return {EffectMenu} Fresh instance of EffectMenu.
*
* @author Camille Gobert
*/
constructor (parentNode: JQuery, app: RubEns) {
super(parentNode);
this.effects = [];
this.app = app;
this.createRootNode();
this.updateRootNode();
this.app.eventManager.registerEventHandler(this.effectClickEventHandler);
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentCreated"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", false);
}
});
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentClosed"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", true);
}
});
}
/**
* Empty the root node, and create and append an effect node for each effect.
*
* @author Camille Gobert
*/
updateRootNode () {
this.rootNode.empty();
for (let effect of this.effects) {
let effectNode = EffectMenu.createEffectNode(effect);
this.rootNode.append(effectNode);
}
}
/**
* Update the list of effects, and update the root node afterwards.
* @param {Effect[]} effects List of [[Effect]] instances.
*
* @author Camille Gobert
*/
setEffects (effects: Effect[]) {
this.effects = effects;
this.updateRootNode();
}
/**
* Static method for creating an effect node, i.e. a node representing an effect in the UI,
* able to respond to a click by applying the related effect, opening a popup to let the user
* set the effect parameters if required.
*
* Note that this method assumes every effect has a different class (name),
* as it uses it to identify every effect button in the UI.
* @param {Effect} effect Effect to represent with an effect node.
* @return {JQuery} Fresh effect node.
*
* @author Camille Gobert
*/
private static createEffectNode (effect: Effect) {
let effectName = effect.name;
let effectClassName = effect.constructor.name;
let effectButton = $("<button>");
effectButton.html(effectName);
effectButton.attr("type", "button");
effectButton.attr("data-effect-classname", effectClassName);
effectButton.attr("title", effectName);
effectButton.addClass("effect_button");
// An effect is initially disabled | effectNode.append(effectButton);
return effectNode;
}
/**
* Method which must be called when a click occurs on an effect node.
* @param {Event} event Related click event.
*
* @author Camille Gobert
*/
private onEffectClick (event: Event) {
// Get the clicked effect instance
let effectClassName = $(event.target).attr("data-effect-classname");
let effect = this.effects.find((t) => t.constructor.name === effectClassName);
if (! effect) {
console.log("Error: effect " + effectClassName + " could not be loaded.");
return;
}
// If there is no effect parameter, immediately apply it
let effectParameters = effect.parameters;
if (Object.keys(effectParameters).length === 0) {
effect.apply();
return;
}
// Otherwise, open a popup to allow the user to configure the effect parameters,
// before applying it (or cancelling)
let parametersToDisplay = Object.keys(effectParameters)
.map(key => { return effectParameters[key] });
let popupParameters = new ParametersFieldPopupParameters();
let popup = new ParametersFieldPopup(this.rootNode, this.app, popupParameters,
parametersToDisplay, effect.name);
popup.onParameterChangesApplied = _ => {
effect.apply();
};
popup.show();
}
} | effectButton.prop("disabled", true);
let effectNode = $("<li>"); | random_line_split |
EffectMenu.ts | import * as $ from "jquery";
import { HTMLRenderer } from "./HTMLRenderer";
import { Effect } from "../Effects/Effect";
import { RubEns } from "../RubEns";
import { ParametersFieldPopup, ParametersFieldPopupParameters } from "./ParametersFieldPopup";
/**
* UI element representing the effect menu.
*
* It displays a list of effect buttons, whose effects are defined elsewhere,
* which can be applied on click.
*/
export class EffectMenu extends HTMLRenderer {
protected rootNodeId = "effect_menu";
protected rootNodeType = "ul";
/**
* Related app instance.
*/
private app: RubEns;
/**
* Set of effects, to display in the UI.
*/
private effects: Effect[];
/**
* Event handler callback meant to be called when a click occurs on an effect.
*/
private effectClickEventHandler = {
eventTypes: ["click"],
selector: ".effect_button",
callback: (event) => this.onEffectClick(event)
};
/**
* Instantiates and initializes a new EffectMenu object.
* @param {JQuery} parentNode Parent node owning current instance.
* @param {RubEns} app Related app instance.
* @return {EffectMenu} Fresh instance of EffectMenu.
*
* @author Camille Gobert
*/
constructor (parentNode: JQuery, app: RubEns) {
super(parentNode);
this.effects = [];
this.app = app;
this.createRootNode();
this.updateRootNode();
this.app.eventManager.registerEventHandler(this.effectClickEventHandler);
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentCreated"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", false);
}
});
this.app.eventManager.registerEventHandler({
eventTypes: ["rubens_documentClosed"],
callback : (event: CustomEvent) => {
$(".effect_button").prop("disabled", true);
}
});
}
/**
* Empty the root node, and create and append an effect node for each effect.
*
* @author Camille Gobert
*/
updateRootNode () {
this.rootNode.empty();
for (let effect of this.effects) {
let effectNode = EffectMenu.createEffectNode(effect);
this.rootNode.append(effectNode);
}
}
/**
* Update the list of effects, and update the root node afterwards.
* @param {Effect[]} effects List of [[Effect]] instances.
*
* @author Camille Gobert
*/
setEffects (effects: Effect[]) |
/**
* Static method for creating an effect node, i.e. a node representing an effect in the UI,
* able to respond to a click by applying the related effect, opening a popup to let the user
* set the effect parameters if required.
*
* Note that this method assumes every effect has a different class (name),
* as it uses it to identify every effect button in the UI.
* @param {Effect} effect Effect to represent with an effect node.
* @return {JQuery} Fresh effect node.
*
* @author Camille Gobert
*/
private static createEffectNode (effect: Effect) {
let effectName = effect.name;
let effectClassName = effect.constructor.name;
let effectButton = $("<button>");
effectButton.html(effectName);
effectButton.attr("type", "button");
effectButton.attr("data-effect-classname", effectClassName);
effectButton.attr("title", effectName);
effectButton.addClass("effect_button");
// An effect is initially disabled
effectButton.prop("disabled", true);
let effectNode = $("<li>");
effectNode.append(effectButton);
return effectNode;
}
/**
* Method which must be called when a click occurs on an effect node.
* @param {Event} event Related click event.
*
* @author Camille Gobert
*/
private onEffectClick (event: Event) {
// Get the clicked effect instance
let effectClassName = $(event.target).attr("data-effect-classname");
let effect = this.effects.find((t) => t.constructor.name === effectClassName);
if (! effect) {
console.log("Error: effect " + effectClassName + " could not be loaded.");
return;
}
// If there is no effect parameter, immediately apply it
let effectParameters = effect.parameters;
if (Object.keys(effectParameters).length === 0) {
effect.apply();
return;
}
// Otherwise, open a popup to allow the user to configure the effect parameters,
// before applying it (or cancelling)
let parametersToDisplay = Object.keys(effectParameters)
.map(key => { return effectParameters[key] });
let popupParameters = new ParametersFieldPopupParameters();
let popup = new ParametersFieldPopup(this.rootNode, this.app, popupParameters,
parametersToDisplay, effect.name);
popup.onParameterChangesApplied = _ => {
effect.apply();
};
popup.show();
}
}
| {
this.effects = effects;
this.updateRootNode();
} | identifier_body |
es.js | /*!
* This is an `i18n` language object.
*
* Spanish
*
* @author
* Jalios (Twitter: @Jalios)
* Sascha Greuel (Twitter: @SoftCreatR)
* Rafael Miranda (GitHub: @rafa8626)
*
* @see core/i18n.js
*/ | exports.es = {
'mejs.plural-form': 1,
// core/mediaelement.js
'mejs.download-file': 'Descargar archivo',
// renderers/flash.js
'mejs.install-flash': 'Esta usando un navegador que no tiene activado o instalado el reproductor de Flash. Por favor active el plugin del reproductor de Flash o descargue la versión más reciente en https://get.adobe.com/flashplayer/',
// features/fullscreen.js
'mejs.fullscreen': 'Pantalla completa',
// features/playpause.js
'mejs.play': 'Reproducción',
'mejs.pause': 'Pausa',
// features/progress.js
'mejs.time-slider': 'Control deslizante de tiempo',
'mejs.time-help-text': 'Use las flechas Izquierda/Derecha para avanzar un segundo y las flechas Arriba/Abajo para avanzar diez segundos.',
'mejs.live-broadcast': 'Transmisión en Vivo',
// features/volume.js
'mejs.volume-help-text': 'Use las flechas Arriba/Abajo para subir o bajar el volumen.',
'mejs.unmute': 'Reactivar silencio',
'mejs.mute': 'Silencio',
'mejs.volume-slider': 'Control deslizante de volumen',
// core/player.js
'mejs.video-player': 'Reproductor de video',
'mejs.audio-player': 'Reproductor de audio',
// features/tracks.js
'mejs.captions-subtitles': 'Leyendas/Subtítulos',
'mejs.captions-chapters': 'Capítulos',
'mejs.none': 'Ninguno',
'mejs.afrikaans': 'Afrikaans',
'mejs.albanian': 'Albano',
'mejs.arabic': 'Árabe',
'mejs.belarusian': 'Bielorruso',
'mejs.bulgarian': 'Búlgaro',
'mejs.catalan': 'Catalán',
'mejs.chinese': 'Chino',
'mejs.chinese-simplified': 'Chino (Simplificado)',
'mejs.chinese-traditional': 'Chino (Tradicional)',
'mejs.croatian': 'Croata',
'mejs.czech': 'Checo',
'mejs.danish': 'Danés',
'mejs.dutch': 'Holandés',
'mejs.english': 'Inglés',
'mejs.estonian': 'Estoniano',
'mejs.filipino': 'Filipino',
'mejs.finnish': 'Finlandés',
'mejs.french': 'Francés',
'mejs.galician': 'Gallego',
'mejs.german': 'Alemán',
'mejs.greek': 'Griego',
'mejs.haitian-creole': 'Haitiano Criollo',
'mejs.hebrew': 'Hebreo',
'mejs.hindi': 'Hindi',
'mejs.hungarian': 'Húngaro',
'mejs.icelandic': 'Islandés',
'mejs.indonesian': 'Indonesio',
'mejs.irish': 'Irlandés',
'mejs.italian': 'Italiano',
'mejs.japanese': 'Japonés',
'mejs.korean': 'Coreano',
'mejs.latvian': 'Letón',
'mejs.lithuanian': 'Lituano',
'mejs.macedonian': 'Macedonio',
'mejs.malay': 'Malayo',
'mejs.maltese': 'Maltés',
'mejs.norwegian': 'Noruego',
'mejs.persian': 'Persa',
'mejs.polish': 'Polaco',
'mejs.portuguese': 'Portugués',
'mejs.romanian': 'Rumano',
'mejs.russian': 'Ruso',
'mejs.serbian': 'Serbio',
'mejs.slovak': 'Eslovaco',
'mejs.slovenian': 'Eslovenio',
'mejs.spanish': 'Español',
'mejs.swahili': 'Swahili',
'mejs.swedish': 'Sueco',
'mejs.tagalog': 'Tagalog',
'mejs.thai': 'Tailandés',
'mejs.turkish': 'Turco',
'mejs.ukrainian': 'Ucraniano',
'mejs.vietnamese': 'Vietnamita',
'mejs.welsh': 'Galés',
'mejs.yiddish': 'Yiddish'
};
}
})(mejs.i18n); |
(function (exports) {
if (exports.es === undefined) { | random_line_split |
es.js | /*!
* This is an `i18n` language object.
*
* Spanish
*
* @author
* Jalios (Twitter: @Jalios)
* Sascha Greuel (Twitter: @SoftCreatR)
* Rafael Miranda (GitHub: @rafa8626)
*
* @see core/i18n.js
*/
(function (exports) {
if (exports.es === undefined) | {
exports.es = {
'mejs.plural-form': 1,
// core/mediaelement.js
'mejs.download-file': 'Descargar archivo',
// renderers/flash.js
'mejs.install-flash': 'Esta usando un navegador que no tiene activado o instalado el reproductor de Flash. Por favor active el plugin del reproductor de Flash o descargue la versión más reciente en https://get.adobe.com/flashplayer/',
// features/fullscreen.js
'mejs.fullscreen': 'Pantalla completa',
// features/playpause.js
'mejs.play': 'Reproducción',
'mejs.pause': 'Pausa',
// features/progress.js
'mejs.time-slider': 'Control deslizante de tiempo',
'mejs.time-help-text': 'Use las flechas Izquierda/Derecha para avanzar un segundo y las flechas Arriba/Abajo para avanzar diez segundos.',
'mejs.live-broadcast': 'Transmisión en Vivo',
// features/volume.js
'mejs.volume-help-text': 'Use las flechas Arriba/Abajo para subir o bajar el volumen.',
'mejs.unmute': 'Reactivar silencio',
'mejs.mute': 'Silencio',
'mejs.volume-slider': 'Control deslizante de volumen',
// core/player.js
'mejs.video-player': 'Reproductor de video',
'mejs.audio-player': 'Reproductor de audio',
// features/tracks.js
'mejs.captions-subtitles': 'Leyendas/Subtítulos',
'mejs.captions-chapters': 'Capítulos',
'mejs.none': 'Ninguno',
'mejs.afrikaans': 'Afrikaans',
'mejs.albanian': 'Albano',
'mejs.arabic': 'Árabe',
'mejs.belarusian': 'Bielorruso',
'mejs.bulgarian': 'Búlgaro',
'mejs.catalan': 'Catalán',
'mejs.chinese': 'Chino',
'mejs.chinese-simplified': 'Chino (Simplificado)',
'mejs.chinese-traditional': 'Chino (Tradicional)',
'mejs.croatian': 'Croata',
'mejs.czech': 'Checo',
'mejs.danish': 'Danés',
'mejs.dutch': 'Holandés',
'mejs.english': 'Inglés',
'mejs.estonian': 'Estoniano',
'mejs.filipino': 'Filipino',
'mejs.finnish': 'Finlandés',
'mejs.french': 'Francés',
'mejs.galician': 'Gallego',
'mejs.german': 'Alemán',
'mejs.greek': 'Griego',
'mejs.haitian-creole': 'Haitiano Criollo',
'mejs.hebrew': 'Hebreo',
'mejs.hindi': 'Hindi',
'mejs.hungarian': 'Húngaro',
'mejs.icelandic': 'Islandés',
'mejs.indonesian': 'Indonesio',
'mejs.irish': 'Irlandés',
'mejs.italian': 'Italiano',
'mejs.japanese': 'Japonés',
'mejs.korean': 'Coreano',
'mejs.latvian': 'Letón',
'mejs.lithuanian': 'Lituano',
'mejs.macedonian': 'Macedonio',
'mejs.malay': 'Malayo',
'mejs.maltese': 'Maltés',
'mejs.norwegian': 'Noruego',
'mejs.persian': 'Persa',
'mejs.polish': 'Polaco',
'mejs.portuguese': 'Portugués',
'mejs.romanian': 'Rumano',
'mejs.russian': 'Ruso',
'mejs.serbian': 'Serbio',
'mejs.slovak': 'Eslovaco',
'mejs.slovenian': 'Eslovenio',
'mejs.spanish': 'Español',
'mejs.swahili': 'Swahili',
'mejs.swedish': 'Sueco',
'mejs.tagalog': 'Tagalog',
'mejs.thai': 'Tailandés',
'mejs.turkish': 'Turco',
'mejs.ukrainian': 'Ucraniano',
'mejs.vietnamese': 'Vietnamita',
'mejs.welsh': 'Galés',
'mejs.yiddish': 'Yiddish'
};
}
})(mejs.i18n); | conditional_block |
|
transfer_state.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {APP_ID, Injectable, NgModule} from '@angular/core';
import {DOCUMENT} from '../dom/dom_tokens';
export function escapeHtml(text: string): string {
const escapedText: {[k: string]: string} = {
'&': '&a;',
'"': '&q;',
'\'': '&s;',
'<': '&l;',
'>': '&g;',
};
return text.replace(/[&"'<>]/g, s => escapedText[s]);
}
export function unescapeHtml(text: string): string {
const unescapedText: {[k: string]: string} = {
'&a;': '&',
'&q;': '"',
'&s;': '\'',
'&l;': '<',
'&g;': '>',
};
return text.replace(/&[^;]+;/g, s => unescapedText[s]);
}
/**
* A type-safe key to use with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export type StateKey<T> = string & {__not_a_string: never};
/**
* Create a `StateKey<T>` that can be used to store value of type T with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export function makeStateKey<T = void>(key: string): StateKey<T> {
return key as StateKey<T>;
}
/**
* A key value store that is transferred from the application on the server side to the application
* on the client side.
*
* `TransferState` will be available as an injectable token. To use it import
* `ServerTransferStateModule` on the server and `BrowserTransferStateModule` on the client.
*
* The values in the store are serialized/deserialized using JSON.stringify/JSON.parse. So only
* boolean, number, string, null and non-class objects will be serialized and deserialized in a
* non-lossy manner.
*
* @experimental
*/
@Injectable()
export class TransferState {
private store: {[k: string]: {} | undefined} = {};
private onSerializeCallbacks: {[k: string]: () => {} | undefined} = {};
/** @internal */
static init(initState: {}) {
const transferState = new TransferState();
transferState.store = initState;
return transferState;
}
/**
* Get the value corresponding to a key. Return `defaultValue` if key is not found.
*/ | get<T>(key: StateKey<T>, defaultValue: T): T {
return this.store[key] !== undefined ? this.store[key] as T : defaultValue;
}
/**
* Set the value corresponding to a key.
*/
set<T>(key: StateKey<T>, value: T): void { this.store[key] = value; }
/**
* Remove a key from the store.
*/
remove<T>(key: StateKey<T>): void { delete this.store[key]; }
/**
* Test whether a key exists in the store.
*/
hasKey<T>(key: StateKey<T>) { return this.store.hasOwnProperty(key); }
/**
* Register a callback to provide the value for a key when `toJson` is called.
*/
onSerialize<T>(key: StateKey<T>, callback: () => T): void {
this.onSerializeCallbacks[key] = callback;
}
/**
* Serialize the current state of the store to JSON.
*/
toJson(): string {
// Call the onSerialize callbacks and put those values into the store.
for (const key in this.onSerializeCallbacks) {
if (this.onSerializeCallbacks.hasOwnProperty(key)) {
try {
this.store[key] = this.onSerializeCallbacks[key]();
} catch (e) {
console.warn('Exception in onSerialize callback: ', e);
}
}
}
return JSON.stringify(this.store);
}
}
export function initTransferState(doc: Document, appId: string) {
// Locate the script tag with the JSON data transferred from the server.
// The id of the script tag is set to the Angular appId + 'state'.
const script = doc.getElementById(appId + '-state');
let initialState = {};
if (script && script.textContent) {
try {
initialState = JSON.parse(unescapeHtml(script.textContent));
} catch (e) {
console.warn('Exception while restoring TransferState for app ' + appId, e);
}
}
return TransferState.init(initialState);
}
/**
* NgModule to install on the client side while using the `TransferState` to transfer state from
* server to client.
*
* @experimental
*/
@NgModule({
providers: [{provide: TransferState, useFactory: initTransferState, deps: [DOCUMENT, APP_ID]}],
})
export class BrowserTransferStateModule {
} | random_line_split |
|
transfer_state.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {APP_ID, Injectable, NgModule} from '@angular/core';
import {DOCUMENT} from '../dom/dom_tokens';
export function escapeHtml(text: string): string {
const escapedText: {[k: string]: string} = {
'&': '&a;',
'"': '&q;',
'\'': '&s;',
'<': '&l;',
'>': '&g;',
};
return text.replace(/[&"'<>]/g, s => escapedText[s]);
}
export function unescapeHtml(text: string): string {
const unescapedText: {[k: string]: string} = {
'&a;': '&',
'&q;': '"',
'&s;': '\'',
'&l;': '<',
'&g;': '>',
};
return text.replace(/&[^;]+;/g, s => unescapedText[s]);
}
/**
* A type-safe key to use with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export type StateKey<T> = string & {__not_a_string: never};
/**
* Create a `StateKey<T>` that can be used to store value of type T with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export function makeStateKey<T = void>(key: string): StateKey<T> {
return key as StateKey<T>;
}
/**
* A key value store that is transferred from the application on the server side to the application
* on the client side.
*
* `TransferState` will be available as an injectable token. To use it import
* `ServerTransferStateModule` on the server and `BrowserTransferStateModule` on the client.
*
* The values in the store are serialized/deserialized using JSON.stringify/JSON.parse. So only
* boolean, number, string, null and non-class objects will be serialized and deserialized in a
* non-lossy manner.
*
* @experimental
*/
@Injectable()
export class TransferState {
private store: {[k: string]: {} | undefined} = {};
private onSerializeCallbacks: {[k: string]: () => {} | undefined} = {};
/** @internal */
static init(initState: {}) {
const transferState = new TransferState();
transferState.store = initState;
return transferState;
}
/**
* Get the value corresponding to a key. Return `defaultValue` if key is not found.
*/
get<T>(key: StateKey<T>, defaultValue: T): T {
return this.store[key] !== undefined ? this.store[key] as T : defaultValue;
}
/**
* Set the value corresponding to a key.
*/
set<T>(key: StateKey<T>, value: T): void { this.store[key] = value; }
/**
* Remove a key from the store.
*/
remove<T>(key: StateKey<T>): void { delete this.store[key]; }
/**
* Test whether a key exists in the store.
*/
hasKey<T>(key: StateKey<T>) { return this.store.hasOwnProperty(key); }
/**
* Register a callback to provide the value for a key when `toJson` is called.
*/
onSerialize<T>(key: StateKey<T>, callback: () => T): void {
this.onSerializeCallbacks[key] = callback;
}
/**
* Serialize the current state of the store to JSON.
*/
toJson(): string {
// Call the onSerialize callbacks and put those values into the store.
for (const key in this.onSerializeCallbacks) {
if (this.onSerializeCallbacks.hasOwnProperty(key)) |
}
return JSON.stringify(this.store);
}
}
export function initTransferState(doc: Document, appId: string) {
// Locate the script tag with the JSON data transferred from the server.
// The id of the script tag is set to the Angular appId + 'state'.
const script = doc.getElementById(appId + '-state');
let initialState = {};
if (script && script.textContent) {
try {
initialState = JSON.parse(unescapeHtml(script.textContent));
} catch (e) {
console.warn('Exception while restoring TransferState for app ' + appId, e);
}
}
return TransferState.init(initialState);
}
/**
* NgModule to install on the client side while using the `TransferState` to transfer state from
* server to client.
*
* @experimental
*/
@NgModule({
providers: [{provide: TransferState, useFactory: initTransferState, deps: [DOCUMENT, APP_ID]}],
})
export class BrowserTransferStateModule {
}
| {
try {
this.store[key] = this.onSerializeCallbacks[key]();
} catch (e) {
console.warn('Exception in onSerialize callback: ', e);
}
} | conditional_block |
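// Editor's sketch of the API documented above, end to end: the server provides
// a value lazily under a typed key, toJson() is embedded into the page, and
// the client reads it back. COUNTER_KEY and the fallback value are made up.
const COUNTER_KEY = makeStateKey<number>('counter');

// Server side: supply the value when the page is serialized.
function onServer(transferState: TransferState): void {
  transferState.onSerialize(COUNTER_KEY, () => 42);
}

// Client side: use the transferred value if present, otherwise recompute.
function onClient(transferState: TransferState): number {
  const cached = transferState.get(COUNTER_KEY, -1);
  return cached !== -1 ? cached : 0; // fall back to a fresh computation
}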
transfer_state.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {APP_ID, Injectable, NgModule} from '@angular/core';
import {DOCUMENT} from '../dom/dom_tokens';
export function escapeHtml(text: string): string {
const escapedText: {[k: string]: string} = {
'&': '&a;',
'"': '&q;',
'\'': '&s;',
'<': '&l;',
'>': '&g;',
};
return text.replace(/[&"'<>]/g, s => escapedText[s]);
}
export function unescapeHtml(text: string): string {
const unescapedText: {[k: string]: string} = {
'&a;': '&',
'&q;': '"',
'&s;': '\'',
'&l;': '<',
'&g;': '>',
};
return text.replace(/&[^;]+;/g, s => unescapedText[s]);
}
/**
* A type-safe key to use with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export type StateKey<T> = string & {__not_a_string: never};
/**
* Create a `StateKey<T>` that can be used to store value of type T with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export function makeStateKey<T = void>(key: string): StateKey<T> {
return key as StateKey<T>;
}
/**
* A key value store that is transferred from the application on the server side to the application
* on the client side.
*
* `TransferState` will be available as an injectable token. To use it import
* `ServerTransferStateModule` on the server and `BrowserTransferStateModule` on the client.
*
* The values in the store are serialized/deserialized using JSON.stringify/JSON.parse. So only
* boolean, number, string, null and non-class objects will be serialized and deserialized in a
* non-lossy manner.
*
* @experimental
*/
@Injectable()
export class TransferState {
private store: {[k: string]: {} | undefined} = {};
private onSerializeCallbacks: {[k: string]: () => {} | undefined} = {};
/** @internal */
static init(initState: {}) {
const transferState = new TransferState();
transferState.store = initState;
return transferState;
}
/**
* Get the value corresponding to a key. Return `defaultValue` if key is not found.
*/
get<T>(key: StateKey<T>, defaultValue: T): T {
return this.store[key] !== undefined ? this.store[key] as T : defaultValue;
}
/**
* Set the value corresponding to a key.
*/
set<T>(key: StateKey<T>, value: T): void { this.store[key] = value; }
/**
* Remove a key from the store.
*/
| <T>(key: StateKey<T>): void { delete this.store[key]; }
/**
* Test whether a key exists in the store.
*/
hasKey<T>(key: StateKey<T>) { return this.store.hasOwnProperty(key); }
/**
* Register a callback to provide the value for a key when `toJson` is called.
*/
onSerialize<T>(key: StateKey<T>, callback: () => T): void {
this.onSerializeCallbacks[key] = callback;
}
/**
* Serialize the current state of the store to JSON.
*/
toJson(): string {
// Call the onSerialize callbacks and put those values into the store.
for (const key in this.onSerializeCallbacks) {
if (this.onSerializeCallbacks.hasOwnProperty(key)) {
try {
this.store[key] = this.onSerializeCallbacks[key]();
} catch (e) {
console.warn('Exception in onSerialize callback: ', e);
}
}
}
return JSON.stringify(this.store);
}
}
export function initTransferState(doc: Document, appId: string) {
// Locate the script tag with the JSON data transferred from the server.
// The id of the script tag is set to the Angular appId + 'state'.
const script = doc.getElementById(appId + '-state');
let initialState = {};
if (script && script.textContent) {
try {
initialState = JSON.parse(unescapeHtml(script.textContent));
} catch (e) {
console.warn('Exception while restoring TransferState for app ' + appId, e);
}
}
return TransferState.init(initialState);
}
/**
* NgModule to install on the client side while using the `TransferState` to transfer state from
* server to client.
*
* @experimental
*/
@NgModule({
providers: [{provide: TransferState, useFactory: initTransferState, deps: [DOCUMENT, APP_ID]}],
})
export class BrowserTransferStateModule {
}
| remove | identifier_name |
transfer_state.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {APP_ID, Injectable, NgModule} from '@angular/core';
import {DOCUMENT} from '../dom/dom_tokens';
export function escapeHtml(text: string): string {
const escapedText: {[k: string]: string} = {
'&': '&a;',
'"': '&q;',
'\'': '&s;',
'<': '&l;',
'>': '&g;',
};
return text.replace(/[&"'<>]/g, s => escapedText[s]);
}
export function unescapeHtml(text: string): string {
const unescapedText: {[k: string]: string} = {
'&a;': '&',
'&q;': '"',
'&s;': '\'',
'&l;': '<',
'&g;': '>',
};
return text.replace(/&[^;]+;/g, s => unescapedText[s]);
}
/**
* A type-safe key to use with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export type StateKey<T> = string & {__not_a_string: never};
/**
* Create a `StateKey<T>` that can be used to store value of type T with `TransferState`.
*
* Example:
*
* ```
* const COUNTER_KEY = makeStateKey<number>('counter');
* let value = 10;
*
* transferState.set(COUNTER_KEY, value);
* ```
*
* @experimental
*/
export function makeStateKey<T = void>(key: string): StateKey<T> |
/**
* A key value store that is transferred from the application on the server side to the application
* on the client side.
*
* `TransferState` will be available as an injectable token. To use it import
* `ServerTransferStateModule` on the server and `BrowserTransferStateModule` on the client.
*
* The values in the store are serialized/deserialized using JSON.stringify/JSON.parse. So only
* boolean, number, string, null and non-class objects will be serialized and deserialized in a
* non-lossy manner.
*
* @experimental
*/
@Injectable()
export class TransferState {
private store: {[k: string]: {} | undefined} = {};
private onSerializeCallbacks: {[k: string]: () => {} | undefined} = {};
/** @internal */
static init(initState: {}) {
const transferState = new TransferState();
transferState.store = initState;
return transferState;
}
/**
* Get the value corresponding to a key. Return `defaultValue` if key is not found.
*/
get<T>(key: StateKey<T>, defaultValue: T): T {
return this.store[key] !== undefined ? this.store[key] as T : defaultValue;
}
/**
* Set the value corresponding to a key.
*/
set<T>(key: StateKey<T>, value: T): void { this.store[key] = value; }
/**
* Remove a key from the store.
*/
remove<T>(key: StateKey<T>): void { delete this.store[key]; }
/**
* Test whether a key exists in the store.
*/
hasKey<T>(key: StateKey<T>) { return this.store.hasOwnProperty(key); }
/**
* Register a callback to provide the value for a key when `toJson` is called.
*/
onSerialize<T>(key: StateKey<T>, callback: () => T): void {
this.onSerializeCallbacks[key] = callback;
}
/**
* Serialize the current state of the store to JSON.
*/
toJson(): string {
// Call the onSerialize callbacks and put those values into the store.
for (const key in this.onSerializeCallbacks) {
if (this.onSerializeCallbacks.hasOwnProperty(key)) {
try {
this.store[key] = this.onSerializeCallbacks[key]();
} catch (e) {
console.warn('Exception in onSerialize callback: ', e);
}
}
}
return JSON.stringify(this.store);
}
}
export function initTransferState(doc: Document, appId: string) {
// Locate the script tag with the JSON data transferred from the server.
  // The id of the script tag is set to the Angular appId + '-state'.
const script = doc.getElementById(appId + '-state');
let initialState = {};
if (script && script.textContent) {
try {
initialState = JSON.parse(unescapeHtml(script.textContent));
} catch (e) {
console.warn('Exception while restoring TransferState for app ' + appId, e);
}
}
return TransferState.init(initialState);
}
/**
* NgModule to install on the client side while using the `TransferState` to transfer state from
* server to client.
*
* @experimental
*/
@NgModule({
providers: [{provide: TransferState, useFactory: initTransferState, deps: [DOCUMENT, APP_ID]}],
})
export class BrowserTransferStateModule {
}
| {
return key as StateKey<T>;
} | identifier_body |
utils.py | """
Helper functions for handling DB accesses.
"""
import subprocess
import logging
import gzip
import io
from nominatim.db.connection import get_pg_env
from nominatim.errors import UsageError
LOG = logging.getLogger()
def _pipe_to_proc(proc, fdesc):
chunk = fdesc.read(2048)
while chunk and proc.poll() is None:
try:
proc.stdin.write(chunk)
except BrokenPipeError as exc:
raise UsageError("Failed to execute SQL file.") from exc
chunk = fdesc.read(2048)
return len(chunk)
def execute_file(dsn, fname, ignore_errors=False, pre_code=None, post_code=None):
""" Read an SQL file and run its contents against the given database
using psql. Use `pre_code` and `post_code` to run extra commands
before or after executing the file. The commands are run within the
same session, so they may be used to wrap the file execution in a
transaction.
"""
cmd = ['psql']
if not ignore_errors:
cmd.extend(('-v', 'ON_ERROR_STOP=1'))
if not LOG.isEnabledFor(logging.INFO):
cmd.append('--quiet')
proc = subprocess.Popen(cmd, env=get_pg_env(dsn), stdin=subprocess.PIPE)
try:
if not LOG.isEnabledFor(logging.INFO):
proc.stdin.write('set client_min_messages to WARNING;'.encode('utf-8'))
if pre_code:
proc.stdin.write((pre_code + ';').encode('utf-8'))
if fname.suffix == '.gz':
with gzip.open(str(fname), 'rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
else:
with fname.open('rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
if remain == 0 and post_code:
proc.stdin.write((';' + post_code).encode('utf-8'))
finally:
proc.stdin.close()
ret = proc.wait()
if ret != 0 or remain > 0:
raise UsageError("Failed to execute SQL file.")
# List of characters that need to be quoted for the copy command.
_SQL_TRANSLATION = {ord(u'\\'): u'\\\\',
ord(u'\t'): u'\\t',
ord(u'\n'): u'\\n'}
class CopyBuffer: | """
def __init__(self):
self.buffer = io.StringIO()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self.buffer is not None:
self.buffer.close()
def add(self, *data):
""" Add another row of data to the copy buffer.
"""
first = True
for column in data:
if first:
first = False
else:
self.buffer.write('\t')
if column is None:
self.buffer.write('\\N')
else:
self.buffer.write(str(column).translate(_SQL_TRANSLATION))
self.buffer.write('\n')
def copy_out(self, cur, table, columns=None):
""" Copy all collected data into the given table.
"""
if self.buffer.tell() > 0:
self.buffer.seek(0)
cur.copy_from(self.buffer, table, columns=columns) | """ Data collector for the copy_from command. | random_line_split |
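A quick usage sketch of `CopyBuffer` for context -- the psycopg2 connection, table and column names below are hypothetical placeholders:

    import psycopg2

    conn = psycopg2.connect('dbname=nominatim')        # assumed DSN
    with conn.cursor() as cur, CopyBuffer() as buf:
        buf.add(1, 'first\tvalue', None)               # None is emitted as \N
        buf.add(2, 'second value', 'x')
        buf.copy_out(cur, 'my_table', columns=('id', 'name', 'extra'))
    conn.commit()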
utils.py | """
Helper functions for handling DB accesses.
"""
import subprocess
import logging
import gzip
import io
from nominatim.db.connection import get_pg_env
from nominatim.errors import UsageError
LOG = logging.getLogger()
def _pipe_to_proc(proc, fdesc):
chunk = fdesc.read(2048)
while chunk and proc.poll() is None:
try:
proc.stdin.write(chunk)
except BrokenPipeError as exc:
raise UsageError("Failed to execute SQL file.") from exc
chunk = fdesc.read(2048)
return len(chunk)
def execute_file(dsn, fname, ignore_errors=False, pre_code=None, post_code=None):
""" Read an SQL file and run its contents against the given database
using psql. Use `pre_code` and `post_code` to run extra commands
before or after executing the file. The commands are run within the
same session, so they may be used to wrap the file execution in a
transaction.
"""
cmd = ['psql']
if not ignore_errors:
cmd.extend(('-v', 'ON_ERROR_STOP=1'))
if not LOG.isEnabledFor(logging.INFO):
cmd.append('--quiet')
proc = subprocess.Popen(cmd, env=get_pg_env(dsn), stdin=subprocess.PIPE)
try:
if not LOG.isEnabledFor(logging.INFO):
proc.stdin.write('set client_min_messages to WARNING;'.encode('utf-8'))
if pre_code:
proc.stdin.write((pre_code + ';').encode('utf-8'))
if fname.suffix == '.gz':
with gzip.open(str(fname), 'rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
else:
with fname.open('rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
if remain == 0 and post_code:
proc.stdin.write((';' + post_code).encode('utf-8'))
finally:
proc.stdin.close()
ret = proc.wait()
if ret != 0 or remain > 0:
raise UsageError("Failed to execute SQL file.")
# List of characters that need to be quoted for the copy command.
_SQL_TRANSLATION = {ord(u'\\'): u'\\\\',
ord(u'\t'): u'\\t',
ord(u'\n'): u'\\n'}
class CopyBuffer:
""" Data collector for the copy_from command.
"""
def __init__(self):
self.buffer = io.StringIO()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self.buffer is not None:
self.buffer.close()
def add(self, *data):
""" Add another row of data to the copy buffer.
"""
first = True
for column in data:
if first:
first = False
else:
self.buffer.write('\t')
if column is None:
self.buffer.write('\\N')
else:
self.buffer.write(str(column).translate(_SQL_TRANSLATION))
self.buffer.write('\n')
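    # For reference, _SQL_TRANSLATION (defined above) quotes the three
    # characters that are significant to PostgreSQL's text COPY format;
    # illustratively:
    #   u'a\tb\nc\\d'.translate(_SQL_TRANSLATION) == u'a\\tb\\nc\\\\d'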
def | (self, cur, table, columns=None):
""" Copy all collected data into the given table.
"""
if self.buffer.tell() > 0:
self.buffer.seek(0)
cur.copy_from(self.buffer, table, columns=columns)
| copy_out | identifier_name |
utils.py | """
Helper functions for handling DB accesses.
"""
import subprocess
import logging
import gzip
import io
from nominatim.db.connection import get_pg_env
from nominatim.errors import UsageError
LOG = logging.getLogger()
def _pipe_to_proc(proc, fdesc):
chunk = fdesc.read(2048)
while chunk and proc.poll() is None:
try:
proc.stdin.write(chunk)
except BrokenPipeError as exc:
raise UsageError("Failed to execute SQL file.") from exc
chunk = fdesc.read(2048)
return len(chunk)
def execute_file(dsn, fname, ignore_errors=False, pre_code=None, post_code=None):
""" Read an SQL file and run its contents against the given database
using psql. Use `pre_code` and `post_code` to run extra commands
before or after executing the file. The commands are run within the
same session, so they may be used to wrap the file execution in a
transaction.
"""
cmd = ['psql']
if not ignore_errors:
cmd.extend(('-v', 'ON_ERROR_STOP=1'))
if not LOG.isEnabledFor(logging.INFO):
cmd.append('--quiet')
proc = subprocess.Popen(cmd, env=get_pg_env(dsn), stdin=subprocess.PIPE)
try:
if not LOG.isEnabledFor(logging.INFO):
proc.stdin.write('set client_min_messages to WARNING;'.encode('utf-8'))
if pre_code:
proc.stdin.write((pre_code + ';').encode('utf-8'))
if fname.suffix == '.gz':
with gzip.open(str(fname), 'rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
else:
with fname.open('rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
if remain == 0 and post_code:
proc.stdin.write((';' + post_code).encode('utf-8'))
finally:
proc.stdin.close()
ret = proc.wait()
if ret != 0 or remain > 0:
raise UsageError("Failed to execute SQL file.")
# List of characters that need to be quoted for the copy command.
_SQL_TRANSLATION = {ord(u'\\'): u'\\\\',
ord(u'\t'): u'\\t',
ord(u'\n'): u'\\n'}
class CopyBuffer:
""" Data collector for the copy_from command.
"""
def __init__(self):
self.buffer = io.StringIO()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
|
def add(self, *data):
""" Add another row of data to the copy buffer.
"""
first = True
for column in data:
if first:
first = False
else:
self.buffer.write('\t')
if column is None:
self.buffer.write('\\N')
else:
self.buffer.write(str(column).translate(_SQL_TRANSLATION))
self.buffer.write('\n')
def copy_out(self, cur, table, columns=None):
""" Copy all collected data into the given table.
"""
if self.buffer.tell() > 0:
self.buffer.seek(0)
cur.copy_from(self.buffer, table, columns=columns)
| if self.buffer is not None:
self.buffer.close() | identifier_body |
utils.py | """
Helper functions for handling DB accesses.
"""
import subprocess
import logging
import gzip
import io
from nominatim.db.connection import get_pg_env
from nominatim.errors import UsageError
LOG = logging.getLogger()
def _pipe_to_proc(proc, fdesc):
chunk = fdesc.read(2048)
while chunk and proc.poll() is None:
try:
proc.stdin.write(chunk)
except BrokenPipeError as exc:
raise UsageError("Failed to execute SQL file.") from exc
chunk = fdesc.read(2048)
return len(chunk)
def execute_file(dsn, fname, ignore_errors=False, pre_code=None, post_code=None):
""" Read an SQL file and run its contents against the given database
using psql. Use `pre_code` and `post_code` to run extra commands
before or after executing the file. The commands are run within the
same session, so they may be used to wrap the file execution in a
transaction.
"""
cmd = ['psql']
if not ignore_errors:
cmd.extend(('-v', 'ON_ERROR_STOP=1'))
if not LOG.isEnabledFor(logging.INFO):
cmd.append('--quiet')
proc = subprocess.Popen(cmd, env=get_pg_env(dsn), stdin=subprocess.PIPE)
try:
if not LOG.isEnabledFor(logging.INFO):
proc.stdin.write('set client_min_messages to WARNING;'.encode('utf-8'))
if pre_code:
proc.stdin.write((pre_code + ';').encode('utf-8'))
if fname.suffix == '.gz':
|
else:
with fname.open('rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc)
if remain == 0 and post_code:
proc.stdin.write((';' + post_code).encode('utf-8'))
finally:
proc.stdin.close()
ret = proc.wait()
if ret != 0 or remain > 0:
raise UsageError("Failed to execute SQL file.")
# List of characters that need to be quoted for the copy command.
_SQL_TRANSLATION = {ord(u'\\'): u'\\\\',
ord(u'\t'): u'\\t',
ord(u'\n'): u'\\n'}
class CopyBuffer:
""" Data collector for the copy_from command.
"""
def __init__(self):
self.buffer = io.StringIO()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self.buffer is not None:
self.buffer.close()
def add(self, *data):
""" Add another row of data to the copy buffer.
"""
first = True
for column in data:
if first:
first = False
else:
self.buffer.write('\t')
if column is None:
self.buffer.write('\\N')
else:
self.buffer.write(str(column).translate(_SQL_TRANSLATION))
self.buffer.write('\n')
def copy_out(self, cur, table, columns=None):
""" Copy all collected data into the given table.
"""
if self.buffer.tell() > 0:
self.buffer.seek(0)
cur.copy_from(self.buffer, table, columns=columns)
| with gzip.open(str(fname), 'rb') as fdesc:
remain = _pipe_to_proc(proc, fdesc) | conditional_block |
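As the docstring of `execute_file` notes, `pre_code` and `post_code` run in the same psql session as the file itself, so they can wrap it in a single transaction. A minimal sketch (the DSN and file path are placeholders):

    from pathlib import Path

    execute_file('dbname=nominatim', Path('sql/functions.sql'),
                 pre_code='BEGIN', post_code='COMMIT')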
_app.ts | ///<reference path='../typings/tsd.d.ts' />
module <%= moduleName %> {
'use strict';
/* @ngdoc object
* @name <%= moduleName %>
* @description
*
*/
angular | 'ngAria',<% } %><% if (framework === 'material') { %>
'ngMaterial',<% } %>
<% if (ngRoute) { %>'ngRoute'<% } else { %>'ui.router'<% } %><% if (framework === 'angularstrap') { %>,
'mgcrea.ngStrap'<% } %><% if (framework === 'uibootstrap') { %>,
'ui.bootstrap'<% } %><% if (framework === 'foundation') { %>,
'mm.foundation'<% } %>
]);
} | .module('<%= moduleName %>', [<% if (bower.indexOf('aria') > -1) { %> | random_line_split |
PRESUBMIT.py | #!python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Additional presubmit script. This will be run for changes to files in this
# subdirectory, as well as the root syzygy/PRESUBMIT.py.
#
# This script will be read as a string and interpreted, so __file__ is not
# available. However, it is guaranteed to be run with this file's directory as
# the current working directory.
def CheckChange(input_api, output_api, dummy_committing):
# We only check Python files in this tree. The others are checked by the
# PRESUBMIT in the root Syzygy directory.
white_list = [r'^.*\.py$']
black_list = []
disabled_warnings = []
results = input_api.canned_checks.RunPylint(
input_api,
output_api,
white_list=white_list,
black_list=black_list,
disabled_warnings=disabled_warnings)
return results
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api, False)
def | (input_api, output_api):
return CheckChange(input_api, output_api, True)
| CheckChangeOnCommit | identifier_name |
PRESUBMIT.py | #!python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Additional presubmit script. This will be run for changes to files in this
# subdirectory, as well as the root syzygy/PRESUBMIT.py.
#
# This script will be read as a string and interpreted, so __file__ is not
# available. However, it is guaranteed to be run with this file's directory as
# the current working directory.
def CheckChange(input_api, output_api, dummy_committing):
# We only check Python files in this tree. The others are checked by the
# PRESUBMIT in the root Syzygy directory.
|
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api, False)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api, True)
| white_list = [r'^.*\.py$']
black_list = []
disabled_warnings = []
results = input_api.canned_checks.RunPylint(
input_api,
output_api,
white_list=white_list,
black_list=black_list,
disabled_warnings=disabled_warnings)
return results | identifier_body |
PRESUBMIT.py | #!python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Additional presubmit script. This will be run for changes to files in this
# subdirectory, as well as the root syzygy/PRESUBMIT.py.
#
# This script will be read as a string and interpreted, so __file__ is not
# available. However, it is guaranteed to be run with this file's directory as
# the current working directory.
def CheckChange(input_api, output_api, dummy_committing):
# We only check Python files in this tree. The others are checked by the
# PRESUBMIT in the root Syzygy directory.
white_list = [r'^.*\.py$']
black_list = []
disabled_warnings = []
results = input_api.canned_checks.RunPylint(
input_api,
output_api,
white_list=white_list,
black_list=black_list,
disabled_warnings=disabled_warnings)
|
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api, True) | return results
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api, False) | random_line_split |
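The `white_list` handed to `RunPylint` above is a plain regular expression matched against affected file paths; a quick illustrative check (the paths are made up):

    import re

    pattern = re.compile(r'^.*\.py$')
    assert pattern.match('syzygy/scripts/foo.py')      # selected for pylint
    assert not pattern.match('syzygy/scripts/foo.cc')  # left to the root PRESUBMIT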
check_reads.py | import os
import sys
import pysam
import random
from collections import Counter
from .step import StepChunk
from ..mlib import util
from ..mlib.fq_idx import FastqIndex
MIN_SEED_SIZE = 400
MIN_COV = 10.
class CheckReadsStep(StepChunk):
@staticmethod
def get_steps(options):
assert os.path.isfile(options.input_fqs), \
"fastqs {} not found".format(options.input_fqs)
yield CheckReadsStep(options, options.input_fqs)
def outpaths(self, final=False):
paths = {}
paths['pass.file'] = os.path.join(self.outdir, 'pass')
paths['index.file'] = FastqIndex.get_index_path(self.nfq_path)
paths['bins.p'] = self.options.bins_pickle_path
return paths
@property
def outdir(self):
return os.path.join(
self.options.results_dir,
self.__class__.__name__,
str(self),
)
def __init__(
self,
options,
fq_path,
):
self.options = options
self.fq_path = fq_path
#self.nfq_path = fq_path[:-3]
self.nfq_path = fq_path
util.mkdir_p(self.outdir)
| self.__class__.__name__,
self.__fqid(),
)
def run(self):
self.logger.broadcast('index fastq {}'.format(self.nfq_path))
with FastqIndex(self.nfq_path, self.logger) as idx:
fq_num_se_bcoded = idx.num_se_bcoded
# check for barcodes in fastq
assert idx.num_bcodes > 0, \
"no barcodes specified in fastq {}".format(self.fq_path)
if idx.num_se * 0.8 > idx.num_se_bcoded:
print \
'''lower than expected fraction ({:2.2f}%) of barcoded reads detected in fastq {}
'''.format(100.*idx.num_se_bcoded / idx.num_se, self.fq_path)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
# use cheat seeds if specified (for debugging)
if self.options.cheat_seeds:
self.logger.broadcast('using cheat seeds file: {}'.format(self.options.cheat_seeds))
seeds = set()
with open(self.options.cheat_seeds) as fin:
for line in fin:
seed = line.strip()
seeds.add(seed)
self.logger.broadcast(' - loaded {}'.format(len(seeds)))
seeds = list(seeds)
# use read mappings from *bam to select seeds without high enough input
# coverage
else:
self.logger.broadcast('get seed contigs from input assembly')
ctg_covs, bam_num_se_bcoded = self.get_bam_stats()
if bam_num_se_bcoded < 0.8 * fq_num_se_bcoded:
print \
'''lower than expected amount (~{:2.2f}%) of barcoded reads from fastq {} detected in bam {}
'''.format(
100.*bam_num_se_bcoded / fq_num_se_bcoded,
self.fq_path,
self.options.reads_ctg_bam_path,
)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
seeds = self.get_seeds(ctg_covs)
random.shuffle(seeds)
    # split seed contigs into bins so that there are no more than ~4000 bins
bins = []
group_size = max(1, len(seeds) / 4000)
for i, seed_group in \
enumerate(util.grouped(seeds, group_size, slop=True)):
binid = 'bin.{}'.format(i)
bins.append((binid, seed_group))
self.logger.broadcast('created {} bins from seeds'.format(len(bins)))
util.write_pickle(self.options.bins_pickle_path, bins)
passfile_path = os.path.join(self.outdir, 'pass')
util.touch(passfile_path)
self.logger.broadcast('done')
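  # Illustrative note (not part of the pipeline): assuming util.grouped(seq,
  # n, slop=True) yields consecutive chunks of n items, with the final chunk
  # allowed to run short, the binning in run() above is roughly equivalent to
  #   group_size = max(1, len(seeds) / 4000)   # Python 2 integer division
  #   bins = [('bin.%d' % i, seeds[j:j + group_size])
  #           for i, j in enumerate(range(0, len(seeds), group_size))]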
def get_seeds(self, ctg_covs):
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
seeds = ctg_size_map.keys()
seeds = filter(
lambda(c): (
ctg_size_map[c] >= MIN_SEED_SIZE and
ctg_covs[c] >= MIN_COV
),
seeds,
)
self.logger.broadcast(' {} total inputs seeds covering {} bases'.format(
len(ctg_size_map), sum(ctg_size_map.values())
))
self.logger.broadcast(' {} input seed contigs >= {}bp and >= {}x coverage covering {} bases'.format(
len(seeds),
MIN_SEED_SIZE,
MIN_COV,
sum(map(lambda(c): ctg_size_map[c], seeds)),
))
return seeds
def get_bam_stats(self):
ctg_counts_path = os.path.join(self.options.working_dir, 'ctg_counts.p')
if os.path.isfile(ctg_counts_path):
return util.load_pickle(ctg_counts_path)
self.logger.broadcast('computing seed coverages (required pass thru *bam)')
bam_fin = pysam.Samfile(self.options.reads_ctg_bam_path, 'rb')
ctg_bases = Counter()
num_se_bcoded = 0
for i, read in enumerate(bam_fin):
if not read.is_secondary and util.get_barcode(read) != None:
num_se_bcoded += 1
if read.is_unmapped:
continue
seed_ctg = bam_fin.getrname(read.tid)
ctg_bases[seed_ctg] += read.query_alignment_length
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
ctg_covs = Counter(dict(map(
lambda(c, b) : (c, 1. * b / ctg_size_map[c]),
ctg_bases.iteritems()
)))
util.write_pickle(ctg_counts_path, (ctg_covs, num_se_bcoded))
return ctg_covs, num_se_bcoded | def __fqid(self):
return os.path.basename(os.path.dirname(os.path.dirname(self.fq_path)))
def __str__(self):
return '{}_{}'.format( | random_line_split |
check_reads.py | import os
import sys
import pysam
import random
from collections import Counter
from .step import StepChunk
from ..mlib import util
from ..mlib.fq_idx import FastqIndex
MIN_SEED_SIZE = 400
MIN_COV = 10.
class CheckReadsStep(StepChunk):
@staticmethod
def get_steps(options):
assert os.path.isfile(options.input_fqs), \
"fastqs {} not found".format(options.input_fqs)
yield CheckReadsStep(options, options.input_fqs)
def outpaths(self, final=False):
paths = {}
paths['pass.file'] = os.path.join(self.outdir, 'pass')
paths['index.file'] = FastqIndex.get_index_path(self.nfq_path)
paths['bins.p'] = self.options.bins_pickle_path
return paths
@property
def | (self):
return os.path.join(
self.options.results_dir,
self.__class__.__name__,
str(self),
)
def __init__(
self,
options,
fq_path,
):
self.options = options
self.fq_path = fq_path
#self.nfq_path = fq_path[:-3]
self.nfq_path = fq_path
util.mkdir_p(self.outdir)
def __fqid(self):
return os.path.basename(os.path.dirname(os.path.dirname(self.fq_path)))
def __str__(self):
return '{}_{}'.format(
self.__class__.__name__,
self.__fqid(),
)
def run(self):
self.logger.broadcast('index fastq {}'.format(self.nfq_path))
with FastqIndex(self.nfq_path, self.logger) as idx:
fq_num_se_bcoded = idx.num_se_bcoded
# check for barcodes in fastq
assert idx.num_bcodes > 0, \
"no barcodes specified in fastq {}".format(self.fq_path)
if idx.num_se * 0.8 > idx.num_se_bcoded:
print \
'''lower than expected fraction ({:2.2f}%) of barcoded reads detected in fastq {}
'''.format(100.*idx.num_se_bcoded / idx.num_se, self.fq_path)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
# use cheat seeds if specified (for debugging)
if self.options.cheat_seeds:
self.logger.broadcast('using cheat seeds file: {}'.format(self.options.cheat_seeds))
seeds = set()
with open(self.options.cheat_seeds) as fin:
for line in fin:
seed = line.strip()
seeds.add(seed)
self.logger.broadcast(' - loaded {}'.format(len(seeds)))
seeds = list(seeds)
# use read mappings from *bam to select seeds without high enough input
# coverage
else:
self.logger.broadcast('get seed contigs from input assembly')
ctg_covs, bam_num_se_bcoded = self.get_bam_stats()
if bam_num_se_bcoded < 0.8 * fq_num_se_bcoded:
print \
'''lower than expected amount (~{:2.2f}%) of barcoded reads from fastq {} detected in bam {}
'''.format(
100.*bam_num_se_bcoded / fq_num_se_bcoded,
self.fq_path,
self.options.reads_ctg_bam_path,
)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
seeds = self.get_seeds(ctg_covs)
random.shuffle(seeds)
    # split seed contigs into bins so that there are no more than ~4000 bins
bins = []
group_size = max(1, len(seeds) / 4000)
for i, seed_group in \
enumerate(util.grouped(seeds, group_size, slop=True)):
binid = 'bin.{}'.format(i)
bins.append((binid, seed_group))
self.logger.broadcast('created {} bins from seeds'.format(len(bins)))
util.write_pickle(self.options.bins_pickle_path, bins)
passfile_path = os.path.join(self.outdir, 'pass')
util.touch(passfile_path)
self.logger.broadcast('done')
def get_seeds(self, ctg_covs):
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
seeds = ctg_size_map.keys()
seeds = filter(
lambda(c): (
ctg_size_map[c] >= MIN_SEED_SIZE and
ctg_covs[c] >= MIN_COV
),
seeds,
)
self.logger.broadcast(' {} total inputs seeds covering {} bases'.format(
len(ctg_size_map), sum(ctg_size_map.values())
))
self.logger.broadcast(' {} input seed contigs >= {}bp and >= {}x coverage covering {} bases'.format(
len(seeds),
MIN_SEED_SIZE,
MIN_COV,
sum(map(lambda(c): ctg_size_map[c], seeds)),
))
return seeds
def get_bam_stats(self):
ctg_counts_path = os.path.join(self.options.working_dir, 'ctg_counts.p')
if os.path.isfile(ctg_counts_path):
return util.load_pickle(ctg_counts_path)
self.logger.broadcast('computing seed coverages (required pass thru *bam)')
bam_fin = pysam.Samfile(self.options.reads_ctg_bam_path, 'rb')
ctg_bases = Counter()
num_se_bcoded = 0
for i, read in enumerate(bam_fin):
if not read.is_secondary and util.get_barcode(read) != None:
num_se_bcoded += 1
if read.is_unmapped:
continue
seed_ctg = bam_fin.getrname(read.tid)
ctg_bases[seed_ctg] += read.query_alignment_length
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
ctg_covs = Counter(dict(map(
lambda(c, b) : (c, 1. * b / ctg_size_map[c]),
ctg_bases.iteritems()
)))
util.write_pickle(ctg_counts_path, (ctg_covs, num_se_bcoded))
return ctg_covs, num_se_bcoded
| outdir | identifier_name |
check_reads.py | import os
import sys
import pysam
import random
from collections import Counter
from .step import StepChunk
from ..mlib import util
from ..mlib.fq_idx import FastqIndex
MIN_SEED_SIZE = 400
MIN_COV = 10.
class CheckReadsStep(StepChunk):
@staticmethod
def get_steps(options):
assert os.path.isfile(options.input_fqs), \
"fastqs {} not found".format(options.input_fqs)
yield CheckReadsStep(options, options.input_fqs)
def outpaths(self, final=False):
paths = {}
paths['pass.file'] = os.path.join(self.outdir, 'pass')
paths['index.file'] = FastqIndex.get_index_path(self.nfq_path)
paths['bins.p'] = self.options.bins_pickle_path
return paths
@property
def outdir(self):
return os.path.join(
self.options.results_dir,
self.__class__.__name__,
str(self),
)
def __init__(
self,
options,
fq_path,
):
self.options = options
self.fq_path = fq_path
#self.nfq_path = fq_path[:-3]
self.nfq_path = fq_path
util.mkdir_p(self.outdir)
def __fqid(self):
return os.path.basename(os.path.dirname(os.path.dirname(self.fq_path)))
def __str__(self):
return '{}_{}'.format(
self.__class__.__name__,
self.__fqid(),
)
def run(self):
self.logger.broadcast('index fastq {}'.format(self.nfq_path))
with FastqIndex(self.nfq_path, self.logger) as idx:
fq_num_se_bcoded = idx.num_se_bcoded
# check for barcodes in fastq
assert idx.num_bcodes > 0, \
"no barcodes specified in fastq {}".format(self.fq_path)
if idx.num_se * 0.8 > idx.num_se_bcoded:
print \
'''lower than expected fraction ({:2.2f}%) of barcoded reads detected in fastq {}
'''.format(100.*idx.num_se_bcoded / idx.num_se, self.fq_path)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
# use cheat seeds if specified (for debugging)
if self.options.cheat_seeds:
self.logger.broadcast('using cheat seeds file: {}'.format(self.options.cheat_seeds))
seeds = set()
with open(self.options.cheat_seeds) as fin:
for line in fin:
seed = line.strip()
seeds.add(seed)
self.logger.broadcast(' - loaded {}'.format(len(seeds)))
seeds = list(seeds)
# use read mappings from *bam to select seeds without high enough input
# coverage
else:
self.logger.broadcast('get seed contigs from input assembly')
ctg_covs, bam_num_se_bcoded = self.get_bam_stats()
if bam_num_se_bcoded < 0.8 * fq_num_se_bcoded:
print \
'''lower than expected amount (~{:2.2f}%) of barcoded reads from fastq {} detected in bam {}
'''.format(
100.*bam_num_se_bcoded / fq_num_se_bcoded,
self.fq_path,
self.options.reads_ctg_bam_path,
)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
seeds = self.get_seeds(ctg_covs)
random.shuffle(seeds)
    # split seed contigs into bins so that there are no more than ~4000 bins
bins = []
group_size = max(1, len(seeds) / 4000)
for i, seed_group in \
enumerate(util.grouped(seeds, group_size, slop=True)):
|
self.logger.broadcast('created {} bins from seeds'.format(len(bins)))
util.write_pickle(self.options.bins_pickle_path, bins)
passfile_path = os.path.join(self.outdir, 'pass')
util.touch(passfile_path)
self.logger.broadcast('done')
def get_seeds(self, ctg_covs):
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
seeds = ctg_size_map.keys()
seeds = filter(
lambda(c): (
ctg_size_map[c] >= MIN_SEED_SIZE and
ctg_covs[c] >= MIN_COV
),
seeds,
)
self.logger.broadcast(' {} total inputs seeds covering {} bases'.format(
len(ctg_size_map), sum(ctg_size_map.values())
))
self.logger.broadcast(' {} input seed contigs >= {}bp and >= {}x coverage covering {} bases'.format(
len(seeds),
MIN_SEED_SIZE,
MIN_COV,
sum(map(lambda(c): ctg_size_map[c], seeds)),
))
return seeds
def get_bam_stats(self):
ctg_counts_path = os.path.join(self.options.working_dir, 'ctg_counts.p')
if os.path.isfile(ctg_counts_path):
return util.load_pickle(ctg_counts_path)
self.logger.broadcast('computing seed coverages (required pass thru *bam)')
bam_fin = pysam.Samfile(self.options.reads_ctg_bam_path, 'rb')
ctg_bases = Counter()
num_se_bcoded = 0
for i, read in enumerate(bam_fin):
if not read.is_secondary and util.get_barcode(read) != None:
num_se_bcoded += 1
if read.is_unmapped:
continue
seed_ctg = bam_fin.getrname(read.tid)
ctg_bases[seed_ctg] += read.query_alignment_length
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
ctg_covs = Counter(dict(map(
lambda(c, b) : (c, 1. * b / ctg_size_map[c]),
ctg_bases.iteritems()
)))
util.write_pickle(ctg_counts_path, (ctg_covs, num_se_bcoded))
return ctg_covs, num_se_bcoded
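  # In miniature (made-up numbers): 4000 aligned bases on a 400bp contig give
  # a coverage of 10.0x, which passes MIN_COV in get_seeds; 500 bases on a
  # 100bp contig give 5.0x and are filtered out.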
| binid = 'bin.{}'.format(i)
bins.append((binid, seed_group)) | conditional_block |
check_reads.py | import os
import sys
import pysam
import random
from collections import Counter
from .step import StepChunk
from ..mlib import util
from ..mlib.fq_idx import FastqIndex
MIN_SEED_SIZE = 400
MIN_COV = 10.
class CheckReadsStep(StepChunk):
@staticmethod
def get_steps(options):
assert os.path.isfile(options.input_fqs), \
"fastqs {} not found".format(options.input_fqs)
yield CheckReadsStep(options, options.input_fqs)
def outpaths(self, final=False):
paths = {}
paths['pass.file'] = os.path.join(self.outdir, 'pass')
paths['index.file'] = FastqIndex.get_index_path(self.nfq_path)
paths['bins.p'] = self.options.bins_pickle_path
return paths
@property
def outdir(self):
return os.path.join(
self.options.results_dir,
self.__class__.__name__,
str(self),
)
def __init__(
self,
options,
fq_path,
):
|
def __fqid(self):
return os.path.basename(os.path.dirname(os.path.dirname(self.fq_path)))
def __str__(self):
return '{}_{}'.format(
self.__class__.__name__,
self.__fqid(),
)
def run(self):
self.logger.broadcast('index fastq {}'.format(self.nfq_path))
with FastqIndex(self.nfq_path, self.logger) as idx:
fq_num_se_bcoded = idx.num_se_bcoded
# check for barcodes in fastq
assert idx.num_bcodes > 0, \
"no barcodes specified in fastq {}".format(self.fq_path)
if idx.num_se * 0.8 > idx.num_se_bcoded:
print \
'''lower than expected fraction ({:2.2f}%) of barcoded reads detected in fastq {}
'''.format(100.*idx.num_se_bcoded / idx.num_se, self.fq_path)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
# use cheat seeds if specified (for debugging)
if self.options.cheat_seeds:
self.logger.broadcast('using cheat seeds file: {}'.format(self.options.cheat_seeds))
seeds = set()
with open(self.options.cheat_seeds) as fin:
for line in fin:
seed = line.strip()
seeds.add(seed)
self.logger.broadcast(' - loaded {}'.format(len(seeds)))
seeds = list(seeds)
# use read mappings from *bam to select seeds without high enough input
# coverage
else:
self.logger.broadcast('get seed contigs from input assembly')
ctg_covs, bam_num_se_bcoded = self.get_bam_stats()
if bam_num_se_bcoded < 0.8 * fq_num_se_bcoded:
print \
'''lower than expected amount (~{:2.2f}%) of barcoded reads from fastq {} detected in bam {}
'''.format(
100.*bam_num_se_bcoded / fq_num_se_bcoded,
self.fq_path,
self.options.reads_ctg_bam_path,
)
if self.options.force_reads:
print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
else:
print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
sys.exit(1)
seeds = self.get_seeds(ctg_covs)
random.shuffle(seeds)
    # split seed contigs into bins so that there are no more than ~4000 bins
bins = []
group_size = max(1, len(seeds) / 4000)
for i, seed_group in \
enumerate(util.grouped(seeds, group_size, slop=True)):
binid = 'bin.{}'.format(i)
bins.append((binid, seed_group))
self.logger.broadcast('created {} bins from seeds'.format(len(bins)))
util.write_pickle(self.options.bins_pickle_path, bins)
passfile_path = os.path.join(self.outdir, 'pass')
util.touch(passfile_path)
self.logger.broadcast('done')
def get_seeds(self, ctg_covs):
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
seeds = ctg_size_map.keys()
seeds = filter(
lambda(c): (
ctg_size_map[c] >= MIN_SEED_SIZE and
ctg_covs[c] >= MIN_COV
),
seeds,
)
self.logger.broadcast(' {} total inputs seeds covering {} bases'.format(
len(ctg_size_map), sum(ctg_size_map.values())
))
self.logger.broadcast(' {} input seed contigs >= {}bp and >= {}x coverage covering {} bases'.format(
len(seeds),
MIN_SEED_SIZE,
MIN_COV,
sum(map(lambda(c): ctg_size_map[c], seeds)),
))
return seeds
def get_bam_stats(self):
ctg_counts_path = os.path.join(self.options.working_dir, 'ctg_counts.p')
if os.path.isfile(ctg_counts_path):
return util.load_pickle(ctg_counts_path)
self.logger.broadcast('computing seed coverages (required pass thru *bam)')
bam_fin = pysam.Samfile(self.options.reads_ctg_bam_path, 'rb')
ctg_bases = Counter()
num_se_bcoded = 0
for i, read in enumerate(bam_fin):
if not read.is_secondary and util.get_barcode(read) != None:
num_se_bcoded += 1
if read.is_unmapped:
continue
seed_ctg = bam_fin.getrname(read.tid)
ctg_bases[seed_ctg] += read.query_alignment_length
ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
ctg_covs = Counter(dict(map(
lambda(c, b) : (c, 1. * b / ctg_size_map[c]),
ctg_bases.iteritems()
)))
util.write_pickle(ctg_counts_path, (ctg_covs, num_se_bcoded))
return ctg_covs, num_se_bcoded
| self.options = options
self.fq_path = fq_path
#self.nfq_path = fq_path[:-3]
self.nfq_path = fq_path
util.mkdir_p(self.outdir) | identifier_body |
__init__.py | #
# Copyright 2008,2009 Free Software Foundation, Inc.
#
# This application is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The presence of this file turns this directory into a Python package
'''
This is the GNU Radio AX25 module. Place your Python package
description here (python/__init__.py).
'''
# import swig generated symbols into the ax25 namespace
try:
# this might fail if the module is python-only
from ax25_swig import *
except ImportError:
pass |
# |
# import any pure python here
| random_line_split |
show-in-viewport.js | 'use strict';
var $ = require('./main')
, getViewportRect = require('./get-viewport-rect')
, getElementRect = require('./get-element-rect');
module.exports = $.showInViewport = function (el/*, options*/) {
var vpRect = getViewportRect()
, elRect = getElementRect(el)
, options = Object(arguments[1])
, padding = isNaN(options.padding) ? 0 : Number(options.padding)
, elTopLeft = { x: elRect.left, y: elRect.top }
, elBottomRight = { x: elRect.left + elRect.width, y: elRect.top + elRect.height }
, vpTopLeft = { x: vpRect.left + padding, y: vpRect.top + padding }
, vpBottomRight = { x: vpRect.left + vpRect.width - padding,
y: vpRect.top + vpRect.height - padding }
, diffPoint = { x: 0, y: 0 }, diff;
if (elBottomRight.x > vpBottomRight.x) {
// right beyond
diff = elBottomRight.x - vpBottomRight.x;
elTopLeft.x -= diff;
elBottomRight.x -= diff;
diffPoint.x -= diff;
}
if (elBottomRight.y > vpBottomRight.y) |
if (elTopLeft.x < vpTopLeft.x) {
// left beyond
diff = vpTopLeft.x - elTopLeft.x;
elTopLeft.x += diff;
elBottomRight.x += diff;
diffPoint.x += diff;
}
if (elTopLeft.y < vpTopLeft.y) {
// top beyond
diff = vpTopLeft.y - elTopLeft.y;
elTopLeft.y += diff;
elBottomRight.y += diff;
diffPoint.y += diff;
}
if (diffPoint.x) el.style.left = (el.offsetLeft + diffPoint.x) + "px";
if (diffPoint.y) el.style.top = (el.offsetTop + diffPoint.y) + "px";
};
| {
// bottom beyond
diff = elBottomRight.y - vpBottomRight.y;
elTopLeft.y -= diff;
elBottomRight.y -= diff;
diffPoint.y -= diff;
} | conditional_block |
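The clamping above is plain rectangle arithmetic: correct right/bottom overflow first, then let left/top corrections win. A language-neutral sketch of the same math (written in Python here, matching the other examples in this document):

    def shift_into_viewport(left, top, width, height, vp, padding=0):
        # vp is (vp_left, vp_top, vp_width, vp_height); returns (dx, dy).
        vp_left, vp_top, vp_width, vp_height = vp
        dx = min(0, (vp_left + vp_width - padding) - (left + width))  # right beyond
        dy = min(0, (vp_top + vp_height - padding) - (top + height))  # bottom beyond
        dx += max(0, (vp_left + padding) - (left + dx))               # left beyond
        dy += max(0, (vp_top + padding) - (top + dy))                 # top beyond
        return dx, dy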
show-in-viewport.js | 'use strict';
var $ = require('./main')
, getViewportRect = require('./get-viewport-rect')
, getElementRect = require('./get-element-rect');
module.exports = $.showInViewport = function (el/*, options*/) {
var vpRect = getViewportRect()
, elRect = getElementRect(el)
, options = Object(arguments[1])
, padding = isNaN(options.padding) ? 0 : Number(options.padding)
, elTopLeft = { x: elRect.left, y: elRect.top }
, elBottomRight = { x: elRect.left + elRect.width, y: elRect.top + elRect.height }
, vpTopLeft = { x: vpRect.left + padding, y: vpRect.top + padding }
, vpBottomRight = { x: vpRect.left + vpRect.width - padding,
y: vpRect.top + vpRect.height - padding }
, diffPoint = { x: 0, y: 0 }, diff;
if (elBottomRight.x > vpBottomRight.x) {
// right beyond
diff = elBottomRight.x - vpBottomRight.x;
elTopLeft.x -= diff;
elBottomRight.x -= diff;
diffPoint.x -= diff;
}
if (elBottomRight.y > vpBottomRight.y) {
// bottom beyond
diff = elBottomRight.y - vpBottomRight.y;
elTopLeft.y -= diff;
elBottomRight.y -= diff;
diffPoint.y -= diff;
}
if (elTopLeft.x < vpTopLeft.x) {
// left beyond
diff = vpTopLeft.x - elTopLeft.x;
elTopLeft.x += diff;
elBottomRight.x += diff; | }
if (elTopLeft.y < vpTopLeft.y) {
// top beyond
diff = vpTopLeft.y - elTopLeft.y;
elTopLeft.y += diff;
elBottomRight.y += diff;
diffPoint.y += diff;
}
if (diffPoint.x) el.style.left = (el.offsetLeft + diffPoint.x) + "px";
if (diffPoint.y) el.style.top = (el.offsetTop + diffPoint.y) + "px";
}; | diffPoint.x += diff; | random_line_split |
process_detail.py | #!/usr/bin/env python
# $Id$
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
from psutil._compat import namedtuple
def convert_bytes(n):
if n == 0:
return '0B'
symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i+1)*10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return '%.1f%s' % (value, s)
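# Illustrative values: convert_bytes(0) == '0B', convert_bytes(2048) == '2.0k',
# convert_bytes(3 * 1024 ** 2) == '3.0M'. Note that 0 < n < 1024 falls through
# both loops and returns None -- a quirk of the original script.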
def print_(a, b):
if sys.stdout.isatty():
fmt = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
else:
fmt = '%-15s %s' %(a, b)
print fmt
def run(pid):
p = psutil.Process(pid)
if p.parent:
parent = '(%s)' % p.parent.name
else:
parent = ''
    started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%m-%d %H:%M')
io = p.get_io_counters()
mem = p.get_memory_info()
mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
convert_bytes(mem.rss),
convert_bytes(mem.vms))
cpu_times = p.get_cpu_times()
cpu_percent = p.get_cpu_percent(0)
children = p.get_children()
files = p.get_open_files()
threads = p.get_threads()
connections = p.get_connections()
print_('pid', p.pid)
print_('name', p.name)
print_('exe', p.exe)
print_('parent', '%s %s' % (p.ppid, parent))
print_('cmdline', ' '.join(p.cmdline))
print_('started', started)
print_('user', p.username)
if os.name == 'posix':
print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
print_('terminal', p.terminal or '')
if hasattr(p, 'getcwd'):
print_('cwd', p.getcwd())
print_('memory', mem)
print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
cpu_times.user,
cpu_times.system))
print_('status', p.status)
print_('niceness', p.nice)
print_('num threads', p.get_num_threads())
if hasattr(p, 'get_io_counters'):
print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
(convert_bytes(io.read_bytes),
convert_bytes(io.write_bytes)))
if children:
print_('children', '')
for child in children:
print_('', 'pid=%s name=%s' % (child.pid, child.name))
if files:
print_('open files', '')
for file in files:
print_('', 'fd=%s %s ' % (file.fd, file.path))
if threads:
print_('running threads', '')
for thread in threads:
print_('', 'id=%s, user-time=%s, sys-time=%s' \
% (thread.id, thread.user_time, thread.system_time))
if connections:
print_('open connections', '')
for conn in connections:
type = 'TCP' if conn.type == socket.SOCK_STREAM else 'UDP'
lip, lport = conn.local_address
if not conn.remote_address:
rip, rport = '*', '*'
else:
rip, rport = conn.remote_address
print_('', '%s:%s -> %s:%s type=%s status=%s' \
% (lip, lport, rip, rport, type, conn.status))
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
sys.exit(run(os.getpid()))
elif len(argv) == 2:
|
else:
sys.exit('usage: %s [pid]' % __file__)
if __name__ == '__main__':
sys.exit(main())
| sys.exit(run(int(argv[1]))) | conditional_block |
process_detail.py | #!/usr/bin/env python
# $Id$
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
from psutil._compat import namedtuple
def convert_bytes(n):
|
def print_(a, b):
if sys.stdout.isatty():
fmt = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
else:
fmt = '%-15s %s' %(a, b)
print fmt
def run(pid):
p = psutil.Process(pid)
if p.parent:
parent = '(%s)' % p.parent.name
else:
parent = ''
    started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%m-%d %H:%M')
io = p.get_io_counters()
mem = p.get_memory_info()
mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
convert_bytes(mem.rss),
convert_bytes(mem.vms))
cpu_times = p.get_cpu_times()
cpu_percent = p.get_cpu_percent(0)
children = p.get_children()
files = p.get_open_files()
threads = p.get_threads()
connections = p.get_connections()
print_('pid', p.pid)
print_('name', p.name)
print_('exe', p.exe)
print_('parent', '%s %s' % (p.ppid, parent))
print_('cmdline', ' '.join(p.cmdline))
print_('started', started)
print_('user', p.username)
if os.name == 'posix':
print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
print_('terminal', p.terminal or '')
if hasattr(p, 'getcwd'):
print_('cwd', p.getcwd())
print_('memory', mem)
print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
cpu_times.user,
cpu_times.system))
print_('status', p.status)
print_('niceness', p.nice)
print_('num threads', p.get_num_threads())
if hasattr(p, 'get_io_counters'):
print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
(convert_bytes(io.read_bytes),
convert_bytes(io.write_bytes)))
if children:
print_('children', '')
for child in children:
print_('', 'pid=%s name=%s' % (child.pid, child.name))
if files:
print_('open files', '')
for file in files:
print_('', 'fd=%s %s ' % (file.fd, file.path))
if threads:
print_('running threads', '')
for thread in threads:
print_('', 'id=%s, user-time=%s, sys-time=%s' \
% (thread.id, thread.user_time, thread.system_time))
if connections:
print_('open connections', '')
for conn in connections:
type = 'TCP' if conn.type == socket.SOCK_STREAM else 'UDP'
lip, lport = conn.local_address
if not conn.remote_address:
rip, rport = '*', '*'
else:
rip, rport = conn.remote_address
print_('', '%s:%s -> %s:%s type=%s status=%s' \
% (lip, lport, rip, rport, type, conn.status))
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
sys.exit(run(os.getpid()))
elif len(argv) == 2:
sys.exit(run(int(argv[1])))
else:
sys.exit('usage: %s [pid]' % __file__)
if __name__ == '__main__':
sys.exit(main())
| if n == 0:
return '0B'
symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i+1)*10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return '%.1f%s' % (value, s) | identifier_body |
process_detail.py | #!/usr/bin/env python
# $Id$
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
from psutil._compat import namedtuple
def convert_bytes(n):
if n == 0:
return '0B'
symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i+1)*10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return '%.1f%s' % (value, s)
def print_(a, b):
if sys.stdout.isatty():
fmt = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
else:
fmt = '%-15s %s' %(a, b)
print fmt
def run(pid):
p = psutil.Process(pid)
if p.parent:
parent = '(%s)' % p.parent.name
else:
parent = ''
    started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%m-%d %H:%M')
io = p.get_io_counters()
mem = p.get_memory_info()
mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
convert_bytes(mem.rss),
convert_bytes(mem.vms))
cpu_times = p.get_cpu_times()
cpu_percent = p.get_cpu_percent(0)
children = p.get_children()
files = p.get_open_files()
threads = p.get_threads()
connections = p.get_connections()
print_('pid', p.pid)
print_('name', p.name)
print_('exe', p.exe)
print_('parent', '%s %s' % (p.ppid, parent))
print_('cmdline', ' '.join(p.cmdline))
print_('started', started)
print_('user', p.username)
if os.name == 'posix':
print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
print_('terminal', p.terminal or '')
if hasattr(p, 'getcwd'):
print_('cwd', p.getcwd())
print_('memory', mem)
print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
cpu_times.user,
cpu_times.system))
print_('status', p.status)
print_('niceness', p.nice)
print_('num threads', p.get_num_threads())
if hasattr(p, 'get_io_counters'):
print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
(convert_bytes(io.read_bytes),
convert_bytes(io.write_bytes)))
if children:
print_('children', '')
for child in children:
print_('', 'pid=%s name=%s' % (child.pid, child.name))
if files:
print_('open files', '')
for file in files:
print_('', 'fd=%s %s ' % (file.fd, file.path))
if threads:
print_('running threads', '')
for thread in threads:
print_('', 'id=%s, user-time=%s, sys-time=%s' \
% (thread.id, thread.user_time, thread.system_time))
if connections:
print_('open connections', '')
for conn in connections:
type = 'TCP' if conn.type == socket.SOCK_STREAM else 'UDP'
lip, lport = conn.local_address
if not conn.remote_address:
rip, rport = '*', '*'
else:
rip, rport = conn.remote_address
print_('', '%s:%s -> %s:%s type=%s status=%s' \
% (lip, lport, rip, rport, type, conn.status))
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
sys.exit(run(os.getpid()))
elif len(argv) == 2:
sys.exit(run(int(argv[1]))) |
if __name__ == '__main__':
sys.exit(main()) | else:
sys.exit('usage: %s [pid]' % __file__) | random_line_split |
process_detail.py | #!/usr/bin/env python
# $Id$
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
from psutil._compat import namedtuple
def convert_bytes(n):
if n == 0:
return '0B'
symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i+1)*10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return '%.1f%s' % (value, s)
def print_(a, b):
if sys.stdout.isatty():
fmt = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
else:
fmt = '%-15s %s' %(a, b)
print fmt
def run(pid):
p = psutil.Process(pid)
if p.parent:
parent = '(%s)' % p.parent.name
else:
parent = ''
    started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%m-%d %H:%M')
io = p.get_io_counters()
mem = p.get_memory_info()
mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
convert_bytes(mem.rss),
convert_bytes(mem.vms))
cpu_times = p.get_cpu_times()
cpu_percent = p.get_cpu_percent(0)
children = p.get_children()
files = p.get_open_files()
threads = p.get_threads()
connections = p.get_connections()
print_('pid', p.pid)
print_('name', p.name)
print_('exe', p.exe)
print_('parent', '%s %s' % (p.ppid, parent))
print_('cmdline', ' '.join(p.cmdline))
print_('started', started)
print_('user', p.username)
if os.name == 'posix':
print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
print_('terminal', p.terminal or '')
if hasattr(p, 'getcwd'):
print_('cwd', p.getcwd())
print_('memory', mem)
print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
cpu_times.user,
cpu_times.system))
print_('status', p.status)
print_('niceness', p.nice)
print_('num threads', p.get_num_threads())
if hasattr(p, 'get_io_counters'):
print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
(convert_bytes(io.read_bytes),
convert_bytes(io.write_bytes)))
if children:
print_('children', '')
for child in children:
print_('', 'pid=%s name=%s' % (child.pid, child.name))
if files:
print_('open files', '')
for file in files:
print_('', 'fd=%s %s ' % (file.fd, file.path))
if threads:
print_('running threads', '')
for thread in threads:
print_('', 'id=%s, user-time=%s, sys-time=%s' \
% (thread.id, thread.user_time, thread.system_time))
if connections:
print_('open connections', '')
for conn in connections:
type = 'TCP' if conn.type == socket.SOCK_STREAM else 'UDP'
lip, lport = conn.local_address
if not conn.remote_address:
rip, rport = '*', '*'
else:
rip, rport = conn.remote_address
print_('', '%s:%s -> %s:%s type=%s status=%s' \
% (lip, lport, rip, rport, type, conn.status))
def | (argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
sys.exit(run(os.getpid()))
elif len(argv) == 2:
sys.exit(run(int(argv[1])))
else:
sys.exit('usage: %s [pid]' % __file__)
if __name__ == '__main__':
sys.exit(main())
| main | identifier_name |
helpers.js | var libTools = require("../lib/tools");
/*
var fn = makeTimeoutFn(1500, null, "result2");
fn(1, "whatever2", 3, function (err, res) {
console.log(err, res); // null, result2
});
*/
exports.makeTimeoutFn = function makeTimeoutFn(ms) {
var args = Array.prototype.slice.call(arguments, 1);
return function () {
var that = this,
callback = arguments[arguments.length - 1];
setTimeout(function () {
callback && callback.apply(that, args);
}, ms);
}
};
exports.asyncMirror = function asyncMirror() {
var x = arguments[0],
callback = arguments[arguments.length - 1];
setTimeout(function () {
callback(null, String(x));
}, 100);
};
exports.asyncToUppercase = function asyncToUppercase(str, callback) {
setTimeout(function () {
if (str) {
callback(null, String(str).toUpperCase());
} else { | };
exports.asyncAddSub = function asyncAddSub(a, b, callback) {
setTimeout(function () {
if (libTools.isNumber(a) && libTools.isNumber(b)) {
callback(null, a + b, a - b);
} else {
callback(new Error("Invalid"));
}
}, 100);
};
exports.expectTimestamp = function expectTimestamp(expect, ts, target, precision) {
precision = precision || 35;
ts = ts.getTime ? ts.getTime() : ts;
target = target.getTime ? target.getTime() : target;
expect(ts).toBeGreaterThan(target - precision);
expect(ts).toBeLessThan(target + precision);
}; | callback(new Error("Invalid"));
}
}, 100); | random_line_split |
helpers.js | var libTools = require("../lib/tools");
/*
var fn = makeTimeoutFn(1500, null, "result2");
fn(1, "whatever2", 3, function (err, res) {
console.log(err, res); // null, result2
});
*/
exports.makeTimeoutFn = function makeTimeoutFn(ms) {
var args = Array.prototype.slice.call(arguments, 1);
return function () {
var that = this,
callback = arguments[arguments.length - 1];
setTimeout(function () {
callback && callback.apply(that, args);
}, ms);
}
};
exports.asyncMirror = function asyncMirror() {
var x = arguments[0],
callback = arguments[arguments.length - 1];
setTimeout(function () {
callback(null, String(x));
}, 100);
};
exports.asyncToUppercase = function asyncToUppercase(str, callback) {
setTimeout(function () {
if (str) {
callback(null, String(str).toUpperCase());
} else {
callback(new Error("Invalid"));
}
}, 100);
};
exports.asyncAddSub = function asyncAddSub(a, b, callback) {
setTimeout(function () {
if (libTools.isNumber(a) && libTools.isNumber(b)) {
callback(null, a + b, a - b);
} else |
}, 100);
};
exports.expectTimestamp = function expectTimestamp(expect, ts, target, precision) {
precision = precision || 35;
ts = ts.getTime ? ts.getTime() : ts;
target = target.getTime ? target.getTime() : target;
expect(ts).toBeGreaterThan(target - precision);
expect(ts).toBeLessThan(target + precision);
}; | {
callback(new Error("Invalid"));
} | conditional_block |
_9_1_geometry_shader_houses.rs | #![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
use std::ptr;
use std::mem;
use std::os::raw::c_void;
extern crate glfw;
use self::glfw::Context;
extern crate gl;
use self::gl::types::*;
use cgmath::{Point3};
use common::{process_events, processInput};
use shader::Shader;
use camera::Camera;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn | () {
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, VBO, VAO) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
// build and compile shaders
// -------------------------
let shader = Shader::with_geometry_shader(
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.vs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.fs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.gs"
);
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let points: [f32; 20] = [
-0.5, 0.5, 1.0, 0.0, 0.0, // top-left
0.5, 0.5, 0.0, 1.0, 0.0, // top-right
0.5, -0.5, 0.0, 0.0, 1.0, // bottom-right
-0.5, -0.5, 1.0, 1.0, 0.0 // bottom-left
];
// cube VAO
let (mut VAO, mut VBO) = (0, 0);
gl::GenVertexArrays(1, &mut VAO);
gl::GenBuffers(1, &mut VBO);
gl::BindVertexArray(VAO);
gl::BindBuffer(gl::ARRAY_BUFFER, VBO);
gl::BufferData(gl::ARRAY_BUFFER,
(points.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&points[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
let stride = 5 * mem::size_of::<GLfloat>() as GLsizei;
gl::EnableVertexAttribArray(0);
gl::VertexAttribPointer(0, 2, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (2 * mem::size_of::<GLfloat>()) as *const f32 as *const c_void);
gl::BindVertexArray(0);
(shader, VBO, VAO)
};
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
shader.useProgram();
gl::BindVertexArray(VAO);
gl::DrawArrays(gl::POINTS, 0, 4);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &VAO);
gl::DeleteBuffers(1, &VBO);
}
}
| main_4_9_1 | identifier_name |
_9_1_geometry_shader_houses.rs | #![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
use std::ptr;
use std::mem;
use std::os::raw::c_void;
extern crate glfw;
use self::glfw::Context;
extern crate gl;
use self::gl::types::*;
use cgmath::{Point3};
use common::{process_events, processInput};
use shader::Shader;
use camera::Camera;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_4_9_1() | {
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, VBO, VAO) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
// build and compile shaders
// -------------------------
let shader = Shader::with_geometry_shader(
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.vs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.fs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.gs"
);
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let points: [f32; 20] = [
-0.5, 0.5, 1.0, 0.0, 0.0, // top-left
0.5, 0.5, 0.0, 1.0, 0.0, // top-right
0.5, -0.5, 0.0, 0.0, 1.0, // bottom-right
-0.5, -0.5, 1.0, 1.0, 0.0 // bottom-left
];
// cube VAO
let (mut VAO, mut VBO) = (0, 0);
gl::GenVertexArrays(1, &mut VAO);
gl::GenBuffers(1, &mut VBO);
gl::BindVertexArray(VAO);
gl::BindBuffer(gl::ARRAY_BUFFER, VBO);
gl::BufferData(gl::ARRAY_BUFFER,
(points.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&points[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
let stride = 5 * mem::size_of::<GLfloat>() as GLsizei;
gl::EnableVertexAttribArray(0);
gl::VertexAttribPointer(0, 2, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (2 * mem::size_of::<GLfloat>()) as *const f32 as *const c_void);
gl::BindVertexArray(0);
(shader, VBO, VAO)
};
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
shader.useProgram();
gl::BindVertexArray(VAO);
gl::DrawArrays(gl::POINTS, 0, 4);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &VAO);
gl::DeleteBuffers(1, &VBO);
}
} | identifier_body |
|
_9_1_geometry_shader_houses.rs | #![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
use std::ptr;
use std::mem;
use std::os::raw::c_void;
extern crate glfw;
use self::glfw::Context;
extern crate gl;
use self::gl::types::*;
use cgmath::{Point3};
use common::{process_events, processInput};
use shader::Shader;
use camera::Camera;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_4_9_1() {
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, VBO, VAO) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
// build and compile shaders
// -------------------------
let shader = Shader::with_geometry_shader(
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.vs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.fs",
"src/_4_advanced_opengl/shaders/9.1.geometry_shader.gs"
);
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let points: [f32; 20] = [
-0.5, 0.5, 1.0, 0.0, 0.0, // top-left
0.5, 0.5, 0.0, 1.0, 0.0, // top-right
0.5, -0.5, 0.0, 0.0, 1.0, // bottom-right
-0.5, -0.5, 1.0, 1.0, 0.0 // bottom-left
];
// cube VAO
let (mut VAO, mut VBO) = (0, 0);
gl::GenVertexArrays(1, &mut VAO);
gl::GenBuffers(1, &mut VBO);
gl::BindVertexArray(VAO);
gl::BindBuffer(gl::ARRAY_BUFFER, VBO);
gl::BufferData(gl::ARRAY_BUFFER,
(points.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&points[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
let stride = 5 * mem::size_of::<GLfloat>() as GLsizei;
gl::EnableVertexAttribArray(0);
gl::VertexAttribPointer(0, 2, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (2 * mem::size_of::<GLfloat>()) as *const f32 as *const c_void);
gl::BindVertexArray(0);
(shader, VBO, VAO)
};
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
| processInput(&mut window, deltaTime, &mut camera);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
shader.useProgram();
gl::BindVertexArray(VAO);
gl::DrawArrays(gl::POINTS, 0, 4);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &VAO);
gl::DeleteBuffers(1, &VBO);
}
} | // input
// ----- | random_line_split |
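The three _9_1_geometry_shader_houses.rs rows above are one source file masked three ways: identifier_name hides only the function name (main_4_9_1), identifier_body hides the entire function body, and random_line_split hides a pair of adjacent lines (the // input comment pair). Grouping rows by file name makes such families easy to audit — a small sketch under the same assumed schema:

from collections import defaultdict

def mask_families(rows):
    # Bucket rows by source file so same-file masks cluster together,
    # e.g. the three Rust rows above. Field names are assumed as before.
    families = defaultdict(set)
    for row in rows:
        families[row["file_name"]].add(row["fim_type"])
    return families

# Expected for the rows above:
# mask_families(rows)["_9_1_geometry_shader_houses.rs"] ==
#     {"identifier_name", "identifier_body", "random_line_split"}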
DynamicPropertyOutput.ts | /**
* Copyright 2014 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Class: DynamicPropertyOutput
module Shumway.AVMX.AS.flash.net {
import notImplemented = Shumway.Debug.notImplemented;
import axCoerceString = Shumway.AVMX.axCoerceString;
export class DynamicPropertyOutput extends ASObject implements IDynamicPropertyOutput {
// Called whenever the class is initialized.
static classInitializer: any = null;
// List of static symbols to link.
static classSymbols: string [] = null; // [];
// List of instance symbols to link.
static instanceSymbols: string [] = null; // [];
| () {
super();
}
// JS -> AS Bindings
// AS -> JS Bindings
writeDynamicProperty(name: string, value: any): void {
name = axCoerceString(name);
release || notImplemented("packageInternal flash.net.DynamicPropertyOutput::writeDynamicProperty"); return;
}
}
}
| constructor | identifier_name |
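In the identifier_name rows the masked middle is a single token — main_4_9_1 in the Rust row, constructor here. That invariant is cheap to verify when cleaning a dump like this one; the regex below is an approximation covering the identifier styles sampled here, not a full grammar:

import re

# Loose identifier pattern: `$` is legal in JS/TS names, `_` in all of
# the languages sampled in this dump.
_IDENT = re.compile(r"[A-Za-z_$][A-Za-z0-9_$]*\Z")

def is_single_identifier(middle):
    # An identifier_name row should mask exactly one name, nothing more.
    return _IDENT.match(middle.strip()) is not None

assert is_single_identifier("main_4_9_1")
assert is_single_identifier("constructor")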
DynamicPropertyOutput.ts | /**
* Copyright 2014 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Class: DynamicPropertyOutput
module Shumway.AVMX.AS.flash.net {
import notImplemented = Shumway.Debug.notImplemented;
import axCoerceString = Shumway.AVMX.axCoerceString; | export class DynamicPropertyOutput extends ASObject implements IDynamicPropertyOutput {
// Called whenever the class is initialized.
static classInitializer: any = null;
// List of static symbols to link.
static classSymbols: string [] = null; // [];
// List of instance symbols to link.
static instanceSymbols: string [] = null; // [];
constructor () {
super();
}
// JS -> AS Bindings
// AS -> JS Bindings
writeDynamicProperty(name: string, value: any): void {
name = axCoerceString(name);
release || notImplemented("packageInternal flash.net.DynamicPropertyOutput::writeDynamicProperty"); return;
}
}
} | random_line_split |
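Turning any of these rows into an infilling prompt usually means wrapping the columns in sentinel tokens. The SantaCoder/StarCoder-style tokens below are one common convention — an assumption here, since the dump does not specify a tokenizer:

def to_psm_prompt(row):
    # PSM (prefix-suffix-middle) formatting: the model is shown prefix
    # and suffix, then generates the middle after <fim_middle>. The
    # sentinel token strings are model-specific assumptions.
    return (
        "<fim_prefix>" + row["prefix"]
        + "<fim_suffix>" + row["suffix"]
        + "<fim_middle>"
    )

# row["middle"] then serves as the reference completion for scoring.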