Dataset schema (column, dtype, observed value range):

file_name   large_string   lengths 4 to 140
prefix      large_string   lengths 0 to 39k
suffix      large_string   lengths 0 to 36.1k
middle      large_string   lengths 0 to 29.4k
fim_type    large_string   4 classes
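Each row below is a fill-in-the-middle (FIM) sample: a source file is cut into a prefix, a held-out middle, and a suffix, and fim_type records how the cut was chosen. The four fim_type values appearing in this split are conditional_block, identifier_name, identifier_body, and random_line_split. Concatenating prefix + middle + suffix reconstructs the original file. The sketch below shows that round trip; it assumes rows are exposed as plain Python dicts with the five columns above, and the reconstruct helper plus the shortened suffix are illustrative, not part of the dataset itself:

def reconstruct(row):
    # By construction of FIM data, the original file text is
    # prefix + middle + suffix. Note the columns are stored in
    # prefix, suffix, middle order, but reassembly interleaves them.
    return row["prefix"] + row["middle"] + row["suffix"]

# Worked example taken from the client.js / identifier_name row below,
# where the masked middle completes the identifier in "function run()".
# The suffix is shortened here purely for readability.
row = {
    "file_name": "client.js",
    "prefix": "function r",
    "middle": "un(",
    "suffix": ") { /* body elided for the example */ }",
    "fim_type": "identifier_name",
}
assert reconstruct(row) == "function run() { /* body elided for the example */ }"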
matlab-to-python.py
# Autogenerated with SMOP version 0.23 # main.py ../../assessing-mininet/MATLAB/load_function.m ../../assessing-mininet/MATLAB/process_complete_test_set.m ../../assessing-mininet/MATLAB/process_single_testfile.m ../../assessing-mininet/MATLAB/ProcessAllLogsMain.m from __future__ import division from numpy import arange def strcat(*args): return ''.join(args) def load_octave_decoded_file_as_matrix(file_name): with open(file_name, 'r') as f: return [ map(float,line.strip().split(' ')) for line in f ] def get_test_bitrate(crosstraffic): if crosstraffic: return arange(4,6,0.25) else:
def process_complete_test_set(file_names,output_format,crosstraffic): from glob import glob overview_img_file=strcat('overview.',output_format) mean_bitrate=[] std_dev_bitrate=[] mean_delay=[] std_dev_delay=[] mean_jitter=[] std_dev_jitter=[] mean_packetloss=[] std_dev_packetloss=[] print('Starting work on:') print(file_names) for f in file_names: print('in loop, iterating through list of found files...') #current_file_name_with_ext=f #bare_file_name=strrep(current_file_name_with_ext,extension_loadfile,'') #temp_picture_file_name=strcat(bare_file_name,extension_imgfile) current_picture_file_name=strcat(f,'.jpg') matrix_to_process=load_octave_decoded_file_as_matrix(f) parsed_data=process_single_testfile(matrix_to_process,current_picture_file_name,output_format) mean_bitrate[ii]=mean(parsed_data) std_dev_bitrate[ii]=std(parsed_data) mean_delay[ii]=mean(parsed_data[:,2]) std_dev_delay[ii]=std(parsed_data[:,2]) mean_jitter[ii]=mean(parsed_data[:,3]) std_dev_jitter[ii]=std(parsed_data[:,3]) mean_packetloss[ii]=mean(parsed_data[:,4]) std_dev_packetloss[ii]=std(parsed_data[:,4]) bitrate_of_test = get_test_bitrate(crosstraffic) s_bitrate=min(bitrate_of_test) - bitrate_interval e_bitrate=max(bitrate_of_test) + bitrate_interval s_mean_bitrate=min(mean_bitrate) - max(std_dev_bitrate) e_mean_bitrate=max(mean_bitrate) + max(std_dev_bitrate) s_mean_jitter=min(mean_jitter) - max(std_dev_jitter) e_mean_jitter=max(mean_jitter) + max(std_dev_jitter) s_mean_delay=min(mean_delay) - max(std_dev_delay) e_mean_delay=max(mean_delay) + max(std_dev_delay) axis_bitrate=(cat(s_bitrate,e_bitrate,s_mean_bitrate,e_mean_bitrate)) axis_delay=(cat(s_bitrate,e_bitrate,sort(cat(round_(s_mean_delay) - 1,round_(e_mean_delay) + 1)))) axis_jitter=(cat(s_bitrate,e_bitrate,s_mean_jitter,e_mean_jitter)) print('\n\n\n*** START TESTDATA ***\n') print(bitrate_of_test) print(mean_bitrate) print(std_dev_bitrate) print('\n*** END TESTDATA ***\n\n\n') subplot(3,1,1) print(len(bitrate_of_test)) print(len(mean_bitrate)) print(len(std_dev_bitrate)) errorbar(bitrate_of_test,mean_bitrate,std_dev_bitrate,'kx') title('mean throughput with standard deviation') xlabel('test bitrate [Mbps]') ylabel('bitrate value [Mbps]') print(axis_bitrate) axis(axis_bitrate) grid('on') subplot(3,1,2) errorbar(bitrate_of_test,mean_delay,std_dev_delay,'kx') title('mean delay with standard deviation') xlabel('test bitrate [Mbps]') ylabel('delay value [ms]') axis(axis_delay) grid('on') subplot(3,1,3) errorbar(bitrate_of_test,mean_jitter,std_dev_jitter,'kx') title('mean jitter with standard deviation') xlabel('test bitrate [Mbps]') ylabel('jitter value [ms]') axis(axis_jitter) grid('on') aggregatedPicture=figure(1) set_(aggregatedPicture,'PaperUnits','centimeters') set_(aggregatedPicture,'PaperSize',cat(30,16)) set_(aggregatedPicture,'PaperPosition',cat(0,0,30,16)) set_(aggregatedPicture,'PaperOrientation','portrait') saveas(aggregatedPicture,overview_img_file,output_format) close(aggregatedPicture) clear('all') return def process_single_testfile(matrix,current_picture_file_name,output_format): t_start=matrix[1][5] * 3600 + matrix[1][6] * 60 + matrix[1][7] print (matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7]) t_conv=(matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7]) - t_start t_start_s=matrix[1][2] * 3600 + matrix[1][3] * 60 + matrix[1][4] t_conv_s=(matrix[:][2] * 3600 + matrix[:][3] * 60 + matrix[:][4]) - t_start_s jj=1 t_int=0 bitrate[jj]=0 delay[jj]=0 jitter[jj]=0 pktloss[jj]=0 for ii in arange(1,len(matrix)).reshape(-1): if (t_conv[ii] - t_int >= 1): jj=jj + 1 t_int=t_conv[ii] bitrate[jj]=matrix[ii][8] delay[jj]=t_conv[ii] - t_conv_s[ii] if (ii > 1): pktloss[jj]=matrix[ii] - matrix[ii - 1] - 1 jitter[jj]=t_conv[ii] - t_conv[ii - 1] else: bitrate[jj]=bitrate[jj] + matrix[ii][8] delay[jj]=mean(cat(delay[jj],(t_conv[ii] - t_conv_s[ii]))) if (ii > 1): pktloss[jj]=pktloss[jj] + matrix[ii] - matrix[ii - 1] - 1 jitter[jj]=mean(cat(jitter[jj],(t_conv[ii] - t_conv[ii - 1]))) bitrate=bitrate / 125000 return_matrix=matlabarray(cat(bitrate.T,delay.T,jitter.T,pktloss.T)) subplot(2,2,1) bitrate_u=copy(bitrate) plot(arange(0,jj - 2),bitrate_u[1:jj - 1],'-') title('Throughput') xlabel('time [s]') ylabel('[Mbps]') axis(cat(0,max(t_conv),0,round_(max(bitrate_u) * 1.125))) grid('on') subplot(2,2,2) plot(arange(0,len(delay) - 1),delay,'-') title('Delay') xlabel('time [s]') ylabel('[ms]') axis(cat(0,max(t_conv),min(delay) - 1e-05,max(delay))) grid('on') subplot(2,2,3) plot(arange(0,len(jitter) - 1),jitter,'-') title('Jitter') xlabel('time [s]') ylabel('[ms]') axis(cat(0,max(t_conv),min(jitter) - max(jitter) * 1.125,max(jitter) * 1.125)) grid('on') subplot(2,2,4) d=diff(t_conv) m=max(d) hist(d) title('Inter-departure time Distribution') xlabel('time [s]') ylabel('Empirical PDF') grid('on') firstPicture=figure(1) set_(firstPicture,'PaperUnits','centimeters') set_(firstPicture,'PaperSize',cat(22,18)) set_(firstPicture,'PaperPosition',cat(0,0,22,18)) set_(firstPicture,'PaperOrientation','portrait') saveas(firstPicture,current_picture_file_name,output_format) close(firstPicture) # if (strcmp(log_type,'udp_rcv')): # subplot(1,1,1) # packetloss_picture=figure(1) # set_(packetloss_picture,'PaperUnits','centimeters') # set_(packetloss_picture,'PaperSize',cat(12,10)) # set_(packetloss_picture,'PaperPosition',cat(0,0,12,10)) # set_(packetloss_picture,'PaperOrientation','portrait') # plot(arange(0,len(pktloss) - 1),pktloss,'-') # title('Packet loss') # xlabel('time [s]') # ylabel('[pps]') # axis(cat(sort(cat(0,max(t_conv))),sort(cat(round_(max(pktloss)) + 1,round_(min(pktloss)) - 1)))) # grid('on') # saveas(packetloss_picture,strcat('pl_',current_picture_file_name),output_format) # close(packetloss_picture) return return_matrix crosstraffic = False #process_complete_test_set(['/tmp/octave.dat'],'pdf',crosstraffic) process_single_testfile(load_octave_decoded_file_as_matrix('/tmp/octave.dat'),'pic.jpg',"jpg")
return arange(8,12,0.5)
conditional_block
matlab-to-python.py
# Autogenerated with SMOP version 0.23 # main.py ../../assessing-mininet/MATLAB/load_function.m ../../assessing-mininet/MATLAB/process_complete_test_set.m ../../assessing-mininet/MATLAB/process_single_testfile.m ../../assessing-mininet/MATLAB/ProcessAllLogsMain.m from __future__ import division from numpy import arange def strcat(*args): return ''.join(args) def load_octave_decoded_file_as_matrix(file_name): with open(file_name, 'r') as f: return [ map(float,line.strip().split(' ')) for line in f ] def get_test_bitrate(crosstraffic): if crosstraffic: return arange(4,6,0.25) else: return arange(8,12,0.5) def process_complete_test_set(file_names,output_format,crosstraffic): from glob import glob overview_img_file=strcat('overview.',output_format) mean_bitrate=[] std_dev_bitrate=[] mean_delay=[] std_dev_delay=[] mean_jitter=[] std_dev_jitter=[] mean_packetloss=[] std_dev_packetloss=[] print('Starting work on:') print(file_names) for f in file_names: print('in loop, iterating through list of found files...') #current_file_name_with_ext=f #bare_file_name=strrep(current_file_name_with_ext,extension_loadfile,'') #temp_picture_file_name=strcat(bare_file_name,extension_imgfile) current_picture_file_name=strcat(f,'.jpg') matrix_to_process=load_octave_decoded_file_as_matrix(f) parsed_data=process_single_testfile(matrix_to_process,current_picture_file_name,output_format) mean_bitrate[ii]=mean(parsed_data) std_dev_bitrate[ii]=std(parsed_data) mean_delay[ii]=mean(parsed_data[:,2]) std_dev_delay[ii]=std(parsed_data[:,2]) mean_jitter[ii]=mean(parsed_data[:,3]) std_dev_jitter[ii]=std(parsed_data[:,3]) mean_packetloss[ii]=mean(parsed_data[:,4]) std_dev_packetloss[ii]=std(parsed_data[:,4]) bitrate_of_test = get_test_bitrate(crosstraffic) s_bitrate=min(bitrate_of_test) - bitrate_interval e_bitrate=max(bitrate_of_test) + bitrate_interval s_mean_bitrate=min(mean_bitrate) - max(std_dev_bitrate) e_mean_bitrate=max(mean_bitrate) + max(std_dev_bitrate) s_mean_jitter=min(mean_jitter) - max(std_dev_jitter) e_mean_jitter=max(mean_jitter) + max(std_dev_jitter) s_mean_delay=min(mean_delay) - max(std_dev_delay) e_mean_delay=max(mean_delay) + max(std_dev_delay) axis_bitrate=(cat(s_bitrate,e_bitrate,s_mean_bitrate,e_mean_bitrate)) axis_delay=(cat(s_bitrate,e_bitrate,sort(cat(round_(s_mean_delay) - 1,round_(e_mean_delay) + 1)))) axis_jitter=(cat(s_bitrate,e_bitrate,s_mean_jitter,e_mean_jitter)) print('\n\n\n*** START TESTDATA ***\n') print(bitrate_of_test) print(mean_bitrate) print(std_dev_bitrate) print('\n*** END TESTDATA ***\n\n\n') subplot(3,1,1) print(len(bitrate_of_test)) print(len(mean_bitrate)) print(len(std_dev_bitrate)) errorbar(bitrate_of_test,mean_bitrate,std_dev_bitrate,'kx') title('mean throughput with standard deviation') xlabel('test bitrate [Mbps]') ylabel('bitrate value [Mbps]') print(axis_bitrate) axis(axis_bitrate) grid('on') subplot(3,1,2) errorbar(bitrate_of_test,mean_delay,std_dev_delay,'kx') title('mean delay with standard deviation') xlabel('test bitrate [Mbps]') ylabel('delay value [ms]') axis(axis_delay) grid('on') subplot(3,1,3) errorbar(bitrate_of_test,mean_jitter,std_dev_jitter,'kx') title('mean jitter with standard deviation') xlabel('test bitrate [Mbps]') ylabel('jitter value [ms]') axis(axis_jitter) grid('on') aggregatedPicture=figure(1) set_(aggregatedPicture,'PaperUnits','centimeters') set_(aggregatedPicture,'PaperSize',cat(30,16)) set_(aggregatedPicture,'PaperPosition',cat(0,0,30,16)) set_(aggregatedPicture,'PaperOrientation','portrait') saveas(aggregatedPicture,overview_img_file,output_format) close(aggregatedPicture) clear('all') return def
(matrix,current_picture_file_name,output_format): t_start=matrix[1][5] * 3600 + matrix[1][6] * 60 + matrix[1][7] print (matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7]) t_conv=(matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7]) - t_start t_start_s=matrix[1][2] * 3600 + matrix[1][3] * 60 + matrix[1][4] t_conv_s=(matrix[:][2] * 3600 + matrix[:][3] * 60 + matrix[:][4]) - t_start_s jj=1 t_int=0 bitrate[jj]=0 delay[jj]=0 jitter[jj]=0 pktloss[jj]=0 for ii in arange(1,len(matrix)).reshape(-1): if (t_conv[ii] - t_int >= 1): jj=jj + 1 t_int=t_conv[ii] bitrate[jj]=matrix[ii][8] delay[jj]=t_conv[ii] - t_conv_s[ii] if (ii > 1): pktloss[jj]=matrix[ii] - matrix[ii - 1] - 1 jitter[jj]=t_conv[ii] - t_conv[ii - 1] else: bitrate[jj]=bitrate[jj] + matrix[ii][8] delay[jj]=mean(cat(delay[jj],(t_conv[ii] - t_conv_s[ii]))) if (ii > 1): pktloss[jj]=pktloss[jj] + matrix[ii] - matrix[ii - 1] - 1 jitter[jj]=mean(cat(jitter[jj],(t_conv[ii] - t_conv[ii - 1]))) bitrate=bitrate / 125000 return_matrix=matlabarray(cat(bitrate.T,delay.T,jitter.T,pktloss.T)) subplot(2,2,1) bitrate_u=copy(bitrate) plot(arange(0,jj - 2),bitrate_u[1:jj - 1],'-') title('Throughput') xlabel('time [s]') ylabel('[Mbps]') axis(cat(0,max(t_conv),0,round_(max(bitrate_u) * 1.125))) grid('on') subplot(2,2,2) plot(arange(0,len(delay) - 1),delay,'-') title('Delay') xlabel('time [s]') ylabel('[ms]') axis(cat(0,max(t_conv),min(delay) - 1e-05,max(delay))) grid('on') subplot(2,2,3) plot(arange(0,len(jitter) - 1),jitter,'-') title('Jitter') xlabel('time [s]') ylabel('[ms]') axis(cat(0,max(t_conv),min(jitter) - max(jitter) * 1.125,max(jitter) * 1.125)) grid('on') subplot(2,2,4) d=diff(t_conv) m=max(d) hist(d) title('Inter-departure time Distribution') xlabel('time [s]') ylabel('Empirical PDF') grid('on') firstPicture=figure(1) set_(firstPicture,'PaperUnits','centimeters') set_(firstPicture,'PaperSize',cat(22,18)) set_(firstPicture,'PaperPosition',cat(0,0,22,18)) set_(firstPicture,'PaperOrientation','portrait') saveas(firstPicture,current_picture_file_name,output_format) close(firstPicture) # if (strcmp(log_type,'udp_rcv')): # subplot(1,1,1) # packetloss_picture=figure(1) # set_(packetloss_picture,'PaperUnits','centimeters') # set_(packetloss_picture,'PaperSize',cat(12,10)) # set_(packetloss_picture,'PaperPosition',cat(0,0,12,10)) # set_(packetloss_picture,'PaperOrientation','portrait') # plot(arange(0,len(pktloss) - 1),pktloss,'-') # title('Packet loss') # xlabel('time [s]') # ylabel('[pps]') # axis(cat(sort(cat(0,max(t_conv))),sort(cat(round_(max(pktloss)) + 1,round_(min(pktloss)) - 1)))) # grid('on') # saveas(packetloss_picture,strcat('pl_',current_picture_file_name),output_format) # close(packetloss_picture) return return_matrix crosstraffic = False #process_complete_test_set(['/tmp/octave.dat'],'pdf',crosstraffic) process_single_testfile(load_octave_decoded_file_as_matrix('/tmp/octave.dat'),'pic.jpg',"jpg")
process_single_testfile
identifier_name
ThePrinceWhoCameTooLate.js
const PlotCard = require('../../plotcard'); const GameActions = require('../../GameActions'); class ThePrinceWhoCameTooLate extends PlotCard {
(ability) { this.action({ title: 'Search your deck', phase: 'standing', handler: context => { this.game.resolveGameAction( GameActions.search({ title: 'Select a character', match: { type: 'character' }, message: '{player} uses {source} to search their deck and put {searchTarget} into play', cancelMessage: '{player} uses {source} to search their deck but does not find a card', gameAction: GameActions.putIntoPlay(context => ({ player: context.player, card: context.searchTarget })) }), context ); }, limit: ability.limit.perRound(1) }); } } ThePrinceWhoCameTooLate.code = '15052'; module.exports = ThePrinceWhoCameTooLate;
setupCardAbilities
identifier_name
ThePrinceWhoCameTooLate.js
const PlotCard = require('../../plotcard'); const GameActions = require('../../GameActions'); class ThePrinceWhoCameTooLate extends PlotCard { setupCardAbilities(ability) { this.action({ title: 'Search your deck', phase: 'standing', handler: context => { this.game.resolveGameAction( GameActions.search({ title: 'Select a character', match: { type: 'character' }, message: '{player} uses {source} to search their deck and put {searchTarget} into play', cancelMessage: '{player} uses {source} to search their deck but does not find a card', gameAction: GameActions.putIntoPlay(context => ({ player: context.player, card: context.searchTarget })) }), context );
} ThePrinceWhoCameTooLate.code = '15052'; module.exports = ThePrinceWhoCameTooLate;
}, limit: ability.limit.perRound(1) }); }
random_line_split
client.js
/** * React Starter Kit (https://www.reactstarterkit.com/) * * Copyright © 2014-2016 Kriasoft, LLC. All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE.txt file in the root directory of this source tree. */ import 'babel-polyfill'; import ReactDOM from 'react-dom'; import React from 'react'; import FastClick from 'fastclick'; import Router from './routes'; import Location from './core/Location'; import { addEventListener, removeEventListener } from './core/DOMUtils'; import { ApolloClient, createNetworkInterface } from 'react-apollo'; function getCookie(name) { let value = "; " + document.cookie; let parts = value.split("; " + name + "="); if (parts.length == 2) return parts.pop().split(";").shift(); } const networkInterface = createNetworkInterface('/graphql', { credentials: 'same-origin', uri: '/graphql', headers: { Cookie: getCookie("id_token") } }); const client = new ApolloClient({ connectToDevTools: true, networkInterface: networkInterface, }); let cssContainer = document.getElementById('css'); const appContainer = document.getElementById('app'); const context = { insertCss: styles => styles._insertCss(), onSetTitle: value => (document.title = value), onSetMeta: (name, content) => { // Remove and create a new <meta /> tag in order to make it work // with bookmarks in Safari const elements = document.getElementsByTagName('meta'); Array.from(elements).forEach((element) => { if (element.getAttribute('name') === name) { element.parentNode.removeChild(element); } }); const meta = document.createElement('meta'); meta.setAttribute('name', name); meta.setAttribute('content', content); document .getElementsByTagName('head')[0] .appendChild(meta); }, client }; // Google Analytics tracking. Don't send 'pageview' event after the first // rendering, as it was already sent by the Html component. let trackPageview = () => (trackPageview = () => window.ga('send', 'pageview')); function render(state) {
function run() { let currentLocation = null; let currentState = null; // Make taps on links and buttons work fast on mobiles FastClick.attach(document.body); // Re-render the app when window.location changes const unlisten = Location.listen(location => { currentLocation = location; currentState = Object.assign({}, location.state, { path: location.pathname, query: location.query, state: location.state, context, }); render(currentState); }); // Save the page scroll position into the current location's state const supportPageOffset = window.pageXOffset !== undefined; const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat'); const setPageOffset = () => { currentLocation.state = currentLocation.state || Object.create(null); if (supportPageOffset) { currentLocation.state.scrollX = window.pageXOffset; currentLocation.state.scrollY = window.pageYOffset; } else { currentLocation.state.scrollX = isCSS1Compat ? document.documentElement.scrollLeft : document.body.scrollLeft; currentLocation.state.scrollY = isCSS1Compat ? document.documentElement.scrollTop : document.body.scrollTop; } }; addEventListener(window, 'scroll', setPageOffset); addEventListener(window, 'pagehide', () => { removeEventListener(window, 'scroll', setPageOffset); unlisten(); }); } // Run the application when both DOM is ready and page content is loaded if (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) { run(); } else { document.addEventListener('DOMContentLoaded', run, false); }
Router.dispatch(state, (newState, component) => { ReactDOM.render( component, appContainer, () => { // Restore the scroll position if it was saved into the state if (state.scrollY !== undefined) { window.scrollTo(state.scrollX, state.scrollY); } else { window.scrollTo(0, 0); } trackPageview(); // Remove the pre-rendered CSS because it's no longer used // after the React app is launched if (cssContainer) { cssContainer.parentNode.removeChild(cssContainer); cssContainer = null; } }); }); }
identifier_body
client.js
/** * React Starter Kit (https://www.reactstarterkit.com/) * * Copyright © 2014-2016 Kriasoft, LLC. All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE.txt file in the root directory of this source tree. */ import 'babel-polyfill'; import ReactDOM from 'react-dom'; import React from 'react'; import FastClick from 'fastclick'; import Router from './routes'; import Location from './core/Location'; import { addEventListener, removeEventListener } from './core/DOMUtils'; import { ApolloClient, createNetworkInterface } from 'react-apollo'; function getCookie(name) { let value = "; " + document.cookie; let parts = value.split("; " + name + "="); if (parts.length == 2) return parts.pop().split(";").shift(); } const networkInterface = createNetworkInterface('/graphql', { credentials: 'same-origin', uri: '/graphql', headers: { Cookie: getCookie("id_token") } }); const client = new ApolloClient({ connectToDevTools: true, networkInterface: networkInterface, }); let cssContainer = document.getElementById('css'); const appContainer = document.getElementById('app'); const context = { insertCss: styles => styles._insertCss(), onSetTitle: value => (document.title = value), onSetMeta: (name, content) => { // Remove and create a new <meta /> tag in order to make it work // with bookmarks in Safari const elements = document.getElementsByTagName('meta'); Array.from(elements).forEach((element) => { if (element.getAttribute('name') === name) { element.parentNode.removeChild(element); } }); const meta = document.createElement('meta'); meta.setAttribute('name', name); meta.setAttribute('content', content); document .getElementsByTagName('head')[0] .appendChild(meta); }, client }; // Google Analytics tracking. Don't send 'pageview' event after the first // rendering, as it was already sent by the Html component. let trackPageview = () => (trackPageview = () => window.ga('send', 'pageview')); function render(state) { Router.dispatch(state, (newState, component) => { ReactDOM.render( component, appContainer, () => { // Restore the scroll position if it was saved into the state if (state.scrollY !== undefined) { window.scrollTo(state.scrollX, state.scrollY); } else {
trackPageview(); // Remove the pre-rendered CSS because it's no longer used // after the React app is launched if (cssContainer) { cssContainer.parentNode.removeChild(cssContainer); cssContainer = null; } }); }); } function run() { let currentLocation = null; let currentState = null; // Make taps on links and buttons work fast on mobiles FastClick.attach(document.body); // Re-render the app when window.location changes const unlisten = Location.listen(location => { currentLocation = location; currentState = Object.assign({}, location.state, { path: location.pathname, query: location.query, state: location.state, context, }); render(currentState); }); // Save the page scroll position into the current location's state const supportPageOffset = window.pageXOffset !== undefined; const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat'); const setPageOffset = () => { currentLocation.state = currentLocation.state || Object.create(null); if (supportPageOffset) { currentLocation.state.scrollX = window.pageXOffset; currentLocation.state.scrollY = window.pageYOffset; } else { currentLocation.state.scrollX = isCSS1Compat ? document.documentElement.scrollLeft : document.body.scrollLeft; currentLocation.state.scrollY = isCSS1Compat ? document.documentElement.scrollTop : document.body.scrollTop; } }; addEventListener(window, 'scroll', setPageOffset); addEventListener(window, 'pagehide', () => { removeEventListener(window, 'scroll', setPageOffset); unlisten(); }); } // Run the application when both DOM is ready and page content is loaded if (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) { run(); } else { document.addEventListener('DOMContentLoaded', run, false); }
window.scrollTo(0, 0); }
conditional_block
client.js
/** * React Starter Kit (https://www.reactstarterkit.com/) * * Copyright © 2014-2016 Kriasoft, LLC. All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE.txt file in the root directory of this source tree. */ import 'babel-polyfill'; import ReactDOM from 'react-dom'; import React from 'react'; import FastClick from 'fastclick'; import Router from './routes'; import Location from './core/Location'; import { addEventListener, removeEventListener } from './core/DOMUtils'; import { ApolloClient, createNetworkInterface } from 'react-apollo'; function getCookie(name) { let value = "; " + document.cookie; let parts = value.split("; " + name + "="); if (parts.length == 2) return parts.pop().split(";").shift(); } const networkInterface = createNetworkInterface('/graphql', { credentials: 'same-origin', uri: '/graphql', headers: { Cookie: getCookie("id_token") } }); const client = new ApolloClient({ connectToDevTools: true, networkInterface: networkInterface, }); let cssContainer = document.getElementById('css'); const appContainer = document.getElementById('app'); const context = { insertCss: styles => styles._insertCss(), onSetTitle: value => (document.title = value), onSetMeta: (name, content) => { // Remove and create a new <meta /> tag in order to make it work // with bookmarks in Safari const elements = document.getElementsByTagName('meta'); Array.from(elements).forEach((element) => { if (element.getAttribute('name') === name) { element.parentNode.removeChild(element); } }); const meta = document.createElement('meta'); meta.setAttribute('name', name); meta.setAttribute('content', content); document .getElementsByTagName('head')[0] .appendChild(meta); }, client }; // Google Analytics tracking. Don't send 'pageview' event after the first // rendering, as it was already sent by the Html component. let trackPageview = () => (trackPageview = () => window.ga('send', 'pageview')); function render(state) { Router.dispatch(state, (newState, component) => { ReactDOM.render( component, appContainer, () => { // Restore the scroll position if it was saved into the state if (state.scrollY !== undefined) { window.scrollTo(state.scrollX, state.scrollY); } else { window.scrollTo(0, 0); } trackPageview(); // Remove the pre-rendered CSS because it's no longer used // after the React app is launched if (cssContainer) { cssContainer.parentNode.removeChild(cssContainer); cssContainer = null; } }); }); } function r
) { let currentLocation = null; let currentState = null; // Make taps on links and buttons work fast on mobiles FastClick.attach(document.body); // Re-render the app when window.location changes const unlisten = Location.listen(location => { currentLocation = location; currentState = Object.assign({}, location.state, { path: location.pathname, query: location.query, state: location.state, context, }); render(currentState); }); // Save the page scroll position into the current location's state const supportPageOffset = window.pageXOffset !== undefined; const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat'); const setPageOffset = () => { currentLocation.state = currentLocation.state || Object.create(null); if (supportPageOffset) { currentLocation.state.scrollX = window.pageXOffset; currentLocation.state.scrollY = window.pageYOffset; } else { currentLocation.state.scrollX = isCSS1Compat ? document.documentElement.scrollLeft : document.body.scrollLeft; currentLocation.state.scrollY = isCSS1Compat ? document.documentElement.scrollTop : document.body.scrollTop; } }; addEventListener(window, 'scroll', setPageOffset); addEventListener(window, 'pagehide', () => { removeEventListener(window, 'scroll', setPageOffset); unlisten(); }); } // Run the application when both DOM is ready and page content is loaded if (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) { run(); } else { document.addEventListener('DOMContentLoaded', run, false); }
un(
identifier_name
client.js
/** * React Starter Kit (https://www.reactstarterkit.com/) * * Copyright © 2014-2016 Kriasoft, LLC. All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE.txt file in the root directory of this source tree. */ import 'babel-polyfill'; import ReactDOM from 'react-dom'; import React from 'react'; import FastClick from 'fastclick'; import Router from './routes'; import Location from './core/Location'; import { addEventListener, removeEventListener } from './core/DOMUtils'; import { ApolloClient, createNetworkInterface } from 'react-apollo'; function getCookie(name) { let value = "; " + document.cookie; let parts = value.split("; " + name + "="); if (parts.length == 2) return parts.pop().split(";").shift(); } const networkInterface = createNetworkInterface('/graphql', { credentials: 'same-origin', uri: '/graphql', headers: { Cookie: getCookie("id_token")
const client = new ApolloClient({ connectToDevTools: true, networkInterface: networkInterface, }); let cssContainer = document.getElementById('css'); const appContainer = document.getElementById('app'); const context = { insertCss: styles => styles._insertCss(), onSetTitle: value => (document.title = value), onSetMeta: (name, content) => { // Remove and create a new <meta /> tag in order to make it work // with bookmarks in Safari const elements = document.getElementsByTagName('meta'); Array.from(elements).forEach((element) => { if (element.getAttribute('name') === name) { element.parentNode.removeChild(element); } }); const meta = document.createElement('meta'); meta.setAttribute('name', name); meta.setAttribute('content', content); document .getElementsByTagName('head')[0] .appendChild(meta); }, client }; // Google Analytics tracking. Don't send 'pageview' event after the first // rendering, as it was already sent by the Html component. let trackPageview = () => (trackPageview = () => window.ga('send', 'pageview')); function render(state) { Router.dispatch(state, (newState, component) => { ReactDOM.render( component, appContainer, () => { // Restore the scroll position if it was saved into the state if (state.scrollY !== undefined) { window.scrollTo(state.scrollX, state.scrollY); } else { window.scrollTo(0, 0); } trackPageview(); // Remove the pre-rendered CSS because it's no longer used // after the React app is launched if (cssContainer) { cssContainer.parentNode.removeChild(cssContainer); cssContainer = null; } }); }); } function run() { let currentLocation = null; let currentState = null; // Make taps on links and buttons work fast on mobiles FastClick.attach(document.body); // Re-render the app when window.location changes const unlisten = Location.listen(location => { currentLocation = location; currentState = Object.assign({}, location.state, { path: location.pathname, query: location.query, state: location.state, context, }); render(currentState); }); // Save the page scroll position into the current location's state const supportPageOffset = window.pageXOffset !== undefined; const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat'); const setPageOffset = () => { currentLocation.state = currentLocation.state || Object.create(null); if (supportPageOffset) { currentLocation.state.scrollX = window.pageXOffset; currentLocation.state.scrollY = window.pageYOffset; } else { currentLocation.state.scrollX = isCSS1Compat ? document.documentElement.scrollLeft : document.body.scrollLeft; currentLocation.state.scrollY = isCSS1Compat ? document.documentElement.scrollTop : document.body.scrollTop; } }; addEventListener(window, 'scroll', setPageOffset); addEventListener(window, 'pagehide', () => { removeEventListener(window, 'scroll', setPageOffset); unlisten(); }); } // Run the application when both DOM is ready and page content is loaded if (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) { run(); } else { document.addEventListener('DOMContentLoaded', run, false); }
} });
random_line_split
efectos.js
$.fn.dropdown = function(options) { var defaults = { hideSpeed: 50, showSpeed: 350, parentBGHoverColor: false, parentTextHoverColor: false, zIndex: 5000 }; var settings = $.extend({}, defaults, options); var self = {}; self.show = function(menu){ if($(menu).children("li > ul").css("display") == "none"){ $(menu).attr("showing", "1"); if(settings.parentBGHoverColor){ if(!$(menu).attr("dropdownParentOriginalBGColor")){
$(menu).css("background-color") ); } $(menu).css("background-color", settings.parentBGHoverColor); } if(settings.parentTextHoverColor){ if(!$(menu).attr("dropdownParentOriginalTextColor")){ $(menu).attr("dropdownParentOriginalTextColor", $(menu).children("span").children("a").css("color")); } $(menu).children("span").children("a").css( "color", settings.parentTextHoverColor ); } $(menu).children("ul").css("position", "absolute") .css("left", $(menu).css("left")) .css("z-index", settings.zIndex) ; $(menu).children("ul").slideDown(settings.showSpeed, function(){ $(this).parent().attr("showing", "0"); }); } } self.hide = function(menu){ $(menu).children("ul").slideUp(settings.hideSpeed, function(){ if(settings.parentBGHoverColor){ $(this).parent().css( "background-color", $(this).parent().attr("dropdownParentOriginalBGColor") ); } if(settings.parentTextHoverColor){ $(this).parent().children("span") .children("a") .css( "color", $(this).parent().attr("dropdownParentOriginalTextColor") ); } }); } this.each(function() { $(this).find("li > ul").css("display", "none"); $(this).find("li > ul").parent().hover( function(){ self.show(this); }, function(){ self.hide(this); } ); $(this).find("li > ul").parent().bind( 'touchstart touchend', function(event) { if($(this).children("li > ul").css("display") == "none") { self.show(this); } else { self.hide(this); } event.preventDefault(); }); }); return this; }; $(document).ready(function(){ $("#header .bottom ul").dropdown({ showSpeed: 230, hideSpeed: 230 }); $("#mobile-menu .menu a").click(function(){ $("#mobile-main-menu").slideToggle(500); }); $(window).resize(function(){ if($(window).width() > 480){ $("#mobile-main-menu").hide(); } }); $('*').bind('touchstart touchend', function(e) { var that = this; this.onclick = function() { that.onhover.call(that); }; }); $('#pre-header .search form, #pre-header .search form input[type="image"]').click(function(event){ var text=$('#pre-header .search form input[type="text"]'); if(text.css('display') == "none"){ text.css('width', '0px'); text.css("display", 'inline-block'); text.animate({width: '200px'}, 350); event.preventDefault(); } text.focus(); }); });
$(menu).attr( "dropdownParentOriginalBGColor",
random_line_split
efectos.js
$.fn.dropdown = function(options) { var defaults = { hideSpeed: 50, showSpeed: 350, parentBGHoverColor: false, parentTextHoverColor: false, zIndex: 5000 }; var settings = $.extend({}, defaults, options); var self = {}; self.show = function(menu){ if($(menu).children("li > ul").css("display") == "none"){ $(menu).attr("showing", "1"); if(settings.parentBGHoverColor){ if(!$(menu).attr("dropdownParentOriginalBGColor")){ $(menu).attr( "dropdownParentOriginalBGColor", $(menu).css("background-color") ); } $(menu).css("background-color", settings.parentBGHoverColor); } if(settings.parentTextHoverColor){ if(!$(menu).attr("dropdownParentOriginalTextColor")){ $(menu).attr("dropdownParentOriginalTextColor", $(menu).children("span").children("a").css("color")); } $(menu).children("span").children("a").css( "color", settings.parentTextHoverColor ); } $(menu).children("ul").css("position", "absolute") .css("left", $(menu).css("left")) .css("z-index", settings.zIndex) ; $(menu).children("ul").slideDown(settings.showSpeed, function(){ $(this).parent().attr("showing", "0"); }); } } self.hide = function(menu){ $(menu).children("ul").slideUp(settings.hideSpeed, function(){ if(settings.parentBGHoverColor){ $(this).parent().css( "background-color", $(this).parent().attr("dropdownParentOriginalBGColor") ); } if(settings.parentTextHoverColor){ $(this).parent().children("span") .children("a") .css( "color", $(this).parent().attr("dropdownParentOriginalTextColor") ); } }); } this.each(function() { $(this).find("li > ul").css("display", "none"); $(this).find("li > ul").parent().hover( function(){ self.show(this); }, function(){ self.hide(this); } ); $(this).find("li > ul").parent().bind( 'touchstart touchend', function(event) { if($(this).children("li > ul").css("display") == "none")
else { self.hide(this); } event.preventDefault(); }); }); return this; }; $(document).ready(function(){ $("#header .bottom ul").dropdown({ showSpeed: 230, hideSpeed: 230 }); $("#mobile-menu .menu a").click(function(){ $("#mobile-main-menu").slideToggle(500); }); $(window).resize(function(){ if($(window).width() > 480){ $("#mobile-main-menu").hide(); } }); $('*').bind('touchstart touchend', function(e) { var that = this; this.onclick = function() { that.onhover.call(that); }; }); $('#pre-header .search form, #pre-header .search form input[type="image"]').click(function(event){ var text=$('#pre-header .search form input[type="text"]'); if(text.css('display') == "none"){ text.css('width', '0px'); text.css("display", 'inline-block'); text.animate({width: '200px'}, 350); event.preventDefault(); } text.focus(); }); });
{ self.show(this); }
conditional_block
sdputils.js
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* globals displayError, params */ /* exported addCodecParam, iceCandidateType, maybePreferAudioReceiveCodec, maybePreferAudioSendCodec, maybeSetAudioReceiveBitRate, maybeSetAudioSendBitRate, maybePreferVideoReceiveCodec, maybePreferVideoSendCodec, maybeSetVideoReceiveBitRate, maybeSetVideoSendBitRate, maybeSetVideoSendInitialBitRate, mergeConstraints */ 'use strict'; function mergeConstraints(cons1, cons2) { var merged = cons1; for (var name in cons2.mandatory) { merged.mandatory[name] = cons2.mandatory[name]; } merged.optional = merged.optional.concat(cons2.optional); return merged; } function iceCandidateType(candidateStr) { return candidateStr.split(' ')[7]; } function maybeSetAudioSendBitRate(sdp) { if (!params.audioSendBitrate) { return sdp; } trace('Prefer audio send bitrate: ' + params.audioSendBitrate); return preferBitRate(sdp, params.audioSendBitrate, 'audio'); } function maybeSetAudioReceiveBitRate(sdp) { if (!params.audioRecvBitrate) { return sdp; } trace('Prefer audio receive bitrate: ' + params.audioRecvBitrate); return preferBitRate(sdp, params.audioRecvBitrate, 'audio'); } function maybeSetVideoSendBitRate(sdp) { if (!params.videoSendBitrate) { return sdp; } trace('Prefer video send bitrate: ' + params.videoSendBitrate); return preferBitRate(sdp, params.videoSendBitrate, 'video'); } function maybeSetVideoReceiveBitRate(sdp) { if (!params.videoRecvBitrate) { return sdp; } trace('Prefer video receive bitrate: ' + params.videoRecvBitrate); return preferBitRate(sdp, params.videoRecvBitrate, 'video'); } // Add a b=AS:bitrate line to the m=mediaType section. function preferBitRate(sdp, bitrate, mediaType) { var sdpLines = sdp.split('\r\n'); // Find m line for the given mediaType. var mLineIndex = findLine(sdpLines, 'm=', mediaType); if (mLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no m-line found'); return sdp; } // Find next m-line if any. var nextMLineIndex = findLineInRange(sdpLines, mLineIndex + 1, -1, 'm='); if (nextMLineIndex === null) { nextMLineIndex = sdpLines.length; } // Find c-line corresponding to the m-line. var cLineIndex = findLineInRange(sdpLines, mLineIndex + 1, nextMLineIndex, 'c='); if (cLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no c-line found'); return sdp; } // Check if bandwidth line already exists between c-line and next m-line. var bLineIndex = findLineInRange(sdpLines, cLineIndex + 1, nextMLineIndex, 'b=AS'); if (bLineIndex)
// Create the b (bandwidth) sdp line. var bwLine = 'b=AS:' + bitrate; // As per RFC 4566, the b line should follow after c-line. sdpLines.splice(cLineIndex + 1, 0, bwLine); sdp = sdpLines.join('\r\n'); return sdp; } // Add an a=fmtp: x-google-min-bitrate=kbps line, if videoSendInitialBitrate // is specified. We'll also add a x-google-min-bitrate value, since the max // must be >= the min. function maybeSetVideoSendInitialBitRate(sdp) { var initialBitrate = params.videoSendInitialBitrate; if (!initialBitrate) { return sdp; } // Validate the initial bitrate value. var maxBitrate = initialBitrate; var bitrate = params.videoSendBitrate; if (bitrate) { if (initialBitrate > bitrate) { displayError('Clamping initial bitrate to max bitrate of ' + bitrate + ' kbps.'); initialBitrate = bitrate; params.videoSendInitialBitrate = initialBitrate; } maxBitrate = bitrate; } var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', 'video'); if (mLineIndex === null) { displayError('Failed to find video m-line'); return sdp; } var vp8RtpmapIndex = findLine(sdpLines, 'a=rtpmap', 'VP8/90000'); var vp8Payload = getCodecPayloadType(sdpLines[vp8RtpmapIndex]); var vp8Fmtp = 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + params.videoSendInitialBitrate.toString() + '; x-google-max-bitrate=' + maxBitrate.toString(); sdpLines.splice(vp8RtpmapIndex + 1, 0, vp8Fmtp); return sdpLines.join('\r\n'); } // Promotes |audioSendCodec| to be the first in the m=audio line, if set. function maybePreferAudioSendCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'send', params.audioSendCodec); } // Promotes |audioRecvCodec| to be the first in the m=audio line, if set. function maybePreferAudioReceiveCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'receive', params.audioRecvCodec); } // Promotes |videoSendCodec| to be the first in the m=audio line, if set. function maybePreferVideoSendCodec(sdp) { return maybePreferCodec(sdp, 'video', 'send', params.videoSendCodec); } // Promotes |videoRecvCodec| to be the first in the m=audio line, if set. function maybePreferVideoReceiveCodec(sdp) { return maybePreferCodec(sdp, 'video', 'receive', params.videoRecvCodec); } // Sets |codec| as the default |type| codec if it's present. // The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'. function maybePreferCodec(sdp, type, dir, codec) { var str = type + ' ' + dir + ' codec'; if (codec === '') { trace('No preference on ' + str + '.'); return sdp; } trace('Prefer ' + str + ': ' + codec); var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', type); if (mLineIndex === null) { return sdp; } // If the codec is available, set it as the default in m line. var codecIndex = findLine(sdpLines, 'a=rtpmap', codec); if (codecIndex) { var payload = getCodecPayloadType(sdpLines[codecIndex]); if (payload) { sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload); } } sdp = sdpLines.join('\r\n'); return sdp; } // Add fmtp param to specified codec in SDP. function addCodecParam(sdp, codec, param) { var sdpLines = sdp.split('\r\n'); // Find opus payload. var index = findLine(sdpLines, 'a=rtpmap', codec); var payload; if (index) { payload = getCodecPayloadType(sdpLines[index]); } // Find the payload in fmtp line. var fmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + payload.toString()); if (fmtpLineIndex === null) { return sdp; } sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat('; ', param); sdp = sdpLines.join('\r\n'); return sdp; } // Find the line in sdpLines that starts with |prefix|, and, if specified, // contains |substr| (case-insensitive search). function findLine(sdpLines, prefix, substr) { return findLineInRange(sdpLines, 0, -1, prefix, substr); } // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix| // and, if specified, contains |substr| (case-insensitive search). function findLineInRange(sdpLines, startLine, endLine, prefix, substr) { var realEndLine = endLine !== -1 ? endLine : sdpLines.length; for (var i = startLine; i < realEndLine; ++i) { if (sdpLines[i].indexOf(prefix) === 0) { if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) { return i; } } } return null; } // Gets the codec payload type from an a=rtpmap:X line. function getCodecPayloadType(sdpLine) { var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+'); var result = sdpLine.match(pattern); return (result && result.length === 2) ? result[1] : null; } // Returns a new m= line with the specified codec as the first one. function setDefaultCodec(mLine, payload) { var elements = mLine.split(' '); var newLine = []; var index = 0; for (var i = 0; i < elements.length; i++) { if (index === 3) { // Format of media starts from the fourth. newLine[index++] = payload; // Put target payload to the first. } if (elements[i] !== payload) { newLine[index++] = elements[i]; } } return newLine.join(' '); }
{ sdpLines.splice(bLineIndex, 1); }
conditional_block
sdputils.js
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* globals displayError, params */ /* exported addCodecParam, iceCandidateType, maybePreferAudioReceiveCodec, maybePreferAudioSendCodec, maybeSetAudioReceiveBitRate, maybeSetAudioSendBitRate, maybePreferVideoReceiveCodec, maybePreferVideoSendCodec, maybeSetVideoReceiveBitRate, maybeSetVideoSendBitRate, maybeSetVideoSendInitialBitRate, mergeConstraints */ 'use strict'; function mergeConstraints(cons1, cons2)
function iceCandidateType(candidateStr) { return candidateStr.split(' ')[7]; } function maybeSetAudioSendBitRate(sdp) { if (!params.audioSendBitrate) { return sdp; } trace('Prefer audio send bitrate: ' + params.audioSendBitrate); return preferBitRate(sdp, params.audioSendBitrate, 'audio'); } function maybeSetAudioReceiveBitRate(sdp) { if (!params.audioRecvBitrate) { return sdp; } trace('Prefer audio receive bitrate: ' + params.audioRecvBitrate); return preferBitRate(sdp, params.audioRecvBitrate, 'audio'); } function maybeSetVideoSendBitRate(sdp) { if (!params.videoSendBitrate) { return sdp; } trace('Prefer video send bitrate: ' + params.videoSendBitrate); return preferBitRate(sdp, params.videoSendBitrate, 'video'); } function maybeSetVideoReceiveBitRate(sdp) { if (!params.videoRecvBitrate) { return sdp; } trace('Prefer video receive bitrate: ' + params.videoRecvBitrate); return preferBitRate(sdp, params.videoRecvBitrate, 'video'); } // Add a b=AS:bitrate line to the m=mediaType section. function preferBitRate(sdp, bitrate, mediaType) { var sdpLines = sdp.split('\r\n'); // Find m line for the given mediaType. var mLineIndex = findLine(sdpLines, 'm=', mediaType); if (mLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no m-line found'); return sdp; } // Find next m-line if any. var nextMLineIndex = findLineInRange(sdpLines, mLineIndex + 1, -1, 'm='); if (nextMLineIndex === null) { nextMLineIndex = sdpLines.length; } // Find c-line corresponding to the m-line. var cLineIndex = findLineInRange(sdpLines, mLineIndex + 1, nextMLineIndex, 'c='); if (cLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no c-line found'); return sdp; } // Check if bandwidth line already exists between c-line and next m-line. var bLineIndex = findLineInRange(sdpLines, cLineIndex + 1, nextMLineIndex, 'b=AS'); if (bLineIndex) { sdpLines.splice(bLineIndex, 1); } // Create the b (bandwidth) sdp line. var bwLine = 'b=AS:' + bitrate; // As per RFC 4566, the b line should follow after c-line. sdpLines.splice(cLineIndex + 1, 0, bwLine); sdp = sdpLines.join('\r\n'); return sdp; } // Add an a=fmtp: x-google-min-bitrate=kbps line, if videoSendInitialBitrate // is specified. We'll also add a x-google-min-bitrate value, since the max // must be >= the min. function maybeSetVideoSendInitialBitRate(sdp) { var initialBitrate = params.videoSendInitialBitrate; if (!initialBitrate) { return sdp; } // Validate the initial bitrate value. var maxBitrate = initialBitrate; var bitrate = params.videoSendBitrate; if (bitrate) { if (initialBitrate > bitrate) { displayError('Clamping initial bitrate to max bitrate of ' + bitrate + ' kbps.'); initialBitrate = bitrate; params.videoSendInitialBitrate = initialBitrate; } maxBitrate = bitrate; } var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', 'video'); if (mLineIndex === null) { displayError('Failed to find video m-line'); return sdp; } var vp8RtpmapIndex = findLine(sdpLines, 'a=rtpmap', 'VP8/90000'); var vp8Payload = getCodecPayloadType(sdpLines[vp8RtpmapIndex]); var vp8Fmtp = 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + params.videoSendInitialBitrate.toString() + '; x-google-max-bitrate=' + maxBitrate.toString(); sdpLines.splice(vp8RtpmapIndex + 1, 0, vp8Fmtp); return sdpLines.join('\r\n'); } // Promotes |audioSendCodec| to be the first in the m=audio line, if set. function maybePreferAudioSendCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'send', params.audioSendCodec); } // Promotes |audioRecvCodec| to be the first in the m=audio line, if set. function maybePreferAudioReceiveCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'receive', params.audioRecvCodec); } // Promotes |videoSendCodec| to be the first in the m=audio line, if set. function maybePreferVideoSendCodec(sdp) { return maybePreferCodec(sdp, 'video', 'send', params.videoSendCodec); } // Promotes |videoRecvCodec| to be the first in the m=audio line, if set. function maybePreferVideoReceiveCodec(sdp) { return maybePreferCodec(sdp, 'video', 'receive', params.videoRecvCodec); } // Sets |codec| as the default |type| codec if it's present. // The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'. function maybePreferCodec(sdp, type, dir, codec) { var str = type + ' ' + dir + ' codec'; if (codec === '') { trace('No preference on ' + str + '.'); return sdp; } trace('Prefer ' + str + ': ' + codec); var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', type); if (mLineIndex === null) { return sdp; } // If the codec is available, set it as the default in m line. var codecIndex = findLine(sdpLines, 'a=rtpmap', codec); if (codecIndex) { var payload = getCodecPayloadType(sdpLines[codecIndex]); if (payload) { sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload); } } sdp = sdpLines.join('\r\n'); return sdp; } // Add fmtp param to specified codec in SDP. function addCodecParam(sdp, codec, param) { var sdpLines = sdp.split('\r\n'); // Find opus payload. var index = findLine(sdpLines, 'a=rtpmap', codec); var payload; if (index) { payload = getCodecPayloadType(sdpLines[index]); } // Find the payload in fmtp line. var fmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + payload.toString()); if (fmtpLineIndex === null) { return sdp; } sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat('; ', param); sdp = sdpLines.join('\r\n'); return sdp; } // Find the line in sdpLines that starts with |prefix|, and, if specified, // contains |substr| (case-insensitive search). function findLine(sdpLines, prefix, substr) { return findLineInRange(sdpLines, 0, -1, prefix, substr); } // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix| // and, if specified, contains |substr| (case-insensitive search). function findLineInRange(sdpLines, startLine, endLine, prefix, substr) { var realEndLine = endLine !== -1 ? endLine : sdpLines.length; for (var i = startLine; i < realEndLine; ++i) { if (sdpLines[i].indexOf(prefix) === 0) { if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) { return i; } } } return null; } // Gets the codec payload type from an a=rtpmap:X line. function getCodecPayloadType(sdpLine) { var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+'); var result = sdpLine.match(pattern); return (result && result.length === 2) ? result[1] : null; } // Returns a new m= line with the specified codec as the first one. function setDefaultCodec(mLine, payload) { var elements = mLine.split(' '); var newLine = []; var index = 0; for (var i = 0; i < elements.length; i++) { if (index === 3) { // Format of media starts from the fourth. newLine[index++] = payload; // Put target payload to the first. } if (elements[i] !== payload) { newLine[index++] = elements[i]; } } return newLine.join(' '); }
{ var merged = cons1; for (var name in cons2.mandatory) { merged.mandatory[name] = cons2.mandatory[name]; } merged.optional = merged.optional.concat(cons2.optional); return merged; }
identifier_body
sdputils.js
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* globals displayError, params */ /* exported addCodecParam, iceCandidateType, maybePreferAudioReceiveCodec, maybePreferAudioSendCodec, maybeSetAudioReceiveBitRate, maybeSetAudioSendBitRate, maybePreferVideoReceiveCodec, maybePreferVideoSendCodec, maybeSetVideoReceiveBitRate, maybeSetVideoSendBitRate, maybeSetVideoSendInitialBitRate, mergeConstraints */ 'use strict'; function mergeConstraints(cons1, cons2) { var merged = cons1; for (var name in cons2.mandatory) { merged.mandatory[name] = cons2.mandatory[name]; } merged.optional = merged.optional.concat(cons2.optional); return merged; } function
(candidateStr) { return candidateStr.split(' ')[7]; } function maybeSetAudioSendBitRate(sdp) { if (!params.audioSendBitrate) { return sdp; } trace('Prefer audio send bitrate: ' + params.audioSendBitrate); return preferBitRate(sdp, params.audioSendBitrate, 'audio'); } function maybeSetAudioReceiveBitRate(sdp) { if (!params.audioRecvBitrate) { return sdp; } trace('Prefer audio receive bitrate: ' + params.audioRecvBitrate); return preferBitRate(sdp, params.audioRecvBitrate, 'audio'); } function maybeSetVideoSendBitRate(sdp) { if (!params.videoSendBitrate) { return sdp; } trace('Prefer video send bitrate: ' + params.videoSendBitrate); return preferBitRate(sdp, params.videoSendBitrate, 'video'); } function maybeSetVideoReceiveBitRate(sdp) { if (!params.videoRecvBitrate) { return sdp; } trace('Prefer video receive bitrate: ' + params.videoRecvBitrate); return preferBitRate(sdp, params.videoRecvBitrate, 'video'); } // Add a b=AS:bitrate line to the m=mediaType section. function preferBitRate(sdp, bitrate, mediaType) { var sdpLines = sdp.split('\r\n'); // Find m line for the given mediaType. var mLineIndex = findLine(sdpLines, 'm=', mediaType); if (mLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no m-line found'); return sdp; } // Find next m-line if any. var nextMLineIndex = findLineInRange(sdpLines, mLineIndex + 1, -1, 'm='); if (nextMLineIndex === null) { nextMLineIndex = sdpLines.length; } // Find c-line corresponding to the m-line. var cLineIndex = findLineInRange(sdpLines, mLineIndex + 1, nextMLineIndex, 'c='); if (cLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no c-line found'); return sdp; } // Check if bandwidth line already exists between c-line and next m-line. var bLineIndex = findLineInRange(sdpLines, cLineIndex + 1, nextMLineIndex, 'b=AS'); if (bLineIndex) { sdpLines.splice(bLineIndex, 1); } // Create the b (bandwidth) sdp line. var bwLine = 'b=AS:' + bitrate; // As per RFC 4566, the b line should follow after c-line. sdpLines.splice(cLineIndex + 1, 0, bwLine); sdp = sdpLines.join('\r\n'); return sdp; } // Add an a=fmtp: x-google-min-bitrate=kbps line, if videoSendInitialBitrate // is specified. We'll also add a x-google-min-bitrate value, since the max // must be >= the min. function maybeSetVideoSendInitialBitRate(sdp) { var initialBitrate = params.videoSendInitialBitrate; if (!initialBitrate) { return sdp; } // Validate the initial bitrate value. var maxBitrate = initialBitrate; var bitrate = params.videoSendBitrate; if (bitrate) { if (initialBitrate > bitrate) { displayError('Clamping initial bitrate to max bitrate of ' + bitrate + ' kbps.'); initialBitrate = bitrate; params.videoSendInitialBitrate = initialBitrate; } maxBitrate = bitrate; } var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', 'video'); if (mLineIndex === null) { displayError('Failed to find video m-line'); return sdp; } var vp8RtpmapIndex = findLine(sdpLines, 'a=rtpmap', 'VP8/90000'); var vp8Payload = getCodecPayloadType(sdpLines[vp8RtpmapIndex]); var vp8Fmtp = 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + params.videoSendInitialBitrate.toString() + '; x-google-max-bitrate=' + maxBitrate.toString(); sdpLines.splice(vp8RtpmapIndex + 1, 0, vp8Fmtp); return sdpLines.join('\r\n'); } // Promotes |audioSendCodec| to be the first in the m=audio line, if set. function maybePreferAudioSendCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'send', params.audioSendCodec); } // Promotes |audioRecvCodec| to be the first in the m=audio line, if set. function maybePreferAudioReceiveCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'receive', params.audioRecvCodec); } // Promotes |videoSendCodec| to be the first in the m=audio line, if set. function maybePreferVideoSendCodec(sdp) { return maybePreferCodec(sdp, 'video', 'send', params.videoSendCodec); } // Promotes |videoRecvCodec| to be the first in the m=audio line, if set. function maybePreferVideoReceiveCodec(sdp) { return maybePreferCodec(sdp, 'video', 'receive', params.videoRecvCodec); } // Sets |codec| as the default |type| codec if it's present. // The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'. function maybePreferCodec(sdp, type, dir, codec) { var str = type + ' ' + dir + ' codec'; if (codec === '') { trace('No preference on ' + str + '.'); return sdp; } trace('Prefer ' + str + ': ' + codec); var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', type); if (mLineIndex === null) { return sdp; } // If the codec is available, set it as the default in m line. var codecIndex = findLine(sdpLines, 'a=rtpmap', codec); if (codecIndex) { var payload = getCodecPayloadType(sdpLines[codecIndex]); if (payload) { sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload); } } sdp = sdpLines.join('\r\n'); return sdp; } // Add fmtp param to specified codec in SDP. function addCodecParam(sdp, codec, param) { var sdpLines = sdp.split('\r\n'); // Find opus payload. var index = findLine(sdpLines, 'a=rtpmap', codec); var payload; if (index) { payload = getCodecPayloadType(sdpLines[index]); } // Find the payload in fmtp line. var fmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + payload.toString()); if (fmtpLineIndex === null) { return sdp; } sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat('; ', param); sdp = sdpLines.join('\r\n'); return sdp; } // Find the line in sdpLines that starts with |prefix|, and, if specified, // contains |substr| (case-insensitive search). function findLine(sdpLines, prefix, substr) { return findLineInRange(sdpLines, 0, -1, prefix, substr); } // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix| // and, if specified, contains |substr| (case-insensitive search). function findLineInRange(sdpLines, startLine, endLine, prefix, substr) { var realEndLine = endLine !== -1 ? endLine : sdpLines.length; for (var i = startLine; i < realEndLine; ++i) { if (sdpLines[i].indexOf(prefix) === 0) { if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) { return i; } } } return null; } // Gets the codec payload type from an a=rtpmap:X line. function getCodecPayloadType(sdpLine) { var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+'); var result = sdpLine.match(pattern); return (result && result.length === 2) ? result[1] : null; } // Returns a new m= line with the specified codec as the first one. function setDefaultCodec(mLine, payload) { var elements = mLine.split(' '); var newLine = []; var index = 0; for (var i = 0; i < elements.length; i++) { if (index === 3) { // Format of media starts from the fourth. newLine[index++] = payload; // Put target payload to the first. } if (elements[i] !== payload) { newLine[index++] = elements[i]; } } return newLine.join(' '); }
iceCandidateType
identifier_name
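The sdputils.js record above fills in the body of `iceCandidateType`, which relies on the fixed token layout of an ICE candidate attribute. A minimal TypeScript sketch of the same trick follows; the sample candidate string is a made-up example, not taken from the record:

```ts
// RFC 5245 candidate lines look like:
// candidate:<foundation> <component> <transport> <priority> <ip> <port> typ <type> ...
// so the 8th space-separated token (index 7) is the candidate type.
function iceCandidateType(candidateStr: string): string {
  return candidateStr.split(' ')[7];
}

const sample =
  'candidate:1234 1 udp 2122260223 192.0.2.1 54321 typ srflx raddr 10.0.0.1 rport 54321';
console.log(iceCandidateType(sample)); // "srflx"
```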
sdputils.js
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* globals displayError, params */ /* exported addCodecParam, iceCandidateType, maybePreferAudioReceiveCodec, maybePreferAudioSendCodec, maybeSetAudioReceiveBitRate, maybeSetAudioSendBitRate, maybePreferVideoReceiveCodec, maybePreferVideoSendCodec, maybeSetVideoReceiveBitRate, maybeSetVideoSendBitRate, maybeSetVideoSendInitialBitRate, mergeConstraints */ 'use strict'; function mergeConstraints(cons1, cons2) { var merged = cons1; for (var name in cons2.mandatory) { merged.mandatory[name] = cons2.mandatory[name]; } merged.optional = merged.optional.concat(cons2.optional); return merged; } function iceCandidateType(candidateStr) { return candidateStr.split(' ')[7]; } function maybeSetAudioSendBitRate(sdp) { if (!params.audioSendBitrate) { return sdp; } trace('Prefer audio send bitrate: ' + params.audioSendBitrate); return preferBitRate(sdp, params.audioSendBitrate, 'audio'); } function maybeSetAudioReceiveBitRate(sdp) { if (!params.audioRecvBitrate) { return sdp; } trace('Prefer audio receive bitrate: ' + params.audioRecvBitrate); return preferBitRate(sdp, params.audioRecvBitrate, 'audio'); } function maybeSetVideoSendBitRate(sdp) { if (!params.videoSendBitrate) { return sdp; } trace('Prefer video send bitrate: ' + params.videoSendBitrate); return preferBitRate(sdp, params.videoSendBitrate, 'video'); } function maybeSetVideoReceiveBitRate(sdp) { if (!params.videoRecvBitrate) { return sdp; } trace('Prefer video receive bitrate: ' + params.videoRecvBitrate); return preferBitRate(sdp, params.videoRecvBitrate, 'video'); } // Add a b=AS:bitrate line to the m=mediaType section. function preferBitRate(sdp, bitrate, mediaType) { var sdpLines = sdp.split('\r\n'); // Find m line for the given mediaType. var mLineIndex = findLine(sdpLines, 'm=', mediaType); if (mLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no m-line found'); return sdp; } // Find next m-line if any. var nextMLineIndex = findLineInRange(sdpLines, mLineIndex + 1, -1, 'm='); if (nextMLineIndex === null) { nextMLineIndex = sdpLines.length; } // Find c-line corresponding to the m-line. var cLineIndex = findLineInRange(sdpLines, mLineIndex + 1, nextMLineIndex, 'c='); if (cLineIndex === null) { displayError('Failed to add bandwidth line to sdp, as no c-line found'); return sdp; } // Check if bandwidth line already exists between c-line and next m-line. var bLineIndex = findLineInRange(sdpLines, cLineIndex + 1, nextMLineIndex, 'b=AS'); if (bLineIndex) { sdpLines.splice(bLineIndex, 1); } // Create the b (bandwidth) sdp line. var bwLine = 'b=AS:' + bitrate; // As per RFC 4566, the b line should follow after c-line. sdpLines.splice(cLineIndex + 1, 0, bwLine); sdp = sdpLines.join('\r\n'); return sdp; } // Add an a=fmtp: x-google-min-bitrate=kbps line, if videoSendInitialBitrate // is specified. We'll also add a x-google-min-bitrate value, since the max // must be >= the min. function maybeSetVideoSendInitialBitRate(sdp) { var initialBitrate = params.videoSendInitialBitrate; if (!initialBitrate) { return sdp; } // Validate the initial bitrate value. 
var maxBitrate = initialBitrate; var bitrate = params.videoSendBitrate; if (bitrate) { if (initialBitrate > bitrate) { displayError('Clamping initial bitrate to max bitrate of ' + bitrate + ' kbps.'); initialBitrate = bitrate; params.videoSendInitialBitrate = initialBitrate; } maxBitrate = bitrate; } var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', 'video'); if (mLineIndex === null) { displayError('Failed to find video m-line'); return sdp; } var vp8RtpmapIndex = findLine(sdpLines, 'a=rtpmap', 'VP8/90000'); var vp8Payload = getCodecPayloadType(sdpLines[vp8RtpmapIndex]); var vp8Fmtp = 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + params.videoSendInitialBitrate.toString() + '; x-google-max-bitrate=' + maxBitrate.toString(); sdpLines.splice(vp8RtpmapIndex + 1, 0, vp8Fmtp); return sdpLines.join('\r\n'); } // Promotes |audioSendCodec| to be the first in the m=audio line, if set. function maybePreferAudioSendCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'send', params.audioSendCodec); } // Promotes |audioRecvCodec| to be the first in the m=audio line, if set. function maybePreferAudioReceiveCodec(sdp) { return maybePreferCodec(sdp, 'audio', 'receive', params.audioRecvCodec); } // Promotes |videoSendCodec| to be the first in the m=audio line, if set. function maybePreferVideoSendCodec(sdp) { return maybePreferCodec(sdp, 'video', 'send', params.videoSendCodec); } // Promotes |videoRecvCodec| to be the first in the m=audio line, if set. function maybePreferVideoReceiveCodec(sdp) { return maybePreferCodec(sdp, 'video', 'receive', params.videoRecvCodec); } // Sets |codec| as the default |type| codec if it's present. // The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'. function maybePreferCodec(sdp, type, dir, codec) { var str = type + ' ' + dir + ' codec'; if (codec === '') { trace('No preference on ' + str + '.'); return sdp; } trace('Prefer ' + str + ': ' + codec); var sdpLines = sdp.split('\r\n'); // Search for m line. var mLineIndex = findLine(sdpLines, 'm=', type); if (mLineIndex === null) { return sdp;
var payload = getCodecPayloadType(sdpLines[codecIndex]); if (payload) { sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload); } } sdp = sdpLines.join('\r\n'); return sdp; } // Add fmtp param to specified codec in SDP. function addCodecParam(sdp, codec, param) { var sdpLines = sdp.split('\r\n'); // Find opus payload. var index = findLine(sdpLines, 'a=rtpmap', codec); var payload; if (index) { payload = getCodecPayloadType(sdpLines[index]); } // Find the payload in fmtp line. var fmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + payload.toString()); if (fmtpLineIndex === null) { return sdp; } sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat('; ', param); sdp = sdpLines.join('\r\n'); return sdp; } // Find the line in sdpLines that starts with |prefix|, and, if specified, // contains |substr| (case-insensitive search). function findLine(sdpLines, prefix, substr) { return findLineInRange(sdpLines, 0, -1, prefix, substr); } // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix| // and, if specified, contains |substr| (case-insensitive search). function findLineInRange(sdpLines, startLine, endLine, prefix, substr) { var realEndLine = endLine !== -1 ? endLine : sdpLines.length; for (var i = startLine; i < realEndLine; ++i) { if (sdpLines[i].indexOf(prefix) === 0) { if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) { return i; } } } return null; } // Gets the codec payload type from an a=rtpmap:X line. function getCodecPayloadType(sdpLine) { var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+'); var result = sdpLine.match(pattern); return (result && result.length === 2) ? result[1] : null; } // Returns a new m= line with the specified codec as the first one. function setDefaultCodec(mLine, payload) { var elements = mLine.split(' '); var newLine = []; var index = 0; for (var i = 0; i < elements.length; i++) { if (index === 3) { // Format of media starts from the fourth. newLine[index++] = payload; // Put target payload to the first. } if (elements[i] !== payload) { newLine[index++] = elements[i]; } } return newLine.join(' '); }
} // If the codec is available, set it as the default in m line. var codecIndex = findLine(sdpLines, 'a=rtpmap', codec); if (codecIndex) {
random_line_split
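The `preferBitRate` helper in the sdputils.js records inserts a `b=AS:<kbps>` bandwidth line right after the c-line of the matching media section. A simplified sketch of that technique, assuming a single media section (unlike the original, it does not bound the c-line search at the next m-line); the SDP fragment is invented for illustration:

```ts
// Insert a "b=AS:<kbps>" line after the c-line of the given media section.
function addBandwidthLine(sdp: string, mediaType: string, kbps: number): string {
  const lines = sdp.split('\r\n');
  const mIndex = lines.findIndex((l) => l.startsWith('m=' + mediaType));
  if (mIndex === -1) return sdp; // no such media section
  // The section's c-line comes after its m-line.
  const cIndex = lines.findIndex((l, i) => i > mIndex && l.startsWith('c='));
  if (cIndex === -1) return sdp;
  lines.splice(cIndex + 1, 0, 'b=AS:' + kbps); // RFC 4566: b-line follows the c-line
  return lines.join('\r\n');
}

const sdp = [
  'v=0',
  'm=audio 9 UDP/TLS/RTP/SAVPF 111',
  'c=IN IP4 0.0.0.0',
  'a=rtpmap:111 opus/48000',
].join('\r\n');
console.log(addBandwidthLine(sdp, 'audio', 50)); // b=AS:50 appears after the c-line
```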
levelAnimation.ts
export class LevelAnimation { // private _animations: LevelAnimation[] = []; private _progressMs: number; constructor(seed: number, animation: LevelAnimation, progress: number)
// /** // * Returns an array of animations. // * Every levelanim is an array of shapes, or null // */ // update(delta: number, gameStarted: boolean): ShapeCollection[] { // const shapeCollections = []; // this._progressMs += delta; // for (let i = 0, m = this._animations.length; i < m; i++) { // shapeCollections.push(this._animations[i].update(this._progressMs, gameStarted)); // } // return shapeCollections; // } }
{ // this._animations = animation ? animation(seed) : []; /** * @type {number} * @private */ this._progressMs = progress || 0; }
identifier_body
levelAnimation.ts
export class LevelAnimation { // private _animations: LevelAnimation[] = []; private _progressMs: number; constructor(seed: number, animation: LevelAnimation, progress: number) { // this._animations = animation ? animation(seed) : []; /**
} // /** // * Returns an array of animations. // * Every levelanim is an array of shapes, or null // */ // update(delta: number, gameStarted: boolean): ShapeCollection[] { // const shapeCollections = []; // this._progressMs += delta; // for (let i = 0, m = this._animations.length; i < m; i++) { // shapeCollections.push(this._animations[i].update(this._progressMs, gameStarted)); // } // return shapeCollections; // } }
* @type {number} * @private */ this._progressMs = progress || 0;
random_line_split
levelAnimation.ts
export class LevelAnimation { // private _animations: LevelAnimation[] = []; private _progressMs: number;
(seed: number, animation: LevelAnimation, progress: number) { // this._animations = animation ? animation(seed) : []; /** * @type {number} * @private */ this._progressMs = progress || 0; } // /** // * Returns an array of animations. // * Every levelanim is an array of shapes, or null // */ // update(delta: number, gameStarted: boolean): ShapeCollection[] { // const shapeCollections = []; // this._progressMs += delta; // for (let i = 0, m = this._animations.length; i < m; i++) { // shapeCollections.push(this._animations[i].update(this._progressMs, gameStarted)); // } // return shapeCollections; // } }
constructor
identifier_name
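The levelAnimation.ts records describe (in the commented-out `update`) a delta-accumulation pattern: keep a running progress in milliseconds and feed it to each child animation. A sketch of that pattern with stand-in types (`ShapeCollection` and `ChildAnimation` are hypothetical, not the game's real interfaces):

```ts
type ShapeCollection = unknown[] | null;

interface ChildAnimation {
  update(progressMs: number, gameStarted: boolean): ShapeCollection;
}

class LevelAnimationSketch {
  private progressMs: number;

  constructor(private animations: ChildAnimation[], progress = 0) {
    this.progressMs = progress; // resume from saved progress, default 0
  }

  update(delta: number, gameStarted: boolean): ShapeCollection[] {
    this.progressMs += delta; // accumulate elapsed time across frames
    return this.animations.map((a) => a.update(this.progressMs, gameStarted));
  }
}
```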
OrbeonForms.js
$identify("org/mathdox/formulaeditor/OrbeonForms.js"); $require("org/mathdox/formulaeditor/FormulaEditor.js"); var ORBEON; $main(function(){ if (ORBEON && ORBEON.xforms && ORBEON.xforms.Document) { /** * Extend the save function of the formula editor to use the orbeon update
*/ org.mathdox.formulaeditor.FormulaEditor = $extend(org.mathdox.formulaeditor.FormulaEditor, { save : function() { // call the parent function arguments.callee.parent.save.apply(this, arguments); // let orbeon know about the change of textarea content var textarea = this.textarea; if (textarea.id) { ORBEON.xforms.Document.setValue(textarea.id, textarea.value); } } }); /** * Override Orbeon's xformsHandleResponse method so that it initializes any * canvases that might have been added by the xforms engine. */ /* prevent an error if the xformsHandleResponse doesn't exist */ var xformsHandleResponse; var oldXformsHandleResponse; var newXformsHandleResponse; var ancientOrbeon; if (xformsHandleResponse) { oldXformsHandleResponse = xformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponse) { oldXformsHandleResponse = ORBEON.xforms.Server.handleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponseDom) { oldXformsHandleResponse = ORBEON.xforms.Server.handleResponseDom; } else if (ORBEON.xforms.server && ORBEON.xforms.server.AjaxServer && ORBEON.xforms.server.AjaxServer.handleResponseDom) { // orbeon 3.9 oldXformsHandleResponse = ORBEON.xforms.server.AjaxServer.handleResponseDom; } else { if (org.mathdox.formulaeditor.options.ancientOrbeon !== undefined && org.mathdox.formulaeditor.options.ancientOrbeon == true) { ancientOrbeon = true; } else { ancientOrbeon = false; alert("ERROR: detected orbeon, but could not add response handler"); } } newXformsHandleResponse = function(request) { // call the overridden method if (ancientOrbeon != true ) { oldXformsHandleResponse.apply(this, arguments); } // go through all canvases in the document var canvases = document.getElementsByTagName("canvas"); for (var i=0; i<canvases.length; i++) { // initialize a FormulaEditor for each canvas var canvas = canvases[i]; if (canvas.nextSibling) { if (canvas.nextSibling.tagName.toLowerCase() == "textarea") { var FormulaEditor = org.mathdox.formulaeditor.FormulaEditor; var editor = new FormulaEditor(canvas.nextSibling, canvas); // (re-)load the contents of the textarea into the editor editor.load(); } } } }; if (xformsHandleResponse) { xformsHandleResponse = newXformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponse) { ORBEON.xforms.Server.handleResponse = newXformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponseDom) { ORBEON.xforms.Server.handleResponseDom = newXformsHandleResponse; } else if (ORBEON.xforms.server && ORBEON.xforms.server.AjaxServer && ORBEON.xforms.server.AjaxServer.handleResponseDom) { ORBEON.xforms.server.AjaxServer.handleResponseDom = newXformsHandleResponse; } } });
* mechanism, see also: * http://www.orbeon.com/ops/doc/reference-xforms-2#xforms-javascript
random_line_split
OrbeonForms.js
$identify("org/mathdox/formulaeditor/OrbeonForms.js"); $require("org/mathdox/formulaeditor/FormulaEditor.js"); var ORBEON; $main(function(){ if (ORBEON && ORBEON.xforms && ORBEON.xforms.Document) { /** * Extend the save function of the formula editor to use the orbeon update * mechanism, see also: * http://www.orbeon.com/ops/doc/reference-xforms-2#xforms-javascript */ org.mathdox.formulaeditor.FormulaEditor = $extend(org.mathdox.formulaeditor.FormulaEditor, { save : function() { // call the parent function arguments.callee.parent.save.apply(this, arguments); // let orbeon know about the change of textarea content var textarea = this.textarea; if (textarea.id) { ORBEON.xforms.Document.setValue(textarea.id, textarea.value); } } }); /** * Override Orbeon's xformsHandleResponse method so that it initializes any * canvases that might have been added by the xforms engine. */ /* prevent an error if the xformsHandleResponse doesn't exist */ var xformsHandleResponse; var oldXformsHandleResponse; var newXformsHandleResponse; var ancientOrbeon; if (xformsHandleResponse) { oldXformsHandleResponse = xformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponse) { oldXformsHandleResponse = ORBEON.xforms.Server.handleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponseDom) { oldXformsHandleResponse = ORBEON.xforms.Server.handleResponseDom; } else if (ORBEON.xforms.server && ORBEON.xforms.server.AjaxServer && ORBEON.xforms.server.AjaxServer.handleResponseDom) { // orbeon 3.9 oldXformsHandleResponse = ORBEON.xforms.server.AjaxServer.handleResponseDom; } else { if (org.mathdox.formulaeditor.options.ancientOrbeon !== undefined && org.mathdox.formulaeditor.options.ancientOrbeon == true) { ancientOrbeon = true; } else { ancientOrbeon = false; alert("ERROR: detected orbeon, but could not add response handler"); } } newXformsHandleResponse = function(request) { // call the overridden method if (ancientOrbeon != true )
// go through all canvases in the document var canvases = document.getElementsByTagName("canvas"); for (var i=0; i<canvases.length; i++) { // initialize a FormulaEditor for each canvas var canvas = canvases[i]; if (canvas.nextSibling) { if (canvas.nextSibling.tagName.toLowerCase() == "textarea") { var FormulaEditor = org.mathdox.formulaeditor.FormulaEditor; var editor = new FormulaEditor(canvas.nextSibling, canvas); // (re-)load the contents of the textarea into the editor editor.load(); } } } }; if (xformsHandleResponse) { xformsHandleResponse = newXformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponse) { ORBEON.xforms.Server.handleResponse = newXformsHandleResponse; } else if (ORBEON.xforms.Server && ORBEON.xforms.Server.handleResponseDom) { ORBEON.xforms.Server.handleResponseDom = newXformsHandleResponse; } else if (ORBEON.xforms.server && ORBEON.xforms.server.AjaxServer && ORBEON.xforms.server.AjaxServer.handleResponseDom) { ORBEON.xforms.server.AjaxServer.handleResponseDom = newXformsHandleResponse; } } });
{ oldXformsHandleResponse.apply(this, arguments); }
conditional_block
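OrbeonForms.js patches whichever response handler the detected Orbeon version exposes by saving the original function and installing a wrapper that delegates to it before doing extra work. A generic sketch of that wrap-and-delegate pattern; the `host` shape and `wrapHandler` name are hypothetical:

```ts
type Handler = (request: unknown) => void;

// Replace host.handleResponse with a wrapper that first calls the
// original handler, then runs extra initialization.
function wrapHandler(host: { handleResponse: Handler }, extra: () => void): void {
  const original = host.handleResponse;
  host.handleResponse = function (request: unknown): void {
    original.call(host, request); // keep the framework's behaviour
    extra(); // then initialize anything the response added to the DOM
  };
}

// Usage (names hypothetical): wrapHandler(ORBEON.xforms.Server, initFormulaEditors);
```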
memory.py
# coding: utf-8 from __future__ import absolute_import from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError from apscheduler.util import datetime_to_utc_timestamp class MemoryJobStore(BaseJobStore): """ Stores jobs in an array in RAM. Provides no persistence support. Plugin alias: ``memory`` """ def __init__(self): super(MemoryJobStore, self).__init__() self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending) self._jobs_index = {} # id -> (job, timestamp) lookup table def lookup_job(self, job_id): return self._jobs_index.get(job_id, (None, None))[0] def get_due_jobs(self, now): now_timestamp = datetime_to_utc_timestamp(now) pending = [] for job, timestamp in self._jobs: if timestamp is None or timestamp > now_timestamp: break pending.append(job) return pending def get_next_run_time(self): return self._jobs[0][0].next_run_time if self._jobs else None def get_all_jobs(self): return [j[0] for j in self._jobs] def add_job(self, job): if job.id in self._jobs_index: raise ConflictingIdError(job.id) timestamp = datetime_to_utc_timestamp(job.next_run_time) index = self._get_job_index(timestamp, job.id) self._jobs.insert(index, (job, timestamp)) self._jobs_index[job.id] = (job, timestamp) def update_job(self, job): old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) if old_job is None: raise JobLookupError(job.id) # If the next run time has not changed, simply replace the job in its present index. # Otherwise, reinsert the job to the list to preserve the ordering. old_index = self._get_job_index(old_timestamp, old_job.id) new_timestamp = datetime_to_utc_timestamp(job.next_run_time) if old_timestamp == new_timestamp: self._jobs[old_index] = (job, new_timestamp) else: del self._jobs[old_index] new_index = self._get_job_index(new_timestamp, job.id) self._jobs.insert(new_index, (job, new_timestamp)) self._jobs_index[old_job.id] = (job, new_timestamp) def remove_job(self, job_id): job, timestamp = self._jobs_index.get(job_id, (None, None)) if job is None:
del self._jobs[index] del self._jobs_index[job.id] def remove_all_jobs(self): self._jobs = [] self._jobs_index = {} def shutdown(self): self.remove_all_jobs() def _get_job_index(self, timestamp, job_id): """ Returns the index of the given job, or if it's not found, the index where the job should be inserted based on the given timestamp. :type timestamp: int :type job_id: str """ lo, hi = 0, len(self._jobs) timestamp = float('inf') if timestamp is None else timestamp while lo < hi: mid = (lo + hi) // 2 mid_job, mid_timestamp = self._jobs[mid] mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp if mid_timestamp > timestamp: hi = mid elif mid_timestamp < timestamp: lo = mid + 1 elif mid_job.id > job_id: hi = mid elif mid_job.id < job_id: lo = mid + 1 else: return mid return lo
raise JobLookupError(job_id) index = self._get_job_index(timestamp, job_id)
random_line_split
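The memory.py records keep `self._jobs` sorted by (timestamp, id) and locate positions with a hand-rolled binary search, treating a missing timestamp as +Infinity so jobs without a next run time sort last. A TypeScript sketch of that insertion-index logic (the `Entry` type is a simplified stand-in, not the apscheduler API):

```ts
interface Entry { id: string; timestamp: number | null; }

// Return the index of the (timestamp, id) pair, or the index where it
// should be inserted to keep the list sorted.
function getJobIndex(jobs: Entry[], timestamp: number | null, id: string): number {
  const key = timestamp === null ? Infinity : timestamp;
  let lo = 0;
  let hi = jobs.length;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    const entry = jobs[mid];
    const midTs = entry.timestamp === null ? Infinity : entry.timestamp;
    if (midTs > key || (midTs === key && entry.id > id)) {
      hi = mid; // insertion point is to the left
    } else if (midTs < key || (midTs === key && entry.id < id)) {
      lo = mid + 1; // insertion point is to the right
    } else {
      return mid; // exact (timestamp, id) match found
    }
  }
  return lo; // index where the job should be inserted
}
```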
memory.py
# coding: utf-8 from __future__ import absolute_import from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError from apscheduler.util import datetime_to_utc_timestamp class MemoryJobStore(BaseJobStore): """ Stores jobs in an array in RAM. Provides no persistence support. Plugin alias: ``memory`` """ def __init__(self): super(MemoryJobStore, self).__init__() self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending) self._jobs_index = {} # id -> (job, timestamp) lookup table def lookup_job(self, job_id): return self._jobs_index.get(job_id, (None, None))[0] def get_due_jobs(self, now): now_timestamp = datetime_to_utc_timestamp(now) pending = [] for job, timestamp in self._jobs: if timestamp is None or timestamp > now_timestamp: break pending.append(job) return pending def get_next_run_time(self): return self._jobs[0][0].next_run_time if self._jobs else None def get_all_jobs(self): return [j[0] for j in self._jobs] def add_job(self, job): if job.id in self._jobs_index: raise ConflictingIdError(job.id) timestamp = datetime_to_utc_timestamp(job.next_run_time) index = self._get_job_index(timestamp, job.id) self._jobs.insert(index, (job, timestamp)) self._jobs_index[job.id] = (job, timestamp) def
(self, job): old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) if old_job is None: raise JobLookupError(job.id) # If the next run time has not changed, simply replace the job in its present index. # Otherwise, reinsert the job to the list to preserve the ordering. old_index = self._get_job_index(old_timestamp, old_job.id) new_timestamp = datetime_to_utc_timestamp(job.next_run_time) if old_timestamp == new_timestamp: self._jobs[old_index] = (job, new_timestamp) else: del self._jobs[old_index] new_index = self._get_job_index(new_timestamp, job.id) self._jobs.insert(new_index, (job, new_timestamp)) self._jobs_index[old_job.id] = (job, new_timestamp) def remove_job(self, job_id): job, timestamp = self._jobs_index.get(job_id, (None, None)) if job is None: raise JobLookupError(job_id) index = self._get_job_index(timestamp, job_id) del self._jobs[index] del self._jobs_index[job.id] def remove_all_jobs(self): self._jobs = [] self._jobs_index = {} def shutdown(self): self.remove_all_jobs() def _get_job_index(self, timestamp, job_id): """ Returns the index of the given job, or if it's not found, the index where the job should be inserted based on the given timestamp. :type timestamp: int :type job_id: str """ lo, hi = 0, len(self._jobs) timestamp = float('inf') if timestamp is None else timestamp while lo < hi: mid = (lo + hi) // 2 mid_job, mid_timestamp = self._jobs[mid] mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp if mid_timestamp > timestamp: hi = mid elif mid_timestamp < timestamp: lo = mid + 1 elif mid_job.id > job_id: hi = mid elif mid_job.id < job_id: lo = mid + 1 else: return mid return lo
update_job
identifier_name
memory.py
# coding: utf-8 from __future__ import absolute_import from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError from apscheduler.util import datetime_to_utc_timestamp class MemoryJobStore(BaseJobStore): """ Stores jobs in an array in RAM. Provides no persistence support. Plugin alias: ``memory`` """ def __init__(self): super(MemoryJobStore, self).__init__() self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending) self._jobs_index = {} # id -> (job, timestamp) lookup table def lookup_job(self, job_id): return self._jobs_index.get(job_id, (None, None))[0] def get_due_jobs(self, now): now_timestamp = datetime_to_utc_timestamp(now) pending = [] for job, timestamp in self._jobs: if timestamp is None or timestamp > now_timestamp: break pending.append(job) return pending def get_next_run_time(self):
def get_all_jobs(self): return [j[0] for j in self._jobs] def add_job(self, job): if job.id in self._jobs_index: raise ConflictingIdError(job.id) timestamp = datetime_to_utc_timestamp(job.next_run_time) index = self._get_job_index(timestamp, job.id) self._jobs.insert(index, (job, timestamp)) self._jobs_index[job.id] = (job, timestamp) def update_job(self, job): old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) if old_job is None: raise JobLookupError(job.id) # If the next run time has not changed, simply replace the job in its present index. # Otherwise, reinsert the job to the list to preserve the ordering. old_index = self._get_job_index(old_timestamp, old_job.id) new_timestamp = datetime_to_utc_timestamp(job.next_run_time) if old_timestamp == new_timestamp: self._jobs[old_index] = (job, new_timestamp) else: del self._jobs[old_index] new_index = self._get_job_index(new_timestamp, job.id) self._jobs.insert(new_index, (job, new_timestamp)) self._jobs_index[old_job.id] = (job, new_timestamp) def remove_job(self, job_id): job, timestamp = self._jobs_index.get(job_id, (None, None)) if job is None: raise JobLookupError(job_id) index = self._get_job_index(timestamp, job_id) del self._jobs[index] del self._jobs_index[job.id] def remove_all_jobs(self): self._jobs = [] self._jobs_index = {} def shutdown(self): self.remove_all_jobs() def _get_job_index(self, timestamp, job_id): """ Returns the index of the given job, or if it's not found, the index where the job should be inserted based on the given timestamp. :type timestamp: int :type job_id: str """ lo, hi = 0, len(self._jobs) timestamp = float('inf') if timestamp is None else timestamp while lo < hi: mid = (lo + hi) // 2 mid_job, mid_timestamp = self._jobs[mid] mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp if mid_timestamp > timestamp: hi = mid elif mid_timestamp < timestamp: lo = mid + 1 elif mid_job.id > job_id: hi = mid elif mid_job.id < job_id: lo = mid + 1 else: return mid return lo
return self._jobs[0][0].next_run_time if self._jobs else None
identifier_body
memory.py
# coding: utf-8 from __future__ import absolute_import from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError from apscheduler.util import datetime_to_utc_timestamp class MemoryJobStore(BaseJobStore): """ Stores jobs in an array in RAM. Provides no persistence support. Plugin alias: ``memory`` """ def __init__(self): super(MemoryJobStore, self).__init__() self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending) self._jobs_index = {} # id -> (job, timestamp) lookup table def lookup_job(self, job_id): return self._jobs_index.get(job_id, (None, None))[0] def get_due_jobs(self, now): now_timestamp = datetime_to_utc_timestamp(now) pending = [] for job, timestamp in self._jobs: if timestamp is None or timestamp > now_timestamp: break pending.append(job) return pending def get_next_run_time(self): return self._jobs[0][0].next_run_time if self._jobs else None def get_all_jobs(self): return [j[0] for j in self._jobs] def add_job(self, job): if job.id in self._jobs_index: raise ConflictingIdError(job.id) timestamp = datetime_to_utc_timestamp(job.next_run_time) index = self._get_job_index(timestamp, job.id) self._jobs.insert(index, (job, timestamp)) self._jobs_index[job.id] = (job, timestamp) def update_job(self, job): old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) if old_job is None: raise JobLookupError(job.id) # If the next run time has not changed, simply replace the job in its present index. # Otherwise, reinsert the job to the list to preserve the ordering. old_index = self._get_job_index(old_timestamp, old_job.id) new_timestamp = datetime_to_utc_timestamp(job.next_run_time) if old_timestamp == new_timestamp: self._jobs[old_index] = (job, new_timestamp) else:
self._jobs_index[old_job.id] = (job, new_timestamp) def remove_job(self, job_id): job, timestamp = self._jobs_index.get(job_id, (None, None)) if job is None: raise JobLookupError(job_id) index = self._get_job_index(timestamp, job_id) del self._jobs[index] del self._jobs_index[job.id] def remove_all_jobs(self): self._jobs = [] self._jobs_index = {} def shutdown(self): self.remove_all_jobs() def _get_job_index(self, timestamp, job_id): """ Returns the index of the given job, or if it's not found, the index where the job should be inserted based on the given timestamp. :type timestamp: int :type job_id: str """ lo, hi = 0, len(self._jobs) timestamp = float('inf') if timestamp is None else timestamp while lo < hi: mid = (lo + hi) // 2 mid_job, mid_timestamp = self._jobs[mid] mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp if mid_timestamp > timestamp: hi = mid elif mid_timestamp < timestamp: lo = mid + 1 elif mid_job.id > job_id: hi = mid elif mid_job.id < job_id: lo = mid + 1 else: return mid return lo
del self._jobs[old_index] new_index = self._get_job_index(new_timestamp, job.id) self._jobs.insert(new_index, (job, new_timestamp))
conditional_block
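`update_job` in the same records applies a small optimization: if the next run time is unchanged, the entry is replaced in place; otherwise it is deleted and re-inserted to preserve ordering. A sketch of that update path, reusing the `Entry` type and `getJobIndex` sketch above (the `Map`-based index is a stand-in for `_jobs_index`):

```ts
function updateEntry(jobs: Entry[], index: Map<string, Entry>, updated: Entry): void {
  const old = index.get(updated.id);
  if (!old) throw new Error('unknown job: ' + updated.id);
  const oldIdx = getJobIndex(jobs, old.timestamp, old.id);
  if (old.timestamp === updated.timestamp) {
    jobs[oldIdx] = updated; // same slot, cheap in-place replace
  } else {
    jobs.splice(oldIdx, 1); // remove from old position
    const newIdx = getJobIndex(jobs, updated.timestamp, updated.id);
    jobs.splice(newIdx, 0, updated); // insert at new sorted position
  }
  index.set(updated.id, updated);
}
```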
all_62.js
var searchData= [ ['base_5fvalue',['base_value',['../classAdData.html#a156da702ebf4837590f4854dc7c2bc81',1,'AdData']]],
['bench_2ecpp',['bench.cpp',['../bench_8cpp.html',1,'']]], ['bench_2ehpp',['bench.hpp',['../bench_8hpp.html',1,'']]], ['block_5fof_5fmessages',['block_of_messages',['../classAdDataParallel.html#a9451414d03458e18a43331bc8eeb9db0',1,'AdDataParallel']]], ['break_5fnl',['break_nl',['../classAdData.html#a9c23c8706e4564395bb087d08e05f1f0',1,'AdData']]] ];
['bench',['Bench',['../classBench.html',1,'']]],
random_line_split
SafeAreaProviderCompat.tsx
import * as React from 'react'; import { Dimensions, Platform, StyleProp, StyleSheet, View, ViewStyle, } from 'react-native'; import { initialWindowMetrics, SafeAreaInsetsContext, SafeAreaProvider, } from 'react-native-safe-area-context'; type Props = { children: React.ReactNode; style?: StyleProp<ViewStyle>; }; const { width = 0, height = 0 } = Dimensions.get('window'); // To support SSR on web, we need to have empty insets for initial values // Otherwise there can be a mismatch between SSR and client output // We also need to specify empty values to support test environments const initialMetrics = Platform.OS === 'web' || initialWindowMetrics == null ? { frame: { x: 0, y: 0, width, height }, insets: { top: 0, left: 0, right: 0, bottom: 0 }, } : initialWindowMetrics; export default function SafeAreaProviderCompat({ children, style }: Props) { return (
<SafeAreaInsetsContext.Consumer> {(insets) => { if (insets) { // If we already have insets, don't wrap the stack in another safe area provider // This avoids an issue with updates at the cost of potentially incorrect values // https://github.com/react-navigation/react-navigation/issues/174 return <View style={[styles.container, style]}>{children}</View>; } return ( <SafeAreaProvider initialMetrics={initialMetrics} style={style}> {children} </SafeAreaProvider> ); }} </SafeAreaInsetsContext.Consumer> ); } SafeAreaProviderCompat.initialMetrics = initialMetrics; const styles = StyleSheet.create({ container: { flex: 1, }, });
random_line_split
SafeAreaProviderCompat.tsx
import * as React from 'react'; import { Dimensions, Platform, StyleProp, StyleSheet, View, ViewStyle, } from 'react-native'; import { initialWindowMetrics, SafeAreaInsetsContext, SafeAreaProvider, } from 'react-native-safe-area-context'; type Props = { children: React.ReactNode; style?: StyleProp<ViewStyle>; }; const { width = 0, height = 0 } = Dimensions.get('window'); // To support SSR on web, we need to have empty insets for initial values // Otherwise there can be a mismatch between SSR and client output // We also need to specify empty values to support test environments const initialMetrics = Platform.OS === 'web' || initialWindowMetrics == null ? { frame: { x: 0, y: 0, width, height }, insets: { top: 0, left: 0, right: 0, bottom: 0 }, } : initialWindowMetrics; export default function SafeAreaProviderCompat({ children, style }: Props) { return ( <SafeAreaInsetsContext.Consumer> {(insets) => { if (insets)
return ( <SafeAreaProvider initialMetrics={initialMetrics} style={style}> {children} </SafeAreaProvider> ); }} </SafeAreaInsetsContext.Consumer> ); } SafeAreaProviderCompat.initialMetrics = initialMetrics; const styles = StyleSheet.create({ container: { flex: 1, }, });
{ // If we already have insets, don't wrap the stack in another safe area provider // This avoids an issue with updates at the cost of potentially incorrect values // https://github.com/react-navigation/react-navigation/issues/174 return <View style={[styles.container, style]}>{children}</View>; }
conditional_block
SafeAreaProviderCompat.tsx
import * as React from 'react'; import { Dimensions, Platform, StyleProp, StyleSheet, View, ViewStyle, } from 'react-native'; import { initialWindowMetrics, SafeAreaInsetsContext, SafeAreaProvider, } from 'react-native-safe-area-context'; type Props = { children: React.ReactNode; style?: StyleProp<ViewStyle>; }; const { width = 0, height = 0 } = Dimensions.get('window'); // To support SSR on web, we need to have empty insets for initial values // Otherwise there can be a mismatch between SSR and client output // We also need to specify empty values to support test environments const initialMetrics = Platform.OS === 'web' || initialWindowMetrics == null ? { frame: { x: 0, y: 0, width, height }, insets: { top: 0, left: 0, right: 0, bottom: 0 }, } : initialWindowMetrics; export default function
({ children, style }: Props) { return ( <SafeAreaInsetsContext.Consumer> {(insets) => { if (insets) { // If we already have insets, don't wrap the stack in another safe area provider // This avoids an issue with updates at the cost of potentially incorrect values // https://github.com/react-navigation/react-navigation/issues/174 return <View style={[styles.container, style]}>{children}</View>; } return ( <SafeAreaProvider initialMetrics={initialMetrics} style={style}> {children} </SafeAreaProvider> ); }} </SafeAreaInsetsContext.Consumer> ); } SafeAreaProviderCompat.initialMetrics = initialMetrics; const styles = StyleSheet.create({ container: { flex: 1, }, });
SafeAreaProviderCompat
identifier_name
SafeAreaProviderCompat.tsx
import * as React from 'react'; import { Dimensions, Platform, StyleProp, StyleSheet, View, ViewStyle, } from 'react-native'; import { initialWindowMetrics, SafeAreaInsetsContext, SafeAreaProvider, } from 'react-native-safe-area-context'; type Props = { children: React.ReactNode; style?: StyleProp<ViewStyle>; }; const { width = 0, height = 0 } = Dimensions.get('window'); // To support SSR on web, we need to have empty insets for initial values // Otherwise there can be a mismatch between SSR and client output // We also need to specify empty values to support test environments const initialMetrics = Platform.OS === 'web' || initialWindowMetrics == null ? { frame: { x: 0, y: 0, width, height }, insets: { top: 0, left: 0, right: 0, bottom: 0 }, } : initialWindowMetrics; export default function SafeAreaProviderCompat({ children, style }: Props)
SafeAreaProviderCompat.initialMetrics = initialMetrics; const styles = StyleSheet.create({ container: { flex: 1, }, });
{ return ( <SafeAreaInsetsContext.Consumer> {(insets) => { if (insets) { // If we already have insets, don't wrap the stack in another safe area provider // This avoids an issue with updates at the cost of potentially incorrect values // https://github.com/react-navigation/react-navigation/issues/174 return <View style={[styles.container, style]}>{children}</View>; } return ( <SafeAreaProvider initialMetrics={initialMetrics} style={style}> {children} </SafeAreaProvider> ); }} </SafeAreaInsetsContext.Consumer> ); }
identifier_body
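The SafeAreaProviderCompat.tsx records compute fallback initial metrics so server-rendered and client output match: on web (or before native metrics exist) they use zeroed insets and the window frame, otherwise they trust the measured metrics. A framework-free sketch of that decision; the `Metrics` shape mirrors the component's types in simplified form:

```ts
interface Metrics {
  frame: { x: number; y: number; width: number; height: number };
  insets: { top: number; left: number; right: number; bottom: number };
}

function initialMetricsFor(
  platform: string,
  measured: Metrics | null,
  width: number,
  height: number
): Metrics {
  if (platform === 'web' || measured == null) {
    // Zeroed values keep SSR and client output identical.
    return {
      frame: { x: 0, y: 0, width, height },
      insets: { top: 0, left: 0, right: 0, bottom: 0 },
    };
  }
  return measured; // native platforms: use the real measured metrics
}
```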
strings.py
class
: ADD_SUPERVISOR = "You are now connected with " START_MONITORING = " wants to monitor you, click to allow" ACCEPTED_MONITORING= "You started monitoring " STOP_MONITORING_SUPERVISE = " canceled monitoring" STOP_MONITORING_SUPERVISOR = " is no longer monitoring you" APPLICATION_ANOMALY = " opened an unauthorized application" SPEED_ANOMALY = " drives faster than authorized. Speed: " MONITOR_ACCEPTED = "Monitor accepted" registration_id = "cRLRNiCkFPQ:APA91bENV-BxmM3iXhZ_DwlitVpI5nTvdqGhClq5K1M5sLIZ8aAca_EJnkX3MRl9p_tLGBGoUtvROR2gOVI5bDeTIegS-55C8DM-GAnGI0xdlHVTPM5P9fkSYEslS-EcOsK6Y6dAsPca" registration_other = "fWsYNQNkFfM:APA91bH_Rq5A1rYLapfmii62coTWgNvCMnqq1q8LIxsvNNByT-iPrU-EledwiKHyT7zzCFbPMkbqbZvdb-YVidkQq0u6jvOk_1RZsvK-Q1_XuM3gavyU63GvbgjNcgd5_Ws1Z-H4Xs_l"
Strings
identifier_name
strings.py
class Strings: ADD_SUPERVISOR = "You are now connected with " START_MONITORING = " wants to monitor you, click to allow" ACCEPTED_MONITORING= "You started monitoring " STOP_MONITORING_SUPERVISE = " canceled monitoring"
STOP_MONITORING_SUPERVISOR = " is no longer monitoring you" APPLICATION_ANOMALY = " opened an unauthorized application" SPEED_ANOMALY = " drives faster than authorized. Speed: " MONITOR_ACCEPTED = "Monitor accepted" registration_id = "cRLRNiCkFPQ:APA91bENV-BxmM3iXhZ_DwlitVpI5nTvdqGhClq5K1M5sLIZ8aAca_EJnkX3MRl9p_tLGBGoUtvROR2gOVI5bDeTIegS-55C8DM-GAnGI0xdlHVTPM5P9fkSYEslS-EcOsK6Y6dAsPca" registration_other = "fWsYNQNkFfM:APA91bH_Rq5A1rYLapfmii62coTWgNvCMnqq1q8LIxsvNNByT-iPrU-EledwiKHyT7zzCFbPMkbqbZvdb-YVidkQq0u6jvOk_1RZsvK-Q1_XuM3gavyU63GvbgjNcgd5_Ws1Z-H4Xs_l"
random_line_split
strings.py
class Strings:
ADD_SUPERVISOR = "You are now connected with " START_MONITORING = " wants to monitor you, click to allow" ACCEPTED_MONITORING= "You started monitoring " STOP_MONITORING_SUPERVISE = " canceled monitoring" STOP_MONITORING_SUPERVISOR = " is no longer monitoring you" APPLICATION_ANOMALY = " opened an unauthorized application" SPEED_ANOMALY = " drives faster than authorized. Speed: " MONITOR_ACCEPTED = "Monitor accepted" registration_id = "cRLRNiCkFPQ:APA91bENV-BxmM3iXhZ_DwlitVpI5nTvdqGhClq5K1M5sLIZ8aAca_EJnkX3MRl9p_tLGBGoUtvROR2gOVI5bDeTIegS-55C8DM-GAnGI0xdlHVTPM5P9fkSYEslS-EcOsK6Y6dAsPca" registration_other = "fWsYNQNkFfM:APA91bH_Rq5A1rYLapfmii62coTWgNvCMnqq1q8LIxsvNNByT-iPrU-EledwiKHyT7zzCFbPMkbqbZvdb-YVidkQq0u6jvOk_1RZsvK-Q1_XuM3gavyU63GvbgjNcgd5_Ws1Z-H4Xs_l"
identifier_body
home.client.controller.js
'use strict'; angular.module('core').controller('HomeController', ['$scope', '$http', '$location', 'Authentication', function($scope, $http, $location, Authentication) { // ===================================================================== // Non $scope member // ===================================================================== var init = function() { $scope.authentication = Authentication; }; init(); var redirectToHome = function(user) { var location = '/'; if(user.roles.indexOf('admin') !== -1) { location = '/admin/home'; } else if(user.roles.indexOf('ero') !== -1) { location = '/ero/home'; } else if(user.roles.indexOf('resource') !== -1) { location = '/resource/home'; } $location.path(location); }; if ($scope.authentication.user) { redirectToHome($scope.authentication.user); } // ===================================================================== // $scope Member // ===================================================================== $scope.prepare = function() { $scope.credentials = { email: null, password: null }; }; $scope.signin = function() { $scope.authenticationPromise = $http.post('/api/auth/signin', $scope.credentials).success(function(response) { $scope.authentication.user = response; redirectToHome($scope.authentication.user);
}; // ===================================================================== // Event listener // ===================================================================== } ]);
}).error(function(response) { $scope.error = response.message; });
random_line_split
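The home.client.controller.js record above redirects a signed-in user to a home route based on role priority. A small sketch of that role-to-route mapping; the role names come from the controller, while the function name is hypothetical:

```ts
// Check roles in priority order (admin > ero > resource), fall back to '/'.
function homePathFor(roles: string[]): string {
  if (roles.indexOf('admin') !== -1) return '/admin/home';
  if (roles.indexOf('ero') !== -1) return '/ero/home';
  if (roles.indexOf('resource') !== -1) return '/resource/home';
  return '/';
}

console.log(homePathFor(['resource'])); // "/resource/home"
```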
index.js
$('.message a').click(function(){
var name = document.getElementById('name1'); var pw = document.getElementById('pw'); function store() { localStorage.setItem('name1', name.value); localStorage.setItem('pw', pw.value); } function check() { // stored data from the register-form var storedName = localStorage.getItem('name1'); var storedPw = localStorage.getItem('pw'); // entered data from the login-form var userName = document.getElementById('userName'); var userPw = document.getElementById('userPw'); // check if stored data from register-form is equal to data from login form if(userName.value == storedName && userPw.value == storedPw) { alert('You are logged in.'); }else { alert('ERROR.'); } } var accounts = []; var profile = []; function finding(){ indexof(0) = username; } function login(){ var username = document.getElementById("usern").value; var passwo = document.getElementById("passw").value; while (username != accounts(profile[0])){ accounts.forEach(finding); var found = username; } if (password != (profile[username,1])){ alert("YES"); }else{ alert("NO"); } } function create(){ profile["UserName"] = "username"; profile["PassWord"] = "passwo"; profile["email"] = "email"; document.getElementById("username").value; document.getElementById("passwo").value; document.getElementById("email").value; profile.push("username"); profile.push("passwo"); profile.push("email"); accounts.push('profile') window.location.href = "index.html"; }
$('form').animate({height: "toggle", opacity: "toggle"}, "slow"); });
random_line_split
index.js
$('.message a').click(function(){ $('form').animate({height: "toggle", opacity: "toggle"}, "slow"); }); var name = document.getElementById('name1'); var pw = document.getElementById('pw'); function store() { localStorage.setItem('name1', name.value); localStorage.setItem('pw', pw.value); } function check() { // stored data from the register-form var storedName = localStorage.getItem('name1'); var storedPw = localStorage.getItem('pw'); // entered data from the login-form var userName = document.getElementById('userName'); var userPw = document.getElementById('userPw'); // check if stored data from register-form is equal to data from login form if(userName.value == storedName && userPw.value == storedPw) { alert('You are logged in.'); }else { alert('ERROR.'); } } var accounts = []; var profile = []; function finding(){ indexof(0) = username; } function login(){ var username = document.getElementById("usern").value; var passwo = document.getElementById("passw").value; while (username != accounts(profile[0])){ accounts.forEach(finding); var found = username; } if (password != (profile[username,1]))
else{ alert("NO"); } } function create(){ profile["UserName"] = "username"; profile["PassWord"] = "passwo"; profile["email"] = "email"; document.getElementById("username").value; document.getElementById("passwo").value; document.getElementById("email").value; profile.push("username"); profile.push("passwo"); profile.push("email"); accounts.push('profile') window.location.href = "index.html"; }
{ alert("YES"); }
conditional_block
index.js
$('.message a').click(function(){ $('form').animate({height: "toggle", opacity: "toggle"}, "slow"); }); var name = document.getElementById('name1'); var pw = document.getElementById('pw'); function store() { localStorage.setItem('name1', name.value); localStorage.setItem('pw', pw.value); } function check() { // stored data from the register-form var storedName = localStorage.getItem('name1'); var storedPw = localStorage.getItem('pw'); // entered data from the login-form var userName = document.getElementById('userName'); var userPw = document.getElementById('userPw'); // check if stored data from register-form is equal to data from login form if(userName.value == storedName && userPw.value == storedPw) { alert('You are logged in.'); }else { alert('ERROR.'); } } var accounts = []; var profile = []; function finding(){ indexof(0) = username; } function login(){ var username = document.getElementById("usern").value; var passwo = document.getElementById("passw").value; while (username != accounts(profile[0])){ accounts.forEach(finding); var found = username; } if (password != (profile[username,1])){ alert("YES"); }else{ alert("NO"); } } function
(){ profile["UserName"] = "username"; profile["PassWord"] = "passwo"; profile["email"] = "email"; document.getElementById("username").value; document.getElementById("passwo").value; document.getElementById("email").value; profile.push("username"); profile.push("passwo"); profile.push("email"); accounts.push('profile') window.location.href = "index.html"; }
create
identifier_name
index.js
$('.message a').click(function(){ $('form').animate({height: "toggle", opacity: "toggle"}, "slow"); }); var name = document.getElementById('name1'); var pw = document.getElementById('pw'); function store() { localStorage.setItem('name1', name.value); localStorage.setItem('pw', pw.value); } function check()
var accounts = []; var profile = []; function finding(){ indexof(0) = username; } function login(){ var username = document.getElementById("usern").value; var passwo = document.getElementById("passw").value; while (username != accounts(profile[0])){ accounts.forEach(finding); var found = username; } if (password != (profile[username,1])){ alert("YES"); }else{ alert("NO"); } } function create(){ profile["UserName"] = "username"; profile["PassWord"] = "passwo"; profile["email"] = "email"; document.getElementById("username").value; document.getElementById("passwo").value; document.getElementById("email").value; profile.push("username"); profile.push("passwo"); profile.push("email"); accounts.push('profile') window.location.href = "index.html"; }
{ // stored data from the register-form var storedName = localStorage.getItem('name1'); var storedPw = localStorage.getItem('pw'); // entered data from the login-form var userName = document.getElementById('userName'); var userPw = document.getElementById('userPw'); // check if stored data from register-form is equal to data from login form if(userName.value == storedName && userPw.value == storedPw) { alert('You are logged in.'); }else { alert('ERROR.'); } }
identifier_body
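The index.js records above contain several genuine bugs (assignment to a call expression in `finding`, a `while` loop that calls the `accounts` array as a function, and an inverted password check that alerts "YES" on mismatch), which are left as captured. A minimal working sketch of the same register/check idea follows; note that storing plaintext credentials in localStorage is insecure and only acceptable in a toy demo:

```ts
// Save credentials at registration time (demo only - never store
// plaintext passwords in real applications).
function register(name: string, password: string): void {
  localStorage.setItem('name1', name);
  localStorage.setItem('pw', password);
}

// Compare the login form's values against the stored credentials.
function login(name: string, password: string): boolean {
  const storedName = localStorage.getItem('name1');
  const storedPw = localStorage.getItem('pw');
  return name === storedName && password === storedPw;
}

// register('alice', 'secret');
// console.log(login('alice', 'secret')); // true
```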
java_compile.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.util.dirutil import relativize_paths, safe_mkdir # From http://kenai.com/projects/jmake/sources/mercurial/content # /src/com/sun/tools/jmake/Main.java?rev=26 # Main.mainExternal docs. _JMAKE_ERROR_CODES = { -1: 'invalid command line option detected', -2: 'error reading command file', -3: 'project database corrupted', -4: 'error initializing or calling the compiler', -5: 'compilation error', -6: 'error parsing a class file', -7: 'file not found', -8: 'I/O exception', -9: 'internal jmake exception', -10: 'deduced and actual class name mismatch', -11: 'invalid source file extension', -12: 'a class in a JAR is found dependent on a class with the .java source', -13: 'more than one entry for the same class is found in the project', -20: 'internal Java error (caused by java.lang.InternalError)', -30: 'internal Java error (caused by java.lang.RuntimeException).' } # When executed via a subprocess return codes will be treated as unsigned _JMAKE_ERROR_CODES.update((256 + code, msg) for code, msg in _JMAKE_ERROR_CODES.items()) class JmakeCompile(JvmCompile): """Compile Java code using JMake.""" _name = 'java' _file_suffix = '.java' _supports_concurrent_execution = False _JMAKE_MAIN = 'org.pantsbuild.jmake.Main' @classmethod def get_args_default(cls, bootstrap_option_values): workdir_gen = os.path.relpath(os.path.join(bootstrap_option_values.pants_workdir, 'gen'), get_buildroot()) return ('-C-encoding', '-CUTF-8', '-C-g', '-C-Tcolor', # Don't warn for generated code. '-C-Tnowarnprefixes', '-C{0}'.format(workdir_gen), # Suppress warning for annotations with no processor - we know there are many of these! '-C-Tnowarnregex', '-C^(warning: )?No processor claimed any of these annotations: .*') @classmethod def get_warning_args_default(cls): return ('-C-Xlint:all', '-C-Xlint:-serial', '-C-Xlint:-path', '-C-deprecation') @classmethod def get_no_warning_args_default(cls): return ('-C-Xlint:none', '-C-nowarn') @classmethod def register_options(cls, register): super(JmakeCompile, cls).register_options(register) register('--use-jmake', advanced=True, action='store_true', default=True, fingerprint=True, help='Use jmake to compile Java targets') register('--source', advanced=True, fingerprint=True, help='Provide source compatibility with this release. Overrides the jvm platform ' 'source.', deprecated_hint='The -source arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') register('--target', advanced=True, fingerprint=True, help='Generate class files for this JVM version. 
Overrides the jvm platform target.', deprecated_hint='The -target arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') cls.register_jvm_tool(register, 'jmake') cls.register_jvm_tool(register, 'java-compiler') def select(self, target): return self.get_options().use_jmake and super(JmakeCompile, self).select(target) def __init__(self, *args, **kwargs): super(JmakeCompile, self).__init__(*args, **kwargs) self.set_distribution(jdk=True) self._buildroot = get_buildroot() # The depfile is generated by org.pantsbuild.tools.compiler.Compiler # and includes information about package-private classes -- e.g. # the case where Foo.java also defines class Bar. This allows jmake # to correctly include these files in its analysis. self._depfile_folder = os.path.join(self.workdir, 'jmake-depfiles') @property def _depfile(self): safe_mkdir(self._depfile_folder) return os.path.join(self._depfile_folder, 'global_depfile') def create_analysis_tools(self): return AnalysisTools(self.context.java_home, JMakeAnalysisParser(), JMakeAnalysis) def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file, settings):
relative_classpath = relativize_paths(classpath, self._buildroot) jmake_classpath = self.tool_classpath('jmake') args = [ '-classpath', ':'.join(relative_classpath), '-d', classes_output_dir, '-pdb', analysis_file, '-pdb-text-format', ] # TODO: This file should always exist for modern jmake installs; this check should # be removed via a Task-level identity bump after: # https://github.com/pantsbuild/pants/issues/1351 if os.path.exists(self._depfile): args.extend(['-depfile', self._depfile]) compiler_classpath = self.tool_classpath('java-compiler') args.extend([ '-jcpath', ':'.join(compiler_classpath), '-jcmainclass', 'org.pantsbuild.tools.compiler.Compiler', ]) if not self.get_options().colors: filtered_args = filter(lambda arg: not arg == '-C-Tcolor', self._args) else: filtered_args = self._args args.extend(filtered_args) args.extend(settings.args) if '-C-source' in args: raise TaskError("Set the source Java version with the 'source' or with the jvm platform, not " "in 'args'.") if '-C-target' in args: raise TaskError("Set the target JVM version with the 'target' option or with the jvm " "platform, not in 'args'.") if self.get_options().source or self.get_options().target: self.context.log.warn('--compile-java-source and --compile-java-target trample and override ' 'target jvm platform settings, and probably should not be used except ' 'for testing.') source_level = self.get_options().source or settings.source_level target_level = self.get_options().target or settings.target_level if source_level: args.extend(['-C-source', '-C{0}'.format(source_level)]) if target_level: args.extend(['-C-target', '-C{0}'.format(target_level)]) args.append('-C-Tdependencyfile') args.append('-C{}'.format(self._depfile)) jvm_options = list(self._jvm_options) args.extend(sources) result = self.runjava(classpath=jmake_classpath, main=JmakeCompile._JMAKE_MAIN, jvm_options=jvm_options, args=args, workunit_name='jmake', workunit_labels=[WorkUnitLabel.COMPILER]) if result: default_message = 'Unexpected error - JMake returned {}'.format(result) raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
identifier_body
java_compile.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.util.dirutil import relativize_paths, safe_mkdir # From http://kenai.com/projects/jmake/sources/mercurial/content # /src/com/sun/tools/jmake/Main.java?rev=26 # Main.mainExternal docs. _JMAKE_ERROR_CODES = { -1: 'invalid command line option detected', -2: 'error reading command file', -3: 'project database corrupted', -4: 'error initializing or calling the compiler', -5: 'compilation error', -6: 'error parsing a class file', -7: 'file not found', -8: 'I/O exception', -9: 'internal jmake exception', -10: 'deduced and actual class name mismatch', -11: 'invalid source file extension', -12: 'a class in a JAR is found dependent on a class with the .java source', -13: 'more than one entry for the same class is found in the project', -20: 'internal Java error (caused by java.lang.InternalError)', -30: 'internal Java error (caused by java.lang.RuntimeException).' } # When executed via a subprocess return codes will be treated as unsigned _JMAKE_ERROR_CODES.update((256 + code, msg) for code, msg in _JMAKE_ERROR_CODES.items()) class JmakeCompile(JvmCompile): """Compile Java code using JMake.""" _name = 'java' _file_suffix = '.java' _supports_concurrent_execution = False _JMAKE_MAIN = 'org.pantsbuild.jmake.Main' @classmethod def get_args_default(cls, bootstrap_option_values): workdir_gen = os.path.relpath(os.path.join(bootstrap_option_values.pants_workdir, 'gen'), get_buildroot()) return ('-C-encoding', '-CUTF-8', '-C-g', '-C-Tcolor', # Don't warn for generated code. '-C-Tnowarnprefixes', '-C{0}'.format(workdir_gen), # Suppress warning for annotations with no processor - we know there are many of these! '-C-Tnowarnregex', '-C^(warning: )?No processor claimed any of these annotations: .*') @classmethod def get_warning_args_default(cls): return ('-C-Xlint:all', '-C-Xlint:-serial', '-C-Xlint:-path', '-C-deprecation') @classmethod def get_no_warning_args_default(cls): return ('-C-Xlint:none', '-C-nowarn') @classmethod def register_options(cls, register): super(JmakeCompile, cls).register_options(register) register('--use-jmake', advanced=True, action='store_true', default=True, fingerprint=True, help='Use jmake to compile Java targets') register('--source', advanced=True, fingerprint=True, help='Provide source compatibility with this release. Overrides the jvm platform ' 'source.', deprecated_hint='The -source arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') register('--target', advanced=True, fingerprint=True, help='Generate class files for this JVM version. 
Overrides the jvm platform target.', deprecated_hint='The -target arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') cls.register_jvm_tool(register, 'jmake') cls.register_jvm_tool(register, 'java-compiler') def select(self, target): return self.get_options().use_jmake and super(JmakeCompile, self).select(target) def __init__(self, *args, **kwargs): super(JmakeCompile, self).__init__(*args, **kwargs) self.set_distribution(jdk=True) self._buildroot = get_buildroot() # The depfile is generated by org.pantsbuild.tools.compiler.Compiler # and includes information about package-private classes -- e.g. # the case where Foo.java also defines class Bar. This allows jmake # to correctly include these files in its analysis. self._depfile_folder = os.path.join(self.workdir, 'jmake-depfiles') @property def _depfile(self): safe_mkdir(self._depfile_folder) return os.path.join(self._depfile_folder, 'global_depfile') def create_analysis_tools(self): return AnalysisTools(self.context.java_home, JMakeAnalysisParser(), JMakeAnalysis) def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file, settings): relative_classpath = relativize_paths(classpath, self._buildroot) jmake_classpath = self.tool_classpath('jmake') args = [ '-classpath', ':'.join(relative_classpath), '-d', classes_output_dir, '-pdb', analysis_file, '-pdb-text-format', ] # TODO: This file should always exist for modern jmake installs; this check should # be removed via a Task-level identity bump after: # https://github.com/pantsbuild/pants/issues/1351 if os.path.exists(self._depfile): args.extend(['-depfile', self._depfile]) compiler_classpath = self.tool_classpath('java-compiler') args.extend([ '-jcpath', ':'.join(compiler_classpath), '-jcmainclass', 'org.pantsbuild.tools.compiler.Compiler', ]) if not self.get_options().colors: filtered_args = filter(lambda arg: not arg == '-C-Tcolor', self._args) else: filtered_args = self._args args.extend(filtered_args) args.extend(settings.args) if '-C-source' in args: raise TaskError("Set the source Java version with the 'source' or with the jvm platform, not " "in 'args'.") if '-C-target' in args: raise TaskError("Set the target JVM version with the 'target' option or with the jvm " "platform, not in 'args'.") if self.get_options().source or self.get_options().target: self.context.log.warn('--compile-java-source and --compile-java-target trample and override ' 'target jvm platform settings, and probably should not be used except ' 'for testing.') source_level = self.get_options().source or settings.source_level target_level = self.get_options().target or settings.target_level if source_level: args.extend(['-C-source', '-C{0}'.format(source_level)]) if target_level:
args.append('-C-Tdependencyfile') args.append('-C{}'.format(self._depfile)) jvm_options = list(self._jvm_options) args.extend(sources) result = self.runjava(classpath=jmake_classpath, main=JmakeCompile._JMAKE_MAIN, jvm_options=jvm_options, args=args, workunit_name='jmake', workunit_labels=[WorkUnitLabel.COMPILER]) if result: default_message = 'Unexpected error - JMake returned {}'.format(result) raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
args.extend(['-C-target', '-C{0}'.format(target_level)])
conditional_block
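The doubled error table in the java_compile.py row above exists because a JVM that exits with a negative jmake status is reported modulo 256 by the operating system, so the parent process sees an unsigned byte. A minimal Python sketch of the same trick; note that under Python 3 the items must be snapshotted with list() before updating, which the Python 2-era original did not need:

# Sketch: mirror jmake's signed exit codes to their unsigned byte values,
# since a child process exiting with status -5 is observed by the parent as 251.
error_codes = {-5: 'compilation error', -7: 'file not found'}
error_codes.update((256 + code, msg) for code, msg in list(error_codes.items()))

for raw in (-5, 251, -7, 249):
    # .get() falls back to a default message, as JmakeCompile.compile does
    print(raw, '->', error_codes.get(raw, 'Unexpected error - JMake returned {}'.format(raw)))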
java_compile.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.util.dirutil import relativize_paths, safe_mkdir # From http://kenai.com/projects/jmake/sources/mercurial/content # /src/com/sun/tools/jmake/Main.java?rev=26 # Main.mainExternal docs. _JMAKE_ERROR_CODES = { -1: 'invalid command line option detected', -2: 'error reading command file', -3: 'project database corrupted', -4: 'error initializing or calling the compiler', -5: 'compilation error', -6: 'error parsing a class file', -7: 'file not found', -8: 'I/O exception', -9: 'internal jmake exception', -10: 'deduced and actual class name mismatch', -11: 'invalid source file extension', -12: 'a class in a JAR is found dependent on a class with the .java source', -13: 'more than one entry for the same class is found in the project', -20: 'internal Java error (caused by java.lang.InternalError)', -30: 'internal Java error (caused by java.lang.RuntimeException).' } # When executed via a subprocess return codes will be treated as unsigned _JMAKE_ERROR_CODES.update((256 + code, msg) for code, msg in _JMAKE_ERROR_CODES.items()) class JmakeCompile(JvmCompile): """Compile Java code using JMake.""" _name = 'java' _file_suffix = '.java' _supports_concurrent_execution = False _JMAKE_MAIN = 'org.pantsbuild.jmake.Main' @classmethod def get_args_default(cls, bootstrap_option_values): workdir_gen = os.path.relpath(os.path.join(bootstrap_option_values.pants_workdir, 'gen'), get_buildroot()) return ('-C-encoding', '-CUTF-8', '-C-g', '-C-Tcolor', # Don't warn for generated code. '-C-Tnowarnprefixes', '-C{0}'.format(workdir_gen), # Suppress warning for annotations with no processor - we know there are many of these! '-C-Tnowarnregex', '-C^(warning: )?No processor claimed any of these annotations: .*') @classmethod def get_warning_args_default(cls): return ('-C-Xlint:all', '-C-Xlint:-serial', '-C-Xlint:-path', '-C-deprecation') @classmethod def
(cls): return ('-C-Xlint:none', '-C-nowarn') @classmethod def register_options(cls, register): super(JmakeCompile, cls).register_options(register) register('--use-jmake', advanced=True, action='store_true', default=True, fingerprint=True, help='Use jmake to compile Java targets') register('--source', advanced=True, fingerprint=True, help='Provide source compatibility with this release. Overrides the jvm platform ' 'source.', deprecated_hint='The -source arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') register('--target', advanced=True, fingerprint=True, help='Generate class files for this JVM version. Overrides the jvm platform target.', deprecated_hint='The -target arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') cls.register_jvm_tool(register, 'jmake') cls.register_jvm_tool(register, 'java-compiler') def select(self, target): return self.get_options().use_jmake and super(JmakeCompile, self).select(target) def __init__(self, *args, **kwargs): super(JmakeCompile, self).__init__(*args, **kwargs) self.set_distribution(jdk=True) self._buildroot = get_buildroot() # The depfile is generated by org.pantsbuild.tools.compiler.Compiler # and includes information about package-private classes -- e.g. # the case where Foo.java also defines class Bar. This allows jmake # to correctly include these files in its analysis. self._depfile_folder = os.path.join(self.workdir, 'jmake-depfiles') @property def _depfile(self): safe_mkdir(self._depfile_folder) return os.path.join(self._depfile_folder, 'global_depfile') def create_analysis_tools(self): return AnalysisTools(self.context.java_home, JMakeAnalysisParser(), JMakeAnalysis) def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file, settings): relative_classpath = relativize_paths(classpath, self._buildroot) jmake_classpath = self.tool_classpath('jmake') args = [ '-classpath', ':'.join(relative_classpath), '-d', classes_output_dir, '-pdb', analysis_file, '-pdb-text-format', ] # TODO: This file should always exist for modern jmake installs; this check should # be removed via a Task-level identity bump after: # https://github.com/pantsbuild/pants/issues/1351 if os.path.exists(self._depfile): args.extend(['-depfile', self._depfile]) compiler_classpath = self.tool_classpath('java-compiler') args.extend([ '-jcpath', ':'.join(compiler_classpath), '-jcmainclass', 'org.pantsbuild.tools.compiler.Compiler', ]) if not self.get_options().colors: filtered_args = filter(lambda arg: not arg == '-C-Tcolor', self._args) else: filtered_args = self._args args.extend(filtered_args) args.extend(settings.args) if '-C-source' in args: raise TaskError("Set the source Java version with the 'source' or with the jvm platform, not " "in 'args'.") if '-C-target' in args: raise TaskError("Set the target JVM version with the 'target' option or with the jvm " "platform, not in 'args'.") if self.get_options().source or self.get_options().target: self.context.log.warn('--compile-java-source and --compile-java-target trample and override ' 'target jvm platform settings, and probably should not be used except ' 'for testing.') source_level = self.get_options().source or settings.source_level target_level = self.get_options().target or settings.target_level if source_level: args.extend(['-C-source', '-C{0}'.format(source_level)]) if target_level: args.extend(['-C-target', '-C{0}'.format(target_level)]) args.append('-C-Tdependencyfile') 
args.append('-C{}'.format(self._depfile)) jvm_options = list(self._jvm_options) args.extend(sources) result = self.runjava(classpath=jmake_classpath, main=JmakeCompile._JMAKE_MAIN, jvm_options=jvm_options, args=args, workunit_name='jmake', workunit_labels=[WorkUnitLabel.COMPILER]) if result: default_message = 'Unexpected error - JMake returned {}'.format(result) raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
get_no_warning_args_default
identifier_name
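Most of the flags built up in get_args_default and compile above use jmake's '-C' prefix, which marks an option to be handed through to the underlying javac (hence pairs like '-C-encoding', '-CUTF-8'). A small illustrative sketch of that convention; the stripping logic here is my assumption, not pants code:

# Hypothetical illustration of the '-C' pass-through convention: strip the
# two-character prefix to recover the javac arguments jmake will forward.
jmake_args = ['-C-encoding', '-CUTF-8', '-C-g', '-pdb', 'analysis.pdb']
javac_args = [a[2:] for a in jmake_args if a.startswith('-C')]
print(javac_args)   # ['-encoding', 'UTF-8', '-g']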
java_compile.py
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis import JMakeAnalysis from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.util.dirutil import relativize_paths, safe_mkdir # From http://kenai.com/projects/jmake/sources/mercurial/content # /src/com/sun/tools/jmake/Main.java?rev=26 # Main.mainExternal docs. _JMAKE_ERROR_CODES = { -1: 'invalid command line option detected', -2: 'error reading command file', -3: 'project database corrupted', -4: 'error initializing or calling the compiler', -5: 'compilation error',
-11: 'invalid source file extension', -12: 'a class in a JAR is found dependent on a class with the .java source', -13: 'more than one entry for the same class is found in the project', -20: 'internal Java error (caused by java.lang.InternalError)', -30: 'internal Java error (caused by java.lang.RuntimeException).' } # When executed via a subprocess return codes will be treated as unsigned _JMAKE_ERROR_CODES.update((256 + code, msg) for code, msg in _JMAKE_ERROR_CODES.items()) class JmakeCompile(JvmCompile): """Compile Java code using JMake.""" _name = 'java' _file_suffix = '.java' _supports_concurrent_execution = False _JMAKE_MAIN = 'org.pantsbuild.jmake.Main' @classmethod def get_args_default(cls, bootstrap_option_values): workdir_gen = os.path.relpath(os.path.join(bootstrap_option_values.pants_workdir, 'gen'), get_buildroot()) return ('-C-encoding', '-CUTF-8', '-C-g', '-C-Tcolor', # Don't warn for generated code. '-C-Tnowarnprefixes', '-C{0}'.format(workdir_gen), # Suppress warning for annotations with no processor - we know there are many of these! '-C-Tnowarnregex', '-C^(warning: )?No processor claimed any of these annotations: .*') @classmethod def get_warning_args_default(cls): return ('-C-Xlint:all', '-C-Xlint:-serial', '-C-Xlint:-path', '-C-deprecation') @classmethod def get_no_warning_args_default(cls): return ('-C-Xlint:none', '-C-nowarn') @classmethod def register_options(cls, register): super(JmakeCompile, cls).register_options(register) register('--use-jmake', advanced=True, action='store_true', default=True, fingerprint=True, help='Use jmake to compile Java targets') register('--source', advanced=True, fingerprint=True, help='Provide source compatibility with this release. Overrides the jvm platform ' 'source.', deprecated_hint='The -source arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') register('--target', advanced=True, fingerprint=True, help='Generate class files for this JVM version. Overrides the jvm platform target.', deprecated_hint='The -target arg to javac should be specified by the jvm-platform.', deprecated_version='0.0.44') cls.register_jvm_tool(register, 'jmake') cls.register_jvm_tool(register, 'java-compiler') def select(self, target): return self.get_options().use_jmake and super(JmakeCompile, self).select(target) def __init__(self, *args, **kwargs): super(JmakeCompile, self).__init__(*args, **kwargs) self.set_distribution(jdk=True) self._buildroot = get_buildroot() # The depfile is generated by org.pantsbuild.tools.compiler.Compiler # and includes information about package-private classes -- e.g. # the case where Foo.java also defines class Bar. This allows jmake # to correctly include these files in its analysis. 
self._depfile_folder = os.path.join(self.workdir, 'jmake-depfiles') @property def _depfile(self): safe_mkdir(self._depfile_folder) return os.path.join(self._depfile_folder, 'global_depfile') def create_analysis_tools(self): return AnalysisTools(self.context.java_home, JMakeAnalysisParser(), JMakeAnalysis) def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file, settings): relative_classpath = relativize_paths(classpath, self._buildroot) jmake_classpath = self.tool_classpath('jmake') args = [ '-classpath', ':'.join(relative_classpath), '-d', classes_output_dir, '-pdb', analysis_file, '-pdb-text-format', ] # TODO: This file should always exist for modern jmake installs; this check should # be removed via a Task-level identity bump after: # https://github.com/pantsbuild/pants/issues/1351 if os.path.exists(self._depfile): args.extend(['-depfile', self._depfile]) compiler_classpath = self.tool_classpath('java-compiler') args.extend([ '-jcpath', ':'.join(compiler_classpath), '-jcmainclass', 'org.pantsbuild.tools.compiler.Compiler', ]) if not self.get_options().colors: filtered_args = filter(lambda arg: not arg == '-C-Tcolor', self._args) else: filtered_args = self._args args.extend(filtered_args) args.extend(settings.args) if '-C-source' in args: raise TaskError("Set the source Java version with the 'source' or with the jvm platform, not " "in 'args'.") if '-C-target' in args: raise TaskError("Set the target JVM version with the 'target' option or with the jvm " "platform, not in 'args'.") if self.get_options().source or self.get_options().target: self.context.log.warn('--compile-java-source and --compile-java-target trample and override ' 'target jvm platform settings, and probably should not be used except ' 'for testing.') source_level = self.get_options().source or settings.source_level target_level = self.get_options().target or settings.target_level if source_level: args.extend(['-C-source', '-C{0}'.format(source_level)]) if target_level: args.extend(['-C-target', '-C{0}'.format(target_level)]) args.append('-C-Tdependencyfile') args.append('-C{}'.format(self._depfile)) jvm_options = list(self._jvm_options) args.extend(sources) result = self.runjava(classpath=jmake_classpath, main=JmakeCompile._JMAKE_MAIN, jvm_options=jvm_options, args=args, workunit_name='jmake', workunit_labels=[WorkUnitLabel.COMPILER]) if result: default_message = 'Unexpected error - JMake returned {}'.format(result) raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
-6: 'error parsing a class file', -7: 'file not found', -8: 'I/O exception', -9: 'internal jmake exception', -10: 'deduced and actual class name mismatch',
random_line_split
mysigs.py
from __future__ import division import myhdl from myhdl import instance, delay ClockList = [] class Clock(myhdl.SignalType): def __init__(self, val, frequency=1, timescale='1ns'): self._frequency = frequency self._period = 1/frequency self._timescale = timescale self._hticks = 0 self._set_hticks() myhdl.SignalType.__init__(self, bool(val)) ClockList.append(self) @property def timescale(self): return self._timescale @timescale.setter def timescale(self, t): self._timescale = t @property def
(self): return self._frequency @frequency.setter def frequency(self, f): self._frequency = f self._period = 1/f self._set_hticks() @property def period(self): return self._period def _set_hticks(self): # self._nts = self._convert_timescale(self._timescale) # self._hticks = int(round(self._period/self._nts)) self._hticks = 5 def _convert_timescale(self, ts): # @todo: need to complete this, ts is in the form # "[0-9]*["ms","us","ns","ps"], parse the text # format and retrieve a numerical value # separate the numerical and text nts = 1e9 return nts def gen(self, hticks=None): if hticks is None: hticks = self._hticks else: self._hticks = hticks # print('hticks %d'%(hticks)) @instance def gclock(): self.next = False while True: yield delay(hticks) self.next = not self.val return gclock class Reset(myhdl.ResetSignal): def __init__(self, val, active, async): myhdl.ResetSignal.__init__(self, val, active, async) def pulse(self, delays=10): if isinstance(delays, int): self.next = self.active yield delay(delays) self.next = not self.active elif isinstance(delays, tuple): assert len(delays) in (1, 2, 3), "Incorrect number of delays" self.next = not self.active if len(delays) == 3 else self.active for dd in delays: yield delay(dd) self.next = not self.val self.next = not self.active else: raise ValueError("{} type not supported".format(type(delays)))
frequency
identifier_name
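The Clock class in the mysigs.py rows keeps frequency and period in sync through the property setter that the masked identifier above names. A short usage sketch, assuming myhdl is installed and Clock is imported from this module:

# Sketch: the frequency setter recomputes _period and _hticks.
clk = Clock(0, frequency=10)
print(clk.period)        # 0.1
clk.frequency = 20       # setter assigns _period = 1/f and calls _set_hticks()
print(clk.period)        # 0.05
clkgen = clk.gen()       # myhdl @instance that toggles clk every hticks steps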
mysigs.py
from __future__ import division import myhdl from myhdl import instance, delay ClockList = [] class Clock(myhdl.SignalType): def __init__(self, val, frequency=1, timescale='1ns'): self._frequency = frequency self._period = 1/frequency self._timescale = timescale self._hticks = 0 self._set_hticks() myhdl.SignalType.__init__(self, bool(val)) ClockList.append(self) @property def timescale(self): return self._timescale @timescale.setter def timescale(self, t): self._timescale = t @property def frequency(self): return self._frequency @frequency.setter def frequency(self, f): self._frequency = f self._period = 1/f self._set_hticks() @property def period(self): return self._period
def _convert_timescale(self, ts): # @todo: need to complete this, ts is in the form # "[0-9]*["ms","us","ns","ps"], parse the text # format and retrieve a numerical value # separate the numerical and text nts = 1e9 return nts def gen(self, hticks=None): if hticks is None: hticks = self._hticks else: self._hticks = hticks # print('hticks %d'%(hticks)) @instance def gclock(): self.next = False while True: yield delay(hticks) self.next = not self.val return gclock class Reset(myhdl.ResetSignal): def __init__(self, val, active, async): myhdl.ResetSignal.__init__(self, val, active, async) def pulse(self, delays=10): if isinstance(delays, int): self.next = self.active yield delay(delays) self.next = not self.active elif isinstance(delays, tuple): assert len(delays) in (1, 2, 3), "Incorrect number of delays" self.next = not self.active if len(delays) == 3 else self.active for dd in delays: yield delay(dd) self.next = not self.val self.next = not self.active else: raise ValueError("{} type not supported".format(type(delays)))
def _set_hticks(self): # self._nts = self._convert_timescale(self._timescale) # self._hticks = int(round(self._period/self._nts)) self._hticks = 5
random_line_split
mysigs.py
from __future__ import division import myhdl from myhdl import instance, delay ClockList = [] class Clock(myhdl.SignalType): def __init__(self, val, frequency=1, timescale='1ns'): self._frequency = frequency self._period = 1/frequency self._timescale = timescale self._hticks = 0 self._set_hticks() myhdl.SignalType.__init__(self, bool(val)) ClockList.append(self) @property def timescale(self): return self._timescale @timescale.setter def timescale(self, t): self._timescale = t @property def frequency(self): return self._frequency @frequency.setter def frequency(self, f): self._frequency = f self._period = 1/f self._set_hticks() @property def period(self): return self._period def _set_hticks(self): # self._nts = self._convert_timescale(self._timescale) # self._hticks = int(round(self._period/self._nts)) self._hticks = 5 def _convert_timescale(self, ts): # @todo: need to complete this, ts is in the form # "[0-9]*["ms","us","ns","ps"], parse the text # format and retrieve a numerical value # separate the numerical and text nts = 1e9 return nts def gen(self, hticks=None): if hticks is None:
else: self._hticks = hticks # print('hticks %d'%(hticks)) @instance def gclock(): self.next = False while True: yield delay(hticks) self.next = not self.val return gclock class Reset(myhdl.ResetSignal): def __init__(self, val, active, async): myhdl.ResetSignal.__init__(self, val, active, async) def pulse(self, delays=10): if isinstance(delays, int): self.next = self.active yield delay(delays) self.next = not self.active elif isinstance(delays, tuple): assert len(delays) in (1, 2, 3), "Incorrect number of delays" self.next = not self.active if len(delays) == 3 else self.active for dd in delays: yield delay(dd) self.next = not self.val self.next = not self.active else: raise ValueError("{} type not supported".format(type(delays)))
hticks = self._hticks
conditional_block
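The @todo in _convert_timescale above describes parsing strings like '1ns' or '10us' into a numeric scale; the method currently returns a hard-coded 1e9 and _set_hticks is stubbed to 5. One hypothetical way to complete the parse, purely as an illustration of what the comment asks for (convert_timescale is my name, not the module's):

import re

def convert_timescale(ts):
    # Parse "[0-9]*" followed by one of ms/us/ns/ps into seconds.
    m = re.match(r'(\d*)\s*(ms|us|ns|ps)$', ts)
    scale = {'ms': 1e-3, 'us': 1e-6, 'ns': 1e-9, 'ps': 1e-12}[m.group(2)]
    return (int(m.group(1)) if m.group(1) else 1) * scale

print(convert_timescale('1ns'))    # 1e-09
print(convert_timescale('10us'))   # 1e-05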
mysigs.py
from __future__ import division import myhdl from myhdl import instance, delay ClockList = [] class Clock(myhdl.SignalType): def __init__(self, val, frequency=1, timescale='1ns'): self._frequency = frequency self._period = 1/frequency self._timescale = timescale self._hticks = 0 self._set_hticks() myhdl.SignalType.__init__(self, bool(val)) ClockList.append(self) @property def timescale(self): return self._timescale @timescale.setter def timescale(self, t): self._timescale = t @property def frequency(self): return self._frequency @frequency.setter def frequency(self, f): self._frequency = f self._period = 1/f self._set_hticks() @property def period(self): return self._period def _set_hticks(self): # self._nts = self._convert_timescale(self._timescale) # self._hticks = int(round(self._period/self._nts)) self._hticks = 5 def _convert_timescale(self, ts): # @todo: need to complete this, ts is in the form # "[0-9]*["ms","us","ns","ps"], parse the text # format and retrieve a numerical value # separate the numerical and text nts = 1e9 return nts def gen(self, hticks=None): if hticks is None: hticks = self._hticks else: self._hticks = hticks # print('hticks %d'%(hticks)) @instance def gclock(): self.next = False while True: yield delay(hticks) self.next = not self.val return gclock class Reset(myhdl.ResetSignal): def __init__(self, val, active, async): myhdl.ResetSignal.__init__(self, val, active, async) def pulse(self, delays=10):
if isinstance(delays, int): self.next = self.active yield delay(delays) self.next = not self.active elif isinstance(delays, tuple): assert len(delays) in (1, 2, 3), "Incorrect number of delays" self.next = not self.active if len(delays) == 3 else self.active for dd in delays: yield delay(dd) self.next = not self.val self.next = not self.active else: raise ValueError("{} type not supported".format(type(delays)))
identifier_body
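Reset.pulse above is a generator meant to be yielded from a testbench process: an int holds the reset active for that many ticks, while a 2- or 3-tuple sequences the pulse through phases. A rough usage sketch under those assumptions:

from myhdl import instance

# val, active, async: the third parameter name is a reserved word from
# Python 3.7 on (newer MyHDL calls it 'isasync'), so pass it positionally
# and run this on an interpreter that can still import the class above.
rst = Reset(0, 1, False)

@instance
def stimulus():
    yield rst.pulse(30)        # int: hold reset active for 30 ticks
    yield rst.pulse((5, 20))   # tuple: step the pulse through 5- and 20-tick phases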
user.ts
import * as fs from 'fs' import * as gm from 'gm' import * as request from 'superagent' const { promisify } = require('util') const tesseract = require('node-tesseract') import _h from '../../utils/headers' import _c from '../../utils/charset' import config from '../../config/user' const headers = _h.updateHeaders() const { url: { host: hostURL, path: pathURL }, spotImgOptions } = config const { verification, login, encoded } = pathURL const imgURL = hostURL + verification const loginURL = hostURL + login type TYPE = { randomCode?: string, username?: string, password?: string, cookie?: string, imgDir?: string, encoded?: string, img?: string }; export const getCookie = () => new Promise((resolve, reject) => { request .get(hostURL) .set(headers) .end((err: any, sres: any) => { if (err)
const cookies = sres.headers['set-cookie'] let cookie = cookies.find(el => el.includes('JSESSIONID')) return resolve({ cookie: cookie.split(';')[0] }) }) }) export const getImg = (cookie: string) => new Promise((resolve, reject) => { request .get(imgURL) .set(headers) .set('Cookie', cookie) .end((err, sres) => { if (err) { return reject(err) } resolve(sres.body) }) }) export const saveImg = ({ username, img, imgDir }: TYPE) => new Promise((resolve) => { const writeFile = promisify(fs.writeFile) writeFile(imgDir, img).then(() => resolve()) }) export const editImg = ({ username, imgDir }: TYPE) => new Promise((resolve, reject) => { gm(imgDir) .despeckle() // remove speckles .contrast(-2000) // adjust contrast .write(imgDir, (err) => err ? reject(err) : resolve() ) }) export const spotImg = ({ username, imgDir }: TYPE) => new Promise((resolve, reject) => { tesseract.process(imgDir, spotImgOptions, (err: any, text: string) => { if (err) { return reject(err) } fs.unlinkSync(imgDir) text = text .replace(/[^a-zA-Z0-9]/gm, '') .substr(0, 4) .toLowerCase() if (text.match(/\W/g) !== null) { err = 'Invalid captcha' return reject(err) } resolve(text) }) }) export const fetchEncoded = (cookie: string) => new Promise((resolve, reject) => { request .post(loginURL + encoded) .set(headers) .set('Cookie', cookie) .end((err: any, sres: any) => { if (err) { return reject(err) } resolve(JSON.parse(sres.text).data) }) }) export const packEncoded = ({ username, password, encoded = '' }: TYPE) => { const code = username + '%%%' + password let ret = ''; let scode = encoded.split("#")[0]; let sxh = encoded.split("#")[1]; for (let i = 0; i < code.length; i++) { if (i < 20) { ret = ret + code.substring(i, i + 1) + scode.substring(0,parseInt(sxh.substring(i, i + 1))); scode = scode.substring(parseInt(sxh.substring(i, i + 1)), scode.length); }else{ ret = ret + code.substring(i, code.length); i = code.length; } } return ret } export const loginToJWXT = ({ randomCode, username, password, encoded, cookie }: TYPE) => new Promise((resolve, reject) => { request .post(loginURL) .type('form') .charset('gbk') .set({ ...headers, // 'Content-Type': 'application/x-www-form-urlencoded', // 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36', Cookie: cookie, Referer: 'http://jwxt.xtu.edu.cn/jsxsd/xk/LoginToXk' }) .send({ USERNAME: username, PASSWORD: password, RANDOMCODE: randomCode, encoded }) .end((err: any, sres: any) => { if (err) { return reject(err) } if (sres.text.includes('用户名或密码错误')) { err = '用户名或密码错误' return reject(err) } if (sres.text.includes('验证码错误')) { err = '验证码错误' return reject(err) } resolve() }) })
{ return reject(err) }
conditional_block
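packEncoded above interleaves a server-supplied salt (scode, the part of 'encoded' before the '#') into the credential string, splicing sxh[i] salt characters after each of the first 20 credential characters. A Python port of the same loop as a worked illustration; pack_encoded and the sample salt string are mine:

def pack_encoded(username, password, encoded):
    code = username + '%%%' + password
    scode, sxh = encoded.split('#')
    ret = ''
    for i, ch in enumerate(code):
        if i >= 20:                      # tail is appended verbatim
            ret += code[i:]
            break
        n = int(sxh[i])                  # how many salt chars to splice in
        ret += ch + scode[:n]
        scode = scode[n:]
    return ret

# 'user%%%pass' is 11 chars; with one salt char spliced after each:
print(pack_encoded('user', 'pass', 'ABCDEFGHIJK#11111111111'))
# -> uAsBeCrD%E%F%GpHaIsJsK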
user.ts
import * as fs from 'fs' import * as gm from 'gm' import * as request from 'superagent' const { promisify } = require('util') const tesseract = require('node-tesseract') import _h from '../../utils/headers' import _c from '../../utils/charset' import config from '../../config/user' const headers = _h.updateHeaders() const { url: { host: hostURL, path: pathURL }, spotImgOptions } = config const { verification, login, encoded } = pathURL const imgURL = hostURL + verification const loginURL = hostURL + login type TYPE = { randomCode?: string, username?: string, password?: string, cookie?: string, imgDir?: string, encoded?: string, img?: string }; export const getCookie = () => new Promise((resolve, reject) => { request .get(hostURL) .set(headers) .end((err: any, sres: any) => { if (err) { return reject(err) } const cookies = sres.headers['set-cookie'] let cookie = cookies.find(el => el.includes('JSESSIONID')) return resolve({ cookie: cookie.split(';')[0] }) }) }) export const getImg = (cookie: string) => new Promise((resolve, reject) => { request .get(imgURL)
return reject(err) } resolve(sres.body) }) }) export const saveImg = ({ username, img, imgDir }: TYPE) => new Promise((resolve) => { const writeFile = promisify(fs.writeFile) writeFile(imgDir, img).then(() => resolve()) }) export const editImg = ({ username, imgDir }: TYPE) => new Promise((resolve, reject) => { gm(imgDir) .despeckle() // remove speckles .contrast(-2000) // adjust contrast .write(imgDir, (err) => err ? reject(err) : resolve() ) }) export const spotImg = ({ username, imgDir }: TYPE) => new Promise((resolve, reject) => { tesseract.process(imgDir, spotImgOptions, (err: any, text: string) => { if (err) { return reject(err) } fs.unlinkSync(imgDir) text = text .replace(/[^a-zA-Z0-9]/gm, '') .substr(0, 4) .toLowerCase() if (text.match(/\W/g) !== null) { err = 'Invalid captcha' return reject(err) } resolve(text) }) }) export const fetchEncoded = (cookie: string) => new Promise((resolve, reject) => { request .post(loginURL + encoded) .set(headers) .set('Cookie', cookie) .end((err: any, sres: any) => { if (err) { return reject(err) } resolve(JSON.parse(sres.text).data) }) }) export const packEncoded = ({ username, password, encoded = '' }: TYPE) => { const code = username + '%%%' + password let ret = ''; let scode = encoded.split("#")[0]; let sxh = encoded.split("#")[1]; for (let i = 0; i < code.length; i++) { if (i < 20) { ret = ret + code.substring(i, i + 1) + scode.substring(0,parseInt(sxh.substring(i, i + 1))); scode = scode.substring(parseInt(sxh.substring(i, i + 1)), scode.length); }else{ ret = ret + code.substring(i, code.length); i = code.length; } } return ret } export const loginToJWXT = ({ randomCode, username, password, encoded, cookie }: TYPE) => new Promise((resolve, reject) => { request .post(loginURL) .type('form') .charset('gbk') .set({ ...headers, // 'Content-Type': 'application/x-www-form-urlencoded', // 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36', Cookie: cookie, Referer: 'http://jwxt.xtu.edu.cn/jsxsd/xk/LoginToXk' }) .send({ USERNAME: username, PASSWORD: password, RANDOMCODE: randomCode, encoded }) .end((err: any, sres: any) => { if (err) { return reject(err) } if (sres.text.includes('用户名或密码错误')) { err = '用户名或密码错误' return reject(err) } if (sres.text.includes('验证码错误')) { err = '验证码错误' return reject(err) } resolve() }) })
.set(headers) .set('Cookie', cookie) .end((err, sres) => { if (err) {
random_line_split
yuan-ti.ts
import * as Names from "app/models/names"; import { NameList } from "./name-list"; /* cSpell:disable */ export const YuanTi: Names.NameDefinition = new Names.NameDefinition({ name: "Yuan-ti", formats: [{ name: "Forename", formats: ["{name}"] }], parts: { name: { markovOrder: 2, mode: "markovOrItem", source: { "N": [ "acalan", "acamapichtli", "ahuiliztli", "ahuiliztli", "atl", "centehua", "centeotl", "chalchiuhtlicue", "chalchiuitl", "chicomecoatl", "chimalli", "chimalma", "chimalpopoca", "citlali", "citlalli", "citlalmina", "coatl", "cozamalotl",
"tlaltecuhtli,", "tochtli", "tonatiuh", "totec", "tototl", "xilonen", "xipe", "xiuhpilli", "xochitl", "xochitl", "yaotl", "yaretzi", "zolin" ] }, } } }); NameList.push(YuanTi);
"cuauhtémoc", "eréndira", "huitzilin", "huitzilopochtli", "itzcóatl", "itzli", "matlal", "matlalihuitl", "mayahuel", "nochehuatl", "quauhtli", "quetzalcoatl", "teiuc", "teyacapan", "tezcatlipoca", "tlacotl", "tlaloc", "tlaloc",
random_line_split
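The markovOrder: 2 / markovOrItem settings in the yuan-ti.ts row above imply the name list feeds an order-2 character Markov chain (falling back to picking a source item directly). The NameDefinition implementation is not shown here, so the following is only a generic sketch of that technique; train and generate are my names:

import random
from collections import defaultdict

def train(names, order=2):
    # Map each 2-character window to the characters observed after it.
    table = defaultdict(list)
    for name in names:
        padded = '^' * order + name + '$'
        for i in range(len(padded) - order):
            table[padded[i:i + order]].append(padded[i + order])
    return table

def generate(table, order=2, max_len=12):
    out = '^' * order
    while len(out) < max_len + order:
        nxt = random.choice(table[out[-order:]])
        if nxt == '$':          # end-of-name marker
            break
        out += nxt
    return out[order:]

table = train(["acalan", "citlali", "coatl", "tonatiuh", "xochitl"])
print(generate(table))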
app.js
import React, { Component } from 'react' ; import { connect } from 'reslice' ; import Footer from './footer' ; import AddTodo from './addtodo' ; import TodoList from './todolist' ; /** * Top-level component. Embeds the AddTodo, TodoList and Footer * (application) components. These are each passed their slice of * the tree. Note that this requires the top-level of the store * to have been correctly constructed (in ducks.js). **/ class App extends Component { onAddTodo = () => { this.props.onAddTodo () ; }
() { let { slice, filter } = this.props ; return ( <div> <AddTodo slice={ slice.addtodo } onAddTodo={ this.onAddTodo }/> <TodoList slice={ slice.todolist } filter={ filter }/> <Footer slice={ slice.footer } /> </div> ) ; } } /** * Map in state props. This being reslice, the slice is the argument, * rather than the entire state tree. The currently selected filter is * required, which is exposed via a selector function on the footer * slice of the state tree. **/ function mapStateToProps (slice) { return { filter: slice.footer.getFilter(), } ; } /** * Map in dispatcher props, here for the onAddTodo action. This is * mapped to an action which is exposed as a method on the Apps * slice (which is the slice argument to the function). **/ function mapDispatchToProps (dispatch, slice) { return { onAddTodo: () => dispatch(slice.onAddTodo()), } ; } export default connect(mapStateToProps, mapDispatchToProps)(App) ;
render
identifier_name
app.js
import React, { Component } from 'react' ; import { connect } from 'reslice' ; import Footer from './footer' ; import AddTodo from './addtodo' ; import TodoList from './todolist' ; /** * Top-level component. Embeds the AddTodo, TodoList and Footer * (application) components. These are each passed their slice of * the tree. Note that this requires the top-level of the store * to have been correctly constructed (in ducks.js). **/ class App extends Component { onAddTodo = () => { this.props.onAddTodo () ; } render () { let { slice, filter } = this.props ; return ( <div> <AddTodo slice={ slice.addtodo } onAddTodo={ this.onAddTodo }/> <TodoList slice={ slice.todolist } filter={ filter }/> <Footer slice={ slice.footer } /> </div> ) ; } } /** * Map in state props. This being reslice, the slice is the argument, * rather than the entire state tree. The currently selected filter is * required, which is exposed via a selector function on the footer * slice of the state tree. **/ function mapStateToProps (slice) { return { filter: slice.footer.getFilter(), } ; } /** * Map in dispatcher props, here for the onAddTodo action. This is * mapped to an action which is exposed as a method on the Apps * slice (which is the slice argument to the function). **/ function mapDispatchToProps (dispatch, slice) { return { onAddTodo: () => dispatch(slice.onAddTodo()),
} ; } export default connect(mapStateToProps, mapDispatchToProps)(App) ;
random_line_split
selectionClipboard.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import {clipboard} from 'electron'; import * as platform from 'vs/base/common/platform'; import {ICodeEditor, IEditorMouseEvent} from 'vs/editor/browser/editorBrowser'; import {Disposable} from 'vs/base/common/lifecycle'; import {EndOfLinePreference, IEditorContribution, ICursorSelectionChangedEvent, IConfigurationChangedEvent} from 'vs/editor/common/editorCommon'; import {EditorBrowserRegistry} from 'vs/editor/browser/editorBrowserExtensions'; import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService'; import {RunOnceScheduler} from 'vs/base/common/async'; import {Range} from 'vs/editor/common/core/range'; class SelectionClipboard extends Disposable implements IEditorContribution { static ID = 'editor.contrib.selectionClipboard'; constructor(editor:ICodeEditor, @IKeybindingService keybindingService:IKeybindingService) { super(); if (platform.isLinux) { var isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; this._register(editor.onDidChangeConfiguration((e:IConfigurationChangedEvent) => { if (e.contribInfo)
})); this._register(editor.onMouseDown((e:IEditorMouseEvent) => { if (!isEnabled) { return; } if (!editor.getModel()) { return; } if (e.event.middleButton) { e.event.preventDefault(); editor.focus(); if (e.target.position) { editor.setPosition(e.target.position); } process.nextTick(() => { // TODO@Alex: electron weirdness: calling clipboard.readText('selection') generates a paste event, so no need to execute paste ourselves clipboard.readText('selection'); // keybindingService.executeCommand(Handler.Paste, { // text: clipboard.readText('selection'), // pasteOnNewLine: false // }); }); } })); let setSelectionToClipboard = this._register(new RunOnceScheduler(() => { let model = editor.getModel(); if (!model) { return; } let selections = editor.getSelections(); selections = selections.slice(0); selections.sort(Range.compareRangesUsingStarts); let result: string[] = []; for (let i = 0; i < selections.length; i++) { let sel = selections[i]; if (sel.isEmpty()) { // Only write if all cursors have selection return; } result.push(model.getValueInRange(sel, EndOfLinePreference.TextDefined)); } let textToCopy = result.join(model.getEOL()); clipboard.writeText(textToCopy, 'selection'); }, 100)); this._register(editor.onDidChangeCursorSelection((e:ICursorSelectionChangedEvent) => { if (!isEnabled) { return; } setSelectionToClipboard.schedule(); })); } } public getId(): string { return SelectionClipboard.ID; } public dispose(): void { super.dispose(); } } EditorBrowserRegistry.registerEditorContribution(SelectionClipboard);
{ isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; }
conditional_block
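The RunOnceScheduler wired to onDidChangeCursorSelection above debounces PRIMARY-selection writes: each schedule() call restarts a 100 ms countdown, so the clipboard is written once the selection stops changing. A rough Python analogue of that scheduler, for illustration only (not the vscode implementation):

import threading

class RunOnceScheduler:
    # Trailing debounce: run fn once, delay seconds after the last schedule().
    def __init__(self, fn, delay):
        self._fn, self._delay = fn, delay
        self._timer = None

    def schedule(self):
        if self._timer is not None:
            self._timer.cancel()             # restart the countdown
        self._timer = threading.Timer(self._delay, self._fn)
        self._timer.start()

sched = RunOnceScheduler(lambda: print('selection copied'), 0.1)
for _ in range(5):
    sched.schedule()    # rapid-fire events collapse into one callback,
                        # fired about 100 ms after the last schedule()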
selectionClipboard.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import {clipboard} from 'electron'; import * as platform from 'vs/base/common/platform'; import {ICodeEditor, IEditorMouseEvent} from 'vs/editor/browser/editorBrowser'; import {Disposable} from 'vs/base/common/lifecycle'; import {EndOfLinePreference, IEditorContribution, ICursorSelectionChangedEvent, IConfigurationChangedEvent} from 'vs/editor/common/editorCommon'; import {EditorBrowserRegistry} from 'vs/editor/browser/editorBrowserExtensions'; import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService'; import {RunOnceScheduler} from 'vs/base/common/async'; import {Range} from 'vs/editor/common/core/range'; class SelectionClipboard extends Disposable implements IEditorContribution { static ID = 'editor.contrib.selectionClipboard'; constructor(editor:ICodeEditor, @IKeybindingService keybindingService:IKeybindingService) { super(); if (platform.isLinux) { var isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; this._register(editor.onDidChangeConfiguration((e:IConfigurationChangedEvent) => { if (e.contribInfo) { isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; } })); this._register(editor.onMouseDown((e:IEditorMouseEvent) => { if (!isEnabled) { return; } if (!editor.getModel()) { return; } if (e.event.middleButton) { e.event.preventDefault(); editor.focus(); if (e.target.position) { editor.setPosition(e.target.position); } process.nextTick(() => { // TODO@Alex: electron weirdness: calling clipboard.readText('selection') generates a paste event, so no need to execute paste ourselves clipboard.readText('selection'); // keybindingService.executeCommand(Handler.Paste, { // text: clipboard.readText('selection'), // pasteOnNewLine: false // }); }); } })); let setSelectionToClipboard = this._register(new RunOnceScheduler(() => { let model = editor.getModel(); if (!model) { return; } let selections = editor.getSelections(); selections = selections.slice(0); selections.sort(Range.compareRangesUsingStarts); let result: string[] = []; for (let i = 0; i < selections.length; i++) { let sel = selections[i]; if (sel.isEmpty()) { // Only write if all cursors have selection return; } result.push(model.getValueInRange(sel, EndOfLinePreference.TextDefined)); } let textToCopy = result.join(model.getEOL()); clipboard.writeText(textToCopy, 'selection'); }, 100)); this._register(editor.onDidChangeCursorSelection((e:ICursorSelectionChangedEvent) => { if (!isEnabled) { return; } setSelectionToClipboard.schedule(); })); } } public getId(): string { return SelectionClipboard.ID; } public
(): void { super.dispose(); } } EditorBrowserRegistry.registerEditorContribution(SelectionClipboard);
dispose
identifier_name
selectionClipboard.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import {clipboard} from 'electron'; import * as platform from 'vs/base/common/platform'; import {ICodeEditor, IEditorMouseEvent} from 'vs/editor/browser/editorBrowser'; import {Disposable} from 'vs/base/common/lifecycle'; import {EndOfLinePreference, IEditorContribution, ICursorSelectionChangedEvent, IConfigurationChangedEvent} from 'vs/editor/common/editorCommon'; import {EditorBrowserRegistry} from 'vs/editor/browser/editorBrowserExtensions'; import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService'; import {RunOnceScheduler} from 'vs/base/common/async'; import {Range} from 'vs/editor/common/core/range'; class SelectionClipboard extends Disposable implements IEditorContribution { static ID = 'editor.contrib.selectionClipboard'; constructor(editor:ICodeEditor, @IKeybindingService keybindingService:IKeybindingService) { super(); if (platform.isLinux) { var isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; this._register(editor.onDidChangeConfiguration((e:IConfigurationChangedEvent) => { if (e.contribInfo) { isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; } })); this._register(editor.onMouseDown((e:IEditorMouseEvent) => { if (!isEnabled) { return; } if (!editor.getModel()) { return; } if (e.event.middleButton) { e.event.preventDefault(); editor.focus(); if (e.target.position) { editor.setPosition(e.target.position); } process.nextTick(() => { // TODO@Alex: electron weirdness: calling clipboard.readText('selection') generates a paste event, so no need to execute paste ourselves clipboard.readText('selection'); // keybindingService.executeCommand(Handler.Paste, { // text: clipboard.readText('selection'), // pasteOnNewLine: false // }); }); } })); let setSelectionToClipboard = this._register(new RunOnceScheduler(() => { let model = editor.getModel(); if (!model) { return; } let selections = editor.getSelections(); selections = selections.slice(0); selections.sort(Range.compareRangesUsingStarts); let result: string[] = []; for (let i = 0; i < selections.length; i++) { let sel = selections[i]; if (sel.isEmpty()) { // Only write if all cursors have selection return; } result.push(model.getValueInRange(sel, EndOfLinePreference.TextDefined)); } let textToCopy = result.join(model.getEOL()); clipboard.writeText(textToCopy, 'selection'); }, 100)); this._register(editor.onDidChangeCursorSelection((e:ICursorSelectionChangedEvent) => { if (!isEnabled) { return; } setSelectionToClipboard.schedule(); })); } } public getId(): string { return SelectionClipboard.ID; } public dispose(): void
} EditorBrowserRegistry.registerEditorContribution(SelectionClipboard);
{ super.dispose(); }
identifier_body
selectionClipboard.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import {clipboard} from 'electron'; import * as platform from 'vs/base/common/platform'; import {ICodeEditor, IEditorMouseEvent} from 'vs/editor/browser/editorBrowser'; import {Disposable} from 'vs/base/common/lifecycle'; import {EndOfLinePreference, IEditorContribution, ICursorSelectionChangedEvent, IConfigurationChangedEvent} from 'vs/editor/common/editorCommon'; import {EditorBrowserRegistry} from 'vs/editor/browser/editorBrowserExtensions'; import {IKeybindingService} from 'vs/platform/keybinding/common/keybindingService'; import {RunOnceScheduler} from 'vs/base/common/async'; import {Range} from 'vs/editor/common/core/range'; class SelectionClipboard extends Disposable implements IEditorContribution { static ID = 'editor.contrib.selectionClipboard'; constructor(editor:ICodeEditor, @IKeybindingService keybindingService:IKeybindingService) { super(); if (platform.isLinux) { var isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; this._register(editor.onDidChangeConfiguration((e:IConfigurationChangedEvent) => { if (e.contribInfo) { isEnabled = editor.getConfiguration().contribInfo.selectionClipboard; } })); this._register(editor.onMouseDown((e:IEditorMouseEvent) => { if (!isEnabled) { return; } if (!editor.getModel()) { return; } if (e.event.middleButton) { e.event.preventDefault(); editor.focus();
process.nextTick(() => { // TODO@Alex: electron weirdness: calling clipboard.readText('selection') generates a paste event, so no need to execute paste ourselves clipboard.readText('selection'); // keybindingService.executeCommand(Handler.Paste, { // text: clipboard.readText('selection'), // pasteOnNewLine: false // }); }); } })); let setSelectionToClipboard = this._register(new RunOnceScheduler(() => { let model = editor.getModel(); if (!model) { return; } let selections = editor.getSelections(); selections = selections.slice(0); selections.sort(Range.compareRangesUsingStarts); let result: string[] = []; for (let i = 0; i < selections.length; i++) { let sel = selections[i]; if (sel.isEmpty()) { // Only write if all cursors have selection return; } result.push(model.getValueInRange(sel, EndOfLinePreference.TextDefined)); } let textToCopy = result.join(model.getEOL()); clipboard.writeText(textToCopy, 'selection'); }, 100)); this._register(editor.onDidChangeCursorSelection((e:ICursorSelectionChangedEvent) => { if (!isEnabled) { return; } setSelectionToClipboard.schedule(); })); } } public getId(): string { return SelectionClipboard.ID; } public dispose(): void { super.dispose(); } } EditorBrowserRegistry.registerEditorContribution(SelectionClipboard);
if (e.target.position) { editor.setPosition(e.target.position); }
random_line_split
models.py
from __future__ import unicode_literals from future.builtins import str from datetime import datetime import re try: from urllib.parse import quote except ImportError: # Python 2 from urllib import quote from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.html import urlize from django.utils.timezone import make_aware, utc from django.utils.translation import ugettext_lazy as _ from requests_oauthlib import OAuth1 import requests from mezzanine.conf import settings from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \ QUERY_TYPE_LIST, QUERY_TYPE_SEARCH from mezzanine.twitter import get_auth_settings from mezzanine.twitter.managers import TweetManager re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE) re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE) replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>" replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>" class TwitterQueryException(Exception): pass @python_2_unicode_compatible class Query(models.Model): type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES, max_length=10) value = models.CharField(_("Value"), max_length=140) interested = models.BooleanField("Interested", default=True) class Meta: verbose_name = _("Twitter query") verbose_name_plural = _("Twitter queries") ordering = ("-id",) def __str__(self): return "%s: %s" % (self.get_type_display(), self.value) def run(self): """ Request new tweets from the Twitter API. """ try: value = quote(self.value) except KeyError: value = self.value urls = { QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/" "user_timeline.json?screen_name=%s" "&include_rts=true" % value.lstrip("@")), QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json" "?list_id=%s&include_rts=true" % value), QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json" "?q=%s" % value, } try: url = urls[self.type] except KeyError: raise TwitterQueryException("Invalid query type: %s" % self.type) settings.use_editable() auth_settings = get_auth_settings() if not auth_settings: from mezzanine.conf import registry if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]: # These are some read-only keys and secrets we use # for the default query (eg nothing has been configured)
else: raise TwitterQueryException("Twitter OAuth settings missing") try: tweets = requests.get(url, auth=OAuth1(*auth_settings)).json() except Exception as e: raise TwitterQueryException("Error retrieving: %s" % e) try: raise TwitterQueryException(tweets["errors"][0]["message"]) except (IndexError, KeyError, TypeError): pass if self.type == "search": tweets = tweets["statuses"] for tweet_json in tweets: remote_id = str(tweet_json["id"]) tweet, created = self.tweets.get_or_create(remote_id=remote_id) if not created: continue if "retweeted_status" in tweet_json: user = tweet_json['user'] tweet.retweeter_user_name = user["screen_name"] tweet.retweeter_full_name = user["name"] tweet.retweeter_profile_image_url = user["profile_image_url"] tweet_json = tweet_json["retweeted_status"] if self.type == QUERY_TYPE_SEARCH: tweet.user_name = tweet_json['user']['screen_name'] tweet.full_name = tweet_json['user']['name'] tweet.profile_image_url = \ tweet_json['user']["profile_image_url"] date_format = "%a %b %d %H:%M:%S +0000 %Y" else: user = tweet_json["user"] tweet.user_name = user["screen_name"] tweet.full_name = user["name"] tweet.profile_image_url = user["profile_image_url"] date_format = "%a %b %d %H:%M:%S +0000 %Y" tweet.text = urlize(tweet_json["text"]) tweet.text = re_usernames.sub(replace_usernames, tweet.text) tweet.text = re_hashtags.sub(replace_hashtags, tweet.text) if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False): chars = [ch for ch in tweet.text if ord(ch) < 0x800] tweet.text = ''.join(chars) d = datetime.strptime(tweet_json["created_at"], date_format) tweet.created_at = make_aware(d, utc) try: tweet.save() except Warning: pass tweet.save() self.interested = False self.save() class Tweet(models.Model): remote_id = models.CharField(_("Twitter ID"), max_length=50) created_at = models.DateTimeField(_("Date/time"), null=True) text = models.TextField(_("Message"), null=True) profile_image_url = models.URLField(_("Profile image URL"), null=True) user_name = models.CharField(_("User name"), max_length=100, null=True) full_name = models.CharField(_("Full name"), max_length=100, null=True) retweeter_profile_image_url = models.URLField( _("Profile image URL (Retweeted by)"), null=True) retweeter_user_name = models.CharField( _("User name (Retweeted by)"), max_length=100, null=True) retweeter_full_name = models.CharField( _("Full name (Retweeted by)"), max_length=100, null=True) query = models.ForeignKey("Query", related_name="tweets") objects = TweetManager() class Meta: verbose_name = _("Tweet") verbose_name_plural = _("Tweets") ordering = ("-created_at",) def __str__(self): return "%s: %s" % (self.user_name, self.text) def is_retweet(self): return self.retweeter_user_name is not None
auth_settings = ( "KxZTRD3OBft4PP0iQW0aNQ", "sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ", "1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI", "r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R", )
conditional_block
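The re_usernames / re_hashtags substitutions at the top of models.py turn plain tweet text into linked HTML, exactly as run() applies them. A standalone repro of that step, using the module's own patterns (the sample tweet text is made up):

import re

re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE)
re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE)
replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>"
replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>"

text = "ping @alice about #django"
text = re_usernames.sub(replace_usernames, text)
text = re_hashtags.sub(replace_hashtags, text)
print(text)
# ping <a href="http://twitter.com/alice">@alice</a> about
# <a href="http://twitter.com/search?q=%23django">#django</a>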
models.py
from __future__ import unicode_literals from future.builtins import str from datetime import datetime import re try: from urllib.parse import quote except ImportError: # Python 2 from urllib import quote from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.html import urlize from django.utils.timezone import make_aware, utc from django.utils.translation import ugettext_lazy as _ from requests_oauthlib import OAuth1 import requests from mezzanine.conf import settings from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \
from mezzanine.twitter import get_auth_settings from mezzanine.twitter.managers import TweetManager re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE) re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE) replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>" replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>" class TwitterQueryException(Exception): pass @python_2_unicode_compatible class Query(models.Model): type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES, max_length=10) value = models.CharField(_("Value"), max_length=140) interested = models.BooleanField("Interested", default=True) class Meta: verbose_name = _("Twitter query") verbose_name_plural = _("Twitter queries") ordering = ("-id",) def __str__(self): return "%s: %s" % (self.get_type_display(), self.value) def run(self): """ Request new tweets from the Twitter API. """ try: value = quote(self.value) except KeyError: value = self.value urls = { QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/" "user_timeline.json?screen_name=%s" "&include_rts=true" % value.lstrip("@")), QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json" "?list_id=%s&include_rts=true" % value), QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json" "?q=%s" % value, } try: url = urls[self.type] except KeyError: raise TwitterQueryException("Invalid query type: %s" % self.type) settings.use_editable() auth_settings = get_auth_settings() if not auth_settings: from mezzanine.conf import registry if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]: # These are some read-only keys and secrets we use # for the default query (eg nothing has been configured) auth_settings = ( "KxZTRD3OBft4PP0iQW0aNQ", "sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ", "1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI", "r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R", ) else: raise TwitterQueryException("Twitter OAuth settings missing") try: tweets = requests.get(url, auth=OAuth1(*auth_settings)).json() except Exception as e: raise TwitterQueryException("Error retrieving: %s" % e) try: raise TwitterQueryException(tweets["errors"][0]["message"]) except (IndexError, KeyError, TypeError): pass if self.type == "search": tweets = tweets["statuses"] for tweet_json in tweets: remote_id = str(tweet_json["id"]) tweet, created = self.tweets.get_or_create(remote_id=remote_id) if not created: continue if "retweeted_status" in tweet_json: user = tweet_json['user'] tweet.retweeter_user_name = user["screen_name"] tweet.retweeter_full_name = user["name"] tweet.retweeter_profile_image_url = user["profile_image_url"] tweet_json = tweet_json["retweeted_status"] if self.type == QUERY_TYPE_SEARCH: tweet.user_name = tweet_json['user']['screen_name'] tweet.full_name = tweet_json['user']['name'] tweet.profile_image_url = \ tweet_json['user']["profile_image_url"] date_format = "%a %b %d %H:%M:%S +0000 %Y" else: user = tweet_json["user"] tweet.user_name = user["screen_name"] tweet.full_name = user["name"] tweet.profile_image_url = user["profile_image_url"] date_format = "%a %b %d %H:%M:%S +0000 %Y" tweet.text = urlize(tweet_json["text"]) tweet.text = re_usernames.sub(replace_usernames, tweet.text) tweet.text = re_hashtags.sub(replace_hashtags, tweet.text) if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False): chars = [ch for ch in tweet.text if ord(ch) < 0x800] tweet.text = ''.join(chars) d = datetime.strptime(tweet_json["created_at"], date_format) tweet.created_at = make_aware(d, utc) try: tweet.save() 
except Warning: pass tweet.save() self.interested = False self.save() class Tweet(models.Model): remote_id = models.CharField(_("Twitter ID"), max_length=50) created_at = models.DateTimeField(_("Date/time"), null=True) text = models.TextField(_("Message"), null=True) profile_image_url = models.URLField(_("Profile image URL"), null=True) user_name = models.CharField(_("User name"), max_length=100, null=True) full_name = models.CharField(_("Full name"), max_length=100, null=True) retweeter_profile_image_url = models.URLField( _("Profile image URL (Retweeted by)"), null=True) retweeter_user_name = models.CharField( _("User name (Retweeted by)"), max_length=100, null=True) retweeter_full_name = models.CharField( _("Full name (Retweeted by)"), max_length=100, null=True) query = models.ForeignKey("Query", related_name="tweets") objects = TweetManager() class Meta: verbose_name = _("Tweet") verbose_name_plural = _("Tweets") ordering = ("-created_at",) def __str__(self): return "%s: %s" % (self.user_name, self.text) def is_retweet(self): return self.retweeter_user_name is not None
QUERY_TYPE_LIST, QUERY_TYPE_SEARCH
random_line_split
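The TWITTER_STRIP_HIGH_MULTIBYTE branch in run() keeps only code points below 0x800, i.e. characters UTF-8 encodes in at most two bytes; anything needing three or more bytes (most CJK, emoji, symbols) is dropped. A short repro of that filter:

text = u"caf\u00e9 \u2615 tweet"              # 'café ☕ tweet'
kept = ''.join(ch for ch in text if ord(ch) < 0x800)
print(kept)                                    # 'café  tweet' (the ☕ is gone)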
models.py
from __future__ import unicode_literals from future.builtins import str from datetime import datetime import re try: from urllib.parse import quote except ImportError: # Python 2 from urllib import quote from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.html import urlize from django.utils.timezone import make_aware, utc from django.utils.translation import ugettext_lazy as _ from requests_oauthlib import OAuth1 import requests from mezzanine.conf import settings from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \ QUERY_TYPE_LIST, QUERY_TYPE_SEARCH from mezzanine.twitter import get_auth_settings from mezzanine.twitter.managers import TweetManager re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE) re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE) replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>" replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>" class TwitterQueryException(Exception):
@python_2_unicode_compatible
class Query(models.Model):

    type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES,
                            max_length=10)
    value = models.CharField(_("Value"), max_length=140)
    interested = models.BooleanField("Interested", default=True)

    class Meta:
        verbose_name = _("Twitter query")
        verbose_name_plural = _("Twitter queries")
        ordering = ("-id",)

    def __str__(self):
        return "%s: %s" % (self.get_type_display(), self.value)

    def run(self):
        """
        Request new tweets from the Twitter API.
        """
        try:
            value = quote(self.value)
        except KeyError:
            value = self.value
        urls = {
            QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/"
                              "user_timeline.json?screen_name=%s"
                              "&include_rts=true" % value.lstrip("@")),
            QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json"
                              "?list_id=%s&include_rts=true" % value),
            QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json"
                               "?q=%s" % value,
        }
        try:
            url = urls[self.type]
        except KeyError:
            raise TwitterQueryException("Invalid query type: %s" % self.type)
        settings.use_editable()
        auth_settings = get_auth_settings()
        if not auth_settings:
            from mezzanine.conf import registry
            if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]:
                # These are some read-only keys and secrets we use
                # for the default query (eg nothing has been configured)
                auth_settings = (
                    "KxZTRD3OBft4PP0iQW0aNQ",
                    "sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ",
                    "1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI",
                    "r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R",
                )
            else:
                raise TwitterQueryException("Twitter OAuth settings missing")
        try:
            tweets = requests.get(url, auth=OAuth1(*auth_settings)).json()
        except Exception as e:
            raise TwitterQueryException("Error retrieving: %s" % e)
        try:
            raise TwitterQueryException(tweets["errors"][0]["message"])
        except (IndexError, KeyError, TypeError):
            pass
        if self.type == "search":
            tweets = tweets["statuses"]
        for tweet_json in tweets:
            remote_id = str(tweet_json["id"])
            tweet, created = self.tweets.get_or_create(remote_id=remote_id)
            if not created:
                continue
            if "retweeted_status" in tweet_json:
                user = tweet_json['user']
                tweet.retweeter_user_name = user["screen_name"]
                tweet.retweeter_full_name = user["name"]
                tweet.retweeter_profile_image_url = user["profile_image_url"]
                tweet_json = tweet_json["retweeted_status"]
            if self.type == QUERY_TYPE_SEARCH:
                tweet.user_name = tweet_json['user']['screen_name']
                tweet.full_name = tweet_json['user']['name']
                tweet.profile_image_url = \
                    tweet_json['user']["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            else:
                user = tweet_json["user"]
                tweet.user_name = user["screen_name"]
                tweet.full_name = user["name"]
                tweet.profile_image_url = user["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            tweet.text = urlize(tweet_json["text"])
            tweet.text = re_usernames.sub(replace_usernames, tweet.text)
            tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)
            if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False):
                chars = [ch for ch in tweet.text if ord(ch) < 0x800]
                tweet.text = ''.join(chars)
            d = datetime.strptime(tweet_json["created_at"], date_format)
            tweet.created_at = make_aware(d, utc)
            try:
                tweet.save()
            except Warning:
                pass
            tweet.save()
        self.interested = False
        self.save()


class Tweet(models.Model):

    remote_id = models.CharField(_("Twitter ID"), max_length=50)
    created_at = models.DateTimeField(_("Date/time"), null=True)
    text = models.TextField(_("Message"), null=True)
    profile_image_url = models.URLField(_("Profile image URL"), null=True)
    user_name = models.CharField(_("User name"), max_length=100, null=True)
    full_name = models.CharField(_("Full name"), max_length=100, null=True)
    retweeter_profile_image_url = models.URLField(
        _("Profile image URL (Retweeted by)"), null=True)
    retweeter_user_name = models.CharField(
        _("User name (Retweeted by)"), max_length=100, null=True)
    retweeter_full_name = models.CharField(
        _("Full name (Retweeted by)"), max_length=100, null=True)
    query = models.ForeignKey("Query", related_name="tweets")

    objects = TweetManager()

    class Meta:
        verbose_name = _("Tweet")
        verbose_name_plural = _("Tweets")
        ordering = ("-created_at",)

    def __str__(self):
        return "%s: %s" % (self.user_name, self.text)

    def is_retweet(self):
        return self.retweeter_user_name is not None
pass
identifier_body
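The re_usernames/re_hashtags patterns and their replacement templates in this row are self-contained, so their behaviour can be checked outside Django. A minimal sketch using only the standard library; the sample tweet text is made up for illustration:

import re

# Same patterns and replacement templates as in the models.py row above.
re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE)
re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE)
replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>"
replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>"

text = "Thanks @stephen_mcd for #mezzanine!"  # hypothetical sample input
text = re_usernames.sub(replace_usernames, text)
text = re_hashtags.sub(replace_hashtags, text)
print(text)
# Thanks <a href="http://twitter.com/stephen_mcd">@stephen_mcd</a>
# for <a href="http://twitter.com/search?q=%23mezzanine">#mezzanine</a>!

Note that the username pass runs first; since its output contains no "#", the hashtag pass that follows cannot double-process it.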
models.py
from __future__ import unicode_literals
from future.builtins import str

from datetime import datetime
import re
try:
    from urllib.parse import quote
except ImportError:
    # Python 2
    from urllib import quote

from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import urlize
from django.utils.timezone import make_aware, utc
from django.utils.translation import ugettext_lazy as _
from requests_oauthlib import OAuth1
import requests

from mezzanine.conf import settings
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \
    QUERY_TYPE_LIST, QUERY_TYPE_SEARCH
from mezzanine.twitter import get_auth_settings
from mezzanine.twitter.managers import TweetManager


re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE)
re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE)
replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>"
replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>"


class TwitterQueryException(Exception):
    pass


@python_2_unicode_compatible
class Query(models.Model):

    type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES,
                            max_length=10)
    value = models.CharField(_("Value"), max_length=140)
    interested = models.BooleanField("Interested", default=True)

    class
:
        verbose_name = _("Twitter query")
        verbose_name_plural = _("Twitter queries")
        ordering = ("-id",)

    def __str__(self):
        return "%s: %s" % (self.get_type_display(), self.value)

    def run(self):
        """
        Request new tweets from the Twitter API.
        """
        try:
            value = quote(self.value)
        except KeyError:
            value = self.value
        urls = {
            QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/"
                              "user_timeline.json?screen_name=%s"
                              "&include_rts=true" % value.lstrip("@")),
            QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json"
                              "?list_id=%s&include_rts=true" % value),
            QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json"
                               "?q=%s" % value,
        }
        try:
            url = urls[self.type]
        except KeyError:
            raise TwitterQueryException("Invalid query type: %s" % self.type)
        settings.use_editable()
        auth_settings = get_auth_settings()
        if not auth_settings:
            from mezzanine.conf import registry
            if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]:
                # These are some read-only keys and secrets we use
                # for the default query (eg nothing has been configured)
                auth_settings = (
                    "KxZTRD3OBft4PP0iQW0aNQ",
                    "sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ",
                    "1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI",
                    "r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R",
                )
            else:
                raise TwitterQueryException("Twitter OAuth settings missing")
        try:
            tweets = requests.get(url, auth=OAuth1(*auth_settings)).json()
        except Exception as e:
            raise TwitterQueryException("Error retrieving: %s" % e)
        try:
            raise TwitterQueryException(tweets["errors"][0]["message"])
        except (IndexError, KeyError, TypeError):
            pass
        if self.type == "search":
            tweets = tweets["statuses"]
        for tweet_json in tweets:
            remote_id = str(tweet_json["id"])
            tweet, created = self.tweets.get_or_create(remote_id=remote_id)
            if not created:
                continue
            if "retweeted_status" in tweet_json:
                user = tweet_json['user']
                tweet.retweeter_user_name = user["screen_name"]
                tweet.retweeter_full_name = user["name"]
                tweet.retweeter_profile_image_url = user["profile_image_url"]
                tweet_json = tweet_json["retweeted_status"]
            if self.type == QUERY_TYPE_SEARCH:
                tweet.user_name = tweet_json['user']['screen_name']
                tweet.full_name = tweet_json['user']['name']
                tweet.profile_image_url = \
                    tweet_json['user']["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            else:
                user = tweet_json["user"]
                tweet.user_name = user["screen_name"]
                tweet.full_name = user["name"]
                tweet.profile_image_url = user["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            tweet.text = urlize(tweet_json["text"])
            tweet.text = re_usernames.sub(replace_usernames, tweet.text)
            tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)
            if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False):
                chars = [ch for ch in tweet.text if ord(ch) < 0x800]
                tweet.text = ''.join(chars)
            d = datetime.strptime(tweet_json["created_at"], date_format)
            tweet.created_at = make_aware(d, utc)
            try:
                tweet.save()
            except Warning:
                pass
            tweet.save()
        self.interested = False
        self.save()


class Tweet(models.Model):

    remote_id = models.CharField(_("Twitter ID"), max_length=50)
    created_at = models.DateTimeField(_("Date/time"), null=True)
    text = models.TextField(_("Message"), null=True)
    profile_image_url = models.URLField(_("Profile image URL"), null=True)
    user_name = models.CharField(_("User name"), max_length=100, null=True)
    full_name = models.CharField(_("Full name"), max_length=100, null=True)
    retweeter_profile_image_url = models.URLField(
        _("Profile image URL (Retweeted by)"), null=True)
    retweeter_user_name = models.CharField(
        _("User name (Retweeted by)"), max_length=100, null=True)
    retweeter_full_name = models.CharField(
        _("Full name (Retweeted by)"), max_length=100, null=True)
    query = models.ForeignKey("Query", related_name="tweets")

    objects = TweetManager()

    class Meta:
        verbose_name = _("Tweet")
        verbose_name_plural = _("Tweets")
        ordering = ("-created_at",)

    def __str__(self):
        return "%s: %s" % (self.user_name, self.text)

    def is_retweet(self):
        return self.retweeter_user_name is not None
Meta
identifier_name
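Both branches of Query.run() parse Twitter's created_at timestamps with the same format string. A standalone sketch of that step, substituting datetime.timezone.utc for Django's make_aware/utc helpers so it runs without Django; the raw value is a hypothetical API response:

from datetime import datetime, timezone

date_format = "%a %b %d %H:%M:%S +0000 %Y"  # format used in Query.run()
raw = "Wed Aug 27 13:08:45 +0000 2008"      # hypothetical created_at value

d = datetime.strptime(raw, date_format).replace(tzinfo=timezone.utc)
print(d.isoformat())  # 2008-08-27T13:08:45+00:00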
struct-partial-move-1.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[derive(PartialEq, Debug)]
pub struct Partial<T> { x: T, y: T }

#[derive(PartialEq, Debug)]
struct S { val: isize }

impl S {
    fn new(v: isize) -> S {
        S { val: v }
    }
}

impl Drop for S {
    fn drop(&mut self) { }
}

pub fn f<T, F>((b1, b2): (T, T), mut f: F) -> Partial<T> where F: FnMut(T) -> T {
    let p = Partial { x: b1, y: b2 };

    // Move of `p` is legal even though we are also moving `p.y`; the
    // `..p` moves all fields *except* `p.y` in this context.
    Partial { y: f(p.y), ..p }
}

pub fn main() {
    let p = f((S::new(3), S::new(4)), |S { val: z }| S::new(z+1));
    assert_eq!(p, Partial { x: S::new(3), y: S::new(5) });
}
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
random_line_split
struct-partial-move-1.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[derive(PartialEq, Debug)]
pub struct Partial<T> { x: T, y: T }

#[derive(PartialEq, Debug)]
struct
 { val: isize }

impl S {
    fn new(v: isize) -> S {
        S { val: v }
    }
}

impl Drop for S {
    fn drop(&mut self) { }
}

pub fn f<T, F>((b1, b2): (T, T), mut f: F) -> Partial<T> where F: FnMut(T) -> T {
    let p = Partial { x: b1, y: b2 };

    // Move of `p` is legal even though we are also moving `p.y`; the
    // `..p` moves all fields *except* `p.y` in this context.
    Partial { y: f(p.y), ..p }
}

pub fn main() {
    let p = f((S::new(3), S::new(4)), |S { val: z }| S::new(z+1));
    assert_eq!(p, Partial { x: S::new(3), y: S::new(5) });
}
S
identifier_name
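The `..p` functional-record-update syntax exercised by this test has a rough Python counterpart in dataclasses.replace, which also builds a new record by overriding some fields and carrying the rest over. A loose sketch of the same idea (an analogy only, not a translation of the Rust semantics around partial moves and Drop):

from dataclasses import dataclass, replace

@dataclass
class Partial:
    x: int
    y: int

p = Partial(x=3, y=4)
# Keep x, override y -- analogous to Partial { y: ..., ..p } in the Rust test.
q = replace(p, y=p.y + 1)
print(q)  # Partial(x=3, y=5)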
zbytes.rs
//! The `zbyte` module contains code
//! to deal with opcodes and zcode.

/// A struct that holds an array of bytes and provides some convenience functions.
pub struct Bytes {
    /// The underlying data
    pub bytes: Vec<u8>
}

impl Bytes {
    /// Returns the length of the byte array.
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    /// Writes the byte (u8) to the index specified.
    ///
    /// If the vector isn't large enough it fills everything up to the index with zeros.
    pub fn write_byte(&mut self, byte: u8, index: usize) {
        while self.len() <= index {
            self.bytes.push(0);
        }
        self.bytes[index] = byte;
    }

    /// Appends a byte to the end of the data.
    pub fn append_byte(&mut self, byte: u8) {
        let index: usize = self.bytes.len();
        self.write_byte(byte, index);
    }

    /// Writes a u16 in two bytes with the correct byte-order for the Z-Machine at the specified
    /// index.
    pub fn write_u16(&mut self, value: u16, index: usize) {
        self.write_byte((value >> 8) as u8, index);
        self.write_byte((value & 0xff) as u8, index + 1);
    }

    /// Appends a u16 to the end of the data.
    pub fn append_u16(&mut self, value: u16) {
        let index: usize = self.bytes.len();
        self.write_u16(value, index);
    }

    /// Writes multiple bytes at the specified index.
    pub fn write_bytes(&mut self, bytes: &[u8], to_index: usize) {
        for i in 0..bytes.len() {
            self.write_byte(bytes[i], to_index+i);
        }
    }

    /// Appends an array of bytes at the end of the data.
    pub fn append_bytes(&mut self, bytes: &[u8]) {
        let index: usize = self.bytes.len();
        self.write_bytes(bytes, index);
    }
    ///
    /// `=> [index-1] == 0; [index] == nil;`
    pub fn write_zero_until(&mut self, index: usize) {
        while self.len() < index {
            self.bytes.push(0);
        }
    }

    /// Prints the underlying byte array
    pub fn print(&self) {
        debug!("bytes: {:?}", self.bytes);
    }
}
/// Fills everything with zeros until but not including the index.
random_line_split
zbytes.rs
//! The `zbyte` module contains code
//! to deal with opcodes and zcode.

/// A struct that holds an array of bytes and provides some convenience functions.
pub struct Bytes {
    /// The underlying data
    pub bytes: Vec<u8>
}

impl Bytes {
    /// Returns the length of the byte array.
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    /// Writes the byte (u8) to the index specified.
    ///
    /// If the vector isn't large enough it fills everything up to the index with zeros.
    pub fn write_byte(&mut self, byte: u8, index: usize) {
        while self.len() <= index {
            self.bytes.push(0);
        }
        self.bytes[index] = byte;
    }

    /// Appends a byte to the end of the data.
    pub fn
(&mut self, byte: u8) {
        let index: usize = self.bytes.len();
        self.write_byte(byte, index);
    }

    /// Writes a u16 in two bytes with the correct byte-order for the Z-Machine at the specified
    /// index.
    pub fn write_u16(&mut self, value: u16, index: usize) {
        self.write_byte((value >> 8) as u8, index);
        self.write_byte((value & 0xff) as u8, index + 1);
    }

    /// Appends a u16 to the end of the data.
    pub fn append_u16(&mut self, value: u16) {
        let index: usize = self.bytes.len();
        self.write_u16(value, index);
    }

    /// Writes multiple bytes at the specified index.
    pub fn write_bytes(&mut self, bytes: &[u8], to_index: usize) {
        for i in 0..bytes.len() {
            self.write_byte(bytes[i], to_index+i);
        }
    }

    /// Appends an array of bytes at the end of the data.
    pub fn append_bytes(&mut self, bytes: &[u8]) {
        let index: usize = self.bytes.len();
        self.write_bytes(bytes, index);
    }

    /// Fills everything with zeros until but not including the index.
    ///
    /// `=> [index-1] == 0; [index] == nil;`
    pub fn write_zero_until(&mut self, index: usize) {
        while self.len() < index {
            self.bytes.push(0);
        }
    }

    /// Prints the underlying byte array
    pub fn print(&self) {
        debug!("bytes: {:?}", self.bytes);
    }
}
append_byte
identifier_name
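The semantics documented on Bytes -- zero-fill up to the target index, big-endian u16 writes for the Z-Machine -- are easy to mirror with a Python bytearray, which can help when cross-checking generated zcode by hand. A small sketch of part of the same API surface (the class and method names simply mirror the Rust struct above):

class Bytes:
    """Grow-on-write byte buffer mirroring the Rust struct above."""

    def __init__(self):
        self.bytes = bytearray()

    def write_byte(self, byte, index):
        # Zero-fill until the index is addressable, as in the Rust version.
        while len(self.bytes) <= index:
            self.bytes.append(0)
        self.bytes[index] = byte

    def write_u16(self, value, index):
        # Z-Machine words are big-endian: high byte first.
        self.write_byte((value >> 8) & 0xff, index)
        self.write_byte(value & 0xff, index + 1)

buf = Bytes()
buf.write_u16(0xBEEF, 2)
print(buf.bytes.hex())  # 0000beef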
es-DO.ts
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js

function plural(n: number): number {
  if (n === 1) return 1;
  return 5;
}

export default [
  'es-DO',
  [
    ['a. m.', 'p. m.'],
    ,
  ],
  ,
  [
    ['D', 'L', 'M', 'M', 'J', 'V', 'S'],
    ['dom.', 'lun.', 'mar.', 'mié.', 'jue.', 'vie.', 'sáb.'],
    ['domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado'],
    ['DO', 'LU', 'MA', 'MI', 'JU', 'VI', 'SA']
  ],
  ,
  [
    ['E', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
    [
      'ene.', 'feb.', 'mar.', 'abr.', 'may.', 'jun.', 'jul.', 'ago.', 'sep.',
      'oct.', 'nov.', 'dic.'
    ],
    [
      'enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto',
      'septiembre', 'octubre', 'noviembre', 'diciembre'
    ]
  ],
  ,
  [['a. C.', 'd. C.'], , ['antes de Cristo', 'después de Cristo']],
  0,
  [6, 0],
  ['d/M/yy', 'd MMM y', 'd \'de\' MMMM \'de\' y', 'EEEE, d \'de\' MMMM \'de\' y'],
  ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
  [
    '{1} {0}',
    ,
    '{1}, {0}',
  ],
  ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
  ['#,##0.###', '#,##0 %', '¤#,##0.00', '#E0'],
  'RD$',
  'peso dominicano',
  {
    'AUD': [, '$'],
    'BRL': [, 'R$'],
    'CAD': [, '$'],
    'CNY': [, '¥'],
    'DOP': ['RD$', '$'],
    'ESP': ['₧'],
    'EUR': [, '€'],
    'FKP': [, 'FK£'],
    'GBP': [, '£'],
    'HKD': [, '$'],
    'ILS': [, '₪'],
    'INR': [, '₹'],
    'JPY': [, '¥'],
    'KRW': [, '₩'],
    'MXN': [, '$'],
    'NZD': [, '$'],
    'RON': [, 'L'],
    'SSP': [, 'SD£'],
    'SYP': [, 'S£'],
    'TWD': [, 'NT$'],
    'USD': ['US$', '$'],
    'VEF': [, 'BsF'],
    'VND': [, '₫'],
    'XAF': [],
    'XCD': [, '$'],
    'XOF': []
  },
  plural
];
random_line_split
es-DO.ts
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js

function
(n: number): number {
  if (n === 1) return 1;
  return 5;
}

export default [
  'es-DO',
  [
    ['a. m.', 'p. m.'],
    ,
  ],
  ,
  [
    ['D', 'L', 'M', 'M', 'J', 'V', 'S'],
    ['dom.', 'lun.', 'mar.', 'mié.', 'jue.', 'vie.', 'sáb.'],
    ['domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado'],
    ['DO', 'LU', 'MA', 'MI', 'JU', 'VI', 'SA']
  ],
  ,
  [
    ['E', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
    [
      'ene.', 'feb.', 'mar.', 'abr.', 'may.', 'jun.', 'jul.', 'ago.', 'sep.',
      'oct.', 'nov.', 'dic.'
    ],
    [
      'enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto',
      'septiembre', 'octubre', 'noviembre', 'diciembre'
    ]
  ],
  ,
  [['a. C.', 'd. C.'], , ['antes de Cristo', 'después de Cristo']],
  0,
  [6, 0],
  ['d/M/yy', 'd MMM y', 'd \'de\' MMMM \'de\' y', 'EEEE, d \'de\' MMMM \'de\' y'],
  ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
  [
    '{1} {0}',
    ,
    '{1}, {0}',
  ],
  ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
  ['#,##0.###', '#,##0 %', '¤#,##0.00', '#E0'],
  'RD$',
  'peso dominicano',
  {
    'AUD': [, '$'],
    'BRL': [, 'R$'],
    'CAD': [, '$'],
    'CNY': [, '¥'],
    'DOP': ['RD$', '$'],
    'ESP': ['₧'],
    'EUR': [, '€'],
    'FKP': [, 'FK£'],
    'GBP': [, '£'],
    'HKD': [, '$'],
    'ILS': [, '₪'],
    'INR': [, '₹'],
    'JPY': [, '¥'],
    'KRW': [, '₩'],
    'MXN': [, '$'],
    'NZD': [, '$'],
    'RON': [, 'L'],
    'SSP': [, 'SD£'],
    'SYP': [, 'S£'],
    'TWD': [, 'NT$'],
    'USD': ['US$', '$'],
    'VEF': [, 'BsF'],
    'VND': [, '₫'],
    'XAF': [],
    'XCD': [, '$'],
    'XOF': []
  },
  plural
];
plural
identifier_name
es-DO.ts
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js

function plural(n: number): number
export default [
  'es-DO',
  [
    ['a. m.', 'p. m.'],
    ,
  ],
  ,
  [
    ['D', 'L', 'M', 'M', 'J', 'V', 'S'],
    ['dom.', 'lun.', 'mar.', 'mié.', 'jue.', 'vie.', 'sáb.'],
    ['domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado'],
    ['DO', 'LU', 'MA', 'MI', 'JU', 'VI', 'SA']
  ],
  ,
  [
    ['E', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
    [
      'ene.', 'feb.', 'mar.', 'abr.', 'may.', 'jun.', 'jul.', 'ago.', 'sep.',
      'oct.', 'nov.', 'dic.'
    ],
    [
      'enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto',
      'septiembre', 'octubre', 'noviembre', 'diciembre'
    ]
  ],
  ,
  [['a. C.', 'd. C.'], , ['antes de Cristo', 'después de Cristo']],
  0,
  [6, 0],
  ['d/M/yy', 'd MMM y', 'd \'de\' MMMM \'de\' y', 'EEEE, d \'de\' MMMM \'de\' y'],
  ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
  [
    '{1} {0}',
    ,
    '{1}, {0}',
  ],
  ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
  ['#,##0.###', '#,##0 %', '¤#,##0.00', '#E0'],
  'RD$',
  'peso dominicano',
  {
    'AUD': [, '$'],
    'BRL': [, 'R$'],
    'CAD': [, '$'],
    'CNY': [, '¥'],
    'DOP': ['RD$', '$'],
    'ESP': ['₧'],
    'EUR': [, '€'],
    'FKP': [, 'FK£'],
    'GBP': [, '£'],
    'HKD': [, '$'],
    'ILS': [, '₪'],
    'INR': [, '₹'],
    'JPY': [, '¥'],
    'KRW': [, '₩'],
    'MXN': [, '$'],
    'NZD': [, '$'],
    'RON': [, 'L'],
    'SSP': [, 'SD£'],
    'SYP': [, 'S£'],
    'TWD': [, 'NT$'],
    'USD': ['US$', '$'],
    'VEF': [, 'BsF'],
    'VND': [, '₫'],
    'XAF': [],
    'XCD': [, '$'],
    'XOF': []
  },
  plural
];
{
  if (n === 1) return 1;
  return 5;
}
identifier_body
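The generated plural function encodes the CLDR plural rule for Spanish; judging by Angular's other generated locales, the return values index a plural-category table where 1 maps to "one" and 5 to "other" (that mapping is an assumption here, not stated in this file). The rule itself is trivial to restate and test:

def plural(n):
    """Spanish CLDR plural rule from the generated es-DO locale:
    1 -> 'one', 5 -> 'other' (category indices assumed from Angular's convention)."""
    return 1 if n == 1 else 5

assert plural(1) == 1  # 'one'
assert plural(0) == 5  # 'other'
assert plural(2) == 5  # 'other'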
closeevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::CloseEventBinding;
use dom::bindings::codegen::Bindings::CloseEventBinding::CloseEventMethods;
use dom::bindings::codegen::InheritTypes::EventCast;
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId, EventBubbles, EventCancelable};
use script_task::ScriptChan;
use util::str::DOMString;

#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CloseEvent {
    event: Event,
    wasClean: bool,
    code: u16,
    reason: DOMString,
}

impl CloseEvent {
    pub fn new_inherited(type_id: EventTypeId, wasClean: bool, code: u16,
                         reason: DOMString) -> CloseEvent {
        CloseEvent {
            event: Event::new_inherited(type_id),
            wasClean: wasClean,
            code: code,
            reason: reason,
        }
    }

    pub fn new(global: GlobalRef,
               type_: DOMString,
               bubbles: EventBubbles,
               cancelable: EventCancelable,
               wasClean: bool,
               code: u16,
               reason: DOMString) -> Root<CloseEvent> {
        let event = box CloseEvent::new_inherited(EventTypeId::CloseEvent,
                                                  wasClean, code, reason);
        let ev = reflect_dom_object(event, global, CloseEventBinding::Wrap);
        {
            let event = EventCast::from_ref(ev.r());
            event.InitEvent(type_,
                            bubbles == EventBubbles::Bubbles,
                            cancelable == EventCancelable::Cancelable);
        }
        ev
    }

    pub fn Constructor(global: GlobalRef,
                       type_: DOMString,
                       init: &CloseEventBinding::CloseEventInit)
                       -> Fallible<Root<CloseEvent>> {
        let bubbles = if init.parent.bubbles {
            EventBubbles::Bubbles
        } else {
            EventBubbles::DoesNotBubble
        };
        let cancelable = if init.parent.cancelable
        else {
            EventCancelable::NotCancelable
        };
        Ok(CloseEvent::new(global, type_, bubbles, cancelable, init.wasClean,
                           init.code, init.reason.clone()))
    }
}

impl<'a> CloseEventMethods for &'a CloseEvent {
    // https://html.spec.whatwg.org/multipage/#dom-closeevent-wasclean
    fn WasClean(self) -> bool {
        self.wasClean
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-code
    fn Code(self) -> u16 {
        self.code
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-reason
    fn Reason(self) -> DOMString {
        self.reason.clone()
    }
}
{ EventCancelable::Cancelable }
conditional_block
closeevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::CloseEventBinding;
use dom::bindings::codegen::Bindings::CloseEventBinding::CloseEventMethods;
use dom::bindings::codegen::InheritTypes::EventCast;
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId, EventBubbles, EventCancelable};
use script_task::ScriptChan;
use util::str::DOMString;

#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CloseEvent {
    event: Event,
    wasClean: bool,
    code: u16,
    reason: DOMString,
}

impl CloseEvent {
    pub fn new_inherited(type_id: EventTypeId, wasClean: bool, code: u16,
                         reason: DOMString) -> CloseEvent {
        CloseEvent {
            event: Event::new_inherited(type_id),
            wasClean: wasClean,
            code: code,
            reason: reason,
        }
    }

    pub fn new(global: GlobalRef,
               type_: DOMString,
               bubbles: EventBubbles,
               cancelable: EventCancelable,
               wasClean: bool,
               code: u16,
               reason: DOMString) -> Root<CloseEvent> {
        let event = box CloseEvent::new_inherited(EventTypeId::CloseEvent,
                                                  wasClean, code, reason);
        let ev = reflect_dom_object(event, global, CloseEventBinding::Wrap);
        {
            let event = EventCast::from_ref(ev.r());
            event.InitEvent(type_,
                            bubbles == EventBubbles::Bubbles,
                            cancelable == EventCancelable::Cancelable);
        }
        ev
    }

    pub fn Constructor(global: GlobalRef,
                       type_: DOMString,
                       init: &CloseEventBinding::CloseEventInit)
                       -> Fallible<Root<CloseEvent>> {
        let bubbles = if init.parent.bubbles {
            EventBubbles::Bubbles
        } else {
            EventBubbles::DoesNotBubble
        };
        let cancelable = if init.parent.cancelable {
            EventCancelable::Cancelable
        } else {
            EventCancelable::NotCancelable
        };
        Ok(CloseEvent::new(global, type_, bubbles, cancelable, init.wasClean,
                           init.code, init.reason.clone()))
    }
}

impl<'a> CloseEventMethods for &'a CloseEvent {
    // https://html.spec.whatwg.org/multipage/#dom-closeevent-wasclean
    fn WasClean(self) -> bool {
        self.wasClean
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-code
    fn Code(self) -> u16
    // https://html.spec.whatwg.org/multipage/#dom-closeevent-reason
    fn Reason(self) -> DOMString {
        self.reason.clone()
    }
}
{ self.code }
identifier_body
closeevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::CloseEventBinding;
use dom::bindings::codegen::Bindings::CloseEventBinding::CloseEventMethods;
use dom::bindings::codegen::InheritTypes::EventCast;
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId, EventBubbles, EventCancelable};
use script_task::ScriptChan;
use util::str::DOMString;

#[dom_struct]
    reason: DOMString,
}

impl CloseEvent {
    pub fn new_inherited(type_id: EventTypeId, wasClean: bool, code: u16,
                         reason: DOMString) -> CloseEvent {
        CloseEvent {
            event: Event::new_inherited(type_id),
            wasClean: wasClean,
            code: code,
            reason: reason,
        }
    }

    pub fn new(global: GlobalRef,
               type_: DOMString,
               bubbles: EventBubbles,
               cancelable: EventCancelable,
               wasClean: bool,
               code: u16,
               reason: DOMString) -> Root<CloseEvent> {
        let event = box CloseEvent::new_inherited(EventTypeId::CloseEvent,
                                                  wasClean, code, reason);
        let ev = reflect_dom_object(event, global, CloseEventBinding::Wrap);
        {
            let event = EventCast::from_ref(ev.r());
            event.InitEvent(type_,
                            bubbles == EventBubbles::Bubbles,
                            cancelable == EventCancelable::Cancelable);
        }
        ev
    }

    pub fn Constructor(global: GlobalRef,
                       type_: DOMString,
                       init: &CloseEventBinding::CloseEventInit)
                       -> Fallible<Root<CloseEvent>> {
        let bubbles = if init.parent.bubbles {
            EventBubbles::Bubbles
        } else {
            EventBubbles::DoesNotBubble
        };
        let cancelable = if init.parent.cancelable {
            EventCancelable::Cancelable
        } else {
            EventCancelable::NotCancelable
        };
        Ok(CloseEvent::new(global, type_, bubbles, cancelable, init.wasClean,
                           init.code, init.reason.clone()))
    }
}

impl<'a> CloseEventMethods for &'a CloseEvent {
    // https://html.spec.whatwg.org/multipage/#dom-closeevent-wasclean
    fn WasClean(self) -> bool {
        self.wasClean
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-code
    fn Code(self) -> u16 {
        self.code
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-reason
    fn Reason(self) -> DOMString {
        self.reason.clone()
    }
}
#[derive(HeapSizeOf)]
pub struct CloseEvent {
    event: Event,
    wasClean: bool,
    code: u16,
random_line_split
closeevent.rs
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::CloseEventBinding;
use dom::bindings::codegen::Bindings::CloseEventBinding::CloseEventMethods;
use dom::bindings::codegen::InheritTypes::EventCast;
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId, EventBubbles, EventCancelable};
use script_task::ScriptChan;
use util::str::DOMString;

#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CloseEvent {
    event: Event,
    wasClean: bool,
    code: u16,
    reason: DOMString,
}

impl CloseEvent {
    pub fn new_inherited(type_id: EventTypeId, wasClean: bool, code: u16,
                         reason: DOMString) -> CloseEvent {
        CloseEvent {
            event: Event::new_inherited(type_id),
            wasClean: wasClean,
            code: code,
            reason: reason,
        }
    }

    pub fn
(global: GlobalRef,
               type_: DOMString,
               bubbles: EventBubbles,
               cancelable: EventCancelable,
               wasClean: bool,
               code: u16,
               reason: DOMString) -> Root<CloseEvent> {
        let event = box CloseEvent::new_inherited(EventTypeId::CloseEvent,
                                                  wasClean, code, reason);
        let ev = reflect_dom_object(event, global, CloseEventBinding::Wrap);
        {
            let event = EventCast::from_ref(ev.r());
            event.InitEvent(type_,
                            bubbles == EventBubbles::Bubbles,
                            cancelable == EventCancelable::Cancelable);
        }
        ev
    }

    pub fn Constructor(global: GlobalRef,
                       type_: DOMString,
                       init: &CloseEventBinding::CloseEventInit)
                       -> Fallible<Root<CloseEvent>> {
        let bubbles = if init.parent.bubbles {
            EventBubbles::Bubbles
        } else {
            EventBubbles::DoesNotBubble
        };
        let cancelable = if init.parent.cancelable {
            EventCancelable::Cancelable
        } else {
            EventCancelable::NotCancelable
        };
        Ok(CloseEvent::new(global, type_, bubbles, cancelable, init.wasClean,
                           init.code, init.reason.clone()))
    }
}

impl<'a> CloseEventMethods for &'a CloseEvent {
    // https://html.spec.whatwg.org/multipage/#dom-closeevent-wasclean
    fn WasClean(self) -> bool {
        self.wasClean
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-code
    fn Code(self) -> u16 {
        self.code
    }

    // https://html.spec.whatwg.org/multipage/#dom-closeevent-reason
    fn Reason(self) -> DOMString {
        self.reason.clone()
    }
}
new
identifier_name
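The Constructor in these rows maps two booleans from the init dictionary onto two-valued enums before delegating to new(). The same shape in Python, purely as an illustration of the pattern rather than Servo's API (names follow the Rust code; the init dict stands in for CloseEventInit):

from enum import Enum

class EventBubbles(Enum):
    Bubbles = True
    DoesNotBubble = False

class EventCancelable(Enum):
    Cancelable = True
    NotCancelable = False

def close_event_from_init(type_, init):
    # Booleans from the init dict become enum flags, as in Constructor().
    bubbles = EventBubbles.Bubbles if init["bubbles"] else EventBubbles.DoesNotBubble
    cancelable = (EventCancelable.Cancelable if init["cancelable"]
                  else EventCancelable.NotCancelable)
    return (type_, bubbles, cancelable, init["wasClean"], init["code"], init["reason"])

print(close_event_from_init("close", {
    "bubbles": False, "cancelable": False,
    "wasClean": True, "code": 1000, "reason": "normal closure",
}))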
es.py
# coding: utf-8 { '!langcode!': 'es', '!langname!': 'Español', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualice" es una expresión opcional como "campo1=\'nuevo_valor\'". No se puede actualizar o eliminar resultados de un JOIN', '%d days ago': 'hace %d días', '%d hours ago': 'hace %d horas', '%d minutes ago': 'hace %d minutos', '%d months ago': '%d months ago', '%d seconds ago': 'hace %d segundos', '%d weeks ago': 'hace %d semanas', '%s %%{row} deleted': '%s %%{fila} %%{eliminada}', '%s %%{row} updated': '%s %%{fila} %%{actualizada}', '%s selected': '%s %%{seleccionado}', '%Y-%m-%d': '%d/%m/%Y', '%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S', '(something like "it-it")': '(algo como "eso-eso")', '1 day ago': 'ayer', '1 hour ago': 'hace una hora', '1 minute ago': 'hace un minuto', '1 second ago': 'hace 1 segundo', '1 week ago': 'hace una semana', '@markmin\x01**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)', '@markmin\x01``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)', '@markmin\x01An error occured, please [[reload %s]] the page': 'Ha ocurrido un error, por favor [[recargar %s]] la página', '@markmin\x01Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.', '@markmin\x01Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})', '@markmin\x01Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses})': 'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses})', '@markmin\x01Number of entries: **%s**': 'Number of entries: **%s**', '@markmin\x01RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.', 'A new version of web2py is available': 'Hay una nueva versión de web2py disponible', 'A new version of web2py is available: %s': 'Hay una nueva versión de web2py disponible: %s', 'About': 'Acerca de', 'about': 'acerca de', 'About application': 'Acerca de la aplicación', 'Access Control': 'Control de Acceso', 'additional code for your application': 'código adicional para su aplicación', 'admin disabled because no admin password': 'admin deshabilitado por falta de contraseña', 'admin disabled because not supported on google app engine': 'admin deshabilitado, no es soportado en GAE', 'admin disabled because unable to access password file': 'admin deshabilitado, imposible acceder al archivo con la contraseña', 'Admin is disabled because insecure channel': 'Admin deshabilitado, el canal no es seguro', 'Admin is disabled because unsecure channel': 'Admin deshabilitado, el canal no es seguro', 'Administrative interface': 'Interfaz administrativa', 'Administrative Interface': 'Interfaz Administrativa', 'Administrator Password:': 'Contraseña del 
Administrador:', 'Ajax Recipes': 'Recetas AJAX', 'An error occured, please %s the page': 'Ha ocurrido un error, por favor %s la página', 'and rename it (required):': 'y renómbrela (requerido):', 'and rename it:': ' y renómbrelo:', 'Aplicar cambios': 'Aplicar cambios', 'appadmin': 'appadmin', 'appadmin is disabled because insecure channel': 'admin deshabilitado, el canal no es seguro', 'application "%s" uninstalled': 'aplicación "%s" desinstalada', 'application compiled': 'aplicación compilada', 'application is compiled and cannot be designed': 'la aplicación está compilada y no puede ser modificada', 'Apply changes': 'Aplicar cambios', 'Are you sure you want to delete file "%s"?': '¿Está seguro que desea eliminar el archivo "%s"?', 'Are you sure you want to delete this object?': '¿Está seguro que desea borrar este objeto?', 'Are you sure you want to uninstall application "%s"': '¿Está seguro que desea desinstalar la aplicación "%s"', 'Are you sure you want to uninstall application "%s"?': '¿Está seguro que desea desinstalar la aplicación "%s"?', 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCION: Inicio de sesión requiere una conexión segura (HTTPS) o localhost.', 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCION: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.', 'ATTENTION: you cannot edit the running application!': 'ATENCION: no puede modificar la aplicación que está ejecutandose!', 'Authentication': 'Autenticación', 'Available Databases and Tables': 'Bases de datos y tablas disponibles', 'Buy this book': 'Compra este libro', 'Cache': 'Caché', 'cache': 'caché', 'Cache Keys': 'Llaves de la Caché', 'cache, errors and sessions cleaned': 'caché, errores y sesiones eliminados', 'Cambie la contraseña': 'Cambie la contraseña', 'Cannot be empty': 'No puede estar vacío', 'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se puede compilar: hay errores en su aplicación. 
Depure, corrija errores y vuelva a intentarlo.', 'cannot create file': 'no es posible crear archivo', 'cannot upload file "%(filename)s"': 'no es posible subir archivo "%(filename)s"', 'Change Password': 'Cambie la Contraseña', 'Change password': 'Cambie la contraseña', 'change password': 'cambie la contraseña', 'check all': 'marcar todos', 'Check to delete': 'Marque para eliminar', 'clean': 'limpiar', 'Clear CACHE?': '¿Limpiar CACHÉ?', 'Clear DISK': 'Limpiar DISCO', 'Clear RAM': 'Limpiar RAM', 'Click on the link %(link)s to reset your password': 'Pulse en el enlace %(link)s para reiniciar su contraseña', 'click to check for upgrades': 'haga clic para buscar actualizaciones', 'Client IP': 'IP del Cliente', 'Community': 'Comunidad', 'compile': 'compilar', 'compiled application removed': 'aplicación compilada eliminada', 'Components and Plugins': 'Componentes y Plugins', 'Controller': 'Controlador', 'Controllers': 'Controladores', 'controllers': 'controladores', 'Copyright': 'Copyright', 'Correo electrónico inválido': 'Correo electrónico inválido', 'create file with filename:': 'cree archivo con nombre:', 'Create new application': 'Cree una nueva aplicación', 'create new application:': 'nombre de la nueva aplicación:', 'Created By': 'Creado Por', 'Created On': 'Creado En', 'crontab': 'crontab', 'Current request': 'Solicitud en curso', 'Current response': 'Respuesta en curso', 'Current session': 'Sesión en curso', 'currently saved or': 'actualmente guardado o', 'customize me!': '¡Adáptame!', 'data uploaded': 'datos subidos', 'Database': 'Base de datos', 'Database %s select': 'selección en base de datos %s', 'database administration': 'administración base de datos', 'Database Administration (appadmin)': 'Database Administration (appadmin)', 'Date and Time': 'Fecha y Hora', 'db': 'bdd', 'DB Model': 'Modelo BDD', 'defines tables': 'define tablas', 'Delete': 'Eliminar', 'delete': 'eliminar', 'delete all checked': 'eliminar marcados', 'Delete:': 'Eliminar:', 'Demo': 'Demostración', 'Deploy on Google App Engine': 'Despliegue en Google App Engine', 'Deployment Recipes': 'Recetas de despliegue', 'Description': 'Descripción', 'design': 'diseño', 'DESIGN': 'DISEÑO', 'Design for': 'Diseño por', 'DISK': 'DISCO', 'Disk Cache Keys': 'Llaves de Caché en Disco', 'Disk Cleared': 'Disco limpiado', 'Documentation': 'Documentación', "Don't know what to do?": '¿No sabe que hacer?', 'done!': '¡hecho!', 'Download': 'Descargas', 'E-mail': 'Correo electrónico', 'edit': 'editar', 'EDIT': 'EDITAR', 'Edit': 'Editar', 'Edit application': 'Editar aplicación', 'edit controller': 'editar controlador', 'Edit current record': 'Edite el registro actual', 'Edit Profile': 'Editar Perfil', 'edit profile': 'editar perfil', 'Edit This App': 'Edite esta App', 'Editing file': 'Editando archivo', 'Editing file "%s"': 'Editando archivo "%s"', 'Email and SMS': 'Correo electrónico y SMS', 'Email sent': 'Correo electrónico enviado', 'Email verification': 'Verificación de correo', 'Email verified': 'Corre verificado', 'enter a number between %(min)g and %(max)g': 'introduzca un número entre %(min)g y %(max)g', 'enter a value': 'Introduce un valor', 'enter an integer between %(min)g and %(max)g': 'introduzca un entero entre %(min)g y %(max)g', 'enter from %(min)g to %(max)g characters': 'escribe de %(min)g a %(max)g caracteres', 'Error logs for "%(app)s"': 'Bitácora de errores en "%(app)s"', 'errors': 'errores', 'Errors': 'Errores', 'Errors in form, please check it out.': 'Hay errores en el formulario, por favor comprúebelo.', 'Este correo 
electrónico ya tiene una cuenta': 'Este correo electrónico ya tiene una cuenta', 'export as csv file': 'exportar como archivo CSV', 'exposes': 'expone', 'extends': 'extiende', 'failed to reload module': 'la recarga del módulo ha fallado', 'FAQ': 'FAQ', 'file': 'archivo', 'file "%(filename)s" created': 'archivo "%(filename)s" creado', 'file "%(filename)s" deleted': 'archivo "%(filename)s" eliminado', 'file "%(filename)s" uploaded': 'archivo "%(filename)s" subido', 'file "%(filename)s" was not deleted': 'archivo "%(filename)s" no fué eliminado', 'file "%s" of %s restored': 'archivo "%s" de %s restaurado', 'file ## download': 'file ', 'file changed on disk': 'archivo modificado en el disco', 'file does not exist': 'archivo no existe', 'file saved on %(time)s': 'archivo guardado %(time)s', 'file saved on %s': 'archivo guardado %s', 'First name': 'Nombre', 'Forgot username?': '¿Olvidó el nombre de usuario?', 'Forms and Validators': 'Formularios y validadores', 'Free Applications': 'Aplicaciones Libres', 'Functions with no doctests will result in [passed] tests.': 'Funciones sin doctests equivalen a pruebas [aceptadas].', 'Graph Model': 'Graph Model', 'Group %(group_id)s created': 'Grupo %(group_id)s creado', 'Group ID': 'ID de Grupo', 'Group uniquely assigned to user %(id)s': 'Grupo asignado únicamente al usuario %(id)s', 'Groups': 'Grupos', 'Hello World': 'Hola Mundo', 'help': 'ayuda', 'Home': 'Inicio', 'How did you get here?': '¿Cómo llegaste aquí?', 'htmledit': 'htmledit', 'Impersonate': 'Suplantar', 'import': 'importar', 'Import/Export': 'Importar/Exportar', 'includes': 'incluye', 'Index': 'Índice', 'Inicio de sesión': 'Inicio de sesión', 'insert new': 'inserte nuevo', 'insert new %s': 'inserte nuevo %s', 'Installed applications': 'Aplicaciones instaladas', 'Insufficient privileges': 'Privilegios insuficientes', 'internal error': 'error interno', 'Internal State': 'Estado Interno', 'Introduction': 'Introducción', 'Invalid action': 'Acción inválida', 'Invalid email': 'Correo electrónico inválido', 'invalid image': 'imagen inválida', 'Invalid login': 'Inicio de sesión inválido', 'invalid password': 'contraseña inválida', 'Invalid Query': 'Consulta inválida', 'invalid request': 'solicitud inválida', 'Invalid reset password': 'Reinicio de contraseña inválido', 'invalid ticket': 'tiquete inválido', 'Is Active': 'Está Activo', 'Key': 'Llave', 'language file "%(filename)s" created/updated': 'archivo de lenguaje "%(filename)s" creado/actualizado', 'Language files (static strings) updated': 'Archivos de lenguaje (cadenas estáticas) actualizados', 'languages': 'lenguajes', 'Languages': 'Lenguajes', 'languages updated': 'lenguajes actualizados', 'Last name': 'Apellido', 'Last saved on:': 'Guardado en:', 'Layout': 'Diseño de página', 'Layout Plugins': 'Plugins de diseño', 'Layouts': 'Diseños de páginas', 'License for': 'Licencia para', 'Live Chat': 'Chat en vivo', 'loading...': 'cargando...', 'Logged in': 'Sesión iniciada', 'Logged out': 'Sesión finalizada', 'Login': 'Inicio de sesión', 'login': 'inicio de sesión', 'Login disabled by administrator': 'Inicio de sesión deshabilitado por el administrador', 'Login to the Administrative Interface': 'Inicio de sesión para la Interfaz Administrativa', 'logout': 'fin de sesión', 'Logout': 'Fin de sesión', 'Los campos de contraseña no coinciden': 'Los campos de contraseña no coinciden', 'Lost Password': 'Contraseña perdida', 'Lost password?': '¿Olvidó la contraseña?', 'lost password?': '¿olvidó la contraseña?', 'Main Menu': 'Menú principal', 'Manage 
%(action)s': 'Manage %(action)s', 'Manage Access Control': 'Manage Access Control', 'Manage Cache': 'Gestionar la Caché', 'Memberships': 'Memberships', 'Menu Model': 'Modelo "menu"', 'merge': 'combinar', 'Models': 'Modelos', 'models': 'modelos', 'Modified By': 'Modificado Por', 'Modified On': 'Modificado En', 'Modules': 'Módulos', 'modules': 'módulos', 'must be YYYY-MM-DD HH:MM:SS!': '¡debe ser DD/MM/YYYY HH:MM:SS!', 'must be YYYY-MM-DD!': '¡debe ser DD/MM/YYYY!', 'My Sites': 'Mis Sitios', 'Name': 'Nombre', 'Necesitas elegir una facultad': 'Necesitas elegir una facultad', 'new application "%s" created': 'nueva aplicación "%s" creada', 'New password': 'Contraseña nueva', 'New Record': 'Registro nuevo', 'new record inserted': 'nuevo registro insertado', 'next %s rows': 'next %s rows', 'next 100 rows': '100 filas siguientes', 'NO': 'NO', 'No databases in this application': 'No hay bases de datos en esta aplicación', 'No puede estar vacío': 'No puede estar vacío', 'Not authorized': 'No autorizado', 'now': 'ahora', 'Object or table name': 'Nombre del objeto o tabla', 'Old password': 'Contraseña vieja', 'Online examples': 'Ejemplos en línea', 'or import from csv file': 'o importar desde archivo CSV', 'or provide application url:': 'o provea URL de la aplicación:', 'Origin': 'Origen', 'Original/Translation': 'Original/Traducción', 'Other Plugins': 'Otros Plugins', 'Other Recipes': 'Otras Recetas', 'Overview': 'Resumen', 'pack all': 'empaquetar todo', 'pack compiled': 'empaquete compiladas', 'Password': 'Contraseña', 'Password changed': 'Contraseña cambiada', "Password fields don't match": 'Los campos de contraseña no coinciden', 'Password reset': 'Reinicio de contraseña', 'Peeking at file': 'Visualizando archivo', 'Permission': 'Permission', 'Permissions': 'Permissions', 'Phone': 'Teléfono', 'please input your password again': 'por favor introduzca su contraseña otra vez', 'Plugins': 'Plugins', 'Powered by': 'Este sitio usa', 'Preface': 'Prefacio', 'previous %s rows': 'previous %s rows', 'previous 100 rows': '100 filas anteriores', 'Profile': 'Perfil', 'Profile updated': 'Perfil actualizado', 'Prueba con un nombre más largo': 'Prueba con un nombre más largo', 'pygraphviz library not found': 'pygraphviz library not found', 'Python': 'Python', 'Query:': 'Consulta:', 'Quick Examples': 'Ejemplos Rápidos', 'RAM': 'RAM', 'RAM Cache Keys': 'Llaves de la Caché en RAM', 'Ram Cleared': 'Ram Limpiada', 'Recipes': 'Recetas', 'Record': 'Registro', 'record does not exist': 'el registro no existe', 'Record ID': 'ID de Registro', 'Record id': 'Id de registro', 'register': 'regístrese', 'Register': 'Regístrese', 'Registration identifier': 'Identificador de Registro', 'Registration key': 'Llave de registro', 'Registration needs verification': 'Registration needs verification', 'Registration successful': 'Registro con éxito', 'Regístrese': 'Regístrese', 'reload': 'recargar', 'Remember me (for 30 days)': 'Recuérdame (durante 30 días)', 'remove compiled': 'eliminar compiladas', 'Request reset password': 'Solicitar reinicio de contraseña', 'Reset Password key': 'Restaurar Llave de la Contraseña', 'Resolve Conflict file': 'archivo Resolución de Conflicto', 'restore': 'restaurar', 'Retrieve username': 'Recuperar nombre de usuario', 'revert': 'revertir', 'Role': 'Rol', 'Roles': 'Roles', 'Rows in Table': 'Filas en la tabla', 'Rows selected': 'Filas seleccionadas', 'save': 'guardar', 'Save model as...': 'Save model as...', 'Saved file hash:': 'Hash del archivo guardado:', 'Semantic': 'Semántica', 'Services': 'Servicios', 
'session expired': 'sesión expirada', 'shell': 'terminal', 'site': 'sitio', 'Size of cache:': 'Tamaño de la Caché:', 'Solicitar reinicio de contraseña': 'Solicitar reinicio de contraseña', 'some files could not be removed': 'algunos archivos no pudieron ser removidos', 'state': 'estado', 'static': 'estáticos', 'Static files': 'Archivos estáticos', 'Statistics': 'Estadísticas', 'Stylesheet': 'Hoja de estilo', 'Submit': 'Enviar', 'submit': 'enviar', 'Success!': '¡Hecho!', 'Support': 'Soporte', 'Sure you want to delete this object?': '¿Está seguro que desea eliminar este objeto?', 'Table': 'tabla', 'Table name': 'Nombre de la tabla', 'test': 'probar', 'Testing application': 'Probando aplicación', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" es una condición como "db.tabla1.campo1==\'valor\'". Algo como "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.', 'the application logic, each URL path is mapped in one exposed function in the controller': 'la lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador', 'The Core': 'El Núcleo', 'the data representation, define database tables and sets': 'la representación de datos, define tablas y conjuntos de base de datos', 'The output of the file is a dictionary that was rendered by the view %s': 'La salida de dicha función es un diccionario que es desplegado por la vista %s', 'the presentations layer, views are also known as templates': 'la capa de presentación, las vistas también son llamadas plantillas', 'The Views': 'Las Vistas', 'There are no controllers': 'No hay controladores', 'There are no models': 'No hay modelos', 'There are no modules': 'No hay módulos', 'There are no static files': 'No hay archivos estáticos', 'There are no translators, only default language is supported': 'No hay traductores, sólo el lenguaje por defecto es soportado', 'There are no views': 'No hay vistas', 'these files are served without processing, your images go here': 'estos archivos son servidos sin procesar, sus imágenes van aquí', 'This App': 'Esta Aplicación', 'This email already has an account': 'Este correo electrónico ya tiene una cuenta', 'This is a copy of the scaffolding application': 'Esta es una copia de la aplicación de andamiaje', 'This is the %(filename)s template': 'Esta es la plantilla %(filename)s', 'Ticket': 'Tiquete', 'Time in Cache (h:m:s)': 'Tiempo en Caché (h:m:s)', 'Timestamp': 'Marca de tiempo', 'to previous version.': 'a la versión previa.', 'Traceback': 'Traceback', 'translation strings for the application': 'cadenas de carácteres de traducción para la aplicación', 'try': 'intente', 'try something like': 'intente algo como', 'Twitter': 'Twitter', 'Unable to check for upgrades': 'No es posible verificar la existencia de actualizaciones', 'unable to create application "%s"': 'no es posible crear la aplicación "%s"', 'unable to delete file "%(filename)s"': 'no es posible eliminar el archivo "%(filename)s"', 'Unable to download': 'No es posible la descarga', 'Unable to download app': 'No es posible descarga la aplicación', 'unable to parse csv file': 'no es posible analizar el archivo CSV', 'unable to uninstall "%s"': 'no es posible instalar "%s"', 'uncheck all': 'desmarcar todos', 'uninstall': 'desinstalar', 'update': 'actualizar', 'update all languages': 'actualizar todos los lenguajes', 'Update:': 'Actualice:', 'upload application:': 'subir aplicación:', 'Upload existing application': 'Suba 
esta aplicación', 'upload file:': 'suba archivo:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para crear consultas más complejas.', 'User': 'Usuario', 'User %(id)s is impersonating %(other_id)s': 'El usuario %(id)s está suplantando %(other_id)s', 'User %(id)s Logged-in': 'El usuario %(id)s inició la sesión', 'User %(id)s Logged-out': 'El usuario %(id)s finalizó la sesión', 'User %(id)s Password changed': 'Contraseña del usuario %(id)s cambiada', 'User %(id)s Password reset': 'Contraseña del usuario %(id)s reiniciada', 'User %(id)s Profile updated': 'Actualizado el perfil del usuario %(id)s', 'User %(id)s Registered': 'Usuario %(id)s Registrado', 'User %(id)s Username retrieved': 'Se ha recuperado el nombre de usuario del usuario %(id)s', 'User Id': 'Id de Usuario', 'User ID': 'ID de Usuario', 'Username': 'Nombre de usuario', 'Username retrieve': 'Recuperar nombre de usuario',
'value already in database or empty': 'el valor ya existe en la base de datos o está vacío', 'value not in database': 'el valor no está en la base de datos', 'Verify Password': 'Verificar Contraseña', 'versioning': 'versiones', 'Videos': 'Vídeos', 'View': 'Vista', 'view': 'vista', 'views': 'vistas', 'Views': 'Vistas', 'web2py is up to date': 'web2py está actualizado', 'web2py Recent Tweets': 'Tweets Recientes de web2py', 'Welcome': 'Bienvenido', 'Welcome %(username)s! Click on the link %(link)s to verify your email': 'Bienvenido a Evadoc %(username)s! Haz clic en este enlace: %(link)s para verificar tu correo electronico', 'Welcome %s': 'Bienvenido %s', 'Welcome to web2py': 'Bienvenido a web2py', 'Welcome to web2py!': '¡Bienvenido a web2py!', 'Which called the function %s located in the file %s': 'La cual llamó la función %s localizada en el archivo %s', 'Working...': 'Trabajando...', 'YES': 'SÍ', 'You are successfully running web2py': 'Usted está ejecutando web2py exitosamente', 'You can modify this application and adapt it to your needs': 'Usted puede modificar esta aplicación y adaptarla a sus necesidades', 'You visited the url %s': 'Usted visitó la url %s', 'Your username is: %(username)s': 'Su nombre de usuario es: %(username)s', }
'Users': 'Usuarios',
random_line_split
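Each entry in this language file maps a source string to its Spanish translation, with %(name)s and %s placeholders carried through untranslated. web2py resolves these through its T() translator; below is a minimal standalone sketch of the lookup-then-substitute behaviour (the translate helper is hypothetical, not web2py's API, and the table holds just a few entries copied from the es.py row above):

translations = {
    'Logged in': 'Sesión iniciada',
    'Welcome %s': 'Bienvenido %s',
    'Click on the link %(link)s to reset your password':
        'Pulse en el enlace %(link)s para reiniciar su contraseña',
}

def translate(message, *args, **kwargs):
    text = translations.get(message, message)  # fall back to the source string
    if kwargs:
        return text % kwargs
    if args:
        return text % args
    return text

print(translate('Welcome %s', 'Ana'))
print(translate('Click on the link %(link)s to reset your password',
                link='https://example.com/reset'))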
views.py
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required

from registration.backends import get_backend


def register(request, backend='default',
             template_name='registration/registration_form.html'):
@never_cache
def verify(request, backend='default',
           template_name='registration/registration_verify.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)
    if profile:
        # check to see if moderation for this profile is required and whether or
        # not it is a verified account.
        if backend.moderation_required(request, profile):
            moderation_required = True
            backend.verify(request, profile, **kwargs)
        else:
            moderation_required = False
            # attempt to activate this user
            backend.activate(request, profile, **kwargs)
    else:
        moderation_required = None

    return render(request, template_name, {
        'profile': profile,
        'moderation_required': moderation_required,
    })


@never_cache
@login_required()
def moderate(request, backend='default',
             template_name='registration/registration_moderate.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)

    form_class = backend.get_moderation_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST)
        if form.is_valid():
            backend.moderate(request, form, profile, **kwargs)
            return redirect(backend.post_moderation_redirect(request, profile))
    else:
        form = form_class()

    return render(request, template_name, {
        'form': form,
        'profile': profile,
    })


@permission_required('registration.change_registrationprofile')
@login_required()
def moderate_list(request, backend='default',
                  template_name='registration/registration_moderate_list.html'):
    backend = get_backend(backend)
    profiles = backend.get_unmoderated_profiles(request)

    return render(request, template_name, {
        'profiles': profiles,
    })
    backend = get_backend(backend)

    # determine if registration is currently allowed. the ``request`` object
    # is passed which can be used to selectively disallow registration based on
    # the user-agent
    if not backend.registration_allowed(request):
        return redirect(*backend.registration_closed_redirect(request))

    form_class = backend.get_registration_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            user = backend.register(request, form)
            return redirect(backend.post_registration_redirect(request, user))
    else:
        form = form_class()

    return render(request, template_name, {'form': form})
identifier_body
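Every view in this file delegates to an object returned by get_backend, so the required backend interface can be read off the calls: registration_allowed, get_registration_form_class, register, post_registration_redirect, and so on. A skeletal backend satisfying just the register view, with placeholder behaviour (the class itself is hypothetical; only the method names come from the code above):

class MinimalRegistrationBackend:
    """Bare-bones backend sufficient for the register() view above."""

    def registration_allowed(self, request):
        # Could inspect request (e.g. the user-agent) to close registration.
        return True

    def registration_closed_redirect(self, request):
        return ('registration_closed',)  # positional args for redirect()

    def get_registration_form_class(self, request):
        from django.contrib.auth.forms import UserCreationForm
        return UserCreationForm

    def register(self, request, form):
        return form.save()

    def post_registration_redirect(self, request, user):
        return 'registration_complete'  # URL name passed to redirect()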
views.py
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required

from registration.backends import get_backend


def register(request, backend='default',
             template_name='registration/registration_form.html'):
    backend = get_backend(backend)

    # determine if registration is currently allowed. the ``request`` object
    # is passed which can be used to selectively disallow registration based on
    # the user-agent
    if not backend.registration_allowed(request):
        return redirect(*backend.registration_closed_redirect(request))

    form_class = backend.get_registration_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            user = backend.register(request, form)
            return redirect(backend.post_registration_redirect(request, user))
    else:
        form = form_class()

    return render(request, template_name, {'form': form})


@never_cache
def verify(request, backend='default',
           template_name='registration/registration_verify.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)
    if profile:
        # check to see if moderation for this profile is required and whether or
        # not it is a verified account.
        if backend.moderation_required(request, profile):
            moderation_required = True
            backend.verify(request, profile, **kwargs)
        else:
            moderation_required = False
            # attempt to activate this user
            backend.activate(request, profile, **kwargs)
    else:
        moderation_required = None

    return render(request, template_name, {
        'profile': profile,
        'moderation_required': moderation_required,
    })


@never_cache
@login_required()
def moderate(request, backend='default',
             template_name='registration/registration_moderate.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)

    form_class = backend.get_moderation_form_class(request)

    if request.method == 'POST':
    else:
        form = form_class()

    return render(request, template_name, {
        'form': form,
        'profile': profile,
    })


@permission_required('registration.change_registrationprofile')
@login_required()
def moderate_list(request, backend='default',
                  template_name='registration/registration_moderate_list.html'):
    backend = get_backend(backend)
    profiles = backend.get_unmoderated_profiles(request)

    return render(request, template_name, {
        'profiles': profiles,
    })
        form = form_class(request.POST)
        if form.is_valid():
            backend.moderate(request, form, profile, **kwargs)
            return redirect(backend.post_moderation_redirect(request, profile))
conditional_block
views.py
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required

from registration.backends import get_backend


def register(request, backend='default',
             template_name='registration/registration_form.html'):
    backend = get_backend(backend)

    # determine if registration is currently allowed. the ``request`` object
    # is passed which can be used to selectively disallow registration based on
    # the user-agent
    if not backend.registration_allowed(request):
        return redirect(*backend.registration_closed_redirect(request))

    form_class = backend.get_registration_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            user = backend.register(request, form)
            return redirect(backend.post_registration_redirect(request, user))
    else:
        form = form_class()

    return render(request, template_name, {'form': form})


@never_cache
def verify(request, backend='default',
           template_name='registration/registration_verify.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)
    if profile:
        # check to see if moderation for this profile is required and whether or
        # not it is a verified account.
        if backend.moderation_required(request, profile):
            moderation_required = True
            backend.verify(request, profile, **kwargs)
        else:
            moderation_required = False
            # attempt to activate this user
            backend.activate(request, profile, **kwargs)
    else:
        moderation_required = None

    return render(request, template_name, {
        'profile': profile,
        'moderation_required': moderation_required,
    })


@never_cache
@login_required()
def moderate(request, backend='default',
             template_name='registration/registration_moderate.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)

    form_class = backend.get_moderation_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST)
        if form.is_valid():
            backend.moderate(request, form, profile, **kwargs)
            return redirect(backend.post_moderation_redirect(request, profile))
    else:
        form = form_class()

    return render(request, template_name, {
        'form': form,
        'profile': profile,
    })


@permission_required('registration.change_registrationprofile')
@login_required()
def moderate_list(request, backend='default',
                  template_name='registration/registration_moderate_list.html'):
    backend = get_backend(backend)
    profiles = backend.get_unmoderated_profiles(request)

    return render(request, template_name, {
        'profiles': profiles,
    })
from django.shortcuts import redirect from django.shortcuts import render from django.views.decorators.cache import never_cache
random_line_split
views.py
from django.shortcuts import redirect from django.shortcuts import render from django.views.decorators.cache import never_cache from django.contrib.auth.decorators import permission_required from django.contrib.auth.decorators import login_required from registration.backends import get_backend def
(request, backend='default', template_name='registration/registration_form.html'): backend = get_backend(backend) # determine if registration is currently allowed. the ``request`` object # is passed, which can be used to selectively disallow registration based on # the user-agent if not backend.registration_allowed(request): return redirect(*backend.registration_closed_redirect(request)) form_class = backend.get_registration_form_class(request) if request.method == 'POST': form = form_class(request.POST, request.FILES) if form.is_valid(): user = backend.register(request, form) return redirect(backend.post_registration_redirect(request, user)) else: form = form_class() return render(request, template_name, {'form': form}) @never_cache def verify(request, backend='default', template_name='registration/registration_verify.html', **kwargs): backend = get_backend(backend) profile = backend.get_profile(request, **kwargs) if profile: # check to see if moderation for this profile is required and whether or # not it is a verified account. if backend.moderation_required(request, profile): moderation_required = True backend.verify(request, profile, **kwargs) else: moderation_required = False # attempt to activate this user backend.activate(request, profile, **kwargs) else: moderation_required = None return render(request, template_name, { 'profile': profile, 'moderation_required': moderation_required, }) @never_cache @login_required() def moderate(request, backend='default', template_name='registration/registration_moderate.html', **kwargs): backend = get_backend(backend) profile = backend.get_profile(request, **kwargs) form_class = backend.get_moderation_form_class(request) if request.method == 'POST': form = form_class(request.POST) if form.is_valid(): backend.moderate(request, form, profile, **kwargs) return redirect(backend.post_moderation_redirect(request, profile)) else: form = form_class() return render(request, template_name, { 'form': form, 'profile': profile, }) @permission_required('registration.change_registrationprofile') @login_required() def moderate_list(request, backend='default', template_name='registration/registration_moderate_list.html'): backend = get_backend(backend) profiles = backend.get_unmoderated_profiles(request) return render(request, template_name, { 'profiles': profiles, })
register
identifier_name
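All three views.py records above reconstruct the same module. For orientation, here is a hypothetical URLconf that could wire these views up (requires Django); the route names and the captured kwargs 'token' and 'profile_id' are illustrative guesses, since verify() and moderate() simply forward **kwargs to backend.get_profile():

from django.urls import path
from registration import views

urlpatterns = [
    path('register/', views.register, name='register'),
    path('verify/<str:token>/', views.verify, name='verify'),
    path('moderate/<int:profile_id>/', views.moderate, name='moderate'),
    path('moderate/', views.moderate_list, name='moderate_list'),
]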
index.tsx
/** * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import React from 'react'; import Card from 'components/common/Card'; import CommitProgress from 'components/common/CommitProgress'; import ListingCard from 'components/common/ListingCard'; import StrippedCol from 'components/common/StrippedCol'; import ListingCardWithCommitStatus from 'components/customer/my-commits/ListingCardWithCommitStatus'; import MobilePromptSample from 'components/design-samples/MobilePromptSample'; import SendItemModalSample from 'components/design-samples/SendItemModalSample'; import PaidCustomerCollection from 'components/merchant/listing-details/PaidCustomerCollection'; import {formatRFC3339} from 'date-fns'; import {Commit, Listing, Money} from 'interfaces'; import Container from 'muicss/lib/react/container'; const SAMPLE_IMG_URL = 'https://picsum.photos/seed/picsum/200/300'; const SAMPLE_END_DATE = formatRFC3339(new Date('2020-07-10T23:59:59Z')); const SAMPLE_PRICE: Money = { currency: 'USD', dollars: 85, cents: 0, }; const SAMPLE_OLD_PRICE: Money = { currency: 'USD', dollars: 121, cents: 0, }; const SAMPLE_LISTING: Listing = { merchantId: 1, name: 'Some Listing', price: SAMPLE_PRICE, oldPrice: SAMPLE_OLD_PRICE, imgUrl: SAMPLE_IMG_URL, description: '', deadline: SAMPLE_END_DATE, minCommits: 100, id: 1, numCommits: 0, numPaid: 0, numCompleted: 0, listingStatus: 'ongoing', }; const SAMPLE_PAID_COMMITS: Commit[] = [ { commitStatus: 'successful', createdAt: new Date('2020-07-17T10:29:30.639Z'), listingId: 5068871128055808, customerId: 5683780991844352, fulfilmentDetails: { name: 'Buyer Name', contactNumber: '+911234567890', address: 'Rainbow Land, Pusheen St', }, id: 5759630718271488, }, { commitStatus: 'successful', createdAt: new Date('2020-07-17T10:29:30.639Z'), listingId: 5068871128055808, customerId: 5683780991844352, fulfilmentDetails: { name: 'Slightly Longer Name', contactNumber: '+911234567890', address: 'Rainbow Land, Pusheen St', }, id: 5759630718271488, }, { commitStatus: 'successful', createdAt: new Date('2020-07-17T10:29:30.639Z'), listingId: 5068871128055808, customerId: 5683780991844352, fulfilmentDetails: { name: 'Very Very Longgggggggggggggggggg Buyer Name', contactNumber: '+911234567890', address: 'Rainbow Land, Pusheen St', }, id: 5759630718271488, }, ]; const DesignSamplesPage: React.FC = () => ( <Container> <h1>Design Samples</h1> <Container> <h2>Cards</h2> <StrippedCol xs={6}> <Card img={{ url: SAMPLE_IMG_URL, alt: 'Random Image', }} > Card with image on top. Risus quis varius quam quisque id diam vel quam elementum pulvinar etiam. </Card> </StrippedCol> <StrippedCol xs={6}> <ListingCard listingName="Some Listing" price={SAMPLE_PRICE} oldPrice={SAMPLE_OLD_PRICE} endDate={SAMPLE_END_DATE} imgUrl={SAMPLE_IMG_URL}
<ListingCard listingName="Some Listing" price={SAMPLE_PRICE} oldPrice={SAMPLE_OLD_PRICE} endDate={SAMPLE_END_DATE} imgUrl={SAMPLE_IMG_URL} horizontal > <CommitProgress numCommits={70} minCommits={100} /> </ListingCard> </StrippedCol> <ListingCardWithCommitStatus listing={SAMPLE_LISTING} commitStatus="successful" /> <ListingCardWithCommitStatus listing={SAMPLE_LISTING} commitStatus="paid" /> <ListingCardWithCommitStatus listing={SAMPLE_LISTING} commitStatus="completed" /> <ListingCardWithCommitStatus listing={SAMPLE_LISTING} commitStatus="unsuccessful" /> <StrippedCol xs={12}> <Card img={{ url: SAMPLE_IMG_URL, alt: 'Random Image', }} horizontal > Card with image on left. Risus quis varius quam quisque id diam vel quam elementum pulvinar etiam non quam lacus est pellentesque elit. </Card> </StrippedCol> <StrippedCol xs={12}> <Card> Card with no image. Risus quis varius quam quisque id diam vel quam elementum pulvinar etiam non quam lacus est pellentesque elit. </Card> </StrippedCol> </Container> <Container> <h2>Progress Bars</h2> <StrippedCol xs={12}> <CommitProgress numCommits={70} minCommits={100} /> </StrippedCol> <StrippedCol xs={12}> <CommitProgress numCommits={70} minCommits={100} textPos="top" /> </StrippedCol> <StrippedCol xs={12}> <CommitProgress numCommits={70} minCommits={100} textPos="none" thicker /> </StrippedCol> </Container> <Container> <h2>Mobile Prompt</h2> <MobilePromptSample /> </Container> <Container> <h2>Paid Committed Customers</h2> <PaidCustomerCollection paidCommits={SAMPLE_PAID_COMMITS} /> </Container> <Container> <h2>Send Item Modal</h2> <SendItemModalSample /> </Container> </Container> ); export default DesignSamplesPage;
> <CommitProgress numCommits={70} minCommits={100} textPos="none" /> </ListingCard> </StrippedCol> <StrippedCol xs={12}>
random_line_split
css.js
CodeMirror.defineMode("css", function(config) { var indentUnit = config.indentUnit, type; function ret(style, tp) {type = tp; return style;} function
(stream, state) { var ch = stream.next(); if (ch == "@") {stream.eatWhile(/\w/); return ret("meta", stream.current());} else if (ch == "/" && stream.eat("*")) { state.tokenize = tokenCComment; return tokenCComment(stream, state); } else if (ch == "<" && stream.eat("!")) { state.tokenize = tokenSGMLComment; return tokenSGMLComment(stream, state); } else if (ch == "=") return ret(null, "compare"); else if ((ch == "~" || ch == "|") && stream.eat("=")) return ret(null, "compare"); else if (ch == "\"" || ch == "'") { state.tokenize = tokenString(ch); return state.tokenize(stream, state); } else if (ch == "#") { stream.eatWhile(/\w/); return ret("atom", "hash"); } else if (ch == "!") { stream.match(/^\s*\w*/); return ret("keyword", "important"); } else if (/\d/.test(ch)) { stream.eatWhile(/[\w.%]/); return ret("number", "unit"); } else if (/[,.+>*\/]/.test(ch)) { return ret(null, "select-op"); } else if (/[;{}:\[\]]/.test(ch)) { return ret(null, ch); } else { stream.eatWhile(/[\w\\\-_]/); return ret("variable", "variable"); } } function tokenCComment(stream, state) { var maybeEnd = false, ch; while ((ch = stream.next()) != null) { if (maybeEnd && ch == "/") { state.tokenize = tokenBase; break; } maybeEnd = (ch == "*"); } return ret("comment", "comment"); } function tokenSGMLComment(stream, state) { var dashes = 0, ch; while ((ch = stream.next()) != null) { if (dashes >= 2 && ch == ">") { state.tokenize = tokenBase; break; } dashes = (ch == "-") ? dashes + 1 : 0; } return ret("comment", "comment"); } function tokenString(quote) { return function(stream, state) { var escaped = false, ch; while ((ch = stream.next()) != null) { if (ch == quote && !escaped) break; escaped = !escaped && ch == "\\"; } if (!escaped) state.tokenize = tokenBase; return ret("string", "string"); }; } return { startState: function(base) { return {tokenize: tokenBase, baseIndent: base || 0, stack: []}; }, token: function(stream, state) { if (stream.eatSpace()) return null; var style = state.tokenize(stream, state); var context = state.stack[state.stack.length-1]; if (type == "hash" && context == "rule") style = "atom"; else if (style == "variable") { if (context == "rule") style = "number"; else if (!context || context == "@media{") style = "tag"; } if (context == "rule" && /^[\{\};]$/.test(type)) state.stack.pop(); if (type == "{") { if (context == "@media") state.stack[state.stack.length-1] = "@media{"; else state.stack.push("{"); } else if (type == "}") state.stack.pop(); else if (type == "@media") state.stack.push("@media"); else if (context == "{" && type != "comment") state.stack.push("rule"); return style; }, indent: function(state, textAfter) { var n = state.stack.length; if (/^\}/.test(textAfter)) n -= state.stack[state.stack.length-1] == "rule" ? 2 : 1; return state.baseIndent + n * indentUnit; }, electricChars: "}" }; }); CodeMirror.defineMIME("text/css", "css");
tokenBase
identifier_name
css.js
CodeMirror.defineMode("css", function(config) { var indentUnit = config.indentUnit, type; function ret(style, tp) {type = tp; return style;} function tokenBase(stream, state) { var ch = stream.next(); if (ch == "@") {stream.eatWhile(/\w/); return ret("meta", stream.current());} else if (ch == "/" && stream.eat("*")) { state.tokenize = tokenCComment; return tokenCComment(stream, state); } else if (ch == "<" && stream.eat("!")) { state.tokenize = tokenSGMLComment; return tokenSGMLComment(stream, state); } else if (ch == "=") return ret(null, "compare"); else if ((ch == "~" || ch == "|") && stream.eat("=")) return ret(null, "compare"); else if (ch == "\"" || ch == "'") { state.tokenize = tokenString(ch); return state.tokenize(stream, state); } else if (ch == "#") { stream.eatWhile(/\w/); return ret("atom", "hash"); } else if (ch == "!") { stream.match(/^\s*\w*/); return ret("keyword", "important"); } else if (/\d/.test(ch)) { stream.eatWhile(/[\w.%]/); return ret("number", "unit"); } else if (/[,.+>*\/]/.test(ch)) { return ret(null, "select-op"); } else if (/[;{}:\[\]]/.test(ch)) { return ret(null, ch); } else { stream.eatWhile(/[\w\\\-_]/); return ret("variable", "variable"); } } function tokenCComment(stream, state) { var maybeEnd = false, ch; while ((ch = stream.next()) != null) { if (maybeEnd && ch == "/") { state.tokenize = tokenBase; break; } maybeEnd = (ch == "*"); } return ret("comment", "comment"); } function tokenSGMLComment(stream, state) { var dashes = 0, ch; while ((ch = stream.next()) != null) { if (dashes >= 2 && ch == ">") { state.tokenize = tokenBase; break; } dashes = (ch == "-") ? dashes + 1 : 0; } return ret("comment", "comment"); } function tokenString(quote)
return { startState: function(base) { return {tokenize: tokenBase, baseIndent: base || 0, stack: []}; }, token: function(stream, state) { if (stream.eatSpace()) return null; var style = state.tokenize(stream, state); var context = state.stack[state.stack.length-1]; if (type == "hash" && context == "rule") style = "atom"; else if (style == "variable") { if (context == "rule") style = "number"; else if (!context || context == "@media{") style = "tag"; } if (context == "rule" && /^[\{\};]$/.test(type)) state.stack.pop(); if (type == "{") { if (context == "@media") state.stack[state.stack.length-1] = "@media{"; else state.stack.push("{"); } else if (type == "}") state.stack.pop(); else if (type == "@media") state.stack.push("@media"); else if (context == "{" && type != "comment") state.stack.push("rule"); return style; }, indent: function(state, textAfter) { var n = state.stack.length; if (/^\}/.test(textAfter)) n -= state.stack[state.stack.length-1] == "rule" ? 2 : 1; return state.baseIndent + n * indentUnit; }, electricChars: "}" }; }); CodeMirror.defineMIME("text/css", "css");
{ return function(stream, state) { var escaped = false, ch; while ((ch = stream.next()) != null) { if (ch == quote && !escaped) break; escaped = !escaped && ch == "\\"; } if (!escaped) state.tokenize = tokenBase; return ret("string", "string"); }; }
identifier_body
css.js
CodeMirror.defineMode("css", function(config) { var indentUnit = config.indentUnit, type; function ret(style, tp) {type = tp; return style;} function tokenBase(stream, state) { var ch = stream.next(); if (ch == "@") {stream.eatWhile(/\w/); return ret("meta", stream.current());} else if (ch == "/" && stream.eat("*")) { state.tokenize = tokenCComment; return tokenCComment(stream, state); } else if (ch == "<" && stream.eat("!")) { state.tokenize = tokenSGMLComment; return tokenSGMLComment(stream, state); } else if (ch == "=") return ret(null, "compare"); else if ((ch == "~" || ch == "|") && stream.eat("=")) return ret(null, "compare"); else if (ch == "\"" || ch == "'") { state.tokenize = tokenString(ch); return state.tokenize(stream, state); } else if (ch == "#") { stream.eatWhile(/\w/); return ret("atom", "hash"); } else if (ch == "!") { stream.match(/^\s*\w*/); return ret("keyword", "important"); } else if (/\d/.test(ch)) { stream.eatWhile(/[\w.%]/); return ret("number", "unit"); } else if (/[,.+>*\/]/.test(ch)) { return ret(null, "select-op"); } else if (/[;{}:\[\]]/.test(ch)) { return ret(null, ch); } else { stream.eatWhile(/[\w\\\-_]/); return ret("variable", "variable"); } } function tokenCComment(stream, state) { var maybeEnd = false, ch; while ((ch = stream.next()) != null) { if (maybeEnd && ch == "/") { state.tokenize = tokenBase; break; } maybeEnd = (ch == "*"); } return ret("comment", "comment"); } function tokenSGMLComment(stream, state) { var dashes = 0, ch; while ((ch = stream.next()) != null) { if (dashes >= 2 && ch == ">") { state.tokenize = tokenBase; break; } dashes = (ch == "-") ? dashes + 1 : 0; } return ret("comment", "comment"); } function tokenString(quote) { return function(stream, state) { var escaped = false, ch; while ((ch = stream.next()) != null) { if (ch == quote && !escaped) break; escaped = !escaped && ch == "\\"; } if (!escaped) state.tokenize = tokenBase; return ret("string", "string"); }; } return { startState: function(base) { return {tokenize: tokenBase, baseIndent: base || 0, stack: []}; }, token: function(stream, state) { if (stream.eatSpace()) return null; var style = state.tokenize(stream, state); var context = state.stack[state.stack.length-1]; if (type == "hash" && context == "rule") style = "atom"; else if (style == "variable") { if (context == "rule") style = "number"; else if (!context || context == "@media{") style = "tag"; } if (context == "rule" && /^[\{\};]$/.test(type)) state.stack.pop(); if (type == "{") { if (context == "@media") state.stack[state.stack.length-1] = "@media{"; else state.stack.push("{"); } else if (type == "}") state.stack.pop(); else if (type == "@media") state.stack.push("@media"); else if (context == "{" && type != "comment") state.stack.push("rule"); return style; },
n -= state.stack[state.stack.length-1] == "rule" ? 2 : 1; return state.baseIndent + n * indentUnit; }, electricChars: "}" }; }); CodeMirror.defineMIME("text/css", "css");
indent: function(state, textAfter) { var n = state.stack.length; if (/^\}/.test(textAfter))
random_line_split
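The three css.js records above all split the same CodeMirror mode. Its central trick is that multi-line constructs (comments, strings) are handled by swapping the function stored in state.tokenize. A minimal Python sketch of that state-swapping pattern, restricted to /* ... */ comments and with made-up token names:

def token_base(state, text, i):
    # Default tokenizer: on '/*' hand control to the comment tokenizer,
    # mirroring `state.tokenize = tokenCComment` in the mode above.
    if text.startswith('/*', i):
        state['tokenize'] = token_comment
        return token_comment(state, text, i + 2)
    return 'char', i + 1

def token_comment(state, text, i):
    end = text.find('*/', i)
    if end == -1:                      # comment continues past this chunk
        return 'comment', len(text)
    state['tokenize'] = token_base     # hand control back on '*/'
    return 'comment', end + 2

def tokenize(text):
    state, i, out = {'tokenize': token_base}, 0, []
    while i < len(text):
        style, i = state['tokenize'](state, text, i)
        out.append(style)
    return out

# tokenize('a/*b*/c') -> ['char', 'comment', 'char']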
scheduler_class.py
""" MIT License Copyright (c) 2017 cgalleguillosm, AlessioNetti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging from sys import maxsize from random import seed from abc import abstractmethod, ABC from sortedcontainers.sortedlist import SortedListWithKey from enum import Enum from copy import deepcopy from accasim.base.resource_manager_class import ResourceManager from accasim.base.allocator_class import AllocatorBase class DispatcherError(Exception): pass class JobVerification(Enum): REJECT = -1 # All jobs are rejected NO_CHECK = 0 # No verification CHECK_TOTAL = 1 # Total requested resources are verified CHECK_REQUEST = 2 # Each node x resources are verified class SchedulerBase(ABC): """ This class allows to implement dispatching methods by integrating with an implementation of this class an allocator (:class:`accasim.base.allocator_class.AllocatorBase`). An implementation of this class could also serve as a entire dispatching method if the allocation class is not used as default (:class:`.allocator` = None), but the resource manager must be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`. """ MAXSIZE = maxsize ALLOW_MAPPING_SAME_NODE = True def __init__(self, _seed, allocator=None, job_check=JobVerification.CHECK_REQUEST, **kwargs): """ Construct a scheduler :param seed: Seed for the random state :param resource_manager: A Resource Manager object for dealing with system resources. :param allocator: Allocator object to be used by the scheduler to allocater after schedule generation. If an allocator isn't defined, the scheduler class must generate the entire dispatching plan. :param job_check: A job may be rejected if it doesnt comply with: - JobVerification.REJECT: Any job is rejected - JobVerification.NO_CHECK: All jobs are accepted - JobVerification.CHECK_TOTAL: If the job requires more resources than the available in the system. - JobVerification.CHECK_REQUEST: if an individual request by node requests more resources than the available one. :param kwargs: - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate jobs as much as possible. Otherwise, the allocation will stop after the first fail. 
""" seed(_seed) self._counter = 0 self.allocator = None self._logger = logging.getLogger('accasim') self._system_capacity = None self._nodes_capacity = None self.resource_manager = None if allocator: assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler' self.allocator = allocator # self.set_resource_manager(resource_manager) assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__) if job_check == JobVerification.REJECT: print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.') self._job_check = job_check # Check resources self._min_required_availability = kwargs.pop('min_resources', None) # ['core', 'mem']s # Skip jobs during allocation self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False) @property def name(self): """ Name of the schedulign method """ raise NotImplementedError @abstractmethod def get_id(self): """ Must return the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ raise NotImplementedError @abstractmethod def scheduling_method(self, cur_time, es_dict, es): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the job events :param es: events to be scheduled :return a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ raise Exception('This function must be implemented!!') def set_resource_manager(self, resource_manager): """ Set a resource manager. :param resource_manager: An instantiation of a resource_manager class or None """ if resource_manager: if self.allocator: self.allocator.set_resource_manager(resource_manager) assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler' self.resource_manager = resource_manager else: self.resource_manager = None def schedule(self, cur_time, es_dict, es): """ Method for schedule. It calls the specific scheduling method. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids. """ assert(self.resource_manager is not None), 'The resource manager is not defined. It must defined prior to run the simulation.' self._counter += 1 self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter)) self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es))) self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage)) rejected = [] # At least a job need 1 core and 1 kb/mb/gb of mem to run if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]): self._logger.debug("There is no availability of one of the min required resource to run a job. The dispatching process will be delayed until there is enough resources.") return [(None, e, []) for e in es], rejected accepted = [] # Verify jobs with the defined Job Policy for e in es: job = es_dict[e] if not job.get_checked() and not self._check_job_request(job): if self._job_check != JobVerification.REJECT: self._logger.warning('{} has been rejected by the dispatcher. 
({})'.format(e, self._job_check)) rejected.append(e) continue accepted.append(job) to_allocate = [] # On accepted jobs by policy, try to schedule with the scheduling policy if accepted: to_allocate, to_reject = self.scheduling_method(cur_time, accepted, es_dict) rejected += to_reject for e in to_reject: self._logger.warning('{} has been rejected by the dispatcher. (Scheduling policy)'.format(e)) # If there are scheduled jobs and an allocator defined, try to allocate the scheduled jobs. if to_allocate and self.allocator: dispatching_plan = self.allocator.allocate(to_allocate, cur_time, skip=self.skip_jobs_on_allocation) else: dispatching_plan = to_allocate return dispatching_plan, rejected def _check_job_request(self, _job): """ Simple method that checks if the loaded _job violates the system's resource constraints. :param _job: Job object :return: True if the _job is valid, false otherwise """ _job.set_checked(True) if self._job_check == JobVerification.REJECT: return False elif self._job_check == JobVerification.NO_CHECK: return True elif self._job_check == JobVerification.CHECK_TOTAL: # We verify that the _job does not violate the system's resource constraints by comparing the total if not self._system_capacity: self._system_capacity = self.resource_manager.system_capacity('total') return not any([_job.requested_resources[res] * _job.requested_nodes > self._system_capacity[res] for res in _job.requested_resources.keys()]) elif self._job_check == JobVerification.CHECK_REQUEST: if not self._nodes_capacity: self._nodes_capacity = self.resource_manager.system_capacity('nodes') # We verify the _job request can be fitted in the system _requested_resources = _job.requested_resources _requested_nodes = _job.requested_nodes _fits = 0 _diff_node = 0 for _node, _attrs in self._nodes_capacity.items(): # How many times a request fits on the node _nfits = min([_attrs[_attr] // req for _attr, req in _requested_resources.items() if req > 0 ]) # Update current number of times the current job fits in the nodes if _nfits > 0: _fits += _nfits _diff_node += 1 if self.ALLOW_MAPPING_SAME_NODE: # Since _fits >> _diff_node this logical comparison is omitted. if _fits >= _requested_nodes: return True else: if _diff_node >= _requested_nodes: return True return False raise DispatcherError('Invalid option.') def __str__(self): return self.get_id() class SimpleHeuristic(SchedulerBase): """ Simple scheduler that sorts the events depending on the chosen policy. If a single job allocation fails, all subsequent jobs fail too. Sorting is defined by a name and sort function parameters """ def __init__(self, seed, allocator, name, sorting_parameters, **kwargs): SchedulerBase.__init__(self, seed, allocator, **kwargs) self.name = name self.sorting_parameters = sorting_parameters def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.__class__.__name__, self.name, self.allocator.get_id()]) def scheduling_method(self, cur_time, jobs, es_dict): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the events
:param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), and an array of rejected job ids """ to_reject = [] to_schedule = SortedListWithKey(jobs, **self.sorting_parameters) return to_schedule, to_reject class FirstInFirstOut(SimpleHeuristic): """ **FirstInFirstOut scheduling policy.** First come, first served (commonly called FirstInFirstOut ‒ first in, first out) is the simplest process scheduling algorithm. """ name = 'FIFO' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.queued_time } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _seed=0, **kwargs): """ FirstInFirstOut Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class LongestJobFirst(SimpleHeuristic): """ **LJF scheduling policy.** Longest Job First (LJF) sorts the jobs, where the longest jobs are preferred over the shortest ones. """ name = 'LJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x:-x.expected_duration } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ LJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class ShortestJobFirst(SimpleHeuristic): """ **SJF scheduling policy.** Shortest Job First (SJF) sorts the jobs, where the shortest jobs are preferred over the longest ones. """ name = 'SJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.expected_duration } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ SJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class EASYBackfilling(SchedulerBase): """ EASY Backfilling scheduler. Whenever a job cannot be allocated, a reservation is made for it. After this, the following jobs are used to backfill the schedule, not allowing them to use the reserved nodes. This dispatching method includes its own calls to the allocator during the dispatching process, so it does not use the automatic allocator call after schedule generation. """ name = 'EBF' """ Name of the Scheduler policy. """ def __init__(self, allocator, seed=0, **kwargs): """ Easy BackFilling Constructor """ SchedulerBase.__init__(self, seed, allocator=None, **kwargs) self._blocked_job_id = None self._reserved_slot = (None, [],) self.nonauto_allocator = allocator self.allocator_rm_set = False # self.nonauto_allocator.set_resource_manager(resource_manager) def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.name, self.nonauto_allocator.name]) def scheduling_method(self, cur_time, queued_jobs, es_dict): """ This function must map the queued events to available nodes at the current time. 
:param cur_time: current time :param queued_jobs: Jobs to be dispatched :param es_dict: dictionary with full data of the events :return: a list of tuples (time to schedule, event id, list of assigned nodes), and a list of rejected job ids """ if not self.allocator_rm_set: self.nonauto_allocator.set_resource_manager(self.resource_manager) self.allocator_rm_set = True avl_resources = self.resource_manager.current_availability self.nonauto_allocator.set_resources(avl_resources) to_dispatch = [] to_reject = [] _to_fill = [] _prev_blocked = None _time_reached = False if self._reserved_slot[0] and self._reserved_slot[0] <= cur_time: _time_reached = True # Tries to allocate the blocked job self._logger.trace('There is a blocked job {} with {}'.format(self._blocked_job_id, self._reserved_slot)) # assert(self._blocked_job_id == queued_jobs[0].id), 'The first element is not the blocked one. ({} != {})'.format(self._blocked_job_id, queued_jobs[0].id) blocked_job = queued_jobs[0] queued_jobs = queued_jobs[1:] allocation = self.nonauto_allocator.allocating_method(blocked_job, cur_time, skip=False) if allocation[-1]: self._logger.trace('{}: {} blocked job can be allocated. Unblocking'.format(cur_time, self._blocked_job_id)) self._blocked_job_id = None self._reserved_slot = (None, []) _prev_blocked = [allocation] else: # There are jobs still using the reserved nodes self._logger.trace('{} job is still blocked. Reservation {}'.format(self._blocked_job_id, self._reserved_slot)) # Add the current allocation for the (un)blocked job. to_dispatch += [allocation] if self._blocked_job_id is None and queued_jobs: # Tries to perform a FIFO allocation if there is no blocked job # Returns the (partial) allocation and the idx for the blocked job, also sets the self._blocked_job_id var _allocated_jobs, blocked_idx = self._try_fifo_allocation(queued_jobs, cur_time) # There is a blocked job if not (blocked_idx is None): # If there is no reservation, calculate one for the blocked job if not self._reserved_slot[0]: blocked_job = queued_jobs[blocked_idx] self._logger.trace('Blocked {} Job: Calculate the reservation'.format(self._blocked_job_id)) # Current reservation (future time, reserved nodes) self._reserved_slot = self._calculate_slot(cur_time, deepcopy(avl_resources), _allocated_jobs[:blocked_idx], _prev_blocked, blocked_job, es_dict) self._logger.trace('Blocked {} Job: Nodes {} are reserved at {}'.format(self._blocked_job_id, self._reserved_slot[1], self._reserved_slot[0])) # Include the blocked job to_dispatch += _allocated_jobs[:blocked_idx + 1] _to_fill = queued_jobs[blocked_idx + 1:] else: to_dispatch += _allocated_jobs else: if not _time_reached: # The blocked job to_dispatch += [(None, self._blocked_job_id, [])] # All the remaining queued jobs _to_fill = queued_jobs[1:] else: # The remaining queued jobs _to_fill = queued_jobs if _to_fill: self._logger.trace('Blocked job {}. {} candidate jobs to fill the gap'.format(self._blocked_job_id, len(_to_fill))) # Filling the gap between cur_time and res_time (reserved_time, reserved_nodes) = self._reserved_slot filling_allocation = self.nonauto_allocator.allocating_method(_to_fill, cur_time, \ reserved_time=reserved_time, reserved_nodes=[], skip=True ) # Include the remaining jobs to_dispatch += filling_allocation return to_dispatch, to_reject def _try_fifo_allocation(self, queued_jobs, cur_time): """ Allocates as many jobs as possible using the FIFO approach. As soon as one allocation fails, all subsequent jobs fail too. 
Then, the return tuple contains info about the allocated jobs (assigned nodes and such) and also the position of the blocked job. :param queued_jobs: List of job objects :param cur_time: current time :return: job allocation, and position of the blocked job in the list """ # Try to allocate jobs as in FIFO _allocated_jobs = self.nonauto_allocator.allocating_method(queued_jobs, cur_time, skip=False) # Check if there is a blocked job (a job without an allocation) blocked_idx = None for i, (_, job_id, allocated_nodes) in enumerate(_allocated_jobs): if not allocated_nodes: self._blocked_job_id = job_id blocked_idx = i break return _allocated_jobs, blocked_idx def _calculate_slot(self, cur_time, avl_resources, decided_allocations, prev_blocked, blocked_job, es_dict): """ Computes a reservation for the blocked job, by incrementally releasing the resources used by the running events and recently allocated jobs. The earliest slot in which blocked_job fits is chosen. :param avl_resources: Actual available resources :param decided_allocations: Allocated jobs on the current iteration. :param prev_blocked: Allocation corresponding to the previous blocked job which has been unblocked during this iteration :param blocked_job: Event to be fitted in the time slot :param es_dict: Job dictionary :return: a tuple of time of the slot and nodes """ current_allocations = self.resource_manager.current_allocations # Creates a list of the jobs sorted by soonest ending time first future_endings = SortedListWithKey(key=lambda x:x[1]) # Running jobs for job_id, resources in current_allocations.items(): future_endings.add((job_id, es_dict[job_id].start_time + es_dict[job_id].expected_duration, resources)) # Previous blocked job has been scheduled if prev_blocked: decided_allocations += prev_blocked # Current allocated job for (_, job_id, nodes) in decided_allocations: _dec_alloc = {} for node in nodes: if not(node in _dec_alloc): _dec_alloc[node] = {k:v for k, v in es_dict[job_id].requested_resources.items()} else: for res, v in es_dict[job_id].requested_resources.items(): _dec_alloc[node][res] += v future_endings.add((job_id, cur_time + es_dict[job_id].expected_duration, _dec_alloc)) _required_alloc = blocked_job.requested_nodes _requested_resources = blocked_job.requested_resources _partial_alloc = {} # Calculate the partial allocation on the current system state for node, resources in avl_resources.items(): new_alloc = min([resources[req] // _requested_resources[req] for req in _requested_resources]) if new_alloc > 0: _partial_alloc[node] = new_alloc # Calculate the partial allocation on the next future endings for (job_id, res_time, used_nodes) in future_endings: for node, used_resources in used_nodes.items(): if not(node in avl_resources): avl_resources[node] = {r:0 for r in _requested_resources} for r, v in used_resources.items(): avl_resources[node][r] += v cur_alloc = _partial_alloc.get(node, 0) new_alloc = min([avl_resources[node][req] // _requested_resources[req] for req in _requested_resources]) _diff = new_alloc - cur_alloc if _diff > 0: _partial_alloc[node] = _partial_alloc.get(node, 0) + _diff # At this point the blocked job can be allocated if sum(_partial_alloc.values()) >= _required_alloc: ctimes = 0 nodes = [] for node, times in _partial_alloc.items(): ctimes += times nodes.append(node) if ctimes >= _required_alloc: break return (res_time, nodes,) raise DispatcherError('Can\'t find the slot.... no end? :(')
random_line_split
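The record above distinguishes JobVerification.CHECK_TOTAL from CHECK_REQUEST in _check_job_request. A small worked sketch with made-up capacities (plain dicts standing in for the resource manager) shows how the two checks can disagree:

# Illustrative numbers only; mirrors the logic of _check_job_request.
system_total = {'core': 64, 'mem': 256}                 # CHECK_TOTAL view
nodes = {'n1': {'core': 16, 'mem': 64},                 # CHECK_REQUEST view
         'n2': {'core': 16, 'mem': 64}}

requested_resources = {'core': 12, 'mem': 48}
requested_nodes = 3

# CHECK_TOTAL: compare the aggregate request against the whole system.
total_ok = all(requested_resources[r] * requested_nodes <= system_total[r]
               for r in requested_resources)            # True: 36<=64, 144<=256

# CHECK_REQUEST: count how many times the per-node request fits anywhere.
fits = sum(min(attrs[r] // requested_resources[r] for r in requested_resources)
           for attrs in nodes.values())                 # 1 + 1 = 2
request_ok = fits >= requested_nodes                    # False: fits only twice
print(total_ok, request_ok)                             # -> True False

The aggregate check passes while the per-node check fails, because no combination of nodes can actually host three copies of the per-node request.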
scheduler_class.py
""" MIT License Copyright (c) 2017 cgalleguillosm, AlessioNetti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging from sys import maxsize from random import seed from abc import abstractmethod, ABC from sortedcontainers.sortedlist import SortedListWithKey from enum import Enum from copy import deepcopy from accasim.base.resource_manager_class import ResourceManager from accasim.base.allocator_class import AllocatorBase class DispatcherError(Exception): pass class JobVerification(Enum): REJECT = -1 # All jobs are rejected NO_CHECK = 0 # No verification CHECK_TOTAL = 1 # Total requested resources are verified CHECK_REQUEST = 2 # Each node x resources are verified class SchedulerBase(ABC): """ This class allows to implement dispatching methods by integrating with an implementation of this class an allocator (:class:`accasim.base.allocator_class.AllocatorBase`). An implementation of this class could also serve as a entire dispatching method if the allocation class is not used as default (:class:`.allocator` = None), but the resource manager must be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`. """ MAXSIZE = maxsize ALLOW_MAPPING_SAME_NODE = True def __init__(self, _seed, allocator=None, job_check=JobVerification.CHECK_REQUEST, **kwargs): """ Construct a scheduler :param seed: Seed for the random state :param resource_manager: A Resource Manager object for dealing with system resources. :param allocator: Allocator object to be used by the scheduler to allocater after schedule generation. If an allocator isn't defined, the scheduler class must generate the entire dispatching plan. :param job_check: A job may be rejected if it doesnt comply with: - JobVerification.REJECT: Any job is rejected - JobVerification.NO_CHECK: All jobs are accepted - JobVerification.CHECK_TOTAL: If the job requires more resources than the available in the system. - JobVerification.CHECK_REQUEST: if an individual request by node requests more resources than the available one. :param kwargs: - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate jobs as much as possible. Otherwise, the allocation will stop after the first fail. 
""" seed(_seed) self._counter = 0 self.allocator = None self._logger = logging.getLogger('accasim') self._system_capacity = None self._nodes_capacity = None self.resource_manager = None if allocator: assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler' self.allocator = allocator # self.set_resource_manager(resource_manager) assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__) if job_check == JobVerification.REJECT: print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.') self._job_check = job_check # Check resources self._min_required_availability = kwargs.pop('min_resources', None) # ['core', 'mem']s # Skip jobs during allocation self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False) @property def name(self): """ Name of the schedulign method """ raise NotImplementedError @abstractmethod def get_id(self): """ Must return the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ raise NotImplementedError @abstractmethod def scheduling_method(self, cur_time, es_dict, es): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the job events :param es: events to be scheduled :return a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ raise Exception('This function must be implemented!!') def set_resource_manager(self, resource_manager): """ Set a resource manager. :param resource_manager: An instantiation of a resource_manager class or None """ if resource_manager: if self.allocator: self.allocator.set_resource_manager(resource_manager) assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler' self.resource_manager = resource_manager else: self.resource_manager = None def schedule(self, cur_time, es_dict, es): """ Method for schedule. It calls the specific scheduling method. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids. """ assert(self.resource_manager is not None), 'The resource manager is not defined. It must defined prior to run the simulation.' self._counter += 1 self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter)) self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es))) self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage)) rejected = [] # At least a job need 1 core and 1 kb/mb/gb of mem to run if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]): self._logger.debug("There is no availability of one of the min required resource to run a job. The dispatching process will be delayed until there is enough resources.") return [(None, e, []) for e in es], rejected accepted = [] # Verify jobs with the defined Job Policy for e in es:
to_allocate = [] # On accepted jobs by policy, try to schedule with the scheduling policy if accepted: to_allocate, to_reject = self.scheduling_method(cur_time, accepted, es_dict) rejected += to_reject for e in to_reject: self._logger.warning('{} has been rejected by the dispatcher. (Scheduling policy)'.format(e)) # If there are scheduled jobs and an allocator defined, try to allocate the scheduled jobs. if to_allocate and self.allocator: dispatching_plan = self.allocator.allocate(to_allocate, cur_time, skip=self.skip_jobs_on_allocation) else: dispatching_plan = to_allocate return dispatching_plan, rejected def _check_job_request(self, _job): """ Simple method that checks if the loaded _job violates the system's resource constraints. :param _job: Job object :return: True if the _job is valid, false otherwise """ _job.set_checked(True) if self._job_check == JobVerification.REJECT: return False elif self._job_check == JobVerification.NO_CHECK: return True elif self._job_check == JobVerification.CHECK_TOTAL: # We verify that the _job does not violate the system's resource constraints by comparing the total if not self._system_capacity: self._system_capacity = self.resource_manager.system_capacity('total') return not any([_job.requested_resources[res] * _job.requested_nodes > self._system_capacity[res] for res in _job.requested_resources.keys()]) elif self._job_check == JobVerification.CHECK_REQUEST: if not self._nodes_capacity: self._nodes_capacity = self.resource_manager.system_capacity('nodes') # We verify the _job request can be fitted in the system _requested_resources = _job.requested_resources _requested_nodes = _job.requested_nodes _fits = 0 _diff_node = 0 for _node, _attrs in self._nodes_capacity.items(): # How many times a request fits on the node _nfits = min([_attrs[_attr] // req for _attr, req in _requested_resources.items() if req > 0 ]) # Update current number of times the current job fits in the nodes if _nfits > 0: _fits += _nfits _diff_node += 1 if self.ALLOW_MAPPING_SAME_NODE: # Since _fits >> _diff_node this logical comparison is omitted. if _fits >= _requested_nodes: return True else: if _diff_node >= _requested_nodes: return True return False raise DispatcherError('Invalid option.') def __str__(self): return self.get_id() class SimpleHeuristic(SchedulerBase): """ Simple scheduler that sorts the events depending on the chosen policy. If a single job allocation fails, all subsequent jobs fail too. Sorting is defined by a name and sort function parameters """ def __init__(self, seed, allocator, name, sorting_parameters, **kwargs): SchedulerBase.__init__(self, seed, allocator, **kwargs) self.name = name self.sorting_parameters = sorting_parameters def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.__class__.__name__, self.name, self.allocator.get_id()]) def scheduling_method(self, cur_time, jobs, es_dict): """ This function must map the queued events to available nodes at the current time.
:param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), and an array of rejected job ids """ to_reject = [] to_schedule = SortedListWithKey(jobs, **self.sorting_parameters) return to_schedule, to_reject class FirstInFirstOut(SimpleHeuristic): """ **FirstInFirstOut scheduling policy.** First come, first served (commonly called FirstInFirstOut ‒ first in, first out) is the simplest process scheduling algorithm. """ name = 'FIFO' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.queued_time } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _seed=0, **kwargs): """ FirstInFirstOut Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class LongestJobFirst(SimpleHeuristic): """ **LJF scheduling policy.** Longest Job First (LJF) sorts the jobs, where the longest jobs are preferred over the shortest ones. """ name = 'LJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x:-x.expected_duration } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ LJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class ShortestJobFirst(SimpleHeuristic): """ **SJF scheduling policy.** Shortest Job First (SJF) sorts the jobs, where the shortest jobs are preferred over the longest ones. """ name = 'SJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.expected_duration } """ This sorting function orders the jobs according to the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ SJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class EASYBackfilling(SchedulerBase): """ EASY Backfilling scheduler. Whenever a job cannot be allocated, a reservation is made for it. After this, the following jobs are used to backfill the schedule, not allowing them to use the reserved nodes. This dispatching method includes its own calls to the allocator during the dispatching process, so it does not use the automatic allocator call after schedule generation. """ name = 'EBF' """ Name of the Scheduler policy. """ def __init__(self, allocator, seed=0, **kwargs): """ Easy BackFilling Constructor """ SchedulerBase.__init__(self, seed, allocator=None, **kwargs) self._blocked_job_id = None self._reserved_slot = (None, [],) self.nonauto_allocator = allocator self.allocator_rm_set = False # self.nonauto_allocator.set_resource_manager(resource_manager) def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.name, self.nonauto_allocator.name]) def scheduling_method(self, cur_time, queued_jobs, es_dict): """ This function must map the queued events to available nodes at the current time. 
:param cur_time: current time :param queued_jobs: Jobs to be dispatched :param es_dict: dictionary with full data of the events :return: a list of tuples (time to schedule, event id, list of assigned nodes), and a list of rejected job ids """ if not self.allocator_rm_set: self.nonauto_allocator.set_resource_manager(self.resource_manager) self.allocator_rm_set = True avl_resources = self.resource_manager.current_availability self.nonauto_allocator.set_resources(avl_resources) to_dispatch = [] to_reject = [] _to_fill = [] _prev_blocked = None _time_reached = False if self._reserved_slot[0] and self._reserved_slot[0] <= cur_time: _time_reached = True # Tries to allocate the blocked job self._logger.trace('There is a blocked job {} with {}'.format(self._blocked_job_id, self._reserved_slot)) # assert(self._blocked_job_id == queued_jobs[0].id), 'The first element is not the blocked one. ({} != {})'.format(self._blocked_job_id, queued_jobs[0].id) blocked_job = queued_jobs[0] queued_jobs = queued_jobs[1:] allocation = self.nonauto_allocator.allocating_method(blocked_job, cur_time, skip=False) if allocation[-1]: self._logger.trace('{}: {} blocked job can be allocated. Unblocking'.format(cur_time, self._blocked_job_id)) self._blocked_job_id = None self._reserved_slot = (None, []) _prev_blocked = [allocation] else: # There are jobs still using the reserved nodes self._logger.trace('{} job is still blocked. Reservation {}'.format(self._blocked_job_id, self._reserved_slot)) # Add the current allocation for the (un)blocked job. to_dispatch += [allocation] if self._blocked_job_id is None and queued_jobs: # Tries to perform a FIFO allocation if there is no blocked job # Returns the (partial) allocation and the idx for the blocked job, also sets the self._blocked_job_id var _allocated_jobs, blocked_idx = self._try_fifo_allocation(queued_jobs, cur_time) # There is a blocked job if not (blocked_idx is None): # If there is no reservation, calculate one for the blocked job if not self._reserved_slot[0]: blocked_job = queued_jobs[blocked_idx] self._logger.trace('Blocked {} Job: Calculate the reservation'.format(self._blocked_job_id)) # Current reservation (future time, reserved nodes) self._reserved_slot = self._calculate_slot(cur_time, deepcopy(avl_resources), _allocated_jobs[:blocked_idx], _prev_blocked, blocked_job, es_dict) self._logger.trace('Blocked {} Job: Nodes {} are reserved at {}'.format(self._blocked_job_id, self._reserved_slot[1], self._reserved_slot[0])) # Include the blocked job to_dispatch += _allocated_jobs[:blocked_idx + 1] _to_fill = queued_jobs[blocked_idx + 1:] else: to_dispatch += _allocated_jobs else: if not _time_reached: # The blocked job to_dispatch += [(None, self._blocked_job_id, [])] # All the remaining queued jobs _to_fill = queued_jobs[1:] else: # The remaining queued jobs _to_fill = queued_jobs if _to_fill: self._logger.trace('Blocked job {}. {} candidate jobs to fill the gap'.format(self._blocked_job_id, len(_to_fill))) # Filling the gap between cur_time and res_time (reserved_time, reserved_nodes) = self._reserved_slot filling_allocation = self.nonauto_allocator.allocating_method(_to_fill, cur_time, \ reserved_time=reserved_time, reserved_nodes=[], skip=True ) # Include the remaining jobs to_dispatch += filling_allocation return to_dispatch, to_reject def _try_fifo_allocation(self, queued_jobs, cur_time): """ Allocates as many jobs as possible using the FIFO approach. As soon as one allocation fails, all subsequent jobs fail too. 
Then, the return tuple contains info about the allocated jobs (assigned nodes and such) and also the position of the blocked job. :param queued_jobs: List of job objects :param cur_time: current time :return: job allocation, and position of the blocked job in the list """ # Try to allocate jobs as in FIFO _allocated_jobs = self.nonauto_allocator.allocating_method(queued_jobs, cur_time, skip=False) # Check if there is a blocked job (a job without an allocation) blocked_idx = None for i, (_, job_id, allocated_nodes) in enumerate(_allocated_jobs): if not allocated_nodes: self._blocked_job_id = job_id blocked_idx = i break return _allocated_jobs, blocked_idx def _calculate_slot(self, cur_time, avl_resources, decided_allocations, prev_blocked, blocked_job, es_dict): """ Computes a reservation for the blocked job, by incrementally releasing the resources used by the running events and recently allocated jobs. The earliest slot in which blocked_job fits is chosen. :param avl_resources: Actual available resources :param decided_allocations: Allocated jobs on the current iteration. :param prev_blocked: Allocation corresponding to the previous blocked job which has been unblocked during this iteration :param blocked_job: Event to be fitted in the time slot :param es_dict: Job dictionary :return: a tuple of time of the slot and nodes """ current_allocations = self.resource_manager.current_allocations # Creates a list of the jobs sorted by soonest ending time first future_endings = SortedListWithKey(key=lambda x:x[1]) # Running jobs for job_id, resources in current_allocations.items(): future_endings.add((job_id, es_dict[job_id].start_time + es_dict[job_id].expected_duration, resources)) # Previous blocked job has been scheduled if prev_blocked: decided_allocations += prev_blocked # Current allocated job for (_, job_id, nodes) in decided_allocations: _dec_alloc = {} for node in nodes: if not(node in _dec_alloc): _dec_alloc[node] = {k:v for k, v in es_dict[job_id].requested_resources.items()} else: for res, v in es_dict[job_id].requested_resources.items(): _dec_alloc[node][res] += v future_endings.add((job_id, cur_time + es_dict[job_id].expected_duration, _dec_alloc)) _required_alloc = blocked_job.requested_nodes _requested_resources = blocked_job.requested_resources _partial_alloc = {} # Calculate the partial allocation on the current system state for node, resources in avl_resources.items(): new_alloc = min([resources[req] // _requested_resources[req] for req in _requested_resources]) if new_alloc > 0: _partial_alloc[node] = new_alloc # Calculate the partial allocation on the next future endings for (job_id, res_time, used_nodes) in future_endings: for node, used_resources in used_nodes.items(): if not(node in avl_resources): avl_resources[node] = {r:0 for r in _requested_resources} for r, v in used_resources.items(): avl_resources[node][r] += v cur_alloc = _partial_alloc.get(node, 0) new_alloc = min([avl_resources[node][req] // _requested_resources[req] for req in _requested_resources]) _diff = new_alloc - cur_alloc if _diff > 0: _partial_alloc[node] = _partial_alloc.get(node, 0) + _diff # At this point the blocked job can be allocated if sum(_partial_alloc.values()) >= _required_alloc: ctimes = 0 nodes = [] for node, times in _partial_alloc.items(): ctimes += times nodes.append(node) if ctimes >= _required_alloc: break return (res_time, nodes,) raise DispatcherError('Can\'t find the slot.... no end? :(')
job = es_dict[e] if not job.get_checked() and not self._check_job_request(job): if self._job_check != JobVerification.REJECT: self._logger.warning('{} has been rejected by the dispatcher. ({})'.format(e, self._job_check)) rejected.append(e) continue accepted.append(job)
conditional_block
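Each record in this dump stores a file name, a prefix, a suffix, the held-out middle, and a fim_type tag. A minimal sketch of how such a record reconstructs its source; the field names mirror the record layout and the sample strings are hypothetical:

# Minimal sketch: reassembling a fill-in-the-middle (FIM) record.
# The dict keys follow the record layout of this dump; the sample record is hypothetical.
def reconstruct(record):
    """The original source is the prefix, then the held-out middle, then the suffix."""
    return record["prefix"] + record["middle"] + record["suffix"]

record = {
    "file_name": "example.py",
    "prefix": "def add(a, b):\n    ",
    "middle": "return a + b",
    "suffix": "\n\nprint(add(1, 2))\n",
    "fim_type": "random_line_split",
}
assert reconstruct(record) == "def add(a, b):\n    return a + b\n\nprint(add(1, 2))\n"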
scheduler_class.py
""" MIT License Copyright (c) 2017 cgalleguillosm, AlessioNetti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging from sys import maxsize from random import seed from abc import abstractmethod, ABC from sortedcontainers.sortedlist import SortedListWithKey from enum import Enum from copy import deepcopy from accasim.base.resource_manager_class import ResourceManager from accasim.base.allocator_class import AllocatorBase class DispatcherError(Exception): pass class JobVerification(Enum): REJECT = -1 # All jobs are rejected NO_CHECK = 0 # No verification CHECK_TOTAL = 1 # Total requested resources are verified CHECK_REQUEST = 2 # Each node x resources are verified class SchedulerBase(ABC): """ This class allows to implement dispatching methods by integrating with an implementation of this class an allocator (:class:`accasim.base.allocator_class.AllocatorBase`). An implementation of this class could also serve as a entire dispatching method if the allocation class is not used as default (:class:`.allocator` = None), but the resource manager must be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`. """ MAXSIZE = maxsize ALLOW_MAPPING_SAME_NODE = True def __init__(self, _seed, allocator=None, job_check=JobVerification.CHECK_REQUEST, **kwargs): """ Construct a scheduler :param seed: Seed for the random state :param resource_manager: A Resource Manager object for dealing with system resources. :param allocator: Allocator object to be used by the scheduler to allocater after schedule generation. If an allocator isn't defined, the scheduler class must generate the entire dispatching plan. :param job_check: A job may be rejected if it doesnt comply with: - JobVerification.REJECT: Any job is rejected - JobVerification.NO_CHECK: All jobs are accepted - JobVerification.CHECK_TOTAL: If the job requires more resources than the available in the system. - JobVerification.CHECK_REQUEST: if an individual request by node requests more resources than the available one. :param kwargs: - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate jobs as much as possible. Otherwise, the allocation will stop after the first fail. 
""" seed(_seed) self._counter = 0 self.allocator = None self._logger = logging.getLogger('accasim') self._system_capacity = None self._nodes_capacity = None self.resource_manager = None if allocator: assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler' self.allocator = allocator # self.set_resource_manager(resource_manager) assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__) if job_check == JobVerification.REJECT: print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.') self._job_check = job_check # Check resources self._min_required_availability = kwargs.pop('min_resources', None) # ['core', 'mem']s # Skip jobs during allocation self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False) @property def name(self): """ Name of the schedulign method """ raise NotImplementedError @abstractmethod def get_id(self): """ Must return the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ raise NotImplementedError @abstractmethod def scheduling_method(self, cur_time, es_dict, es): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the job events :param es: events to be scheduled :return a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ raise Exception('This function must be implemented!!') def set_resource_manager(self, resource_manager): """ Set a resource manager. :param resource_manager: An instantiation of a resource_manager class or None """ if resource_manager: if self.allocator: self.allocator.set_resource_manager(resource_manager) assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler' self.resource_manager = resource_manager else: self.resource_manager = None def schedule(self, cur_time, es_dict, es): """ Method for schedule. It calls the specific scheduling method. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids. """ assert(self.resource_manager is not None), 'The resource manager is not defined. It must defined prior to run the simulation.' self._counter += 1 self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter)) self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es))) self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage)) rejected = [] # At least a job need 1 core and 1 kb/mb/gb of mem to run if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]): self._logger.debug("There is no availability of one of the min required resource to run a job. The dispatching process will be delayed until there is enough resources.") return [(None, e, []) for e in es], rejected accepted = [] # Verify jobs with the defined Job Policy for e in es: job = es_dict[e] if not job.get_checked() and not self._check_job_request(job): if self._job_check != JobVerification.REJECT: self._logger.warning('{} has been rejected by the dispatcher. 
({})'.format(e, self._job_check)) rejected.append(e) continue accepted.append(job) to_allocate = [] # On accepted jobs by policy, try to schedule with the scheduling policy if accepted: to_allocate, to_reject = self.scheduling_method(cur_time, accepted, es_dict) rejected += to_reject for e in to_reject: self._logger.warning('{} has been rejected by the dispatcher. (Scheduling policy)'.format(e)) # If there are scheduled jobs and an allocator defined, try to allocate the scheduled jobs. if to_allocate and self.allocator: dispatching_plan = self.allocator.allocate(to_allocate, cur_time, skip=self.skip_jobs_on_allocation) else: dispatching_plan = to_allocate return dispatching_plan, rejected def
(self, _job): """ Simple method that checks if the loaded _job violates the system's resource constraints. :param _job: Job object :return: True if the _job is valid, false otherwise """ _job.set_checked(True) if self._job_check == JobVerification.REJECT: return False elif self._job_check == JobVerification.NO_CHECK: return True elif self._job_check == JobVerification.CHECK_TOTAL: # We verify that the _job does not violate the system's resource constraints by comparing the total if not self._system_capacity: self._system_capacity = self.resource_manager.system_capacity('total') return not any([_job.requested_resources[res] * _job.requested_nodes > self._system_capacity[res] for res in _job.requested_resources.keys()]) elif self._job_check == JobVerification.CHECK_REQUEST: if not self._nodes_capacity: self._nodes_capacity = self.resource_manager.system_capacity('nodes') # We verify the _job request can be fitted in the system _requested_resources = _job.requested_resources _requested_nodes = _job.requested_nodes _fits = 0 _diff_node = 0 for _node, _attrs in self._nodes_capacity.items(): # How many time a request fits on the node _nfits = min([_attrs[_attr] // req for _attr, req in _requested_resources.items() if req > 0 ]) # Update current number of times the current job fits in the nodes if _nfits > 0: _fits += _nfits _diff_node += 1 if self.ALLOW_MAPPING_SAME_NODE: # Since _fits >> _diff_node this logical comparison is omitted. if _fits >= _requested_nodes: return True else: if _diff_node >= _requested_nodes: return True return False raise DispatcherError('Invalid option.') def __str__(self): return self.get_id() class SimpleHeuristic(SchedulerBase): """ Simple scheduler, sorts the event depending on the chosen policy. If a single job allocation fails, all subsequent jobs fail too. Sorting as name, sort funct parameters """ def __init__(self, seed, allocator, name, sorting_parameters, **kwargs): SchedulerBase.__init__(self, seed, allocator, **kwargs) self.name = name self.sorting_parameters = sorting_parameters def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.__class__.__name__, self.name, self.allocator.get_id()]) def scheduling_method(self, cur_time, jobs, es_dict): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ to_reject = [] to_schedule = SortedListWithKey(jobs, **self.sorting_parameters) return to_schedule, to_reject class FirstInFirstOut(SimpleHeuristic): """ **FirstInFirstOut scheduling policy.** The first come, first served (commonly called FirstInFirstOut ‒ first in, first out) process scheduling algorithm is the simplest process scheduling algorithm. """ name = 'FIFO' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.queued_time } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _seed=0, **kwargs): """ FirstInFirstOut Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class LongestJobFirst(SimpleHeuristic): """ **LJF scheduling policy.** Longest Job First (LJF) sorts the jobs, where the longest jobs are preferred over the shortest ones. 
""" name = 'LJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x:-x.expected_duration } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ LJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class ShortestJobFirst(SimpleHeuristic): """ **SJF scheduling policy.** Shortest Job First (SJF) sorts the jobs, where the shortest jobs are preferred over the longest ones. """ name = 'SJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.expected_duration } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ SJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class EASYBackfilling(SchedulerBase): """ EASY Backfilling scheduler. Whenever a job cannot be allocated, a reservation is made for it. After this, the following jobs are used to backfill the schedule, not allowing them to use the reserved nodes. This dispatching methods includes its own calls to the allocator over the dispatching process. Then it isn't use the auto allocator call, after the schedule generation. """ name = 'EBF' """ Name of the Scheduler policy. """ def __init__(self, allocator, seed=0, **kwargs): """ Easy BackFilling Constructor """ SchedulerBase.__init__(self, seed, allocator=None, **kwargs) self._blocked_job_id = None self._reserved_slot = (None, [],) self.nonauto_allocator = allocator self.allocator_rm_set = False # self.nonauto_allocator.set_resource_manager(resource_manager) def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.name, self.nonauto_allocator.name]) def scheduling_method(self, cur_time, queued_jobs, es_dict): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param queued_jobs: Jobs to be dispatched :param es_dict: dictionary with full data of the events :return: a list of tuples (time to schedule, event id, list of assigned nodes), and a list of rejected job ids """ if not self.allocator_rm_set: self.nonauto_allocator.set_resource_manager(self.resource_manager) self.allocator_rm_set = True avl_resources = self.resource_manager.current_availability self.nonauto_allocator.set_resources(avl_resources) to_dispatch = [] to_reject = [] _to_fill = [] _prev_blocked = None _time_reached = False if self._reserved_slot[0] and self._reserved_slot[0] <= cur_time: _time_reached = True # Tries to allocate the blocked job self._logger.trace('There is a blocked job {} with {}'.format(self._blocked_job_id, self._reserved_slot)) # assert(self._blocked_job_id == queued_jobs[0].id), 'The first element is not the blocked one. ({} != {})'.format(self._blocked_job_id, queued_jobs[0].id) blocked_job = queued_jobs[0] queued_jobs = queued_jobs[1:] allocation = self.nonauto_allocator.allocating_method(blocked_job, cur_time, skip=False) if allocation[-1]: self._logger.trace('{}: {} blocked job can be allocated. Unblocking'.format(cur_time, self._blocked_job_id)) self._blocked_job_id = None self._reserved_slot = (None, []) _prev_blocked = [allocation] else: # There are jobs still using the reserved nodes self._logger.trace('{} job is still blocked. 
Reservation {}'.format(self._blocked_job_id, self._reserved_slot)) # Add the current allocation for the (un)blocked job. to_dispatch += [allocation] if self._blocked_job_id is None and queued_jobs: # Tries to perform a FIFO allocation if there is no blocked job # Returns the (partial) allocation and the idx for the blocked job, also sets the self._blocked_job_id var _allocated_jobs, blocked_idx = self._try_fifo_allocation(queued_jobs, cur_time) # There is a blocked job if not (blocked_idx is None): # If there is no a reservation, calculate it for the blocked job if not self._reserved_slot[0]: blocked_job = queued_jobs[blocked_idx] self._logger.trace('Blocked {} Job: Calculate the reservation'.format(self._blocked_job_id)) # Current reservation (future time, reserved nodes) self._reserved_slot = self._calculate_slot(cur_time, deepcopy(avl_resources), _allocated_jobs[:blocked_idx], _prev_blocked, blocked_job, es_dict) self._logger.trace('Blocked {} Job: Nodes {} are reserved at {}'.format(self._blocked_job_id, self._reserved_slot[1], self._reserved_slot[0])) # Include the blocked job to_dispatch += _allocated_jobs[:blocked_idx + 1] _to_fill = queued_jobs[blocked_idx + 1:] else: to_dispatch += _allocated_jobs else: if not _time_reached: # The blocked job to_dispatch += [(None, self._blocked_job_id, [])] # All the remaining queued jobs _to_fill = queued_jobs[1:] else: # The remaining queued jobs _to_fill = queued_jobs if _to_fill: self._logger.trace('Blocked job {}. {} jobs candidates to fill the gap'.format(self._blocked_job_id, len(_to_fill))) # Filling the gap between cur_time and res_time (reserved_time, reserved_nodes) = self._reserved_slot filling_allocation = self.nonauto_allocator.allocating_method(_to_fill, cur_time, \ reserved_time=reserved_time, reserved_nodes=[], skip=True ) # Include the remaining jobs to_dispatch += filling_allocation return to_dispatch, to_reject def _try_fifo_allocation(self, queued_jobs, cur_time): """ Allocates as many jobs as possible using the FIFO approach. As soon as one allocation fails, all subsequent jobs fail too. Then, the return tuple contains info about the allocated jobs (assigned nodes and such) and also the position of the blocked job. :param queued_jobs: List of job objects :param cur_time: current time :return job allocation, and position of the blocked job in the list """ # Try to allocate jobs as in FIFO _allocated_jobs = self.nonauto_allocator.allocating_method(queued_jobs, cur_time, skip=False) # Check if there is a blocked job (a job without an allocation) blocked_idx = None for i, (_, job_id, allocated_nodes) in enumerate(_allocated_jobs): if not allocated_nodes: self._blocked_job_id = job_id blocked_idx = i break return _allocated_jobs, blocked_idx def _calculate_slot(self, cur_time, avl_resources, decided_allocations, prev_blocked, blocked_job, es_dict): """ Computes a reservation for the blocked job, by releasing incrementally the resources used by the running events and recently allocated jobs. The earliest slot in which blocked_job fits is chosen. :param avl_resources: Actual available resources :param decided_allocations: Allocated jobs on the current iteration. 
:param prev_blocked: Allocation corresponding to the previous blocked job which has been unblocked during this iteration :param blocked_jobs: Event to be fitted in the time slot :param es_dist: Job dictionary :return: a tuple of time of the slot and nodes """ current_allocations = self.resource_manager.current_allocations # Creates a list the jobs sorted by soonest ending time first future_endings = SortedListWithKey(key=lambda x:x[1]) # Running jobs for job_id, resources in current_allocations.items(): future_endings.add((job_id, es_dict[job_id].start_time + es_dict[job_id].expected_duration, resources)) # Previous blocked job has been scheduled if prev_blocked: decided_allocations += prev_blocked # Current allocated job for (_, job_id, nodes) in decided_allocations: _dec_alloc = {} for node in nodes: if not(node in _dec_alloc): _dec_alloc[node] = {k:v for k, v in es_dict[job_id].requested_resources.items()} else: for res, v in es_dict[job_id].requested_resources.items(): _dec_alloc[node][res] += v future_endings.add((job_id, cur_time + es_dict[job_id].expected_duration, _dec_alloc)) _required_alloc = blocked_job.requested_nodes _requested_resources = blocked_job.requested_resources _partial_alloc = {} # Calculate the partial allocation on the current system state for node, resources in avl_resources.items(): new_alloc = min([resources[req] // _requested_resources[req] for req in _requested_resources]) if new_alloc > 0: _partial_alloc[node] = new_alloc # Calculate the partial allocation on the next future endings for (job_id, res_time, used_nodes) in future_endings: for node, used_resources in used_nodes.items(): if not(node in avl_resources): avl_resources[node] = {r:0 for r in _requested_resources} for r, v in used_resources.items(): avl_resources[node][r] += v cur_alloc = _partial_alloc.get(node, 0) new_alloc = min([avl_resources[node][req] // _requested_resources[req] for req in _requested_resources]) _diff = new_alloc - cur_alloc if _diff > 0: _partial_alloc[node] = _partial_alloc.get(node, 0) + _diff # At this point the blocked job can be allocated if sum(_partial_alloc.values()) >= _required_alloc: ctimes = 0 nodes = [] for node, times in _partial_alloc.items(): ctimes += times nodes.append(node) if ctimes >= _required_alloc: break return (res_time, nodes,) raise DispatcherError('Can\'t find the slot.... no end? :(')
_check_job_request
identifier_name
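The record above masks the name of the job-verification helper. A standalone sketch of its CHECK_TOTAL branch, with illustrative capacities (the real method reads capacities from the resource manager):

# Sketch of the CHECK_TOTAL policy from the code above: a job is rejected when its
# total request (per-node request x node count) exceeds the system-wide capacity
# for any resource. The capacity numbers here are illustrative.
def check_total(requested_resources, requested_nodes, system_capacity):
    return not any(requested_resources[res] * requested_nodes > system_capacity[res]
                   for res in requested_resources)

capacity = {"core": 64, "mem": 256}
assert check_total({"core": 4, "mem": 8}, 8, capacity) is True    # 32 cores / 64 mem fit
assert check_total({"core": 16, "mem": 8}, 8, capacity) is False  # 128 cores do not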
scheduler_class.py
""" MIT License Copyright (c) 2017 cgalleguillosm, AlessioNetti Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging from sys import maxsize from random import seed from abc import abstractmethod, ABC from sortedcontainers.sortedlist import SortedListWithKey from enum import Enum from copy import deepcopy from accasim.base.resource_manager_class import ResourceManager from accasim.base.allocator_class import AllocatorBase class DispatcherError(Exception): pass class JobVerification(Enum): REJECT = -1 # All jobs are rejected NO_CHECK = 0 # No verification CHECK_TOTAL = 1 # Total requested resources are verified CHECK_REQUEST = 2 # Each node x resources are verified class SchedulerBase(ABC): """ This class allows to implement dispatching methods by integrating with an implementation of this class an allocator (:class:`accasim.base.allocator_class.AllocatorBase`). An implementation of this class could also serve as a entire dispatching method if the allocation class is not used as default (:class:`.allocator` = None), but the resource manager must be set on the allocator using :func:`accasim.base.allocator_class.AllocatorBase.set_resource_manager`. """ MAXSIZE = maxsize ALLOW_MAPPING_SAME_NODE = True def __init__(self, _seed, allocator=None, job_check=JobVerification.CHECK_REQUEST, **kwargs): """ Construct a scheduler :param seed: Seed for the random state :param resource_manager: A Resource Manager object for dealing with system resources. :param allocator: Allocator object to be used by the scheduler to allocater after schedule generation. If an allocator isn't defined, the scheduler class must generate the entire dispatching plan. :param job_check: A job may be rejected if it doesnt comply with: - JobVerification.REJECT: Any job is rejected - JobVerification.NO_CHECK: All jobs are accepted - JobVerification.CHECK_TOTAL: If the job requires more resources than the available in the system. - JobVerification.CHECK_REQUEST: if an individual request by node requests more resources than the available one. :param kwargs: - skip_jobs_on_allocation: If the allocator is predefined and this parameter is true, the allocator will try to allocate jobs as much as possible. Otherwise, the allocation will stop after the first fail. 
""" seed(_seed) self._counter = 0 self.allocator = None self._logger = logging.getLogger('accasim') self._system_capacity = None self._nodes_capacity = None self.resource_manager = None if allocator: assert isinstance(allocator, AllocatorBase), 'Allocator not valid for scheduler' self.allocator = allocator # self.set_resource_manager(resource_manager) assert(isinstance(job_check, JobVerification)), 'job_check invalid type. {}'.format(job_check.__class__) if job_check == JobVerification.REJECT: print('All jobs will be rejected, and for performance purposes the rejection messages will be omitted.') self._job_check = job_check # Check resources self._min_required_availability = kwargs.pop('min_resources', None) # ['core', 'mem']s # Skip jobs during allocation self.skip_jobs_on_allocation = kwargs.pop('skip_jobs_on_allocation', False) @property def name(self): """ Name of the schedulign method """ raise NotImplementedError @abstractmethod def get_id(self):
@abstractmethod def scheduling_method(self, cur_time, es_dict, es): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the job events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), and an array of rejected job ids """ raise Exception('This function must be implemented!!') def set_resource_manager(self, resource_manager): """ Set a resource manager. :param resource_manager: An instantiation of a resource_manager class or None """ if resource_manager: if self.allocator: self.allocator.set_resource_manager(resource_manager) assert isinstance(resource_manager, ResourceManager), 'Resource Manager not valid for scheduler' self.resource_manager = resource_manager else: self.resource_manager = None def schedule(self, cur_time, es_dict, es): """ Method for scheduling. It calls the specific scheduling method. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), array of rejected job ids. """ assert(self.resource_manager is not None), 'The resource manager is not defined. It must be defined prior to running the simulation.' self._counter += 1 self._logger.debug("{} Dispatching: #{} decision".format(cur_time, self._counter)) self._logger.debug('{} Dispatching: {} queued jobs'.format(cur_time, len(es))) self._logger.debug('{} Dispatching: {}'.format(cur_time, self.resource_manager.current_usage)) rejected = [] # At least a job needs 1 core and 1 kb/mb/gb of mem to run if self._min_required_availability and any([self.resource_manager.resources.full[res] for res in self._min_required_availability]): self._logger.debug("There is no availability of one of the minimum required resources to run a job. The dispatching process will be delayed until there are enough resources.") return [(None, e, []) for e in es], rejected accepted = [] # Verify jobs with the defined Job Policy for e in es: job = es_dict[e] if not job.get_checked() and not self._check_job_request(job): if self._job_check != JobVerification.REJECT: self._logger.warning('{} has been rejected by the dispatcher. ({})'.format(e, self._job_check)) rejected.append(e) continue accepted.append(job) to_allocate = [] # On accepted jobs by policy, try to schedule with the scheduling policy if accepted: to_allocate, to_reject = self.scheduling_method(cur_time, accepted, es_dict) rejected += to_reject for e in to_reject: self._logger.warning('{} has been rejected by the dispatcher. (Scheduling policy)'.format(e)) # If there are scheduled jobs and an allocator defined, try to allocate the scheduled jobs. if to_allocate and self.allocator: dispatching_plan = self.allocator.allocate(to_allocate, cur_time, skip=self.skip_jobs_on_allocation) else: dispatching_plan = to_allocate return dispatching_plan, rejected def _check_job_request(self, _job): """ Simple method that checks if the loaded _job violates the system's resource constraints. 
:param _job: Job object :return: True if the _job is valid, false otherwise """ _job.set_checked(True) if self._job_check == JobVerification.REJECT: return False elif self._job_check == JobVerification.NO_CHECK: return True elif self._job_check == JobVerification.CHECK_TOTAL: # We verify that the _job does not violate the system's resource constraints by comparing the total if not self._system_capacity: self._system_capacity = self.resource_manager.system_capacity('total') return not any([_job.requested_resources[res] * _job.requested_nodes > self._system_capacity[res] for res in _job.requested_resources.keys()]) elif self._job_check == JobVerification.CHECK_REQUEST: if not self._nodes_capacity: self._nodes_capacity = self.resource_manager.system_capacity('nodes') # We verify the _job request can be fitted in the system _requested_resources = _job.requested_resources _requested_nodes = _job.requested_nodes _fits = 0 _diff_node = 0 for _node, _attrs in self._nodes_capacity.items(): # How many time a request fits on the node _nfits = min([_attrs[_attr] // req for _attr, req in _requested_resources.items() if req > 0 ]) # Update current number of times the current job fits in the nodes if _nfits > 0: _fits += _nfits _diff_node += 1 if self.ALLOW_MAPPING_SAME_NODE: # Since _fits >> _diff_node this logical comparison is omitted. if _fits >= _requested_nodes: return True else: if _diff_node >= _requested_nodes: return True return False raise DispatcherError('Invalid option.') def __str__(self): return self.get_id() class SimpleHeuristic(SchedulerBase): """ Simple scheduler, sorts the event depending on the chosen policy. If a single job allocation fails, all subsequent jobs fail too. Sorting as name, sort funct parameters """ def __init__(self, seed, allocator, name, sorting_parameters, **kwargs): SchedulerBase.__init__(self, seed, allocator, **kwargs) self.name = name self.sorting_parameters = sorting_parameters def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.__class__.__name__, self.name, self.allocator.get_id()]) def scheduling_method(self, cur_time, jobs, es_dict): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param es_dict: dictionary with full data of the events :param es: events to be scheduled :return: a tuple of (time to schedule, event id, list of assigned nodes), an array jobs id of rejected jobs """ to_reject = [] to_schedule = SortedListWithKey(jobs, **self.sorting_parameters) return to_schedule, to_reject class FirstInFirstOut(SimpleHeuristic): """ **FirstInFirstOut scheduling policy.** The first come, first served (commonly called FirstInFirstOut ‒ first in, first out) process scheduling algorithm is the simplest process scheduling algorithm. """ name = 'FIFO' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.queued_time } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _seed=0, **kwargs): """ FirstInFirstOut Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class LongestJobFirst(SimpleHeuristic): """ **LJF scheduling policy.** Longest Job First (LJF) sorts the jobs, where the longest jobs are preferred over the shortest ones. """ name = 'LJF' """ Name of the Scheduler policy. 
""" sorting_arguments = { 'key': lambda x:-x.expected_duration } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ LJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class ShortestJobFirst(SimpleHeuristic): """ **SJF scheduling policy.** Shortest Job First (SJF) sorts the jobs, where the shortest jobs are preferred over the longest ones. """ name = 'SJF' """ Name of the Scheduler policy. """ sorting_arguments = { 'key': lambda x: x.expected_duration } """ This sorting function allows to sort the jobs in relation of the scheduling policy. """ def __init__(self, _allocator, _resource_manager=None, _seed=0, **kwargs): """ SJF Constructor """ SimpleHeuristic.__init__(self, _seed, _allocator, self.name, self.sorting_arguments, **kwargs) class EASYBackfilling(SchedulerBase): """ EASY Backfilling scheduler. Whenever a job cannot be allocated, a reservation is made for it. After this, the following jobs are used to backfill the schedule, not allowing them to use the reserved nodes. This dispatching methods includes its own calls to the allocator over the dispatching process. Then it isn't use the auto allocator call, after the schedule generation. """ name = 'EBF' """ Name of the Scheduler policy. """ def __init__(self, allocator, seed=0, **kwargs): """ Easy BackFilling Constructor """ SchedulerBase.__init__(self, seed, allocator=None, **kwargs) self._blocked_job_id = None self._reserved_slot = (None, [],) self.nonauto_allocator = allocator self.allocator_rm_set = False # self.nonauto_allocator.set_resource_manager(resource_manager) def get_id(self): """ Returns the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ return '-'.join([self.name, self.nonauto_allocator.name]) def scheduling_method(self, cur_time, queued_jobs, es_dict): """ This function must map the queued events to available nodes at the current time. :param cur_time: current time :param queued_jobs: Jobs to be dispatched :param es_dict: dictionary with full data of the events :return: a list of tuples (time to schedule, event id, list of assigned nodes), and a list of rejected job ids """ if not self.allocator_rm_set: self.nonauto_allocator.set_resource_manager(self.resource_manager) self.allocator_rm_set = True avl_resources = self.resource_manager.current_availability self.nonauto_allocator.set_resources(avl_resources) to_dispatch = [] to_reject = [] _to_fill = [] _prev_blocked = None _time_reached = False if self._reserved_slot[0] and self._reserved_slot[0] <= cur_time: _time_reached = True # Tries to allocate the blocked job self._logger.trace('There is a blocked job {} with {}'.format(self._blocked_job_id, self._reserved_slot)) # assert(self._blocked_job_id == queued_jobs[0].id), 'The first element is not the blocked one. ({} != {})'.format(self._blocked_job_id, queued_jobs[0].id) blocked_job = queued_jobs[0] queued_jobs = queued_jobs[1:] allocation = self.nonauto_allocator.allocating_method(blocked_job, cur_time, skip=False) if allocation[-1]: self._logger.trace('{}: {} blocked job can be allocated. Unblocking'.format(cur_time, self._blocked_job_id)) self._blocked_job_id = None self._reserved_slot = (None, []) _prev_blocked = [allocation] else: # There are jobs still using the reserved nodes self._logger.trace('{} job is still blocked. 
Reservation {}'.format(self._blocked_job_id, self._reserved_slot)) # Add the current allocation for the (un)blocked job. to_dispatch += [allocation] if self._blocked_job_id is None and queued_jobs: # Tries to perform a FIFO allocation if there is no blocked job # Returns the (partial) allocation and the idx for the blocked job, also sets the self._blocked_job_id var _allocated_jobs, blocked_idx = self._try_fifo_allocation(queued_jobs, cur_time) # There is a blocked job if not (blocked_idx is None): # If there is no a reservation, calculate it for the blocked job if not self._reserved_slot[0]: blocked_job = queued_jobs[blocked_idx] self._logger.trace('Blocked {} Job: Calculate the reservation'.format(self._blocked_job_id)) # Current reservation (future time, reserved nodes) self._reserved_slot = self._calculate_slot(cur_time, deepcopy(avl_resources), _allocated_jobs[:blocked_idx], _prev_blocked, blocked_job, es_dict) self._logger.trace('Blocked {} Job: Nodes {} are reserved at {}'.format(self._blocked_job_id, self._reserved_slot[1], self._reserved_slot[0])) # Include the blocked job to_dispatch += _allocated_jobs[:blocked_idx + 1] _to_fill = queued_jobs[blocked_idx + 1:] else: to_dispatch += _allocated_jobs else: if not _time_reached: # The blocked job to_dispatch += [(None, self._blocked_job_id, [])] # All the remaining queued jobs _to_fill = queued_jobs[1:] else: # The remaining queued jobs _to_fill = queued_jobs if _to_fill: self._logger.trace('Blocked job {}. {} jobs candidates to fill the gap'.format(self._blocked_job_id, len(_to_fill))) # Filling the gap between cur_time and res_time (reserved_time, reserved_nodes) = self._reserved_slot filling_allocation = self.nonauto_allocator.allocating_method(_to_fill, cur_time, \ reserved_time=reserved_time, reserved_nodes=[], skip=True ) # Include the remaining jobs to_dispatch += filling_allocation return to_dispatch, to_reject def _try_fifo_allocation(self, queued_jobs, cur_time): """ Allocates as many jobs as possible using the FIFO approach. As soon as one allocation fails, all subsequent jobs fail too. Then, the return tuple contains info about the allocated jobs (assigned nodes and such) and also the position of the blocked job. :param queued_jobs: List of job objects :param cur_time: current time :return job allocation, and position of the blocked job in the list """ # Try to allocate jobs as in FIFO _allocated_jobs = self.nonauto_allocator.allocating_method(queued_jobs, cur_time, skip=False) # Check if there is a blocked job (a job without an allocation) blocked_idx = None for i, (_, job_id, allocated_nodes) in enumerate(_allocated_jobs): if not allocated_nodes: self._blocked_job_id = job_id blocked_idx = i break return _allocated_jobs, blocked_idx def _calculate_slot(self, cur_time, avl_resources, decided_allocations, prev_blocked, blocked_job, es_dict): """ Computes a reservation for the blocked job, by releasing incrementally the resources used by the running events and recently allocated jobs. The earliest slot in which blocked_job fits is chosen. :param avl_resources: Actual available resources :param decided_allocations: Allocated jobs on the current iteration. 
:param prev_blocked: Allocation corresponding to the previous blocked job which has been unblocked during this iteration :param blocked_jobs: Event to be fitted in the time slot :param es_dist: Job dictionary :return: a tuple of time of the slot and nodes """ current_allocations = self.resource_manager.current_allocations # Creates a list the jobs sorted by soonest ending time first future_endings = SortedListWithKey(key=lambda x:x[1]) # Running jobs for job_id, resources in current_allocations.items(): future_endings.add((job_id, es_dict[job_id].start_time + es_dict[job_id].expected_duration, resources)) # Previous blocked job has been scheduled if prev_blocked: decided_allocations += prev_blocked # Current allocated job for (_, job_id, nodes) in decided_allocations: _dec_alloc = {} for node in nodes: if not(node in _dec_alloc): _dec_alloc[node] = {k:v for k, v in es_dict[job_id].requested_resources.items()} else: for res, v in es_dict[job_id].requested_resources.items(): _dec_alloc[node][res] += v future_endings.add((job_id, cur_time + es_dict[job_id].expected_duration, _dec_alloc)) _required_alloc = blocked_job.requested_nodes _requested_resources = blocked_job.requested_resources _partial_alloc = {} # Calculate the partial allocation on the current system state for node, resources in avl_resources.items(): new_alloc = min([resources[req] // _requested_resources[req] for req in _requested_resources]) if new_alloc > 0: _partial_alloc[node] = new_alloc # Calculate the partial allocation on the next future endings for (job_id, res_time, used_nodes) in future_endings: for node, used_resources in used_nodes.items(): if not(node in avl_resources): avl_resources[node] = {r:0 for r in _requested_resources} for r, v in used_resources.items(): avl_resources[node][r] += v cur_alloc = _partial_alloc.get(node, 0) new_alloc = min([avl_resources[node][req] // _requested_resources[req] for req in _requested_resources]) _diff = new_alloc - cur_alloc if _diff > 0: _partial_alloc[node] = _partial_alloc.get(node, 0) + _diff # At this point the blocked job can be allocated if sum(_partial_alloc.values()) >= _required_alloc: ctimes = 0 nodes = [] for node, times in _partial_alloc.items(): ctimes += times nodes.append(node) if ctimes >= _required_alloc: break return (res_time, nodes,) raise DispatcherError('Can\'t find the slot.... no end? :(')
""" Must return the full ID of the scheduler, including policy and allocator. :return: the scheduler's id. """ raise NotImplementedError
identifier_body
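The _calculate_slot method above keeps running and newly allocated jobs in a SortedListWithKey ordered by expected end time, so resources can be released chronologically. A small sketch of that ordering; note that current sortedcontainers releases spell the class SortedKeyList, with SortedListWithKey kept as a legacy alias:

# future_endings ordering sketch: entries stay sorted by their end time
# (the second tuple element), so the earliest-ending job is visited first.
from sortedcontainers import SortedKeyList  # SortedListWithKey is the legacy alias

future_endings = SortedKeyList(key=lambda x: x[1])
future_endings.add(("job_b", 200, {"n1": {"core": 2}}))
future_endings.add(("job_a", 100, {"n2": {"core": 4}}))

assert [job_id for job_id, _, _ in future_endings] == ["job_a", "job_b"]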
index.tsx
import * as React from 'react' import { F, Atom } from '@grammarly/focal' import { of } from 'rxjs' let globalCounter = 0 function mkName() { return (++globalCounter).toString() } const El = ({ text = '', ts = '' }) => { return <F.div> El #{text} (render #{ts}) {of(undefined)} </F.div> }
const ElWithHover = ({ text = '' }) => { const ts = mkName() console.log('RENDERED', ts) const hovered = Atom.create(false) return <F.div onMouseEnter={() => hovered.set(true)} onMouseLeave={() => hovered.set(false)} > El #{text} (render #{ts}) is&nbsp; {hovered.view(x => { console.log('VIEW', ts, x) return x ? 'hovered' : 'NOT hovered' })} </F.div> } const MinimalReproduce = () => { const state = Atom.create([0]) return <F.div> <h2>The hover does not work after you add an item</h2> <button onClick={_ => state.modify(s => ([] as number[]).concat(s.concat([s.length])))}> add item </button> {state.view(xs => xs.map((text, i) => <ElWithHover key={i} text={text.toString()} /> ))} <h2>Simplified</h2> {state.view(xs => xs.map((text, i) => { const ts = mkName() return <El key={i} text={text.toString()} ts={ts} /> }))} </F.div> } export const Main = ({ // eslint-disable-next-line @typescript-eslint/no-unused-vars state = Atom.create(0) }) => { return ( <MinimalReproduce /> ) } export default { Component: Main, defaultState: 0 }
random_line_split
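The Focal Atom used in the component above pairs a mutable value with subscriptions, so views react to set() and modify(). A rough Python analogue of that observable-value idea, for illustration only (this is not the @grammarly/focal API):

# Minimal observable value: subscribers are notified on every set()/modify(),
# mirroring the Atom.view(...) pattern in the TSX above.
class Atom:
    def __init__(self, value):
        self._value = value
        self._subscribers = []

    def view(self, fn):
        self._subscribers.append(fn)
        fn(self._value)  # emit the current value on subscription

    def set(self, value):
        self._value = value
        for fn in self._subscribers:
            fn(value)

    def modify(self, fn):
        self.set(fn(self._value))

hovered = Atom(False)
hovered.view(lambda x: print("hovered" if x else "NOT hovered"))  # prints: NOT hovered
hovered.set(True)                                                 # prints: hovered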
index.tsx
import * as React from 'react' import { F, Atom } from '@grammarly/focal' import { of } from 'rxjs' let globalCounter = 0 function mkName()
const El = ({ text = '', ts = '' }) => { return <F.div> El #{text} (render #{ts}) {of(undefined)} </F.div> } const ElWithHover = ({ text = '' }) => { const ts = mkName() console.log('RENDERED', ts) const hovered = Atom.create(false) return <F.div onMouseEnter={() => hovered.set(true)} onMouseLeave={() => hovered.set(false)} > El #{text} (render #{ts}) is&nbsp; {hovered.view(x => { console.log('VIEW', ts, x) return x ? 'hovered' : 'NOT hovered' })} </F.div> } const MinimalReproduce = () => { const state = Atom.create([0]) return <F.div> <h2>The hover does not work after you add an item</h2> <button onClick={_ => state.modify(s => ([] as number[]).concat(s.concat([s.length])))}> add item </button> {state.view(xs => xs.map((text, i) => <ElWithHover key={i} text={text.toString()} /> ))} <h2>Simplified</h2> {state.view(xs => xs.map((text, i) => { const ts = mkName() return <El key={i} text={text.toString()} ts={ts} /> }))} </F.div> } export const Main = ({ // eslint-disable-next-line @typescript-eslint/no-unused-vars state = Atom.create(0) }) => { return ( <MinimalReproduce /> ) } export default { Component: Main, defaultState: 0 }
{ return (++globalCounter).toString() }
identifier_body
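The masked body above is a global render counter used to label renders. For illustration, a Python analogue with itertools.count, which yields the same monotonically increasing IDs without a hand-incremented global:

# Counter-based name generator, analogous to mkName() in the TSX above.
from itertools import count

_counter = count(1)

def mk_name() -> str:
    return str(next(_counter))

assert mk_name() == "1"
assert mk_name() == "2"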
index.tsx
import * as React from 'react' import { F, Atom } from '@grammarly/focal' import { of } from 'rxjs' let globalCounter = 0 function
() { return (++globalCounter).toString() } const El = ({ text = '', ts = '' }) => { return <F.div> El #{text} (render #{ts}) {of(undefined)} </F.div> } const ElWithHover = ({ text = '' }) => { const ts = mkName() console.log('RENDERED', ts) const hovered = Atom.create(false) return <F.div onMouseEnter={() => hovered.set(true)} onMouseLeave={() => hovered.set(false)} > El #{text} (render #{ts}) is&nbsp; {hovered.view(x => { console.log('VIEW', ts, x) return x ? 'hovered' : 'NOT hovered' })} </F.div> } const MinimalReproduce = () => { const state = Atom.create([0]) return <F.div> <h2>The hover does not work after you add an item</h2> <button onClick={_ => state.modify(s => ([] as number[]).concat(s.concat([s.length])))}> add item </button> {state.view(xs => xs.map((text, i) => <ElWithHover key={i} text={text.toString()} /> ))} <h2>Simplified</h2> {state.view(xs => xs.map((text, i) => { const ts = mkName() return <El key={i} text={text.toString()} ts={ts} /> }))} </F.div> } export const Main = ({ // eslint-disable-next-line @typescript-eslint/no-unused-vars state = Atom.create(0) }) => { return ( <MinimalReproduce /> ) } export default { Component: Main, defaultState: 0 }
mkName
identifier_name
upgrade.ts
/* * LiskHQ/lisk-commander * Copyright © 2017–2018 Lisk Foundation * * See the LICENSE file at the top-level directory of this distribution * for licensing information. * * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, * no part of this software, including this file, may be copied, modified, * propagated, or distributed except according to the terms contained in the * LICENSE file. * * Removal or modification of this copyright notice is prohibited. * */ import { flags as flagParser } from '@oclif/command'; import * as fsExtra from 'fs-extra'; import Listr from 'listr'; import semver from 'semver'; import BaseCommand from '../../base'; import { RELEASE_URL } from '../../utils/constants'; import { downloadLiskAndValidate, extract } from '../../utils/download'; import { flags as commonFlags } from '../../utils/flags'; import { isCacheRunning, startCache, stopCache } from '../../utils/node/cache'; import { backupLisk, getVersionToUpgrade, liskTar, upgradeLisk, validateVersion, } from '../../utils/node/commons'; import { getConfig } from '../../utils/node/config'; import { startDatabase, stopDatabase } from '../../utils/node/database'; import { describeApplication, Pm2Env, registerApplication, restartApplication, unRegisterApplication, } from '../../utils/node/pm2'; interface Flags { readonly 'lisk-version': string; } interface Args { readonly name: string; } interface PackageJson { readonly version: string; } export default class UpgradeCommand extends BaseCommand { static args = [ { name: 'name', description: 'Lisk Core installation directory name.', required: true, }, ]; static description = 'Upgrade an instance of Lisk Core to a specified or latest version.'; static examples = [ 'node:upgrade mainnet-latest', 'node:upgrade --lisk-version=2.0.0 mainnet-latest', ]; static flags = { ...BaseCommand.flags, 'lisk-version': flagParser.string({ ...commonFlags.liskVersion, }), }; async run(): Promise<void> { const { args, flags } = this.parse(UpgradeCommand); const { name }: Args = args; const { 'lisk-version': liskVersion } = flags as Flags; const { pm2_env } = await describeApplication(name); const { pm_cwd: installDir, LISK_NETWORK: network } = pm2_env as Pm2Env; const { version: currentVersion } = getConfig( `${installDir}/package.json`, ) as PackageJson; const upgradeVersion: string = await getVersionToUpgrade( network, liskVersion, ); const releaseUrl = `${RELEASE_URL}/${network}/${upgradeVersion}`; const { cacheDir } = this.config; const tasks = new Listr([ { title: 'Validate Version Input', task: async () => { await validateVersion(network, upgradeVersion); if (semver.lte(upgradeVersion, currentVersion)) { throw new Error( `Upgrade version:${upgradeVersion} should be greater than current version: ${currentVersion}`, ); } }, }, { title: 'Stop and Unregister Lisk Core', task: async () => { const isRunning = await isCacheRunning(installDir, network); if (isRunning) { await stopCache(installDir, network); } await stopDatabase(installDir, network); await unRegisterApplication(name); }, }, { title: 'Download, Backup and Install Lisk Core', task: () => new Listr([ { title: `Download Lisk Core: ${upgradeVersion} Release`,
cacheDir, releaseUrl, upgradeVersion, ); }, }, { title: `Backup Lisk Core: ${currentVersion} installed as ${name}`, task: async () => { await backupLisk(installDir); }, }, { title: `Install Lisk Core: ${upgradeVersion}`, task: async () => { fsExtra.ensureDirSync(installDir); await extract(cacheDir, liskTar(upgradeVersion), installDir); }, }, ]), }, { title: `Upgrade Lisk Core from: ${currentVersion} to: ${upgradeVersion}`, task: async () => { await upgradeLisk(installDir, name, network, currentVersion); }, }, { title: `Start Lisk Core: ${upgradeVersion}`, task: async () => { await registerApplication(installDir, network, name); const isRunning = await isCacheRunning(installDir, network); if (!isRunning) { await startCache(installDir, network); } await startDatabase(installDir, network); await restartApplication(name); }, }, ]); await tasks.run(); } }
task: async () => { await downloadLiskAndValidate(
random_line_split
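upgrade.ts above drives the upgrade as a Listr task list: titled steps run sequentially and the first failure aborts the rest. A rough Python sketch of that pattern, illustration only (the real Listr additionally supports nested lists and terminal spinners):

# Sequential task-list runner: each task is a (title, callable) pair;
# any exception stops the remaining steps, as a failing Listr task would.
def run_tasks(tasks):
    for title, task in tasks:
        print("->", title)
        task()

run_tasks([
    ("Validate Version Input", lambda: None),
    ("Stop and Unregister Lisk Core", lambda: None),
])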
upgrade.ts
/* * LiskHQ/lisk-commander * Copyright © 2017–2018 Lisk Foundation * * See the LICENSE file at the top-level directory of this distribution * for licensing information. * * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, * no part of this software, including this file, may be copied, modified, * propagated, or distributed except according to the terms contained in the * LICENSE file. * * Removal or modification of this copyright notice is prohibited. * */ import { flags as flagParser } from '@oclif/command'; import * as fsExtra from 'fs-extra'; import Listr from 'listr'; import semver from 'semver'; import BaseCommand from '../../base'; import { RELEASE_URL } from '../../utils/constants'; import { downloadLiskAndValidate, extract } from '../../utils/download'; import { flags as commonFlags } from '../../utils/flags'; import { isCacheRunning, startCache, stopCache } from '../../utils/node/cache'; import { backupLisk, getVersionToUpgrade, liskTar, upgradeLisk, validateVersion, } from '../../utils/node/commons'; import { getConfig } from '../../utils/node/config'; import { startDatabase, stopDatabase } from '../../utils/node/database'; import { describeApplication, Pm2Env, registerApplication, restartApplication, unRegisterApplication, } from '../../utils/node/pm2'; interface Flags { readonly 'lisk-version': string; } interface Args { readonly name: string; } interface PackageJson { readonly version: string; } export default class UpgradeCommand extends BaseCommand { static args = [ { name: 'name', description: 'Lisk Core installation directory name.', required: true, }, ]; static description = 'Upgrade an instance of Lisk Core to a specified or latest version.'; static examples = [ 'node:upgrade mainnet-latest', 'node:upgrade --lisk-version=2.0.0 mainnet-latest', ]; static flags = { ...BaseCommand.flags, 'lisk-version': flagParser.string({ ...commonFlags.liskVersion, }), }; async run
Promise<void> { const { args, flags } = this.parse(UpgradeCommand); const { name }: Args = args; const { 'lisk-version': liskVersion } = flags as Flags; const { pm2_env } = await describeApplication(name); const { pm_cwd: installDir, LISK_NETWORK: network } = pm2_env as Pm2Env; const { version: currentVersion } = getConfig( `${installDir}/package.json`, ) as PackageJson; const upgradeVersion: string = await getVersionToUpgrade( network, liskVersion, ); const releaseUrl = `${RELEASE_URL}/${network}/${upgradeVersion}`; const { cacheDir } = this.config; const tasks = new Listr([ { title: 'Validate Version Input', task: async () => { await validateVersion(network, upgradeVersion); if (semver.lte(upgradeVersion, currentVersion)) { throw new Error( `Upgrade version:${upgradeVersion} should be greater than current version: ${currentVersion}`, ); } }, }, { title: 'Stop and Unregister Lisk Core', task: async () => { const isRunning = await isCacheRunning(installDir, network); if (isRunning) { await stopCache(installDir, network); } await stopDatabase(installDir, network); await unRegisterApplication(name); }, }, { title: 'Download, Backup and Install Lisk Core', task: () => new Listr([ { title: `Download Lisk Core: ${upgradeVersion} Release`, task: async () => { await downloadLiskAndValidate( cacheDir, releaseUrl, upgradeVersion, ); }, }, { title: `Backup Lisk Core: ${currentVersion} installed as ${name}`, task: async () => { await backupLisk(installDir); }, }, { title: `Install Lisk Core: ${upgradeVersion}`, task: async () => { fsExtra.ensureDirSync(installDir); await extract(cacheDir, liskTar(upgradeVersion), installDir); }, }, ]), }, { title: `Upgrade Lisk Core from: ${currentVersion} to: ${upgradeVersion}`, task: async () => { await upgradeLisk(installDir, name, network, currentVersion); }, }, { title: `Start Lisk Core: ${upgradeVersion}`, task: async () => { await registerApplication(installDir, network, name); const isRunning = await isCacheRunning(installDir, network); if (!isRunning) { await startCache(installDir, network); } await startDatabase(installDir, network); await restartApplication(name); }, }, ]); await tasks.run(); } }
():
identifier_name
upgrade.ts
/* * LiskHQ/lisk-commander * Copyright © 2017–2018 Lisk Foundation * * See the LICENSE file at the top-level directory of this distribution * for licensing information. * * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, * no part of this software, including this file, may be copied, modified, * propagated, or distributed except according to the terms contained in the * LICENSE file. * * Removal or modification of this copyright notice is prohibited. * */ import { flags as flagParser } from '@oclif/command'; import * as fsExtra from 'fs-extra'; import Listr from 'listr'; import semver from 'semver'; import BaseCommand from '../../base'; import { RELEASE_URL } from '../../utils/constants'; import { downloadLiskAndValidate, extract } from '../../utils/download'; import { flags as commonFlags } from '../../utils/flags'; import { isCacheRunning, startCache, stopCache } from '../../utils/node/cache'; import { backupLisk, getVersionToUpgrade, liskTar, upgradeLisk, validateVersion, } from '../../utils/node/commons'; import { getConfig } from '../../utils/node/config'; import { startDatabase, stopDatabase } from '../../utils/node/database'; import { describeApplication, Pm2Env, registerApplication, restartApplication, unRegisterApplication, } from '../../utils/node/pm2'; interface Flags { readonly 'lisk-version': string; } interface Args { readonly name: string; } interface PackageJson { readonly version: string; } export default class UpgradeCommand extends BaseCommand { static args = [ { name: 'name', description: 'Lisk Core installation directory name.', required: true, }, ]; static description = 'Upgrade an instance of Lisk Core to a specified or latest version.'; static examples = [ 'node:upgrade mainnet-latest', 'node:upgrade --lisk-version=2.0.0 mainnet-latest', ]; static flags = { ...BaseCommand.flags, 'lisk-version': flagParser.string({ ...commonFlags.liskVersion, }), }; async run(): Promise<void> { const { args, flags } = this.parse(UpgradeCommand); const { name }: Args = args; const { 'lisk-version': liskVersion } = flags as Flags; const { pm2_env } = await describeApplication(name); const { pm_cwd: installDir, LISK_NETWORK: network } = pm2_env as Pm2Env; const { version: currentVersion } = getConfig( `${installDir}/package.json`, ) as PackageJson; const upgradeVersion: string = await getVersionToUpgrade( network, liskVersion, ); const releaseUrl = `${RELEASE_URL}/${network}/${upgradeVersion}`; const { cacheDir } = this.config; const tasks = new Listr([ { title: 'Validate Version Input', task: async () => { await validateVersion(network, upgradeVersion); if (semver.lte(upgradeVersion, currentVersion)) { throw new Error( `Upgrade version:${upgradeVersion} should be greater than current version: ${currentVersion}`, ); } }, }, { title: 'Stop and Unregister Lisk Core', task: async () => { const isRunning = await isCacheRunning(installDir, network); if (isRunning) { await stopCache(installDir, network); } await stopDatabase(installDir, network); await unRegisterApplication(name); }, }, { title: 'Download, Backup and Install Lisk Core', task: () => new Listr([ { title: `Download Lisk Core: ${upgradeVersion} Release`, task: async () => { await downloadLiskAndValidate( cacheDir, releaseUrl, upgradeVersion, ); }, }, { title: `Backup Lisk Core: ${currentVersion} installed as ${name}`, task: async () => { await backupLisk(installDir); }, }, { title: `Install Lisk Core: ${upgradeVersion}`, task: async () => { fsExtra.ensureDirSync(installDir); await 
extract(cacheDir, liskTar(upgradeVersion), installDir); }, }, ]), }, { title: `Upgrade Lisk Core from: ${currentVersion} to: ${upgradeVersion}`, task: async () => { await upgradeLisk(installDir, name, network, currentVersion); }, }, { title: `Start Lisk Core: ${upgradeVersion}`, task: async () => { await registerApplication(installDir, network, name); const isRunning = await isCacheRunning(installDir, network); if (!isRunning) {
await startDatabase(installDir, network); await restartApplication(name); }, }, ]); await tasks.run(); } }
await startCache(installDir, network); }
conditional_block
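The 'Validate Version Input' task above rejects upgrades unless the target version is strictly greater than the current one, via semver.lte. An equivalent check sketched in Python with the packaging library (an assumption for illustration; the TypeScript code uses the semver package):

# Version guard: raise unless the upgrade strictly moves forward.
from packaging.version import Version

def must_be_newer(upgrade: str, current: str) -> None:
    if Version(upgrade) <= Version(current):
        raise ValueError(
            "Upgrade version: {} should be greater than current version: {}".format(upgrade, current)
        )

must_be_newer("2.0.0", "1.9.3")    # ok
# must_be_newer("1.9.3", "2.0.0")  # would raise ValueError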
assignment-expression.js
'use strict'; module.exports = function (node, scope, environment, utils) { function assignResult(value)
return utils.when(utils.evaluateNode(node.right, scope, environment), function (value) { switch (node.operator) { case '=': assignResult(value); return value; default: throw new Error('Unexpected operator ' + node.operator); } }); };
{ if (node.left.type === 'Identifier') { scope.assign(node.left.name, value); } else if (node.left.type === 'MemberExpression') { if (node.left.computed) { return utils.when(utils.evaluateArray([node.left.object, node.left.property], scope, environment), function (results) { if (results[1] && (results[1].type === 'string' || results[1].type === 'number')) { return results[0].setProperty(results[1].value, value); } }); } } else { throw new Error('Invalid left hand side of assignment ' + node.left.type); } }
identifier_body
assignment-expression.js
'use strict'; module.exports = function (node, scope, environment, utils) { function assignResult(value) { if (node.left.type === 'Identifier') { scope.assign(node.left.name, value); } else if (node.left.type === 'MemberExpression') { if (node.left.computed)
} else { throw new Error('Invalid left hand side of assignment ' + node.left.type); } } return utils.when(utils.evaluateNode(node.right, scope, environment), function (value) { switch (node.operator) { case '=': assignResult(value); return value; default: throw new Error('Unexpected operator ' + node.operator); } }); };
{ return utils.when(utils.evaluateArray([node.left.object, node.left.property], scope, environment), function (results) { if (results[1] && (results[1].type === 'string' || results[1].type === 'number')) { return results[0].setProperty(results[1].value, value); } }); }
conditional_block
assignment-expression.js
'use strict'; module.exports = function (node, scope, environment, utils) { function assignResult(value) { if (node.left.type === 'Identifier') { scope.assign(node.left.name, value); } else if (node.left.type === 'MemberExpression') { if (node.left.computed) { return utils.when(utils.evaluateArray([node.left.object, node.left.property], scope, environment), function (results) { if (results[1] && (results[1].type === 'string' || results[1].type === 'number')) { return results[0].setProperty(results[1].value, value); } }); } } else { throw new Error('Invalid left hand side of assignment ' + node.left.type); } }
default: throw new Error('Unexpected operator ' + node.operator); } }); };
return utils.when(utils.evaluateNode(node.right, scope, environment), function (value) { switch (node.operator) { case '=': assignResult(value); return value;
random_line_split
assignment-expression.js
'use strict'; module.exports = function (node, scope, environment, utils) { function
(value) { if (node.left.type === 'Identifier') { scope.assign(node.left.name, value); } else if (node.left.type === 'MemberExpression') { if (node.left.computed) { return utils.when(utils.evaluateArray([node.left.object, node.left.property], scope, environment), function (results) { if (results[1] && (results[1].type === 'string' || results[1].type === 'number')) { return results[0].setProperty(results[1].value, value); } }); } } else { throw new Error('Invalid left hand side of assignment ' + node.left.type); } } return utils.when(utils.evaluateNode(node.right, scope, environment), function (value) { switch (node.operator) { case '=': assignResult(value); return value; default: throw new Error('Unexpected operator ' + node.operator); } }); };
assignResult
identifier_name
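For orientation, this handler receives ESTree AssignmentExpression nodes. A minimal sketch of the node shapes it routes on, with the evaluator's scope/environment/utils plumbing omitted since those interfaces are internal to this codebase; the parser named is an assumption:

// AssignmentExpression node for `x = 1`, as produced by any
// ESTree-compatible parser (e.g. acorn or esprima).
const node = {
  type: 'AssignmentExpression',
  operator: '=',
  left: { type: 'Identifier', name: 'x' },  // routed through scope.assign
  right: { type: 'Literal', value: 1 },     // evaluated first via evaluateNode
};

// For `obj[key] = 1` the left side is instead a computed MemberExpression,
// which the handler resolves with evaluateArray before calling setProperty:
const memberNode = {
  type: 'AssignmentExpression',
  operator: '=',
  left: {
    type: 'MemberExpression',
    computed: true,
    object: { type: 'Identifier', name: 'obj' },
    property: { type: 'Identifier', name: 'key' },
  },
  right: { type: 'Literal', value: 1 },
};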
bind-to-item.tsx
import * as React from "react"; import { database } from "./init"; import { isEqual, difference } from "lodash"; /// <reference path="../react.d.ts" /> const enum Status { Pending, LoadedFromLocalStorage, LoadedFromFirebase } interface IProps<P> { firebaseRef: string; cacheLocally?: boolean; loader?: (props: P) => JSX.Element; } interface IState<T>{ status: Status; data?: T; } type InnerProps<T, P> = { data: T} & P; type OuterProps<P> = { firebaseRef: string; cacheLocally?: boolean; storage?: Storage; loader?: (props: P) => JSX.Element; debug?: boolean; } & P; interface Storage { getItem(key: string): string; setItem(key: string, value: string); } export function bindToItem<T, P>(innerKlass: React.ComponentClass<{data: T} & P>): React.ComponentClass<OuterProps<P>> { class BindToItem extends React.Component<OuterProps<P>, IState<T>> { private static propKeys = ["debug", "firebaseRef", "cacheLocally", "storage", "loader"]; private unbind: () => void; constructor(props: OuterProps<P>) { super(props); this.reset(props, false); } public componentWillReceiveProps(nextProps: OuterProps<P>) { // reset if reference changes if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Resetting since Firebase reference has changed"); this.reset(nextProps, true); } } public shouldComponentUpdate(nextProps: OuterProps<P>, nextState: IState<T>): boolean { // Yes if reference has changed if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Updating since Firebase reference has changed"); return true; } // Yes if finished loading if (this.state.status === Status.Pending && nextState.status !== Status.Pending) { this.debug("Updating since status has changed"); return true; } // Yes if user-supplied props have changed if (!isEqual(this.buildOtherProps(this.props), this.buildOtherProps(nextProps))) { this.debug("Updating since user-supplied props have changed"); return true; } // Otherwise do deep comparison of data if (!isEqual(this.state.data, nextState.data)) { this.debug("Updating since data has changed"); return true; } return false; } public render(): JSX.Element { this.debug("Rendering"); const innerProps = this.buildInnerProps(this.props); if (this.state.status === Status.Pending) { if (this.props.loader) { return this.props.loader(innerProps); } return null; } return React.createElement<InnerProps<T, P>>(innerKlass, innerProps); } public componentWillUnmount() { this.debug("Unmounting"); if (this.unbind) { this.debug("Unbinding Firebase listener"); this.unbind(); } } private reset(props: OuterProps<P>, useSetState?: boolean) { const state: IState<T> = { status: Status.Pending }; if (props.cacheLocally) { this.debug("Checking storage for cached data"); const localStorageData = checkStorage<T>(props.firebaseRef, props.storage); if (localStorageData) { this.debug("Cache hit"); state.data = localStorageData; state.status = Status.LoadedFromLocalStorage; } } if (this.unbind) { this.debug("Unbinding deprecated Firebase listener"); this.unbind(); this.unbind = undefined; } const callback = this.updateData.bind(this); const reference = database().ref(props.firebaseRef); this.debug("Registering Firebase listener"); reference.on("value", callback); this.unbind = () => { reference.off("value", callback); }; if (useSetState) { this.setState(state); } else { this.state = state;
} private buildOtherProps(outerProps: OuterProps<P>): P { const otherProps = {} as P; for (const id of difference(Object.keys(outerProps), BindToItem.propKeys)) { otherProps[id] = outerProps[id]; } return otherProps; } private buildInnerProps(outerProps: OuterProps<P>): InnerProps<T, P> { const innerProps = this.buildOtherProps(outerProps) as InnerProps<T, P> ; innerProps.data = this.state.data; return innerProps; } private updateData(snapshot: firebase.database.DataSnapshot) { const val = snapshot.val() as T; this.setState({ data: val, status: Status.LoadedFromFirebase }); if (this.props.cacheLocally) { saveToStorage(this.props.firebaseRef, val, this.props.storage); } } private debug(message: string) { if (this.props.debug) { console.log(`bindToItem[${this.props.firebaseRef}]: ${message}`); } } }; return BindToItem; } function localStorageKey(firebaseRef: string): string { return `firebase-cache-item:${firebaseRef}`; } function saveToStorage<T>(firebaseRef: string, data: T, customStorage?: Storage) { const storage = customStorage || window.localStorage; try { storage.setItem(localStorageKey(firebaseRef), JSON.stringify(data)); } catch (err) { console.error(err.message); } } function checkStorage<T>(firebaseRef: string, customStorage?: Storage): T { const storage = customStorage || window.localStorage; const item = storage.getItem(localStorageKey(firebaseRef)); if (item) { return JSON.parse(item); } }
}
random_line_split
bind-to-item.tsx
import * as React from "react"; import { database } from "./init"; import { isEqual, difference } from "lodash"; /// <reference path="../react.d.ts" /> const enum Status { Pending, LoadedFromLocalStorage, LoadedFromFirebase } interface IProps<P> { firebaseRef: string; cacheLocally?: boolean; loader?: (props: P) => JSX.Element; } interface IState<T>{ status: Status; data?: T; } type InnerProps<T, P> = { data: T} & P; type OuterProps<P> = { firebaseRef: string; cacheLocally?: boolean; storage?: Storage; loader?: (props: P) => JSX.Element; debug?: boolean; } & P; interface Storage { getItem(key: string): string; setItem(key: string, value: string); } export function bindToItem<T, P>(innerKlass: React.ComponentClass<{data: T} & P>): React.ComponentClass<OuterProps<P>> { class BindToItem extends React.Component<OuterProps<P>, IState<T>> { private static propKeys = ["debug", "firebaseRef", "cacheLocally", "storage", "loader"]; private unbind: () => void; constructor(props: OuterProps<P>) { super(props); this.reset(props, false); } public componentWillReceiveProps(nextProps: OuterProps<P>) { // reset if reference changes if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Resetting since Firebase reference has changed"); this.reset(nextProps, true); } } public shouldComponentUpdate(nextProps: OuterProps<P>, nextState: IState<T>): boolean { // Yes if reference has changed if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Updating since Firebase reference has changed"); return true; } // Yes if finished loading if (this.state.status === Status.Pending && nextState.status !== Status.Pending) { this.debug("Updating since status has changed"); return true; } // Yes if user-supplied props have changed if (!isEqual(this.buildOtherProps(this.props), this.buildOtherProps(nextProps))) { this.debug("Updating since user-supplied props have changed"); return true; } // Otherwise do deep comparison of data if (!isEqual(this.state.data, nextState.data)) { this.debug("Updating since data has changed"); return true; } return false; } public render(): JSX.Element { this.debug("Rendering"); const innerProps = this.buildInnerProps(this.props); if (this.state.status === Status.Pending) { if (this.props.loader) { return this.props.loader(innerProps); } return null; } return React.createElement<InnerProps<T, P>>(innerKlass, innerProps); } public componentWillUnmount() { this.debug("Unmounting"); if (this.unbind) { this.debug("Unbinding Firebase listener"); this.unbind(); } } private reset(props: OuterProps<P>, useSetState?: boolean) { const state: IState<T> = { status: Status.Pending }; if (props.cacheLocally) { this.debug("Checking storage for cached data"); const localStorageData = checkStorage<T>(props.firebaseRef, props.storage); if (localStorageData) { this.debug("Cache hit"); state.data = localStorageData; state.status = Status.LoadedFromLocalStorage; } } if (this.unbind) { this.debug("Unbinding deprecated Firebase listener"); this.unbind(); this.unbind = undefined; } const callback = this.updateData.bind(this); const reference = database().ref(props.firebaseRef); this.debug("Registering Firebase listener"); reference.on("value", callback); this.unbind = () => { reference.off("value", callback); }; if (useSetState) { this.setState(state); } else { this.state = state; } } private buildOtherProps(outerProps: OuterProps<P>): P { const otherProps = {} as P; for (const id of difference(Object.keys(outerProps), BindToItem.propKeys)) { otherProps[id] = outerProps[id]; } return
otherProps; } private buildInnerProps(outerProps: OuterProps<P>): InnerProps<T, P>
private updateData(snapshot: firebase.database.DataSnapshot) { const val = snapshot.val() as T; this.setState({ data: val, status: Status.LoadedFromFirebase }); if (this.props.cacheLocally) { saveToStorage(this.props.firebaseRef, val, this.props.storage); } } private debug(message: string) { if (this.props.debug) { console.log(`bindToItem[${this.props.firebaseRef}]: ${message}`); } } }; return BindToItem; } function localStorageKey(firebaseRef: string): string { return `firebase-cache-item:${firebaseRef}`; } function saveToStorage<T>(firebaseRef: string, data: T, customStorage?: Storage) { const storage = customStorage || window.localStorage; try { storage.setItem(localStorageKey(firebaseRef), JSON.stringify(data)); } catch (err) { console.error(err.message); } } function checkStorage<T>(firebaseRef: string, customStorage?: Storage): T { const storage = customStorage || window.localStorage; const item = storage.getItem(localStorageKey(firebaseRef)); if (item) { return JSON.parse(item); } }
{ const innerProps = this.buildOtherProps(outerProps) as InnerProps<T, P> ; innerProps.data = this.state.data; return innerProps; }
identifier_body
bind-to-item.tsx
import * as React from "react"; import { database } from "./init"; import { isEqual, difference } from "lodash"; /// <reference path="../react.d.ts" /> const enum Status { Pending, LoadedFromLocalStorage, LoadedFromFirebase } interface IProps<P> { firebaseRef: string; cacheLocally?: boolean; loader?: (props: P) => JSX.Element; } interface IState<T>{ status: Status; data?: T; } type InnerProps<T, P> = { data: T} & P; type OuterProps<P> = { firebaseRef: string; cacheLocally?: boolean; storage?: Storage; loader?: (props: P) => JSX.Element; debug?: boolean; } & P; interface Storage { getItem(key: string): string; setItem(key: string, value: string); } export function bindToItem<T, P>(innerKlass: React.ComponentClass<{data: T} & P>): React.ComponentClass<OuterProps<P>> { class BindToItem extends React.Component<OuterProps<P>, IState<T>> { private static propKeys = ["debug", "firebaseRef", "cacheLocally", "storage", "loader"]; private unbind: () => void; constructor(props: OuterProps<P>) { super(props); this.reset(props, false); } public componentWillReceiveProps(nextProps: OuterProps<P>) { // reset if reference changes if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Resetting since Firebase reference has changed"); this.reset(nextProps, true); } } public shouldComponentUpdate(nextProps: OuterProps<P>, nextState: IState<T>): boolean { // Yes if reference has changed if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Updating since Firebase reference has changed"); return true; } // Yes if finished loading if (this.state.status === Status.Pending && nextState.status !== Status.Pending) { this.debug("Updating since status has changed"); return true; } // Yes if user-supplied props have changed if (!isEqual(this.buildOtherProps(this.props), this.buildOtherProps(nextProps))) { this.debug("Updating since user-supplied props have changed"); return true; } // Otherwise do deep comparison of data if (!isEqual(this.state.data, nextState.data)) { this.debug("Updating since data has changed"); return true; } return false; } public render(): JSX.Element { this.debug("Rendering"); const innerProps = this.buildInnerProps(this.props); if (this.state.status === Status.Pending) { if (this.props.loader) { return this.props.loader(innerProps); } return null; } return React.createElement<InnerProps<T, P>>(innerKlass, innerProps); } public componentWillUnmount() { this.debug("Unmounting"); if (this.unbind) { this.debug("Unbinding Firebase listener"); this.unbind(); } } private reset(props: OuterProps<P>, useSetState?: boolean) { const state: IState<T> = { status: Status.Pending }; if (props.cacheLocally) { this.debug("Checking storage for cached data"); const localStorageData = checkStorage<T>(props.firebaseRef, props.storage); if (localStorageData) { this.debug("Cache hit"); state.data = localStorageData; state.status = Status.LoadedFromLocalStorage; } } if (this.unbind) { this.debug("Unbinding deprecated Firebase listener"); this.unbind(); this.unbind = undefined; } const callback = this.updateData.bind(this); const reference = database().ref(props.firebaseRef); this.debug("Registering Firebase listener"); reference.on("value", callback); this.unbind = () => { reference.off("value", callback); }; if (useSetState)
else { this.state = state; } } private buildOtherProps(outerProps: OuterProps<P>): P { const otherProps = {} as P; for (const id of difference(Object.keys(outerProps), BindToItem.propKeys)) { otherProps[id] = outerProps[id]; } return otherProps; } private buildInnerProps(outerProps: OuterProps<P>): InnerProps<T, P> { const innerProps = this.buildOtherProps(outerProps) as InnerProps<T, P> ; innerProps.data = this.state.data; return innerProps; } private updateData(snapshot: firebase.database.DataSnapshot) { const val = snapshot.val() as T; this.setState({ data: val, status: Status.LoadedFromFirebase }); if (this.props.cacheLocally) { saveToStorage(this.props.firebaseRef, val, this.props.storage); } } private debug(message: string) { if (this.props.debug) { console.log(`bindToItem[${this.props.firebaseRef}]: ${message}`); } } }; return BindToItem; } function localStorageKey(firebaseRef: string): string { return `firebase-cache-item:${firebaseRef}`; } function saveToStorage<T>(firebaseRef: string, data: T, customStorage?: Storage) { const storage = customStorage || window.localStorage; try { storage.setItem(localStorageKey(firebaseRef), JSON.stringify(data)); } catch (err) { console.error(err.message); } } function checkStorage<T>(firebaseRef: string, customStorage?: Storage): T { const storage = customStorage || window.localStorage; const item = storage.getItem(localStorageKey(firebaseRef)); if (item) { return JSON.parse(item); } }
{ this.setState(state); }
conditional_block
bind-to-item.tsx
import * as React from "react"; import { database } from "./init"; import { isEqual, difference } from "lodash"; /// <reference path="../react.d.ts" /> const enum Status { Pending, LoadedFromLocalStorage, LoadedFromFirebase } interface IProps<P> { firebaseRef: string; cacheLocally?: boolean; loader?: (props: P) => JSX.Element; } interface IState<T>{ status: Status; data?: T; } type InnerProps<T, P> = { data: T} & P; type OuterProps<P> = { firebaseRef: string; cacheLocally?: boolean; storage?: Storage; loader?: (props: P) => JSX.Element; debug?: boolean; } & P; interface Storage { getItem(key: string): string; setItem(key: string, value: string); } export function bindToItem<T, P>(innerKlass: React.ComponentClass<{data: T} & P>): React.ComponentClass<OuterProps<P>> { class BindToItem extends React.Component<OuterProps<P>, IState<T>> { private static propKeys = ["debug", "firebaseRef", "cacheLocally", "storage", "loader"]; private unbind: () => void;
(props: OuterProps<P>) { super(props); this.reset(props, false); } public componentWillReceiveProps(nextProps: OuterProps<P>) { // reset if reference changes if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Resetting since Firebase reference has changed"); this.reset(nextProps, true); } } public shouldComponentUpdate(nextProps: OuterProps<P>, nextState: IState<T>): boolean { // Yes if reference has changed if (this.props.firebaseRef !== nextProps.firebaseRef) { this.debug("Updating since Firebase reference has changed"); return true; } // Yes if finished loading if (this.state.status === Status.Pending && nextState.status !== Status.Pending) { this.debug("Updating since status has changed"); return true; } // Yes if user-supplied props have changed if (!isEqual(this.buildOtherProps(this.props), this.buildOtherProps(nextProps))) { this.debug("Updating since user-supplied props have changed"); return true; } // Otherwise do deep comparison of data if (!isEqual(this.state.data, nextState.data)) { this.debug("Updating since data has changed"); return true; } return false; } public render(): JSX.Element { this.debug("Rendering"); const innerProps = this.buildInnerProps(this.props); if (this.state.status === Status.Pending) { if (this.props.loader) { return this.props.loader(innerProps); } return null; } return React.createElement<InnerProps<T, P>>(innerKlass, innerProps); } public componentWillUnmount() { this.debug("Unmounting"); if (this.unbind) { this.debug("Unbinding Firebase listener"); this.unbind(); } } private reset(props: OuterProps<P>, useSetState?: boolean) { const state: IState<T> = { status: Status.Pending }; if (props.cacheLocally) { this.debug("Checking storage for cached data"); const localStorageData = checkStorage<T>(props.firebaseRef, props.storage); if (localStorageData) { this.debug("Cache hit"); state.data = localStorageData; state.status = Status.LoadedFromLocalStorage; } } if (this.unbind) { this.debug("Unbinding deprecated Firebase listener"); this.unbind(); this.unbind = undefined; } const callback = this.updateData.bind(this); const reference = database().ref(props.firebaseRef); this.debug("Registering Firebase listener"); reference.on("value", callback); this.unbind = () => { reference.off("value", callback); }; if (useSetState) { this.setState(state); } else { this.state = state; } } private buildOtherProps(outerProps: OuterProps<P>): P { const otherProps = {} as P; for (const id of difference(Object.keys(outerProps), BindToItem.propKeys)) { otherProps[id] = outerProps[id]; } return otherProps; } private buildInnerProps(outerProps: OuterProps<P>): InnerProps<T, P> { const innerProps = this.buildOtherProps(outerProps) as InnerProps<T, P> ; innerProps.data = this.state.data; return innerProps; } private updateData(snapshot: firebase.database.DataSnapshot) { const val = snapshot.val() as T; this.setState({ data: val, status: Status.LoadedFromFirebase }); if (this.props.cacheLocally) { saveToStorage(this.props.firebaseRef, val, this.props.storage); } } private debug(message: string) { if (this.props.debug) { console.log(`bindToItem[${this.props.firebaseRef}]: ${message}`); } } }; return BindToItem; } function localStorageKey(firebaseRef: string): string { return `firebase-cache-item:${firebaseRef}`; } function saveToStorage<T>(firebaseRef: string, data: T, customStorage?: Storage) { const storage = customStorage || window.localStorage; try { storage.setItem(localStorageKey(firebaseRef), JSON.stringify(data)); } catch (err) {
console.error(err.message); } } function checkStorage<T>(firebaseRef: string, customStorage?: Storage): T { const storage = customStorage || window.localStorage; const item = storage.getItem(localStorageKey(firebaseRef)); if (item) { return JSON.parse(item); } }
constructor
identifier_name
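A usage sketch for the bindToItem HOC defined above; the User shape, Profile component, greeting prop, and Firebase path are hypothetical, and the import path assumes the module layout shown:

import * as React from "react";
import { bindToItem } from "./bind-to-item";

interface User { name: string; }

// Inner component: receives `data` injected by the HOC plus its own props.
class Profile extends React.Component<{ data: User; greeting: string }, {}> {
  public render() {
    return <div>{this.props.greeting}, {this.props.data.name}</div>;
  }
}

const BoundProfile = bindToItem<User, { greeting: string }>(Profile);

// firebaseRef, cacheLocally, storage, loader and debug are consumed by the
// wrapper (see propKeys); everything else is forwarded to Profile.
const app = (
  <BoundProfile
    firebaseRef="users/123"
    cacheLocally={true}
    greeting="Hello"
    loader={() => <div>Loading...</div>}
  />
);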
build.rs
// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). use prost_build::Config; fn
() -> Result<(), Box<dyn std::error::Error>> { let mut config = Config::new(); config.bytes(&["."]); tonic_build::configure() .build_client(true) .build_server(true) .compile_with_config( config, &[ "protos/bazelbuild_remote-apis/build/bazel/remote/execution/v2/remote_execution.proto", "protos/bazelbuild_remote-apis/build/bazel/semver/semver.proto", "protos/buildbarn/cas.proto", "protos/googleapis/google/bytestream/bytestream.proto", "protos/googleapis/google/rpc/code.proto", "protos/googleapis/google/rpc/error_details.proto", "protos/googleapis/google/rpc/status.proto", "protos/googleapis/google/longrunning/operations.proto", "protos/standard/google/protobuf/empty.proto", ], &[ "protos/bazelbuild_remote-apis", "protos/buildbarn", "protos/googleapis", "protos/standard", ], )?; Ok(()) }
main
identifier_name
build.rs
// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). use prost_build::Config; fn main() -> Result<(), Box<dyn std::error::Error>> { let mut config = Config::new(); config.bytes(&["."]); tonic_build::configure() .build_client(true) .build_server(true) .compile_with_config( config, &[ "protos/bazelbuild_remote-apis/build/bazel/remote/execution/v2/remote_execution.proto", "protos/bazelbuild_remote-apis/build/bazel/semver/semver.proto", "protos/buildbarn/cas.proto", "protos/googleapis/google/bytestream/bytestream.proto", "protos/googleapis/google/rpc/code.proto", "protos/googleapis/google/rpc/error_details.proto", "protos/googleapis/google/rpc/status.proto", "protos/googleapis/google/longrunning/operations.proto", "protos/standard/google/protobuf/empty.proto", ], &[ "protos/bazelbuild_remote-apis", "protos/buildbarn", "protos/googleapis", "protos/standard",
)?; Ok(()) }
],
random_line_split
build.rs
// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). use prost_build::Config; fn main() -> Result<(), Box<dyn std::error::Error>>
{ let mut config = Config::new(); config.bytes(&["."]); tonic_build::configure() .build_client(true) .build_server(true) .compile_with_config( config, &[ "protos/bazelbuild_remote-apis/build/bazel/remote/execution/v2/remote_execution.proto", "protos/bazelbuild_remote-apis/build/bazel/semver/semver.proto", "protos/buildbarn/cas.proto", "protos/googleapis/google/bytestream/bytestream.proto", "protos/googleapis/google/rpc/code.proto", "protos/googleapis/google/rpc/error_details.proto", "protos/googleapis/google/rpc/status.proto", "protos/googleapis/google/longrunning/operations.proto", "protos/standard/google/protobuf/empty.proto", ], &[ "protos/bazelbuild_remote-apis", "protos/buildbarn", "protos/googleapis", "protos/standard", ], )?; Ok(()) }
identifier_body
lib.rs
extern crate httparse; extern crate hyper; extern crate mio; extern crate netbuf; extern crate rotor; extern crate unicase; extern crate url; extern crate time; extern crate multimap; use rotor::transports::{accept, stream}; pub use hyper::method::Method; pub use hyper::status::StatusCode; pub use hyper::version::HttpVersion; pub use mio::{EventLoop}; pub use mio::tcp::{TcpListener, TcpStream}; pub use rotor::Handler as EventHandler; pub use url::Url; pub use error::{Error, Result}; pub use headers::{IterListHeader, Headers}; pub use http1::Handler; pub use message::Message; pub use request::Request; pub use response::Response; mod error; mod headers;
pub type HttpServer<C, R> = accept::Serve<C, TcpListener, stream::Stream<C, TcpStream, http1::Client<C, R>>>;
pub mod http1; mod message; mod request; mod response;
random_line_split
evaluateXPathToStrings.ts
import IDomFacade from './domFacade/IDomFacade'; import evaluateXPath, { EvaluableExpression } from './evaluateXPath'; import { Options } from './types/Options'; /** * Evaluates an XPath on the given contextItem. Returns the string results, as if each item in the result is wrapped in string(...).
* @param contextItem - The node from which to run the XPath. * @param domFacade - The domFacade (or DomFacade like interface) for retrieving relations. * @param variables - Extra variables (name to value). Values can be number, string, boolean, nodes or object literals and arrays. * @param options - Extra options for evaluating this XPath. * * @returns The string results. */ export default function evaluateXPathToStrings( selector: EvaluableExpression, contextItem?: any | null, domFacade?: IDomFacade | null, variables?: { [s: string]: any } | null, options?: Options | null ): string[] { return evaluateXPath( selector, contextItem, domFacade, variables, evaluateXPath.STRINGS_TYPE, options ); }
* * @public * * @param selector - The selector to execute. Supports XPath 3.1.
random_line_split
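A usage sketch for the wrapper above, assuming a DOM document is in scope (a browser document or a shim such as slimdom); the XPath expressions and the variable name are illustrative:

import evaluateXPathToStrings from './evaluateXPathToStrings';

// Each matched item is converted to a string, so the result is a string[].
const titles: string[] = evaluateXPathToStrings('//title/text()', document);

// Variables are passed by name and referenced with $ inside the expression.
const names: string[] = evaluateXPathToStrings(
  '//item[@lang = $lang]/@name',
  document,
  null,
  { lang: 'en' }
);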