file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
tables.js
|
import DB from '../db';
import * as types from '../constants/tablesConstants';
import { stopFetching, internalInitTable } from './currentTable';
export function setCurrentTable(tableName) {
return {
type: types.SET_CURRENT_TABLE,
tableName
};
}
export function changeTableName(newTableName) {
return {
type: types.CHANGE_TABLE_NAME,
newTableName
};
}
export function createTable(tableName, i = -1) {
return dispatch => new Promise((resolve, reject) => {
// eslint-disable-next-line no-param-reassign
DB.createTable(tableName)
.then(
() => {
dispatch({
type: types.CREATE_TABLE,
tableName
});
resolve(tableName);
},
(error) => {
// if tableName is occupied it sends reject with
// incremented counter to pick new table name
// recursively
if (error.search('already exists')) {
const j = i + 1;
reject(j);
}
}
);
});
}
export function dropTable(tableName) {
return (dispatch) => {
dispatch({
type: 'tables/DROP_TABLE'
});
DB.dropTable(tableName);
};
}
export function truncateTable(tableName, restartIdentity) {
return (dispatch) => {
dispatch({
type: 'tables/TRUNCATE_TABLE'
});
DB.truncateTable(tableName, restartIdentity);
};
}
function internalGetTables(dispatch, clear) {
return new Promise((resolve, reject) => {
if (clear)
|
DB.getTables()
.then(
(tables) => {
if (tables.length) {
return DB.getTableOid(tables);
}
return tables;
},
(error) => {
reject(error);
}
)
.then(
(tables) => {
if (tables.length) {
return DB.getForeignKeys(tables);
}
dispatch(stopFetching());
return tables;
}
)
.then(
(tables) => {
dispatch({
type: types.GET_TABLES,
tables
});
resolve(tables.length ? tables[0].table_name : '');
}
);
});
}
export function getTables(clear = undefined) {
return dispatch => (internalGetTables(dispatch, clear));
}
export function reloadTables() {
return (dispatch, getState) => {
const currentTableState = getState().currentTable;
const tableName = currentTableState.tableName;
dispatch({ type: types.CLEAR_TABLES });
internalGetTables(dispatch)
.then(
() => {
if (tableName) {
dispatch({
type: types.SET_CURRENT_TABLE,
tableName
});
const page = currentTableState.page;
const order = currentTableState.order;
const filters = currentTableState.filters;
internalInitTable(dispatch, getState, { tableName, page, order, filters });
}
}
);
};
}
export function clearTables() {
return {
type: types.CLEAR_TABLES
};
}
export function searchTables(keyword) {
return {
type: types.SEARCH_TABLES,
keyword
};
}
|
{
dispatch({ type: types.GET_TABLES, tables: [] });
}
|
conditional_block
|
tables.js
|
import DB from '../db';
import * as types from '../constants/tablesConstants';
import { stopFetching, internalInitTable } from './currentTable';
export function
|
(tableName) {
return {
type: types.SET_CURRENT_TABLE,
tableName
};
}
export function changeTableName(newTableName) {
return {
type: types.CHANGE_TABLE_NAME,
newTableName
};
}
export function createTable(tableName, i = -1) {
return dispatch => new Promise((resolve, reject) => {
// eslint-disable-next-line no-param-reassign
DB.createTable(tableName)
.then(
() => {
dispatch({
type: types.CREATE_TABLE,
tableName
});
resolve(tableName);
},
(error) => {
// if tableName is occupied it sends reject with
// incremented counter to pick new table name
// recursively
if (error.search('already exists')) {
const j = i + 1;
reject(j);
}
}
);
});
}
export function dropTable(tableName) {
return (dispatch) => {
dispatch({
type: 'tables/DROP_TABLE'
});
DB.dropTable(tableName);
};
}
export function truncateTable(tableName, restartIdentity) {
return (dispatch) => {
dispatch({
type: 'tables/TRUNCATE_TABLE'
});
DB.truncateTable(tableName, restartIdentity);
};
}
function internalGetTables(dispatch, clear) {
return new Promise((resolve, reject) => {
if (clear) {
dispatch({ type: types.GET_TABLES, tables: [] });
}
DB.getTables()
.then(
(tables) => {
if (tables.length) {
return DB.getTableOid(tables);
}
return tables;
},
(error) => {
reject(error);
}
)
.then(
(tables) => {
if (tables.length) {
return DB.getForeignKeys(tables);
}
dispatch(stopFetching());
return tables;
}
)
.then(
(tables) => {
dispatch({
type: types.GET_TABLES,
tables
});
resolve(tables.length ? tables[0].table_name : '');
}
);
});
}
export function getTables(clear = undefined) {
return dispatch => (internalGetTables(dispatch, clear));
}
export function reloadTables() {
return (dispatch, getState) => {
const currentTableState = getState().currentTable;
const tableName = currentTableState.tableName;
dispatch({ type: types.CLEAR_TABLES });
internalGetTables(dispatch)
.then(
() => {
if (tableName) {
dispatch({
type: types.SET_CURRENT_TABLE,
tableName
});
const page = currentTableState.page;
const order = currentTableState.order;
const filters = currentTableState.filters;
internalInitTable(dispatch, getState, { tableName, page, order, filters });
}
}
);
};
}
export function clearTables() {
return {
type: types.CLEAR_TABLES
};
}
export function searchTables(keyword) {
return {
type: types.SEARCH_TABLES,
keyword
};
}
|
setCurrentTable
|
identifier_name
|
manager.py
|
import os
import stepper
import time
import random
import thermo
import threading
import traceback
import logging
import states
import PID
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class TempLog(object):
def __init__(self, history, interval=60, suffix=""): #save data every 60 seconds
import paths
self.history = history
fname = time.strftime('%Y-%m-%d_%I:%M%P')
if len(suffix) > 0:
suffix = "_"+suffix
self.fname = os.path.join(paths.log_path, fname+suffix+".log")
with open(self.fname, 'w') as fp:
fp.write("time\ttemp\n")
for t, temp in history:
fp.write("%f\t%f\n"%(t, temp))
self.next = time.time() + interval
self.interval = interval
self._buffer = []
def __iter__(self):
return iter(self.history)
def append(self, data):
self.history.append(data)
self._buffer.append(data)
if time.time() > self.next:
with open(self.fname, 'a') as fp:
for t, temp in self._buffer:
fp.write("%f\t%f\n"%(t, temp))
self._buffer = []
self.next = time.time() + self.interval
class Manager(threading.Thread):
def __init__(self, start=states.Idle, simulate=False):
"""
Implement a state machine that cycles through States
"""
super(Manager, self).__init__()
self._send = None
if simulate:
self.regulator = stepper.Regulator(simulate=simulate)
self.therm = thermo.Simulate(regulator=self.regulator)
else:
self.regulator = stepper.Breakout(0x08)
self.therm = thermo.Breakout(0x08)
self.state = start(self)
self.state_change = threading.Event()
self.running = True
self.start()
def notify(self, data):
if self._send is not None:
try:
self._send(data)
except:
pass
else:
logger.info("No notifier set, ignoring message: %s"%data)
def __getattr__(self, name):
"""Mutates the manager to return State actions
If the requested attribute is a function, wrap the function
such that returned obejcts which are States indicate a state change
"""
attr = getattr(self.state, name)
if hasattr(attr, "__call__"):
def func(*args, **kwargs):
self._change_state(attr(*args, **kwargs))
return func
return attr
def _change_state(self, output):
if isinstance(output, type) and issubclass(output, states.State) :
self.state = output(self)
self.state_change.set()
self.notify(dict(type="state", state=output.__name__))
logger.info("Switching to state '%s'"%output.__name__)
elif isinstance(output, tuple) and issubclass(output[0], states.State):
newstate, kwargs = output
self.state = newstate(self, **kwargs)
self.notify(dict(type="state", state=newstate.__name__))
logger.info("Switching to state '%s'"%newstate.__name__)
elif isinstance(output, dict) and "type" in output:
self.notify(output)
elif output is not None:
logger.warn("Unknown state output: %r"%output)
def run(self):
while self.running:
self._change_state(self.state.run())
def manager_stop(self):
self.running = False
self.state_change.set()
class Profile(threading.Thread):
"""Performs the PID loop required for feedback control"""
def __init__(self, schedule, therm, regulator, interval=1, start_time=None, callback=None,
Kp=.03, Ki=.015, Kd=.001):
super(Profile, self).__init__()
self.daemon = True
self.schedule = schedule
self.therm = therm
self.regulator = regulator
self.interval = interval
self.start_time = start_time
if start_time is None:
self.start_time = time.time()
self.pid = PID.PID(Kp, Ki, Kd)
self.callback = callback
self.running = True
self.duty_cycle = False
self.start()
@property
def elapsed(self):
|
@property
def completed(self):
return self.elapsed > self.schedule[-1][0]
def stop(self):
self.running = False
def run(self):
_next = time.time()+self.interval
while not self.completed and self.running:
ts = self.elapsed
#find epoch
for i in range(len(self.schedule)-1):
if self.schedule[i][0] < ts < self.schedule[i+1][0]:
time0, temp0 = self.schedule[i]
time1, temp1 = self.schedule[i+1]
frac = (ts - time0) / (time1 - time0)
setpoint = frac * (temp1 - temp0) + temp0
self.pid.setPoint(setpoint)
temp = self.therm.temperature.temp
if temp == -1:
continue #skip invalid temperature readings
elif temp - setpoint > 10:
self.regulator.off()
self.duty_cycle = True
pid_out = -1
elif self.duty_cycle:
if temp - setpoint < -5:
self.regulator.ignite()
self.duty_cycle = False
pid_out = -1
else:
pid_out = self.pid.update(temp)
if pid_out < 0: pid_out = 0
if pid_out > 1: pid_out = 1
self.regulator.set(pid_out)
if self.callback is not None:
self.callback(temp, setpoint, pid_out)
sleep = _next - time.time()
if sleep > 0:
time.sleep(sleep)
_next += self.interval
|
''' Returns the elapsed time from start in seconds'''
return time.time() - self.start_time
|
identifier_body
|
manager.py
|
import os
import stepper
import time
import random
import thermo
import threading
import traceback
import logging
import states
import PID
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class TempLog(object):
def __init__(self, history, interval=60, suffix=""): #save data every 60 seconds
import paths
self.history = history
fname = time.strftime('%Y-%m-%d_%I:%M%P')
if len(suffix) > 0:
suffix = "_"+suffix
self.fname = os.path.join(paths.log_path, fname+suffix+".log")
with open(self.fname, 'w') as fp:
fp.write("time\ttemp\n")
for t, temp in history:
fp.write("%f\t%f\n"%(t, temp))
self.next = time.time() + interval
self.interval = interval
self._buffer = []
def __iter__(self):
return iter(self.history)
def append(self, data):
self.history.append(data)
self._buffer.append(data)
if time.time() > self.next:
with open(self.fname, 'a') as fp:
for t, temp in self._buffer:
fp.write("%f\t%f\n"%(t, temp))
self._buffer = []
self.next = time.time() + self.interval
class Manager(threading.Thread):
def __init__(self, start=states.Idle, simulate=False):
"""
Implement a state machine that cycles through States
"""
super(Manager, self).__init__()
self._send = None
if simulate:
self.regulator = stepper.Regulator(simulate=simulate)
self.therm = thermo.Simulate(regulator=self.regulator)
else:
self.regulator = stepper.Breakout(0x08)
self.therm = thermo.Breakout(0x08)
self.state = start(self)
self.state_change = threading.Event()
self.running = True
self.start()
def notify(self, data):
if self._send is not None:
try:
self._send(data)
except:
pass
else:
logger.info("No notifier set, ignoring message: %s"%data)
def __getattr__(self, name):
"""Mutates the manager to return State actions
If the requested attribute is a function, wrap the function
such that returned obejcts which are States indicate a state change
"""
attr = getattr(self.state, name)
if hasattr(attr, "__call__"):
def func(*args, **kwargs):
self._change_state(attr(*args, **kwargs))
return func
return attr
def _change_state(self, output):
if isinstance(output, type) and issubclass(output, states.State) :
self.state = output(self)
self.state_change.set()
self.notify(dict(type="state", state=output.__name__))
logger.info("Switching to state '%s'"%output.__name__)
elif isinstance(output, tuple) and issubclass(output[0], states.State):
newstate, kwargs = output
self.state = newstate(self, **kwargs)
self.notify(dict(type="state", state=newstate.__name__))
logger.info("Switching to state '%s'"%newstate.__name__)
elif isinstance(output, dict) and "type" in output:
self.notify(output)
elif output is not None:
logger.warn("Unknown state output: %r"%output)
def run(self):
while self.running:
self._change_state(self.state.run())
def manager_stop(self):
self.running = False
self.state_change.set()
class Profile(threading.Thread):
"""Performs the PID loop required for feedback control"""
def __init__(self, schedule, therm, regulator, interval=1, start_time=None, callback=None,
|
self.schedule = schedule
self.therm = therm
self.regulator = regulator
self.interval = interval
self.start_time = start_time
if start_time is None:
self.start_time = time.time()
self.pid = PID.PID(Kp, Ki, Kd)
self.callback = callback
self.running = True
self.duty_cycle = False
self.start()
@property
def elapsed(self):
''' Returns the elapsed time from start in seconds'''
return time.time() - self.start_time
@property
def completed(self):
return self.elapsed > self.schedule[-1][0]
def stop(self):
self.running = False
def run(self):
_next = time.time()+self.interval
while not self.completed and self.running:
ts = self.elapsed
#find epoch
for i in range(len(self.schedule)-1):
if self.schedule[i][0] < ts < self.schedule[i+1][0]:
time0, temp0 = self.schedule[i]
time1, temp1 = self.schedule[i+1]
frac = (ts - time0) / (time1 - time0)
setpoint = frac * (temp1 - temp0) + temp0
self.pid.setPoint(setpoint)
temp = self.therm.temperature.temp
if temp == -1:
continue #skip invalid temperature readings
elif temp - setpoint > 10:
self.regulator.off()
self.duty_cycle = True
pid_out = -1
elif self.duty_cycle:
if temp - setpoint < -5:
self.regulator.ignite()
self.duty_cycle = False
pid_out = -1
else:
pid_out = self.pid.update(temp)
if pid_out < 0: pid_out = 0
if pid_out > 1: pid_out = 1
self.regulator.set(pid_out)
if self.callback is not None:
self.callback(temp, setpoint, pid_out)
sleep = _next - time.time()
if sleep > 0:
time.sleep(sleep)
_next += self.interval
|
Kp=.03, Ki=.015, Kd=.001):
super(Profile, self).__init__()
self.daemon = True
|
random_line_split
|
manager.py
|
import os
import stepper
import time
import random
import thermo
import threading
import traceback
import logging
import states
import PID
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class TempLog(object):
def __init__(self, history, interval=60, suffix=""): #save data every 60 seconds
import paths
self.history = history
fname = time.strftime('%Y-%m-%d_%I:%M%P')
if len(suffix) > 0:
|
self.fname = os.path.join(paths.log_path, fname+suffix+".log")
with open(self.fname, 'w') as fp:
fp.write("time\ttemp\n")
for t, temp in history:
fp.write("%f\t%f\n"%(t, temp))
self.next = time.time() + interval
self.interval = interval
self._buffer = []
def __iter__(self):
return iter(self.history)
def append(self, data):
self.history.append(data)
self._buffer.append(data)
if time.time() > self.next:
with open(self.fname, 'a') as fp:
for t, temp in self._buffer:
fp.write("%f\t%f\n"%(t, temp))
self._buffer = []
self.next = time.time() + self.interval
class Manager(threading.Thread):
def __init__(self, start=states.Idle, simulate=False):
"""
Implement a state machine that cycles through States
"""
super(Manager, self).__init__()
self._send = None
if simulate:
self.regulator = stepper.Regulator(simulate=simulate)
self.therm = thermo.Simulate(regulator=self.regulator)
else:
self.regulator = stepper.Breakout(0x08)
self.therm = thermo.Breakout(0x08)
self.state = start(self)
self.state_change = threading.Event()
self.running = True
self.start()
def notify(self, data):
if self._send is not None:
try:
self._send(data)
except:
pass
else:
logger.info("No notifier set, ignoring message: %s"%data)
def __getattr__(self, name):
"""Mutates the manager to return State actions
If the requested attribute is a function, wrap the function
such that returned obejcts which are States indicate a state change
"""
attr = getattr(self.state, name)
if hasattr(attr, "__call__"):
def func(*args, **kwargs):
self._change_state(attr(*args, **kwargs))
return func
return attr
def _change_state(self, output):
if isinstance(output, type) and issubclass(output, states.State) :
self.state = output(self)
self.state_change.set()
self.notify(dict(type="state", state=output.__name__))
logger.info("Switching to state '%s'"%output.__name__)
elif isinstance(output, tuple) and issubclass(output[0], states.State):
newstate, kwargs = output
self.state = newstate(self, **kwargs)
self.notify(dict(type="state", state=newstate.__name__))
logger.info("Switching to state '%s'"%newstate.__name__)
elif isinstance(output, dict) and "type" in output:
self.notify(output)
elif output is not None:
logger.warn("Unknown state output: %r"%output)
def run(self):
while self.running:
self._change_state(self.state.run())
def manager_stop(self):
self.running = False
self.state_change.set()
class Profile(threading.Thread):
"""Performs the PID loop required for feedback control"""
def __init__(self, schedule, therm, regulator, interval=1, start_time=None, callback=None,
Kp=.03, Ki=.015, Kd=.001):
super(Profile, self).__init__()
self.daemon = True
self.schedule = schedule
self.therm = therm
self.regulator = regulator
self.interval = interval
self.start_time = start_time
if start_time is None:
self.start_time = time.time()
self.pid = PID.PID(Kp, Ki, Kd)
self.callback = callback
self.running = True
self.duty_cycle = False
self.start()
@property
def elapsed(self):
''' Returns the elapsed time from start in seconds'''
return time.time() - self.start_time
@property
def completed(self):
return self.elapsed > self.schedule[-1][0]
def stop(self):
self.running = False
def run(self):
_next = time.time()+self.interval
while not self.completed and self.running:
ts = self.elapsed
#find epoch
for i in range(len(self.schedule)-1):
if self.schedule[i][0] < ts < self.schedule[i+1][0]:
time0, temp0 = self.schedule[i]
time1, temp1 = self.schedule[i+1]
frac = (ts - time0) / (time1 - time0)
setpoint = frac * (temp1 - temp0) + temp0
self.pid.setPoint(setpoint)
temp = self.therm.temperature.temp
if temp == -1:
continue #skip invalid temperature readings
elif temp - setpoint > 10:
self.regulator.off()
self.duty_cycle = True
pid_out = -1
elif self.duty_cycle:
if temp - setpoint < -5:
self.regulator.ignite()
self.duty_cycle = False
pid_out = -1
else:
pid_out = self.pid.update(temp)
if pid_out < 0: pid_out = 0
if pid_out > 1: pid_out = 1
self.regulator.set(pid_out)
if self.callback is not None:
self.callback(temp, setpoint, pid_out)
sleep = _next - time.time()
if sleep > 0:
time.sleep(sleep)
_next += self.interval
|
suffix = "_"+suffix
|
conditional_block
|
manager.py
|
import os
import stepper
import time
import random
import thermo
import threading
import traceback
import logging
import states
import PID
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class TempLog(object):
def __init__(self, history, interval=60, suffix=""): #save data every 60 seconds
import paths
self.history = history
fname = time.strftime('%Y-%m-%d_%I:%M%P')
if len(suffix) > 0:
suffix = "_"+suffix
self.fname = os.path.join(paths.log_path, fname+suffix+".log")
with open(self.fname, 'w') as fp:
fp.write("time\ttemp\n")
for t, temp in history:
fp.write("%f\t%f\n"%(t, temp))
self.next = time.time() + interval
self.interval = interval
self._buffer = []
def __iter__(self):
return iter(self.history)
def append(self, data):
self.history.append(data)
self._buffer.append(data)
if time.time() > self.next:
with open(self.fname, 'a') as fp:
for t, temp in self._buffer:
fp.write("%f\t%f\n"%(t, temp))
self._buffer = []
self.next = time.time() + self.interval
class Manager(threading.Thread):
def
|
(self, start=states.Idle, simulate=False):
"""
Implement a state machine that cycles through States
"""
super(Manager, self).__init__()
self._send = None
if simulate:
self.regulator = stepper.Regulator(simulate=simulate)
self.therm = thermo.Simulate(regulator=self.regulator)
else:
self.regulator = stepper.Breakout(0x08)
self.therm = thermo.Breakout(0x08)
self.state = start(self)
self.state_change = threading.Event()
self.running = True
self.start()
def notify(self, data):
if self._send is not None:
try:
self._send(data)
except:
pass
else:
logger.info("No notifier set, ignoring message: %s"%data)
def __getattr__(self, name):
"""Mutates the manager to return State actions
If the requested attribute is a function, wrap the function
such that returned obejcts which are States indicate a state change
"""
attr = getattr(self.state, name)
if hasattr(attr, "__call__"):
def func(*args, **kwargs):
self._change_state(attr(*args, **kwargs))
return func
return attr
def _change_state(self, output):
if isinstance(output, type) and issubclass(output, states.State) :
self.state = output(self)
self.state_change.set()
self.notify(dict(type="state", state=output.__name__))
logger.info("Switching to state '%s'"%output.__name__)
elif isinstance(output, tuple) and issubclass(output[0], states.State):
newstate, kwargs = output
self.state = newstate(self, **kwargs)
self.notify(dict(type="state", state=newstate.__name__))
logger.info("Switching to state '%s'"%newstate.__name__)
elif isinstance(output, dict) and "type" in output:
self.notify(output)
elif output is not None:
logger.warn("Unknown state output: %r"%output)
def run(self):
while self.running:
self._change_state(self.state.run())
def manager_stop(self):
self.running = False
self.state_change.set()
class Profile(threading.Thread):
"""Performs the PID loop required for feedback control"""
def __init__(self, schedule, therm, regulator, interval=1, start_time=None, callback=None,
Kp=.03, Ki=.015, Kd=.001):
super(Profile, self).__init__()
self.daemon = True
self.schedule = schedule
self.therm = therm
self.regulator = regulator
self.interval = interval
self.start_time = start_time
if start_time is None:
self.start_time = time.time()
self.pid = PID.PID(Kp, Ki, Kd)
self.callback = callback
self.running = True
self.duty_cycle = False
self.start()
@property
def elapsed(self):
''' Returns the elapsed time from start in seconds'''
return time.time() - self.start_time
@property
def completed(self):
return self.elapsed > self.schedule[-1][0]
def stop(self):
self.running = False
def run(self):
_next = time.time()+self.interval
while not self.completed and self.running:
ts = self.elapsed
#find epoch
for i in range(len(self.schedule)-1):
if self.schedule[i][0] < ts < self.schedule[i+1][0]:
time0, temp0 = self.schedule[i]
time1, temp1 = self.schedule[i+1]
frac = (ts - time0) / (time1 - time0)
setpoint = frac * (temp1 - temp0) + temp0
self.pid.setPoint(setpoint)
temp = self.therm.temperature.temp
if temp == -1:
continue #skip invalid temperature readings
elif temp - setpoint > 10:
self.regulator.off()
self.duty_cycle = True
pid_out = -1
elif self.duty_cycle:
if temp - setpoint < -5:
self.regulator.ignite()
self.duty_cycle = False
pid_out = -1
else:
pid_out = self.pid.update(temp)
if pid_out < 0: pid_out = 0
if pid_out > 1: pid_out = 1
self.regulator.set(pid_out)
if self.callback is not None:
self.callback(temp, setpoint, pid_out)
sleep = _next - time.time()
if sleep > 0:
time.sleep(sleep)
_next += self.interval
|
__init__
|
identifier_name
|
msgsend-pipes.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A port of the simplistic benchmark from
//
// http://github.com/PaulKeeble/ScalaVErlangAgents
//
// I *think* it's the same, more or less.
#![feature(std_misc)]
use std::sync::mpsc::{channel, Sender, Receiver};
use std::env;
use std::thread;
use std::time::Duration;
enum request {
get_count,
bytes(usize),
stop
}
fn server(requests: &Receiver<request>, responses: &Sender<usize>)
|
fn run(args: &[String]) {
let (to_parent, from_child) = channel();
let size = args[1].parse::<usize>().unwrap();
let workers = args[2].parse::<usize>().unwrap();
let num_bytes = 100;
let mut result = None;
let mut to_parent = Some(to_parent);
let dur = Duration::span(|| {
let to_parent = to_parent.take().unwrap();
let mut worker_results = Vec::new();
let from_parent = if workers == 1 {
let (to_child, from_parent) = channel();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
from_parent
} else {
let (to_child, from_parent) = channel();
for _ in 0..workers {
let to_child = to_child.clone();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
}
from_parent
};
thread::spawn(move|| {
server(&from_parent, &to_parent);
});
for r in worker_results {
let _ = r.join();
}
//println!("sending stop message");
//to_child.send(stop);
//move_out(to_child);
result = Some(from_child.recv().unwrap());
});
let result = result.unwrap();
print!("Count is {}\n", result);
print!("Test took {} ms\n", dur.num_milliseconds());
let thruput = ((size / workers * workers) as f64) / (dur.num_milliseconds() as f64);
print!("Throughput={} per sec\n", thruput / 1000.0);
assert_eq!(result, num_bytes * size);
}
fn main() {
let args = env::args();
let args = if env::var_os("RUST_BENCH").is_some() {
vec!("".to_string(), "1000000".to_string(), "8".to_string())
} else if args.len() <= 1 {
vec!("".to_string(), "10000".to_string(), "4".to_string())
} else {
args.map(|x| x.to_string()).collect()
};
println!("{:?}", args);
run(&args);
}
|
{
let mut count: usize = 0;
let mut done = false;
while !done {
match requests.recv() {
Ok(request::get_count) => { responses.send(count.clone()); }
Ok(request::bytes(b)) => {
//println!("server: received {} bytes", b);
count += b;
}
Err(..) => { done = true; }
_ => { }
}
}
responses.send(count).unwrap();
//println!("server exiting");
}
|
identifier_body
|
msgsend-pipes.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A port of the simplistic benchmark from
|
//
// I *think* it's the same, more or less.
#![feature(std_misc)]
use std::sync::mpsc::{channel, Sender, Receiver};
use std::env;
use std::thread;
use std::time::Duration;
enum request {
get_count,
bytes(usize),
stop
}
fn server(requests: &Receiver<request>, responses: &Sender<usize>) {
let mut count: usize = 0;
let mut done = false;
while !done {
match requests.recv() {
Ok(request::get_count) => { responses.send(count.clone()); }
Ok(request::bytes(b)) => {
//println!("server: received {} bytes", b);
count += b;
}
Err(..) => { done = true; }
_ => { }
}
}
responses.send(count).unwrap();
//println!("server exiting");
}
fn run(args: &[String]) {
let (to_parent, from_child) = channel();
let size = args[1].parse::<usize>().unwrap();
let workers = args[2].parse::<usize>().unwrap();
let num_bytes = 100;
let mut result = None;
let mut to_parent = Some(to_parent);
let dur = Duration::span(|| {
let to_parent = to_parent.take().unwrap();
let mut worker_results = Vec::new();
let from_parent = if workers == 1 {
let (to_child, from_parent) = channel();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
from_parent
} else {
let (to_child, from_parent) = channel();
for _ in 0..workers {
let to_child = to_child.clone();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
}
from_parent
};
thread::spawn(move|| {
server(&from_parent, &to_parent);
});
for r in worker_results {
let _ = r.join();
}
//println!("sending stop message");
//to_child.send(stop);
//move_out(to_child);
result = Some(from_child.recv().unwrap());
});
let result = result.unwrap();
print!("Count is {}\n", result);
print!("Test took {} ms\n", dur.num_milliseconds());
let thruput = ((size / workers * workers) as f64) / (dur.num_milliseconds() as f64);
print!("Throughput={} per sec\n", thruput / 1000.0);
assert_eq!(result, num_bytes * size);
}
fn main() {
let args = env::args();
let args = if env::var_os("RUST_BENCH").is_some() {
vec!("".to_string(), "1000000".to_string(), "8".to_string())
} else if args.len() <= 1 {
vec!("".to_string(), "10000".to_string(), "4".to_string())
} else {
args.map(|x| x.to_string()).collect()
};
println!("{:?}", args);
run(&args);
}
|
//
// http://github.com/PaulKeeble/ScalaVErlangAgents
|
random_line_split
|
msgsend-pipes.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A port of the simplistic benchmark from
//
// http://github.com/PaulKeeble/ScalaVErlangAgents
//
// I *think* it's the same, more or less.
#![feature(std_misc)]
use std::sync::mpsc::{channel, Sender, Receiver};
use std::env;
use std::thread;
use std::time::Duration;
enum request {
get_count,
bytes(usize),
stop
}
fn
|
(requests: &Receiver<request>, responses: &Sender<usize>) {
let mut count: usize = 0;
let mut done = false;
while !done {
match requests.recv() {
Ok(request::get_count) => { responses.send(count.clone()); }
Ok(request::bytes(b)) => {
//println!("server: received {} bytes", b);
count += b;
}
Err(..) => { done = true; }
_ => { }
}
}
responses.send(count).unwrap();
//println!("server exiting");
}
fn run(args: &[String]) {
let (to_parent, from_child) = channel();
let size = args[1].parse::<usize>().unwrap();
let workers = args[2].parse::<usize>().unwrap();
let num_bytes = 100;
let mut result = None;
let mut to_parent = Some(to_parent);
let dur = Duration::span(|| {
let to_parent = to_parent.take().unwrap();
let mut worker_results = Vec::new();
let from_parent = if workers == 1 {
let (to_child, from_parent) = channel();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
from_parent
} else {
let (to_child, from_parent) = channel();
for _ in 0..workers {
let to_child = to_child.clone();
worker_results.push(thread::spawn(move|| {
for _ in 0..size / workers {
//println!("worker {}: sending {} bytes", i, num_bytes);
to_child.send(request::bytes(num_bytes));
}
//println!("worker {} exiting", i);
}));
}
from_parent
};
thread::spawn(move|| {
server(&from_parent, &to_parent);
});
for r in worker_results {
let _ = r.join();
}
//println!("sending stop message");
//to_child.send(stop);
//move_out(to_child);
result = Some(from_child.recv().unwrap());
});
let result = result.unwrap();
print!("Count is {}\n", result);
print!("Test took {} ms\n", dur.num_milliseconds());
let thruput = ((size / workers * workers) as f64) / (dur.num_milliseconds() as f64);
print!("Throughput={} per sec\n", thruput / 1000.0);
assert_eq!(result, num_bytes * size);
}
fn main() {
let args = env::args();
let args = if env::var_os("RUST_BENCH").is_some() {
vec!("".to_string(), "1000000".to_string(), "8".to_string())
} else if args.len() <= 1 {
vec!("".to_string(), "10000".to_string(), "4".to_string())
} else {
args.map(|x| x.to_string()).collect()
};
println!("{:?}", args);
run(&args);
}
|
server
|
identifier_name
|
mod.rs
|
// Copyright (c) 2019, Ben Boeckel
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of this project nor the names of its contributors
|
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//! The test structure here comes from the structure in libkeyutils.
pub(crate) mod utils;
mod add;
mod clear;
mod describe;
mod instantiate;
mod invalidate;
mod keytype;
mod link;
mod newring;
mod permitting;
mod reading;
mod revoke;
mod search;
mod timeout;
mod unlink;
mod update;
|
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
random_line_split
|
fs_helpers.rs
|
#![allow(non_camel_case_types)]
#![allow(unused_unsafe)]
use crate::ctx::WasiCtx;
use crate::host;
use lucet_runtime::vmctx::Vmctx;
use nix::libc::{self, c_long};
use std::ffi::{OsStr, OsString};
use std::os::unix::prelude::{OsStrExt, OsStringExt, RawFd};
#[cfg(target_os = "linux")]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_RSYNC;
#[cfg(not(target_os = "linux"))]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_SYNC;
/// Normalizes a path to ensure that the target path is located under the directory provided.
///
/// This is a workaround for not having Capsicum support in the OS.
pub fn path_get<P: AsRef<OsStr>>(
vmctx: &Vmctx,
dirfd: host::__wasi_fd_t,
dirflags: host::__wasi_lookupflags_t,
path: P,
needed_base: host::__wasi_rights_t,
needed_inheriting: host::__wasi_rights_t,
needs_final_component: bool,
) -> Result<(RawFd, OsString), host::__wasi_errno_t>
|
#[cfg(not(target_os = "macos"))]
pub fn utime_now() -> c_long {
libc::UTIME_NOW
}
#[cfg(target_os = "macos")]
pub fn utime_now() -> c_long {
-1
}
#[cfg(not(target_os = "macos"))]
pub fn utime_omit() -> c_long {
libc::UTIME_OMIT
}
#[cfg(target_os = "macos")]
pub fn utime_omit() -> c_long {
-2
}
|
{
use nix::errno::Errno;
use nix::fcntl::{openat, readlinkat, OFlag};
use nix::sys::stat::Mode;
const MAX_SYMLINK_EXPANSIONS: usize = 128;
/// close all the intermediate file descriptors, but make sure not to drop either the original
/// dirfd or the one we return (which may be the same dirfd)
fn ret_dir_success(dir_stack: &mut Vec<RawFd>) -> RawFd {
let ret_dir = dir_stack.pop().expect("there is always a dirfd to return");
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
ret_dir
}
/// close all file descriptors other than the base directory, and return the errno for
/// convenience with `return`
fn ret_error(
dir_stack: &mut Vec<RawFd>,
errno: host::__wasi_errno_t,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
Err(errno)
}
let ctx = vmctx.get_embed_ctx::<WasiCtx>();
let dirfe = ctx.get_fd_entry(dirfd, needed_base, needed_inheriting)?;
// Stack of directory file descriptors. Index 0 always corresponds with the directory provided
// to this function. Entering a directory causes a file descriptor to be pushed, while handling
// ".." entries causes an entry to be popped. Index 0 cannot be popped, as this would imply
// escaping the base directory.
let mut dir_stack = vec![dirfe.fd_object.rawfd];
// Stack of paths left to process. This is initially the `path` argument to this function, but
// any symlinks we encounter are processed by pushing them on the stack.
let mut path_stack = vec![path.as_ref().to_owned().into_vec()];
// Track the number of symlinks we've expanded, so we can return `ELOOP` after too many.
let mut symlink_expansions = 0;
// Buffer to read links into; defined outside of the loop so we don't reallocate it constantly.
let mut readlink_buf = vec![0u8; libc::PATH_MAX as usize + 1];
// TODO: rewrite this using a custom posix path type, with a component iterator that respects
// trailing slashes. This version does way too much allocation, and is way too fiddly.
loop {
let component = if let Some(cur_path) = path_stack.pop() {
// eprintln!(
// "cur_path = {:?}",
// std::str::from_utf8(cur_path.as_slice()).unwrap()
// );
let mut split = cur_path.splitn(2, |&c| c == b'/');
let head = split.next();
let tail = split.next();
match (head, tail) {
(None, _) => {
// split always returns at least a singleton iterator with an empty slice
panic!("unreachable");
}
// path is empty
(Some([]), None) => {
return ret_error(&mut dir_stack, host::__WASI_ENOENT as host::__wasi_errno_t);
}
// path starts with `/`, is absolute
(Some([]), Some(_)) => {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
}
// the final component of the path with no trailing slash
(Some(component), None) => component.to_vec(),
(Some(component), Some(rest)) => {
if rest.iter().all(|&c| c == b'/') {
// the final component of the path with trailing slashes; put one trailing
// slash back on
let mut component = component.to_vec();
component.push('/' as u8);
component
} else {
// non-final component; push the rest back on the stack
path_stack.push(rest.to_vec());
component.to_vec()
}
}
}
} else {
// if the path stack is ever empty, we return rather than going through the loop again
panic!("unreachable");
};
// eprintln!(
// "component = {:?}",
// std::str::from_utf8(component.as_slice()).unwrap()
// );
match component.as_slice() {
b"." => {
// skip component
}
b".." => {
// pop a directory
let dirfd = dir_stack.pop().expect("dir_stack is never empty");
// we're not allowed to pop past the original directory
if dir_stack.is_empty() {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
} else {
nix::unistd::close(dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
// should the component be a directory? it should if there is more path left to process, or
// if it has a trailing slash and `needs_final_component` is not set
component
if !path_stack.is_empty()
|| (component.ends_with(b"/") && !needs_final_component) =>
{
match openat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
OFlag::O_RDONLY | OFlag::O_DIRECTORY | OFlag::O_NOFOLLOW,
Mode::empty(),
) {
Ok(new_dir) => {
dir_stack.push(new_dir);
continue;
}
Err(e)
// Check to see if it was a symlink. Linux indicates
// this with ENOTDIR because of the O_DIRECTORY flag.
if e.as_errno() == Some(Errno::ELOOP)
|| e.as_errno() == Some(Errno::EMLINK)
|| e.as_errno() == Some(Errno::ENOTDIR) =>
{
// attempt symlink expansion
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
// the final component
component => {
// if there's a trailing slash, or if `LOOKUP_SYMLINK_FOLLOW` is set, attempt
// symlink expansion
if component.ends_with(b"/") || (dirflags & host::__WASI_LOOKUP_SYMLINK_FOLLOW) != 0
{
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
let errno = e.as_errno().unwrap();
if errno != Errno::EINVAL && errno != Errno::ENOENT {
// only return an error if this path is not actually a symlink
return ret_error(&mut dir_stack, host::errno_from_nix(errno));
}
}
}
}
// not a symlink, so we're done;
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::from_bytes(component).to_os_string(),
));
}
}
if path_stack.is_empty() {
// no further components to process. means we've hit a case like "." or "a/..", or if the
// input path has trailing slashes and `needs_final_component` is not set
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::new(".").to_os_string(),
));
} else {
continue;
}
}
}
|
identifier_body
|
fs_helpers.rs
|
#![allow(non_camel_case_types)]
#![allow(unused_unsafe)]
use crate::ctx::WasiCtx;
use crate::host;
use lucet_runtime::vmctx::Vmctx;
use nix::libc::{self, c_long};
use std::ffi::{OsStr, OsString};
use std::os::unix::prelude::{OsStrExt, OsStringExt, RawFd};
#[cfg(target_os = "linux")]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_RSYNC;
#[cfg(not(target_os = "linux"))]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_SYNC;
/// Normalizes a path to ensure that the target path is located under the directory provided.
///
/// This is a workaround for not having Capsicum support in the OS.
pub fn path_get<P: AsRef<OsStr>>(
vmctx: &Vmctx,
dirfd: host::__wasi_fd_t,
dirflags: host::__wasi_lookupflags_t,
path: P,
needed_base: host::__wasi_rights_t,
needed_inheriting: host::__wasi_rights_t,
needs_final_component: bool,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
use nix::errno::Errno;
use nix::fcntl::{openat, readlinkat, OFlag};
use nix::sys::stat::Mode;
const MAX_SYMLINK_EXPANSIONS: usize = 128;
/// close all the intermediate file descriptors, but make sure not to drop either the original
/// dirfd or the one we return (which may be the same dirfd)
fn ret_dir_success(dir_stack: &mut Vec<RawFd>) -> RawFd {
let ret_dir = dir_stack.pop().expect("there is always a dirfd to return");
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
ret_dir
}
/// close all file descriptors other than the base directory, and return the errno for
/// convenience with `return`
fn ret_error(
dir_stack: &mut Vec<RawFd>,
errno: host::__wasi_errno_t,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
Err(errno)
}
let ctx = vmctx.get_embed_ctx::<WasiCtx>();
let dirfe = ctx.get_fd_entry(dirfd, needed_base, needed_inheriting)?;
// Stack of directory file descriptors. Index 0 always corresponds with the directory provided
// to this function. Entering a directory causes a file descriptor to be pushed, while handling
// ".." entries causes an entry to be popped. Index 0 cannot be popped, as this would imply
// escaping the base directory.
let mut dir_stack = vec![dirfe.fd_object.rawfd];
// Stack of paths left to process. This is initially the `path` argument to this function, but
// any symlinks we encounter are processed by pushing them on the stack.
let mut path_stack = vec![path.as_ref().to_owned().into_vec()];
// Track the number of symlinks we've expanded, so we can return `ELOOP` after too many.
let mut symlink_expansions = 0;
// Buffer to read links into; defined outside of the loop so we don't reallocate it constantly.
let mut readlink_buf = vec![0u8; libc::PATH_MAX as usize + 1];
// TODO: rewrite this using a custom posix path type, with a component iterator that respects
// trailing slashes. This version does way too much allocation, and is way too fiddly.
loop {
let component = if let Some(cur_path) = path_stack.pop() {
// eprintln!(
// "cur_path = {:?}",
// std::str::from_utf8(cur_path.as_slice()).unwrap()
// );
let mut split = cur_path.splitn(2, |&c| c == b'/');
let head = split.next();
let tail = split.next();
match (head, tail) {
(None, _) => {
// split always returns at least a singleton iterator with an empty slice
panic!("unreachable");
}
// path is empty
(Some([]), None) => {
return ret_error(&mut dir_stack, host::__WASI_ENOENT as host::__wasi_errno_t);
}
// path starts with `/`, is absolute
(Some([]), Some(_)) => {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
}
// the final component of the path with no trailing slash
(Some(component), None) => component.to_vec(),
(Some(component), Some(rest)) => {
if rest.iter().all(|&c| c == b'/') {
// the final component of the path with trailing slashes; put one trailing
// slash back on
let mut component = component.to_vec();
component.push('/' as u8);
component
} else {
// non-final component; push the rest back on the stack
path_stack.push(rest.to_vec());
component.to_vec()
}
}
}
} else {
// if the path stack is ever empty, we return rather than going through the loop again
panic!("unreachable");
};
// eprintln!(
// "component = {:?}",
// std::str::from_utf8(component.as_slice()).unwrap()
// );
match component.as_slice() {
b"." => {
// skip component
}
b".." => {
// pop a directory
let dirfd = dir_stack.pop().expect("dir_stack is never empty");
// we're not allowed to pop past the original directory
if dir_stack.is_empty() {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
} else {
nix::unistd::close(dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
// should the component be a directory? it should if there is more path left to process, or
// if it has a trailing slash and `needs_final_component` is not set
component
if !path_stack.is_empty()
|| (component.ends_with(b"/") && !needs_final_component) =>
{
match openat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
OFlag::O_RDONLY | OFlag::O_DIRECTORY | OFlag::O_NOFOLLOW,
Mode::empty(),
) {
Ok(new_dir) => {
dir_stack.push(new_dir);
continue;
}
Err(e)
// Check to see if it was a symlink. Linux indicates
// this with ENOTDIR because of the O_DIRECTORY flag.
if e.as_errno() == Some(Errno::ELOOP)
|| e.as_errno() == Some(Errno::EMLINK)
|| e.as_errno() == Some(Errno::ENOTDIR) =>
{
// attempt symlink expansion
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
// the final component
component => {
// if there's a trailing slash, or if `LOOKUP_SYMLINK_FOLLOW` is set, attempt
// symlink expansion
if component.ends_with(b"/") || (dirflags & host::__WASI_LOOKUP_SYMLINK_FOLLOW) != 0
{
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
let errno = e.as_errno().unwrap();
if errno != Errno::EINVAL && errno != Errno::ENOENT {
// only return an error if this path is not actually a symlink
return ret_error(&mut dir_stack, host::errno_from_nix(errno));
}
}
}
}
// not a symlink, so we're done;
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::from_bytes(component).to_os_string(),
));
}
}
if path_stack.is_empty() {
// no further components to process. means we've hit a case like "." or "a/..", or if the
// input path has trailing slashes and `needs_final_component` is not set
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::new(".").to_os_string(),
));
} else {
continue;
}
}
}
#[cfg(not(target_os = "macos"))]
pub fn utime_now() -> c_long {
libc::UTIME_NOW
}
#[cfg(target_os = "macos")]
pub fn
|
() -> c_long {
-1
}
#[cfg(not(target_os = "macos"))]
pub fn utime_omit() -> c_long {
libc::UTIME_OMIT
}
#[cfg(target_os = "macos")]
pub fn utime_omit() -> c_long {
-2
}
|
utime_now
|
identifier_name
|
fs_helpers.rs
|
#![allow(non_camel_case_types)]
#![allow(unused_unsafe)]
use crate::ctx::WasiCtx;
use crate::host;
use lucet_runtime::vmctx::Vmctx;
use nix::libc::{self, c_long};
use std::ffi::{OsStr, OsString};
use std::os::unix::prelude::{OsStrExt, OsStringExt, RawFd};
#[cfg(target_os = "linux")]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_RSYNC;
#[cfg(not(target_os = "linux"))]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_SYNC;
/// Normalizes a path to ensure that the target path is located under the directory provided.
///
/// This is a workaround for not having Capsicum support in the OS.
pub fn path_get<P: AsRef<OsStr>>(
vmctx: &Vmctx,
dirfd: host::__wasi_fd_t,
dirflags: host::__wasi_lookupflags_t,
path: P,
needed_base: host::__wasi_rights_t,
needed_inheriting: host::__wasi_rights_t,
needs_final_component: bool,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
use nix::errno::Errno;
use nix::fcntl::{openat, readlinkat, OFlag};
use nix::sys::stat::Mode;
const MAX_SYMLINK_EXPANSIONS: usize = 128;
/// close all the intermediate file descriptors, but make sure not to drop either the original
/// dirfd or the one we return (which may be the same dirfd)
fn ret_dir_success(dir_stack: &mut Vec<RawFd>) -> RawFd {
let ret_dir = dir_stack.pop().expect("there is always a dirfd to return");
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
ret_dir
}
/// close all file descriptors other than the base directory, and return the errno for
/// convenience with `return`
fn ret_error(
dir_stack: &mut Vec<RawFd>,
errno: host::__wasi_errno_t,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
Err(errno)
}
let ctx = vmctx.get_embed_ctx::<WasiCtx>();
let dirfe = ctx.get_fd_entry(dirfd, needed_base, needed_inheriting)?;
// Stack of directory file descriptors. Index 0 always corresponds with the directory provided
// to this function. Entering a directory causes a file descriptor to be pushed, while handling
// ".." entries causes an entry to be popped. Index 0 cannot be popped, as this would imply
// escaping the base directory.
let mut dir_stack = vec![dirfe.fd_object.rawfd];
// Stack of paths left to process. This is initially the `path` argument to this function, but
// any symlinks we encounter are processed by pushing them on the stack.
let mut path_stack = vec![path.as_ref().to_owned().into_vec()];
// Track the number of symlinks we've expanded, so we can return `ELOOP` after too many.
let mut symlink_expansions = 0;
// Buffer to read links into; defined outside of the loop so we don't reallocate it constantly.
let mut readlink_buf = vec![0u8; libc::PATH_MAX as usize + 1];
// TODO: rewrite this using a custom posix path type, with a component iterator that respects
// trailing slashes. This version does way too much allocation, and is way too fiddly.
loop {
let component = if let Some(cur_path) = path_stack.pop() {
// eprintln!(
// "cur_path = {:?}",
// std::str::from_utf8(cur_path.as_slice()).unwrap()
// );
let mut split = cur_path.splitn(2, |&c| c == b'/');
let head = split.next();
let tail = split.next();
match (head, tail) {
(None, _) => {
// split always returns at least a singleton iterator with an empty slice
panic!("unreachable");
}
// path is empty
(Some([]), None) => {
return ret_error(&mut dir_stack, host::__WASI_ENOENT as host::__wasi_errno_t);
}
// path starts with `/`, is absolute
(Some([]), Some(_)) => {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
}
// the final component of the path with no trailing slash
(Some(component), None) => component.to_vec(),
(Some(component), Some(rest)) => {
if rest.iter().all(|&c| c == b'/') {
// the final component of the path with trailing slashes; put one trailing
// slash back on
let mut component = component.to_vec();
component.push('/' as u8);
component
} else {
// non-final component; push the rest back on the stack
path_stack.push(rest.to_vec());
component.to_vec()
}
}
}
} else {
// if the path stack is ever empty, we return rather than going through the loop again
panic!("unreachable");
};
// eprintln!(
// "component = {:?}",
// std::str::from_utf8(component.as_slice()).unwrap()
// );
match component.as_slice() {
b"." => {
// skip component
}
b".." => {
// pop a directory
let dirfd = dir_stack.pop().expect("dir_stack is never empty");
// we're not allowed to pop past the original directory
if dir_stack.is_empty() {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
} else {
nix::unistd::close(dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
// should the component be a directory? it should if there is more path left to process, or
// if it has a trailing slash and `needs_final_component` is not set
component
if !path_stack.is_empty()
|| (component.ends_with(b"/") && !needs_final_component) =>
{
match openat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
OFlag::O_RDONLY | OFlag::O_DIRECTORY | OFlag::O_NOFOLLOW,
Mode::empty(),
) {
Ok(new_dir) => {
dir_stack.push(new_dir);
continue;
}
Err(e)
// Check to see if it was a symlink. Linux indicates
// this with ENOTDIR because of the O_DIRECTORY flag.
if e.as_errno() == Some(Errno::ELOOP)
|| e.as_errno() == Some(Errno::EMLINK)
|| e.as_errno() == Some(Errno::ENOTDIR) =>
{
// attempt symlink expansion
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
// the final component
component => {
// if there's a trailing slash, or if `LOOKUP_SYMLINK_FOLLOW` is set, attempt
// symlink expansion
if component.ends_with(b"/") || (dirflags & host::__WASI_LOOKUP_SYMLINK_FOLLOW) != 0
{
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
let errno = e.as_errno().unwrap();
if errno != Errno::EINVAL && errno != Errno::ENOENT {
// only return an error if this path is not actually a symlink
return ret_error(&mut dir_stack, host::errno_from_nix(errno));
}
}
}
}
|
// not a symlink, so we're done;
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::from_bytes(component).to_os_string(),
));
}
}
if path_stack.is_empty() {
// no further components to process. means we've hit a case like "." or "a/..", or if the
// input path has trailing slashes and `needs_final_component` is not set
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::new(".").to_os_string(),
));
} else {
continue;
}
}
}
#[cfg(not(target_os = "macos"))]
pub fn utime_now() -> c_long {
libc::UTIME_NOW
}
#[cfg(target_os = "macos")]
pub fn utime_now() -> c_long {
-1
}
#[cfg(not(target_os = "macos"))]
pub fn utime_omit() -> c_long {
libc::UTIME_OMIT
}
#[cfg(target_os = "macos")]
pub fn utime_omit() -> c_long {
-2
}
|
random_line_split
|
|
fs_helpers.rs
|
#![allow(non_camel_case_types)]
#![allow(unused_unsafe)]
use crate::ctx::WasiCtx;
use crate::host;
use lucet_runtime::vmctx::Vmctx;
use nix::libc::{self, c_long};
use std::ffi::{OsStr, OsString};
use std::os::unix::prelude::{OsStrExt, OsStringExt, RawFd};
#[cfg(target_os = "linux")]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_RSYNC;
#[cfg(not(target_os = "linux"))]
pub const O_RSYNC: nix::fcntl::OFlag = nix::fcntl::OFlag::O_SYNC;
/// Normalizes a path to ensure that the target path is located under the directory provided.
///
/// This is a workaround for not having Capsicum support in the OS.
pub fn path_get<P: AsRef<OsStr>>(
vmctx: &Vmctx,
dirfd: host::__wasi_fd_t,
dirflags: host::__wasi_lookupflags_t,
path: P,
needed_base: host::__wasi_rights_t,
needed_inheriting: host::__wasi_rights_t,
needs_final_component: bool,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
use nix::errno::Errno;
use nix::fcntl::{openat, readlinkat, OFlag};
use nix::sys::stat::Mode;
const MAX_SYMLINK_EXPANSIONS: usize = 128;
/// close all the intermediate file descriptors, but make sure not to drop either the original
/// dirfd or the one we return (which may be the same dirfd)
fn ret_dir_success(dir_stack: &mut Vec<RawFd>) -> RawFd {
let ret_dir = dir_stack.pop().expect("there is always a dirfd to return");
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
ret_dir
}
/// close all file descriptors other than the base directory, and return the errno for
/// convenience with `return`
fn ret_error(
dir_stack: &mut Vec<RawFd>,
errno: host::__wasi_errno_t,
) -> Result<(RawFd, OsString), host::__wasi_errno_t> {
if let Some(dirfds) = dir_stack.get(1..) {
for dirfd in dirfds {
nix::unistd::close(*dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
Err(errno)
}
let ctx = vmctx.get_embed_ctx::<WasiCtx>();
let dirfe = ctx.get_fd_entry(dirfd, needed_base, needed_inheriting)?;
// Stack of directory file descriptors. Index 0 always corresponds with the directory provided
// to this function. Entering a directory causes a file descriptor to be pushed, while handling
// ".." entries causes an entry to be popped. Index 0 cannot be popped, as this would imply
// escaping the base directory.
let mut dir_stack = vec![dirfe.fd_object.rawfd];
// Stack of paths left to process. This is initially the `path` argument to this function, but
// any symlinks we encounter are processed by pushing them on the stack.
let mut path_stack = vec![path.as_ref().to_owned().into_vec()];
// Track the number of symlinks we've expanded, so we can return `ELOOP` after too many.
let mut symlink_expansions = 0;
// Buffer to read links into; defined outside of the loop so we don't reallocate it constantly.
let mut readlink_buf = vec![0u8; libc::PATH_MAX as usize + 1];
// TODO: rewrite this using a custom posix path type, with a component iterator that respects
// trailing slashes. This version does way too much allocation, and is way too fiddly.
loop {
let component = if let Some(cur_path) = path_stack.pop() {
// eprintln!(
// "cur_path = {:?}",
// std::str::from_utf8(cur_path.as_slice()).unwrap()
// );
let mut split = cur_path.splitn(2, |&c| c == b'/');
let head = split.next();
let tail = split.next();
match (head, tail) {
(None, _) => {
// split always returns at least a singleton iterator with an empty slice
panic!("unreachable");
}
// path is empty
(Some([]), None) => {
return ret_error(&mut dir_stack, host::__WASI_ENOENT as host::__wasi_errno_t);
}
// path starts with `/`, is absolute
(Some([]), Some(_)) => {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
}
// the final component of the path with no trailing slash
(Some(component), None) => component.to_vec(),
(Some(component), Some(rest)) => {
if rest.iter().all(|&c| c == b'/') {
// the final component of the path with trailing slashes; put one trailing
// slash back on
let mut component = component.to_vec();
component.push('/' as u8);
component
} else {
// non-final component; push the rest back on the stack
path_stack.push(rest.to_vec());
component.to_vec()
}
}
}
} else {
// if the path stack is ever empty, we return rather than going through the loop again
panic!("unreachable");
};
// eprintln!(
// "component = {:?}",
// std::str::from_utf8(component.as_slice()).unwrap()
// );
match component.as_slice() {
b"." => {
// skip component
}
b".." => {
// pop a directory
let dirfd = dir_stack.pop().expect("dir_stack is never empty");
// we're not allowed to pop past the original directory
if dir_stack.is_empty() {
return ret_error(
&mut dir_stack,
host::__WASI_ENOTCAPABLE as host::__wasi_errno_t,
);
} else {
nix::unistd::close(dirfd).unwrap_or_else(|e| {
dbg!(e);
});
}
}
// should the component be a directory? it should if there is more path left to process, or
// if it has a trailing slash and `needs_final_component` is not set
component
if !path_stack.is_empty()
|| (component.ends_with(b"/") && !needs_final_component) =>
{
match openat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
OFlag::O_RDONLY | OFlag::O_DIRECTORY | OFlag::O_NOFOLLOW,
Mode::empty(),
) {
Ok(new_dir) =>
|
Err(e)
// Check to see if it was a symlink. Linux indicates
// this with ENOTDIR because of the O_DIRECTORY flag.
if e.as_errno() == Some(Errno::ELOOP)
|| e.as_errno() == Some(Errno::EMLINK)
|| e.as_errno() == Some(Errno::ENOTDIR) =>
{
// attempt symlink expansion
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
Err(e) => {
return ret_error(
&mut dir_stack,
host::errno_from_nix(e.as_errno().unwrap()),
);
}
}
}
// the final component
component => {
// if there's a trailing slash, or if `LOOKUP_SYMLINK_FOLLOW` is set, attempt
// symlink expansion
if component.ends_with(b"/") || (dirflags & host::__WASI_LOOKUP_SYMLINK_FOLLOW) != 0
{
match readlinkat(
*dir_stack.last().expect("dir_stack is never empty"),
component,
readlink_buf.as_mut_slice(),
) {
Ok(link_path) => {
symlink_expansions += 1;
if symlink_expansions > MAX_SYMLINK_EXPANSIONS {
return ret_error(
&mut dir_stack,
host::__WASI_ELOOP as host::__wasi_errno_t,
);
}
let mut link_path = link_path.as_bytes().to_vec();
// append a trailing slash if the component leading to it has one, so
// that we preserve any ENOTDIR that might come from trying to open a
// non-directory
if component.ends_with(b"/") {
link_path.push(b'/');
}
path_stack.push(link_path);
continue;
}
Err(e) => {
let errno = e.as_errno().unwrap();
if errno != Errno::EINVAL && errno != Errno::ENOENT {
// only return an error if this path is not actually a symlink
return ret_error(&mut dir_stack, host::errno_from_nix(errno));
}
}
}
}
// not a symlink, so we're done;
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::from_bytes(component).to_os_string(),
));
}
}
if path_stack.is_empty() {
// no further components to process. means we've hit a case like "." or "a/..", or if the
// input path has trailing slashes and `needs_final_component` is not set
return Ok((
ret_dir_success(&mut dir_stack),
OsStr::new(".").to_os_string(),
));
} else {
continue;
}
}
}
#[cfg(not(target_os = "macos"))]
pub fn utime_now() -> c_long {
libc::UTIME_NOW
}
#[cfg(target_os = "macos")]
pub fn utime_now() -> c_long {
-1
}
#[cfg(not(target_os = "macos"))]
pub fn utime_omit() -> c_long {
libc::UTIME_OMIT
}
#[cfg(target_os = "macos")]
pub fn utime_omit() -> c_long {
-2
}
|
{
dir_stack.push(new_dir);
continue;
}
|
conditional_block
|
tydecode.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type decoding
// tjc note: Would be great to have a `match check` macro equivalent
// for some of these
use core::prelude::*;
use middle::ty;
use core::str;
use core::uint;
use core::vec;
use syntax::abi::AbiSet;
use syntax::abi;
use syntax::ast;
use syntax::ast::*;
use syntax::codemap::dummy_sp;
use syntax::opt_vec;
// Compact string representation for ty::t values. API ty_str &
// parse_from_str. Extra parameters are for converting to/from def_ids in the
// data buffer. Whatever format you choose should not contain pipe characters.
// Def id conversion: when we encounter def-ids, they have to be translated.
// For example, the crate number must be converted from the crate number used
// in the library we are reading from into the local crate numbers in use
// here. To perform this translation, the type decoder is supplied with a
// conversion function of type `conv_did`.
//
// Sometimes, particularly when inlining, the correct translation of the
// def-id will depend on where it originated from. Therefore, the conversion
// function is given an indicator of the source of the def-id. See
// astencode.rs for more information.
pub enum DefIdSource {
// Identifies a struct, trait, enum, etc.
NominalType,
// Identifies a type alias (`type X = ...`).
TypeWithId,
// Identifies a type parameter (`fn foo<X>() { ... }`).
TypeParameter
}
type conv_did<'self> =
&'self fn(source: DefIdSource, ast::def_id) -> ast::def_id;
pub struct PState {
data: @~[u8],
crate: int,
pos: uint,
tcx: ty::ctxt
}
fn peek(st: @mut PState) -> char {
st.data[st.pos] as char
}
fn next(st: @mut PState) -> char {
let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u;
return ch;
}
fn next_byte(st: @mut PState) -> u8 {
let b = st.data[st.pos];
st.pos = st.pos + 1u;
return b;
}
fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
op: &fn(&[u8]) -> R) -> R
{
let start_pos = st.pos;
debug!("scan: '%c' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {
st.pos += 1;
debug!("scan: '%c'", st.data[st.pos] as char);
}
let end_pos = st.pos;
st.pos += 1;
return op(st.data.slice(start_pos, end_pos));
}
pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
fn is_last(b: char, c: char) -> bool { return c == b; }
return parse_ident_(st, |a| is_last(last, a) );
}
fn
|
(st: @mut PState, is_last: @fn(char) -> bool) ->
ast::ident {
let rslt = scan(st, is_last, str::from_bytes);
return st.tcx.sess.ident_of(rslt);
}
pub fn parse_state_from_data(data: @~[u8], crate_num: int,
pos: uint, tcx: ty::ctxt) -> @mut PState {
@mut PState {
data: data,
crate: crate_num,
pos: pos,
tcx: tcx
}
}
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::t {
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_ty(st, conv)
}
pub fn parse_arg_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::arg {
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_arg(st, conv)
}
fn parse_path(st: @mut PState) -> @ast::path {
let mut idents: ~[ast::ident] = ~[];
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
idents.push(parse_ident_(st, is_last));
loop {
match peek(st) {
':' => { next(st); next(st); }
c => {
if c == '(' {
return @ast::path { span: dummy_sp(),
global: false,
idents: idents,
rp: None,
types: ~[] };
} else { idents.push(parse_ident_(st, is_last)); }
}
}
};
}
fn parse_sigil(st: @mut PState) -> ast::Sigil {
match next(st) {
'@' => ast::ManagedSigil,
'~' => ast::OwnedSigil,
'&' => ast::BorrowedSigil,
c => st.tcx.sess.bug(fmt!("parse_sigil(): bad input '%c'", c))
}
}
fn parse_vstore(st: @mut PState) -> ty::vstore {
assert!(next(st) == '/');
let c = peek(st);
if '0' <= c && c <= '9' {
let n = parse_int(st) as uint;
assert!(next(st) == '|');
return ty::vstore_fixed(n);
}
match next(st) {
'~' => ty::vstore_uniq,
'@' => ty::vstore_box,
'&' => ty::vstore_slice(parse_region(st)),
c => st.tcx.sess.bug(fmt!("parse_vstore(): bad input '%c'", c))
}
}
fn parse_trait_store(st: @mut PState) -> ty::TraitStore {
match next(st) {
'~' => ty::UniqTraitStore,
'@' => ty::BoxTraitStore,
'&' => ty::RegionTraitStore(parse_region(st)),
'.' => ty::BareTraitStore,
c => st.tcx.sess.bug(fmt!("parse_trait_store(): bad input '%c'", c))
}
}
fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
let self_r = parse_opt(st, || parse_region(st) );
let self_ty = parse_opt(st, || parse_ty(st, conv) );
assert!(next(st) == '[');
let mut params: ~[ty::t] = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::substs {
self_r: self_r,
self_ty: self_ty,
tps: params
};
}
fn parse_bound_region(st: @mut PState) -> ty::bound_region {
match next(st) {
's' => ty::br_self,
'a' => {
let id = parse_int(st) as uint;
assert!(next(st) == '|');
ty::br_anon(id)
}
'[' => ty::br_named(st.tcx.sess.ident_of(parse_str(st, ']'))),
'c' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::br_cap_avoid(id, @parse_bound_region(st))
},
_ => fail!(~"parse_bound_region: bad input")
}
}
fn parse_region(st: @mut PState) -> ty::Region {
match next(st) {
'b' => {
ty::re_bound(parse_bound_region(st))
}
'f' => {
assert!(next(st) == '[');
let id = parse_int(st);
assert!(next(st) == '|');
let br = parse_bound_region(st);
assert!(next(st) == ']');
ty::re_free(id, br)
}
's' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::re_scope(id)
}
't' => {
ty::re_static
}
_ => fail!(~"parse_region: bad input")
}
}
fn parse_opt<T>(st: @mut PState, f: &fn() -> T) -> Option<T> {
match next(st) {
'n' => None,
's' => Some(f()),
_ => fail!(~"parse_opt: bad input")
}
}
fn parse_str(st: @mut PState, term: char) -> ~str {
let mut result = ~"";
while peek(st) != term {
result += str::from_byte(next_byte(st));
}
next(st);
return result;
}
fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
match next(st) {
'n' => return ty::mk_nil(st.tcx),
'z' => return ty::mk_bot(st.tcx),
'b' => return ty::mk_bool(st.tcx),
'i' => return ty::mk_int(st.tcx),
'u' => return ty::mk_uint(st.tcx),
'l' => return ty::mk_float(st.tcx),
'M' => {
match next(st) {
'b' => return ty::mk_mach_uint(st.tcx, ast::ty_u8),
'w' => return ty::mk_mach_uint(st.tcx, ast::ty_u16),
'l' => return ty::mk_mach_uint(st.tcx, ast::ty_u32),
'd' => return ty::mk_mach_uint(st.tcx, ast::ty_u64),
'B' => return ty::mk_mach_int(st.tcx, ast::ty_i8),
'W' => return ty::mk_mach_int(st.tcx, ast::ty_i16),
'L' => return ty::mk_mach_int(st.tcx, ast::ty_i32),
'D' => return ty::mk_mach_int(st.tcx, ast::ty_i64),
'f' => return ty::mk_mach_float(st.tcx, ast::ty_f32),
'F' => return ty::mk_mach_float(st.tcx, ast::ty_f64),
_ => fail!(~"parse_ty: bad numeric type")
}
}
'c' => return ty::mk_char(st.tcx),
't' => {
assert!((next(st) == '['));
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!(next(st) == ']');
return ty::mk_enum(st.tcx, def, substs);
}
'x' => {
assert!(next(st) == '[');
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
let store = parse_trait_store(st);
assert!(next(st) == ']');
return ty::mk_trait(st.tcx, def, substs, store);
}
'p' => {
let did = parse_def(st, TypeParameter, conv);
debug!("parsed ty_param: did=%?", did);
return ty::mk_param(st.tcx, parse_int(st) as uint, did);
}
's' => {
let did = parse_def(st, TypeParameter, conv);
return ty::mk_self(st.tcx, did);
}
'@' => return ty::mk_box(st.tcx, parse_mt(st, conv)),
'~' => return ty::mk_uniq(st.tcx, parse_mt(st, conv)),
'*' => return ty::mk_ptr(st.tcx, parse_mt(st, conv)),
'&' => {
let r = parse_region(st);
let mt = parse_mt(st, conv);
return ty::mk_rptr(st.tcx, r, mt);
}
'U' => return ty::mk_unboxed_vec(st.tcx, parse_mt(st, conv)),
'V' => {
let mt = parse_mt(st, conv);
let v = parse_vstore(st);
return ty::mk_evec(st.tcx, mt, v);
}
'v' => {
let v = parse_vstore(st);
return ty::mk_estr(st.tcx, v);
}
'T' => {
assert!((next(st) == '['));
let mut params = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
}
'f' => {
return ty::mk_closure(st.tcx, parse_closure_ty(st, conv));
}
'F' => {
return ty::mk_bare_fn(st.tcx, parse_bare_fn_ty(st, conv));
}
'Y' => return ty::mk_type(st.tcx),
'C' => {
let sigil = parse_sigil(st);
return ty::mk_opaque_closure_ptr(st.tcx, sigil);
}
'#' => {
let pos = parse_hex(st);
assert!((next(st) == ':'));
let len = parse_hex(st);
assert!((next(st) == '#'));
let key = ty::creader_cache_key {cnum: st.crate,
pos: pos,
len: len };
match st.tcx.rcache.find(&key) {
Some(&tt) => return tt,
None => {
let ps = @mut PState {pos: pos ,.. copy *st};
let tt = parse_ty(ps, conv);
st.tcx.rcache.insert(key, tt);
return tt;
}
}
}
'"' => {
let def = parse_def(st, TypeWithId, conv);
let inner = parse_ty(st, conv);
ty::mk_with_id(st.tcx, inner, def)
}
'B' => ty::mk_opaque_box(st.tcx),
'a' => {
assert!((next(st) == '['));
let did = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!((next(st) == ']'));
return ty::mk_struct(st.tcx, did, substs);
}
c => { error!("unexpected char in type string: %c", c); fail!();}
}
}
fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
let mut m;
match peek(st) {
'm' => { next(st); m = ast::m_mutbl; }
'?' => { next(st); m = ast::m_const; }
_ => { m = ast::m_imm; }
}
ty::mt { ty: parse_ty(st, conv), mutbl: m }
}
fn parse_def(st: @mut PState, source: DefIdSource,
conv: conv_did) -> ast::def_id {
let mut def = ~[];
while peek(st) != '|' { def.push(next_byte(st)); }
st.pos = st.pos + 1u;
return conv(source, parse_def_id(def));
}
fn parse_int(st: @mut PState) -> int {
let mut n = 0;
loop {
let cur = peek(st);
if cur < '0' || cur > '9' { return n; }
st.pos = st.pos + 1u;
n *= 10;
n += (cur as int) - ('0' as int);
};
}
fn parse_hex(st: @mut PState) -> uint {
let mut n = 0u;
loop {
let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; }
st.pos = st.pos + 1u;
n *= 16u;
if '0' <= cur && cur <= '9' {
n += (cur as uint) - ('0' as uint);
} else { n += 10u + (cur as uint) - ('a' as uint); }
};
}
fn parse_purity(c: char) -> purity {
match c {
'u' => unsafe_fn,
'p' => pure_fn,
'i' => impure_fn,
'c' => extern_fn,
_ => fail!(~"parse_purity: bad purity")
}
}
fn parse_abi_set(st: @mut PState) -> AbiSet {
assert!(next(st) == '[');
let mut abis = AbiSet::empty();
while peek(st) != ']' {
// FIXME(#5422) str API should not force this copy
let abi_str = scan(st, |c| c == ',', str::from_bytes);
let abi = abi::lookup(abi_str).expect(abi_str);
abis.add(abi);
}
assert!(next(st) == ']');
return abis;
}
fn parse_onceness(c: char) -> ast::Onceness {
match c {
'o' => ast::Once,
'm' => ast::Many,
_ => fail!(~"parse_onceness: bad onceness")
}
}
fn parse_arg(st: @mut PState, conv: conv_did) -> ty::arg {
ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) }
}
fn parse_mode(st: @mut PState) -> ast::mode {
let m = ast::expl(match next(st) {
'+' => ast::by_copy,
'=' => ast::by_ref,
_ => fail!(~"bad mode")
});
return m;
}
fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
let sigil = parse_sigil(st);
let purity = parse_purity(next(st));
let onceness = parse_onceness(next(st));
let region = parse_region(st);
let sig = parse_sig(st, conv);
ty::ClosureTy {
purity: purity,
sigil: sigil,
onceness: onceness,
region: region,
sig: sig
}
}
fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
let purity = parse_purity(next(st));
let abi = parse_abi_set(st);
let sig = parse_sig(st, conv);
ty::BareFnTy {
purity: purity,
abis: abi,
sig: sig
}
}
fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
assert!((next(st) == '['));
let mut inputs: ~[ty::arg] = ~[];
while peek(st) != ']' {
let mode = parse_mode(st);
inputs.push(ty::arg { mode: mode, ty: parse_ty(st, conv) });
}
st.pos += 1u; // eat the ']'
let ret_ty = parse_ty(st, conv);
ty::FnSig {bound_lifetime_names: opt_vec::Empty, // FIXME(#4846)
inputs: inputs,
output: ret_ty}
}
// Rust metadata parsing
pub fn parse_def_id(buf: &[u8]) -> ast::def_id {
let mut colon_idx = 0u;
let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!("didn't find ':' when parsing def id");
fail!();
}
let crate_part = vec::slice(buf, 0u, colon_idx);
let def_part = vec::slice(buf, colon_idx + 1u, len);
let crate_num = match uint::parse_bytes(crate_part, 10u) {
Some(cn) => cn as int,
None => fail!(fmt!("internal error: parse_def_id: crate number \
expected, but found %?", crate_part))
};
let def_num = match uint::parse_bytes(def_part, 10u) {
Some(dn) => dn as int,
None => fail!(fmt!("internal error: parse_def_id: id expected, but \
found %?", def_part))
};
ast::def_id { crate: crate_num, node: def_num }
}
pub fn parse_bounds_data(data: @~[u8], start: uint,
crate_num: int, tcx: ty::ctxt, conv: conv_did)
-> @~[ty::param_bound] {
let st = parse_state_from_data(data, crate_num, start, tcx);
parse_bounds(st, conv)
}
fn parse_bounds(st: @mut PState, conv: conv_did) -> @~[ty::param_bound] {
let mut bounds = ~[];
loop {
bounds.push(match next(st) {
'S' => ty::bound_owned,
'C' => ty::bound_copy,
'K' => ty::bound_const,
'O' => ty::bound_durable,
'I' => ty::bound_trait(parse_ty(st, conv)),
'.' => break,
_ => fail!(~"parse_bounds: bad bounds")
});
}
@bounds
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//
|
parse_ident_
|
identifier_name
|
tydecode.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type decoding
// tjc note: Would be great to have a `match check` macro equivalent
// for some of these
use core::prelude::*;
use middle::ty;
use core::str;
use core::uint;
use core::vec;
use syntax::abi::AbiSet;
use syntax::abi;
use syntax::ast;
use syntax::ast::*;
use syntax::codemap::dummy_sp;
use syntax::opt_vec;
// Compact string representation for ty::t values. API ty_str &
// parse_from_str. Extra parameters are for converting to/from def_ids in the
// data buffer. Whatever format you choose should not contain pipe characters.
// Def id conversion: when we encounter def-ids, they have to be translated.
// For example, the crate number must be converted from the crate number used
// in the library we are reading from into the local crate numbers in use
// here. To perform this translation, the type decoder is supplied with a
// conversion function of type `conv_did`.
//
// Sometimes, particularly when inlining, the correct translation of the
// def-id will depend on where it originated from. Therefore, the conversion
// function is given an indicator of the source of the def-id. See
// astencode.rs for more information.
pub enum DefIdSource {
// Identifies a struct, trait, enum, etc.
NominalType,
// Identifies a type alias (`type X = ...`).
TypeWithId,
// Identifies a type parameter (`fn foo<X>() { ... }`).
TypeParameter
}
type conv_did<'self> =
&'self fn(source: DefIdSource, ast::def_id) -> ast::def_id;
pub struct PState {
data: @~[u8],
crate: int,
pos: uint,
tcx: ty::ctxt
}
fn peek(st: @mut PState) -> char {
st.data[st.pos] as char
}
fn next(st: @mut PState) -> char {
let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u;
return ch;
}
fn next_byte(st: @mut PState) -> u8 {
let b = st.data[st.pos];
st.pos = st.pos + 1u;
return b;
}
fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
op: &fn(&[u8]) -> R) -> R
{
let start_pos = st.pos;
debug!("scan: '%c' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {
st.pos += 1;
debug!("scan: '%c'", st.data[st.pos] as char);
}
let end_pos = st.pos;
st.pos += 1;
return op(st.data.slice(start_pos, end_pos));
}
pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
fn is_last(b: char, c: char) -> bool { return c == b; }
return parse_ident_(st, |a| is_last(last, a) );
}
fn parse_ident_(st: @mut PState, is_last: @fn(char) -> bool) ->
ast::ident {
let rslt = scan(st, is_last, str::from_bytes);
return st.tcx.sess.ident_of(rslt);
}
pub fn parse_state_from_data(data: @~[u8], crate_num: int,
pos: uint, tcx: ty::ctxt) -> @mut PState {
@mut PState {
data: data,
crate: crate_num,
pos: pos,
tcx: tcx
}
}
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::t
|
pub fn parse_arg_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::arg {
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_arg(st, conv)
}
fn parse_path(st: @mut PState) -> @ast::path {
let mut idents: ~[ast::ident] = ~[];
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
idents.push(parse_ident_(st, is_last));
loop {
match peek(st) {
':' => { next(st); next(st); }
c => {
if c == '(' {
return @ast::path { span: dummy_sp(),
global: false,
idents: idents,
rp: None,
types: ~[] };
} else { idents.push(parse_ident_(st, is_last)); }
}
}
};
}
fn parse_sigil(st: @mut PState) -> ast::Sigil {
match next(st) {
'@' => ast::ManagedSigil,
'~' => ast::OwnedSigil,
'&' => ast::BorrowedSigil,
c => st.tcx.sess.bug(fmt!("parse_sigil(): bad input '%c'", c))
}
}
fn parse_vstore(st: @mut PState) -> ty::vstore {
assert!(next(st) == '/');
let c = peek(st);
if '0' <= c && c <= '9' {
let n = parse_int(st) as uint;
assert!(next(st) == '|');
return ty::vstore_fixed(n);
}
match next(st) {
'~' => ty::vstore_uniq,
'@' => ty::vstore_box,
'&' => ty::vstore_slice(parse_region(st)),
c => st.tcx.sess.bug(fmt!("parse_vstore(): bad input '%c'", c))
}
}
fn parse_trait_store(st: @mut PState) -> ty::TraitStore {
match next(st) {
'~' => ty::UniqTraitStore,
'@' => ty::BoxTraitStore,
'&' => ty::RegionTraitStore(parse_region(st)),
'.' => ty::BareTraitStore,
c => st.tcx.sess.bug(fmt!("parse_trait_store(): bad input '%c'", c))
}
}
fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
let self_r = parse_opt(st, || parse_region(st) );
let self_ty = parse_opt(st, || parse_ty(st, conv) );
assert!(next(st) == '[');
let mut params: ~[ty::t] = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::substs {
self_r: self_r,
self_ty: self_ty,
tps: params
};
}
fn parse_bound_region(st: @mut PState) -> ty::bound_region {
match next(st) {
's' => ty::br_self,
'a' => {
let id = parse_int(st) as uint;
assert!(next(st) == '|');
ty::br_anon(id)
}
'[' => ty::br_named(st.tcx.sess.ident_of(parse_str(st, ']'))),
'c' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::br_cap_avoid(id, @parse_bound_region(st))
},
_ => fail!(~"parse_bound_region: bad input")
}
}
fn parse_region(st: @mut PState) -> ty::Region {
match next(st) {
'b' => {
ty::re_bound(parse_bound_region(st))
}
'f' => {
assert!(next(st) == '[');
let id = parse_int(st);
assert!(next(st) == '|');
let br = parse_bound_region(st);
assert!(next(st) == ']');
ty::re_free(id, br)
}
's' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::re_scope(id)
}
't' => {
ty::re_static
}
_ => fail!(~"parse_region: bad input")
}
}
fn parse_opt<T>(st: @mut PState, f: &fn() -> T) -> Option<T> {
match next(st) {
'n' => None,
's' => Some(f()),
_ => fail!(~"parse_opt: bad input")
}
}
fn parse_str(st: @mut PState, term: char) -> ~str {
let mut result = ~"";
while peek(st) != term {
result += str::from_byte(next_byte(st));
}
next(st);
return result;
}
fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
match next(st) {
'n' => return ty::mk_nil(st.tcx),
'z' => return ty::mk_bot(st.tcx),
'b' => return ty::mk_bool(st.tcx),
'i' => return ty::mk_int(st.tcx),
'u' => return ty::mk_uint(st.tcx),
'l' => return ty::mk_float(st.tcx),
'M' => {
match next(st) {
'b' => return ty::mk_mach_uint(st.tcx, ast::ty_u8),
'w' => return ty::mk_mach_uint(st.tcx, ast::ty_u16),
'l' => return ty::mk_mach_uint(st.tcx, ast::ty_u32),
'd' => return ty::mk_mach_uint(st.tcx, ast::ty_u64),
'B' => return ty::mk_mach_int(st.tcx, ast::ty_i8),
'W' => return ty::mk_mach_int(st.tcx, ast::ty_i16),
'L' => return ty::mk_mach_int(st.tcx, ast::ty_i32),
'D' => return ty::mk_mach_int(st.tcx, ast::ty_i64),
'f' => return ty::mk_mach_float(st.tcx, ast::ty_f32),
'F' => return ty::mk_mach_float(st.tcx, ast::ty_f64),
_ => fail!(~"parse_ty: bad numeric type")
}
}
'c' => return ty::mk_char(st.tcx),
't' => {
assert!((next(st) == '['));
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!(next(st) == ']');
return ty::mk_enum(st.tcx, def, substs);
}
'x' => {
assert!(next(st) == '[');
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
let store = parse_trait_store(st);
assert!(next(st) == ']');
return ty::mk_trait(st.tcx, def, substs, store);
}
'p' => {
let did = parse_def(st, TypeParameter, conv);
debug!("parsed ty_param: did=%?", did);
return ty::mk_param(st.tcx, parse_int(st) as uint, did);
}
's' => {
let did = parse_def(st, TypeParameter, conv);
return ty::mk_self(st.tcx, did);
}
'@' => return ty::mk_box(st.tcx, parse_mt(st, conv)),
'~' => return ty::mk_uniq(st.tcx, parse_mt(st, conv)),
'*' => return ty::mk_ptr(st.tcx, parse_mt(st, conv)),
'&' => {
let r = parse_region(st);
let mt = parse_mt(st, conv);
return ty::mk_rptr(st.tcx, r, mt);
}
'U' => return ty::mk_unboxed_vec(st.tcx, parse_mt(st, conv)),
'V' => {
let mt = parse_mt(st, conv);
let v = parse_vstore(st);
return ty::mk_evec(st.tcx, mt, v);
}
'v' => {
let v = parse_vstore(st);
return ty::mk_estr(st.tcx, v);
}
'T' => {
assert!((next(st) == '['));
let mut params = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
}
'f' => {
return ty::mk_closure(st.tcx, parse_closure_ty(st, conv));
}
'F' => {
return ty::mk_bare_fn(st.tcx, parse_bare_fn_ty(st, conv));
}
'Y' => return ty::mk_type(st.tcx),
'C' => {
let sigil = parse_sigil(st);
return ty::mk_opaque_closure_ptr(st.tcx, sigil);
}
'#' => {
let pos = parse_hex(st);
assert!((next(st) == ':'));
let len = parse_hex(st);
assert!((next(st) == '#'));
let key = ty::creader_cache_key {cnum: st.crate,
pos: pos,
len: len };
match st.tcx.rcache.find(&key) {
Some(&tt) => return tt,
None => {
let ps = @mut PState {pos: pos ,.. copy *st};
let tt = parse_ty(ps, conv);
st.tcx.rcache.insert(key, tt);
return tt;
}
}
}
'"' => {
let def = parse_def(st, TypeWithId, conv);
let inner = parse_ty(st, conv);
ty::mk_with_id(st.tcx, inner, def)
}
'B' => ty::mk_opaque_box(st.tcx),
'a' => {
assert!((next(st) == '['));
let did = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!((next(st) == ']'));
return ty::mk_struct(st.tcx, did, substs);
}
c => { error!("unexpected char in type string: %c", c); fail!();}
}
}
fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
let mut m;
match peek(st) {
'm' => { next(st); m = ast::m_mutbl; }
'?' => { next(st); m = ast::m_const; }
_ => { m = ast::m_imm; }
}
ty::mt { ty: parse_ty(st, conv), mutbl: m }
}
fn parse_def(st: @mut PState, source: DefIdSource,
conv: conv_did) -> ast::def_id {
let mut def = ~[];
while peek(st) != '|' { def.push(next_byte(st)); }
st.pos = st.pos + 1u;
return conv(source, parse_def_id(def));
}
fn parse_int(st: @mut PState) -> int {
let mut n = 0;
loop {
let cur = peek(st);
if cur < '0' || cur > '9' { return n; }
st.pos = st.pos + 1u;
n *= 10;
n += (cur as int) - ('0' as int);
};
}
fn parse_hex(st: @mut PState) -> uint {
let mut n = 0u;
loop {
let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; }
st.pos = st.pos + 1u;
n *= 16u;
if '0' <= cur && cur <= '9' {
n += (cur as uint) - ('0' as uint);
} else { n += 10u + (cur as uint) - ('a' as uint); }
};
}
fn parse_purity(c: char) -> purity {
match c {
'u' => unsafe_fn,
'p' => pure_fn,
'i' => impure_fn,
'c' => extern_fn,
_ => fail!(~"parse_purity: bad purity")
}
}
fn parse_abi_set(st: @mut PState) -> AbiSet {
assert!(next(st) == '[');
let mut abis = AbiSet::empty();
while peek(st) != ']' {
// FIXME(#5422) str API should not force this copy
let abi_str = scan(st, |c| c == ',', str::from_bytes);
let abi = abi::lookup(abi_str).expect(abi_str);
abis.add(abi);
}
assert!(next(st) == ']');
return abis;
}
fn parse_onceness(c: char) -> ast::Onceness {
match c {
'o' => ast::Once,
'm' => ast::Many,
_ => fail!(~"parse_onceness: bad onceness")
}
}
fn parse_arg(st: @mut PState, conv: conv_did) -> ty::arg {
ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) }
}
fn parse_mode(st: @mut PState) -> ast::mode {
let m = ast::expl(match next(st) {
'+' => ast::by_copy,
'=' => ast::by_ref,
_ => fail!(~"bad mode")
});
return m;
}
fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
let sigil = parse_sigil(st);
let purity = parse_purity(next(st));
let onceness = parse_onceness(next(st));
let region = parse_region(st);
let sig = parse_sig(st, conv);
ty::ClosureTy {
purity: purity,
sigil: sigil,
onceness: onceness,
region: region,
sig: sig
}
}
fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
let purity = parse_purity(next(st));
let abi = parse_abi_set(st);
let sig = parse_sig(st, conv);
ty::BareFnTy {
purity: purity,
abis: abi,
sig: sig
}
}
fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
assert!((next(st) == '['));
let mut inputs: ~[ty::arg] = ~[];
while peek(st) != ']' {
let mode = parse_mode(st);
inputs.push(ty::arg { mode: mode, ty: parse_ty(st, conv) });
}
st.pos += 1u; // eat the ']'
let ret_ty = parse_ty(st, conv);
ty::FnSig {bound_lifetime_names: opt_vec::Empty, // FIXME(#4846)
inputs: inputs,
output: ret_ty}
}
// Rust metadata parsing
pub fn parse_def_id(buf: &[u8]) -> ast::def_id {
let mut colon_idx = 0u;
let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!("didn't find ':' when parsing def id");
fail!();
}
let crate_part = vec::slice(buf, 0u, colon_idx);
let def_part = vec::slice(buf, colon_idx + 1u, len);
let crate_num = match uint::parse_bytes(crate_part, 10u) {
Some(cn) => cn as int,
None => fail!(fmt!("internal error: parse_def_id: crate number \
expected, but found %?", crate_part))
};
let def_num = match uint::parse_bytes(def_part, 10u) {
Some(dn) => dn as int,
None => fail!(fmt!("internal error: parse_def_id: id expected, but \
found %?", def_part))
};
ast::def_id { crate: crate_num, node: def_num }
}
pub fn parse_bounds_data(data: @~[u8], start: uint,
crate_num: int, tcx: ty::ctxt, conv: conv_did)
-> @~[ty::param_bound] {
let st = parse_state_from_data(data, crate_num, start, tcx);
parse_bounds(st, conv)
}
fn parse_bounds(st: @mut PState, conv: conv_did) -> @~[ty::param_bound] {
let mut bounds = ~[];
loop {
bounds.push(match next(st) {
'S' => ty::bound_owned,
'C' => ty::bound_copy,
'K' => ty::bound_const,
'O' => ty::bound_durable,
'I' => ty::bound_trait(parse_ty(st, conv)),
'.' => break,
_ => fail!(~"parse_bounds: bad bounds")
});
}
@bounds
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//
|
{
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_ty(st, conv)
}
|
identifier_body
|
tydecode.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type decoding
// tjc note: Would be great to have a `match check` macro equivalent
// for some of these
use core::prelude::*;
use middle::ty;
use core::str;
use core::uint;
use core::vec;
use syntax::abi::AbiSet;
use syntax::abi;
use syntax::ast;
use syntax::ast::*;
use syntax::codemap::dummy_sp;
use syntax::opt_vec;
// Compact string representation for ty::t values. API ty_str &
// parse_from_str. Extra parameters are for converting to/from def_ids in the
// data buffer. Whatever format you choose should not contain pipe characters.
// Def id conversion: when we encounter def-ids, they have to be translated.
// For example, the crate number must be converted from the crate number used
// in the library we are reading from into the local crate numbers in use
// here. To perform this translation, the type decoder is supplied with a
// conversion function of type `conv_did`.
//
// Sometimes, particularly when inlining, the correct translation of the
// def-id will depend on where it originated from. Therefore, the conversion
// function is given an indicator of the source of the def-id. See
// astencode.rs for more information.
pub enum DefIdSource {
// Identifies a struct, trait, enum, etc.
NominalType,
// Identifies a type alias (`type X = ...`).
TypeWithId,
// Identifies a type parameter (`fn foo<X>() { ... }`).
TypeParameter
}
type conv_did<'self> =
&'self fn(source: DefIdSource, ast::def_id) -> ast::def_id;
pub struct PState {
data: @~[u8],
crate: int,
pos: uint,
tcx: ty::ctxt
}
fn peek(st: @mut PState) -> char {
st.data[st.pos] as char
}
fn next(st: @mut PState) -> char {
let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u;
return ch;
}
fn next_byte(st: @mut PState) -> u8 {
let b = st.data[st.pos];
st.pos = st.pos + 1u;
return b;
}
fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
op: &fn(&[u8]) -> R) -> R
{
let start_pos = st.pos;
debug!("scan: '%c' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {
st.pos += 1;
debug!("scan: '%c'", st.data[st.pos] as char);
}
let end_pos = st.pos;
st.pos += 1;
return op(st.data.slice(start_pos, end_pos));
}
pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
fn is_last(b: char, c: char) -> bool { return c == b; }
return parse_ident_(st, |a| is_last(last, a) );
}
fn parse_ident_(st: @mut PState, is_last: @fn(char) -> bool) ->
ast::ident {
let rslt = scan(st, is_last, str::from_bytes);
return st.tcx.sess.ident_of(rslt);
}
pub fn parse_state_from_data(data: @~[u8], crate_num: int,
pos: uint, tcx: ty::ctxt) -> @mut PState {
@mut PState {
data: data,
crate: crate_num,
pos: pos,
tcx: tcx
}
}
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::t {
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_ty(st, conv)
}
pub fn parse_arg_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
conv: conv_did) -> ty::arg {
let st = parse_state_from_data(data, crate_num, pos, tcx);
parse_arg(st, conv)
}
fn parse_path(st: @mut PState) -> @ast::path {
let mut idents: ~[ast::ident] = ~[];
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
idents.push(parse_ident_(st, is_last));
loop {
match peek(st) {
':' => { next(st); next(st); }
c => {
if c == '(' {
return @ast::path { span: dummy_sp(),
global: false,
idents: idents,
rp: None,
types: ~[] };
} else { idents.push(parse_ident_(st, is_last)); }
}
}
};
}
fn parse_sigil(st: @mut PState) -> ast::Sigil {
match next(st) {
'@' => ast::ManagedSigil,
'~' => ast::OwnedSigil,
'&' => ast::BorrowedSigil,
c => st.tcx.sess.bug(fmt!("parse_sigil(): bad input '%c'", c))
}
}
fn parse_vstore(st: @mut PState) -> ty::vstore {
assert!(next(st) == '/');
let c = peek(st);
if '0' <= c && c <= '9' {
let n = parse_int(st) as uint;
assert!(next(st) == '|');
return ty::vstore_fixed(n);
}
match next(st) {
'~' => ty::vstore_uniq,
'@' => ty::vstore_box,
'&' => ty::vstore_slice(parse_region(st)),
c => st.tcx.sess.bug(fmt!("parse_vstore(): bad input '%c'", c))
}
}
fn parse_trait_store(st: @mut PState) -> ty::TraitStore {
match next(st) {
'~' => ty::UniqTraitStore,
'@' => ty::BoxTraitStore,
'&' => ty::RegionTraitStore(parse_region(st)),
'.' => ty::BareTraitStore,
c => st.tcx.sess.bug(fmt!("parse_trait_store(): bad input '%c'", c))
}
}
fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
let self_r = parse_opt(st, || parse_region(st) );
let self_ty = parse_opt(st, || parse_ty(st, conv) );
assert!(next(st) == '[');
let mut params: ~[ty::t] = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::substs {
self_r: self_r,
self_ty: self_ty,
tps: params
};
}
fn parse_bound_region(st: @mut PState) -> ty::bound_region {
match next(st) {
's' => ty::br_self,
'a' => {
let id = parse_int(st) as uint;
assert!(next(st) == '|');
ty::br_anon(id)
}
'[' => ty::br_named(st.tcx.sess.ident_of(parse_str(st, ']'))),
'c' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::br_cap_avoid(id, @parse_bound_region(st))
},
_ => fail!(~"parse_bound_region: bad input")
}
}
fn parse_region(st: @mut PState) -> ty::Region {
match next(st) {
'b' => {
ty::re_bound(parse_bound_region(st))
}
'f' => {
assert!(next(st) == '[');
let id = parse_int(st);
assert!(next(st) == '|');
let br = parse_bound_region(st);
assert!(next(st) == ']');
ty::re_free(id, br)
}
's' => {
let id = parse_int(st);
assert!(next(st) == '|');
ty::re_scope(id)
}
't' => {
ty::re_static
}
_ => fail!(~"parse_region: bad input")
}
}
fn parse_opt<T>(st: @mut PState, f: &fn() -> T) -> Option<T> {
match next(st) {
'n' => None,
's' => Some(f()),
_ => fail!(~"parse_opt: bad input")
}
}
fn parse_str(st: @mut PState, term: char) -> ~str {
let mut result = ~"";
while peek(st) != term {
result += str::from_byte(next_byte(st));
}
next(st);
return result;
}
fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
match next(st) {
'n' => return ty::mk_nil(st.tcx),
'z' => return ty::mk_bot(st.tcx),
'b' => return ty::mk_bool(st.tcx),
'i' => return ty::mk_int(st.tcx),
'u' => return ty::mk_uint(st.tcx),
'l' => return ty::mk_float(st.tcx),
'M' => {
match next(st) {
'b' => return ty::mk_mach_uint(st.tcx, ast::ty_u8),
'w' => return ty::mk_mach_uint(st.tcx, ast::ty_u16),
'l' => return ty::mk_mach_uint(st.tcx, ast::ty_u32),
'd' => return ty::mk_mach_uint(st.tcx, ast::ty_u64),
'B' => return ty::mk_mach_int(st.tcx, ast::ty_i8),
'W' => return ty::mk_mach_int(st.tcx, ast::ty_i16),
'L' => return ty::mk_mach_int(st.tcx, ast::ty_i32),
'D' => return ty::mk_mach_int(st.tcx, ast::ty_i64),
'f' => return ty::mk_mach_float(st.tcx, ast::ty_f32),
'F' => return ty::mk_mach_float(st.tcx, ast::ty_f64),
_ => fail!(~"parse_ty: bad numeric type")
}
}
'c' => return ty::mk_char(st.tcx),
't' => {
assert!((next(st) == '['));
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!(next(st) == ']');
return ty::mk_enum(st.tcx, def, substs);
}
'x' => {
assert!(next(st) == '[');
let def = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
let store = parse_trait_store(st);
assert!(next(st) == ']');
return ty::mk_trait(st.tcx, def, substs, store);
}
'p' => {
let did = parse_def(st, TypeParameter, conv);
debug!("parsed ty_param: did=%?", did);
return ty::mk_param(st.tcx, parse_int(st) as uint, did);
}
's' => {
let did = parse_def(st, TypeParameter, conv);
return ty::mk_self(st.tcx, did);
}
'@' => return ty::mk_box(st.tcx, parse_mt(st, conv)),
'~' => return ty::mk_uniq(st.tcx, parse_mt(st, conv)),
'*' => return ty::mk_ptr(st.tcx, parse_mt(st, conv)),
'&' => {
let r = parse_region(st);
let mt = parse_mt(st, conv);
return ty::mk_rptr(st.tcx, r, mt);
}
'U' => return ty::mk_unboxed_vec(st.tcx, parse_mt(st, conv)),
'V' => {
let mt = parse_mt(st, conv);
let v = parse_vstore(st);
return ty::mk_evec(st.tcx, mt, v);
}
'v' => {
let v = parse_vstore(st);
return ty::mk_estr(st.tcx, v);
|
}
'T' => {
assert!((next(st) == '['));
let mut params = ~[];
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
}
'f' => {
return ty::mk_closure(st.tcx, parse_closure_ty(st, conv));
}
'F' => {
return ty::mk_bare_fn(st.tcx, parse_bare_fn_ty(st, conv));
}
'Y' => return ty::mk_type(st.tcx),
'C' => {
let sigil = parse_sigil(st);
return ty::mk_opaque_closure_ptr(st.tcx, sigil);
}
'#' => {
let pos = parse_hex(st);
assert!((next(st) == ':'));
let len = parse_hex(st);
assert!((next(st) == '#'));
let key = ty::creader_cache_key {cnum: st.crate,
pos: pos,
len: len };
match st.tcx.rcache.find(&key) {
Some(&tt) => return tt,
None => {
let ps = @mut PState {pos: pos ,.. copy *st};
let tt = parse_ty(ps, conv);
st.tcx.rcache.insert(key, tt);
return tt;
}
}
}
'"' => {
let def = parse_def(st, TypeWithId, conv);
let inner = parse_ty(st, conv);
ty::mk_with_id(st.tcx, inner, def)
}
'B' => ty::mk_opaque_box(st.tcx),
'a' => {
assert!((next(st) == '['));
let did = parse_def(st, NominalType, conv);
let substs = parse_substs(st, conv);
assert!((next(st) == ']'));
return ty::mk_struct(st.tcx, did, substs);
}
c => { error!("unexpected char in type string: %c", c); fail!();}
}
}
fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
let mut m;
match peek(st) {
'm' => { next(st); m = ast::m_mutbl; }
'?' => { next(st); m = ast::m_const; }
_ => { m = ast::m_imm; }
}
ty::mt { ty: parse_ty(st, conv), mutbl: m }
}
fn parse_def(st: @mut PState, source: DefIdSource,
conv: conv_did) -> ast::def_id {
let mut def = ~[];
while peek(st) != '|' { def.push(next_byte(st)); }
st.pos = st.pos + 1u;
return conv(source, parse_def_id(def));
}
fn parse_int(st: @mut PState) -> int {
let mut n = 0;
loop {
let cur = peek(st);
if cur < '0' || cur > '9' { return n; }
st.pos = st.pos + 1u;
n *= 10;
n += (cur as int) - ('0' as int);
};
}
fn parse_hex(st: @mut PState) -> uint {
let mut n = 0u;
loop {
let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; }
st.pos = st.pos + 1u;
n *= 16u;
if '0' <= cur && cur <= '9' {
n += (cur as uint) - ('0' as uint);
} else { n += 10u + (cur as uint) - ('a' as uint); }
};
}
fn parse_purity(c: char) -> purity {
match c {
'u' => unsafe_fn,
'p' => pure_fn,
'i' => impure_fn,
'c' => extern_fn,
_ => fail!(~"parse_purity: bad purity")
}
}
fn parse_abi_set(st: @mut PState) -> AbiSet {
assert!(next(st) == '[');
let mut abis = AbiSet::empty();
while peek(st) != ']' {
// FIXME(#5422) str API should not force this copy
let abi_str = scan(st, |c| c == ',', str::from_bytes);
let abi = abi::lookup(abi_str).expect(abi_str);
abis.add(abi);
}
assert!(next(st) == ']');
return abis;
}
fn parse_onceness(c: char) -> ast::Onceness {
match c {
'o' => ast::Once,
'm' => ast::Many,
_ => fail!(~"parse_onceness: bad onceness")
}
}
fn parse_arg(st: @mut PState, conv: conv_did) -> ty::arg {
ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) }
}
fn parse_mode(st: @mut PState) -> ast::mode {
let m = ast::expl(match next(st) {
'+' => ast::by_copy,
'=' => ast::by_ref,
_ => fail!(~"bad mode")
});
return m;
}
fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
let sigil = parse_sigil(st);
let purity = parse_purity(next(st));
let onceness = parse_onceness(next(st));
let region = parse_region(st);
let sig = parse_sig(st, conv);
ty::ClosureTy {
purity: purity,
sigil: sigil,
onceness: onceness,
region: region,
sig: sig
}
}
fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
let purity = parse_purity(next(st));
let abi = parse_abi_set(st);
let sig = parse_sig(st, conv);
ty::BareFnTy {
purity: purity,
abis: abi,
sig: sig
}
}
fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
assert!((next(st) == '['));
let mut inputs: ~[ty::arg] = ~[];
while peek(st) != ']' {
let mode = parse_mode(st);
inputs.push(ty::arg { mode: mode, ty: parse_ty(st, conv) });
}
st.pos += 1u; // eat the ']'
let ret_ty = parse_ty(st, conv);
ty::FnSig {bound_lifetime_names: opt_vec::Empty, // FIXME(#4846)
inputs: inputs,
output: ret_ty}
}
// Rust metadata parsing
pub fn parse_def_id(buf: &[u8]) -> ast::def_id {
let mut colon_idx = 0u;
let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!("didn't find ':' when parsing def id");
fail!();
}
let crate_part = vec::slice(buf, 0u, colon_idx);
let def_part = vec::slice(buf, colon_idx + 1u, len);
let crate_num = match uint::parse_bytes(crate_part, 10u) {
Some(cn) => cn as int,
None => fail!(fmt!("internal error: parse_def_id: crate number \
expected, but found %?", crate_part))
};
let def_num = match uint::parse_bytes(def_part, 10u) {
Some(dn) => dn as int,
None => fail!(fmt!("internal error: parse_def_id: id expected, but \
found %?", def_part))
};
ast::def_id { crate: crate_num, node: def_num }
}
pub fn parse_bounds_data(data: @~[u8], start: uint,
crate_num: int, tcx: ty::ctxt, conv: conv_did)
-> @~[ty::param_bound] {
let st = parse_state_from_data(data, crate_num, start, tcx);
parse_bounds(st, conv)
}
fn parse_bounds(st: @mut PState, conv: conv_did) -> @~[ty::param_bound] {
let mut bounds = ~[];
loop {
bounds.push(match next(st) {
'S' => ty::bound_owned,
'C' => ty::bound_copy,
'K' => ty::bound_const,
'O' => ty::bound_durable,
'I' => ty::bound_trait(parse_ty(st, conv)),
'.' => break,
_ => fail!(~"parse_bounds: bad bounds")
});
}
@bounds
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//
|
random_line_split
|
|
config.js
|
var Response = require("ringo/webapp/response").Response;
var Request = require("ringo/webapp/request").Request;
var urls = [
[(/^\/(index(.html)?)?/), require("./root/index").app],
//[(/^\/(login)/), require("./root/login").app],
[(/^\/(loginpage)/), require("./root/loginpage").app],
//[(/^\/(maps(\/\d+)?)/), require("./root/maps").app],
//[(/^\/(geonetwork)/), require("./root/geonetwork").app], // Enable this only for the GeoNetwork integration
[(/^\/(composer)/), require("./root/composer").app],
[(/^\/(manager)/), require("./root/manager").app],
[(/^\/(wps)/), require("./root/wps").app], // to test WPS plugin
[(/^\/(test)/), require("./root/test").app], // to test the MapStore Viewport
[(/^\/(viewer(.html)?)/), require("./root/viewer").app],
[(/^\/(embedded(.html)?)/), require("./root/embedded").app],
[(/^\/(debug(.js)?)/), require("./root/debug").app]
];
var debug_proxy = java.lang.System.getProperty("app.debug.proxy");
if (debug_proxy) {
urls.push([(/^\/(proxy)/), require("./root/proxy").app]);
}
var FS = require("fs");
// debug mode loads unminified scripts
if (java.lang.System.getProperty("app.debug")) {
var config = FS.normal(FS.join(module.directory, "..", "buildjs.cfg"));
var configs = [config];
if(environment.applicationPath) {
configs.push(FS.normal(FS.join(module.directory, environment.applicationPath.toString(), "buildjs.cfg")));
}
urls.push(
[(/^\/script(\/.*)/), require("./autoloader").App(configs)]
);
// proxy a remote geoserver on /geoserver by setting proxy.geoserver to remote URL
// only recommended for debug mode
var geoserver = java.lang.System.getProperty("app.proxy.geoserver");
if (geoserver) {
if (geoserver.charAt(geoserver.length-1) !== "/") {
geoserver = geoserver + "/";
}
// debug specific proxy
urls.push(
[(/^\/geoserver\/(.*)/), require("./root/proxy").pass({
url: geoserver,
allowAuth: true,
/**
* Setting preserveHost to true makes it so GeoServer advertises
* URLs on the same origin as this app. That makes it so the
* proxy is not involved in any requests issued to this same
* GeoServer. The reason this is required is because we want
* to preserve auth related headers in requests to GeoServer and
* we don't want to send those same auth related headers to
* every service that is proxied. The negative side effect is
* that the proxy will occasionally fail due to
* java.lang.OutOfMemoryError issues.
* TODO: figure out why so much memory is being consumed in proxy
*/
preserveHost: true
})]
);
}
// TODO: remove this - for temporary debugging of the proxy only
urls.push([
(/^\/wms/), require("./proxywms").app
]);
}
exports.urls = urls;
// TODO: remove if http://github.com/ringo/ringojs/issues/issue/98 is addressed
function slash(config)
|
exports.middleware = [
slash(),
require("ringo/middleware/gzip").middleware,
require("ringo/middleware/static").middleware({base: module.resolve("static")}),
require("ringo/middleware/error").middleware,
require("ringo/middleware/notfound").middleware
];
exports.app = require("ringo/webapp").handleRequest;
exports.charset = "UTF-8";
exports.contentType = "text/html";
// handle application configuration
if(environment.applicationPath) {
// for debug
var applicationConfig = require(environment.applicationPath.toString() + '/config');
applicationConfig.config(urls, exports.middleware);
} else if(!java.lang.System.getProperty("app.debug")) {
// for deploy
var applicationsFolder = getRepository(module.resolve('./applications'));
if(applicationsFolder && applicationsFolder.exists()) {
var application = null;
var files = applicationsFolder.getResources(true);
for(var i = 0, l = files.length; i < l && i < 1; i++) {
var file = files[i].path;
file = file.substring(applicationsFolder.path.length);
application = file.split('/')[0];
}
if(application) {
var applicationConfig = require('applications/' + application + '/config');
applicationConfig.config(urls, exports.middleware);
}
}
}
|
{
return function(app) {
return function(request) {
var response;
var servletRequest = request.env.servletRequest;
var pathInfo = servletRequest.getPathInfo();
if (pathInfo === "/") {
var uri = servletRequest.getRequestURI();
if (uri.charAt(uri.length-1) !== "/") {
var location = servletRequest.getScheme() + "://" +
servletRequest.getServerName() + ":" + servletRequest.getServerPort() +
uri + "/";
return {
status: 301,
headers: {"Location": location},
body: []
};
}
}
return app(request);
};
};
}
|
identifier_body
|
config.js
|
var Response = require("ringo/webapp/response").Response;
var Request = require("ringo/webapp/request").Request;
var urls = [
[(/^\/(index(.html)?)?/), require("./root/index").app],
//[(/^\/(login)/), require("./root/login").app],
[(/^\/(loginpage)/), require("./root/loginpage").app],
//[(/^\/(maps(\/\d+)?)/), require("./root/maps").app],
//[(/^\/(geonetwork)/), require("./root/geonetwork").app], // Enable this only for the GeoNetwork integration
[(/^\/(composer)/), require("./root/composer").app],
[(/^\/(manager)/), require("./root/manager").app],
[(/^\/(wps)/), require("./root/wps").app], // to test WPS plugin
[(/^\/(test)/), require("./root/test").app], // to test the MapStore Viewport
[(/^\/(viewer(.html)?)/), require("./root/viewer").app],
[(/^\/(embedded(.html)?)/), require("./root/embedded").app],
[(/^\/(debug(.js)?)/), require("./root/debug").app]
];
var debug_proxy = java.lang.System.getProperty("app.debug.proxy");
if (debug_proxy) {
urls.push([(/^\/(proxy)/), require("./root/proxy").app]);
}
var FS = require("fs");
// debug mode loads unminified scripts
if (java.lang.System.getProperty("app.debug")) {
var config = FS.normal(FS.join(module.directory, "..", "buildjs.cfg"));
var configs = [config];
if(environment.applicationPath) {
configs.push(FS.normal(FS.join(module.directory, environment.applicationPath.toString(), "buildjs.cfg")));
}
urls.push(
[(/^\/script(\/.*)/), require("./autoloader").App(configs)]
);
// proxy a remote geoserver on /geoserver by setting proxy.geoserver to remote URL
// only recommended for debug mode
var geoserver = java.lang.System.getProperty("app.proxy.geoserver");
if (geoserver) {
if (geoserver.charAt(geoserver.length-1) !== "/") {
geoserver = geoserver + "/";
}
// debug specific proxy
urls.push(
[(/^\/geoserver\/(.*)/), require("./root/proxy").pass({
url: geoserver,
allowAuth: true,
/**
* Setting preserveHost to true makes it so GeoServer advertises
* URLs on the same origin as this app. That makes it so the
* proxy is not involved in any requests issued to this same
* GeoServer. The reason this is required is because we want
* to preserve auth related headers in requests to GeoServer and
* we don't want to send those same auth related headers to
* every service that is proxied. The negative side effect is
* that the proxy will occasionally fail due to
* java.lang.OutOfMemoryError issues.
* TODO: figure out why so much memory is being consumed in proxy
*/
preserveHost: true
})]
);
}
// TODO: remove this - for temporary debugging of the proxy only
urls.push([
(/^\/wms/), require("./proxywms").app
]);
}
exports.urls = urls;
// TODO: remove if http://github.com/ringo/ringojs/issues/issue/98 is addressed
function slash(config) {
return function(app) {
return function(request) {
var response;
var servletRequest = request.env.servletRequest;
var pathInfo = servletRequest.getPathInfo();
if (pathInfo === "/") {
var uri = servletRequest.getRequestURI();
if (uri.charAt(uri.length-1) !== "/") {
var location = servletRequest.getScheme() + "://" +
servletRequest.getServerName() + ":" + servletRequest.getServerPort() +
uri + "/";
return {
status: 301,
headers: {"Location": location},
body: []
};
}
}
return app(request);
};
};
}
exports.middleware = [
slash(),
require("ringo/middleware/gzip").middleware,
require("ringo/middleware/static").middleware({base: module.resolve("static")}),
require("ringo/middleware/error").middleware,
require("ringo/middleware/notfound").middleware
];
exports.app = require("ringo/webapp").handleRequest;
exports.charset = "UTF-8";
exports.contentType = "text/html";
// handle application configuration
if(environment.applicationPath) {
// for debug
var applicationConfig = require(environment.applicationPath.toString() + '/config');
applicationConfig.config(urls, exports.middleware);
} else if(!java.lang.System.getProperty("app.debug")) {
// for deploy
var applicationsFolder = getRepository(module.resolve('./applications'));
if(applicationsFolder && applicationsFolder.exists()) {
var application = null;
var files = applicationsFolder.getResources(true);
|
for(var i = 0, l = files.length; i < l && i < 1; i++) {
var file = files[i].path;
file = file.substring(applicationsFolder.path.length);
application = file.split('/')[0];
}
if(application) {
var applicationConfig = require('applications/' + application + '/config');
applicationConfig.config(urls, exports.middleware);
}
}
}
|
random_line_split
|
|
config.js
|
var Response = require("ringo/webapp/response").Response;
var Request = require("ringo/webapp/request").Request;
var urls = [
[(/^\/(index(.html)?)?/), require("./root/index").app],
//[(/^\/(login)/), require("./root/login").app],
[(/^\/(loginpage)/), require("./root/loginpage").app],
//[(/^\/(maps(\/\d+)?)/), require("./root/maps").app],
//[(/^\/(geonetwork)/), require("./root/geonetwork").app], // Enable this only for the GeoNetwork integration
[(/^\/(composer)/), require("./root/composer").app],
[(/^\/(manager)/), require("./root/manager").app],
[(/^\/(wps)/), require("./root/wps").app], // to test WPS plugin
[(/^\/(test)/), require("./root/test").app], // to test the MapStore Viewport
[(/^\/(viewer(.html)?)/), require("./root/viewer").app],
[(/^\/(embedded(.html)?)/), require("./root/embedded").app],
[(/^\/(debug(.js)?)/), require("./root/debug").app]
];
var debug_proxy = java.lang.System.getProperty("app.debug.proxy");
if (debug_proxy) {
urls.push([(/^\/(proxy)/), require("./root/proxy").app]);
}
var FS = require("fs");
// debug mode loads unminified scripts
if (java.lang.System.getProperty("app.debug")) {
var config = FS.normal(FS.join(module.directory, "..", "buildjs.cfg"));
var configs = [config];
if(environment.applicationPath) {
configs.push(FS.normal(FS.join(module.directory, environment.applicationPath.toString(), "buildjs.cfg")));
}
urls.push(
[(/^\/script(\/.*)/), require("./autoloader").App(configs)]
);
// proxy a remote geoserver on /geoserver by setting proxy.geoserver to remote URL
// only recommended for debug mode
var geoserver = java.lang.System.getProperty("app.proxy.geoserver");
if (geoserver) {
if (geoserver.charAt(geoserver.length-1) !== "/") {
geoserver = geoserver + "/";
}
// debug specific proxy
urls.push(
[(/^\/geoserver\/(.*)/), require("./root/proxy").pass({
url: geoserver,
allowAuth: true,
/**
* Setting preserveHost to true makes it so GeoServer advertises
* URLs on the same origin as this app. That makes it so the
* proxy is not involved in any requests issued to this same
* GeoServer. The reason this is required is because we want
* to preserve auth related headers in requests to GeoServer and
* we don't want to send those same auth related headers to
* every service that is proxied. The negative side effect is
* that the proxy will occasionally fail due to
* java.lang.OutOfMemoryError issues.
* TODO: figure out why so much memory is being consumed in proxy
*/
preserveHost: true
})]
);
}
// TODO: remove this - for temporary debugging of the proxy only
urls.push([
(/^\/wms/), require("./proxywms").app
]);
}
exports.urls = urls;
// TODO: remove if http://github.com/ringo/ringojs/issues/issue/98 is addressed
function
|
(config) {
return function(app) {
return function(request) {
var response;
var servletRequest = request.env.servletRequest;
var pathInfo = servletRequest.getPathInfo();
if (pathInfo === "/") {
var uri = servletRequest.getRequestURI();
if (uri.charAt(uri.length-1) !== "/") {
var location = servletRequest.getScheme() + "://" +
servletRequest.getServerName() + ":" + servletRequest.getServerPort() +
uri + "/";
return {
status: 301,
headers: {"Location": location},
body: []
};
}
}
return app(request);
};
};
}
exports.middleware = [
slash(),
require("ringo/middleware/gzip").middleware,
require("ringo/middleware/static").middleware({base: module.resolve("static")}),
require("ringo/middleware/error").middleware,
require("ringo/middleware/notfound").middleware
];
exports.app = require("ringo/webapp").handleRequest;
exports.charset = "UTF-8";
exports.contentType = "text/html";
// handle application configuration
if(environment.applicationPath) {
// for debug
var applicationConfig = require(environment.applicationPath.toString() + '/config');
applicationConfig.config(urls, exports.middleware);
} else if(!java.lang.System.getProperty("app.debug")) {
// for deploy
var applicationsFolder = getRepository(module.resolve('./applications'));
if(applicationsFolder && applicationsFolder.exists()) {
var application = null;
var files = applicationsFolder.getResources(true);
for(var i = 0, l = files.length; i < l && i < 1; i++) {
var file = files[i].path;
file = file.substring(applicationsFolder.path.length);
application = file.split('/')[0];
}
if(application) {
var applicationConfig = require('applications/' + application + '/config');
applicationConfig.config(urls, exports.middleware);
}
}
}
|
slash
|
identifier_name
|
mock.rs
|
use std::{
io::{self, Cursor, Read, Write},
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncWrite};
/// A fake stream for testing network applications backed by buffers.
#[derive(Clone, Debug)]
pub struct MockStream {
written: Cursor<Vec<u8>>,
received: Cursor<Vec<u8>>,
}
impl MockStream {
/// Creates a new mock stream with nothing to read.
pub fn empty() -> MockStream {
MockStream::new(&[])
}
/// Creates a new mock stream with the specified bytes to read.
pub fn new(initial: &[u8]) -> MockStream {
MockStream {
written: Cursor::new(vec![]),
received: Cursor::new(initial.to_owned()),
}
}
/// Gets a slice of bytes representing the data that has been written.
pub fn written(&self) -> &[u8] {
self.written.get_ref()
}
/// Gets a slice of bytes representing the data that has been received.
pub fn
|
(&self) -> &[u8] {
self.received.get_ref()
}
}
impl AsyncRead for MockStream {
fn poll_read(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Poll::Ready(self.as_mut().received.read(buf))
}
}
impl AsyncWrite for MockStream {
fn poll_write(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
Poll::Ready(self.as_mut().written.write(buf))
}
fn poll_flush(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(self.as_mut().written.flush())
}
fn poll_shutdown(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(Ok(()))
}
}
|
received
|
identifier_name
|
mock.rs
|
use std::{
io::{self, Cursor, Read, Write},
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncWrite};
/// A fake stream for testing network applications backed by buffers.
#[derive(Clone, Debug)]
pub struct MockStream {
written: Cursor<Vec<u8>>,
received: Cursor<Vec<u8>>,
}
impl MockStream {
/// Creates a new mock stream with nothing to read.
pub fn empty() -> MockStream {
MockStream::new(&[])
}
/// Creates a new mock stream with the specified bytes to read.
pub fn new(initial: &[u8]) -> MockStream {
MockStream {
written: Cursor::new(vec![]),
received: Cursor::new(initial.to_owned()),
}
}
/// Gets a slice of bytes representing the data that has been written.
pub fn written(&self) -> &[u8] {
self.written.get_ref()
}
/// Gets a slice of bytes representing the data that has been received.
pub fn received(&self) -> &[u8] {
self.received.get_ref()
}
|
fn poll_read(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Poll::Ready(self.as_mut().received.read(buf))
}
}
impl AsyncWrite for MockStream {
fn poll_write(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
Poll::Ready(self.as_mut().written.write(buf))
}
fn poll_flush(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(self.as_mut().written.flush())
}
fn poll_shutdown(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(Ok(()))
}
}
|
}
impl AsyncRead for MockStream {
|
random_line_split
|
mock.rs
|
use std::{
io::{self, Cursor, Read, Write},
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncWrite};
/// A fake stream for testing network applications backed by buffers.
#[derive(Clone, Debug)]
pub struct MockStream {
written: Cursor<Vec<u8>>,
received: Cursor<Vec<u8>>,
}
impl MockStream {
/// Creates a new mock stream with nothing to read.
pub fn empty() -> MockStream {
MockStream::new(&[])
}
/// Creates a new mock stream with the specified bytes to read.
pub fn new(initial: &[u8]) -> MockStream {
MockStream {
written: Cursor::new(vec![]),
received: Cursor::new(initial.to_owned()),
}
}
/// Gets a slice of bytes representing the data that has been written.
pub fn written(&self) -> &[u8] {
self.written.get_ref()
}
/// Gets a slice of bytes representing the data that has been received.
pub fn received(&self) -> &[u8] {
self.received.get_ref()
}
}
impl AsyncRead for MockStream {
fn poll_read(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Poll::Ready(self.as_mut().received.read(buf))
}
}
impl AsyncWrite for MockStream {
fn poll_write(
mut self: Pin<&mut Self>,
_: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
Poll::Ready(self.as_mut().written.write(buf))
}
fn poll_flush(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(self.as_mut().written.flush())
}
fn poll_shutdown(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), io::Error>>
|
}
|
{
Poll::Ready(Ok(()))
}
|
identifier_body
|
AngularFire.js
|
import Firebase from 'firebase/firebase';
export class AngularFire {
ref: Firebase;
|
(ref: Firebase) {
this.ref = ref;
}
asArray() {
return new FirebaseArray(this.ref);
}
}
/*
FirebaseArray
*/
export class FirebaseArray {
ref: Firebase;
error: any;
list: Array;
constructor(ref: Firebase) {
this.ref = ref;
this.list = [];
// listen for changes at the Firebase instance
this.ref.on('child_added', this.created.bind(this), this.error);
this.ref.on('child_moved', this.moved.bind(this), this.error);
this.ref.on('child_changed', this.updated.bind(this), this.error);
this.ref.on('child_removed', this.removed.bind(this), this.error);
// determine when initial load is completed
// ref.once('value', function() { resolve(null); }, resolve);
}
getItem(recOrIndex: any) {
var item = recOrIndex;
if(typeof(recOrIndex) === "number") {
item = this.getRecord(recOrIndex);
}
return item;
}
getChild(recOrIndex: any) {
var item = this.getItem(recOrIndex);
return this.ref.child(item._key);
}
add(rec: any) {
this.ref.push(rec);
}
remove(recOrIndex: any) {
this.getChild(recOrIndex).remove();
}
save(recOrIndex: any) {
var item = this.getItem(recOrIndex);
this.getChild(recOrIndex).update(item);
}
keyify(snap) {
var item = snap.val();
item._key = snap.key();
return item;
}
created(snap) {
debugger;
var addedValue = this.keyify(snap);
this.list.push(addedValue);
}
moved(snap) {
var key = snap.key();
this.spliceOut(key);
}
updated(snap) {
var key = snap.key();
var indexToUpdate = this.indexFor(key);
this.list[indexToUpdate] = this.keyify(snap);
}
removed(snap) {
var key = snap.key();
this.spliceOut(key);
}
bulkUpdate(items) {
this.ref.update(items);
}
spliceOut(key) {
var i = this.indexFor(key);
if( i > -1 ) {
return this.list.splice(i, 1)[0];
}
return null;
}
indexFor(key) {
var record = this.getRecord(key);
return this.list.indexOf(record);
}
getRecord(key) {
return this.list.find((item) => key === item._key);
}
}
|
constructor
|
identifier_name
|
AngularFire.js
|
import Firebase from 'firebase/firebase';
export class AngularFire {
ref: Firebase;
constructor(ref: Firebase) {
this.ref = ref;
}
asArray()
|
}
/*
FirebaseArray
*/
export class FirebaseArray {
ref: Firebase;
error: any;
list: Array;
constructor(ref: Firebase) {
this.ref = ref;
this.list = [];
// listen for changes at the Firebase instance
this.ref.on('child_added', this.created.bind(this), this.error);
this.ref.on('child_moved', this.moved.bind(this), this.error);
this.ref.on('child_changed', this.updated.bind(this), this.error);
this.ref.on('child_removed', this.removed.bind(this), this.error);
// determine when initial load is completed
// ref.once('value', function() { resolve(null); }, resolve);
}
getItem(recOrIndex: any) {
var item = recOrIndex;
if(typeof(recOrIndex) === "number") {
item = this.getRecord(recOrIndex);
}
return item;
}
getChild(recOrIndex: any) {
var item = this.getItem(recOrIndex);
return this.ref.child(item._key);
}
add(rec: any) {
this.ref.push(rec);
}
remove(recOrIndex: any) {
this.getChild(recOrIndex).remove();
}
save(recOrIndex: any) {
var item = this.getItem(recOrIndex);
this.getChild(recOrIndex).update(item);
}
keyify(snap) {
var item = snap.val();
item._key = snap.key();
return item;
}
created(snap) {
debugger;
var addedValue = this.keyify(snap);
this.list.push(addedValue);
}
moved(snap) {
var key = snap.key();
this.spliceOut(key);
}
updated(snap) {
var key = snap.key();
var indexToUpdate = this.indexFor(key);
this.list[indexToUpdate] = this.keyify(snap);
}
removed(snap) {
var key = snap.key();
this.spliceOut(key);
}
bulkUpdate(items) {
this.ref.update(items);
}
spliceOut(key) {
var i = this.indexFor(key);
if( i > -1 ) {
return this.list.splice(i, 1)[0];
}
return null;
}
indexFor(key) {
var record = this.getRecord(key);
return this.list.indexOf(record);
}
getRecord(key) {
return this.list.find((item) => key === item._key);
}
}
|
{
return new FirebaseArray(this.ref);
}
|
identifier_body
|
AngularFire.js
|
import Firebase from 'firebase/firebase';
export class AngularFire {
ref: Firebase;
constructor(ref: Firebase) {
this.ref = ref;
}
asArray() {
return new FirebaseArray(this.ref);
}
}
/*
FirebaseArray
*/
export class FirebaseArray {
ref: Firebase;
error: any;
list: Array;
constructor(ref: Firebase) {
this.ref = ref;
this.list = [];
// listen for changes at the Firebase instance
this.ref.on('child_added', this.created.bind(this), this.error);
this.ref.on('child_moved', this.moved.bind(this), this.error);
this.ref.on('child_changed', this.updated.bind(this), this.error);
this.ref.on('child_removed', this.removed.bind(this), this.error);
// determine when initial load is completed
// ref.once('value', function() { resolve(null); }, resolve);
}
getItem(recOrIndex: any) {
var item = recOrIndex;
if(typeof(recOrIndex) === "number") {
item = this.getRecord(recOrIndex);
}
return item;
}
getChild(recOrIndex: any) {
var item = this.getItem(recOrIndex);
return this.ref.child(item._key);
}
add(rec: any) {
this.ref.push(rec);
}
remove(recOrIndex: any) {
this.getChild(recOrIndex).remove();
}
save(recOrIndex: any) {
var item = this.getItem(recOrIndex);
this.getChild(recOrIndex).update(item);
}
keyify(snap) {
var item = snap.val();
item._key = snap.key();
return item;
}
created(snap) {
debugger;
var addedValue = this.keyify(snap);
this.list.push(addedValue);
}
moved(snap) {
var key = snap.key();
this.spliceOut(key);
}
updated(snap) {
var key = snap.key();
var indexToUpdate = this.indexFor(key);
this.list[indexToUpdate] = this.keyify(snap);
}
removed(snap) {
var key = snap.key();
this.spliceOut(key);
}
bulkUpdate(items) {
this.ref.update(items);
}
spliceOut(key) {
var i = this.indexFor(key);
if( i > -1 ) {
return this.list.splice(i, 1)[0];
}
return null;
}
|
indexFor(key) {
var record = this.getRecord(key);
return this.list.indexOf(record);
}
getRecord(key) {
return this.list.find((item) => key === item._key);
}
}
|
random_line_split
|
|
AngularFire.js
|
import Firebase from 'firebase/firebase';
export class AngularFire {
ref: Firebase;
constructor(ref: Firebase) {
this.ref = ref;
}
asArray() {
return new FirebaseArray(this.ref);
}
}
/*
FirebaseArray
*/
export class FirebaseArray {
ref: Firebase;
error: any;
list: Array;
constructor(ref: Firebase) {
this.ref = ref;
this.list = [];
// listen for changes at the Firebase instance
this.ref.on('child_added', this.created.bind(this), this.error);
this.ref.on('child_moved', this.moved.bind(this), this.error);
this.ref.on('child_changed', this.updated.bind(this), this.error);
this.ref.on('child_removed', this.removed.bind(this), this.error);
// determine when initial load is completed
// ref.once('value', function() { resolve(null); }, resolve);
}
getItem(recOrIndex: any) {
var item = recOrIndex;
if(typeof(recOrIndex) === "number")
|
return item;
}
getChild(recOrIndex: any) {
var item = this.getItem(recOrIndex);
return this.ref.child(item._key);
}
add(rec: any) {
this.ref.push(rec);
}
remove(recOrIndex: any) {
this.getChild(recOrIndex).remove();
}
save(recOrIndex: any) {
var item = this.getItem(recOrIndex);
this.getChild(recOrIndex).update(item);
}
keyify(snap) {
var item = snap.val();
item._key = snap.key();
return item;
}
created(snap) {
debugger;
var addedValue = this.keyify(snap);
this.list.push(addedValue);
}
moved(snap) {
var key = snap.key();
this.spliceOut(key);
}
updated(snap) {
var key = snap.key();
var indexToUpdate = this.indexFor(key);
this.list[indexToUpdate] = this.keyify(snap);
}
removed(snap) {
var key = snap.key();
this.spliceOut(key);
}
bulkUpdate(items) {
this.ref.update(items);
}
spliceOut(key) {
var i = this.indexFor(key);
if( i > -1 ) {
return this.list.splice(i, 1)[0];
}
return null;
}
indexFor(key) {
var record = this.getRecord(key);
return this.list.indexOf(record);
}
getRecord(key) {
return this.list.find((item) => key === item._key);
}
}
|
{
item = this.getRecord(recOrIndex);
}
|
conditional_block
|
sync-send-iterators-in-libcore.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![feature(collections)]
fn is_sync<T>(_: T) where T: Sync
|
fn is_send<T>(_: T) where T: Send {}
macro_rules! all_sync_send {
($ctor:expr, $($iter:ident),+) => ({
$(
let mut x = $ctor;
is_sync(x.$iter());
let mut y = $ctor;
is_send(y.$iter());
)+
})
}
fn main() {
// for char.rs
all_sync_send!("Я", escape_default, escape_unicode);
// for iter.rs
// FIXME
// for option.rs
// FIXME
// for result.rs
// FIXME
// for slice.rs
// FIXME
// for str/mod.rs
// FIXME
}
|
{}
|
identifier_body
|
sync-send-iterators-in-libcore.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![feature(collections)]
fn
|
<T>(_: T) where T: Sync {}
fn is_send<T>(_: T) where T: Send {}
macro_rules! all_sync_send {
($ctor:expr, $($iter:ident),+) => ({
$(
let mut x = $ctor;
is_sync(x.$iter());
let mut y = $ctor;
is_send(y.$iter());
)+
})
}
fn main() {
// for char.rs
all_sync_send!("Я", escape_default, escape_unicode);
// for iter.rs
// FIXME
// for option.rs
// FIXME
// for result.rs
// FIXME
// for slice.rs
// FIXME
// for str/mod.rs
// FIXME
}
|
is_sync
|
identifier_name
|
sync-send-iterators-in-libcore.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
// pretty-expanded FIXME #23616
#![feature(collections)]
fn is_sync<T>(_: T) where T: Sync {}
fn is_send<T>(_: T) where T: Send {}
macro_rules! all_sync_send {
($ctor:expr, $($iter:ident),+) => ({
$(
let mut x = $ctor;
is_sync(x.$iter());
let mut y = $ctor;
is_send(y.$iter());
)+
})
}
fn main() {
// for char.rs
all_sync_send!("Я", escape_default, escape_unicode);
// for iter.rs
// FIXME
// for option.rs
// FIXME
// for result.rs
// FIXME
// for slice.rs
// FIXME
// for str/mod.rs
// FIXME
}
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
random_line_split
|
corpusInfo.py
|
#!/usr/bin/python3
ci_version="0.10"
# This script is used to retrieve corpus information. It can be run after the parser
# has finished its work. The corpus information is part of the final report.
# Database connection is configured in the server configuration.
# Include custom libs
import sys
sys.path.append( '../../include/python' )
import serverutils.config as config
import serverutils.mongohelper as mongohelper
import pymongo
mongoClient, mongoDb = mongohelper.getMongoClient(silent = True)
# Initialize all values. To make finding minimal values, we set those
# variables to an extremely high value initially, so that there is at least
# one character that has less...
movieCount = 0
characterCount = 0
minPerMovieCharacterCount = 999
minPerMovieCharacterCountMovie = None
maxPerMovieCharacterCount = 0
maxPerMovieCharacterCountMovie = None
totalWordCount = 0
characterWordCounts = []
minWordCount = 99999
maxWordCount = 0
minWordCountChar = None
maxWordCountChar = None
print("Processing movies ",end="")
# For every movie in our database
for movie in mongoDb.rawMovies.find():
print(".",end="",flush=True)
# Count the movie and (re-)initialize movie-specific variables
movieCount+=1
characters = {}
movieCharacterCount = 0
# For every quote...
for quote in mongoDb.rawQuotes.find({'_id.movie': movie['_id']}):
# Sort the quotes into character-specific lists to be able to generate
# values for the characters
if quote['character'] in characters:
characters[quote['character']] = characters[quote['character']] + " " + quote['text']
|
characters[quote['character']] = quote['text']
movieCharacterCount += 1
# Calculating word counts for every character
wordCounts = {cid: len(txt.split()) for cid,txt in characters.items()}
for char, wc in wordCounts.items():
totalWordCount += wc
characterWordCounts += [wc]
charname = char + " (" + movie['normalizedMovieId'] + ")"
if minWordCount > wc:
minWordCount = wc
minWordCountChar = charname
elif minWordCount == wc:
minWordCountChar += ", " + charname
if maxWordCount < wc:
maxWordCount = wc
maxWordCountChar = charname
elif maxWordCount == wc:
maxWordCountChar += ", " + charname
# Adding to total Character Count
characterCount += movieCharacterCount
# Counting Characters per Movie
if minPerMovieCharacterCount > movieCharacterCount:
minPerMovieCharacterCount = movieCharacterCount
minPerMovieCharacterCountMovie = movie['normalizedMovieId']
elif minPerMovieCharacterCount == movieCharacterCount:
minPerMovieCharacterCountMovie+= ", " + movie['normalizedMovieId']
if maxPerMovieCharacterCount < movieCharacterCount:
maxPerMovieCharacterCount = movieCharacterCount
maxPerMovieCharacterCountMovie = movie['normalizedMovieId']
elif maxPerMovieCharacterCount == movieCharacterCount:
maxPerMovieCharacterCountMovie += ", " + movie['normalizedMovieId']
# Display results
print("")
print("Movies in DB: ", movieCount)
print("Total characters: ", characterCount)
print("Total words: ", totalWordCount)
print()
print("Characters per movie... ")
print(" ... on avarage: ", float(characterCount)/float(movieCount))
print(" ... max: ", maxPerMovieCharacterCount, "(in "+maxPerMovieCharacterCountMovie+")")
print(" ... min: ", minPerMovieCharacterCount, "(in "+minPerMovieCharacterCountMovie+")")
print()
print("Word count...")
print(" ... avg. per character: ", totalWordCount / characterCount)
print(" ... avg. per movie: ", totalWordCount / movieCount)
print(" ... max: ", maxWordCount, "(for " + maxWordCountChar + ")")
print(" ... min: ", minWordCount, "(for " + minWordCountChar + ")")
print()
print("Word count - amount of characters:")
for i in range(0, maxWordCount + 500, 500):
print(" " + str(i) + " - " + str(i+499) + ": "+str(len(list(filter(lambda a: i <= a < i+500, characterWordCounts)))))
|
else:
|
random_line_split
|
corpusInfo.py
|
#!/usr/bin/python3
ci_version="0.10"
# This script is used to retrieve corpus information. It can be run after the parser
# has finished its work. The corpus information is part of the final report.
# Database connection is configured in the server configuration.
# Include custom libs
import sys
sys.path.append( '../../include/python' )
import serverutils.config as config
import serverutils.mongohelper as mongohelper
import pymongo
mongoClient, mongoDb = mongohelper.getMongoClient(silent = True)
# Initialize all values. To make finding minimal values, we set those
# variables to an extremely high value initially, so that there is at least
# one character that has less...
movieCount = 0
characterCount = 0
minPerMovieCharacterCount = 999
minPerMovieCharacterCountMovie = None
maxPerMovieCharacterCount = 0
maxPerMovieCharacterCountMovie = None
totalWordCount = 0
characterWordCounts = []
minWordCount = 99999
maxWordCount = 0
minWordCountChar = None
maxWordCountChar = None
print("Processing movies ",end="")
# For every movie in our database
for movie in mongoDb.rawMovies.find():
print(".",end="",flush=True)
# Count the movie and (re-)initialize movie-specific variables
movieCount+=1
characters = {}
movieCharacterCount = 0
# For every quote...
for quote in mongoDb.rawQuotes.find({'_id.movie': movie['_id']}):
# Sort the quotes into character-specific lists to be able to generate
# values for the characters
if quote['character'] in characters:
characters[quote['character']] = characters[quote['character']] + " " + quote['text']
else:
characters[quote['character']] = quote['text']
movieCharacterCount += 1
# Calculating word counts for every character
wordCounts = {cid: len(txt.split()) for cid,txt in characters.items()}
for char, wc in wordCounts.items():
totalWordCount += wc
characterWordCounts += [wc]
charname = char + " (" + movie['normalizedMovieId'] + ")"
if minWordCount > wc:
minWordCount = wc
minWordCountChar = charname
elif minWordCount == wc:
|
if maxWordCount < wc:
maxWordCount = wc
maxWordCountChar = charname
elif maxWordCount == wc:
maxWordCountChar += ", " + charname
# Adding to total Character Count
characterCount += movieCharacterCount
# Counting Characters per Movie
if minPerMovieCharacterCount > movieCharacterCount:
minPerMovieCharacterCount = movieCharacterCount
minPerMovieCharacterCountMovie = movie['normalizedMovieId']
elif minPerMovieCharacterCount == movieCharacterCount:
minPerMovieCharacterCountMovie+= ", " + movie['normalizedMovieId']
if maxPerMovieCharacterCount < movieCharacterCount:
maxPerMovieCharacterCount = movieCharacterCount
maxPerMovieCharacterCountMovie = movie['normalizedMovieId']
elif maxPerMovieCharacterCount == movieCharacterCount:
maxPerMovieCharacterCountMovie += ", " + movie['normalizedMovieId']
# Display results
print("")
print("Movies in DB: ", movieCount)
print("Total characters: ", characterCount)
print("Total words: ", totalWordCount)
print()
print("Characters per movie... ")
print(" ... on avarage: ", float(characterCount)/float(movieCount))
print(" ... max: ", maxPerMovieCharacterCount, "(in "+maxPerMovieCharacterCountMovie+")")
print(" ... min: ", minPerMovieCharacterCount, "(in "+minPerMovieCharacterCountMovie+")")
print()
print("Word count...")
print(" ... avg. per character: ", totalWordCount / characterCount)
print(" ... avg. per movie: ", totalWordCount / movieCount)
print(" ... max: ", maxWordCount, "(for " + maxWordCountChar + ")")
print(" ... min: ", minWordCount, "(for " + minWordCountChar + ")")
print()
print("Word count - amount of characters:")
for i in range(0, maxWordCount + 500, 500):
print(" " + str(i) + " - " + str(i+499) + ": "+str(len(list(filter(lambda a: i <= a < i+500, characterWordCounts)))))
|
minWordCountChar += ", " + charname
|
conditional_block
|
cipher.rs
|
//! Implements the basic XTEA cipher routines as described in the
//! paper (http://en.wikipedia.org/wiki/XTEA). These functions only
//! deal with a single 64-bit block of data at a time.
static NUM_ROUNDS: u32 = 32;
use super::{Key, Block};
/// Encrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// assert!(cipher::encipher(&key, &plaintext) != plaintext);
/// ```
pub fn encipher(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum: u32 = 0;
for _ in 0..NUM_ROUNDS {
v0 = v0.wrapping_add((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
sum = sum.wrapping_add(delta);
v1 = v1.wrapping_add((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])))
}
[v0, v1]
}
/// Decrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// let crypted = cipher::encipher(&key, &plaintext);
/// assert_eq!(cipher::decipher(&key, &crypted), plaintext);
/// ```
pub fn decipher(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum = delta.wrapping_mul(NUM_ROUNDS);
for _ in 0..NUM_ROUNDS {
v1 = v1.wrapping_sub((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])));
sum = sum.wrapping_sub(delta);
v0 = v0.wrapping_sub((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
}
[v0, v1]
}
#[test]
fn it_works()
|
{
let key: Key = [10, 20, 30, 42];
let plaintext: Block = [300, 400];
let ciphertext = encipher(&key, &plaintext);
assert!(plaintext != ciphertext);
assert_eq!(plaintext, decipher(&key, &ciphertext));
}
|
identifier_body
|
|
cipher.rs
|
//! Implements the basic XTEA cipher routines as described in the
//! paper (http://en.wikipedia.org/wiki/XTEA). These functions only
//! deal with a single 64-bit block of data at a time.
static NUM_ROUNDS: u32 = 32;
use super::{Key, Block};
/// Encrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// assert!(cipher::encipher(&key, &plaintext) != plaintext);
/// ```
pub fn encipher(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum: u32 = 0;
for _ in 0..NUM_ROUNDS {
v0 = v0.wrapping_add((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
sum = sum.wrapping_add(delta);
v1 = v1.wrapping_add((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])))
}
[v0, v1]
}
/// Decrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// let crypted = cipher::encipher(&key, &plaintext);
/// assert_eq!(cipher::decipher(&key, &crypted), plaintext);
/// ```
pub fn decipher(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum = delta.wrapping_mul(NUM_ROUNDS);
for _ in 0..NUM_ROUNDS {
v1 = v1.wrapping_sub((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])));
sum = sum.wrapping_sub(delta);
v0 = v0.wrapping_sub((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
|
#[test]
fn it_works() {
let key: Key = [10, 20, 30, 42];
let plaintext: Block = [300, 400];
let ciphertext = encipher(&key, &plaintext);
assert!(plaintext != ciphertext);
assert_eq!(plaintext, decipher(&key, &ciphertext));
}
|
}
[v0, v1]
}
|
random_line_split
|
cipher.rs
|
//! Implements the basic XTEA cipher routines as described in the
//! paper (http://en.wikipedia.org/wiki/XTEA). These functions only
//! deal with a single 64-bit block of data at a time.
static NUM_ROUNDS: u32 = 32;
use super::{Key, Block};
/// Encrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// assert!(cipher::encipher(&key, &plaintext) != plaintext);
/// ```
pub fn
|
(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum: u32 = 0;
for _ in 0..NUM_ROUNDS {
v0 = v0.wrapping_add((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
sum = sum.wrapping_add(delta);
v1 = v1.wrapping_add((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])))
}
[v0, v1]
}
/// Decrypts 64 bits of `input` using the `key`.
///
/// # Example:
/// ```
/// use tea::cipher;
///
/// let key = [5, 6, 7, 8];
/// let plaintext = [128, 256];
/// let crypted = cipher::encipher(&key, &plaintext);
/// assert_eq!(cipher::decipher(&key, &crypted), plaintext);
/// ```
pub fn decipher(key: &Key, input: &Block) -> Block {
let [mut v0, mut v1] = *input;
let delta = 0x9E3779B9;
let mut sum = delta.wrapping_mul(NUM_ROUNDS);
for _ in 0..NUM_ROUNDS {
v1 = v1.wrapping_sub((((v0 << 4) ^ (v0 >> 5)).wrapping_add(v0)) ^ (sum.wrapping_add(key[((sum>>11) & 3) as usize])));
sum = sum.wrapping_sub(delta);
v0 = v0.wrapping_sub((((v1 << 4) ^ (v1 >> 5)).wrapping_add(v1)) ^ (sum.wrapping_add(key[(sum & 3) as usize])));
}
[v0, v1]
}
#[test]
fn it_works() {
let key: Key = [10, 20, 30, 42];
let plaintext: Block = [300, 400];
let ciphertext = encipher(&key, &plaintext);
assert!(plaintext != ciphertext);
assert_eq!(plaintext, decipher(&key, &ciphertext));
}
|
encipher
|
identifier_name
|
gulpfile.js
|
const gulp = require('gulp');
const sass = require('gulp-sass');
const rename = require('gulp-rename');
const autoprefixer = require('gulp-autoprefixer');
// Cactu scss source
const cactuUrl = './scss/**/*.scss';
const docUrl = './_sass/**/*.scss';
const sassOpts = {
outputStyle: 'compressed',
precison: 3,
errLogToConsole: true
};
function styles()
|
;
function docStyles() {
return gulp.src(docUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(rename({
suffix: ".min"
}))
.pipe(gulp.dest('./assets/css'))
}
function compressedStyles() {
return gulp.src(cactuUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(rename({
suffix: ".min"
}))
.pipe(gulp.dest('./css'))
}
gulp.task('sass', styles);
gulp.task('sass-doc', docStyles);
gulp.task('sass-compressed', compressedStyles);
gulp.task('cactu-build', gulp.parallel(styles, compressedStyles));
gulp.task('watch', () => {
gulp.watch([cactuUrl, docUrl], gulp.series(styles, compressedStyles, docStyles));
});
|
{
return gulp.src(cactuUrl)
.pipe(sass({
outputStyle: 'expanded'
}).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(gulp.dest('./css'))
}
|
identifier_body
|
gulpfile.js
|
const gulp = require('gulp');
const sass = require('gulp-sass');
const rename = require('gulp-rename');
const autoprefixer = require('gulp-autoprefixer');
// Cactu scss source
const cactuUrl = './scss/**/*.scss';
const docUrl = './_sass/**/*.scss';
const sassOpts = {
outputStyle: 'compressed',
precison: 3,
errLogToConsole: true
};
function styles() {
return gulp.src(cactuUrl)
.pipe(sass({
outputStyle: 'expanded'
}).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(gulp.dest('./css'))
};
function docStyles() {
return gulp.src(docUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(rename({
suffix: ".min"
}))
.pipe(gulp.dest('./assets/css'))
}
function compressedStyles() {
return gulp.src(cactuUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
|
suffix: ".min"
}))
.pipe(gulp.dest('./css'))
}
gulp.task('sass', styles);
gulp.task('sass-doc', docStyles);
gulp.task('sass-compressed', compressedStyles);
gulp.task('cactu-build', gulp.parallel(styles, compressedStyles));
gulp.task('watch', () => {
gulp.watch([cactuUrl, docUrl], gulp.series(styles, compressedStyles, docStyles));
});
|
.pipe(rename({
|
random_line_split
|
gulpfile.js
|
const gulp = require('gulp');
const sass = require('gulp-sass');
const rename = require('gulp-rename');
const autoprefixer = require('gulp-autoprefixer');
// Cactu scss source
const cactuUrl = './scss/**/*.scss';
const docUrl = './_sass/**/*.scss';
const sassOpts = {
outputStyle: 'compressed',
precison: 3,
errLogToConsole: true
};
function styles() {
return gulp.src(cactuUrl)
.pipe(sass({
outputStyle: 'expanded'
}).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(gulp.dest('./css'))
};
function docStyles() {
return gulp.src(docUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(rename({
suffix: ".min"
}))
.pipe(gulp.dest('./assets/css'))
}
function
|
() {
return gulp.src(cactuUrl)
.pipe(sass(sassOpts).on('error', sass.logError))
.pipe(autoprefixer({
browsers: ['last 2 versions'],
cascade: false
}))
.pipe(rename({
suffix: ".min"
}))
.pipe(gulp.dest('./css'))
}
gulp.task('sass', styles);
gulp.task('sass-doc', docStyles);
gulp.task('sass-compressed', compressedStyles);
gulp.task('cactu-build', gulp.parallel(styles, compressedStyles));
gulp.task('watch', () => {
gulp.watch([cactuUrl, docUrl], gulp.series(styles, compressedStyles, docStyles));
});
|
compressedStyles
|
identifier_name
|
config.js
|
/**
* config is the configuration wrapper object of the app
*/
var config = {
site : 'http://www.some.com',
domains : {
some : 'some.com'
},
el : {
productList : $('#product_list'),
productTemplate : $('#productTemplate'),
loader : $('#loader'),
err : $("#err")
},
imgPaths : {
logo : '../../img/logo.png'
},
urls : {
|
fireabase : 'https://product-api.firebaseio.com'
},
errors : {
400 : '400',
500 : '500',
timeout : 'timeout'
},
errorMessages : {
serverConnectionFailed : 'Sunucuya bağlanılamıyor...',/*400*/
internalServerError : 'Sunucu taraflı bir hata oluştu...',/*500*/
timeout : 'Sayfa zaman aşımına uğradı. Lütfen tekrar deneyiniz...',
unidentifiedError : 'Beklenmeyen bir hata oluştu. Lütfen tekrar deneyiniz.'
},
stringBoundry : 16,
stringExtension : '...',
json : {
shortDesc : 'short_description',
name : 'name'
}
};
|
favouriteProducts : 'http://127.0.0.1/ajax/favorite_products',
|
random_line_split
|
python_runner_1.5.js
|
/*
python_runner:
Python code runner.
*/
var currentPythonContext = null;
function PythonInterpreter(context, msgCallback) {
this.context = context;
this.messageCallback = msgCallback;
this._code = '';
this._editor_filename = "<stdin>";
this.context.runner = this;
this._maxIterations = 4000;
this._maxIterWithoutAction = 50;
this._resetCallstackOnNextStep = false;
this._paused = false;
this._isRunning = false;
this._stepInProgress = false;
this._resetDone = true;
this.stepMode = false;
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = null;
this._hasActions = false;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._timeouts = [];
this._editorMarker = null;
this.availableModules = [];
this._argumentsByBlock = {};
this._definedFunctions = [];
this.nbNodes = 0;
this.curNode = 0;
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
this.waitingOnReadyNode = false;
var that = this;
this._skulptifyHandler = function (name, generatorName, blockName, nbArgs, type) {
if(!arrayContains(this._definedFunctions, name)) { this._definedFunctions.push(name); }
var handler = '';
handler += "\tcurrentPythonContext.runner.checkArgs('" + name + "', '" + generatorName + "', '" + blockName + "', arguments);";
handler += "\n\tvar susp = new Sk.misceval.Suspension();";
handler += "\n\tvar result = Sk.builtin.none.none$;";
// If there are arguments, convert them from Skulpt format to the libs format
handler += "\n\tvar args = Array.prototype.slice.call(arguments);";
handler += "\n\tfor(var i=0; i<args.length; i++) { args[i] = currentPythonContext.runner.skToJs(args[i]); };";
handler += "\n\tsusp.resume = function() { return result; };";
handler += "\n\tsusp.data = {type: 'Sk.promise', promise: new Promise(function(resolve) {";
handler += "\n\targs.push(resolve);";
// Count actions
if(type == 'actions') {
handler += "\n\tcurrentPythonContext.runner._nbActions += 1;";
}
handler += "\n\ttry {";
handler += '\n\t\tcurrentPythonContext["' + generatorName + '"]["' + blockName + '"].apply(currentPythonContext, args);';
handler += "\n\t} catch (e) {";
handler += "\n\t\tcurrentPythonContext.runner._onStepError(e)}";
handler += '\n\t}).then(function (value) {\nresult = value;\nreturn value;\n })};';
handler += '\n\treturn susp;';
return '\nmod.' + name + ' = new Sk.builtin.func(function () {\n' + handler + '\n});\n';
};
this._skulptifyValue = function(value) {
if(typeof value === "number") {
var handler = 'Sk.builtin.int_(' + value + ')';
} else if(typeof value === "boolean") {
var handler = 'Sk.builtin.bool(' + value.toString() + ')';
} else if(typeof value === "string") {
var handler = 'Sk.builtin.str(' + JSON.stringify(value) + ')';
} else if(Array.isArray(value)) {
var list = [];
for(var i=0; i<value.length; i++) {
list.push(this._skulptifyValue(value[i]));
}
var handler = 'Sk.builtin.list([' + list.join(',') + '])';
} else {
throw "Unable to translate value '" + value + "' into a Skulpt constant.";
}
return 'new ' + handler;
}
this._skulptifyConst = function(name, value) {
var handler = this._skulptifyValue(value);
return '\nmod.' + name + ' = ' + handler + ';\n';
};
this._injectFunctions = function () {
// Generate Python custom libraries from all generated blocks
this._definedFunctions = [];
if(this.context.infos && this.context.infos.includeBlocks && this.context.infos.includeBlocks.generatedBlocks) {
// Flatten customBlocks information for easy access
var blocksInfos = {};
for (var generatorName in this.context.customBlocks) {
for (var typeName in this.context.customBlocks[generatorName]) {
var blockList = this.context.customBlocks[generatorName][typeName];
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockInfo = blockList[iBlock];
blocksInfos[blockInfo.name] = {
nbArgs: 0, // handled below
type: typeName};
blocksInfos[blockInfo.name].nbsArgs = [];
if(blockInfo.anyArgs) {
// Allows to specify the function can accept any number of arguments
blocksInfos[blockInfo.name].nbsArgs.push(Infinity);
}
var variants = blockInfo.variants ? blockInfo.variants : (blockInfo.params ? [blockInfo.params] : []);
if(variants.length) {
for(var i=0; i < variants.length; i++) {
blocksInfos[blockInfo.name].nbsArgs.push(variants[i].length);
}
}
}
}
}
// Generate functions used in the task
for (var generatorName in this.context.infos.includeBlocks.generatedBlocks) {
var blockList = this.context.infos.includeBlocks.generatedBlocks[generatorName];
if(!blockList.length) { continue; }
var modContents = "var $builtinmodule = function (name) {\n\nvar mod = {};\nmod.__package__ = Sk.builtin.none.none$;\n";
if(!this._argumentsByBlock[generatorName]) {
this._argumentsByBlock[generatorName] = {};
}
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockName = blockList[iBlock];
var code = this.context.strings.code[blockName];
if (typeof(code) == "undefined") {
code = blockName;
}
var nbsArgs = blocksInfos[blockName] ? (blocksInfos[blockName].nbsArgs ? blocksInfos[blockName].nbsArgs : []) : [];
var type = blocksInfos[blockName] ? blocksInfos[blockName].type : 'actions';
if(type == 'actions') {
this._hasActions = true;
}
this._argumentsByBlock[generatorName][blockName] = nbsArgs;
modContents += this._skulptifyHandler(code, generatorName, blockName, nbsArgs, type);
}
// TODO :: allow selection of constants available in a task
// if(this.context.infos.includeBlocks.constants && this.context.infos.includeBlocks.constants[generatorName]) {
if(this.context.customConstants && this.context.customConstants[generatorName]) {
var constList = this.context.customConstants[generatorName];
for(var iConst=0; iConst < constList.length; iConst++) {
var name = constList[iConst].name;
if(this.context.strings.constant && this.context.strings.constant[name]) {
name = this.context.strings.constant[name];
}
modContents += this._skulptifyConst(name, constList[iConst].value)
}
}
modContents += "\nreturn mod;\n};";
Sk.builtinFiles["files"]["src/lib/"+generatorName+".js"] = modContents;
this.availableModules.push(generatorName);
}
}
};
this.checkArgs = function (name, generatorName, blockName, args) {
// Check the number of arguments corresponds to a variant of the function
if(!this._argumentsByBlock[generatorName] || !this._argumentsByBlock[generatorName][blockName]) {
console.error("Couldn't find the number of arguments for " + generatorName + "/" + blockName + ".");
return;
}
var nbsArgs = this._argumentsByBlock[generatorName][blockName];
if(nbsArgs.length == 0) {
// This function doesn't have arguments
if(args.length > 0) {
msg = name + "() takes no arguments (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
} else if(nbsArgs.indexOf(args.length) == -1 && nbsArgs.indexOf(Infinity) == -1) {
var minArgs = nbsArgs[0];
var maxArgs = nbsArgs[0];
for(var i=1; i < nbsArgs.length; i++) {
minArgs = Math.min(minArgs, nbsArgs[i]);
maxArgs = Math.max(maxArgs, nbsArgs[i]);
}
if (minArgs === maxArgs) {
msg = name + "() takes exactly " + minArgs + " arguments";
} else if (args.length < minArgs) {
msg = name + "() takes at least " + minArgs + " arguments";
} else if (args.length > maxArgs){
msg = name + "() takes at most " + maxArgs + " arguments";
} else {
msg = name + "() doesn't have a variant accepting this number of arguments";
}
msg += " (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
};
this._definePythonNumber = function() {
// Create a class which behaves as a Number, but can have extra properties
this.pythonNumber = function(val) {
this.val = new Number(val);
}
this.pythonNumber.prototype = Object.create(Number.prototype);
function makePrototype(func) {
return function() { return Number.prototype[func].call(this.val); }
}
var funcs = ['toExponential', 'toFixed', 'toLocaleString', 'toPrecision', 'toSource', 'toString', 'valueOf'];
for(var i = 0; i < funcs.length ; i++) {
this.pythonNumber.prototype[funcs[i]] = makePrototype(funcs[i]);
}
}
this.skToJs = function(val) {
// Convert Skulpt item to JavaScript
// TODO :: Might be partly replaceable with Sk.ffi.remapToJs
if(val instanceof Sk.builtin.bool) {
return val.v ? true : false;
} else if(val instanceof Sk.builtin.func) {
return function() {
var args = [];
for(var i = 0; i < arguments.length; i++) {
args.push(that._createPrimitive(arguments[i]));
}
var retp = new Promise(function(resolve, reject) {
var p = Sk.misceval.asyncToPromise(function() { return val.tp$call(args); });
p.then(function(val) { resolve(that.skToJs(val)); });
});
return retp;
}
} else if(val instanceof Sk.builtin.dict) {
var dictKeys = Object.keys(val);
var retVal = {};
for(var i = 0; i < dictKeys.length; i++) {
var key = dictKeys[i];
if(key == 'size' || key == '__class__') { continue; }
var subItems = val[key].items;
for(var j = 0; j < subItems.length; j++) {
var subItem = subItems[j];
retVal[subItem.lhs.v] = this.skToJs(subItem.rhs);
}
}
return retVal;
} else {
var retVal = val.v;
if(val instanceof Sk.builtin.tuple || val instanceof Sk.builtin.list) {
retVal = [];
for(var i = 0; i < val.v.length; i++) {
retVal[i] = this.skToJs(val.v[i]);
}
}
if(val instanceof Sk.builtin.tuple) {
retVal.isTuple = true;
}
if(val instanceof Sk.builtin.float_) {
retVal = new this.pythonNumber(retVal);
retVal.isFloat = true;
}
return retVal;
}
};
this.getDefinedFunctions = function() {
this._injectFunctions();
return this._definedFunctions.slice();
};
this._setTimeout = function(func, time) {
var timeoutId = null;
var that = this;
function wrapper() {
var idx = that._timeouts.indexOf(timeoutId);
if(idx > -1) { that._timeouts.splice(idx, 1); }
func();
}
timeoutId = window.setTimeout(wrapper, time);
this._timeouts.push(timeoutId);
}
this.waitDelay = function (callback, value, delay) {
this._paused = true;
if (delay > 0) {
var _noDelay = this.noDelay.bind(this, callback, value);
this._setTimeout(_noDelay, delay);
// We just waited some time, allow next steps to not be delayed
this._allowStepsWithoutDelay = Math.min(this._allowStepsWithoutDelay + Math.ceil(delay / 10), 100);
} else {
this.noDelay(callback, value);
}
};
this.waitEvent = function (callback, target, eventName, func) {
this._paused = true;
var listenerFunc = null;
var that = this;
listenerFunc = function(e) {
target.removeEventListener(eventName, listenerFunc);
that.noDelay(callback, func(e));
};
target.addEventListener(eventName, listenerFunc);
};
this.waitCallback = function (callback) {
// Returns a callback to be called once we can continue the execution
this._paused = true;
var that = this;
return function(value) {
that.noDelay(callback, value);
};
};
this.noDelay = function (callback, value) {
var primitive = this._createPrimitive(value);
if (primitive !== Sk.builtin.none.none$) {
// Apparently when we create a new primitive, the debugger adds a call to
// the callstack.
this._resetCallstackOnNextStep = true;
this.reportValue(value);
}
this._paused = false;
callback(primitive);
this._setTimeout(this._continue.bind(this), 10);
};
this.allowSwitch = function(callback) {
// Tells the runner that we can switch the execution to another node
var curNode = context.curNode;
var ready = function(readyCallback) {
that.readyNodes[curNode] = function() {
readyCallback(callback);
};
if(that.waitingOnReadyNode) {
that.waitingOnReadyNode = false;
that.startNode(that.curNode, curNode);
}
};
this.readyNodes[curNode] = false;
this.startNextNode(curNode);
return ready;
};
this.defaultSelectNextNode = function(runner, previousNode) {
var i = previousNode + 1;
if(i >= runner.nbNodes) { i = 0; }
do {
if(runner.readyNodes[i]) {
break;
} else {
i++;
}
if(i >= runner.nbNodes) { i = 0; }
} while(i != previousNode);
return i;
};
// Allow the next node selection process to be customized
this.selectNextNode = this.defaultSelectNextNode;
this.startNextNode = function(curNode) {
// Start the next node when one has been switched from
var newNode = this.selectNextNode(this, curNode);
this._paused = true;
if(newNode == curNode) {
// No ready node
this.waitingOnReadyNode = true;
} else {
// TODO :: switch execution
this.startNode(curNode, newNode);
}
};
this.startNode = function(curNode, newNode) {
setTimeout(function() {
that.nodeStates[curNode] = that._debugger.suspension_stack.slice();
that._debugger.suspension_stack = that.nodeStates[newNode];
that.curNode = newNode;
var ready = that.readyNodes[newNode];
if(ready) {
that.readyNodes[newNode] = false;
context.setCurNode(newNode);
if(typeof ready == 'function') {
ready();
} else {
that._paused = false;
that._continue();
}
} else {
that.waitingOnReadyNode = true;
}
}, 0);
};
this._createPrimitive = function (data) {
// TODO :: Might be replaceable with Sk.ffi.remapToPy
if (data === undefined || data === null) {
return Sk.builtin.none.none$; // Reuse the same object.
}
var type = typeof data;
var result = {v: data}; // Emulate a Skulpt object as default
if (type === 'number') {
if(Math.floor(data) == data) { // isInteger isn't supported by IE
result = new Sk.builtin.int_(data);
} else {
result = new Sk.builtin.float_(data);
}
} else if (type === 'string') {
result = new Sk.builtin.str(data);
} else if (type === 'boolean') {
result = new Sk.builtin.bool(data);
} else if (typeof data.length != 'undefined') {
var skl = [];
for(var i = 0; i < data.length; i++) {
skl.push(this._createPrimitive(data[i]));
}
result = new Sk.builtin.list(skl);
} else if (data) {
// Create a dict if it's an object with properties
var props = [];
for(var prop in data) {
if(data.hasOwnProperty(prop)) {
// We can pass a list [prop1name, prop1val, ...] to Skulpt's dict
// constructor ; however to work properly they need to be Skulpt
// primitives too
props.push(this._createPrimitive(prop));
props.push(this._createPrimitive(data[prop]));
}
}
if(props.length > 0) {
result = new Sk.builtin.dict(props);
}
}
return result;
};
this._onOutput = function (_output) {
that.print(_output);
};
this._onDebugOut = function (text) {
// console.log('DEBUG: ', text);
};
this._configure = function () {
Sk.configure({
output: this._onOutput,
debugout: this._onDebugOut,
read: this._builtinRead.bind(this),
yieldLimit: null,
execLimit: null,
debugging: true,
breakpoints: this._debugger.check_breakpoints.bind(this._debugger),
__future__: Sk.python3
});
Sk.pre = "edoutput";
Sk.pre = "codeoutput";
// Disable document library
delete Sk.builtinFiles["files"]["src/lib/document.js"];
this._definePythonNumber();
this.context.callCallback = this.noDelay.bind(this);
};
this.print = function (message, className) {
if (message.trim() === 'Program execution complete') {
this._onFinished();
}
if (message) {
//console.log('PRINT: ', message, className || '');
}
};
this._onFinished = function () {
this.finishedNodes[this.curNode] = true;
this.readyNodes[this.curNode] = false;
if(this.finishedNodes.indexOf(false) != -1) {
// At least one node is not finished
this.startNextNode(this.curNode);
} else {
// All nodes are finished, stop the execution
this.stop();
}
try {
this.context.infos.checkEndCondition(this.context, true);
} catch (e) {
this._onStepError(e);
}
};
this._builtinRead = function (x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
};
this.get_source_line = function (lineno) {
return this._code.split('\n')[lineno];
};
this._continue = function () {
if (this.context.infos.checkEndEveryTurn) {
try {
this.context.infos.checkEndCondition(context, false);
} catch(e) {
this._onStepError(e);
return;
}
}
if (!this.context.allowInfiniteLoop && this._steps >= this._maxIterations) {
this._onStepError(window.languageStrings.tooManyIterations);
} else if (!this.context.allowInfiniteLoop && this._stepsWithoutAction >= this._maxIterWithoutAction) {
this._onStepError(window.languageStrings.tooManyIterationsWithoutAction);
} else if (!this._paused && this._isRunning) {
this.step();
}
};
this.initCodes = function (codes) {
// For reportValue in Skulpt.
window.currentPythonRunner = this;
if(Sk.running) {
if(typeof Sk.runQueue === 'undefined') {
Sk.runQueue = [];
}
Sk.runQueue.push({ctrl: this, codes: codes});
return;
}
currentPythonContext = this.context;
this._debugger = new Sk.Debugger(this._editor_filename, this);
this._configure();
this._injectFunctions();
/**
* Add a last instruction at the end of the code so Skupt will generate a Suspension state
* for after the user's last instruction. Otherwise it would be impossible to retrieve the
* modifications made by the last user's line. For skulpt analysis.
*/
this._code = codes[0] + "\npass";
this._setBreakpoint(1, false);
if(typeof this.context.infos.maxIter !== 'undefined') {
this._maxIterations = Math.ceil(this.context.infos.maxIter/10);
}
if(typeof this.context.infos.maxIterWithoutAction !== 'undefined') {
this._maxIterWithoutAction = Math.ceil(this.context.infos.maxIterWithoutAction/10);
}
if(!this._hasActions) {
// No limit on
this._maxIterWithoutAction = this._maxIterations;
}
var susp_handlers = {};
susp_handlers["*"] = this._debugger.suspension_handler.bind(this);
this.nbNodes = codes.length;
this.curNode = 0;
context.setCurNode(this.curNode);
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
for(var i = 0; i < codes.length ; i++) {
this.readyNodes.push(true);
this.finishedNodes.push(false);
try {
var promise = this._debugger.asyncToPromise(this._asyncCallback(this._editor_filename, codes[i]), susp_handlers, this._debugger);
promise.then(this._debugger.success.bind(this._debugger), this._debugger.error.bind(this._debugger));
} catch (e) {
this._onOutput(e.toString() + "\n");
}
this.nodeStates.push(this._debugger.suspension_stack);
this._debugger.suspension_stack = [];
}
this._debugger.suspension_stack = this.nodeStates[0];
this._resetInterpreterState();
Sk.running = true;
this._isRunning = true;
};
this.run = function () {
if(this.stepMode) {
this._paused = this._stepInProgress;
this.stepMode = false;
}
this._setTimeout(this._continue.bind(this), 100);
};
this.runCodes = function(codes) {
this.initCodes(codes);
this.run();
};
this.runStep = function (resolve, reject) {
this.stepMode = true;
if (this._isRunning && !this._stepInProgress) {
this.step(resolve, reject);
}
};
this.nbRunning = function () {
return this._isRunning ? 1 : 0;
};
this.removeEditorMarker = function () {
var editor = this.context.blocklyHelper._aceEditor;
if(editor && this._editorMarker) {
editor.session.removeMarker(this._editorMarker);
this._editorMarker = null;
}
};
this.unSkulptValue = function (origValue) {
// Transform a value, possibly a Skulpt one, into a printable value
if(typeof origValue !== 'object' || origValue === null) {
var value = origValue;
} else if(origValue.constructor === Sk.builtin.dict) {
var keys = Object.keys(origValue);
var dictElems = [];
for(var i=0; i<keys.length; i++) {
if(keys[i] == 'size' || keys[i] == '__class__'
|| !origValue[keys[i]].items
|| !origValue[keys[i]].items[0]) {
continue;
}
var items = origValue[keys[i]].items[0];
dictElems.push('' + this.unSkulptValue(items.lhs) + ': ' + this.unSkulptValue(items.rhs));
}
var value = '{' + dictElems.join(',' ) + '}';
} else if(origValue.constructor === Sk.builtin.list) {
var oldArray = origValue.v;
var newArray = [];
for(var i=0; i<oldArray.length; i++) {
newArray.push(this.unSkulptValue(oldArray[i]));
}
var value = '[' + newArray.join(', ') + ']';
} else if(origValue.v !== undefined) {
var value = origValue.v;
if(typeof value == 'string') {
value = '"' + value + '"';
}
} else if(typeof origValue == 'object') {
var value = origValue;
}
return value;
};
this.reportValue = function (origValue, varName) {
// Show a popup displaying the value of a block in step-by-step mode
if(origValue === undefined
|| (origValue && origValue.constructor === Sk.builtin.func)
|| !this._editorMarker
|| !context.display
|| !this.stepMode) {
return origValue;
}
var value = this.unSkulptValue(origValue);
|
if(highlighted.length == 0) {
return origValue;
} else if(highlighted.find('.ace_start').length > 0) {
var target = highlighted.find('.ace_start')[0];
} else {
var target = highlighted[0];
}
var bbox = target.getBoundingClientRect();
var leftPos = bbox.left+10;
var topPos = bbox.top-14;
if(typeof value == 'boolean') {
var displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
} else if(value === null) {
var displayStr = "None"
} else {
var displayStr = value.toString();
}
if(typeof value == 'boolean') {
displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
}
if(varName) {
displayStr = '' + varName + ' = ' + displayStr;
}
var dropDownDiv = '' +
'<div class="blocklyDropDownDiv" style="transition: transform 0.25s, opacity 0.25s; background-color: rgb(255, 255, 255); border-color: rgb(170, 170, 170); left: '+leftPos+'px; top: '+topPos+'px; display: block; opacity: 1; transform: translate(0px, -20px);">' +
' <div class="blocklyDropDownContent">' +
' <div class="valueReportBox">' +
displayStr +
' </div>' +
' </div>' +
' <div class="blocklyDropDownArrow arrowBottom" style="transform: translate(22px, 15px) rotate(45deg);"></div>' +
'</div>';
$('.blocklyDropDownDiv').remove();
$('body').append(dropDownDiv);
return origValue;
};
this.stop = function () {
for (var i = 0; i < this._timeouts.length; i += 1) {
window.clearTimeout(this._timeouts[i]);
}
this._timeouts = [];
this.removeEditorMarker();
if(Sk.runQueue) {
for (var i=0; i<Sk.runQueue.length; i++) {
if(Sk.runQueue[i].ctrl === this) {
Sk.runQueue.splice(i, 1);
i--;
}
}
}
if(window.quickAlgoInterface) {
window.quickAlgoInterface.setPlayPause(false);
}
this._resetInterpreterState();
};
this.isRunning = function () {
return this._isRunning;
};
this._resetInterpreterState = function () {
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = 0;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._isRunning = false;
this._resetDone = false;
this.stepMode = false;
this._stepInProgress = false;
this._resetCallstackOnNextStep = false;
this._paused = false;
this.waitingOnReadyNode = false;
Sk.running = false;
if(Sk.runQueue && Sk.runQueue.length > 0) {
var nextExec = Sk.runQueue.shift();
setTimeout(function () { nextExec.ctrl.runCodes(nextExec.codes); }, 100);
}
};
this._resetCallstack = function () {
if (this._resetCallstackOnNextStep) {
this._resetCallstackOnNextStep = false;
this._debugger.suspension_stack.pop();
}
};
this.reset = function() {
if(this._resetDone) { return; }
if(this.isRunning()) {
this.stop();
}
this.context.reset();
this._resetDone = true;
};
this.step = function (resolve, reject) {
this._resetCallstack();
this._stepInProgress = true;
var editor = this.context.blocklyHelper._aceEditor;
var markDelay = this.context.infos ? Math.floor(this.context.infos.actionDelay/4) : 0;
if(this.context.display && (this.stepMode || markDelay > 30)) {
var curSusp = this._debugger.suspension_stack[this._debugger.suspension_stack.length-1];
if(curSusp && curSusp.$lineno) {
this.removeEditorMarker();
var splitCode = this._code.split(/[\r\n]/);
var Range = ace.require('ace/range').Range;
this._editorMarker = editor.session.addMarker(
new Range(curSusp.$lineno-1, curSusp.$colno, curSusp.$lineno, 0),
"aceHighlight",
"line");
}
} else {
this.removeEditorMarker();
}
var stepDelay = 0;
if(!this.stepMode && this.context.allowInfiniteLoop) {
// Add a delay in infinite loops to avoid using all CPU
if(this._allowStepsWithoutDelay > 0) {
// We just had a waitDelay, don't delay further
this._allowStepsWithoutDelay -= 1;
} else {
stepDelay = 10;
}
}
var realStepDelay = markDelay + stepDelay;
if(realStepDelay > 0) {
this._paused = true;
var self = this;
setTimeout(function() {
self.realStep(resolve, reject);
}, realStepDelay);
} else {
this.realStep(resolve, reject);
}
};
this.realStep = function (resolve, reject) {
this._paused = this.stepMode;
this._debugger.enable_step_mode();
this._debugger.resume.call(this._debugger, resolve, reject);
this._steps += 1;
if(this._lastNbActions != this._nbActions) {
this._lastNbActions = this._nbActions;
this._stepsWithoutAction = 0;
} else {
this._stepsWithoutAction += 1;
}
};
this._onStepSuccess = function (callback) {
// If there are still timeouts, there's still a step in progress
this._stepInProgress = !!this._timeouts.length;
this._continue();
if (typeof callback === 'function') {
callback();
}
};
this._onStepError = function (message, callback) {
context.onExecutionEnd && context.onExecutionEnd();
// We always get there, even on a success
this.stop();
message = '' + message;
// Skulpt doesn't support well NoneTypes
if(message.indexOf("TypeError: Cannot read property") > -1 && message.indexOf("undefined") > -1) {
message = message.replace(/^.* line/, "TypeError: NoneType value used in operation on line");
}
if(message.indexOf('undefined') > -1) {
message += '. ' + window.languageStrings.undefinedMsg;
}
// Transform message depending on whether we successfully
if(this.context.success) {
message = "<span style='color:green;font-weight:bold'>" + message + "</span>";
} else {
message = this.context.messagePrefixFailure + message;
}
this.messageCallback(message);
if (typeof callback === 'function') {
callback();
}
};
this._setBreakpoint = function (bp, isTemporary) {
this._debugger.add_breakpoint(this._editor_filename + ".py", bp, "0", isTemporary);
};
this._asyncCallback = function (editor_filename, code) {
var dumpJS = false;
return function() {
return Sk.importMainWithBody(editor_filename, dumpJS, code, true);
};
};
this.signalAction = function () {
// Allows a context to signal an "action" happened
this._stepsWithoutAction = 0;
};
}
// Entry point used by the platform: build a PythonInterpreter bound to the
// given task context, reporting messages through msgCallback.
function initBlocklyRunner(context, msgCallback) {
  var runner = new PythonInterpreter(context, msgCallback);
  return runner;
};
|
var highlighted = $('.aceHighlight');
|
random_line_split
|
python_runner_1.5.js
|
/*
python_runner:
Python code runner.
*/
var currentPythonContext = null;
function PythonInterpreter(context, msgCallback) {
this.context = context;
this.messageCallback = msgCallback;
this._code = '';
this._editor_filename = "<stdin>";
this.context.runner = this;
this._maxIterations = 4000;
this._maxIterWithoutAction = 50;
this._resetCallstackOnNextStep = false;
this._paused = false;
this._isRunning = false;
this._stepInProgress = false;
this._resetDone = true;
this.stepMode = false;
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = null;
this._hasActions = false;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._timeouts = [];
this._editorMarker = null;
this.availableModules = [];
this._argumentsByBlock = {};
this._definedFunctions = [];
this.nbNodes = 0;
this.curNode = 0;
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
this.waitingOnReadyNode = false;
var that = this;
this._skulptifyHandler = function (name, generatorName, blockName, nbArgs, type) {
if(!arrayContains(this._definedFunctions, name)) { this._definedFunctions.push(name); }
var handler = '';
handler += "\tcurrentPythonContext.runner.checkArgs('" + name + "', '" + generatorName + "', '" + blockName + "', arguments);";
handler += "\n\tvar susp = new Sk.misceval.Suspension();";
handler += "\n\tvar result = Sk.builtin.none.none$;";
// If there are arguments, convert them from Skulpt format to the libs format
handler += "\n\tvar args = Array.prototype.slice.call(arguments);";
handler += "\n\tfor(var i=0; i<args.length; i++) { args[i] = currentPythonContext.runner.skToJs(args[i]); };";
handler += "\n\tsusp.resume = function() { return result; };";
handler += "\n\tsusp.data = {type: 'Sk.promise', promise: new Promise(function(resolve) {";
handler += "\n\targs.push(resolve);";
// Count actions
if(type == 'actions') {
handler += "\n\tcurrentPythonContext.runner._nbActions += 1;";
}
handler += "\n\ttry {";
handler += '\n\t\tcurrentPythonContext["' + generatorName + '"]["' + blockName + '"].apply(currentPythonContext, args);';
handler += "\n\t} catch (e) {";
handler += "\n\t\tcurrentPythonContext.runner._onStepError(e)}";
handler += '\n\t}).then(function (value) {\nresult = value;\nreturn value;\n })};';
handler += '\n\treturn susp;';
return '\nmod.' + name + ' = new Sk.builtin.func(function () {\n' + handler + '\n});\n';
};
this._skulptifyValue = function(value) {
if(typeof value === "number") {
var handler = 'Sk.builtin.int_(' + value + ')';
} else if(typeof value === "boolean") {
var handler = 'Sk.builtin.bool(' + value.toString() + ')';
} else if(typeof value === "string") {
var handler = 'Sk.builtin.str(' + JSON.stringify(value) + ')';
} else if(Array.isArray(value)) {
var list = [];
for(var i=0; i<value.length; i++) {
list.push(this._skulptifyValue(value[i]));
}
var handler = 'Sk.builtin.list([' + list.join(',') + '])';
} else {
throw "Unable to translate value '" + value + "' into a Skulpt constant.";
}
return 'new ' + handler;
}
this._skulptifyConst = function(name, value) {
var handler = this._skulptifyValue(value);
return '\nmod.' + name + ' = ' + handler + ';\n';
};
this._injectFunctions = function () {
// Generate Python custom libraries from all generated blocks
this._definedFunctions = [];
if(this.context.infos && this.context.infos.includeBlocks && this.context.infos.includeBlocks.generatedBlocks) {
// Flatten customBlocks information for easy access
var blocksInfos = {};
for (var generatorName in this.context.customBlocks) {
for (var typeName in this.context.customBlocks[generatorName]) {
var blockList = this.context.customBlocks[generatorName][typeName];
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockInfo = blockList[iBlock];
blocksInfos[blockInfo.name] = {
nbArgs: 0, // handled below
type: typeName};
blocksInfos[blockInfo.name].nbsArgs = [];
if(blockInfo.anyArgs) {
// Allows to specify the function can accept any number of arguments
blocksInfos[blockInfo.name].nbsArgs.push(Infinity);
}
var variants = blockInfo.variants ? blockInfo.variants : (blockInfo.params ? [blockInfo.params] : []);
if(variants.length) {
for(var i=0; i < variants.length; i++) {
blocksInfos[blockInfo.name].nbsArgs.push(variants[i].length);
}
}
}
}
}
// Generate functions used in the task
for (var generatorName in this.context.infos.includeBlocks.generatedBlocks) {
var blockList = this.context.infos.includeBlocks.generatedBlocks[generatorName];
if(!blockList.length) { continue; }
var modContents = "var $builtinmodule = function (name) {\n\nvar mod = {};\nmod.__package__ = Sk.builtin.none.none$;\n";
if(!this._argumentsByBlock[generatorName]) {
this._argumentsByBlock[generatorName] = {};
}
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockName = blockList[iBlock];
var code = this.context.strings.code[blockName];
if (typeof(code) == "undefined") {
code = blockName;
}
var nbsArgs = blocksInfos[blockName] ? (blocksInfos[blockName].nbsArgs ? blocksInfos[blockName].nbsArgs : []) : [];
var type = blocksInfos[blockName] ? blocksInfos[blockName].type : 'actions';
if(type == 'actions') {
this._hasActions = true;
}
this._argumentsByBlock[generatorName][blockName] = nbsArgs;
modContents += this._skulptifyHandler(code, generatorName, blockName, nbsArgs, type);
}
// TODO :: allow selection of constants available in a task
// if(this.context.infos.includeBlocks.constants && this.context.infos.includeBlocks.constants[generatorName]) {
if(this.context.customConstants && this.context.customConstants[generatorName]) {
var constList = this.context.customConstants[generatorName];
for(var iConst=0; iConst < constList.length; iConst++) {
var name = constList[iConst].name;
if(this.context.strings.constant && this.context.strings.constant[name]) {
name = this.context.strings.constant[name];
}
modContents += this._skulptifyConst(name, constList[iConst].value)
}
}
modContents += "\nreturn mod;\n};";
Sk.builtinFiles["files"]["src/lib/"+generatorName+".js"] = modContents;
this.availableModules.push(generatorName);
}
}
};
this.checkArgs = function (name, generatorName, blockName, args) {
// Check the number of arguments corresponds to a variant of the function
if(!this._argumentsByBlock[generatorName] || !this._argumentsByBlock[generatorName][blockName]) {
console.error("Couldn't find the number of arguments for " + generatorName + "/" + blockName + ".");
return;
}
var nbsArgs = this._argumentsByBlock[generatorName][blockName];
if(nbsArgs.length == 0) {
// This function doesn't have arguments
if(args.length > 0) {
msg = name + "() takes no arguments (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
} else if(nbsArgs.indexOf(args.length) == -1 && nbsArgs.indexOf(Infinity) == -1) {
var minArgs = nbsArgs[0];
var maxArgs = nbsArgs[0];
for(var i=1; i < nbsArgs.length; i++) {
minArgs = Math.min(minArgs, nbsArgs[i]);
maxArgs = Math.max(maxArgs, nbsArgs[i]);
}
if (minArgs === maxArgs) {
msg = name + "() takes exactly " + minArgs + " arguments";
} else if (args.length < minArgs) {
msg = name + "() takes at least " + minArgs + " arguments";
} else if (args.length > maxArgs){
msg = name + "() takes at most " + maxArgs + " arguments";
} else {
msg = name + "() doesn't have a variant accepting this number of arguments";
}
msg += " (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
};
this._definePythonNumber = function() {
// Create a class which behaves as a Number, but can have extra properties
this.pythonNumber = function(val) {
this.val = new Number(val);
}
this.pythonNumber.prototype = Object.create(Number.prototype);
function makePrototype(func) {
return function() { return Number.prototype[func].call(this.val); }
}
var funcs = ['toExponential', 'toFixed', 'toLocaleString', 'toPrecision', 'toSource', 'toString', 'valueOf'];
for(var i = 0; i < funcs.length ; i++) {
this.pythonNumber.prototype[funcs[i]] = makePrototype(funcs[i]);
}
}
this.skToJs = function(val) {
// Convert Skulpt item to JavaScript
// TODO :: Might be partly replaceable with Sk.ffi.remapToJs
if(val instanceof Sk.builtin.bool) {
return val.v ? true : false;
} else if(val instanceof Sk.builtin.func) {
return function() {
var args = [];
for(var i = 0; i < arguments.length; i++) {
args.push(that._createPrimitive(arguments[i]));
}
var retp = new Promise(function(resolve, reject) {
var p = Sk.misceval.asyncToPromise(function() { return val.tp$call(args); });
p.then(function(val) { resolve(that.skToJs(val)); });
});
return retp;
}
} else if(val instanceof Sk.builtin.dict) {
var dictKeys = Object.keys(val);
var retVal = {};
for(var i = 0; i < dictKeys.length; i++) {
var key = dictKeys[i];
if(key == 'size' || key == '__class__') { continue; }
var subItems = val[key].items;
for(var j = 0; j < subItems.length; j++) {
var subItem = subItems[j];
retVal[subItem.lhs.v] = this.skToJs(subItem.rhs);
}
}
return retVal;
} else {
var retVal = val.v;
if(val instanceof Sk.builtin.tuple || val instanceof Sk.builtin.list) {
retVal = [];
for(var i = 0; i < val.v.length; i++) {
retVal[i] = this.skToJs(val.v[i]);
}
}
if(val instanceof Sk.builtin.tuple) {
retVal.isTuple = true;
}
if(val instanceof Sk.builtin.float_) {
retVal = new this.pythonNumber(retVal);
retVal.isFloat = true;
}
return retVal;
}
};
this.getDefinedFunctions = function() {
this._injectFunctions();
return this._definedFunctions.slice();
};
this._setTimeout = function(func, time) {
var timeoutId = null;
var that = this;
function wrapper() {
var idx = that._timeouts.indexOf(timeoutId);
if(idx > -1) { that._timeouts.splice(idx, 1); }
func();
}
timeoutId = window.setTimeout(wrapper, time);
this._timeouts.push(timeoutId);
}
this.waitDelay = function (callback, value, delay) {
this._paused = true;
if (delay > 0) {
var _noDelay = this.noDelay.bind(this, callback, value);
this._setTimeout(_noDelay, delay);
// We just waited some time, allow next steps to not be delayed
this._allowStepsWithoutDelay = Math.min(this._allowStepsWithoutDelay + Math.ceil(delay / 10), 100);
} else {
this.noDelay(callback, value);
}
};
this.waitEvent = function (callback, target, eventName, func) {
this._paused = true;
var listenerFunc = null;
var that = this;
listenerFunc = function(e) {
target.removeEventListener(eventName, listenerFunc);
that.noDelay(callback, func(e));
};
target.addEventListener(eventName, listenerFunc);
};
this.waitCallback = function (callback) {
// Returns a callback to be called once we can continue the execution
this._paused = true;
var that = this;
return function(value) {
that.noDelay(callback, value);
};
};
this.noDelay = function (callback, value) {
var primitive = this._createPrimitive(value);
if (primitive !== Sk.builtin.none.none$) {
// Apparently when we create a new primitive, the debugger adds a call to
// the callstack.
this._resetCallstackOnNextStep = true;
this.reportValue(value);
}
this._paused = false;
callback(primitive);
this._setTimeout(this._continue.bind(this), 10);
};
this.allowSwitch = function(callback) {
// Tells the runner that we can switch the execution to another node
var curNode = context.curNode;
var ready = function(readyCallback) {
that.readyNodes[curNode] = function() {
readyCallback(callback);
};
if(that.waitingOnReadyNode) {
that.waitingOnReadyNode = false;
that.startNode(that.curNode, curNode);
}
};
this.readyNodes[curNode] = false;
this.startNextNode(curNode);
return ready;
};
this.defaultSelectNextNode = function(runner, previousNode) {
var i = previousNode + 1;
if(i >= runner.nbNodes) { i = 0; }
do {
if(runner.readyNodes[i]) {
break;
} else {
i++;
}
if(i >= runner.nbNodes) { i = 0; }
} while(i != previousNode);
return i;
};
// Allow the next node selection process to be customized
this.selectNextNode = this.defaultSelectNextNode;
this.startNextNode = function(curNode) {
// Start the next node when one has been switched from
var newNode = this.selectNextNode(this, curNode);
this._paused = true;
if(newNode == curNode) {
// No ready node
this.waitingOnReadyNode = true;
} else {
// TODO :: switch execution
this.startNode(curNode, newNode);
}
};
this.startNode = function(curNode, newNode) {
setTimeout(function() {
that.nodeStates[curNode] = that._debugger.suspension_stack.slice();
that._debugger.suspension_stack = that.nodeStates[newNode];
that.curNode = newNode;
var ready = that.readyNodes[newNode];
if(ready) {
that.readyNodes[newNode] = false;
context.setCurNode(newNode);
if(typeof ready == 'function') {
ready();
} else {
that._paused = false;
that._continue();
}
} else {
that.waitingOnReadyNode = true;
}
}, 0);
};
this._createPrimitive = function (data) {
// TODO :: Might be replaceable with Sk.ffi.remapToPy
if (data === undefined || data === null) {
return Sk.builtin.none.none$; // Reuse the same object.
}
var type = typeof data;
var result = {v: data}; // Emulate a Skulpt object as default
if (type === 'number') {
if(Math.floor(data) == data) { // isInteger isn't supported by IE
result = new Sk.builtin.int_(data);
} else {
result = new Sk.builtin.float_(data);
}
} else if (type === 'string') {
result = new Sk.builtin.str(data);
} else if (type === 'boolean') {
result = new Sk.builtin.bool(data);
} else if (typeof data.length != 'undefined') {
var skl = [];
for(var i = 0; i < data.length; i++) {
skl.push(this._createPrimitive(data[i]));
}
result = new Sk.builtin.list(skl);
} else if (data) {
// Create a dict if it's an object with properties
var props = [];
for(var prop in data) {
if(data.hasOwnProperty(prop)) {
// We can pass a list [prop1name, prop1val, ...] to Skulpt's dict
// constructor ; however to work properly they need to be Skulpt
// primitives too
props.push(this._createPrimitive(prop));
props.push(this._createPrimitive(data[prop]));
}
}
if(props.length > 0) {
result = new Sk.builtin.dict(props);
}
}
return result;
};
this._onOutput = function (_output) {
that.print(_output);
};
this._onDebugOut = function (text) {
// console.log('DEBUG: ', text);
};
this._configure = function () {
Sk.configure({
output: this._onOutput,
debugout: this._onDebugOut,
read: this._builtinRead.bind(this),
yieldLimit: null,
execLimit: null,
debugging: true,
breakpoints: this._debugger.check_breakpoints.bind(this._debugger),
__future__: Sk.python3
});
Sk.pre = "edoutput";
Sk.pre = "codeoutput";
// Disable document library
delete Sk.builtinFiles["files"]["src/lib/document.js"];
this._definePythonNumber();
this.context.callCallback = this.noDelay.bind(this);
};
this.print = function (message, className) {
if (message.trim() === 'Program execution complete') {
this._onFinished();
}
if (message) {
//console.log('PRINT: ', message, className || '');
}
};
this._onFinished = function () {
this.finishedNodes[this.curNode] = true;
this.readyNodes[this.curNode] = false;
if(this.finishedNodes.indexOf(false) != -1) {
// At least one node is not finished
this.startNextNode(this.curNode);
} else {
// All nodes are finished, stop the execution
this.stop();
}
try {
this.context.infos.checkEndCondition(this.context, true);
} catch (e) {
this._onStepError(e);
}
};
this._builtinRead = function (x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
};
this.get_source_line = function (lineno) {
return this._code.split('\n')[lineno];
};
this._continue = function () {
if (this.context.infos.checkEndEveryTurn) {
try {
this.context.infos.checkEndCondition(context, false);
} catch(e) {
this._onStepError(e);
return;
}
}
if (!this.context.allowInfiniteLoop && this._steps >= this._maxIterations) {
this._onStepError(window.languageStrings.tooManyIterations);
} else if (!this.context.allowInfiniteLoop && this._stepsWithoutAction >= this._maxIterWithoutAction) {
this._onStepError(window.languageStrings.tooManyIterationsWithoutAction);
} else if (!this._paused && this._isRunning) {
this.step();
}
};
this.initCodes = function (codes) {
// For reportValue in Skulpt.
window.currentPythonRunner = this;
if(Sk.running) {
if(typeof Sk.runQueue === 'undefined') {
Sk.runQueue = [];
}
Sk.runQueue.push({ctrl: this, codes: codes});
return;
}
currentPythonContext = this.context;
this._debugger = new Sk.Debugger(this._editor_filename, this);
this._configure();
this._injectFunctions();
/**
* Add a last instruction at the end of the code so Skupt will generate a Suspension state
* for after the user's last instruction. Otherwise it would be impossible to retrieve the
* modifications made by the last user's line. For skulpt analysis.
*/
this._code = codes[0] + "\npass";
this._setBreakpoint(1, false);
if(typeof this.context.infos.maxIter !== 'undefined') {
this._maxIterations = Math.ceil(this.context.infos.maxIter/10);
}
if(typeof this.context.infos.maxIterWithoutAction !== 'undefined') {
this._maxIterWithoutAction = Math.ceil(this.context.infos.maxIterWithoutAction/10);
}
if(!this._hasActions) {
// No limit on
this._maxIterWithoutAction = this._maxIterations;
}
var susp_handlers = {};
susp_handlers["*"] = this._debugger.suspension_handler.bind(this);
this.nbNodes = codes.length;
this.curNode = 0;
context.setCurNode(this.curNode);
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
for(var i = 0; i < codes.length ; i++) {
this.readyNodes.push(true);
this.finishedNodes.push(false);
try {
var promise = this._debugger.asyncToPromise(this._asyncCallback(this._editor_filename, codes[i]), susp_handlers, this._debugger);
promise.then(this._debugger.success.bind(this._debugger), this._debugger.error.bind(this._debugger));
} catch (e) {
this._onOutput(e.toString() + "\n");
}
this.nodeStates.push(this._debugger.suspension_stack);
this._debugger.suspension_stack = [];
}
this._debugger.suspension_stack = this.nodeStates[0];
this._resetInterpreterState();
Sk.running = true;
this._isRunning = true;
};
this.run = function () {
if(this.stepMode) {
this._paused = this._stepInProgress;
this.stepMode = false;
}
this._setTimeout(this._continue.bind(this), 100);
};
this.runCodes = function(codes) {
this.initCodes(codes);
this.run();
};
this.runStep = function (resolve, reject) {
this.stepMode = true;
if (this._isRunning && !this._stepInProgress) {
this.step(resolve, reject);
}
};
this.nbRunning = function () {
return this._isRunning ? 1 : 0;
};
this.removeEditorMarker = function () {
var editor = this.context.blocklyHelper._aceEditor;
if(editor && this._editorMarker)
|
};
this.unSkulptValue = function (origValue) {
// Transform a value, possibly a Skulpt one, into a printable value
if(typeof origValue !== 'object' || origValue === null) {
var value = origValue;
} else if(origValue.constructor === Sk.builtin.dict) {
var keys = Object.keys(origValue);
var dictElems = [];
for(var i=0; i<keys.length; i++) {
if(keys[i] == 'size' || keys[i] == '__class__'
|| !origValue[keys[i]].items
|| !origValue[keys[i]].items[0]) {
continue;
}
var items = origValue[keys[i]].items[0];
dictElems.push('' + this.unSkulptValue(items.lhs) + ': ' + this.unSkulptValue(items.rhs));
}
var value = '{' + dictElems.join(',' ) + '}';
} else if(origValue.constructor === Sk.builtin.list) {
var oldArray = origValue.v;
var newArray = [];
for(var i=0; i<oldArray.length; i++) {
newArray.push(this.unSkulptValue(oldArray[i]));
}
var value = '[' + newArray.join(', ') + ']';
} else if(origValue.v !== undefined) {
var value = origValue.v;
if(typeof value == 'string') {
value = '"' + value + '"';
}
} else if(typeof origValue == 'object') {
var value = origValue;
}
return value;
};
this.reportValue = function (origValue, varName) {
// Show a popup displaying the value of a block in step-by-step mode
if(origValue === undefined
|| (origValue && origValue.constructor === Sk.builtin.func)
|| !this._editorMarker
|| !context.display
|| !this.stepMode) {
return origValue;
}
var value = this.unSkulptValue(origValue);
var highlighted = $('.aceHighlight');
if(highlighted.length == 0) {
return origValue;
} else if(highlighted.find('.ace_start').length > 0) {
var target = highlighted.find('.ace_start')[0];
} else {
var target = highlighted[0];
}
var bbox = target.getBoundingClientRect();
var leftPos = bbox.left+10;
var topPos = bbox.top-14;
if(typeof value == 'boolean') {
var displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
} else if(value === null) {
var displayStr = "None"
} else {
var displayStr = value.toString();
}
if(typeof value == 'boolean') {
displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
}
if(varName) {
displayStr = '' + varName + ' = ' + displayStr;
}
var dropDownDiv = '' +
'<div class="blocklyDropDownDiv" style="transition: transform 0.25s, opacity 0.25s; background-color: rgb(255, 255, 255); border-color: rgb(170, 170, 170); left: '+leftPos+'px; top: '+topPos+'px; display: block; opacity: 1; transform: translate(0px, -20px);">' +
' <div class="blocklyDropDownContent">' +
' <div class="valueReportBox">' +
displayStr +
' </div>' +
' </div>' +
' <div class="blocklyDropDownArrow arrowBottom" style="transform: translate(22px, 15px) rotate(45deg);"></div>' +
'</div>';
$('.blocklyDropDownDiv').remove();
$('body').append(dropDownDiv);
return origValue;
};
this.stop = function () {
for (var i = 0; i < this._timeouts.length; i += 1) {
window.clearTimeout(this._timeouts[i]);
}
this._timeouts = [];
this.removeEditorMarker();
if(Sk.runQueue) {
for (var i=0; i<Sk.runQueue.length; i++) {
if(Sk.runQueue[i].ctrl === this) {
Sk.runQueue.splice(i, 1);
i--;
}
}
}
if(window.quickAlgoInterface) {
window.quickAlgoInterface.setPlayPause(false);
}
this._resetInterpreterState();
};
this.isRunning = function () {
return this._isRunning;
};
this._resetInterpreterState = function () {
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = 0;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._isRunning = false;
this._resetDone = false;
this.stepMode = false;
this._stepInProgress = false;
this._resetCallstackOnNextStep = false;
this._paused = false;
this.waitingOnReadyNode = false;
Sk.running = false;
if(Sk.runQueue && Sk.runQueue.length > 0) {
var nextExec = Sk.runQueue.shift();
setTimeout(function () { nextExec.ctrl.runCodes(nextExec.codes); }, 100);
}
};
this._resetCallstack = function () {
if (this._resetCallstackOnNextStep) {
this._resetCallstackOnNextStep = false;
this._debugger.suspension_stack.pop();
}
};
this.reset = function() {
if(this._resetDone) { return; }
if(this.isRunning()) {
this.stop();
}
this.context.reset();
this._resetDone = true;
};
this.step = function (resolve, reject) {
this._resetCallstack();
this._stepInProgress = true;
var editor = this.context.blocklyHelper._aceEditor;
var markDelay = this.context.infos ? Math.floor(this.context.infos.actionDelay/4) : 0;
if(this.context.display && (this.stepMode || markDelay > 30)) {
var curSusp = this._debugger.suspension_stack[this._debugger.suspension_stack.length-1];
if(curSusp && curSusp.$lineno) {
this.removeEditorMarker();
var splitCode = this._code.split(/[\r\n]/);
var Range = ace.require('ace/range').Range;
this._editorMarker = editor.session.addMarker(
new Range(curSusp.$lineno-1, curSusp.$colno, curSusp.$lineno, 0),
"aceHighlight",
"line");
}
} else {
this.removeEditorMarker();
}
var stepDelay = 0;
if(!this.stepMode && this.context.allowInfiniteLoop) {
// Add a delay in infinite loops to avoid using all CPU
if(this._allowStepsWithoutDelay > 0) {
// We just had a waitDelay, don't delay further
this._allowStepsWithoutDelay -= 1;
} else {
stepDelay = 10;
}
}
var realStepDelay = markDelay + stepDelay;
if(realStepDelay > 0) {
this._paused = true;
var self = this;
setTimeout(function() {
self.realStep(resolve, reject);
}, realStepDelay);
} else {
this.realStep(resolve, reject);
}
};
this.realStep = function (resolve, reject) {
this._paused = this.stepMode;
this._debugger.enable_step_mode();
this._debugger.resume.call(this._debugger, resolve, reject);
this._steps += 1;
if(this._lastNbActions != this._nbActions) {
this._lastNbActions = this._nbActions;
this._stepsWithoutAction = 0;
} else {
this._stepsWithoutAction += 1;
}
};
this._onStepSuccess = function (callback) {
// If there are still timeouts, there's still a step in progress
this._stepInProgress = !!this._timeouts.length;
this._continue();
if (typeof callback === 'function') {
callback();
}
};
this._onStepError = function (message, callback) {
context.onExecutionEnd && context.onExecutionEnd();
// We always get there, even on a success
this.stop();
message = '' + message;
// Skulpt doesn't support well NoneTypes
if(message.indexOf("TypeError: Cannot read property") > -1 && message.indexOf("undefined") > -1) {
message = message.replace(/^.* line/, "TypeError: NoneType value used in operation on line");
}
if(message.indexOf('undefined') > -1) {
message += '. ' + window.languageStrings.undefinedMsg;
}
// Transform message depending on whether we successfully
if(this.context.success) {
message = "<span style='color:green;font-weight:bold'>" + message + "</span>";
} else {
message = this.context.messagePrefixFailure + message;
}
this.messageCallback(message);
if (typeof callback === 'function') {
callback();
}
};
this._setBreakpoint = function (bp, isTemporary) {
this._debugger.add_breakpoint(this._editor_filename + ".py", bp, "0", isTemporary);
};
this._asyncCallback = function (editor_filename, code) {
var dumpJS = false;
return function() {
return Sk.importMainWithBody(editor_filename, dumpJS, code, true);
};
};
this.signalAction = function () {
// Allows a context to signal an "action" happened
this._stepsWithoutAction = 0;
};
}
// Factory entry point used by the task framework to create a Python runner.
function initBlocklyRunner(context, msgCallback) {
  return new PythonInterpreter(context, msgCallback);
};
|
{
editor.session.removeMarker(this._editorMarker);
this._editorMarker = null;
}
|
conditional_block
|
python_runner_1.5.js
|
/*
python_runner:
Python code runner.
*/
// Context of the Python program currently being run; the generated library
// modules call back into it (see _skulptifyHandler).
var currentPythonContext = null;
// Skulpt-based Python code runner driving step-by-step execution of one or
// more code "nodes" against a task context.
function PythonInterpreter(context, msgCallback) {
  this.context = context;
  this.messageCallback = msgCallback;
  this._code = '';
  this._editor_filename = "<stdin>";
  this.context.runner = this;
  this._maxIterations = 4000;          // overall step budget (checked in _continue)
  this._maxIterWithoutAction = 50;     // step budget between context actions
  this._resetCallstackOnNextStep = false;
  this._paused = false;
  this._isRunning = false;
  this._stepInProgress = false;
  this._resetDone = true;
  this.stepMode = false;
  this._steps = 0;
  this._stepsWithoutAction = 0;
  this._lastNbActions = null;
  this._hasActions = false;
  this._nbActions = 0;
  this._allowStepsWithoutDelay = 0;
  this._timeouts = [];                 // pending timer ids (see _setTimeout)
  this._editorMarker = null;           // Ace highlight marker id, if any
  this.availableModules = [];
  this._argumentsByBlock = {};
  this._definedFunctions = [];
  // State for multi-node execution (one node per concurrently-run code unit).
  this.nbNodes = 0;
  this.curNode = 0;
  this.readyNodes = [];
  this.finishedNodes = [];
  this.nodeStates = [];
  this.waitingOnReadyNode = false;
  var that = this;
this._skulptifyHandler = function (name, generatorName, blockName, nbArgs, type) {
if(!arrayContains(this._definedFunctions, name)) { this._definedFunctions.push(name); }
var handler = '';
handler += "\tcurrentPythonContext.runner.checkArgs('" + name + "', '" + generatorName + "', '" + blockName + "', arguments);";
handler += "\n\tvar susp = new Sk.misceval.Suspension();";
handler += "\n\tvar result = Sk.builtin.none.none$;";
// If there are arguments, convert them from Skulpt format to the libs format
handler += "\n\tvar args = Array.prototype.slice.call(arguments);";
handler += "\n\tfor(var i=0; i<args.length; i++) { args[i] = currentPythonContext.runner.skToJs(args[i]); };";
handler += "\n\tsusp.resume = function() { return result; };";
handler += "\n\tsusp.data = {type: 'Sk.promise', promise: new Promise(function(resolve) {";
handler += "\n\targs.push(resolve);";
// Count actions
if(type == 'actions') {
handler += "\n\tcurrentPythonContext.runner._nbActions += 1;";
}
handler += "\n\ttry {";
handler += '\n\t\tcurrentPythonContext["' + generatorName + '"]["' + blockName + '"].apply(currentPythonContext, args);';
handler += "\n\t} catch (e) {";
handler += "\n\t\tcurrentPythonContext.runner._onStepError(e)}";
handler += '\n\t}).then(function (value) {\nresult = value;\nreturn value;\n })};';
handler += '\n\treturn susp;';
return '\nmod.' + name + ' = new Sk.builtin.func(function () {\n' + handler + '\n});\n';
};
this._skulptifyValue = function(value) {
if(typeof value === "number") {
var handler = 'Sk.builtin.int_(' + value + ')';
} else if(typeof value === "boolean") {
var handler = 'Sk.builtin.bool(' + value.toString() + ')';
} else if(typeof value === "string") {
var handler = 'Sk.builtin.str(' + JSON.stringify(value) + ')';
} else if(Array.isArray(value)) {
var list = [];
for(var i=0; i<value.length; i++) {
list.push(this._skulptifyValue(value[i]));
}
var handler = 'Sk.builtin.list([' + list.join(',') + '])';
} else {
throw "Unable to translate value '" + value + "' into a Skulpt constant.";
}
return 'new ' + handler;
}
this._skulptifyConst = function(name, value) {
var handler = this._skulptifyValue(value);
return '\nmod.' + name + ' = ' + handler + ';\n';
};
this._injectFunctions = function () {
// Generate Python custom libraries from all generated blocks
this._definedFunctions = [];
if(this.context.infos && this.context.infos.includeBlocks && this.context.infos.includeBlocks.generatedBlocks) {
// Flatten customBlocks information for easy access
var blocksInfos = {};
for (var generatorName in this.context.customBlocks) {
for (var typeName in this.context.customBlocks[generatorName]) {
var blockList = this.context.customBlocks[generatorName][typeName];
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockInfo = blockList[iBlock];
blocksInfos[blockInfo.name] = {
nbArgs: 0, // handled below
type: typeName};
blocksInfos[blockInfo.name].nbsArgs = [];
if(blockInfo.anyArgs) {
// Allows to specify the function can accept any number of arguments
blocksInfos[blockInfo.name].nbsArgs.push(Infinity);
}
var variants = blockInfo.variants ? blockInfo.variants : (blockInfo.params ? [blockInfo.params] : []);
if(variants.length) {
for(var i=0; i < variants.length; i++) {
blocksInfos[blockInfo.name].nbsArgs.push(variants[i].length);
}
}
}
}
}
// Generate functions used in the task
for (var generatorName in this.context.infos.includeBlocks.generatedBlocks) {
var blockList = this.context.infos.includeBlocks.generatedBlocks[generatorName];
if(!blockList.length) { continue; }
var modContents = "var $builtinmodule = function (name) {\n\nvar mod = {};\nmod.__package__ = Sk.builtin.none.none$;\n";
if(!this._argumentsByBlock[generatorName]) {
this._argumentsByBlock[generatorName] = {};
}
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockName = blockList[iBlock];
var code = this.context.strings.code[blockName];
if (typeof(code) == "undefined") {
code = blockName;
}
var nbsArgs = blocksInfos[blockName] ? (blocksInfos[blockName].nbsArgs ? blocksInfos[blockName].nbsArgs : []) : [];
var type = blocksInfos[blockName] ? blocksInfos[blockName].type : 'actions';
if(type == 'actions') {
this._hasActions = true;
}
this._argumentsByBlock[generatorName][blockName] = nbsArgs;
modContents += this._skulptifyHandler(code, generatorName, blockName, nbsArgs, type);
}
// TODO :: allow selection of constants available in a task
// if(this.context.infos.includeBlocks.constants && this.context.infos.includeBlocks.constants[generatorName]) {
if(this.context.customConstants && this.context.customConstants[generatorName]) {
var constList = this.context.customConstants[generatorName];
for(var iConst=0; iConst < constList.length; iConst++) {
var name = constList[iConst].name;
if(this.context.strings.constant && this.context.strings.constant[name]) {
name = this.context.strings.constant[name];
}
modContents += this._skulptifyConst(name, constList[iConst].value)
}
}
modContents += "\nreturn mod;\n};";
Sk.builtinFiles["files"]["src/lib/"+generatorName+".js"] = modContents;
this.availableModules.push(generatorName);
}
}
};
this.checkArgs = function (name, generatorName, blockName, args) {
// Check the number of arguments corresponds to a variant of the function
if(!this._argumentsByBlock[generatorName] || !this._argumentsByBlock[generatorName][blockName]) {
console.error("Couldn't find the number of arguments for " + generatorName + "/" + blockName + ".");
return;
}
var nbsArgs = this._argumentsByBlock[generatorName][blockName];
if(nbsArgs.length == 0) {
// This function doesn't have arguments
if(args.length > 0) {
msg = name + "() takes no arguments (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
} else if(nbsArgs.indexOf(args.length) == -1 && nbsArgs.indexOf(Infinity) == -1) {
var minArgs = nbsArgs[0];
var maxArgs = nbsArgs[0];
for(var i=1; i < nbsArgs.length; i++) {
minArgs = Math.min(minArgs, nbsArgs[i]);
maxArgs = Math.max(maxArgs, nbsArgs[i]);
}
if (minArgs === maxArgs) {
msg = name + "() takes exactly " + minArgs + " arguments";
} else if (args.length < minArgs) {
msg = name + "() takes at least " + minArgs + " arguments";
} else if (args.length > maxArgs){
msg = name + "() takes at most " + maxArgs + " arguments";
} else {
msg = name + "() doesn't have a variant accepting this number of arguments";
}
msg += " (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
};
this._definePythonNumber = function() {
// Create a class which behaves as a Number, but can have extra properties
this.pythonNumber = function(val) {
this.val = new Number(val);
}
this.pythonNumber.prototype = Object.create(Number.prototype);
function makePrototype(func) {
return function() { return Number.prototype[func].call(this.val); }
}
var funcs = ['toExponential', 'toFixed', 'toLocaleString', 'toPrecision', 'toSource', 'toString', 'valueOf'];
for(var i = 0; i < funcs.length ; i++) {
this.pythonNumber.prototype[funcs[i]] = makePrototype(funcs[i]);
}
}
this.skToJs = function(val) {
// Convert Skulpt item to JavaScript
// TODO :: Might be partly replaceable with Sk.ffi.remapToJs
if(val instanceof Sk.builtin.bool) {
return val.v ? true : false;
} else if(val instanceof Sk.builtin.func) {
return function() {
var args = [];
for(var i = 0; i < arguments.length; i++) {
args.push(that._createPrimitive(arguments[i]));
}
var retp = new Promise(function(resolve, reject) {
var p = Sk.misceval.asyncToPromise(function() { return val.tp$call(args); });
p.then(function(val) { resolve(that.skToJs(val)); });
});
return retp;
}
} else if(val instanceof Sk.builtin.dict) {
var dictKeys = Object.keys(val);
var retVal = {};
for(var i = 0; i < dictKeys.length; i++) {
var key = dictKeys[i];
if(key == 'size' || key == '__class__') { continue; }
var subItems = val[key].items;
for(var j = 0; j < subItems.length; j++) {
var subItem = subItems[j];
retVal[subItem.lhs.v] = this.skToJs(subItem.rhs);
}
}
return retVal;
} else {
var retVal = val.v;
if(val instanceof Sk.builtin.tuple || val instanceof Sk.builtin.list) {
retVal = [];
for(var i = 0; i < val.v.length; i++) {
retVal[i] = this.skToJs(val.v[i]);
}
}
if(val instanceof Sk.builtin.tuple) {
retVal.isTuple = true;
}
if(val instanceof Sk.builtin.float_) {
retVal = new this.pythonNumber(retVal);
retVal.isFloat = true;
}
return retVal;
}
};
this.getDefinedFunctions = function() {
this._injectFunctions();
return this._definedFunctions.slice();
};
this._setTimeout = function(func, time) {
var timeoutId = null;
var that = this;
function wrapper() {
var idx = that._timeouts.indexOf(timeoutId);
if(idx > -1) { that._timeouts.splice(idx, 1); }
func();
}
timeoutId = window.setTimeout(wrapper, time);
this._timeouts.push(timeoutId);
}
this.waitDelay = function (callback, value, delay) {
this._paused = true;
if (delay > 0) {
var _noDelay = this.noDelay.bind(this, callback, value);
this._setTimeout(_noDelay, delay);
// We just waited some time, allow next steps to not be delayed
this._allowStepsWithoutDelay = Math.min(this._allowStepsWithoutDelay + Math.ceil(delay / 10), 100);
} else {
this.noDelay(callback, value);
}
};
this.waitEvent = function (callback, target, eventName, func) {
this._paused = true;
var listenerFunc = null;
var that = this;
listenerFunc = function(e) {
target.removeEventListener(eventName, listenerFunc);
that.noDelay(callback, func(e));
};
target.addEventListener(eventName, listenerFunc);
};
this.waitCallback = function (callback) {
// Returns a callback to be called once we can continue the execution
this._paused = true;
var that = this;
return function(value) {
that.noDelay(callback, value);
};
};
this.noDelay = function (callback, value) {
var primitive = this._createPrimitive(value);
if (primitive !== Sk.builtin.none.none$) {
// Apparently when we create a new primitive, the debugger adds a call to
// the callstack.
this._resetCallstackOnNextStep = true;
this.reportValue(value);
}
this._paused = false;
callback(primitive);
this._setTimeout(this._continue.bind(this), 10);
};
this.allowSwitch = function(callback) {
// Tells the runner that we can switch the execution to another node
var curNode = context.curNode;
var ready = function(readyCallback) {
that.readyNodes[curNode] = function() {
readyCallback(callback);
};
if(that.waitingOnReadyNode) {
that.waitingOnReadyNode = false;
that.startNode(that.curNode, curNode);
}
};
this.readyNodes[curNode] = false;
this.startNextNode(curNode);
return ready;
};
this.defaultSelectNextNode = function(runner, previousNode) {
var i = previousNode + 1;
if(i >= runner.nbNodes) { i = 0; }
do {
if(runner.readyNodes[i]) {
break;
} else {
i++;
}
if(i >= runner.nbNodes) { i = 0; }
} while(i != previousNode);
return i;
};
// Allow the next node selection process to be customized
this.selectNextNode = this.defaultSelectNextNode;
this.startNextNode = function(curNode) {
// Start the next node when one has been switched from
var newNode = this.selectNextNode(this, curNode);
this._paused = true;
if(newNode == curNode) {
// No ready node
this.waitingOnReadyNode = true;
} else {
// TODO :: switch execution
this.startNode(curNode, newNode);
}
};
this.startNode = function(curNode, newNode) {
setTimeout(function() {
that.nodeStates[curNode] = that._debugger.suspension_stack.slice();
that._debugger.suspension_stack = that.nodeStates[newNode];
that.curNode = newNode;
var ready = that.readyNodes[newNode];
if(ready) {
that.readyNodes[newNode] = false;
context.setCurNode(newNode);
if(typeof ready == 'function') {
ready();
} else {
that._paused = false;
that._continue();
}
} else {
that.waitingOnReadyNode = true;
}
}, 0);
};
this._createPrimitive = function (data) {
// TODO :: Might be replaceable with Sk.ffi.remapToPy
if (data === undefined || data === null) {
return Sk.builtin.none.none$; // Reuse the same object.
}
var type = typeof data;
var result = {v: data}; // Emulate a Skulpt object as default
if (type === 'number') {
if(Math.floor(data) == data) { // isInteger isn't supported by IE
result = new Sk.builtin.int_(data);
} else {
result = new Sk.builtin.float_(data);
}
} else if (type === 'string') {
result = new Sk.builtin.str(data);
} else if (type === 'boolean') {
result = new Sk.builtin.bool(data);
} else if (typeof data.length != 'undefined') {
var skl = [];
for(var i = 0; i < data.length; i++) {
skl.push(this._createPrimitive(data[i]));
}
result = new Sk.builtin.list(skl);
} else if (data) {
// Create a dict if it's an object with properties
var props = [];
for(var prop in data) {
if(data.hasOwnProperty(prop)) {
// We can pass a list [prop1name, prop1val, ...] to Skulpt's dict
// constructor ; however to work properly they need to be Skulpt
// primitives too
props.push(this._createPrimitive(prop));
props.push(this._createPrimitive(data[prop]));
}
}
if(props.length > 0) {
result = new Sk.builtin.dict(props);
}
}
return result;
};
this._onOutput = function (_output) {
that.print(_output);
};
this._onDebugOut = function (text) {
// console.log('DEBUG: ', text);
};
this._configure = function () {
Sk.configure({
output: this._onOutput,
debugout: this._onDebugOut,
read: this._builtinRead.bind(this),
yieldLimit: null,
execLimit: null,
debugging: true,
breakpoints: this._debugger.check_breakpoints.bind(this._debugger),
__future__: Sk.python3
});
Sk.pre = "edoutput";
Sk.pre = "codeoutput";
// Disable document library
delete Sk.builtinFiles["files"]["src/lib/document.js"];
this._definePythonNumber();
this.context.callCallback = this.noDelay.bind(this);
};
this.print = function (message, className) {
if (message.trim() === 'Program execution complete') {
this._onFinished();
}
if (message) {
//console.log('PRINT: ', message, className || '');
}
};
this._onFinished = function () {
this.finishedNodes[this.curNode] = true;
this.readyNodes[this.curNode] = false;
if(this.finishedNodes.indexOf(false) != -1) {
// At least one node is not finished
this.startNextNode(this.curNode);
} else {
// All nodes are finished, stop the execution
this.stop();
}
try {
this.context.infos.checkEndCondition(this.context, true);
} catch (e) {
this._onStepError(e);
}
};
this._builtinRead = function (x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
};
this.get_source_line = function (lineno) {
return this._code.split('\n')[lineno];
};
this._continue = function () {
if (this.context.infos.checkEndEveryTurn) {
try {
this.context.infos.checkEndCondition(context, false);
} catch(e) {
this._onStepError(e);
return;
}
}
if (!this.context.allowInfiniteLoop && this._steps >= this._maxIterations) {
this._onStepError(window.languageStrings.tooManyIterations);
} else if (!this.context.allowInfiniteLoop && this._stepsWithoutAction >= this._maxIterWithoutAction) {
this._onStepError(window.languageStrings.tooManyIterationsWithoutAction);
} else if (!this._paused && this._isRunning) {
this.step();
}
};
this.initCodes = function (codes) {
// For reportValue in Skulpt.
window.currentPythonRunner = this;
if(Sk.running) {
if(typeof Sk.runQueue === 'undefined') {
Sk.runQueue = [];
}
Sk.runQueue.push({ctrl: this, codes: codes});
return;
}
currentPythonContext = this.context;
this._debugger = new Sk.Debugger(this._editor_filename, this);
this._configure();
this._injectFunctions();
/**
* Add a last instruction at the end of the code so Skupt will generate a Suspension state
* for after the user's last instruction. Otherwise it would be impossible to retrieve the
* modifications made by the last user's line. For skulpt analysis.
*/
this._code = codes[0] + "\npass";
this._setBreakpoint(1, false);
if(typeof this.context.infos.maxIter !== 'undefined') {
this._maxIterations = Math.ceil(this.context.infos.maxIter/10);
}
if(typeof this.context.infos.maxIterWithoutAction !== 'undefined') {
this._maxIterWithoutAction = Math.ceil(this.context.infos.maxIterWithoutAction/10);
}
if(!this._hasActions) {
// No limit on
this._maxIterWithoutAction = this._maxIterations;
}
var susp_handlers = {};
susp_handlers["*"] = this._debugger.suspension_handler.bind(this);
this.nbNodes = codes.length;
this.curNode = 0;
context.setCurNode(this.curNode);
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
for(var i = 0; i < codes.length ; i++) {
this.readyNodes.push(true);
this.finishedNodes.push(false);
try {
var promise = this._debugger.asyncToPromise(this._asyncCallback(this._editor_filename, codes[i]), susp_handlers, this._debugger);
promise.then(this._debugger.success.bind(this._debugger), this._debugger.error.bind(this._debugger));
} catch (e) {
this._onOutput(e.toString() + "\n");
}
this.nodeStates.push(this._debugger.suspension_stack);
this._debugger.suspension_stack = [];
}
this._debugger.suspension_stack = this.nodeStates[0];
this._resetInterpreterState();
Sk.running = true;
this._isRunning = true;
};
this.run = function () {
if(this.stepMode) {
this._paused = this._stepInProgress;
this.stepMode = false;
}
this._setTimeout(this._continue.bind(this), 100);
};
this.runCodes = function(codes) {
this.initCodes(codes);
this.run();
};
this.runStep = function (resolve, reject) {
this.stepMode = true;
if (this._isRunning && !this._stepInProgress) {
this.step(resolve, reject);
}
};
this.nbRunning = function () {
return this._isRunning ? 1 : 0;
};
this.removeEditorMarker = function () {
var editor = this.context.blocklyHelper._aceEditor;
if(editor && this._editorMarker) {
editor.session.removeMarker(this._editorMarker);
this._editorMarker = null;
}
};
this.unSkulptValue = function (origValue) {
// Transform a value, possibly a Skulpt one, into a printable value
if(typeof origValue !== 'object' || origValue === null) {
var value = origValue;
} else if(origValue.constructor === Sk.builtin.dict) {
var keys = Object.keys(origValue);
var dictElems = [];
for(var i=0; i<keys.length; i++) {
if(keys[i] == 'size' || keys[i] == '__class__'
|| !origValue[keys[i]].items
|| !origValue[keys[i]].items[0]) {
continue;
}
var items = origValue[keys[i]].items[0];
dictElems.push('' + this.unSkulptValue(items.lhs) + ': ' + this.unSkulptValue(items.rhs));
}
var value = '{' + dictElems.join(',' ) + '}';
} else if(origValue.constructor === Sk.builtin.list) {
var oldArray = origValue.v;
var newArray = [];
for(var i=0; i<oldArray.length; i++) {
newArray.push(this.unSkulptValue(oldArray[i]));
}
var value = '[' + newArray.join(', ') + ']';
} else if(origValue.v !== undefined) {
var value = origValue.v;
if(typeof value == 'string') {
value = '"' + value + '"';
}
} else if(typeof origValue == 'object') {
var value = origValue;
}
return value;
};
this.reportValue = function (origValue, varName) {
// Show a popup displaying the value of a block in step-by-step mode
if(origValue === undefined
|| (origValue && origValue.constructor === Sk.builtin.func)
|| !this._editorMarker
|| !context.display
|| !this.stepMode) {
return origValue;
}
var value = this.unSkulptValue(origValue);
var highlighted = $('.aceHighlight');
if(highlighted.length == 0) {
return origValue;
} else if(highlighted.find('.ace_start').length > 0) {
var target = highlighted.find('.ace_start')[0];
} else {
var target = highlighted[0];
}
var bbox = target.getBoundingClientRect();
var leftPos = bbox.left+10;
var topPos = bbox.top-14;
if(typeof value == 'boolean') {
var displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
} else if(value === null) {
var displayStr = "None"
} else {
var displayStr = value.toString();
}
if(typeof value == 'boolean') {
displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
}
if(varName) {
displayStr = '' + varName + ' = ' + displayStr;
}
var dropDownDiv = '' +
'<div class="blocklyDropDownDiv" style="transition: transform 0.25s, opacity 0.25s; background-color: rgb(255, 255, 255); border-color: rgb(170, 170, 170); left: '+leftPos+'px; top: '+topPos+'px; display: block; opacity: 1; transform: translate(0px, -20px);">' +
' <div class="blocklyDropDownContent">' +
' <div class="valueReportBox">' +
displayStr +
' </div>' +
' </div>' +
' <div class="blocklyDropDownArrow arrowBottom" style="transform: translate(22px, 15px) rotate(45deg);"></div>' +
'</div>';
$('.blocklyDropDownDiv').remove();
$('body').append(dropDownDiv);
return origValue;
};
this.stop = function () {
for (var i = 0; i < this._timeouts.length; i += 1) {
window.clearTimeout(this._timeouts[i]);
}
this._timeouts = [];
this.removeEditorMarker();
if(Sk.runQueue) {
for (var i=0; i<Sk.runQueue.length; i++) {
if(Sk.runQueue[i].ctrl === this) {
Sk.runQueue.splice(i, 1);
i--;
}
}
}
if(window.quickAlgoInterface) {
window.quickAlgoInterface.setPlayPause(false);
}
this._resetInterpreterState();
};
this.isRunning = function () {
return this._isRunning;
};
this._resetInterpreterState = function () {
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = 0;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._isRunning = false;
this._resetDone = false;
this.stepMode = false;
this._stepInProgress = false;
this._resetCallstackOnNextStep = false;
this._paused = false;
this.waitingOnReadyNode = false;
Sk.running = false;
if(Sk.runQueue && Sk.runQueue.length > 0) {
var nextExec = Sk.runQueue.shift();
setTimeout(function () { nextExec.ctrl.runCodes(nextExec.codes); }, 100);
}
};
this._resetCallstack = function () {
if (this._resetCallstackOnNextStep) {
this._resetCallstackOnNextStep = false;
this._debugger.suspension_stack.pop();
}
};
this.reset = function() {
if(this._resetDone) { return; }
if(this.isRunning()) {
this.stop();
}
this.context.reset();
this._resetDone = true;
};
this.step = function (resolve, reject) {
this._resetCallstack();
this._stepInProgress = true;
var editor = this.context.blocklyHelper._aceEditor;
var markDelay = this.context.infos ? Math.floor(this.context.infos.actionDelay/4) : 0;
if(this.context.display && (this.stepMode || markDelay > 30)) {
var curSusp = this._debugger.suspension_stack[this._debugger.suspension_stack.length-1];
if(curSusp && curSusp.$lineno) {
this.removeEditorMarker();
var splitCode = this._code.split(/[\r\n]/);
var Range = ace.require('ace/range').Range;
this._editorMarker = editor.session.addMarker(
new Range(curSusp.$lineno-1, curSusp.$colno, curSusp.$lineno, 0),
"aceHighlight",
"line");
}
} else {
this.removeEditorMarker();
}
var stepDelay = 0;
if(!this.stepMode && this.context.allowInfiniteLoop) {
// Add a delay in infinite loops to avoid using all CPU
if(this._allowStepsWithoutDelay > 0) {
// We just had a waitDelay, don't delay further
this._allowStepsWithoutDelay -= 1;
} else {
stepDelay = 10;
}
}
var realStepDelay = markDelay + stepDelay;
if(realStepDelay > 0) {
this._paused = true;
var self = this;
setTimeout(function() {
self.realStep(resolve, reject);
}, realStepDelay);
} else {
this.realStep(resolve, reject);
}
};
this.realStep = function (resolve, reject) {
this._paused = this.stepMode;
this._debugger.enable_step_mode();
this._debugger.resume.call(this._debugger, resolve, reject);
this._steps += 1;
if(this._lastNbActions != this._nbActions) {
this._lastNbActions = this._nbActions;
this._stepsWithoutAction = 0;
} else {
this._stepsWithoutAction += 1;
}
};
this._onStepSuccess = function (callback) {
// If there are still timeouts, there's still a step in progress
this._stepInProgress = !!this._timeouts.length;
this._continue();
if (typeof callback === 'function') {
callback();
}
};
this._onStepError = function (message, callback) {
context.onExecutionEnd && context.onExecutionEnd();
// We always get there, even on a success
this.stop();
message = '' + message;
// Skulpt doesn't support well NoneTypes
if(message.indexOf("TypeError: Cannot read property") > -1 && message.indexOf("undefined") > -1) {
message = message.replace(/^.* line/, "TypeError: NoneType value used in operation on line");
}
if(message.indexOf('undefined') > -1) {
message += '. ' + window.languageStrings.undefinedMsg;
}
// Transform message depending on whether we successfully
if(this.context.success) {
message = "<span style='color:green;font-weight:bold'>" + message + "</span>";
} else {
message = this.context.messagePrefixFailure + message;
}
this.messageCallback(message);
if (typeof callback === 'function') {
callback();
}
};
this._setBreakpoint = function (bp, isTemporary) {
this._debugger.add_breakpoint(this._editor_filename + ".py", bp, "0", isTemporary);
};
this._asyncCallback = function (editor_filename, code) {
var dumpJS = false;
return function() {
return Sk.importMainWithBody(editor_filename, dumpJS, code, true);
};
};
this.signalAction = function () {
// Allows a context to signal an "action" happened
this._stepsWithoutAction = 0;
};
}
function
|
(context, msgCallback) {
return new PythonInterpreter(context, msgCallback);
};
|
initBlocklyRunner
|
identifier_name
|
python_runner_1.5.js
|
/*
python_runner:
Python code runner.
*/
var currentPythonContext = null;
function PythonInterpreter(context, msgCallback) {
this.context = context;
this.messageCallback = msgCallback;
this._code = '';
this._editor_filename = "<stdin>";
this.context.runner = this;
this._maxIterations = 4000;
this._maxIterWithoutAction = 50;
this._resetCallstackOnNextStep = false;
this._paused = false;
this._isRunning = false;
this._stepInProgress = false;
this._resetDone = true;
this.stepMode = false;
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = null;
this._hasActions = false;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._timeouts = [];
this._editorMarker = null;
this.availableModules = [];
this._argumentsByBlock = {};
this._definedFunctions = [];
this.nbNodes = 0;
this.curNode = 0;
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
this.waitingOnReadyNode = false;
var that = this;
this._skulptifyHandler = function (name, generatorName, blockName, nbArgs, type) {
if(!arrayContains(this._definedFunctions, name)) { this._definedFunctions.push(name); }
var handler = '';
handler += "\tcurrentPythonContext.runner.checkArgs('" + name + "', '" + generatorName + "', '" + blockName + "', arguments);";
handler += "\n\tvar susp = new Sk.misceval.Suspension();";
handler += "\n\tvar result = Sk.builtin.none.none$;";
// If there are arguments, convert them from Skulpt format to the libs format
handler += "\n\tvar args = Array.prototype.slice.call(arguments);";
handler += "\n\tfor(var i=0; i<args.length; i++) { args[i] = currentPythonContext.runner.skToJs(args[i]); };";
handler += "\n\tsusp.resume = function() { return result; };";
handler += "\n\tsusp.data = {type: 'Sk.promise', promise: new Promise(function(resolve) {";
handler += "\n\targs.push(resolve);";
// Count actions
if(type == 'actions') {
handler += "\n\tcurrentPythonContext.runner._nbActions += 1;";
}
handler += "\n\ttry {";
handler += '\n\t\tcurrentPythonContext["' + generatorName + '"]["' + blockName + '"].apply(currentPythonContext, args);';
handler += "\n\t} catch (e) {";
handler += "\n\t\tcurrentPythonContext.runner._onStepError(e)}";
handler += '\n\t}).then(function (value) {\nresult = value;\nreturn value;\n })};';
handler += '\n\treturn susp;';
return '\nmod.' + name + ' = new Sk.builtin.func(function () {\n' + handler + '\n});\n';
};
this._skulptifyValue = function(value) {
if(typeof value === "number") {
var handler = 'Sk.builtin.int_(' + value + ')';
} else if(typeof value === "boolean") {
var handler = 'Sk.builtin.bool(' + value.toString() + ')';
} else if(typeof value === "string") {
var handler = 'Sk.builtin.str(' + JSON.stringify(value) + ')';
} else if(Array.isArray(value)) {
var list = [];
for(var i=0; i<value.length; i++) {
list.push(this._skulptifyValue(value[i]));
}
var handler = 'Sk.builtin.list([' + list.join(',') + '])';
} else {
throw "Unable to translate value '" + value + "' into a Skulpt constant.";
}
return 'new ' + handler;
}
this._skulptifyConst = function(name, value) {
var handler = this._skulptifyValue(value);
return '\nmod.' + name + ' = ' + handler + ';\n';
};
this._injectFunctions = function () {
// Generate Python custom libraries from all generated blocks
this._definedFunctions = [];
if(this.context.infos && this.context.infos.includeBlocks && this.context.infos.includeBlocks.generatedBlocks) {
// Flatten customBlocks information for easy access
var blocksInfos = {};
for (var generatorName in this.context.customBlocks) {
for (var typeName in this.context.customBlocks[generatorName]) {
var blockList = this.context.customBlocks[generatorName][typeName];
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockInfo = blockList[iBlock];
blocksInfos[blockInfo.name] = {
nbArgs: 0, // handled below
type: typeName};
blocksInfos[blockInfo.name].nbsArgs = [];
if(blockInfo.anyArgs) {
// Allows to specify the function can accept any number of arguments
blocksInfos[blockInfo.name].nbsArgs.push(Infinity);
}
var variants = blockInfo.variants ? blockInfo.variants : (blockInfo.params ? [blockInfo.params] : []);
if(variants.length) {
for(var i=0; i < variants.length; i++) {
blocksInfos[blockInfo.name].nbsArgs.push(variants[i].length);
}
}
}
}
}
// Generate functions used in the task
for (var generatorName in this.context.infos.includeBlocks.generatedBlocks) {
var blockList = this.context.infos.includeBlocks.generatedBlocks[generatorName];
if(!blockList.length) { continue; }
var modContents = "var $builtinmodule = function (name) {\n\nvar mod = {};\nmod.__package__ = Sk.builtin.none.none$;\n";
if(!this._argumentsByBlock[generatorName]) {
this._argumentsByBlock[generatorName] = {};
}
for (var iBlock=0; iBlock < blockList.length; iBlock++) {
var blockName = blockList[iBlock];
var code = this.context.strings.code[blockName];
if (typeof(code) == "undefined") {
code = blockName;
}
var nbsArgs = blocksInfos[blockName] ? (blocksInfos[blockName].nbsArgs ? blocksInfos[blockName].nbsArgs : []) : [];
var type = blocksInfos[blockName] ? blocksInfos[blockName].type : 'actions';
if(type == 'actions') {
this._hasActions = true;
}
this._argumentsByBlock[generatorName][blockName] = nbsArgs;
modContents += this._skulptifyHandler(code, generatorName, blockName, nbsArgs, type);
}
// TODO :: allow selection of constants available in a task
// if(this.context.infos.includeBlocks.constants && this.context.infos.includeBlocks.constants[generatorName]) {
if(this.context.customConstants && this.context.customConstants[generatorName]) {
var constList = this.context.customConstants[generatorName];
for(var iConst=0; iConst < constList.length; iConst++) {
var name = constList[iConst].name;
if(this.context.strings.constant && this.context.strings.constant[name]) {
name = this.context.strings.constant[name];
}
modContents += this._skulptifyConst(name, constList[iConst].value)
}
}
modContents += "\nreturn mod;\n};";
Sk.builtinFiles["files"]["src/lib/"+generatorName+".js"] = modContents;
this.availableModules.push(generatorName);
}
}
};
this.checkArgs = function (name, generatorName, blockName, args) {
// Check the number of arguments corresponds to a variant of the function
if(!this._argumentsByBlock[generatorName] || !this._argumentsByBlock[generatorName][blockName]) {
console.error("Couldn't find the number of arguments for " + generatorName + "/" + blockName + ".");
return;
}
var nbsArgs = this._argumentsByBlock[generatorName][blockName];
if(nbsArgs.length == 0) {
// This function doesn't have arguments
if(args.length > 0) {
msg = name + "() takes no arguments (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
} else if(nbsArgs.indexOf(args.length) == -1 && nbsArgs.indexOf(Infinity) == -1) {
var minArgs = nbsArgs[0];
var maxArgs = nbsArgs[0];
for(var i=1; i < nbsArgs.length; i++) {
minArgs = Math.min(minArgs, nbsArgs[i]);
maxArgs = Math.max(maxArgs, nbsArgs[i]);
}
if (minArgs === maxArgs) {
msg = name + "() takes exactly " + minArgs + " arguments";
} else if (args.length < minArgs) {
msg = name + "() takes at least " + minArgs + " arguments";
} else if (args.length > maxArgs){
msg = name + "() takes at most " + maxArgs + " arguments";
} else {
msg = name + "() doesn't have a variant accepting this number of arguments";
}
msg += " (" + args.length + " given)";
throw new Sk.builtin.TypeError(msg);
}
};
this._definePythonNumber = function() {
// Create a class which behaves as a Number, but can have extra properties
this.pythonNumber = function(val) {
this.val = new Number(val);
}
this.pythonNumber.prototype = Object.create(Number.prototype);
function makePrototype(func)
|
var funcs = ['toExponential', 'toFixed', 'toLocaleString', 'toPrecision', 'toSource', 'toString', 'valueOf'];
for(var i = 0; i < funcs.length ; i++) {
this.pythonNumber.prototype[funcs[i]] = makePrototype(funcs[i]);
}
}
this.skToJs = function(val) {
// Convert Skulpt item to JavaScript
// TODO :: Might be partly replaceable with Sk.ffi.remapToJs
if(val instanceof Sk.builtin.bool) {
return val.v ? true : false;
} else if(val instanceof Sk.builtin.func) {
return function() {
var args = [];
for(var i = 0; i < arguments.length; i++) {
args.push(that._createPrimitive(arguments[i]));
}
var retp = new Promise(function(resolve, reject) {
var p = Sk.misceval.asyncToPromise(function() { return val.tp$call(args); });
p.then(function(val) { resolve(that.skToJs(val)); });
});
return retp;
}
} else if(val instanceof Sk.builtin.dict) {
var dictKeys = Object.keys(val);
var retVal = {};
for(var i = 0; i < dictKeys.length; i++) {
var key = dictKeys[i];
if(key == 'size' || key == '__class__') { continue; }
var subItems = val[key].items;
for(var j = 0; j < subItems.length; j++) {
var subItem = subItems[j];
retVal[subItem.lhs.v] = this.skToJs(subItem.rhs);
}
}
return retVal;
} else {
var retVal = val.v;
if(val instanceof Sk.builtin.tuple || val instanceof Sk.builtin.list) {
retVal = [];
for(var i = 0; i < val.v.length; i++) {
retVal[i] = this.skToJs(val.v[i]);
}
}
if(val instanceof Sk.builtin.tuple) {
retVal.isTuple = true;
}
if(val instanceof Sk.builtin.float_) {
retVal = new this.pythonNumber(retVal);
retVal.isFloat = true;
}
return retVal;
}
};
this.getDefinedFunctions = function() {
this._injectFunctions();
return this._definedFunctions.slice();
};
this._setTimeout = function(func, time) {
var timeoutId = null;
var that = this;
function wrapper() {
var idx = that._timeouts.indexOf(timeoutId);
if(idx > -1) { that._timeouts.splice(idx, 1); }
func();
}
timeoutId = window.setTimeout(wrapper, time);
this._timeouts.push(timeoutId);
}
this.waitDelay = function (callback, value, delay) {
this._paused = true;
if (delay > 0) {
var _noDelay = this.noDelay.bind(this, callback, value);
this._setTimeout(_noDelay, delay);
// We just waited some time, allow next steps to not be delayed
this._allowStepsWithoutDelay = Math.min(this._allowStepsWithoutDelay + Math.ceil(delay / 10), 100);
} else {
this.noDelay(callback, value);
}
};
this.waitEvent = function (callback, target, eventName, func) {
this._paused = true;
var listenerFunc = null;
var that = this;
listenerFunc = function(e) {
target.removeEventListener(eventName, listenerFunc);
that.noDelay(callback, func(e));
};
target.addEventListener(eventName, listenerFunc);
};
this.waitCallback = function (callback) {
// Returns a callback to be called once we can continue the execution
this._paused = true;
var that = this;
return function(value) {
that.noDelay(callback, value);
};
};
this.noDelay = function (callback, value) {
var primitive = this._createPrimitive(value);
if (primitive !== Sk.builtin.none.none$) {
// Apparently when we create a new primitive, the debugger adds a call to
// the callstack.
this._resetCallstackOnNextStep = true;
this.reportValue(value);
}
this._paused = false;
callback(primitive);
this._setTimeout(this._continue.bind(this), 10);
};
this.allowSwitch = function(callback) {
// Tells the runner that we can switch the execution to another node
var curNode = context.curNode;
var ready = function(readyCallback) {
that.readyNodes[curNode] = function() {
readyCallback(callback);
};
if(that.waitingOnReadyNode) {
that.waitingOnReadyNode = false;
that.startNode(that.curNode, curNode);
}
};
this.readyNodes[curNode] = false;
this.startNextNode(curNode);
return ready;
};
this.defaultSelectNextNode = function(runner, previousNode) {
var i = previousNode + 1;
if(i >= runner.nbNodes) { i = 0; }
do {
if(runner.readyNodes[i]) {
break;
} else {
i++;
}
if(i >= runner.nbNodes) { i = 0; }
} while(i != previousNode);
return i;
};
// Allow the next node selection process to be customized
this.selectNextNode = this.defaultSelectNextNode;
this.startNextNode = function(curNode) {
// Start the next node when one has been switched from
var newNode = this.selectNextNode(this, curNode);
this._paused = true;
if(newNode == curNode) {
// No ready node
this.waitingOnReadyNode = true;
} else {
// TODO :: switch execution
this.startNode(curNode, newNode);
}
};
this.startNode = function(curNode, newNode) {
setTimeout(function() {
that.nodeStates[curNode] = that._debugger.suspension_stack.slice();
that._debugger.suspension_stack = that.nodeStates[newNode];
that.curNode = newNode;
var ready = that.readyNodes[newNode];
if(ready) {
that.readyNodes[newNode] = false;
context.setCurNode(newNode);
if(typeof ready == 'function') {
ready();
} else {
that._paused = false;
that._continue();
}
} else {
that.waitingOnReadyNode = true;
}
}, 0);
};
this._createPrimitive = function (data) {
// TODO :: Might be replaceable with Sk.ffi.remapToPy
if (data === undefined || data === null) {
return Sk.builtin.none.none$; // Reuse the same object.
}
var type = typeof data;
var result = {v: data}; // Emulate a Skulpt object as default
if (type === 'number') {
if(Math.floor(data) == data) { // isInteger isn't supported by IE
result = new Sk.builtin.int_(data);
} else {
result = new Sk.builtin.float_(data);
}
} else if (type === 'string') {
result = new Sk.builtin.str(data);
} else if (type === 'boolean') {
result = new Sk.builtin.bool(data);
} else if (typeof data.length != 'undefined') {
var skl = [];
for(var i = 0; i < data.length; i++) {
skl.push(this._createPrimitive(data[i]));
}
result = new Sk.builtin.list(skl);
} else if (data) {
// Create a dict if it's an object with properties
var props = [];
for(var prop in data) {
if(data.hasOwnProperty(prop)) {
// We can pass a list [prop1name, prop1val, ...] to Skulpt's dict
// constructor ; however to work properly they need to be Skulpt
// primitives too
props.push(this._createPrimitive(prop));
props.push(this._createPrimitive(data[prop]));
}
}
if(props.length > 0) {
result = new Sk.builtin.dict(props);
}
}
return result;
};
this._onOutput = function (_output) {
that.print(_output);
};
this._onDebugOut = function (text) {
// console.log('DEBUG: ', text);
};
this._configure = function () {
Sk.configure({
output: this._onOutput,
debugout: this._onDebugOut,
read: this._builtinRead.bind(this),
yieldLimit: null,
execLimit: null,
debugging: true,
breakpoints: this._debugger.check_breakpoints.bind(this._debugger),
__future__: Sk.python3
});
Sk.pre = "edoutput";
Sk.pre = "codeoutput";
// Disable document library
delete Sk.builtinFiles["files"]["src/lib/document.js"];
this._definePythonNumber();
this.context.callCallback = this.noDelay.bind(this);
};
this.print = function (message, className) {
if (message.trim() === 'Program execution complete') {
this._onFinished();
}
if (message) {
//console.log('PRINT: ', message, className || '');
}
};
this._onFinished = function () {
this.finishedNodes[this.curNode] = true;
this.readyNodes[this.curNode] = false;
if(this.finishedNodes.indexOf(false) != -1) {
// At least one node is not finished
this.startNextNode(this.curNode);
} else {
// All nodes are finished, stop the execution
this.stop();
}
try {
this.context.infos.checkEndCondition(this.context, true);
} catch (e) {
this._onStepError(e);
}
};
this._builtinRead = function (x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
};
this.get_source_line = function (lineno) {
return this._code.split('\n')[lineno];
};
this._continue = function () {
if (this.context.infos.checkEndEveryTurn) {
try {
this.context.infos.checkEndCondition(context, false);
} catch(e) {
this._onStepError(e);
return;
}
}
if (!this.context.allowInfiniteLoop && this._steps >= this._maxIterations) {
this._onStepError(window.languageStrings.tooManyIterations);
} else if (!this.context.allowInfiniteLoop && this._stepsWithoutAction >= this._maxIterWithoutAction) {
this._onStepError(window.languageStrings.tooManyIterationsWithoutAction);
} else if (!this._paused && this._isRunning) {
this.step();
}
};
this.initCodes = function (codes) {
// For reportValue in Skulpt.
window.currentPythonRunner = this;
if(Sk.running) {
if(typeof Sk.runQueue === 'undefined') {
Sk.runQueue = [];
}
Sk.runQueue.push({ctrl: this, codes: codes});
return;
}
currentPythonContext = this.context;
this._debugger = new Sk.Debugger(this._editor_filename, this);
this._configure();
this._injectFunctions();
/**
* Add a last instruction at the end of the code so Skupt will generate a Suspension state
* for after the user's last instruction. Otherwise it would be impossible to retrieve the
* modifications made by the last user's line. For skulpt analysis.
*/
this._code = codes[0] + "\npass";
this._setBreakpoint(1, false);
if(typeof this.context.infos.maxIter !== 'undefined') {
this._maxIterations = Math.ceil(this.context.infos.maxIter/10);
}
if(typeof this.context.infos.maxIterWithoutAction !== 'undefined') {
this._maxIterWithoutAction = Math.ceil(this.context.infos.maxIterWithoutAction/10);
}
if(!this._hasActions) {
// No limit on
this._maxIterWithoutAction = this._maxIterations;
}
var susp_handlers = {};
susp_handlers["*"] = this._debugger.suspension_handler.bind(this);
this.nbNodes = codes.length;
this.curNode = 0;
context.setCurNode(this.curNode);
this.readyNodes = [];
this.finishedNodes = [];
this.nodeStates = [];
for(var i = 0; i < codes.length ; i++) {
this.readyNodes.push(true);
this.finishedNodes.push(false);
try {
var promise = this._debugger.asyncToPromise(this._asyncCallback(this._editor_filename, codes[i]), susp_handlers, this._debugger);
promise.then(this._debugger.success.bind(this._debugger), this._debugger.error.bind(this._debugger));
} catch (e) {
this._onOutput(e.toString() + "\n");
}
this.nodeStates.push(this._debugger.suspension_stack);
this._debugger.suspension_stack = [];
}
this._debugger.suspension_stack = this.nodeStates[0];
this._resetInterpreterState();
Sk.running = true;
this._isRunning = true;
};
this.run = function () {
if(this.stepMode) {
this._paused = this._stepInProgress;
this.stepMode = false;
}
this._setTimeout(this._continue.bind(this), 100);
};
this.runCodes = function(codes) {
this.initCodes(codes);
this.run();
};
this.runStep = function (resolve, reject) {
this.stepMode = true;
if (this._isRunning && !this._stepInProgress) {
this.step(resolve, reject);
}
};
this.nbRunning = function () {
return this._isRunning ? 1 : 0;
};
this.removeEditorMarker = function () {
var editor = this.context.blocklyHelper._aceEditor;
if(editor && this._editorMarker) {
editor.session.removeMarker(this._editorMarker);
this._editorMarker = null;
}
};
this.unSkulptValue = function (origValue) {
// Transform a value, possibly a Skulpt one, into a printable value
if(typeof origValue !== 'object' || origValue === null) {
var value = origValue;
} else if(origValue.constructor === Sk.builtin.dict) {
var keys = Object.keys(origValue);
var dictElems = [];
for(var i=0; i<keys.length; i++) {
if(keys[i] == 'size' || keys[i] == '__class__'
|| !origValue[keys[i]].items
|| !origValue[keys[i]].items[0]) {
continue;
}
var items = origValue[keys[i]].items[0];
dictElems.push('' + this.unSkulptValue(items.lhs) + ': ' + this.unSkulptValue(items.rhs));
}
var value = '{' + dictElems.join(',' ) + '}';
} else if(origValue.constructor === Sk.builtin.list) {
var oldArray = origValue.v;
var newArray = [];
for(var i=0; i<oldArray.length; i++) {
newArray.push(this.unSkulptValue(oldArray[i]));
}
var value = '[' + newArray.join(', ') + ']';
} else if(origValue.v !== undefined) {
var value = origValue.v;
if(typeof value == 'string') {
value = '"' + value + '"';
}
} else if(typeof origValue == 'object') {
var value = origValue;
}
return value;
};
this.reportValue = function (origValue, varName) {
// Show a popup displaying the value of a block in step-by-step mode
if(origValue === undefined
|| (origValue && origValue.constructor === Sk.builtin.func)
|| !this._editorMarker
|| !context.display
|| !this.stepMode) {
return origValue;
}
var value = this.unSkulptValue(origValue);
var highlighted = $('.aceHighlight');
if(highlighted.length == 0) {
return origValue;
} else if(highlighted.find('.ace_start').length > 0) {
var target = highlighted.find('.ace_start')[0];
} else {
var target = highlighted[0];
}
var bbox = target.getBoundingClientRect();
var leftPos = bbox.left+10;
var topPos = bbox.top-14;
if(typeof value == 'boolean') {
var displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
} else if(value === null) {
var displayStr = "None"
} else {
var displayStr = value.toString();
}
if(typeof value == 'boolean') {
displayStr = value ? window.languageStrings.valueTrue : window.languageStrings.valueFalse;
}
if(varName) {
displayStr = '' + varName + ' = ' + displayStr;
}
var dropDownDiv = '' +
'<div class="blocklyDropDownDiv" style="transition: transform 0.25s, opacity 0.25s; background-color: rgb(255, 255, 255); border-color: rgb(170, 170, 170); left: '+leftPos+'px; top: '+topPos+'px; display: block; opacity: 1; transform: translate(0px, -20px);">' +
' <div class="blocklyDropDownContent">' +
' <div class="valueReportBox">' +
displayStr +
' </div>' +
' </div>' +
' <div class="blocklyDropDownArrow arrowBottom" style="transform: translate(22px, 15px) rotate(45deg);"></div>' +
'</div>';
$('.blocklyDropDownDiv').remove();
$('body').append(dropDownDiv);
return origValue;
};
this.stop = function () {
for (var i = 0; i < this._timeouts.length; i += 1) {
window.clearTimeout(this._timeouts[i]);
}
this._timeouts = [];
this.removeEditorMarker();
if(Sk.runQueue) {
for (var i=0; i<Sk.runQueue.length; i++) {
if(Sk.runQueue[i].ctrl === this) {
Sk.runQueue.splice(i, 1);
i--;
}
}
}
if(window.quickAlgoInterface) {
window.quickAlgoInterface.setPlayPause(false);
}
this._resetInterpreterState();
};
this.isRunning = function () {
return this._isRunning;
};
this._resetInterpreterState = function () {
this._steps = 0;
this._stepsWithoutAction = 0;
this._lastNbActions = 0;
this._nbActions = 0;
this._allowStepsWithoutDelay = 0;
this._isRunning = false;
this._resetDone = false;
this.stepMode = false;
this._stepInProgress = false;
this._resetCallstackOnNextStep = false;
this._paused = false;
this.waitingOnReadyNode = false;
Sk.running = false;
if(Sk.runQueue && Sk.runQueue.length > 0) {
var nextExec = Sk.runQueue.shift();
setTimeout(function () { nextExec.ctrl.runCodes(nextExec.codes); }, 100);
}
};
this._resetCallstack = function () {
if (this._resetCallstackOnNextStep) {
this._resetCallstackOnNextStep = false;
this._debugger.suspension_stack.pop();
}
};
this.reset = function() {
if(this._resetDone) { return; }
if(this.isRunning()) {
this.stop();
}
this.context.reset();
this._resetDone = true;
};
this.step = function (resolve, reject) {
this._resetCallstack();
this._stepInProgress = true;
var editor = this.context.blocklyHelper._aceEditor;
var markDelay = this.context.infos ? Math.floor(this.context.infos.actionDelay/4) : 0;
if(this.context.display && (this.stepMode || markDelay > 30)) {
var curSusp = this._debugger.suspension_stack[this._debugger.suspension_stack.length-1];
if(curSusp && curSusp.$lineno) {
this.removeEditorMarker();
var splitCode = this._code.split(/[\r\n]/);
var Range = ace.require('ace/range').Range;
this._editorMarker = editor.session.addMarker(
new Range(curSusp.$lineno-1, curSusp.$colno, curSusp.$lineno, 0),
"aceHighlight",
"line");
}
} else {
this.removeEditorMarker();
}
var stepDelay = 0;
if(!this.stepMode && this.context.allowInfiniteLoop) {
// Add a delay in infinite loops to avoid using all CPU
if(this._allowStepsWithoutDelay > 0) {
// We just had a waitDelay, don't delay further
this._allowStepsWithoutDelay -= 1;
} else {
stepDelay = 10;
}
}
var realStepDelay = markDelay + stepDelay;
if(realStepDelay > 0) {
this._paused = true;
var self = this;
setTimeout(function() {
self.realStep(resolve, reject);
}, realStepDelay);
} else {
this.realStep(resolve, reject);
}
};
this.realStep = function (resolve, reject) {
this._paused = this.stepMode;
this._debugger.enable_step_mode();
this._debugger.resume.call(this._debugger, resolve, reject);
this._steps += 1;
if(this._lastNbActions != this._nbActions) {
this._lastNbActions = this._nbActions;
this._stepsWithoutAction = 0;
} else {
this._stepsWithoutAction += 1;
}
};
this._onStepSuccess = function (callback) {
// If there are still timeouts, there's still a step in progress
this._stepInProgress = !!this._timeouts.length;
this._continue();
if (typeof callback === 'function') {
callback();
}
};
this._onStepError = function (message, callback) {
context.onExecutionEnd && context.onExecutionEnd();
// We always get there, even on a success
this.stop();
message = '' + message;
// Skulpt doesn't support well NoneTypes
if(message.indexOf("TypeError: Cannot read property") > -1 && message.indexOf("undefined") > -1) {
message = message.replace(/^.* line/, "TypeError: NoneType value used in operation on line");
}
if(message.indexOf('undefined') > -1) {
message += '. ' + window.languageStrings.undefinedMsg;
}
// Transform message depending on whether we successfully
if(this.context.success) {
message = "<span style='color:green;font-weight:bold'>" + message + "</span>";
} else {
message = this.context.messagePrefixFailure + message;
}
this.messageCallback(message);
if (typeof callback === 'function') {
callback();
}
};
this._setBreakpoint = function (bp, isTemporary) {
this._debugger.add_breakpoint(this._editor_filename + ".py", bp, "0", isTemporary);
};
this._asyncCallback = function (editor_filename, code) {
var dumpJS = false;
return function() {
return Sk.importMainWithBody(editor_filename, dumpJS, code, true);
};
};
this.signalAction = function () {
// Allows a context to signal an "action" happened
this._stepsWithoutAction = 0;
};
}
function initBlocklyRunner(context, msgCallback) {
return new PythonInterpreter(context, msgCallback);
};
|
{
return function() { return Number.prototype[func].call(this.val); }
}
|
identifier_body
|
test_section_topics.py
|
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from pkg_resources import resource_filename
import marv_node.testing
from marv_node.testing import make_dataset, run_nodes, temporary_directory
from marv_robotics.detail import connections_section as node
from marv_store import Store
class TestCase(marv_node.testing.TestCase):
# TODO: Generate bags instead, but with connection info!
|
BAGS = [
resource_filename('marv_node.testing._robotics_tests', 'data/test_0.bag'),
resource_filename('marv_node.testing._robotics_tests', 'data/test_1.bag'),
]
async def test_node(self):
with temporary_directory() as storedir:
store = Store(storedir, {})
dataset = make_dataset(self.BAGS)
store.add_dataset(dataset)
streams = await run_nodes(dataset, [node], store)
self.assertNodeOutput(streams[0], node)
# TODO: test also header
|
identifier_body
|
|
test_section_topics.py
|
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from pkg_resources import resource_filename
import marv_node.testing
from marv_node.testing import make_dataset, run_nodes, temporary_directory
from marv_robotics.detail import connections_section as node
from marv_store import Store
class TestCase(marv_node.testing.TestCase):
# TODO: Generate bags instead, but with connection info!
BAGS = [
resource_filename('marv_node.testing._robotics_tests', 'data/test_0.bag'),
resource_filename('marv_node.testing._robotics_tests', 'data/test_1.bag'),
]
async def test_node(self):
with temporary_directory() as storedir:
store = Store(storedir, {})
|
self.assertNodeOutput(streams[0], node)
# TODO: test also header
|
dataset = make_dataset(self.BAGS)
store.add_dataset(dataset)
streams = await run_nodes(dataset, [node], store)
|
random_line_split
|
test_section_topics.py
|
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from pkg_resources import resource_filename
import marv_node.testing
from marv_node.testing import make_dataset, run_nodes, temporary_directory
from marv_robotics.detail import connections_section as node
from marv_store import Store
class TestCase(marv_node.testing.TestCase):
# TODO: Generate bags instead, but with connection info!
BAGS = [
resource_filename('marv_node.testing._robotics_tests', 'data/test_0.bag'),
resource_filename('marv_node.testing._robotics_tests', 'data/test_1.bag'),
]
async def
|
(self):
with temporary_directory() as storedir:
store = Store(storedir, {})
dataset = make_dataset(self.BAGS)
store.add_dataset(dataset)
streams = await run_nodes(dataset, [node], store)
self.assertNodeOutput(streams[0], node)
# TODO: test also header
|
test_node
|
identifier_name
|
jquery.validation.settings.js
|
jQuery(document).ready(function($){
var nameDefault = 'Your name...';
var emailDefault = 'Your email...';
var messageDefault = 'Your message...';
// Setting up existing forms
setupforms();
function setupforms() {
// Applying default values
setupDefaultText('#name',nameDefault);
setupDefaultText('#email',emailDefault);
setupDefaultText('#message',messageDefault);
// Focus / Blur check against defaults
focusField('#name');
focusField('#email');
focusField('#message');
}
function setupDefaultText(fieldID,fieldDefault) {
$(fieldID).val(fieldDefault);
$(fieldID).attr('data-default', fieldDefault);
}
function evalDefault(fieldID) {
if($(fieldID).val() != $(fieldID).attr('data-default')) {
return false;
}
else { return true; }
}
function hasDefaults(formType) {
switch (formType)
{
case "contact" :
if(evalDefault('#name') && evalDefault('#email') && evalDefault('#message')) { return true; }
else { return false; }
default :
return false;
}
}
function focusField(fieldID) {
$(fieldID).focus(function(evaluation) {
if(evalDefault(fieldID)) { $(fieldID).val(''); }
}).blur(function(evaluation) {
if(evalDefault(fieldID) || $(fieldID).val() === '') { $(fieldID).val($(fieldID).attr('data-default')); }
});
}
$('.button-submit').click(function(event) {
event.preventDefault();
});
$('#submit-contact').bind('click', function(){
if(!hasDefaults('contact')) { $('#form-contact').submit(); }
});
$("#form-contact").validate({
rules: {
name: {
required: true,
minlength: 3
},
email: {
required: true,
email: true
},
message: {
required: true,
minlength: 10
}
},
messages: {
name: {
required: "Please enter your name.",
minlength: "Name must have at least 3 characters."
},
email: {
required: "Please enter your email address.",
email: "This is not a valid email address format."
},
message: {
required: "Please enter a message.",
minlength: "Message must have at least 10 characters."
}
}
});
function
|
() {
if(!$('#form-contact').valid()) { return false; }
else { return true; }
}
$("#form-contact").ajaxForm({
beforeSubmit: validateContact,
type: "POST",
url: "assets/php/contact-form-process.php",
data: $("#form-contact").serialize(),
success: function(msg){
$("#form-message").ajaxComplete(function(event, request, settings){
if(msg == 'OK') // Message Sent? Show the 'Thank You' message
{
result = '<span class="form-message-success"><i class="icon-thumbs-up"></i> Your message was sent. Thank you!</span>';
clear = true;
}
else
{
result = '<span class="form-message-error"><i class="icon-thumbs-down"></i> ' + msg +'</span>';
clear = false;
}
$(this).html(result);
if(clear == true) {
$('#name').val('');
$('#email').val('');
$('#message').val('');
}
});
}
});
});
|
validateContact
|
identifier_name
|
jquery.validation.settings.js
|
jQuery(document).ready(function($){
var nameDefault = 'Your name...';
var emailDefault = 'Your email...';
var messageDefault = 'Your message...';
// Setting up existing forms
setupforms();
function setupforms() {
// Applying default values
setupDefaultText('#name',nameDefault);
setupDefaultText('#email',emailDefault);
setupDefaultText('#message',messageDefault);
// Focus / Blur check against defaults
focusField('#name');
focusField('#email');
focusField('#message');
}
function setupDefaultText(fieldID,fieldDefault)
|
function evalDefault(fieldID) {
if($(fieldID).val() != $(fieldID).attr('data-default')) {
return false;
}
else { return true; }
}
function hasDefaults(formType) {
switch (formType)
{
case "contact" :
if(evalDefault('#name') && evalDefault('#email') && evalDefault('#message')) { return true; }
else { return false; }
default :
return false;
}
}
function focusField(fieldID) {
$(fieldID).focus(function(evaluation) {
if(evalDefault(fieldID)) { $(fieldID).val(''); }
}).blur(function(evaluation) {
if(evalDefault(fieldID) || $(fieldID).val() === '') { $(fieldID).val($(fieldID).attr('data-default')); }
});
}
$('.button-submit').click(function(event) {
event.preventDefault();
});
$('#submit-contact').bind('click', function(){
if(!hasDefaults('contact')) { $('#form-contact').submit(); }
});
$("#form-contact").validate({
rules: {
name: {
required: true,
minlength: 3
},
email: {
required: true,
email: true
},
message: {
required: true,
minlength: 10
}
},
messages: {
name: {
required: "Please enter your name.",
minlength: "Name must have at least 3 characters."
},
email: {
required: "Please enter your email address.",
email: "This is not a valid email address format."
},
message: {
required: "Please enter a message.",
minlength: "Message must have at least 10 characters."
}
}
});
function validateContact() {
if(!$('#form-contact').valid()) { return false; }
else { return true; }
}
$("#form-contact").ajaxForm({
beforeSubmit: validateContact,
type: "POST",
url: "assets/php/contact-form-process.php",
data: $("#form-contact").serialize(),
success: function(msg){
$("#form-message").ajaxComplete(function(event, request, settings){
if(msg == 'OK') // Message Sent? Show the 'Thank You' message
{
result = '<span class="form-message-success"><i class="icon-thumbs-up"></i> Your message was sent. Thank you!</span>';
clear = true;
}
else
{
result = '<span class="form-message-error"><i class="icon-thumbs-down"></i> ' + msg +'</span>';
clear = false;
}
$(this).html(result);
if(clear == true) {
$('#name').val('');
$('#email').val('');
$('#message').val('');
}
});
}
});
});
|
{
$(fieldID).val(fieldDefault);
$(fieldID).attr('data-default', fieldDefault);
}
|
identifier_body
|
jquery.validation.settings.js
|
jQuery(document).ready(function($){
var nameDefault = 'Your name...';
var emailDefault = 'Your email...';
var messageDefault = 'Your message...';
// Setting up existing forms
setupforms();
function setupforms() {
// Applying default values
setupDefaultText('#name',nameDefault);
setupDefaultText('#email',emailDefault);
setupDefaultText('#message',messageDefault);
// Focus / Blur check against defaults
focusField('#name');
focusField('#email');
focusField('#message');
}
function setupDefaultText(fieldID,fieldDefault) {
$(fieldID).val(fieldDefault);
$(fieldID).attr('data-default', fieldDefault);
}
function evalDefault(fieldID) {
if($(fieldID).val() != $(fieldID).attr('data-default')) {
return false;
}
else { return true; }
}
function hasDefaults(formType) {
switch (formType)
{
case "contact" :
if(evalDefault('#name') && evalDefault('#email') && evalDefault('#message')) { return true; }
else { return false; }
default :
return false;
}
}
function focusField(fieldID) {
$(fieldID).focus(function(evaluation) {
if(evalDefault(fieldID)) { $(fieldID).val(''); }
}).blur(function(evaluation) {
if(evalDefault(fieldID) || $(fieldID).val() === '') { $(fieldID).val($(fieldID).attr('data-default')); }
});
}
$('.button-submit').click(function(event) {
event.preventDefault();
});
$('#submit-contact').bind('click', function(){
if(!hasDefaults('contact')) { $('#form-contact').submit(); }
});
$("#form-contact").validate({
rules: {
name: {
required: true,
minlength: 3
},
email: {
required: true,
email: true
},
message: {
required: true,
minlength: 10
}
},
messages: {
name: {
required: "Please enter your name.",
minlength: "Name must have at least 3 characters."
},
email: {
required: "Please enter your email address.",
email: "This is not a valid email address format."
},
message: {
required: "Please enter a message.",
minlength: "Message must have at least 10 characters."
}
}
});
function validateContact() {
if(!$('#form-contact').valid()) { return false; }
else { return true; }
}
$("#form-contact").ajaxForm({
beforeSubmit: validateContact,
type: "POST",
url: "assets/php/contact-form-process.php",
data: $("#form-contact").serialize(),
success: function(msg){
$("#form-message").ajaxComplete(function(event, request, settings){
if(msg == 'OK') // Message Sent? Show the 'Thank You' message
|
else
{
result = '<span class="form-message-error"><i class="icon-thumbs-down"></i> ' + msg +'</span>';
clear = false;
}
$(this).html(result);
if(clear == true) {
$('#name').val('');
$('#email').val('');
$('#message').val('');
}
});
}
});
});
|
{
result = '<span class="form-message-success"><i class="icon-thumbs-up"></i> Your message was sent. Thank you!</span>';
clear = true;
}
|
conditional_block
|
jquery.validation.settings.js
|
jQuery(document).ready(function($){
var nameDefault = 'Your name...';
var emailDefault = 'Your email...';
var messageDefault = 'Your message...';
// Setting up existing forms
setupforms();
function setupforms() {
// Applying default values
setupDefaultText('#name',nameDefault);
setupDefaultText('#email',emailDefault);
setupDefaultText('#message',messageDefault);
// Focus / Blur check against defaults
focusField('#name');
focusField('#email');
focusField('#message');
}
function setupDefaultText(fieldID,fieldDefault) {
$(fieldID).val(fieldDefault);
$(fieldID).attr('data-default', fieldDefault);
}
function evalDefault(fieldID) {
if($(fieldID).val() != $(fieldID).attr('data-default')) {
return false;
}
else { return true; }
}
function hasDefaults(formType) {
switch (formType)
{
case "contact" :
if(evalDefault('#name') && evalDefault('#email') && evalDefault('#message')) { return true; }
else { return false; }
default :
return false;
}
}
function focusField(fieldID) {
$(fieldID).focus(function(evaluation) {
if(evalDefault(fieldID)) { $(fieldID).val(''); }
}).blur(function(evaluation) {
if(evalDefault(fieldID) || $(fieldID).val() === '') { $(fieldID).val($(fieldID).attr('data-default')); }
});
}
$('.button-submit').click(function(event) {
event.preventDefault();
});
$('#submit-contact').bind('click', function(){
if(!hasDefaults('contact')) { $('#form-contact').submit(); }
});
$("#form-contact").validate({
rules: {
name: {
required: true,
minlength: 3
},
email: {
required: true,
email: true
},
message: {
required: true,
minlength: 10
}
},
messages: {
name: {
required: "Please enter your name.",
minlength: "Name must have at least 3 characters."
},
email: {
required: "Please enter your email address.",
email: "This is not a valid email address format."
},
message: {
required: "Please enter a message.",
minlength: "Message must have at least 10 characters."
|
}
});
function validateContact() {
if(!$('#form-contact').valid()) { return false; }
else { return true; }
}
$("#form-contact").ajaxForm({
beforeSubmit: validateContact,
type: "POST",
url: "assets/php/contact-form-process.php",
data: $("#form-contact").serialize(),
success: function(msg){
$("#form-message").ajaxComplete(function(event, request, settings){
if(msg == 'OK') // Message Sent? Show the 'Thank You' message
{
result = '<span class="form-message-success"><i class="icon-thumbs-up"></i> Your message was sent. Thank you!</span>';
clear = true;
}
else
{
result = '<span class="form-message-error"><i class="icon-thumbs-down"></i> ' + msg +'</span>';
clear = false;
}
$(this).html(result);
if(clear == true) {
$('#name').val('');
$('#email').val('');
$('#message').val('');
}
});
}
});
});
|
}
|
random_line_split
|
commonconfig.py
|
Copyright 2008, Red Hat, Inc
see AUTHORS
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
from config import BaseConfig, BoolOption, IntOption, Option
class CMConfig(BaseConfig):
log_level = Option('INFO')
listen_addr = Option('')
listen_port = IntOption(51235)
cadir = Option('/etc/pki/certmaster/ca')
cert_dir = Option('/etc/pki/certmaster')
certroot = Option('/var/lib/certmaster/certmaster/certs')
csrroot = Option('/var/lib/certmaster/certmaster/csrs')
cert_extension = Option('cert')
autosign = BoolOption(False)
sync_certs = BoolOption(False)
peering = BoolOption(True)
peerroot = Option('/var/lib/certmaster/peers')
hash_function = Option('sha256')
class MinionConfig(BaseConfig):
log_level = Option('INFO')
certmaster = Option('certmaster')
certmaster_port = IntOption(51235)
cert_dir = Option('/etc/pki/certmaster')
|
"""
Default configuration values for certmaster items when
not specified in config file.
|
random_line_split
|
|
commonconfig.py
|
"""
Default configuration values for certmaster items when
not specified in config file.
Copyright 2008, Red Hat, Inc
see AUTHORS
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
from config import BaseConfig, BoolOption, IntOption, Option
class CMConfig(BaseConfig):
|
class MinionConfig(BaseConfig):
log_level = Option('INFO')
certmaster = Option('certmaster')
certmaster_port = IntOption(51235)
cert_dir = Option('/etc/pki/certmaster')
|
log_level = Option('INFO')
listen_addr = Option('')
listen_port = IntOption(51235)
cadir = Option('/etc/pki/certmaster/ca')
cert_dir = Option('/etc/pki/certmaster')
certroot = Option('/var/lib/certmaster/certmaster/certs')
csrroot = Option('/var/lib/certmaster/certmaster/csrs')
cert_extension = Option('cert')
autosign = BoolOption(False)
sync_certs = BoolOption(False)
peering = BoolOption(True)
peerroot = Option('/var/lib/certmaster/peers')
hash_function = Option('sha256')
|
identifier_body
|
commonconfig.py
|
"""
Default configuration values for certmaster items when
not specified in config file.
Copyright 2008, Red Hat, Inc
see AUTHORS
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
from config import BaseConfig, BoolOption, IntOption, Option
class
|
(BaseConfig):
log_level = Option('INFO')
listen_addr = Option('')
listen_port = IntOption(51235)
cadir = Option('/etc/pki/certmaster/ca')
cert_dir = Option('/etc/pki/certmaster')
certroot = Option('/var/lib/certmaster/certmaster/certs')
csrroot = Option('/var/lib/certmaster/certmaster/csrs')
cert_extension = Option('cert')
autosign = BoolOption(False)
sync_certs = BoolOption(False)
peering = BoolOption(True)
peerroot = Option('/var/lib/certmaster/peers')
hash_function = Option('sha256')
class MinionConfig(BaseConfig):
log_level = Option('INFO')
certmaster = Option('certmaster')
certmaster_port = IntOption(51235)
cert_dir = Option('/etc/pki/certmaster')
|
CMConfig
|
identifier_name
|
articles.js
|
$(function() {
$(".article-sidebar .sticky").css('width', $(".article-sidebar .sticky").width()-2);
$(".article-sidebar .sticky").sticky({topSpacing:45});
$(".article-sidebar ul.categories a").on('click', function(e) {
if (!$(this).siblings('ul').length)
return;
e.preventDefault();
$(this).parent().toggleClass('active');
});
function checkSuggestion() {
var viewport_bottom = $window.scrollTop() + $window.height();
var height = $elem.height();
var bottom = $elem.offset().top + $elem.height();
if (bottom <= viewport_bottom) {
$target.fadeIn();
} else {
$target.fadeOut();
}
}
if ($('.article-suggest').length) {
var $window = $(window);
|
var $elem = $('article.body');
var $target = $('.article-suggest');
$window.scroll(checkSuggestion);
checkSuggestion();
}
// CONTRIBUTORS
$('.contributors > li .header').on('click', function(e) {
$(this).siblings('ul').slideToggle();
});
});
|
random_line_split
|
|
articles.js
|
$(function() {
$(".article-sidebar .sticky").css('width', $(".article-sidebar .sticky").width()-2);
$(".article-sidebar .sticky").sticky({topSpacing:45});
$(".article-sidebar ul.categories a").on('click', function(e) {
if (!$(this).siblings('ul').length)
return;
e.preventDefault();
$(this).parent().toggleClass('active');
});
function
|
() {
var viewport_bottom = $window.scrollTop() + $window.height();
var height = $elem.height();
var bottom = $elem.offset().top + $elem.height();
if (bottom <= viewport_bottom) {
$target.fadeIn();
} else {
$target.fadeOut();
}
}
if ($('.article-suggest').length) {
var $window = $(window);
var $elem = $('article.body');
var $target = $('.article-suggest');
$window.scroll(checkSuggestion);
checkSuggestion();
}
// CONTRIBUTORS
$('.contributors > li .header').on('click', function(e) {
$(this).siblings('ul').slideToggle();
});
});
|
checkSuggestion
|
identifier_name
|
articles.js
|
$(function() {
$(".article-sidebar .sticky").css('width', $(".article-sidebar .sticky").width()-2);
$(".article-sidebar .sticky").sticky({topSpacing:45});
$(".article-sidebar ul.categories a").on('click', function(e) {
if (!$(this).siblings('ul').length)
return;
e.preventDefault();
$(this).parent().toggleClass('active');
});
function checkSuggestion() {
var viewport_bottom = $window.scrollTop() + $window.height();
var height = $elem.height();
var bottom = $elem.offset().top + $elem.height();
if (bottom <= viewport_bottom)
|
else {
$target.fadeOut();
}
}
if ($('.article-suggest').length) {
var $window = $(window);
var $elem = $('article.body');
var $target = $('.article-suggest');
$window.scroll(checkSuggestion);
checkSuggestion();
}
// CONTRIBUTORS
$('.contributors > li .header').on('click', function(e) {
$(this).siblings('ul').slideToggle();
});
});
|
{
$target.fadeIn();
}
|
conditional_block
|
articles.js
|
$(function() {
$(".article-sidebar .sticky").css('width', $(".article-sidebar .sticky").width()-2);
$(".article-sidebar .sticky").sticky({topSpacing:45});
$(".article-sidebar ul.categories a").on('click', function(e) {
if (!$(this).siblings('ul').length)
return;
e.preventDefault();
$(this).parent().toggleClass('active');
});
function checkSuggestion()
|
if ($('.article-suggest').length) {
var $window = $(window);
var $elem = $('article.body');
var $target = $('.article-suggest');
$window.scroll(checkSuggestion);
checkSuggestion();
}
// CONTRIBUTORS
$('.contributors > li .header').on('click', function(e) {
$(this).siblings('ul').slideToggle();
});
});
|
{
var viewport_bottom = $window.scrollTop() + $window.height();
var height = $elem.height();
var bottom = $elem.offset().top + $elem.height();
if (bottom <= viewport_bottom) {
$target.fadeIn();
} else {
$target.fadeOut();
}
}
|
identifier_body
|
solve0043.js
|
var library = require('./library.js');
var check_cond = function(num, div, start)
{
var n = '';
for(var i = start; i < start + 3; i++)
{
n = n + num.toString().charAt(i - 1);
}
if(parseInt(n) % div === 0)
{
return true;
}
return false;
|
var all = [2, 3, 5, 7, 11, 13, 17];
for(var i = 0; i < all.length; i += 1)
{
if(!check_cond(num, all[i], i + 2))
{
return false;
}
}
return true;
}
var solve = function ()
{
var sum = 0;
var start = 1234567890;
var end = 9876543210;
for(var i = start, count = 0; i <= end; i += 1, count += 1)
{
if(count % 1000000 == 0)
{
console.log("\$i : " + i);
}
if(!library.is_pandigital(i, 0))
{
continue;
}
if(!check_all(i))
{
continue;
}
console.log("OK : " + i);
sum += i;
}
};
var check_all_2 = function(num)
{
var y = num.toString();
var n = [0];
for(var i = 0; i < y.length; i += 1)
{
n.push(parseInt(y[i]));
}
if(n[4] % 2 != 0)
{
return false;
}
var a = n[3] + n[4] + n[5];
if(a % 3 != 0)
{
return false;
}
if(n[6] % 5 != 0)
{
return false;
}
var b = n[5] * 10 + n[6] - 2 * n[7];
if(b % 7 != 0)
{
return false;
}
var c = n[6] * 10 + n[7] - n[8];
if(c % 11 != 0)
{
return false;
}
var d = n[7] * 10 + n[8] + 4 * n[9];
if(d % 13 != 0)
{
return false;
}
var e = n[8] * 10 + n[9] - 5 * n[10];
if(e % 17 != 0)
{
return false;
}
return true;
}
var solve_2 = function ()
{
var sum = 0;
var start = 1234567890;
var end = 9876543210;
for(var i = start, count = 0; i <= end; i += 1, count += 1)
{
if(count % 1000000 == 0)
{
console.log("\$i : " + i);
}
if(!check_all_2(i))
{
continue;
}
if(!library.is_pandigital_v2(i, 0))
{
continue;
}
console.log("OK : " + i);
sum += i;
}
};
var sum = solve_2();
console.log(sum);
//var num = process.argv[2];
//console.log(check_all_2(num));
|
}
var check_all = function(num)
{
|
random_line_split
|
solve0043.js
|
var library = require('./library.js');
var check_cond = function(num, div, start)
{
var n = '';
for(var i = start; i < start + 3; i++)
{
n = n + num.toString().charAt(i - 1);
}
if(parseInt(n) % div === 0)
{
return true;
}
return false;
}
var check_all = function(num)
{
var all = [2, 3, 5, 7, 11, 13, 17];
for(var i = 0; i < all.length; i += 1)
{
if(!check_cond(num, all[i], i + 2))
{
return false;
}
}
return true;
}
var solve = function ()
{
var sum = 0;
var start = 1234567890;
var end = 9876543210;
for(var i = start, count = 0; i <= end; i += 1, count += 1)
{
if(count % 1000000 == 0)
{
console.log("\$i : " + i);
}
if(!library.is_pandigital(i, 0))
{
continue;
}
if(!check_all(i))
{
continue;
}
console.log("OK : " + i);
sum += i;
}
};
var check_all_2 = function(num)
{
var y = num.toString();
var n = [0];
for(var i = 0; i < y.length; i += 1)
{
n.push(parseInt(y[i]));
}
if(n[4] % 2 != 0)
{
return false;
}
var a = n[3] + n[4] + n[5];
if(a % 3 != 0)
|
if(n[6] % 5 != 0)
{
return false;
}
var b = n[5] * 10 + n[6] - 2 * n[7];
if(b % 7 != 0)
{
return false;
}
var c = n[6] * 10 + n[7] - n[8];
if(c % 11 != 0)
{
return false;
}
var d = n[7] * 10 + n[8] + 4 * n[9];
if(d % 13 != 0)
{
return false;
}
var e = n[8] * 10 + n[9] - 5 * n[10];
if(e % 17 != 0)
{
return false;
}
return true;
}
var solve_2 = function ()
{
var sum = 0;
var start = 1234567890;
var end = 9876543210;
for(var i = start, count = 0; i <= end; i += 1, count += 1)
{
if(count % 1000000 == 0)
{
console.log("\$i : " + i);
}
if(!check_all_2(i))
{
continue;
}
if(!library.is_pandigital_v2(i, 0))
{
continue;
}
console.log("OK : " + i);
sum += i;
}
};
var sum = solve_2();
console.log(sum);
//var num = process.argv[2];
//console.log(check_all_2(num));
|
{
return false;
}
|
conditional_block
|
model.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
This uses numpy to make a linear interpolation between tabular values to
calculate the exact runoff for a given value
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
ET for the cell after taking the crop/landscape factor into account
this is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
ordinarily would be.
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
# When the land-use is a built-type use the Pitt Small Storm Hydrology
# Model until the runoff predicted by the NRCS model is greater than that
# predicted by the NRCS model.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
pct, cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
result['runoff-vol'] -= runoff_adjustment
result['inf-vol'] += runoff_adjustment
tree.update(result)
|
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def compute_bmp_effect(census, m2_per_pixel, precip):
"""
Compute the overall amount of water retained by infiltration/retention
type BMP's.
Result is a percent of runoff remaining after water is trapped in
infiltration/retention BMP's
"""
meters_per_inch = 0.0254
cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel
# 'runoff-vol' in census is in inches*#cells
bmp_dict = census.get('BMPs', {})
bmp_keys = set(bmp_dict.keys())
reduction = 0.0
for bmp in set.intersection(set(get_bmps()), bmp_keys):
bmp_area = bmp_dict[bmp]
storage_space = (lookup_bmp_storage(bmp) * bmp_area)
max_reduction = lookup_bmp_drainage_ratio(bmp) * bmp_area * precip * meters_per_inch
bmp_reduction = min(max_reduction, storage_space)
reduction += bmp_reduction
return 0 if not cubic_meters else \
max(0.0, cubic_meters - reduction) / cubic_meters
def simulate_modifications(census, fn, cell_res, precip, pc=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
pct = compute_bmp_effect(mod, cell_res, precip)
simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
# From the EPA WaterSense data finder for the Philadelphia airport (19153)
# Converted to daily number in inches per day.
# http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
# TODO: include Potential Max ET as a data layer from CGIAR
# http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precip, precolumbian)
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")
|
# perform water quality calculation
if n != 0:
|
random_line_split
|
model.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
    """
    The Pitt Small Storm Hydrology method.  The output is a runoff
    value in inches.

    Uses numpy to linearly interpolate between the tabular
    precipitation/runoff-ratio pairs for this soil type and land use.

    `precip` is the amount of precipitation in inches.
    """
    table = lookup_pitt_runoff(soil_type, land_use)
    # Interpolated runoff ratio (Rv) for this precipitation level.
    ratio = np.interp(precip, table['precip'], table['Rv'])
    # Runoff cannot exceed the water left after evapotranspiration.
    return min(precip * ratio, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):
    """
    A function to find the cutoff between precipitation/curve number
    pairs that have zero runoff by definition, and those that do not.

    Returns True when the pair produces no runoff, False otherwise.
    """
    # Threshold below which the NRCS equation defines runoff as zero.
    threshold = -1 * (2 * (curve_number - 100.0) / curve_number)
    return precip <= threshold
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
|
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
    """
    Simulate a bunch of cells of the same type during a one-day event.

    `precip` is the amount of precipitation in inches.

    `evaptrans` is evapotranspiration in inches per day - this is the
    ET for the cell after taking the crop/landscape factor into account
    this is NOT the ETmax.

    `cell` is a string which contains a soil type and land use
    separated by a colon.

    `cell_count` is the number of cells to simulate.

    The return value is a dictionary of runoff, evapotranspiration, and
    infiltration as a volume (inches * #cells).
    """
    def clamp(runoff, et, inf, precip):
        """
        This function ensures that runoff + et + inf <= precip.

        NOTE: Infiltration is normally independent of the
        precipitation level, but this function introduces a slight
        dependency (that is, at very low levels of precipitation, this
        function can cause infiltration to be smaller than it
        ordinarily would be.

        Currently unused: the only call site below is commented out.
        """
        total = runoff + et + inf
        if (total > precip):
            scale = precip / total
            runoff *= scale
            et *= scale
            inf *= scale
        return (runoff, et, inf)

    precip = max(0.0, precip)
    soil_type, land_use, bmp = cell.lower().split(':')

    # If there is no precipitation, then there is no runoff or
    # infiltration; however, there is evapotranspiration.  (It is
    # understood that over a period of time, this can lead to the sum
    # of the three values exceeding the total precipitation.)
    if precip == 0.0:
        return {
            'runoff-vol': 0.0,
            'et-vol': 0.0,
            'inf-vol': 0.0,
        }

    # If the BMP is cluster_housing or no_till, then make it the
    # land-use.  This is done because those two types of BMPs behave
    # more like land-uses than they do BMPs.
    if bmp and not is_bmp(bmp):
        land_use = bmp or land_use

    # When the land-use is a built-type, use the Pitt Small Storm
    # Hydrology Model until the runoff predicted by the NRCS model is
    # greater than that predicted by the Pitt model.
    if is_built_type(land_use):
        pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
        nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
        runoff = max(pitt_runoff, nrcs_runoff)
    else:
        runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
    # Whatever neither runs off nor evaporates is assumed to infiltrate.
    inf = max(0.0, precip - (evaptrans + runoff))
    # (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
    return {
        'runoff-vol': cell_count * runoff,
        'et-vol': cell_count * evaptrans,
        'inf-vol': cell_count * inf,
    }
def create_unmodified_census(census):
    """
    Build a deep copy of `census` with any 'modifications' entry
    removed.  The output is suitable for use as input to
    `simulate_water_quality`; the input census is left untouched.
    """
    census_copy = copy.deepcopy(census)
    if 'modifications' in census_copy:
        del census_copy['modifications']
    return census_copy
def create_modified_census(census):
    """
    This creates a cell census, with modifications, that is suitable
    for use as input to `simulate_water_quality`.

    For every type of cell that undergoes modification, the
    modifications are indicated with a sub-distribution under that
    cell type.
    """
    mod = copy.deepcopy(census)
    mod.pop('modifications', None)

    # Give every cell type a self-mirroring sub-distribution so the
    # modification loop below has something to subtract from.
    for (cell, subcensus) in mod['distribution'].items():
        n = subcensus['cell_count']
        changes = {
            'distribution': {
                cell: {
                    'distribution': {
                        cell: {'cell_count': n}
                    }
                }
            }
        }
        mod = dict_plus(mod, changes)

    # Apply each modification: move `n` cells from the original cell
    # type to the changed cell type inside the original type's
    # sub-distribution.
    for modification in (census.get('modifications') or []):
        for (orig_cell, subcensus) in modification['distribution'].items():
            n = subcensus['cell_count']
            # NOTE(review): assumes `orig_cell` has exactly two parts
            # ('soil:land') and 'change' has three ('soil:land:bmp'),
            # with empty parts in 'change' meaning "keep the original
            # value" — confirm against callers.
            soil1, land1 = orig_cell.split(':')
            soil2, land2, bmp = modification['change'].split(':')
            changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
            changes = {
                'distribution': {
                    orig_cell: {
                        'distribution': {
                            orig_cell: {'cell_count': -n},
                            changed_cell: {'cell_count': n}
                        }
                    }
                }
            }
            mod = dict_plus(mod, changes)

    return mod
def simulate_water_quality(tree, cell_res, fn,
                           pct=1.0, current_cell=None, precolumbian=False):
    """
    Perform a water quality simulation by doing simulations on each of
    the cell types (leaves), then adding them together by summing the
    values of a node's subtrees and storing them at that node.

    `tree` is the (sub)tree of cell distributions that is currently
    under consideration.

    `pct` is the percentage of calculated water volume to retain.

    `cell_res` is the size of each cell/pixel in meters squared
    (used for turning inches of water into volumes of water).

    `fn` is a function that takes a cell type and a number of cells
    and returns a dictionary containing runoff, et, and inf as
    volumes.

    `current_cell` is the cell type for the present node.

    `precolumbian` replaces the leaf's land use with its
    pre-Columbian equivalent before simulating it.
    """
    # Internal node.
    if 'cell_count' in tree and 'distribution' in tree:
        n = tree['cell_count']
        # simulate subtrees
        if n != 0:
            tally = {}
            for cell, subtree in tree['distribution'].items():
                simulate_water_quality(subtree, cell_res, fn,
                                       pct, cell, precolumbian)
                # Fold the subtree's results (minus its own
                # 'distribution') into this node's running tally.
                subtree_ex_dist = subtree.copy()
                subtree_ex_dist.pop('distribution', None)
                tally = dict_plus(tally, subtree_ex_dist)
            tree.update(tally)  # update this node
        # effectively a leaf
        elif n == 0:
            for pol in get_pollutants():
                tree[pol] = 0.0
    # Leaf node.
    elif 'cell_count' in tree and 'distribution' not in tree:
        # the number of cells covered by this leaf
        n = tree['cell_count']
        # canonicalize the current_cell string to 'soil:land:bmp'
        split = current_cell.split(':')
        if (len(split) == 2):
            split.append('')
        if precolumbian:
            split[1] = make_precolumbian(split[1])
        current_cell = '%s:%s:%s' % tuple(split)
        # run the runoff model on this leaf
        result = fn(current_cell, n)  # runoff, et, inf
        # Retain only `pct` of the runoff; the remainder is treated as
        # infiltration (water captured by retention BMPs).
        runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
        result['runoff-vol'] -= runoff_adjustment
        result['inf-vol'] += runoff_adjustment
        tree.update(result)
        # perform water quality calculation (guard avoids dividing by a
        # zero cell count)
        if n != 0:
            soil_type, land_use, bmp = split
            runoff_per_cell = result['runoff-vol'] / n
            liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
            for pol in get_pollutants():
                tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
    """
    Recursively convert volume units (inches * #cells) back into
    per-cell inches, removing the '*-vol' keys as it goes.
    """
    if 'cell_count' in tree:
        count = tree['cell_count']
        if count > 0:
            tree['runoff'] = tree['runoff-vol'] / count
            tree['et'] = tree['et-vol'] / count
            tree['inf'] = tree['inf-vol'] / count
        else:
            # No cells: per-cell depths are zero by convention.
            tree['runoff'] = 0
            tree['et'] = 0
            tree['inf'] = 0
        for volume_key in ('runoff-vol', 'et-vol', 'inf-vol'):
            tree.pop(volume_key, None)
    if 'distribution' in tree:
        for child in tree['distribution'].values():
            postpass(child)
def compute_bmp_effect(census, m2_per_pixel, precip):
    """
    Compute the overall amount of water retained by infiltration/retention
    type BMP's.

    Result is a percent of runoff remaining after water is trapped in
    infiltration/retention BMP's
    """
    meters_per_inch = 0.0254
    # 'runoff-vol' in census is in inches * #cells
    cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel

    bmp_areas = census.get('BMPs', {})
    relevant_bmps = set(get_bmps()) & set(bmp_areas.keys())

    reduction = 0.0
    for bmp_name in relevant_bmps:
        area = bmp_areas[bmp_name]
        storage_space = lookup_bmp_storage(bmp_name) * area
        # Water routed to the BMP this storm, capped by its storage.
        max_reduction = (lookup_bmp_drainage_ratio(bmp_name) * area *
                         precip * meters_per_inch)
        reduction += min(max_reduction, storage_space)

    if not cubic_meters:
        return 0
    return max(0.0, cubic_meters - reduction) / cubic_meters
def simulate_modifications(census, fn, cell_res, precip, pc=False):
    """
    Simulate effects of modifications.

    `census` contains a distribution of cell-types in the area of interest.

    `fn` is as described in `simulate_water_quality`.

    `cell_res` is as described in `simulate_water_quality`.

    `precip` is the day's precipitation in inches (used to size the
    BMP retention effect).

    `pc` enables pre-Columbian land-use substitution.

    Returns a dict with 'unmodified' and 'modified' result trees.
    """
    mod = create_modified_census(census)
    # Two passes: the first computes volumes so the BMP retention
    # percentage can be derived; the second applies that percentage.
    simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
    pct = compute_bmp_effect(mod, cell_res, precip)
    simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
    postpass(mod)

    unmod = create_unmodified_census(census)
    simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
    postpass(unmod)

    return {
        'unmodified': unmod,
        'modified': mod
    }
def simulate_day(census, precip, cell_res=10, precolumbian=False):
    """
    Simulate a day, including water quality effects of modifications.

    `census` contains a distribution of cell-types in the area of interest.

    `precip` is the day's precipitation in inches.

    `cell_res` is as described in `simulate_water_quality`.

    `precolumbian` indicates that artificial types should be turned
    into forest.
    """
    # Daily potential maximum evapotranspiration, in inches per day.
    et_max = 0.207
    # From the EPA WaterSense data finder for the Philadelphia airport (19153)
    # Converted to daily number in inches per day.
    # http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
    # TODO: include Potential Max ET as a data layer from CGIAR
    # http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp

    if 'modifications' in census:
        verify_census(census)

    def fn(cell, cell_count):
        # Compute et for cell type
        split = cell.split(':')
        if (len(split) == 2):
            (land_use, bmp) = split
        else:
            (_, land_use, bmp) = split
        # NOTE(review): for the 2-part 'soil:land' form the land use
        # lands in `bmp`, so `bmp or land_use` still resolves to the
        # actual land use — confirm the variable naming is intentional.
        et = et_max * lookup_ki(bmp or land_use)

        # Simulate the cell for one day
        return simulate_cell_day(precip, et, cell, cell_count)

    return simulate_modifications(census, fn, cell_res, precip, precolumbian)
def verify_census(census):
    """
    Assures that there is no soil type/land cover pair
    in a modification census that isn't in the AoI census.

    Raises ValueError when a modification references a pair that is
    absent from the area-of-interest distribution.
    """
    known_covers = census['distribution']
    for mod in census['modifications']:
        for cover in mod['distribution']:
            if cover not in known_covers:
                raise ValueError("Invalid modification census")
|
return 0.0
|
conditional_block
|
model.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
This uses numpy to make a linear interpolation between tabular values to
calculate the exact runoff for a given value
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
    """
    The runoff equation from the TR-55 document.  The output is a
    runoff value in inches.

    `precip` is the amount of precipitation in inches.
    """
    curve_number = lookup_cn(soil_type, land_use)
    # Below the cutoff, runoff is zero by definition.
    if nrcs_cutoff(precip, curve_number):
        return 0.0
    potential_retention = (1000.0 / curve_number) - 10
    initial_abs = 0.2 * potential_retention
    effective_precip = precip - initial_abs
    runoff = (effective_precip ** 2) / (effective_precip + potential_retention)
    # Runoff cannot exceed the water left after evapotranspiration.
    return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
ET for the cell after taking the crop/landscape factor into account
this is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
|
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
# When the land-use is a built-type use the Pitt Small Storm Hydrology
# Model until the runoff predicted by the NRCS model is greater than that
# predicted by the NRCS model.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
pct, cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
result['runoff-vol'] -= runoff_adjustment
result['inf-vol'] += runoff_adjustment
tree.update(result)
# perform water quality calculation
if n != 0:
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def compute_bmp_effect(census, m2_per_pixel, precip):
"""
Compute the overall amount of water retained by infiltration/retention
type BMP's.
Result is a percent of runoff remaining after water is trapped in
infiltration/retention BMP's
"""
meters_per_inch = 0.0254
cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel
# 'runoff-vol' in census is in inches*#cells
bmp_dict = census.get('BMPs', {})
bmp_keys = set(bmp_dict.keys())
reduction = 0.0
for bmp in set.intersection(set(get_bmps()), bmp_keys):
bmp_area = bmp_dict[bmp]
storage_space = (lookup_bmp_storage(bmp) * bmp_area)
max_reduction = lookup_bmp_drainage_ratio(bmp) * bmp_area * precip * meters_per_inch
bmp_reduction = min(max_reduction, storage_space)
reduction += bmp_reduction
return 0 if not cubic_meters else \
max(0.0, cubic_meters - reduction) / cubic_meters
def simulate_modifications(census, fn, cell_res, precip, pc=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
pct = compute_bmp_effect(mod, cell_res, precip)
simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
# From the EPA WaterSense data finder for the Philadelphia airport (19153)
# Converted to daily number in inches per day.
# http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
# TODO: include Potential Max ET as a data layer from CGIAR
# http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precip, precolumbian)
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")
|
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
ordinarily would be.
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
|
identifier_body
|
model.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
This uses numpy to make a linear interpolation between tabular values to
calculate the exact runoff for a given value
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
ET for the cell after taking the crop/landscape factor into account
this is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
ordinarily would be.
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
# When the land-use is a built-type use the Pitt Small Storm Hydrology
# Model until the runoff predicted by the NRCS model is greater than that
# predicted by the NRCS model.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
pct, cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
result['runoff-vol'] -= runoff_adjustment
result['inf-vol'] += runoff_adjustment
tree.update(result)
# perform water quality calculation
if n != 0:
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def
|
(census, m2_per_pixel, precip):
"""
Compute the overall amount of water retained by infiltration/retention
type BMP's.
Result is a percent of runoff remaining after water is trapped in
infiltration/retention BMP's
"""
meters_per_inch = 0.0254
cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel
# 'runoff-vol' in census is in inches*#cells
bmp_dict = census.get('BMPs', {})
bmp_keys = set(bmp_dict.keys())
reduction = 0.0
for bmp in set.intersection(set(get_bmps()), bmp_keys):
bmp_area = bmp_dict[bmp]
storage_space = (lookup_bmp_storage(bmp) * bmp_area)
max_reduction = lookup_bmp_drainage_ratio(bmp) * bmp_area * precip * meters_per_inch
bmp_reduction = min(max_reduction, storage_space)
reduction += bmp_reduction
return 0 if not cubic_meters else \
max(0.0, cubic_meters - reduction) / cubic_meters
def simulate_modifications(census, fn, cell_res, precip, pc=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
pct = compute_bmp_effect(mod, cell_res, precip)
simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
# From the EPA WaterSense data finder for the Philadelphia airport (19153)
# Converted to daily number in inches per day.
# http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
# TODO: include Potential Max ET as a data layer from CGIAR
# http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precip, precolumbian)
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")
|
compute_bmp_effect
|
identifier_name
|
main.py
|
# per-module import for actioninja
# standard imports
import sys # for tracebaks in on_error.
import json # to load the config file.
import traceback # also used to print tracebacks. I'm a lazy ass.
import asyncio # because we're using the async branch of discord.py.
from random import choice # for choosing game ids
import discord # obvious.
# https://github.com/Rapptz/discord.py/tree/async
import cacobot # imports all plugins in the cacobot folder.
# A sample configs/config.json should be supplied.
with open('configs/config.json') as data:
config = json.load(data)
# log in
client = discord.Client(max_messages=100)
def aan(string):
'''Returns "a" or "an" depending on a string's first letter.'''
if string[0].lower() in 'aeiou':
return 'an'
else:
return 'a'
# random game status
async def random_game():
''' Changes the game in the bot's status. '''
while True:
name = choice(config['games'])
game = discord.Game(name=name)
await client.change_status(game=game)
await asyncio.sleep(3600)
@client.event
async def
|
():
''' Executed when the bot successfully connects to Discord. '''
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
# pylint: disable=w1401
# pylint was freaking out about the ascii bullshit so I had to add that.
print("""
____ ____ _ ____ _
/ ___|__ _ ___ ___ | __ ) ___ | |_ | _ \ ___ __ _ __| |_ _
| | / _` |/ __/ _ \| _ \ / _ \| __| | |_) / _ \/ _` |/ _` | | | |
| |__| (_| | (_| (_) | |_) | (_) | |_ | _ < __/ (_| | (_| | |_| |
\____\____|\___\___/|____/ \___/ \__| |_| \_\___/\____|\____|\__ |
|___/
""")
await random_game()
@client.event
async def on_message(message):
'''
Executed when the bot recieves a message.
[message] is a discord.Message object, representing the sent message.
'''
cont = True
# execute Precommands
for func in cacobot.base.pres:
cont = await cacobot.base.pres[func](message, client)
if not cont:
return
if message.content.startswith(config['invoker']) and \
message.author.id != client.user.id and \
len(message.content) > 1:
command = message.content.split()[0][len(cacobot.base.config['invoker']):].lower()
# So basically if the message was ".Repeat Butt talker!!!" this
# would be "repeat"
if command in cacobot.base.functions:
if message.channel.is_private or\
message.channel.permissions_for(message.server.me).send_messages:
await client.send_typing(message.channel)
await cacobot.base.functions[command](message, client)
else:
print('\n===========\nThe bot cannot send messages to #{} in the server "{}"!\n===========\n\nThis message is only showing up because I *tried* to send a message but it didn\'t go through. This probably means the mod team has tried to disable this bot, but someone is still trying to use it!\n\nHere is the command in question:\n\n{}\n\nThis was sent by {}.\n\nIf this message shows up a lot, the bot might be disabled in that server. You should just make it leave if the mod team isn\'t going to just kick it!'.format(
message.channel.name,
message.server.name,
message.content,
message.author.name
)
) # pylint: disable=c0330
for func in cacobot.base.posts:
await cacobot.base.posts[func](message, client)
@client.event
async def on_error(*args):
'''
This event is basically a script-spanning `except` statement.
'''
# args[0] is the message that was recieved prior to the error. At least,
# it should be. We check it first in case the cause of the error wasn't a
# message.
print('An error has been caught.')
print(traceback.format_exc())
if len(args) > 1:
print(args[0], args[1])
if isinstance(args[1], discord.Message):
if args[1].author.id != client.user.id:
if args[1].channel.is_private:
print('This error was caused by a DM with {}.\n'.format(args[1].author))
else:
print(
'This error was caused by a message.\nServer: {}. Channel: #{}.\n'.format(
args[1].server.name,
args[1].channel.name
)
)
if sys.exc_info()[0].__name__ == 'Forbidden':
await client.send_message(
args[1].channel,
'You told me to do something that requires permissions I currently do not have. Ask an administrator to give me a proper role or something!')
elif sys.exc_info()[0].__name__ == 'ClientOSError' or sys.exc_info()[0].__name__ == 'ClientResponseError' or sys.exc_info()[0].__name__ == 'HTTPException':
await client.send_message(
args[1].channel,
'Sorry, I am under heavy load right now! This is probably due to a poor internet connection. Please submit your command again later.'
)
else:
await client.send_message(
args[1].channel,
'{}\n{}: You caused {} **{}** with your command.'.format(
choice(config['error_messages']),
args[1].author.name,
aan(sys.exc_info()[0].__name__),
sys.exc_info()[0].__name__)
)
client.run(config['token'])
# Here's the old manual-loop way of starting the bot.
# def main_task():
# '''
# I'm gonna be honest, I have *no clue* how asyncio works. This is all from
# the example in the docs.
# '''
# yield from client.login(config['email'], config['password'])
# yield from client.connect()
#
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main_task())
# loop.close()
# If you're taking the senic tour of the code, you should check out
# cacobot/__init__.py next.
|
on_ready
|
identifier_name
|
main.py
|
# per-module import for actioninja
# standard imports
import sys # for tracebaks in on_error.
import json # to load the config file.
import traceback # also used to print tracebacks. I'm a lazy ass.
import asyncio # because we're using the async branch of discord.py.
from random import choice # for choosing game ids
import discord # obvious.
# https://github.com/Rapptz/discord.py/tree/async
import cacobot # imports all plugins in the cacobot folder.
# A sample configs/config.json should be supplied.
with open('configs/config.json') as data:
config = json.load(data)
# log in
client = discord.Client(max_messages=100)
def aan(string):
'''Returns "a" or "an" depending on a string's first letter.'''
if string[0].lower() in 'aeiou':
return 'an'
else:
return 'a'
# random game status
async def random_game():
''' Changes the game in the bot's status. '''
while True:
name = choice(config['games'])
game = discord.Game(name=name)
await client.change_status(game=game)
await asyncio.sleep(3600)
@client.event
async def on_ready():
''' Executed when the bot successfully connects to Discord. '''
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
# pylint: disable=w1401
# pylint was freaking out about the ascii bullshit so I had to add that.
print("""
____ ____ _ ____ _
/ ___|__ _ ___ ___ | __ ) ___ | |_ | _ \ ___ __ _ __| |_ _
| | / _` |/ __/ _ \| _ \ / _ \| __| | |_) / _ \/ _` |/ _` | | | |
| |__| (_| | (_| (_) | |_) | (_) | |_ | _ < __/ (_| | (_| | |_| |
\____\____|\___\___/|____/ \___/ \__| |_| \_\___/\____|\____|\__ |
|___/
""")
await random_game()
@client.event
async def on_message(message):
'''
Executed when the bot recieves a message.
[message] is a discord.Message object, representing the sent message.
'''
cont = True
# execute Precommands
for func in cacobot.base.pres:
cont = await cacobot.base.pres[func](message, client)
if not cont:
return
if message.content.startswith(config['invoker']) and \
message.author.id != client.user.id and \
len(message.content) > 1:
command = message.content.split()[0][len(cacobot.base.config['invoker']):].lower()
# So basically if the message was ".Repeat Butt talker!!!" this
# would be "repeat"
if command in cacobot.base.functions:
if message.channel.is_private or\
message.channel.permissions_for(message.server.me).send_messages:
await client.send_typing(message.channel)
await cacobot.base.functions[command](message, client)
else:
print('\n===========\nThe bot cannot send messages to #{} in the server "{}"!\n===========\n\nThis message is only showing up because I *tried* to send a message but it didn\'t go through. This probably means the mod team has tried to disable this bot, but someone is still trying to use it!\n\nHere is the command in question:\n\n{}\n\nThis was sent by {}.\n\nIf this message shows up a lot, the bot might be disabled in that server. You should just make it leave if the mod team isn\'t going to just kick it!'.format(
message.channel.name,
message.server.name,
message.content,
message.author.name
)
) # pylint: disable=c0330
for func in cacobot.base.posts:
await cacobot.base.posts[func](message, client)
@client.event
async def on_error(*args):
|
client.run(config['token'])
# Here's the old manual-loop way of starting the bot.
# def main_task():
# '''
# I'm gonna be honest, I have *no clue* how asyncio works. This is all from
# the example in the docs.
# '''
# yield from client.login(config['email'], config['password'])
# yield from client.connect()
#
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main_task())
# loop.close()
# If you're taking the senic tour of the code, you should check out
# cacobot/__init__.py next.
|
'''
This event is basically a script-spanning `except` statement.
'''
# args[0] is the message that was recieved prior to the error. At least,
# it should be. We check it first in case the cause of the error wasn't a
# message.
print('An error has been caught.')
print(traceback.format_exc())
if len(args) > 1:
print(args[0], args[1])
if isinstance(args[1], discord.Message):
if args[1].author.id != client.user.id:
if args[1].channel.is_private:
print('This error was caused by a DM with {}.\n'.format(args[1].author))
else:
print(
'This error was caused by a message.\nServer: {}. Channel: #{}.\n'.format(
args[1].server.name,
args[1].channel.name
)
)
if sys.exc_info()[0].__name__ == 'Forbidden':
await client.send_message(
args[1].channel,
'You told me to do something that requires permissions I currently do not have. Ask an administrator to give me a proper role or something!')
elif sys.exc_info()[0].__name__ == 'ClientOSError' or sys.exc_info()[0].__name__ == 'ClientResponseError' or sys.exc_info()[0].__name__ == 'HTTPException':
await client.send_message(
args[1].channel,
'Sorry, I am under heavy load right now! This is probably due to a poor internet connection. Please submit your command again later.'
)
else:
await client.send_message(
args[1].channel,
'{}\n{}: You caused {} **{}** with your command.'.format(
choice(config['error_messages']),
args[1].author.name,
aan(sys.exc_info()[0].__name__),
sys.exc_info()[0].__name__)
)
|
identifier_body
|
main.py
|
# per-module import for actioninja
# standard imports
import sys # for tracebaks in on_error.
import json # to load the config file.
import traceback # also used to print tracebacks. I'm a lazy ass.
import asyncio # because we're using the async branch of discord.py.
from random import choice # for choosing game ids
import discord # obvious.
# https://github.com/Rapptz/discord.py/tree/async
import cacobot # imports all plugins in the cacobot folder.
# A sample configs/config.json should be supplied.
with open('configs/config.json') as data:
config = json.load(data)
# log in
client = discord.Client(max_messages=100)
def aan(string):
'''Returns "a" or "an" depending on a string's first letter.'''
if string[0].lower() in 'aeiou':
return 'an'
else:
return 'a'
# random game status
async def random_game():
''' Changes the game in the bot's status. '''
while True:
name = choice(config['games'])
game = discord.Game(name=name)
await client.change_status(game=game)
await asyncio.sleep(3600)
@client.event
async def on_ready():
''' Executed when the bot successfully connects to Discord. '''
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
# pylint: disable=w1401
# pylint was freaking out about the ascii bullshit so I had to add that.
print("""
____ ____ _ ____ _
/ ___|__ _ ___ ___ | __ ) ___ | |_ | _ \ ___ __ _ __| |_ _
| | / _` |/ __/ _ \| _ \ / _ \| __| | |_) / _ \/ _` |/ _` | | | |
| |__| (_| | (_| (_) | |_) | (_) | |_ | _ < __/ (_| | (_| | |_| |
\____\____|\___\___/|____/ \___/ \__| |_| \_\___/\____|\____|\__ |
|___/
""")
await random_game()
@client.event
async def on_message(message):
'''
Executed when the bot recieves a message.
[message] is a discord.Message object, representing the sent message.
'''
cont = True
# execute Precommands
for func in cacobot.base.pres:
cont = await cacobot.base.pres[func](message, client)
if not cont:
return
if message.content.startswith(config['invoker']) and \
message.author.id != client.user.id and \
len(message.content) > 1:
|
for func in cacobot.base.posts:
await cacobot.base.posts[func](message, client)
@client.event
async def on_error(*args):
'''
This event is basically a script-spanning `except` statement.
'''
# args[0] is the message that was recieved prior to the error. At least,
# it should be. We check it first in case the cause of the error wasn't a
# message.
print('An error has been caught.')
print(traceback.format_exc())
if len(args) > 1:
print(args[0], args[1])
if isinstance(args[1], discord.Message):
if args[1].author.id != client.user.id:
if args[1].channel.is_private:
print('This error was caused by a DM with {}.\n'.format(args[1].author))
else:
print(
'This error was caused by a message.\nServer: {}. Channel: #{}.\n'.format(
args[1].server.name,
args[1].channel.name
)
)
if sys.exc_info()[0].__name__ == 'Forbidden':
await client.send_message(
args[1].channel,
'You told me to do something that requires permissions I currently do not have. Ask an administrator to give me a proper role or something!')
elif sys.exc_info()[0].__name__ == 'ClientOSError' or sys.exc_info()[0].__name__ == 'ClientResponseError' or sys.exc_info()[0].__name__ == 'HTTPException':
await client.send_message(
args[1].channel,
'Sorry, I am under heavy load right now! This is probably due to a poor internet connection. Please submit your command again later.'
)
else:
await client.send_message(
args[1].channel,
'{}\n{}: You caused {} **{}** with your command.'.format(
choice(config['error_messages']),
args[1].author.name,
aan(sys.exc_info()[0].__name__),
sys.exc_info()[0].__name__)
)
client.run(config['token'])
# Here's the old manual-loop way of starting the bot.
# def main_task():
# '''
# I'm gonna be honest, I have *no clue* how asyncio works. This is all from
# the example in the docs.
# '''
# yield from client.login(config['email'], config['password'])
# yield from client.connect()
#
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main_task())
# loop.close()
# If you're taking the senic tour of the code, you should check out
# cacobot/__init__.py next.
|
command = message.content.split()[0][len(cacobot.base.config['invoker']):].lower()
# So basically if the message was ".Repeat Butt talker!!!" this
# would be "repeat"
if command in cacobot.base.functions:
if message.channel.is_private or\
message.channel.permissions_for(message.server.me).send_messages:
await client.send_typing(message.channel)
await cacobot.base.functions[command](message, client)
else:
print('\n===========\nThe bot cannot send messages to #{} in the server "{}"!\n===========\n\nThis message is only showing up because I *tried* to send a message but it didn\'t go through. This probably means the mod team has tried to disable this bot, but someone is still trying to use it!\n\nHere is the command in question:\n\n{}\n\nThis was sent by {}.\n\nIf this message shows up a lot, the bot might be disabled in that server. You should just make it leave if the mod team isn\'t going to just kick it!'.format(
message.channel.name,
message.server.name,
message.content,
message.author.name
)
) # pylint: disable=c0330
|
conditional_block
|
main.py
|
# per-module import for actioninja
# standard imports
import sys # for tracebaks in on_error.
import json # to load the config file.
import traceback # also used to print tracebacks. I'm a lazy ass.
import asyncio # because we're using the async branch of discord.py.
from random import choice # for choosing game ids
import discord # obvious.
# https://github.com/Rapptz/discord.py/tree/async
import cacobot # imports all plugins in the cacobot folder.
# A sample configs/config.json should be supplied.
with open('configs/config.json') as data:
config = json.load(data)
# log in
client = discord.Client(max_messages=100)
def aan(string):
'''Returns "a" or "an" depending on a string's first letter.'''
if string[0].lower() in 'aeiou':
return 'an'
else:
return 'a'
# random game status
async def random_game():
''' Changes the game in the bot's status. '''
while True:
name = choice(config['games'])
game = discord.Game(name=name)
await client.change_status(game=game)
await asyncio.sleep(3600)
@client.event
async def on_ready():
''' Executed when the bot successfully connects to Discord. '''
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
# pylint: disable=w1401
# pylint was freaking out about the ascii bullshit so I had to add that.
print("""
____ ____ _ ____ _
/ ___|__ _ ___ ___ | __ ) ___ | |_ | _ \ ___ __ _ __| |_ _
| | / _` |/ __/ _ \| _ \ / _ \| __| | |_) / _ \/ _` |/ _` | | | |
| |__| (_| | (_| (_) | |_) | (_) | |_ | _ < __/ (_| | (_| | |_| |
\____\____|\___\___/|____/ \___/ \__| |_| \_\___/\____|\____|\__ |
|___/
""")
await random_game()
@client.event
async def on_message(message):
'''
Executed when the bot recieves a message.
[message] is a discord.Message object, representing the sent message.
'''
cont = True
# execute Precommands
for func in cacobot.base.pres:
cont = await cacobot.base.pres[func](message, client)
if not cont:
return
if message.content.startswith(config['invoker']) and \
message.author.id != client.user.id and \
len(message.content) > 1:
command = message.content.split()[0][len(cacobot.base.config['invoker']):].lower()
# So basically if the message was ".Repeat Butt talker!!!" this
# would be "repeat"
if command in cacobot.base.functions:
if message.channel.is_private or\
message.channel.permissions_for(message.server.me).send_messages:
await client.send_typing(message.channel)
await cacobot.base.functions[command](message, client)
else:
print('\n===========\nThe bot cannot send messages to #{} in the server "{}"!\n===========\n\nThis message is only showing up because I *tried* to send a message but it didn\'t go through. This probably means the mod team has tried to disable this bot, but someone is still trying to use it!\n\nHere is the command in question:\n\n{}\n\nThis was sent by {}.\n\nIf this message shows up a lot, the bot might be disabled in that server. You should just make it leave if the mod team isn\'t going to just kick it!'.format(
message.channel.name,
message.server.name,
message.content,
message.author.name
)
) # pylint: disable=c0330
for func in cacobot.base.posts:
await cacobot.base.posts[func](message, client)
@client.event
async def on_error(*args):
'''
This event is basically a script-spanning `except` statement.
'''
# args[0] is the message that was recieved prior to the error. At least,
# it should be. We check it first in case the cause of the error wasn't a
# message.
print('An error has been caught.')
print(traceback.format_exc())
if len(args) > 1:
print(args[0], args[1])
if isinstance(args[1], discord.Message):
if args[1].author.id != client.user.id:
if args[1].channel.is_private:
print('This error was caused by a DM with {}.\n'.format(args[1].author))
else:
print(
'This error was caused by a message.\nServer: {}. Channel: #{}.\n'.format(
args[1].server.name,
args[1].channel.name
)
)
if sys.exc_info()[0].__name__ == 'Forbidden':
await client.send_message(
args[1].channel,
'You told me to do something that requires permissions I currently do not have. Ask an administrator to give me a proper role or something!')
elif sys.exc_info()[0].__name__ == 'ClientOSError' or sys.exc_info()[0].__name__ == 'ClientResponseError' or sys.exc_info()[0].__name__ == 'HTTPException':
await client.send_message(
args[1].channel,
'Sorry, I am under heavy load right now! This is probably due to a poor internet connection. Please submit your command again later.'
)
|
args[1].channel,
'{}\n{}: You caused {} **{}** with your command.'.format(
choice(config['error_messages']),
args[1].author.name,
aan(sys.exc_info()[0].__name__),
sys.exc_info()[0].__name__)
)
client.run(config['token'])
# Here's the old manual-loop way of starting the bot.
# def main_task():
# '''
# I'm gonna be honest, I have *no clue* how asyncio works. This is all from
# the example in the docs.
# '''
# yield from client.login(config['email'], config['password'])
# yield from client.connect()
#
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main_task())
# loop.close()
# If you're taking the senic tour of the code, you should check out
# cacobot/__init__.py next.
|
else:
await client.send_message(
|
random_line_split
|
crawler.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
crawler.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
import csv
import urllib2
import urllib
import re
import os
import urlparse
import threading
import logging
import logging.handlers
import time
import random
import bs4
MINIMUM_PDF_SIZE = 4506
TASKS = None
def create_logger(filename, logger_name=None):
logger = logging.getLogger(logger_name or filename)
fmt = '[%(asctime)s] %(levelname)s %(message)s'
datefmt = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
log = create_logger('crawl.log')
class ExceedMaximumRetryError(Exception):
def __init__(self, sbid, url):
self.sbid = sbid
self.url = url
def retrieve(url, sbid, output_folder):
"""Download the PDF or search for the webpage for any PDF link
Args:
url, assuming the input url is valid
"""
def _urlfetch(url, sbid, filename=None, retry=10):
"""
A wrapper for either urlopen or urlretrieve. It depends on the whether
there is a filename as input
"""
if filename and os.path.exists(filename):
log.warn("%s\tDUPLICATED\t%s" % (sbid, url))
return None
sleep_time = random.random() + 0.5
for i in range(1, retry+1):
try:
result = None
if filename:
result = urllib.urlretrieve(url, filename)
log.info("%s\tOK\t%s" % (sbid, url))
else:
# No log now, because later we would like to ensure
# the existance of PDFs
result = urllib2.urlopen(url).read()
return result
except urllib.ContentTooShortError as e:
log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" %
(sbid, url, i, sleep_time))
time.sleep(sleep_time)
except urllib2.HTTPError as e:
log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, e.code, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
# Sleep longer if it is server error
# http://en.wikipedia.org/wiki/Exponential_backoff
if e.code / 100 == 5:
|
except urllib2.URLError as e:
log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
raise ExceedMaximumRetryError(sbid=sbid, url=url)
if url.endswith('.pdf'):
#: sbid is not unique, so use sbid+pdfname as new name
pdf_name = url.split('/')[-1].split('.')[0]
_urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name)))
else:
page = _urlfetch(url, sbid)
soup = bs4.BeautifulSoup(page)
anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)})
if not anchors:
log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url))
return None
for a in anchors:
href = a['href']
pdf_name = href.split('/')[-1]
sub_url = urlparse.urljoin(url, href)
_urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name)))
def get_tasks(csv_filepath):
"""
Returns:
[{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}]
"""
l = []
with open(csv_filepath, 'r') as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
if 'Action' in row and row['Action'].lower() == 'ignore for now':
continue
else:
l.append(row)
return l
def get_completed_tasks(output_folder):
"""
Return downloaded tasks
"""
completed = set()
for f in os.listdir(output_folder):
filepath = os.path.join(output_folder, f)
with open(filepath, 'r') as ff:
head_line = ff.readline()
#if os.stat(filepath).st_size > MINIMUM_PDF_SIZE:
if head_line.startswith("%PDF"):
completed.add(f.split('.')[0])
else:
os.remove(filepath)
print 'deleted: ', filepath, head_line
return completed
def crawl(csv_filepath, output_folder='pdfs', exclude_downloaded=False):
"""main function
"""
global TASKS
TASKS = get_tasks(csv_filepath)
excluded = set()
if exclude_downloaded:
excluded = get_completed_tasks(output_folder)
for i in range(128):
t = threading.Thread(target=crawler, args=(output_folder, excluded))
t.start()
main_thread = threading.current_thread()
for t in threading.enumerate():
if t is main_thread:
continue
t.join()
def crawler(output_folder, excluded=set()):
"""
Thread working function
"""
finished = 0
print "thread %i has started, exclude %i items" %\
(threading.current_thread().ident, len(excluded))
global TASKS
while True:
task = None
try:
task = TASKS.pop()
except IndexError:
print "thread %i finished %i tasks, exiting for no task available"\
% (threading.current_thread().ident, finished)
break
try:
if not task:
break
sbid = task['ScienceBaseID']
# some webLinks__uri looks like:
# http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf
# since both url will redirect to the same url finally, I did not retrieve them twice
url = task['webLinks__uri']
if sbid in excluded:
continue
retrieve(url, sbid, output_folder)
finished += 1
if finished % 20 == 0:
print "%i has finished %i" % (threading.current_thread().ident, finished)
except ExceedMaximumRetryError as e:
log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url))
except Exception as e:
print e, task
log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e))
def main(argv):
print crawl(argv[1], '/scratch/pdfs')
if __name__ == '__main__':
import sys
main(sys.argv)
|
sleep_time = random.randint(0, 2 ** i - 1)
|
conditional_block
|
crawler.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
crawler.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
import csv
import urllib2
import urllib
import re
import os
import urlparse
import threading
import logging
import logging.handlers
import time
import random
import bs4
MINIMUM_PDF_SIZE = 4506
TASKS = None
def create_logger(filename, logger_name=None):
logger = logging.getLogger(logger_name or filename)
fmt = '[%(asctime)s] %(levelname)s %(message)s'
datefmt = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
log = create_logger('crawl.log')
class ExceedMaximumRetryError(Exception):
def __init__(self, sbid, url):
self.sbid = sbid
self.url = url
def retrieve(url, sbid, output_folder):
"""Download the PDF or search for the webpage for any PDF link
Args:
url, assuming the input url is valid
"""
def _urlfetch(url, sbid, filename=None, retry=10):
"""
A wrapper for either urlopen or urlretrieve. It depends on the whether
there is a filename as input
"""
if filename and os.path.exists(filename):
log.warn("%s\tDUPLICATED\t%s" % (sbid, url))
return None
sleep_time = random.random() + 0.5
for i in range(1, retry+1):
try:
result = None
if filename:
result = urllib.urlretrieve(url, filename)
log.info("%s\tOK\t%s" % (sbid, url))
else:
# No log now, because later we would like to ensure
# the existance of PDFs
result = urllib2.urlopen(url).read()
return result
except urllib.ContentTooShortError as e:
log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" %
(sbid, url, i, sleep_time))
time.sleep(sleep_time)
except urllib2.HTTPError as e:
log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, e.code, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
# Sleep longer if it is server error
# http://en.wikipedia.org/wiki/Exponential_backoff
if e.code / 100 == 5:
sleep_time = random.randint(0, 2 ** i - 1)
except urllib2.URLError as e:
log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
raise ExceedMaximumRetryError(sbid=sbid, url=url)
if url.endswith('.pdf'):
#: sbid is not unique, so use sbid+pdfname as new name
pdf_name = url.split('/')[-1].split('.')[0]
_urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name)))
else:
page = _urlfetch(url, sbid)
soup = bs4.BeautifulSoup(page)
anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)})
if not anchors:
log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url))
return None
for a in anchors:
href = a['href']
pdf_name = href.split('/')[-1]
sub_url = urlparse.urljoin(url, href)
_urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name)))
def get_tasks(csv_filepath):
|
def get_completed_tasks(output_folder):
"""
Return downloaded tasks
"""
completed = set()
for f in os.listdir(output_folder):
filepath = os.path.join(output_folder, f)
with open(filepath, 'r') as ff:
head_line = ff.readline()
#if os.stat(filepath).st_size > MINIMUM_PDF_SIZE:
if head_line.startswith("%PDF"):
completed.add(f.split('.')[0])
else:
os.remove(filepath)
print 'deleted: ', filepath, head_line
return completed
def crawl(csv_filepath, output_folder='pdfs', exclude_downloaded=False):
"""main function
"""
global TASKS
TASKS = get_tasks(csv_filepath)
excluded = set()
if exclude_downloaded:
excluded = get_completed_tasks(output_folder)
for i in range(128):
t = threading.Thread(target=crawler, args=(output_folder, excluded))
t.start()
main_thread = threading.current_thread()
for t in threading.enumerate():
if t is main_thread:
continue
t.join()
def crawler(output_folder, excluded=set()):
"""
Thread working function
"""
finished = 0
print "thread %i has started, exclude %i items" %\
(threading.current_thread().ident, len(excluded))
global TASKS
while True:
task = None
try:
task = TASKS.pop()
except IndexError:
print "thread %i finished %i tasks, exiting for no task available"\
% (threading.current_thread().ident, finished)
break
try:
if not task:
break
sbid = task['ScienceBaseID']
# some webLinks__uri looks like:
# http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf
# since both url will redirect to the same url finally, I did not retrieve them twice
url = task['webLinks__uri']
if sbid in excluded:
continue
retrieve(url, sbid, output_folder)
finished += 1
if finished % 20 == 0:
print "%i has finished %i" % (threading.current_thread().ident, finished)
except ExceedMaximumRetryError as e:
log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url))
except Exception as e:
print e, task
log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e))
def main(argv):
print crawl(argv[1], '/scratch/pdfs')
if __name__ == '__main__':
import sys
main(sys.argv)
|
"""
Returns:
[{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}]
"""
l = []
with open(csv_filepath, 'r') as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
if 'Action' in row and row['Action'].lower() == 'ignore for now':
continue
else:
l.append(row)
return l
|
identifier_body
|
crawler.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
crawler.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
import csv
import urllib2
import urllib
import re
import os
import urlparse
import threading
import logging
import logging.handlers
import time
import random
import bs4
MINIMUM_PDF_SIZE = 4506
TASKS = None
def create_logger(filename, logger_name=None):
logger = logging.getLogger(logger_name or filename)
fmt = '[%(asctime)s] %(levelname)s %(message)s'
datefmt = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
log = create_logger('crawl.log')
class ExceedMaximumRetryError(Exception):
def __init__(self, sbid, url):
self.sbid = sbid
self.url = url
def retrieve(url, sbid, output_folder):
"""Download the PDF or search for the webpage for any PDF link
Args:
url, assuming the input url is valid
"""
def _urlfetch(url, sbid, filename=None, retry=10):
"""
A wrapper for either urlopen or urlretrieve. It depends on the whether
there is a filename as input
"""
if filename and os.path.exists(filename):
log.warn("%s\tDUPLICATED\t%s" % (sbid, url))
return None
sleep_time = random.random() + 0.5
for i in range(1, retry+1):
try:
result = None
if filename:
result = urllib.urlretrieve(url, filename)
log.info("%s\tOK\t%s" % (sbid, url))
else:
# No log now, because later we would like to ensure
# the existance of PDFs
result = urllib2.urlopen(url).read()
return result
except urllib.ContentTooShortError as e:
log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" %
(sbid, url, i, sleep_time))
time.sleep(sleep_time)
except urllib2.HTTPError as e:
log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, e.code, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
# Sleep longer if it is server error
# http://en.wikipedia.org/wiki/Exponential_backoff
if e.code / 100 == 5:
sleep_time = random.randint(0, 2 ** i - 1)
except urllib2.URLError as e:
log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
raise ExceedMaximumRetryError(sbid=sbid, url=url)
if url.endswith('.pdf'):
|
_urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name)))
else:
page = _urlfetch(url, sbid)
soup = bs4.BeautifulSoup(page)
anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)})
if not anchors:
log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url))
return None
for a in anchors:
href = a['href']
pdf_name = href.split('/')[-1]
sub_url = urlparse.urljoin(url, href)
_urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name)))
def get_tasks(csv_filepath):
"""
Returns:
[{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}]
"""
l = []
with open(csv_filepath, 'r') as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
if 'Action' in row and row['Action'].lower() == 'ignore for now':
continue
else:
l.append(row)
return l
def get_completed_tasks(output_folder):
"""
Return downloaded tasks
"""
completed = set()
for f in os.listdir(output_folder):
filepath = os.path.join(output_folder, f)
with open(filepath, 'r') as ff:
head_line = ff.readline()
#if os.stat(filepath).st_size > MINIMUM_PDF_SIZE:
if head_line.startswith("%PDF"):
completed.add(f.split('.')[0])
else:
os.remove(filepath)
print 'deleted: ', filepath, head_line
return completed
def crawl(csv_filepath, output_folder='pdfs', exclude_downloaded=False):
"""main function
"""
global TASKS
TASKS = get_tasks(csv_filepath)
excluded = set()
if exclude_downloaded:
excluded = get_completed_tasks(output_folder)
for i in range(128):
t = threading.Thread(target=crawler, args=(output_folder, excluded))
t.start()
main_thread = threading.current_thread()
for t in threading.enumerate():
if t is main_thread:
continue
t.join()
def crawler(output_folder, excluded=set()):
"""
Thread working function
"""
finished = 0
print "thread %i has started, exclude %i items" %\
(threading.current_thread().ident, len(excluded))
global TASKS
while True:
task = None
try:
task = TASKS.pop()
except IndexError:
print "thread %i finished %i tasks, exiting for no task available"\
% (threading.current_thread().ident, finished)
break
try:
if not task:
break
sbid = task['ScienceBaseID']
# some webLinks__uri looks like:
# http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf
# since both url will redirect to the same url finally, I did not retrieve them twice
url = task['webLinks__uri']
if sbid in excluded:
continue
retrieve(url, sbid, output_folder)
finished += 1
if finished % 20 == 0:
print "%i has finished %i" % (threading.current_thread().ident, finished)
except ExceedMaximumRetryError as e:
log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url))
except Exception as e:
print e, task
log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e))
def main(argv):
print crawl(argv[1], '/scratch/pdfs')
if __name__ == '__main__':
import sys
main(sys.argv)
|
#: sbid is not unique, so use sbid+pdfname as new name
pdf_name = url.split('/')[-1].split('.')[0]
|
random_line_split
|
crawler.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
crawler.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
import csv
import urllib2
import urllib
import re
import os
import urlparse
import threading
import logging
import logging.handlers
import time
import random
import bs4
MINIMUM_PDF_SIZE = 4506
TASKS = None
def create_logger(filename, logger_name=None):
logger = logging.getLogger(logger_name or filename)
fmt = '[%(asctime)s] %(levelname)s %(message)s'
datefmt = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
log = create_logger('crawl.log')
class ExceedMaximumRetryError(Exception):
def __init__(self, sbid, url):
self.sbid = sbid
self.url = url
def retrieve(url, sbid, output_folder):
"""Download the PDF or search for the webpage for any PDF link
Args:
url, assuming the input url is valid
"""
def _urlfetch(url, sbid, filename=None, retry=10):
"""
A wrapper for either urlopen or urlretrieve. It depends on the whether
there is a filename as input
"""
if filename and os.path.exists(filename):
log.warn("%s\tDUPLICATED\t%s" % (sbid, url))
return None
sleep_time = random.random() + 0.5
for i in range(1, retry+1):
try:
result = None
if filename:
result = urllib.urlretrieve(url, filename)
log.info("%s\tOK\t%s" % (sbid, url))
else:
# No log now, because later we would like to ensure
# the existance of PDFs
result = urllib2.urlopen(url).read()
return result
except urllib.ContentTooShortError as e:
log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" %
(sbid, url, i, sleep_time))
time.sleep(sleep_time)
except urllib2.HTTPError as e:
log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, e.code, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
# Sleep longer if it is server error
# http://en.wikipedia.org/wiki/Exponential_backoff
if e.code / 100 == 5:
sleep_time = random.randint(0, 2 ** i - 1)
except urllib2.URLError as e:
log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" %
(sbid, url, i, sleep_time, e.reason))
time.sleep(sleep_time)
raise ExceedMaximumRetryError(sbid=sbid, url=url)
if url.endswith('.pdf'):
#: sbid is not unique, so use sbid+pdfname as new name
pdf_name = url.split('/')[-1].split('.')[0]
_urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name)))
else:
page = _urlfetch(url, sbid)
soup = bs4.BeautifulSoup(page)
anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)})
if not anchors:
log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url))
return None
for a in anchors:
href = a['href']
pdf_name = href.split('/')[-1]
sub_url = urlparse.urljoin(url, href)
_urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name)))
def get_tasks(csv_filepath):
"""
Returns:
[{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}]
"""
l = []
with open(csv_filepath, 'r') as f:
reader = csv.DictReader(f, delimiter=',', quotechar='"')
for row in reader:
if 'Action' in row and row['Action'].lower() == 'ignore for now':
continue
else:
l.append(row)
return l
def get_completed_tasks(output_folder):
"""
Return downloaded tasks
"""
completed = set()
for f in os.listdir(output_folder):
filepath = os.path.join(output_folder, f)
with open(filepath, 'r') as ff:
head_line = ff.readline()
#if os.stat(filepath).st_size > MINIMUM_PDF_SIZE:
if head_line.startswith("%PDF"):
completed.add(f.split('.')[0])
else:
os.remove(filepath)
print 'deleted: ', filepath, head_line
return completed
def
|
(csv_filepath, output_folder='pdfs', exclude_downloaded=False):
"""main function
"""
global TASKS
TASKS = get_tasks(csv_filepath)
excluded = set()
if exclude_downloaded:
excluded = get_completed_tasks(output_folder)
for i in range(128):
t = threading.Thread(target=crawler, args=(output_folder, excluded))
t.start()
main_thread = threading.current_thread()
for t in threading.enumerate():
if t is main_thread:
continue
t.join()
def crawler(output_folder, excluded=set()):
"""
Thread working function
"""
finished = 0
print "thread %i has started, exclude %i items" %\
(threading.current_thread().ident, len(excluded))
global TASKS
while True:
task = None
try:
task = TASKS.pop()
except IndexError:
print "thread %i finished %i tasks, exiting for no task available"\
% (threading.current_thread().ident, finished)
break
try:
if not task:
break
sbid = task['ScienceBaseID']
# some webLinks__uri looks like:
# http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf
# since both url will redirect to the same url finally, I did not retrieve them twice
url = task['webLinks__uri']
if sbid in excluded:
continue
retrieve(url, sbid, output_folder)
finished += 1
if finished % 20 == 0:
print "%i has finished %i" % (threading.current_thread().ident, finished)
except ExceedMaximumRetryError as e:
log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url))
except Exception as e:
print e, task
log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e))
def main(argv):
print crawl(argv[1], '/scratch/pdfs')
if __name__ == '__main__':
import sys
main(sys.argv)
|
crawl
|
identifier_name
|
api_receive_application.py
|
import ssl
import logging
import tornado.ioloop
import tornado.web
import sys
from tornado import httpclient
from functools import partial
from sqlalchemy import create_engine, func
from sqlalchemy.orm import scoped_session, sessionmaker
from create_receive_handler import ReceiveHandler
from wallet_notify_handler import WalletNotifyHandler
from block_notify_handler import BlockNotifyHandler
from authproxy import AuthServiceProxy
class ApiReceiveApplication(tornado.web.Application):
def __init__(self, options, instance_name):
self.options = options
self.instance_name = instance_name
handlers = [
(r"/api/receive", ReceiveHandler),
(r"/api/walletnotify/(?P<txid>[^\/]+)", WalletNotifyHandler),
(r"/api/blocknotify/(?P<hash>[^\/]+)", BlockNotifyHandler),
]
settings = dict(
cookie_secret='cookie_secret'
)
tornado.web.Application.__init__(self, handlers, **settings)
input_log_file_handler = logging.handlers.TimedRotatingFileHandler( self.options.log, when='MIDNIGHT')
formatter = logging.Formatter('%(asctime)s - %(message)s')
input_log_file_handler.setFormatter(formatter)
self.bitcoind = AuthServiceProxy(self.options.rpc_url )
self.paytxfee = self.bitcoind.getinfo()['paytxfee']
self.replay_logger = logging.getLogger(self.instance_name)
self.replay_logger.setLevel(logging.DEBUG)
self.replay_logger.addHandler(input_log_file_handler)
self.replay_logger.info('START')
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
self.replay_logger.addHandler(ch)
from models import Base, db_bootstrap
engine = create_engine( self.options.db_engine, echo=self.options.db_echo)
Base.metadata.create_all(engine)
self.db_session = scoped_session(sessionmaker(bind=engine))
db_bootstrap(self.db_session)
self.log_start_data()
def invoke_callback_url(self, forwarding_address):
url = forwarding_address.get_callback_url()
self.log('EXECUTE', 'curl ' + url)
context = ssl._create_unverified_context()
http_client = httpclient.AsyncHTTPClient(defaults=dict(ssl_options=context))
http_client.fetch(url, partial(self.on_handle_callback_url, forwarding_address.id ))
def on_handle_callback_url(self, forwarding_address_id, response ):
from models import ForwardingAddress
forwarding_address = ForwardingAddress.get_by_id(self.db_session, forwarding_address_id)
if response.error:
self.log('ERROR', str(response.error))
forwarding_address.callback_number_of_errors += 1
self.db_session.add(forwarding_address)
self.db_session.commit()
else:
if response.body == '*ok*':
forwarding_address.is_confirmed_by_client = True
self.db_session.add(forwarding_address)
self.db_session.commit()
def log(self, command, key, value=None):
#if len(logging.getLogger().handlers):
# logging.getLogger().handlers = [] # workaround to avoid stdout logging from the root logger
|
def log_start_data(self):
self.log('PARAM','BEGIN')
self.log('PARAM','port' ,self.options.port)
self.log('PARAM','log' ,self.options.log)
self.log('PARAM','db_echo' ,self.options.db_echo)
self.log('PARAM','db_engine' ,self.options.db_engine)
self.log('PARAM','rpc_url' ,self.options.rpc_url)
self.log('PARAM','END')
from models import ForwardingAddress
fwd_address_list = self.db_session.query(ForwardingAddress)
for fwd_address in fwd_address_list:
self.log('DB_ENTITY', 'FORWARDING_ADDRESS', fwd_address)
bitcoin_info = self.bitcoind.getinfo()
self.log('INFO', 'BITCOIND_GETINFO', str(bitcoin_info))
def clean_up(self):
pass
|
log_msg = command + ',' + key
if value:
try:
log_msg += ',' + value
except Exception,e :
try:
log_msg += ',' + str(value)
except Exception,e :
try:
log_msg += ',' + unicode(value)
except Exception,e :
log_msg += ', [object]'
self.replay_logger.info( log_msg )
|
identifier_body
|
api_receive_application.py
|
import ssl
import logging
import tornado.ioloop
import tornado.web
import sys
from tornado import httpclient
from functools import partial
from sqlalchemy import create_engine, func
from sqlalchemy.orm import scoped_session, sessionmaker
from create_receive_handler import ReceiveHandler
from wallet_notify_handler import WalletNotifyHandler
from block_notify_handler import BlockNotifyHandler
from authproxy import AuthServiceProxy
class ApiReceiveApplication(tornado.web.Application):
def __init__(self, options, instance_name):
self.options = options
self.instance_name = instance_name
handlers = [
(r"/api/receive", ReceiveHandler),
(r"/api/walletnotify/(?P<txid>[^\/]+)", WalletNotifyHandler),
(r"/api/blocknotify/(?P<hash>[^\/]+)", BlockNotifyHandler),
]
settings = dict(
cookie_secret='cookie_secret'
)
tornado.web.Application.__init__(self, handlers, **settings)
input_log_file_handler = logging.handlers.TimedRotatingFileHandler( self.options.log, when='MIDNIGHT')
formatter = logging.Formatter('%(asctime)s - %(message)s')
input_log_file_handler.setFormatter(formatter)
self.bitcoind = AuthServiceProxy(self.options.rpc_url )
self.paytxfee = self.bitcoind.getinfo()['paytxfee']
self.replay_logger = logging.getLogger(self.instance_name)
self.replay_logger.setLevel(logging.DEBUG)
self.replay_logger.addHandler(input_log_file_handler)
self.replay_logger.info('START')
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
self.replay_logger.addHandler(ch)
from models import Base, db_bootstrap
engine = create_engine( self.options.db_engine, echo=self.options.db_echo)
Base.metadata.create_all(engine)
self.db_session = scoped_session(sessionmaker(bind=engine))
db_bootstrap(self.db_session)
self.log_start_data()
def invoke_callback_url(self, forwarding_address):
url = forwarding_address.get_callback_url()
self.log('EXECUTE', 'curl ' + url)
context = ssl._create_unverified_context()
http_client = httpclient.AsyncHTTPClient(defaults=dict(ssl_options=context))
http_client.fetch(url, partial(self.on_handle_callback_url, forwarding_address.id ))
def on_handle_callback_url(self, forwarding_address_id, response ):
from models import ForwardingAddress
forwarding_address = ForwardingAddress.get_by_id(self.db_session, forwarding_address_id)
if response.error:
self.log('ERROR', str(response.error))
forwarding_address.callback_number_of_errors += 1
self.db_session.add(forwarding_address)
self.db_session.commit()
else:
if response.body == '*ok*':
forwarding_address.is_confirmed_by_client = True
self.db_session.add(forwarding_address)
self.db_session.commit()
def log(self, command, key, value=None):
#if len(logging.getLogger().handlers):
# logging.getLogger().handlers = [] # workaround to avoid stdout logging from the root logger
log_msg = command + ',' + key
|
if value:
try:
log_msg += ',' + value
except Exception,e :
try:
log_msg += ',' + str(value)
except Exception,e :
try:
log_msg += ',' + unicode(value)
except Exception,e :
log_msg += ', [object]'
self.replay_logger.info( log_msg )
def log_start_data(self):
self.log('PARAM','BEGIN')
self.log('PARAM','port' ,self.options.port)
self.log('PARAM','log' ,self.options.log)
self.log('PARAM','db_echo' ,self.options.db_echo)
self.log('PARAM','db_engine' ,self.options.db_engine)
self.log('PARAM','rpc_url' ,self.options.rpc_url)
self.log('PARAM','END')
from models import ForwardingAddress
fwd_address_list = self.db_session.query(ForwardingAddress)
for fwd_address in fwd_address_list:
self.log('DB_ENTITY', 'FORWARDING_ADDRESS', fwd_address)
bitcoin_info = self.bitcoind.getinfo()
self.log('INFO', 'BITCOIND_GETINFO', str(bitcoin_info))
def clean_up(self):
pass
|
random_line_split
|
|
api_receive_application.py
|
import ssl
import logging
import tornado.ioloop
import tornado.web
import sys
from tornado import httpclient
from functools import partial
from sqlalchemy import create_engine, func
from sqlalchemy.orm import scoped_session, sessionmaker
from create_receive_handler import ReceiveHandler
from wallet_notify_handler import WalletNotifyHandler
from block_notify_handler import BlockNotifyHandler
from authproxy import AuthServiceProxy
class ApiReceiveApplication(tornado.web.Application):
def __init__(self, options, instance_name):
self.options = options
self.instance_name = instance_name
handlers = [
(r"/api/receive", ReceiveHandler),
(r"/api/walletnotify/(?P<txid>[^\/]+)", WalletNotifyHandler),
(r"/api/blocknotify/(?P<hash>[^\/]+)", BlockNotifyHandler),
]
settings = dict(
cookie_secret='cookie_secret'
)
tornado.web.Application.__init__(self, handlers, **settings)
input_log_file_handler = logging.handlers.TimedRotatingFileHandler( self.options.log, when='MIDNIGHT')
formatter = logging.Formatter('%(asctime)s - %(message)s')
input_log_file_handler.setFormatter(formatter)
self.bitcoind = AuthServiceProxy(self.options.rpc_url )
self.paytxfee = self.bitcoind.getinfo()['paytxfee']
self.replay_logger = logging.getLogger(self.instance_name)
self.replay_logger.setLevel(logging.DEBUG)
self.replay_logger.addHandler(input_log_file_handler)
self.replay_logger.info('START')
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
self.replay_logger.addHandler(ch)
from models import Base, db_bootstrap
engine = create_engine( self.options.db_engine, echo=self.options.db_echo)
Base.metadata.create_all(engine)
self.db_session = scoped_session(sessionmaker(bind=engine))
db_bootstrap(self.db_session)
self.log_start_data()
def invoke_callback_url(self, forwarding_address):
url = forwarding_address.get_callback_url()
self.log('EXECUTE', 'curl ' + url)
context = ssl._create_unverified_context()
http_client = httpclient.AsyncHTTPClient(defaults=dict(ssl_options=context))
http_client.fetch(url, partial(self.on_handle_callback_url, forwarding_address.id ))
def on_handle_callback_url(self, forwarding_address_id, response ):
from models import ForwardingAddress
forwarding_address = ForwardingAddress.get_by_id(self.db_session, forwarding_address_id)
if response.error:
self.log('ERROR', str(response.error))
forwarding_address.callback_number_of_errors += 1
self.db_session.add(forwarding_address)
self.db_session.commit()
else:
if response.body == '*ok*':
forwarding_address.is_confirmed_by_client = True
self.db_session.add(forwarding_address)
self.db_session.commit()
def log(self, command, key, value=None):
#if len(logging.getLogger().handlers):
# logging.getLogger().handlers = [] # workaround to avoid stdout logging from the root logger
log_msg = command + ',' + key
if value:
|
self.replay_logger.info( log_msg )
def log_start_data(self):
self.log('PARAM','BEGIN')
self.log('PARAM','port' ,self.options.port)
self.log('PARAM','log' ,self.options.log)
self.log('PARAM','db_echo' ,self.options.db_echo)
self.log('PARAM','db_engine' ,self.options.db_engine)
self.log('PARAM','rpc_url' ,self.options.rpc_url)
self.log('PARAM','END')
from models import ForwardingAddress
fwd_address_list = self.db_session.query(ForwardingAddress)
for fwd_address in fwd_address_list:
self.log('DB_ENTITY', 'FORWARDING_ADDRESS', fwd_address)
bitcoin_info = self.bitcoind.getinfo()
self.log('INFO', 'BITCOIND_GETINFO', str(bitcoin_info))
def clean_up(self):
pass
|
try:
log_msg += ',' + value
except Exception,e :
try:
log_msg += ',' + str(value)
except Exception,e :
try:
log_msg += ',' + unicode(value)
except Exception,e :
log_msg += ', [object]'
|
conditional_block
|
api_receive_application.py
|
import ssl
import logging
import tornado.ioloop
import tornado.web
import sys
from tornado import httpclient
from functools import partial
from sqlalchemy import create_engine, func
from sqlalchemy.orm import scoped_session, sessionmaker
from create_receive_handler import ReceiveHandler
from wallet_notify_handler import WalletNotifyHandler
from block_notify_handler import BlockNotifyHandler
from authproxy import AuthServiceProxy
class ApiReceiveApplication(tornado.web.Application):
def __init__(self, options, instance_name):
self.options = options
self.instance_name = instance_name
handlers = [
(r"/api/receive", ReceiveHandler),
(r"/api/walletnotify/(?P<txid>[^\/]+)", WalletNotifyHandler),
(r"/api/blocknotify/(?P<hash>[^\/]+)", BlockNotifyHandler),
]
settings = dict(
cookie_secret='cookie_secret'
)
tornado.web.Application.__init__(self, handlers, **settings)
input_log_file_handler = logging.handlers.TimedRotatingFileHandler( self.options.log, when='MIDNIGHT')
formatter = logging.Formatter('%(asctime)s - %(message)s')
input_log_file_handler.setFormatter(formatter)
self.bitcoind = AuthServiceProxy(self.options.rpc_url )
self.paytxfee = self.bitcoind.getinfo()['paytxfee']
self.replay_logger = logging.getLogger(self.instance_name)
self.replay_logger.setLevel(logging.DEBUG)
self.replay_logger.addHandler(input_log_file_handler)
self.replay_logger.info('START')
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
self.replay_logger.addHandler(ch)
from models import Base, db_bootstrap
engine = create_engine( self.options.db_engine, echo=self.options.db_echo)
Base.metadata.create_all(engine)
self.db_session = scoped_session(sessionmaker(bind=engine))
db_bootstrap(self.db_session)
self.log_start_data()
def
|
(self, forwarding_address):
url = forwarding_address.get_callback_url()
self.log('EXECUTE', 'curl ' + url)
context = ssl._create_unverified_context()
http_client = httpclient.AsyncHTTPClient(defaults=dict(ssl_options=context))
http_client.fetch(url, partial(self.on_handle_callback_url, forwarding_address.id ))
def on_handle_callback_url(self, forwarding_address_id, response ):
from models import ForwardingAddress
forwarding_address = ForwardingAddress.get_by_id(self.db_session, forwarding_address_id)
if response.error:
self.log('ERROR', str(response.error))
forwarding_address.callback_number_of_errors += 1
self.db_session.add(forwarding_address)
self.db_session.commit()
else:
if response.body == '*ok*':
forwarding_address.is_confirmed_by_client = True
self.db_session.add(forwarding_address)
self.db_session.commit()
def log(self, command, key, value=None):
#if len(logging.getLogger().handlers):
# logging.getLogger().handlers = [] # workaround to avoid stdout logging from the root logger
log_msg = command + ',' + key
if value:
try:
log_msg += ',' + value
except Exception,e :
try:
log_msg += ',' + str(value)
except Exception,e :
try:
log_msg += ',' + unicode(value)
except Exception,e :
log_msg += ', [object]'
self.replay_logger.info( log_msg )
def log_start_data(self):
self.log('PARAM','BEGIN')
self.log('PARAM','port' ,self.options.port)
self.log('PARAM','log' ,self.options.log)
self.log('PARAM','db_echo' ,self.options.db_echo)
self.log('PARAM','db_engine' ,self.options.db_engine)
self.log('PARAM','rpc_url' ,self.options.rpc_url)
self.log('PARAM','END')
from models import ForwardingAddress
fwd_address_list = self.db_session.query(ForwardingAddress)
for fwd_address in fwd_address_list:
self.log('DB_ENTITY', 'FORWARDING_ADDRESS', fwd_address)
bitcoin_info = self.bitcoind.getinfo()
self.log('INFO', 'BITCOIND_GETINFO', str(bitcoin_info))
def clean_up(self):
pass
|
invoke_callback_url
|
identifier_name
|
scrolltofixed-tests.ts
|
$(document).ready(function() {
$('#mydiv').scrollToFixed();
});
$(document).ready(function() {
$('.header').scrollToFixed({
preFixed: function() { $(this).find('h1').css('color', 'blue'); },
postFixed: function() { $(this).find('h1').css('color', ''); }
});
$('.footer').scrollToFixed( {
bottom: 0,
limit: $('.footer').offset().top,
preFixed: function() { $(this).find('h1').css('color', 'blue'); },
postFixed: function() { $(this).find('h1').css('color', ''); }
});
|
var limit = $('.footer').offset().top - $('#summary').outerHeight(true) - 10;
return limit;
},
zIndex: 999,
preFixed: function() { $(this).find('.title').css('color', 'blue'); },
preAbsolute: function() { $(this).find('.title').css('color', 'red'); },
postFixed: function() { $(this).find('.title').css('color', ''); },
postAbsolute: function() { $(this).find('.title').css('color', ''); }
});
});
var b = $.isScrollToFixed('.header');
|
$('#summary').scrollToFixed({
marginTop: $('.header').outerHeight() + 10,
limit: function() {
|
random_line_split
|
config.py
|
from network import WLAN
###############################################################################
# Settings for WLAN STA mode
###############################################################################
WLAN_MODE = 'off'
#WLAN_SSID = ''
#WLAN_AUTH = (WLAN.WPA2,'')
###############################################################################
# LoRaWAN Configuration
###############################################################################
# May be either 'otaa', 'abp', or 'off'
LORA_MODE = 'otaa'
|
# Settings for mode 'abp'
#LORA_ABP_DEVADDR = ''
#LORA_ABP_NETKEY = ''
#LORA_ABP_APPKEY = ''
# Interval between measures transmitted to TTN.
# Measured airtime of transmission is 56.6 ms, fair use policy limits us to
# 30 seconds per day (= roughly 500 messages). We default to a 180 second
# interval (=480 messages / day).
LORA_SEND_RATE = 180
###############################################################################
# GNSS Configuration
###############################################################################
GNSS_UART_PORT = 1
GNSS_UART_BAUD = 9600
GNSS_ENABLE_PIN = 'P8'
|
# Settings for mode 'otaa'
LORA_OTAA_EUI = '70B3D57EF0001ED4'
LORA_OTAA_KEY = None # See README.md for instructions!
|
random_line_split
|
setup.py
|
"""
MIT License
Copyright (c) 2019 Claude SIMON (https://q37.info/s/rmnmqd49)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
|
version = "0.1.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="tortoise",
version=version,
author="Claude SIMON",
# author_email="[email protected]",
description="Turtle graphics on the web.",
keywords="turtle, web",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/epeios-q37/tortoise-python",
packages=setuptools.find_packages(),
install_requires=[
'atlastk',
],
classifiers=[
"Environment :: Web Environment",
"Development Status :: 3 - Alpha",
"Intended Audience :: Education",
"Intended Audience :: Other Audience",
"License :: OSI Approved :: MIT License ",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Education"
]
)
|
import setuptools
|
random_line_split
|
server.rs
|
use enums::*;
use serde_json as json;
use std::fmt;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PublicChannel {
pub name: String,
pub mode: ChannelMode,
pub characters: i32,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ORSDetails {
pub name: String,
pub characters: i32,
pub title: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UserObject {
pub identity: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq)]
pub enum Message {
ADL { ops: Vec<String> },
AOP { character: String },
BRO { message: String },
CDS {
channel: String,
description: String,
},
CHA { channels: Vec<String> },
CIU {
sender: String,
title: String,
name: String,
},
CBU {
operator: String,
channel: String,
character: String,
},
CKU {
operator: String,
channel: String,
character: String,
},
COA { character: String, channel: String },
COL {
channel: String,
oplist: Vec<String>,
},
CON { count: i32 },
COR { character: String, channel: String },
CSO { character: String, channel: String },
CTU {
operator: String,
channel: String,
length: i32,
character: String,
},
DOP { character: String },
ERR { number: i32, message: String },
FKS {
characters: Vec<String>,
kinks: Vec<i32>,
},
FLN { character: String },
HLO { message: String },
ICH {
users: Vec<UserObject>,
channel: String,
mode: ChannelMode,
},
IDN { character: String },
JCH {
channel: String,
character: UserObject,
title: String,
},
KID(json::Value),
LCH { channel: String, character: String },
LIS { characters: Vec<Vec<String>> },
NLN {
identity: String,
gender: Gender,
status: CharacterStatus,
},
IGN(json::Value),
FRL { characters: Vec<String> },
ORS { channels: Vec<ORSDetails> },
PIN,
PRD(json::Value),
PRI { character: String, message: String },
MSG {
character: String,
message: String,
channel: String,
},
LRP {
character: String,
message: String,
channel: String,
},
RLL(json::Value),
RMO { mode: ChannelMode, channel: String },
RTB {
#[serde(rename = "type")] _type: String,
character: String,
},
SFC(json::Value),
STA {
status: CharacterStatus,
character: String,
statusmsg: String,
},
SYS {
message: String,
channel: Option<String>,
},
TPN {
character: String,
status: TypingStatus,
},
UPT {
time: i64,
starttime: i64,
startstring: String,
accepted: i64,
channels: i64,
users: i64,
maxusers: i64,
},
VAR {
variable: String,
value: json::Value,
},
}
#[derive(Debug)]
pub enum ParseError {
Json(json::Error),
InvalidMessage,
}
impl ::std::convert::From<json::Error> for ParseError {
fn from(error: json::Error) -> ParseError {
ParseError::Json(error)
}
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::ParseError::*;
match *self {
Json(ref err) => err.fmt(f),
InvalidMessage => "Invalid F-Chat message received.".fmt(f),
}
}
}
impl ::std::error::Error for ParseError {
fn description(&self) -> &str {
"Error parsing F-Chat message."
}
}
impl Message {
// TODO: Find a way to deserialize without allocating a BTreeMap
fn deserialize(variant: &[u8], text: &[u8]) -> Result<Self, ParseError> {
let mut map = json::Map::new();
let variant =
String::from_utf8(Vec::from(variant)).map_err(|_| ParseError::InvalidMessage)?;
if text != b"" {
let data = json::from_slice(text)?;
map.insert(variant, data);
} else {
map.insert(variant, json::Value::Null);
}
Ok(json::from_value(json::Value::Object(map))?)
}
pub fn from_slice(message: &[u8]) -> Result<Self, ParseError> {
if message.len() < 3 {
Err(ParseError::InvalidMessage)
} else {
let text = if message.len() >= 4 {
&message[4..]
} else {
|
&[]
};
Message::deserialize(&message[..3], text)
}
}
}
|
random_line_split
|
|
server.rs
|
use enums::*;
use serde_json as json;
use std::fmt;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PublicChannel {
pub name: String,
pub mode: ChannelMode,
pub characters: i32,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ORSDetails {
pub name: String,
pub characters: i32,
pub title: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct
|
{
pub identity: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq)]
pub enum Message {
ADL { ops: Vec<String> },
AOP { character: String },
BRO { message: String },
CDS {
channel: String,
description: String,
},
CHA { channels: Vec<String> },
CIU {
sender: String,
title: String,
name: String,
},
CBU {
operator: String,
channel: String,
character: String,
},
CKU {
operator: String,
channel: String,
character: String,
},
COA { character: String, channel: String },
COL {
channel: String,
oplist: Vec<String>,
},
CON { count: i32 },
COR { character: String, channel: String },
CSO { character: String, channel: String },
CTU {
operator: String,
channel: String,
length: i32,
character: String,
},
DOP { character: String },
ERR { number: i32, message: String },
FKS {
characters: Vec<String>,
kinks: Vec<i32>,
},
FLN { character: String },
HLO { message: String },
ICH {
users: Vec<UserObject>,
channel: String,
mode: ChannelMode,
},
IDN { character: String },
JCH {
channel: String,
character: UserObject,
title: String,
},
KID(json::Value),
LCH { channel: String, character: String },
LIS { characters: Vec<Vec<String>> },
NLN {
identity: String,
gender: Gender,
status: CharacterStatus,
},
IGN(json::Value),
FRL { characters: Vec<String> },
ORS { channels: Vec<ORSDetails> },
PIN,
PRD(json::Value),
PRI { character: String, message: String },
MSG {
character: String,
message: String,
channel: String,
},
LRP {
character: String,
message: String,
channel: String,
},
RLL(json::Value),
RMO { mode: ChannelMode, channel: String },
RTB {
#[serde(rename = "type")] _type: String,
character: String,
},
SFC(json::Value),
STA {
status: CharacterStatus,
character: String,
statusmsg: String,
},
SYS {
message: String,
channel: Option<String>,
},
TPN {
character: String,
status: TypingStatus,
},
UPT {
time: i64,
starttime: i64,
startstring: String,
accepted: i64,
channels: i64,
users: i64,
maxusers: i64,
},
VAR {
variable: String,
value: json::Value,
},
}
#[derive(Debug)]
pub enum ParseError {
Json(json::Error),
InvalidMessage,
}
impl ::std::convert::From<json::Error> for ParseError {
fn from(error: json::Error) -> ParseError {
ParseError::Json(error)
}
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::ParseError::*;
match *self {
Json(ref err) => err.fmt(f),
InvalidMessage => "Invalid F-Chat message received.".fmt(f),
}
}
}
impl ::std::error::Error for ParseError {
fn description(&self) -> &str {
"Error parsing F-Chat message."
}
}
impl Message {
// TODO: Find a way to deserialize without allocating a BTreeMap
fn deserialize(variant: &[u8], text: &[u8]) -> Result<Self, ParseError> {
let mut map = json::Map::new();
let variant =
String::from_utf8(Vec::from(variant)).map_err(|_| ParseError::InvalidMessage)?;
if text != b"" {
let data = json::from_slice(text)?;
map.insert(variant, data);
} else {
map.insert(variant, json::Value::Null);
}
Ok(json::from_value(json::Value::Object(map))?)
}
pub fn from_slice(message: &[u8]) -> Result<Self, ParseError> {
if message.len() < 3 {
Err(ParseError::InvalidMessage)
} else {
let text = if message.len() >= 4 {
&message[4..]
} else {
&[]
};
Message::deserialize(&message[..3], text)
}
}
}
|
UserObject
|
identifier_name
|
server.rs
|
use enums::*;
use serde_json as json;
use std::fmt;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PublicChannel {
pub name: String,
pub mode: ChannelMode,
pub characters: i32,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ORSDetails {
pub name: String,
pub characters: i32,
pub title: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UserObject {
pub identity: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq)]
pub enum Message {
ADL { ops: Vec<String> },
AOP { character: String },
BRO { message: String },
CDS {
channel: String,
description: String,
},
CHA { channels: Vec<String> },
CIU {
sender: String,
title: String,
name: String,
},
CBU {
operator: String,
channel: String,
character: String,
},
CKU {
operator: String,
channel: String,
character: String,
},
COA { character: String, channel: String },
COL {
channel: String,
oplist: Vec<String>,
},
CON { count: i32 },
COR { character: String, channel: String },
CSO { character: String, channel: String },
CTU {
operator: String,
channel: String,
length: i32,
character: String,
},
DOP { character: String },
ERR { number: i32, message: String },
FKS {
characters: Vec<String>,
kinks: Vec<i32>,
},
FLN { character: String },
HLO { message: String },
ICH {
users: Vec<UserObject>,
channel: String,
mode: ChannelMode,
},
IDN { character: String },
JCH {
channel: String,
character: UserObject,
title: String,
},
KID(json::Value),
LCH { channel: String, character: String },
LIS { characters: Vec<Vec<String>> },
NLN {
identity: String,
gender: Gender,
status: CharacterStatus,
},
IGN(json::Value),
FRL { characters: Vec<String> },
ORS { channels: Vec<ORSDetails> },
PIN,
PRD(json::Value),
PRI { character: String, message: String },
MSG {
character: String,
message: String,
channel: String,
},
LRP {
character: String,
message: String,
channel: String,
},
RLL(json::Value),
RMO { mode: ChannelMode, channel: String },
RTB {
#[serde(rename = "type")] _type: String,
character: String,
},
SFC(json::Value),
STA {
status: CharacterStatus,
character: String,
statusmsg: String,
},
SYS {
message: String,
channel: Option<String>,
},
TPN {
character: String,
status: TypingStatus,
},
UPT {
time: i64,
starttime: i64,
startstring: String,
accepted: i64,
channels: i64,
users: i64,
maxusers: i64,
},
VAR {
variable: String,
value: json::Value,
},
}
#[derive(Debug)]
pub enum ParseError {
Json(json::Error),
InvalidMessage,
}
impl ::std::convert::From<json::Error> for ParseError {
fn from(error: json::Error) -> ParseError
|
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::ParseError::*;
match *self {
Json(ref err) => err.fmt(f),
InvalidMessage => "Invalid F-Chat message received.".fmt(f),
}
}
}
impl ::std::error::Error for ParseError {
fn description(&self) -> &str {
"Error parsing F-Chat message."
}
}
impl Message {
// TODO: Find a way to deserialize without allocating a BTreeMap
fn deserialize(variant: &[u8], text: &[u8]) -> Result<Self, ParseError> {
let mut map = json::Map::new();
let variant =
String::from_utf8(Vec::from(variant)).map_err(|_| ParseError::InvalidMessage)?;
if text != b"" {
let data = json::from_slice(text)?;
map.insert(variant, data);
} else {
map.insert(variant, json::Value::Null);
}
Ok(json::from_value(json::Value::Object(map))?)
}
pub fn from_slice(message: &[u8]) -> Result<Self, ParseError> {
if message.len() < 3 {
Err(ParseError::InvalidMessage)
} else {
let text = if message.len() >= 4 {
&message[4..]
} else {
&[]
};
Message::deserialize(&message[..3], text)
}
}
}
|
{
ParseError::Json(error)
}
|
identifier_body
|
server.rs
|
use enums::*;
use serde_json as json;
use std::fmt;
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PublicChannel {
pub name: String,
pub mode: ChannelMode,
pub characters: i32,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ORSDetails {
pub name: String,
pub characters: i32,
pub title: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct UserObject {
pub identity: String,
}
#[derive(Deserialize, Debug, Clone, PartialEq)]
pub enum Message {
ADL { ops: Vec<String> },
AOP { character: String },
BRO { message: String },
CDS {
channel: String,
description: String,
},
CHA { channels: Vec<String> },
CIU {
sender: String,
title: String,
name: String,
},
CBU {
operator: String,
channel: String,
character: String,
},
CKU {
operator: String,
channel: String,
character: String,
},
COA { character: String, channel: String },
COL {
channel: String,
oplist: Vec<String>,
},
CON { count: i32 },
COR { character: String, channel: String },
CSO { character: String, channel: String },
CTU {
operator: String,
channel: String,
length: i32,
character: String,
},
DOP { character: String },
ERR { number: i32, message: String },
FKS {
characters: Vec<String>,
kinks: Vec<i32>,
},
FLN { character: String },
HLO { message: String },
ICH {
users: Vec<UserObject>,
channel: String,
mode: ChannelMode,
},
IDN { character: String },
JCH {
channel: String,
character: UserObject,
title: String,
},
KID(json::Value),
LCH { channel: String, character: String },
LIS { characters: Vec<Vec<String>> },
NLN {
identity: String,
gender: Gender,
status: CharacterStatus,
},
IGN(json::Value),
FRL { characters: Vec<String> },
ORS { channels: Vec<ORSDetails> },
PIN,
PRD(json::Value),
PRI { character: String, message: String },
MSG {
character: String,
message: String,
channel: String,
},
LRP {
character: String,
message: String,
channel: String,
},
RLL(json::Value),
RMO { mode: ChannelMode, channel: String },
RTB {
#[serde(rename = "type")] _type: String,
character: String,
},
SFC(json::Value),
STA {
status: CharacterStatus,
character: String,
statusmsg: String,
},
SYS {
message: String,
channel: Option<String>,
},
TPN {
character: String,
status: TypingStatus,
},
UPT {
time: i64,
starttime: i64,
startstring: String,
accepted: i64,
channels: i64,
users: i64,
maxusers: i64,
},
VAR {
variable: String,
value: json::Value,
},
}
#[derive(Debug)]
pub enum ParseError {
Json(json::Error),
InvalidMessage,
}
impl ::std::convert::From<json::Error> for ParseError {
fn from(error: json::Error) -> ParseError {
ParseError::Json(error)
}
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::ParseError::*;
match *self {
Json(ref err) => err.fmt(f),
InvalidMessage => "Invalid F-Chat message received.".fmt(f),
}
}
}
impl ::std::error::Error for ParseError {
fn description(&self) -> &str {
"Error parsing F-Chat message."
}
}
impl Message {
// TODO: Find a way to deserialize without allocating a BTreeMap
fn deserialize(variant: &[u8], text: &[u8]) -> Result<Self, ParseError> {
let mut map = json::Map::new();
let variant =
String::from_utf8(Vec::from(variant)).map_err(|_| ParseError::InvalidMessage)?;
if text != b"" {
let data = json::from_slice(text)?;
map.insert(variant, data);
} else {
map.insert(variant, json::Value::Null);
}
Ok(json::from_value(json::Value::Object(map))?)
}
pub fn from_slice(message: &[u8]) -> Result<Self, ParseError> {
if message.len() < 3 {
Err(ParseError::InvalidMessage)
} else
|
}
}
|
{
let text = if message.len() >= 4 {
&message[4..]
} else {
&[]
};
Message::deserialize(&message[..3], text)
}
|
conditional_block
|
MetadataList.tsx
|
/*
MIT License
Copyright (c) 2020 Looker Data Sciences, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import React from "react"
import { Box, Text, Flex, FlexItem } from "@looker/components"
export const MetadataItem = ({
aux,
label,
children,
compact
|
children: React.ReactNode
compact?: boolean
}) => {
if (compact) {
return (
<Flex mb="small">
<FlexItem flex="0 0 auto">
<Text fontSize="medium" fontWeight="semiBold">
{label}
</Text>
</FlexItem>
<FlexItem textAlign="right" flex="1 1 auto">
<Text fontSize="small">{children}</Text>
</FlexItem>
</Flex>
)
} else {
return (
<Box mb="small">
<Box>
<Flex>
<FlexItem flex="0 0 auto">
<Text fontSize="medium" fontWeight="semiBold">
{label}
</Text>
</FlexItem>
{aux && (
<FlexItem textAlign="right" flex="1 1 auto">
<Text fontSize="small" variant="subdued">
{aux}
</Text>
</FlexItem>
)}
</Flex>
</Box>
<Box>
<Text fontSize="small">{children}</Text>
</Box>
</Box>
)
}
}
|
}: {
label: string
aux?: string
|
random_line_split
|
latinEntities.js
|
//>>built
define("dojox/editor/plugins/nls/latinEntities", { root:
//begin v1.x content
({
/* These are already handled in the default RTE
amp:"ampersand",lt:"less-than sign",
gt:"greater-than sign",
nbsp:"no-break space\nnon-breaking space",
quot:"quote",
*/
iexcl:"inverted exclamation mark",
cent:"cent sign",
pound:"pound sign",
curren:"currency sign",
yen:"yen sign\nyuan sign",
brvbar:"broken bar\nbroken vertical bar",
sect:"section sign",
uml:"diaeresis\nspacing diaeresis",
copy:"copyright sign",
ordf:"feminine ordinal indicator",
laquo:"left-pointing double angle quotation mark\nleft pointing guillemet",
not:"not sign",
shy:"soft hyphen\ndiscretionary hyphen",
reg:"registered sign\nregistered trade mark sign",
macr:"macron\nspacing macron\noverline\nAPL overbar",
deg:"degree sign",
plusmn:"plus-minus sign\nplus-or-minus sign",
sup2:"superscript two\nsuperscript digit two\nsquared",
sup3:"superscript three\nsuperscript digit three\ncubed",
acute:"acute accent\nspacing acute",
micro:"micro sign",
para:"pilcrow sign\nparagraph sign",
middot:"middle dot\nGeorgian comma\nGreek middle dot",
cedil:"cedilla\nspacing cedilla",
sup1:"superscript one\nsuperscript digit one",
ordm:"masculine ordinal indicator",
raquo:"right-pointing double angle quotation mark\nright pointing guillemet",
frac14:"vulgar fraction one quarter\nfraction one quarter",
frac12:"vulgar fraction one half\nfraction one half",
frac34:"vulgar fraction three quarters\nfraction three quarters",
iquest:"inverted question mark\nturned question mark",
Agrave:"Latin capital letter A with grave\nLatin capital letter A grave",
Aacute:"Latin capital letter A with acute",
Acirc:"Latin capital letter A with circumflex",
Atilde:"Latin capital letter A with tilde",
Auml:"Latin capital letter A with diaeresis",
Aring:"Latin capital letter A with ring above\nLatin capital letter A ring",
AElig:"Latin capital letter AE\nLatin capital ligature AE",
Ccedil:"Latin capital letter C with cedilla",
Egrave:"Latin capital letter E with grave",
Eacute:"Latin capital letter E with acute",
Ecirc:"Latin capital letter E with circumflex",
Euml:"Latin capital letter E with diaeresis",
Igrave:"Latin capital letter I with grave",
Iacute:"Latin capital letter I with acute",
Icirc:"Latin capital letter I with circumflex",
Iuml:"Latin capital letter I with diaeresis",
ETH:"Latin capital letter ETH",
Ntilde:"Latin capital letter N with tilde",
Ograve:"Latin capital letter O with grave",
Oacute:"Latin capital letter O with acute",
Ocirc:"Latin capital letter O with circumflex",
Otilde:"Latin capital letter O with tilde",
Ouml:"Latin capital letter O with diaeresis",
times:"multiplication sign",
Oslash:"Latin capital letter O with stroke\nLatin capital letter O slash",
Ugrave:"Latin capital letter U with grave",
Uacute:"Latin capital letter U with acute",
Ucirc:"Latin capital letter U with circumflex",
Uuml:"Latin capital letter U with diaeresis",
Yacute:"Latin capital letter Y with acute",
THORN:"Latin capital letter THORN",
szlig:"Latin small letter sharp s\ness-zed",
agrave:"Latin small letter a with grave\nLatin small letter a grave",
aacute:"Latin small letter a with acute",
acirc:"Latin small letter a with circumflex",
atilde:"Latin small letter a with tilde",
auml:"Latin small letter a with diaeresis",
aring:"Latin small letter a with ring above\nLatin small letter a ring",
aelig:"Latin small letter ae\nLatin small ligature ae",
ccedil:"Latin small letter c with cedilla",
egrave:"Latin small letter e with grave",
eacute:"Latin small letter e with acute",
ecirc:"Latin small letter e with circumflex",
euml:"Latin small letter e with diaeresis",
igrave:"Latin small letter i with grave",
iacute:"Latin small letter i with acute",
icirc:"Latin small letter i with circumflex",
iuml:"Latin small letter i with diaeresis",
eth:"Latin small letter eth",
ntilde:"Latin small letter n with tilde",
ograve:"Latin small letter o with grave",
oacute:"Latin small letter o with acute",
ocirc:"Latin small letter o with circumflex",
otilde:"Latin small letter o with tilde",
ouml:"Latin small letter o with diaeresis",
divide:"division sign",
oslash:"Latin small letter o with stroke\nLatin small letter o slash",
ugrave:"Latin small letter u with grave",
uacute:"Latin small letter u with acute",
ucirc:"Latin small letter u with circumflex",
uuml:"Latin small letter u with diaeresis",
yacute:"Latin small letter y with acute",
thorn:"Latin small letter thorn",
yuml:"Latin small letter y with diaeresis",
// Greek Characters and Symbols
fnof:"Latin small f with hook\nfunction\nflorin",
Alpha:"Greek capital letter alpha",
Beta:"Greek capital letter beta",
Gamma:"Greek capital letter gamma",
Delta:"Greek capital letter delta",
|
Epsilon:"Greek capital letter epsilon",
Zeta:"Greek capital letter zeta",
Eta:"Greek capital letter eta",
Theta:"Greek capital letter theta",
Iota:"Greek capital letter iota",
Kappa:"Greek capital letter kappa",
Lambda:"Greek capital letter lambda",
Mu:"Greek capital letter mu",
Nu:"Greek capital letter nu",
Xi:"Greek capital letter xi",
Omicron:"Greek capital letter omicron",
Pi:"Greek capital letter pi",
Rho:"Greek capital letter rho",
Sigma:"Greek capital letter sigma",
Tau:"Greek capital letter tau",
Upsilon:"Greek capital letter upsilon",
Phi:"Greek capital letter phi",
Chi:"Greek capital letter chi",
Psi:"Greek capital letter psi",
Omega:"Greek capital letter omega",
alpha:"Greek small letter alpha",
beta:"Greek small letter beta",
gamma:"Greek small letter gamma",
delta:"Greek small letter delta",
epsilon:"Greek small letter epsilon",
zeta:"Greek small letter zeta",
eta:"Greek small letter eta",
theta:"Greek small letter theta",
iota:"Greek small letter iota",
kappa:"Greek small letter kappa",
lambda:"Greek small letter lambda",
mu:"Greek small letter mu",
nu:"Greek small letter nu",
xi:"Greek small letter xi",
omicron:"Greek small letter omicron",
pi:"Greek small letter pi",
rho:"Greek small letter rho",
sigmaf:"Greek small letter final sigma",
sigma:"Greek small letter sigma",
tau:"Greek small letter tau",
upsilon:"Greek small letter upsilon",
phi:"Greek small letter phi",
chi:"Greek small letter chi",
psi:"Greek small letter psi",
omega:"Greek small letter omega",
thetasym:"Greek small letter theta symbol",
upsih:"Greek upsilon with hook symbol",
piv:"Greek pi symbol",
bull:"bullet\nblack small circle",
hellip:"horizontal ellipsis\nthree dot leader",
prime:"prime\nminutes\nfeet",
Prime:"double prime\nseconds\ninches",
oline:"overline\nspacing overscore",
frasl:"fraction slash",
weierp:"script capital P\npower set\nWeierstrass p",
image:"blackletter capital I\nimaginary part",
real:"blackletter capital R\nreal part symbol",
trade:"trade mark sign",
alefsym:"alef symbol\nfirst transfinite cardinal",
larr:"leftwards arrow",
uarr:"upwards arrow",
rarr:"rightwards arrow",
darr:"downwards arrow",
harr:"left right arrow",
crarr:"downwards arrow with corner leftwards\ncarriage return",
lArr:"leftwards double arrow",
uArr:"upwards double arrow",
rArr:"rightwards double arrow",
dArr:"downwards double arrow",
hArr:"left right double arrow",
forall:"for all",
part:"partial differential",
exist:"there exists",
empty:"empty set\nnull set\ndiameter",
nabla:"nabla\nbackward difference",
isin:"element of",
notin:"not an element of",
ni:"contains as member",
prod:"n-ary product\nproduct sign",
sum:"n-ary sumation",
minus:"minus sign",
lowast:"asterisk operator",
radic:"square root\nradical sign",
prop:"proportional to",
infin:"infinity",
ang:"angle",
and:"logical and\nwedge",
or:"logical or\nvee",
cap:"intersection\ncap",
cup:"union\ncup","int":"integral",
there4:"therefore",
sim:"tilde operator\nvaries with\nsimilar to",
cong:"approximately equal to",
asymp:"almost equal to\nasymptotic to",
ne:"not equal to",
equiv:"identical to",
le:"less-than or equal to",
ge:"greater-than or equal to",
sub:"subset of",
sup:"superset of",
nsub:"not a subset of",
sube:"subset of or equal to",
supe:"superset of or equal to",
oplus:"circled plus\ndirect sum",
otimes:"circled times\nvector product",
perp:"up tack\northogonal to\nperpendicular",
sdot:"dot operator",
lceil:"left ceiling\nAPL upstile",
rceil:"right ceiling",
lfloor:"left floor\nAPL downstile",
rfloor:"right floor",
lang:"left-pointing angle bracket",
rang:"right-pointing angle bracket",
loz:"lozenge",
spades:"black spade suit",
clubs:"black club suit\nshamrock",
hearts:"black heart suit\nvalentine",
diams:"black diamond suit",
OElig:"Latin capital ligature OE",
oelig:"Latin small ligature oe",
Scaron:"Latin capital letter S with caron",
scaron:"Latin small letter s with caron",
Yuml:"Latin capital letter Y with diaeresis",
circ:"modifier letter circumflex accent",
tilde:"small tilde",
ensp:"en space",
emsp:"em space",
thinsp:"thin space",
zwnj:"zero width non-joiner",
zwj:"zero width joiner",
lrm:"left-to-right mark",
rlm:"right-to-left mark",
ndash:"en dash",
mdash:"em dash",
lsquo:"left single quotation mark",
rsquo:"right single quotation mark",
sbquo:"single low-9 quotation mark",
ldquo:"left double quotation mark",
rdquo:"right double quotation mark",
bdquo:"double low-9 quotation mark",
dagger:"dagger",
Dagger:"double dagger",
permil:"per mille sign",
lsaquo:"single left-pointing angle quotation mark",
rsaquo:"single right-pointing angle quotation mark",
euro:"euro sign"
})
,
//end v1.x content
"zh": true,
"zh-tw": true,
"tr": true,
"th": true,
"sv": true,
"sl": true,
"sk": true,
"ru": true,
"ro": true,
"pt": true,
"pt-pt": true,
"pl": true,
"nl": true,
"nb": true,
"ko": true,
"kk": true,
"ja": true,
"it": true,
"hu": true,
"hr": true,
"he": true,
"fr": true,
"fi": true,
"es": true,
"el": true,
"de": true,
"da": true,
"cs": true,
"ca": true,
"ar": true
});
|
random_line_split
|
|
freedesktop_notify_zh_CN.ts
|
<?xml version="1.0" ?><!DOCTYPE TS><TS language="zh_CN" version="2.1">
<context>
<name>@default</name>
<message>
<source>Notifications</source>
<translation type="unfinished"/>
|
<message>
<source>Number of quoted characters</source>
<translation type="unfinished"/>
</message>
<message>
<source>System notifications</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use custom expiration timeout</source>
<translation type="unfinished"/>
</message>
<message>
<source>Expiration timeout</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show message content</source>
<translation type="unfinished"/>
</message>
<message>
<source>Never</source>
<translation type="unfinished"/>
</message>
<message>
<source>Options</source>
<translation type="unfinished"/>
</message>
</context>
</TS>
|
</message>
<message numerus="yes">
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
|
random_line_split
|
run_double.py
|
"""
Copyright 2016 Rasmus Larsen
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE.txt file for details.
"""
import sys
import time
from sacred import Experiment
from core.ALEEmulator import ALEEmulator
from dqn.Agent import Agent
from dqn.DoubleDQN import DoubleDQN
ex = Experiment('double-dqn')
@ex.config
def net_config():
conv_layers = 3
conv_units = [32, 64, 64]
filter_sizes = [8, 4, 3]
strides = [4, 2, 1]
state_frames = 4
fc_layers = 1
fc_units = [512]
in_width = 84
in_height = 84
discount = 0.99
device = '/gpu:0'
lr = 0.00025
opt_decay = 0.95
momentum = 0.0
opt_eps = 0.01
target_sync = 1e4
clip_delta = 1.0
tensorboard = False
tensorboard_freq = 50
@ex.config
def emu_config():
rom_path = '../ale-git/roms/'
rom_name = 'breakout'
display_screen = True
frame_skip = 4
repeat_prob = 0.0
color_avg = True
random_seed = 42
random_start = 30
@ex.config
def agent_config():
hist_size = 1e5
eps = 1.0
eps_min = 0.1
eps_decay = (eps - eps_min) / 1e6
batch_size = 32
train_start = 5e3
train_frames = 5e6
test_freq = 5e4
test_frames = 5e3
update_freq = 4
@ex.command
def test(_config):
|
@ex.automain
def main(_config, _log):
sys.stdout = open('log_' + _config['rom_name'] + time.strftime('%H%M%d%m', time.gmtime()), 'w', buffering=True)
print "#{}".format(_config)
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
agent = Agent(emu, net, _config)
agent.train()
|
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
net.load(_config['rom_name'])
agent = Agent(emu, net, _config)
agent.next(0) # put a frame into the replay memory, TODO: should not be necessary
agent.test()
|
identifier_body
|
run_double.py
|
"""
Copyright 2016 Rasmus Larsen
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE.txt file for details.
"""
import sys
import time
from sacred import Experiment
from core.ALEEmulator import ALEEmulator
from dqn.Agent import Agent
from dqn.DoubleDQN import DoubleDQN
ex = Experiment('double-dqn')
|
filter_sizes = [8, 4, 3]
strides = [4, 2, 1]
state_frames = 4
fc_layers = 1
fc_units = [512]
in_width = 84
in_height = 84
discount = 0.99
device = '/gpu:0'
lr = 0.00025
opt_decay = 0.95
momentum = 0.0
opt_eps = 0.01
target_sync = 1e4
clip_delta = 1.0
tensorboard = False
tensorboard_freq = 50
@ex.config
def emu_config():
rom_path = '../ale-git/roms/'
rom_name = 'breakout'
display_screen = True
frame_skip = 4
repeat_prob = 0.0
color_avg = True
random_seed = 42
random_start = 30
@ex.config
def agent_config():
hist_size = 1e5
eps = 1.0
eps_min = 0.1
eps_decay = (eps - eps_min) / 1e6
batch_size = 32
train_start = 5e3
train_frames = 5e6
test_freq = 5e4
test_frames = 5e3
update_freq = 4
@ex.command
def test(_config):
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
net.load(_config['rom_name'])
agent = Agent(emu, net, _config)
agent.next(0) # put a frame into the replay memory, TODO: should not be necessary
agent.test()
@ex.automain
def main(_config, _log):
sys.stdout = open('log_' + _config['rom_name'] + time.strftime('%H%M%d%m', time.gmtime()), 'w', buffering=True)
print "#{}".format(_config)
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
agent = Agent(emu, net, _config)
agent.train()
|
@ex.config
def net_config():
conv_layers = 3
conv_units = [32, 64, 64]
|
random_line_split
|
run_double.py
|
"""
Copyright 2016 Rasmus Larsen
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE.txt file for details.
"""
import sys
import time
from sacred import Experiment
from core.ALEEmulator import ALEEmulator
from dqn.Agent import Agent
from dqn.DoubleDQN import DoubleDQN
ex = Experiment('double-dqn')
@ex.config
def net_config():
conv_layers = 3
conv_units = [32, 64, 64]
filter_sizes = [8, 4, 3]
strides = [4, 2, 1]
state_frames = 4
fc_layers = 1
fc_units = [512]
in_width = 84
in_height = 84
discount = 0.99
device = '/gpu:0'
lr = 0.00025
opt_decay = 0.95
momentum = 0.0
opt_eps = 0.01
target_sync = 1e4
clip_delta = 1.0
tensorboard = False
tensorboard_freq = 50
@ex.config
def emu_config():
rom_path = '../ale-git/roms/'
rom_name = 'breakout'
display_screen = True
frame_skip = 4
repeat_prob = 0.0
color_avg = True
random_seed = 42
random_start = 30
@ex.config
def agent_config():
hist_size = 1e5
eps = 1.0
eps_min = 0.1
eps_decay = (eps - eps_min) / 1e6
batch_size = 32
train_start = 5e3
train_frames = 5e6
test_freq = 5e4
test_frames = 5e3
update_freq = 4
@ex.command
def
|
(_config):
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
net.load(_config['rom_name'])
agent = Agent(emu, net, _config)
agent.next(0) # put a frame into the replay memory, TODO: should not be necessary
agent.test()
@ex.automain
def main(_config, _log):
sys.stdout = open('log_' + _config['rom_name'] + time.strftime('%H%M%d%m', time.gmtime()), 'w', buffering=True)
print "#{}".format(_config)
emu = ALEEmulator(_config)
_config['num_actions'] = emu.num_actions
net = DoubleDQN(_config)
agent = Agent(emu, net, _config)
agent.train()
|
test
|
identifier_name
|
reports.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from memoized import memoized
from corehq import privileges
from corehq.apps.accounting.models import BillingAccount
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.dispatcher import UserManagementReportDispatcher
from corehq.apps.reports.filters.users import (
ChangeActionFilter,
ChangedByUserFilter,
EnterpriseUserFilter,
)
from corehq.apps.reports.filters.users import \
ExpandedMobileWorkerFilter as EMWF
from corehq.apps.reports.generic import GenericTabularReport, GetParamsMixin, PaginatedReportMixin
from corehq.apps.reports.standard import DatespanMixin, ProjectReport
from corehq.apps.users.audit.change_messages import (
ASSIGNED_LOCATIONS_FIELD,
CHANGE_MESSAGES_FIELDS,
DOMAIN_FIELD,
LOCATION_FIELD,
PHONE_NUMBERS_FIELD,
ROLE_FIELD,
TWO_FACTOR_FIELD,
get_messages,
)
from corehq.apps.users.models import UserHistory
from corehq.const import USER_DATETIME_FORMAT
from corehq.util.timezones.conversions import ServerTime
class UserHistoryReport(GetParamsMixin, DatespanMixin, GenericTabularReport, ProjectReport, PaginatedReportMixin):
slug = 'user_history'
name = ugettext_lazy("User History")
section_name = ugettext_lazy("User Management")
dispatcher = UserManagementReportDispatcher
fields = [
'corehq.apps.reports.filters.users.AffectedUserFilter',
'corehq.apps.reports.filters.users.ChangedByUserFilter',
'corehq.apps.reports.filters.dates.DatespanFilter',
'corehq.apps.reports.filters.users.ChangeActionFilter',
'corehq.apps.reports.filters.users.UserPropertyFilter',
'corehq.apps.reports.filters.users.UserUploadRecordFilter',
]
description = ugettext_lazy("History of user updates")
ajax_pagination = True
default_sort = {'changed_at': 'desc'}
@classmethod
def get_primary_properties(cls, domain):
|
@property
def headers(self):
h = [
DataTablesColumn(_("Affected User"), sortable=False),
DataTablesColumn(_("Modified by User"), sortable=False),
DataTablesColumn(_("Action"), prop_name='action'),
DataTablesColumn(_("Via"), prop_name='changed_via'),
DataTablesColumn(_("Changes"), sortable=False),
DataTablesColumn(_("Change Message"), sortable=False),
DataTablesColumn(_("Timestamp"), prop_name='changed_at'),
]
return DataTablesHeader(*h)
@property
def total_records(self):
return self._get_queryset().count()
@memoized
def _get_queryset(self):
user_slugs = self.request.GET.getlist(EMWF.slug)
user_ids = self._get_user_ids(user_slugs)
# return empty queryset if no matching users were found
if user_slugs and not user_ids:
return UserHistory.objects.none()
changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug)
changed_by_user_ids = self._get_user_ids(changed_by_user_slugs)
# return empty queryset if no matching users were found
if changed_by_user_slugs and not changed_by_user_ids:
return UserHistory.objects.none()
user_property = self.request.GET.get('user_property')
actions = self.request.GET.getlist('action')
user_upload_record_id = self.request.GET.get('user_upload_record')
query = self._build_query(user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id)
return query
def _get_user_ids(self, slugs):
es_query = self._get_users_es_query(slugs)
return es_query.values_list('_id', flat=True)
def _get_users_es_query(self, slugs):
return EnterpriseUserFilter.user_es_query(
self.domain,
slugs,
self.request.couch_user,
)
def _build_query(self, user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id):
filters = Q(for_domain__in=self._for_domains())
if user_ids:
filters = filters & Q(user_id__in=user_ids)
if changed_by_user_ids:
filters = filters & Q(changed_by__in=changed_by_user_ids)
if user_property:
filters = filters & self._get_property_filters(user_property)
if actions and ChangeActionFilter.ALL not in actions:
filters = filters & Q(action__in=actions)
if user_upload_record_id:
filters = filters & Q(user_upload_record_id=user_upload_record_id)
if self.datespan:
filters = filters & Q(changed_at__lt=self.datespan.enddate_adjusted,
changed_at__gte=self.datespan.startdate)
return UserHistory.objects.filter(filters)
def _for_domains(self):
return BillingAccount.get_account_by_domain(self.domain).get_domains()
@staticmethod
def _get_property_filters(user_property):
if user_property in CHANGE_MESSAGES_FIELDS:
query_filters = Q(change_messages__has_key=user_property)
# to include CommCareUser creation from UI where a location can be assigned as a part of user creation
# which is tracked only under "changes" and not "change messages"
if user_property == LOCATION_FIELD:
query_filters = query_filters | Q(changes__has_key='location_id')
else:
query_filters = Q(changes__has_key=user_property)
return query_filters
@property
def rows(self):
records = self._get_queryset().order_by(self.ordering)[
self.pagination.start:self.pagination.start + self.pagination.count
]
for record in records:
yield self._user_history_row(record, self.domain, self.timezone)
@property
def ordering(self):
by, direction = list(self.get_sorting_block()[0].items())[0]
return '-' + by if direction == 'desc' else by
@memoized
def _get_location_name(self, location_id):
from corehq.apps.locations.models import SQLLocation
if not location_id:
return None
try:
location_object = SQLLocation.objects.get(location_id=location_id)
except ObjectDoesNotExist:
return None
return location_object.display_name
def _user_history_row(self, record, domain, timezone):
return [
record.user_repr,
record.changed_by_repr,
_get_action_display(record.action),
record.changed_via,
self._user_history_details_cell(record.changes, domain),
self._html_list(list(get_messages(record.change_messages))),
ServerTime(record.changed_at).user_time(timezone).ui_string(USER_DATETIME_FORMAT),
]
def _html_list(self, changes):
items = []
if isinstance(changes, dict):
for key, value in changes.items():
if isinstance(value, dict):
value = self._html_list(value)
elif isinstance(value, list):
value = format_html(", ".join(value))
else:
value = format_html(str(value))
items.append("<li>{}: {}</li>".format(key, value))
elif isinstance(changes, list):
items = ["<li>{}</li>".format(format_html(change)) for change in changes]
return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>")
def _user_history_details_cell(self, changes, domain):
properties = UserHistoryReport.get_primary_properties(domain)
properties.pop("user_data", None)
primary_changes = {}
all_changes = {}
for key, value in changes.items():
if key == 'location_id':
value = self._get_location_name(value)
primary_changes[properties[LOCATION_FIELD]] = value
all_changes[properties[LOCATION_FIELD]] = value
elif key == 'user_data':
for user_data_key, user_data_value in changes['user_data'].items():
all_changes[f"user data: {user_data_key}"] = user_data_value
elif key in properties:
primary_changes[properties[key]] = value
all_changes[properties[key]] = value
more_count = len(all_changes) - len(primary_changes)
return render_to_string("reports/standard/partials/user_history_changes.html", {
"primary_changes": self._html_list(primary_changes),
"all_changes": self._html_list(all_changes),
"more_count": more_count,
})
def _get_action_display(logged_action):
action = ugettext_lazy("Updated")
if logged_action == UserHistory.CREATE:
action = ugettext_lazy("Added")
elif logged_action == UserHistory.DELETE:
action = ugettext_lazy("Deleted")
return action
|
"""
Get slugs and human-friendly names for the properties that are available
for filtering and/or displayed by default in the report, without
needing to click "See More".
"""
if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
user_data_label = _("profile or user data")
else:
user_data_label = _("user data")
return {
"username": _("username"),
ROLE_FIELD: _("role"),
"email": _("email"),
DOMAIN_FIELD: _("project"),
"is_active": _("is active"),
"language": _("language"),
PHONE_NUMBERS_FIELD: _("phone numbers"),
LOCATION_FIELD: _("primary location"),
"user_data": user_data_label,
TWO_FACTOR_FIELD: _("two factor authentication disabled"),
ASSIGNED_LOCATIONS_FIELD: _("assigned locations"),
}
|
identifier_body
|
reports.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from memoized import memoized
from corehq import privileges
from corehq.apps.accounting.models import BillingAccount
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.dispatcher import UserManagementReportDispatcher
from corehq.apps.reports.filters.users import (
ChangeActionFilter,
ChangedByUserFilter,
EnterpriseUserFilter,
)
from corehq.apps.reports.filters.users import \
ExpandedMobileWorkerFilter as EMWF
from corehq.apps.reports.generic import GenericTabularReport, GetParamsMixin, PaginatedReportMixin
from corehq.apps.reports.standard import DatespanMixin, ProjectReport
from corehq.apps.users.audit.change_messages import (
ASSIGNED_LOCATIONS_FIELD,
CHANGE_MESSAGES_FIELDS,
DOMAIN_FIELD,
LOCATION_FIELD,
PHONE_NUMBERS_FIELD,
ROLE_FIELD,
TWO_FACTOR_FIELD,
get_messages,
)
from corehq.apps.users.models import UserHistory
from corehq.const import USER_DATETIME_FORMAT
from corehq.util.timezones.conversions import ServerTime
class UserHistoryReport(GetParamsMixin, DatespanMixin, GenericTabularReport, ProjectReport, PaginatedReportMixin):
slug = 'user_history'
name = ugettext_lazy("User History")
section_name = ugettext_lazy("User Management")
dispatcher = UserManagementReportDispatcher
fields = [
'corehq.apps.reports.filters.users.AffectedUserFilter',
'corehq.apps.reports.filters.users.ChangedByUserFilter',
'corehq.apps.reports.filters.dates.DatespanFilter',
'corehq.apps.reports.filters.users.ChangeActionFilter',
'corehq.apps.reports.filters.users.UserPropertyFilter',
'corehq.apps.reports.filters.users.UserUploadRecordFilter',
]
description = ugettext_lazy("History of user updates")
ajax_pagination = True
default_sort = {'changed_at': 'desc'}
@classmethod
def get_primary_properties(cls, domain):
"""
Get slugs and human-friendly names for the properties that are available
for filtering and/or displayed by default in the report, without
needing to click "See More".
"""
if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
user_data_label = _("profile or user data")
else:
user_data_label = _("user data")
return {
"username": _("username"),
ROLE_FIELD: _("role"),
"email": _("email"),
DOMAIN_FIELD: _("project"),
"is_active": _("is active"),
"language": _("language"),
PHONE_NUMBERS_FIELD: _("phone numbers"),
LOCATION_FIELD: _("primary location"),
"user_data": user_data_label,
TWO_FACTOR_FIELD: _("two factor authentication disabled"),
ASSIGNED_LOCATIONS_FIELD: _("assigned locations"),
}
@property
def headers(self):
h = [
DataTablesColumn(_("Affected User"), sortable=False),
DataTablesColumn(_("Modified by User"), sortable=False),
DataTablesColumn(_("Action"), prop_name='action'),
DataTablesColumn(_("Via"), prop_name='changed_via'),
DataTablesColumn(_("Changes"), sortable=False),
DataTablesColumn(_("Change Message"), sortable=False),
DataTablesColumn(_("Timestamp"), prop_name='changed_at'),
]
return DataTablesHeader(*h)
@property
def total_records(self):
return self._get_queryset().count()
@memoized
def _get_queryset(self):
user_slugs = self.request.GET.getlist(EMWF.slug)
user_ids = self._get_user_ids(user_slugs)
# return empty queryset if no matching users were found
if user_slugs and not user_ids:
return UserHistory.objects.none()
changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug)
changed_by_user_ids = self._get_user_ids(changed_by_user_slugs)
# return empty queryset if no matching users were found
if changed_by_user_slugs and not changed_by_user_ids:
return UserHistory.objects.none()
user_property = self.request.GET.get('user_property')
actions = self.request.GET.getlist('action')
user_upload_record_id = self.request.GET.get('user_upload_record')
query = self._build_query(user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id)
return query
def _get_user_ids(self, slugs):
es_query = self._get_users_es_query(slugs)
return es_query.values_list('_id', flat=True)
def _get_users_es_query(self, slugs):
return EnterpriseUserFilter.user_es_query(
self.domain,
slugs,
self.request.couch_user,
)
def _build_query(self, user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id):
filters = Q(for_domain__in=self._for_domains())
if user_ids:
filters = filters & Q(user_id__in=user_ids)
if changed_by_user_ids:
filters = filters & Q(changed_by__in=changed_by_user_ids)
if user_property:
filters = filters & self._get_property_filters(user_property)
if actions and ChangeActionFilter.ALL not in actions:
filters = filters & Q(action__in=actions)
if user_upload_record_id:
filters = filters & Q(user_upload_record_id=user_upload_record_id)
if self.datespan:
filters = filters & Q(changed_at__lt=self.datespan.enddate_adjusted,
changed_at__gte=self.datespan.startdate)
return UserHistory.objects.filter(filters)
def _for_domains(self):
return BillingAccount.get_account_by_domain(self.domain).get_domains()
@staticmethod
def _get_property_filters(user_property):
if user_property in CHANGE_MESSAGES_FIELDS:
query_filters = Q(change_messages__has_key=user_property)
# to include CommCareUser creation from UI where a location can be assigned as a part of user creation
# which is tracked only under "changes" and not "change messages"
if user_property == LOCATION_FIELD:
query_filters = query_filters | Q(changes__has_key='location_id')
else:
query_filters = Q(changes__has_key=user_property)
return query_filters
@property
def rows(self):
records = self._get_queryset().order_by(self.ordering)[
self.pagination.start:self.pagination.start + self.pagination.count
]
for record in records:
yield self._user_history_row(record, self.domain, self.timezone)
@property
def ordering(self):
by, direction = list(self.get_sorting_block()[0].items())[0]
return '-' + by if direction == 'desc' else by
@memoized
def _get_location_name(self, location_id):
from corehq.apps.locations.models import SQLLocation
if not location_id:
return None
try:
location_object = SQLLocation.objects.get(location_id=location_id)
except ObjectDoesNotExist:
return None
return location_object.display_name
def _user_history_row(self, record, domain, timezone):
|
record.user_repr,
record.changed_by_repr,
_get_action_display(record.action),
record.changed_via,
self._user_history_details_cell(record.changes, domain),
self._html_list(list(get_messages(record.change_messages))),
ServerTime(record.changed_at).user_time(timezone).ui_string(USER_DATETIME_FORMAT),
]
def _html_list(self, changes):
items = []
if isinstance(changes, dict):
for key, value in changes.items():
if isinstance(value, dict):
value = self._html_list(value)
elif isinstance(value, list):
value = format_html(", ".join(value))
else:
value = format_html(str(value))
items.append("<li>{}: {}</li>".format(key, value))
elif isinstance(changes, list):
items = ["<li>{}</li>".format(format_html(change)) for change in changes]
return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>")
def _user_history_details_cell(self, changes, domain):
properties = UserHistoryReport.get_primary_properties(domain)
properties.pop("user_data", None)
primary_changes = {}
all_changes = {}
for key, value in changes.items():
if key == 'location_id':
value = self._get_location_name(value)
primary_changes[properties[LOCATION_FIELD]] = value
all_changes[properties[LOCATION_FIELD]] = value
elif key == 'user_data':
for user_data_key, user_data_value in changes['user_data'].items():
all_changes[f"user data: {user_data_key}"] = user_data_value
elif key in properties:
primary_changes[properties[key]] = value
all_changes[properties[key]] = value
more_count = len(all_changes) - len(primary_changes)
return render_to_string("reports/standard/partials/user_history_changes.html", {
"primary_changes": self._html_list(primary_changes),
"all_changes": self._html_list(all_changes),
"more_count": more_count,
})
def _get_action_display(logged_action):
action = ugettext_lazy("Updated")
if logged_action == UserHistory.CREATE:
action = ugettext_lazy("Added")
elif logged_action == UserHistory.DELETE:
action = ugettext_lazy("Deleted")
return action
|
return [
|
random_line_split
|
reports.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from memoized import memoized
from corehq import privileges
from corehq.apps.accounting.models import BillingAccount
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.dispatcher import UserManagementReportDispatcher
from corehq.apps.reports.filters.users import (
ChangeActionFilter,
ChangedByUserFilter,
EnterpriseUserFilter,
)
from corehq.apps.reports.filters.users import \
ExpandedMobileWorkerFilter as EMWF
from corehq.apps.reports.generic import GenericTabularReport, GetParamsMixin, PaginatedReportMixin
from corehq.apps.reports.standard import DatespanMixin, ProjectReport
from corehq.apps.users.audit.change_messages import (
ASSIGNED_LOCATIONS_FIELD,
CHANGE_MESSAGES_FIELDS,
DOMAIN_FIELD,
LOCATION_FIELD,
PHONE_NUMBERS_FIELD,
ROLE_FIELD,
TWO_FACTOR_FIELD,
get_messages,
)
from corehq.apps.users.models import UserHistory
from corehq.const import USER_DATETIME_FORMAT
from corehq.util.timezones.conversions import ServerTime
class UserHistoryReport(GetParamsMixin, DatespanMixin, GenericTabularReport, ProjectReport, PaginatedReportMixin):
slug = 'user_history'
name = ugettext_lazy("User History")
section_name = ugettext_lazy("User Management")
dispatcher = UserManagementReportDispatcher
fields = [
'corehq.apps.reports.filters.users.AffectedUserFilter',
'corehq.apps.reports.filters.users.ChangedByUserFilter',
'corehq.apps.reports.filters.dates.DatespanFilter',
'corehq.apps.reports.filters.users.ChangeActionFilter',
'corehq.apps.reports.filters.users.UserPropertyFilter',
'corehq.apps.reports.filters.users.UserUploadRecordFilter',
]
description = ugettext_lazy("History of user updates")
ajax_pagination = True
default_sort = {'changed_at': 'desc'}
@classmethod
def get_primary_properties(cls, domain):
"""
Get slugs and human-friendly names for the properties that are available
for filtering and/or displayed by default in the report, without
needing to click "See More".
"""
if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
user_data_label = _("profile or user data")
else:
user_data_label = _("user data")
return {
"username": _("username"),
ROLE_FIELD: _("role"),
"email": _("email"),
DOMAIN_FIELD: _("project"),
"is_active": _("is active"),
"language": _("language"),
PHONE_NUMBERS_FIELD: _("phone numbers"),
LOCATION_FIELD: _("primary location"),
"user_data": user_data_label,
TWO_FACTOR_FIELD: _("two factor authentication disabled"),
ASSIGNED_LOCATIONS_FIELD: _("assigned locations"),
}
@property
def headers(self):
h = [
DataTablesColumn(_("Affected User"), sortable=False),
DataTablesColumn(_("Modified by User"), sortable=False),
DataTablesColumn(_("Action"), prop_name='action'),
DataTablesColumn(_("Via"), prop_name='changed_via'),
DataTablesColumn(_("Changes"), sortable=False),
DataTablesColumn(_("Change Message"), sortable=False),
DataTablesColumn(_("Timestamp"), prop_name='changed_at'),
]
return DataTablesHeader(*h)
@property
def total_records(self):
return self._get_queryset().count()
@memoized
def _get_queryset(self):
user_slugs = self.request.GET.getlist(EMWF.slug)
user_ids = self._get_user_ids(user_slugs)
# return empty queryset if no matching users were found
if user_slugs and not user_ids:
return UserHistory.objects.none()
changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug)
changed_by_user_ids = self._get_user_ids(changed_by_user_slugs)
# return empty queryset if no matching users were found
if changed_by_user_slugs and not changed_by_user_ids:
return UserHistory.objects.none()
user_property = self.request.GET.get('user_property')
actions = self.request.GET.getlist('action')
user_upload_record_id = self.request.GET.get('user_upload_record')
query = self._build_query(user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id)
return query
def _get_user_ids(self, slugs):
es_query = self._get_users_es_query(slugs)
return es_query.values_list('_id', flat=True)
def _get_users_es_query(self, slugs):
return EnterpriseUserFilter.user_es_query(
self.domain,
slugs,
self.request.couch_user,
)
def
|
(self, user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id):
filters = Q(for_domain__in=self._for_domains())
if user_ids:
filters = filters & Q(user_id__in=user_ids)
if changed_by_user_ids:
filters = filters & Q(changed_by__in=changed_by_user_ids)
if user_property:
filters = filters & self._get_property_filters(user_property)
if actions and ChangeActionFilter.ALL not in actions:
filters = filters & Q(action__in=actions)
if user_upload_record_id:
filters = filters & Q(user_upload_record_id=user_upload_record_id)
if self.datespan:
filters = filters & Q(changed_at__lt=self.datespan.enddate_adjusted,
changed_at__gte=self.datespan.startdate)
return UserHistory.objects.filter(filters)
def _for_domains(self):
return BillingAccount.get_account_by_domain(self.domain).get_domains()
@staticmethod
def _get_property_filters(user_property):
if user_property in CHANGE_MESSAGES_FIELDS:
query_filters = Q(change_messages__has_key=user_property)
# to include CommCareUser creation from UI where a location can be assigned as a part of user creation
# which is tracked only under "changes" and not "change messages"
if user_property == LOCATION_FIELD:
query_filters = query_filters | Q(changes__has_key='location_id')
else:
query_filters = Q(changes__has_key=user_property)
return query_filters
@property
def rows(self):
records = self._get_queryset().order_by(self.ordering)[
self.pagination.start:self.pagination.start + self.pagination.count
]
for record in records:
yield self._user_history_row(record, self.domain, self.timezone)
@property
def ordering(self):
by, direction = list(self.get_sorting_block()[0].items())[0]
return '-' + by if direction == 'desc' else by
@memoized
def _get_location_name(self, location_id):
from corehq.apps.locations.models import SQLLocation
if not location_id:
return None
try:
location_object = SQLLocation.objects.get(location_id=location_id)
except ObjectDoesNotExist:
return None
return location_object.display_name
def _user_history_row(self, record, domain, timezone):
return [
record.user_repr,
record.changed_by_repr,
_get_action_display(record.action),
record.changed_via,
self._user_history_details_cell(record.changes, domain),
self._html_list(list(get_messages(record.change_messages))),
ServerTime(record.changed_at).user_time(timezone).ui_string(USER_DATETIME_FORMAT),
]
def _html_list(self, changes):
items = []
if isinstance(changes, dict):
for key, value in changes.items():
if isinstance(value, dict):
value = self._html_list(value)
elif isinstance(value, list):
value = format_html(", ".join(value))
else:
value = format_html(str(value))
items.append("<li>{}: {}</li>".format(key, value))
elif isinstance(changes, list):
items = ["<li>{}</li>".format(format_html(change)) for change in changes]
return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>")
def _user_history_details_cell(self, changes, domain):
properties = UserHistoryReport.get_primary_properties(domain)
properties.pop("user_data", None)
primary_changes = {}
all_changes = {}
for key, value in changes.items():
if key == 'location_id':
value = self._get_location_name(value)
primary_changes[properties[LOCATION_FIELD]] = value
all_changes[properties[LOCATION_FIELD]] = value
elif key == 'user_data':
for user_data_key, user_data_value in changes['user_data'].items():
all_changes[f"user data: {user_data_key}"] = user_data_value
elif key in properties:
primary_changes[properties[key]] = value
all_changes[properties[key]] = value
more_count = len(all_changes) - len(primary_changes)
return render_to_string("reports/standard/partials/user_history_changes.html", {
"primary_changes": self._html_list(primary_changes),
"all_changes": self._html_list(all_changes),
"more_count": more_count,
})
def _get_action_display(logged_action):
action = ugettext_lazy("Updated")
if logged_action == UserHistory.CREATE:
action = ugettext_lazy("Added")
elif logged_action == UserHistory.DELETE:
action = ugettext_lazy("Deleted")
return action
|
_build_query
|
identifier_name
|
reports.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from memoized import memoized
from corehq import privileges
from corehq.apps.accounting.models import BillingAccount
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.dispatcher import UserManagementReportDispatcher
from corehq.apps.reports.filters.users import (
ChangeActionFilter,
ChangedByUserFilter,
EnterpriseUserFilter,
)
from corehq.apps.reports.filters.users import \
ExpandedMobileWorkerFilter as EMWF
from corehq.apps.reports.generic import GenericTabularReport, GetParamsMixin, PaginatedReportMixin
from corehq.apps.reports.standard import DatespanMixin, ProjectReport
from corehq.apps.users.audit.change_messages import (
ASSIGNED_LOCATIONS_FIELD,
CHANGE_MESSAGES_FIELDS,
DOMAIN_FIELD,
LOCATION_FIELD,
PHONE_NUMBERS_FIELD,
ROLE_FIELD,
TWO_FACTOR_FIELD,
get_messages,
)
from corehq.apps.users.models import UserHistory
from corehq.const import USER_DATETIME_FORMAT
from corehq.util.timezones.conversions import ServerTime
class UserHistoryReport(GetParamsMixin, DatespanMixin, GenericTabularReport, ProjectReport, PaginatedReportMixin):
slug = 'user_history'
name = ugettext_lazy("User History")
section_name = ugettext_lazy("User Management")
dispatcher = UserManagementReportDispatcher
fields = [
'corehq.apps.reports.filters.users.AffectedUserFilter',
'corehq.apps.reports.filters.users.ChangedByUserFilter',
'corehq.apps.reports.filters.dates.DatespanFilter',
'corehq.apps.reports.filters.users.ChangeActionFilter',
'corehq.apps.reports.filters.users.UserPropertyFilter',
'corehq.apps.reports.filters.users.UserUploadRecordFilter',
]
description = ugettext_lazy("History of user updates")
ajax_pagination = True
default_sort = {'changed_at': 'desc'}
@classmethod
def get_primary_properties(cls, domain):
"""
Get slugs and human-friendly names for the properties that are available
for filtering and/or displayed by default in the report, without
needing to click "See More".
"""
if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
user_data_label = _("profile or user data")
else:
user_data_label = _("user data")
return {
"username": _("username"),
ROLE_FIELD: _("role"),
"email": _("email"),
DOMAIN_FIELD: _("project"),
"is_active": _("is active"),
"language": _("language"),
PHONE_NUMBERS_FIELD: _("phone numbers"),
LOCATION_FIELD: _("primary location"),
"user_data": user_data_label,
TWO_FACTOR_FIELD: _("two factor authentication disabled"),
ASSIGNED_LOCATIONS_FIELD: _("assigned locations"),
}
@property
def headers(self):
h = [
DataTablesColumn(_("Affected User"), sortable=False),
DataTablesColumn(_("Modified by User"), sortable=False),
DataTablesColumn(_("Action"), prop_name='action'),
DataTablesColumn(_("Via"), prop_name='changed_via'),
DataTablesColumn(_("Changes"), sortable=False),
DataTablesColumn(_("Change Message"), sortable=False),
DataTablesColumn(_("Timestamp"), prop_name='changed_at'),
]
return DataTablesHeader(*h)
@property
def total_records(self):
return self._get_queryset().count()
@memoized
def _get_queryset(self):
user_slugs = self.request.GET.getlist(EMWF.slug)
user_ids = self._get_user_ids(user_slugs)
# return empty queryset if no matching users were found
if user_slugs and not user_ids:
return UserHistory.objects.none()
changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug)
changed_by_user_ids = self._get_user_ids(changed_by_user_slugs)
# return empty queryset if no matching users were found
if changed_by_user_slugs and not changed_by_user_ids:
return UserHistory.objects.none()
user_property = self.request.GET.get('user_property')
actions = self.request.GET.getlist('action')
user_upload_record_id = self.request.GET.get('user_upload_record')
query = self._build_query(user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id)
return query
def _get_user_ids(self, slugs):
es_query = self._get_users_es_query(slugs)
return es_query.values_list('_id', flat=True)
def _get_users_es_query(self, slugs):
return EnterpriseUserFilter.user_es_query(
self.domain,
slugs,
self.request.couch_user,
)
def _build_query(self, user_ids, changed_by_user_ids, user_property, actions, user_upload_record_id):
filters = Q(for_domain__in=self._for_domains())
if user_ids:
filters = filters & Q(user_id__in=user_ids)
if changed_by_user_ids:
filters = filters & Q(changed_by__in=changed_by_user_ids)
if user_property:
filters = filters & self._get_property_filters(user_property)
if actions and ChangeActionFilter.ALL not in actions:
filters = filters & Q(action__in=actions)
if user_upload_record_id:
|
if self.datespan:
filters = filters & Q(changed_at__lt=self.datespan.enddate_adjusted,
changed_at__gte=self.datespan.startdate)
return UserHistory.objects.filter(filters)
def _for_domains(self):
return BillingAccount.get_account_by_domain(self.domain).get_domains()
@staticmethod
def _get_property_filters(user_property):
if user_property in CHANGE_MESSAGES_FIELDS:
query_filters = Q(change_messages__has_key=user_property)
# to include CommCareUser creation from UI where a location can be assigned as a part of user creation
# which is tracked only under "changes" and not "change messages"
if user_property == LOCATION_FIELD:
query_filters = query_filters | Q(changes__has_key='location_id')
else:
query_filters = Q(changes__has_key=user_property)
return query_filters
@property
def rows(self):
records = self._get_queryset().order_by(self.ordering)[
self.pagination.start:self.pagination.start + self.pagination.count
]
for record in records:
yield self._user_history_row(record, self.domain, self.timezone)
@property
def ordering(self):
by, direction = list(self.get_sorting_block()[0].items())[0]
return '-' + by if direction == 'desc' else by
@memoized
def _get_location_name(self, location_id):
from corehq.apps.locations.models import SQLLocation
if not location_id:
return None
try:
location_object = SQLLocation.objects.get(location_id=location_id)
except ObjectDoesNotExist:
return None
return location_object.display_name
def _user_history_row(self, record, domain, timezone):
return [
record.user_repr,
record.changed_by_repr,
_get_action_display(record.action),
record.changed_via,
self._user_history_details_cell(record.changes, domain),
self._html_list(list(get_messages(record.change_messages))),
ServerTime(record.changed_at).user_time(timezone).ui_string(USER_DATETIME_FORMAT),
]
def _html_list(self, changes):
items = []
if isinstance(changes, dict):
for key, value in changes.items():
if isinstance(value, dict):
value = self._html_list(value)
elif isinstance(value, list):
value = format_html(", ".join(value))
else:
value = format_html(str(value))
items.append("<li>{}: {}</li>".format(key, value))
elif isinstance(changes, list):
items = ["<li>{}</li>".format(format_html(change)) for change in changes]
return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>")
def _user_history_details_cell(self, changes, domain):
properties = UserHistoryReport.get_primary_properties(domain)
properties.pop("user_data", None)
primary_changes = {}
all_changes = {}
for key, value in changes.items():
if key == 'location_id':
value = self._get_location_name(value)
primary_changes[properties[LOCATION_FIELD]] = value
all_changes[properties[LOCATION_FIELD]] = value
elif key == 'user_data':
for user_data_key, user_data_value in changes['user_data'].items():
all_changes[f"user data: {user_data_key}"] = user_data_value
elif key in properties:
primary_changes[properties[key]] = value
all_changes[properties[key]] = value
more_count = len(all_changes) - len(primary_changes)
return render_to_string("reports/standard/partials/user_history_changes.html", {
"primary_changes": self._html_list(primary_changes),
"all_changes": self._html_list(all_changes),
"more_count": more_count,
})
def _get_action_display(logged_action):
action = ugettext_lazy("Updated")
if logged_action == UserHistory.CREATE:
action = ugettext_lazy("Added")
elif logged_action == UserHistory.DELETE:
action = ugettext_lazy("Deleted")
return action
|
filters = filters & Q(user_upload_record_id=user_upload_record_id)
|
conditional_block
|
text_info.rs
|
use std::ops::{Add, AddAssign, Sub, SubAssign};
use crate::str_utils::{count_chars, count_line_breaks, count_utf16_surrogates};
use crate::tree::Count;
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextInfo {
pub(crate) bytes: Count,
pub(crate) chars: Count,
pub(crate) utf16_surrogates: Count,
pub(crate) line_breaks: Count,
}
|
impl TextInfo {
#[inline]
pub fn new() -> TextInfo {
TextInfo {
bytes: 0,
chars: 0,
utf16_surrogates: 0,
line_breaks: 0,
}
}
#[inline]
pub fn from_str(text: &str) -> TextInfo {
TextInfo {
bytes: text.len() as Count,
chars: count_chars(text) as Count,
utf16_surrogates: count_utf16_surrogates(text) as Count,
line_breaks: count_line_breaks(text) as Count,
}
}
}
impl Add for TextInfo {
type Output = Self;
#[inline]
fn add(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes + rhs.bytes,
chars: self.chars + rhs.chars,
utf16_surrogates: self.utf16_surrogates + rhs.utf16_surrogates,
line_breaks: self.line_breaks + rhs.line_breaks,
}
}
}
impl AddAssign for TextInfo {
#[inline]
fn add_assign(&mut self, other: TextInfo) {
*self = *self + other;
}
}
impl Sub for TextInfo {
type Output = Self;
#[inline]
fn sub(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes - rhs.bytes,
chars: self.chars - rhs.chars,
utf16_surrogates: self.utf16_surrogates - rhs.utf16_surrogates,
line_breaks: self.line_breaks - rhs.line_breaks,
}
}
}
impl SubAssign for TextInfo {
#[inline]
fn sub_assign(&mut self, other: TextInfo) {
*self = *self - other;
}
}
|
random_line_split
|
|
text_info.rs
|
use std::ops::{Add, AddAssign, Sub, SubAssign};
use crate::str_utils::{count_chars, count_line_breaks, count_utf16_surrogates};
use crate::tree::Count;
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextInfo {
pub(crate) bytes: Count,
pub(crate) chars: Count,
pub(crate) utf16_surrogates: Count,
pub(crate) line_breaks: Count,
}
impl TextInfo {
#[inline]
pub fn new() -> TextInfo {
TextInfo {
bytes: 0,
chars: 0,
utf16_surrogates: 0,
line_breaks: 0,
}
}
#[inline]
pub fn from_str(text: &str) -> TextInfo
|
}
impl Add for TextInfo {
type Output = Self;
#[inline]
fn add(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes + rhs.bytes,
chars: self.chars + rhs.chars,
utf16_surrogates: self.utf16_surrogates + rhs.utf16_surrogates,
line_breaks: self.line_breaks + rhs.line_breaks,
}
}
}
impl AddAssign for TextInfo {
#[inline]
fn add_assign(&mut self, other: TextInfo) {
*self = *self + other;
}
}
impl Sub for TextInfo {
type Output = Self;
#[inline]
fn sub(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes - rhs.bytes,
chars: self.chars - rhs.chars,
utf16_surrogates: self.utf16_surrogates - rhs.utf16_surrogates,
line_breaks: self.line_breaks - rhs.line_breaks,
}
}
}
impl SubAssign for TextInfo {
#[inline]
fn sub_assign(&mut self, other: TextInfo) {
*self = *self - other;
}
}
|
{
TextInfo {
bytes: text.len() as Count,
chars: count_chars(text) as Count,
utf16_surrogates: count_utf16_surrogates(text) as Count,
line_breaks: count_line_breaks(text) as Count,
}
}
|
identifier_body
|
text_info.rs
|
use std::ops::{Add, AddAssign, Sub, SubAssign};
use crate::str_utils::{count_chars, count_line_breaks, count_utf16_surrogates};
use crate::tree::Count;
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct TextInfo {
pub(crate) bytes: Count,
pub(crate) chars: Count,
pub(crate) utf16_surrogates: Count,
pub(crate) line_breaks: Count,
}
impl TextInfo {
#[inline]
pub fn new() -> TextInfo {
TextInfo {
bytes: 0,
chars: 0,
utf16_surrogates: 0,
line_breaks: 0,
}
}
#[inline]
pub fn from_str(text: &str) -> TextInfo {
TextInfo {
bytes: text.len() as Count,
chars: count_chars(text) as Count,
utf16_surrogates: count_utf16_surrogates(text) as Count,
line_breaks: count_line_breaks(text) as Count,
}
}
}
impl Add for TextInfo {
type Output = Self;
#[inline]
fn add(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes + rhs.bytes,
chars: self.chars + rhs.chars,
utf16_surrogates: self.utf16_surrogates + rhs.utf16_surrogates,
line_breaks: self.line_breaks + rhs.line_breaks,
}
}
}
impl AddAssign for TextInfo {
#[inline]
fn
|
(&mut self, other: TextInfo) {
*self = *self + other;
}
}
impl Sub for TextInfo {
type Output = Self;
#[inline]
fn sub(self, rhs: TextInfo) -> TextInfo {
TextInfo {
bytes: self.bytes - rhs.bytes,
chars: self.chars - rhs.chars,
utf16_surrogates: self.utf16_surrogates - rhs.utf16_surrogates,
line_breaks: self.line_breaks - rhs.line_breaks,
}
}
}
impl SubAssign for TextInfo {
#[inline]
fn sub_assign(&mut self, other: TextInfo) {
*self = *self - other;
}
}
|
add_assign
|
identifier_name
|
create-api.component.ts
|
import {
Component
} from '@angular/core';
import {
JsonDataService
} from '../../Services/jsonData.service';
var validator = require('../../../lib/lib-validator');
var selectKeysObj = require('../../../lib/lib-selectKey');
@Component({
selector: 'create-api',
templateUrl: './create-api.component.html',
styleUrls: ['./create-api.component.scss'],
providers: [JsonDataService]
})
export class CreateApiComponent {
private _rapidPage: Object;
private _errorMsg: string;
enablesKeys: [string];
selectKey : string;
private _jsonString = '';
private _describe = '';
constructor(private jsonDataService: JsonDataService) {}
onSaveData(): void {
let jsonString = this.jsonStringValue;
let describe = this.describeValue;
validator.config = {
jsonString: 'isArrayAndHaveData',
describe: 'isNonEmpty'
}
var validResult = validator.validate({
"jsonString": jsonString,
"describe": describe
});
if (validResult) {
var result = this.jsonDataService.createJsonData(jsonString, describe,this.selectKey);
} else {
alert('Please enter correctly data');
}
}
onShowSelectKey(): void {
let jsonString = this.jsonStringValue;
let keys = selectKeysObj.selectEnablesKeys(jsonString);
this.enablesKeys = keys;
}
onSelectKey(event: any): void {
var selectedClass = 'keysModal__keyButton--active';
|
this.selectKey = "";
if (oldSelectElement.length !== 0) {
oldSelectElement[0].classList.remove(selectedClass);
}
if (!element.classList.contains(selectedClass)) {
this.selectKey = element.textContent;
element.classList.add(selectedClass);
}
}
onSelectPrettyJson(): void {
//this.jsonString = JSON.stringify(this.jsonString, null, 2);
}
get describeValue() {
return this._describe;
}
set describeValue(s: string) {
this._describe = s;
}
get jsonStringValue() {
return this._jsonString;
}
set jsonStringValue(v: string) {
try {
this._jsonString = JSON.parse(v);
this._errorMsg = "";
var element = document.getElementsByClassName('json__createButton')[0];
element["disabled"] = false;
element.classList.add('json__createButton--active');
} catch (e) {
this._errorMsg = "error this string not json";
};
}
get errorMsgValue() {
return this._errorMsg;
}
}
|
var element = event.target;
var oldSelectElement = document.getElementsByClassName(selectedClass);
|
random_line_split
|
create-api.component.ts
|
import {
Component
} from '@angular/core';
import {
JsonDataService
} from '../../Services/jsonData.service';
var validator = require('../../../lib/lib-validator');
var selectKeysObj = require('../../../lib/lib-selectKey');
@Component({
selector: 'create-api',
templateUrl: './create-api.component.html',
styleUrls: ['./create-api.component.scss'],
providers: [JsonDataService]
})
export class CreateApiComponent {
private _rapidPage: Object;
private _errorMsg: string;
enablesKeys: [string];
selectKey : string;
private _jsonString = '';
private _describe = '';
constructor(private jsonDataService: JsonDataService) {}
onSaveData(): void {
let jsonString = this.jsonStringValue;
let describe = this.describeValue;
validator.config = {
jsonString: 'isArrayAndHaveData',
describe: 'isNonEmpty'
}
var validResult = validator.validate({
"jsonString": jsonString,
"describe": describe
});
if (validResult)
|
else {
alert('Please enter correctly data');
}
}
onShowSelectKey(): void {
let jsonString = this.jsonStringValue;
let keys = selectKeysObj.selectEnablesKeys(jsonString);
this.enablesKeys = keys;
}
onSelectKey(event: any): void {
var selectedClass = 'keysModal__keyButton--active';
var element = event.target;
var oldSelectElement = document.getElementsByClassName(selectedClass);
this.selectKey = "";
if (oldSelectElement.length !== 0) {
oldSelectElement[0].classList.remove(selectedClass);
}
if (!element.classList.contains(selectedClass)) {
this.selectKey = element.textContent;
element.classList.add(selectedClass);
}
}
onSelectPrettyJson(): void {
//this.jsonString = JSON.stringify(this.jsonString, null, 2);
}
get describeValue() {
return this._describe;
}
set describeValue(s: string) {
this._describe = s;
}
get jsonStringValue() {
return this._jsonString;
}
set jsonStringValue(v: string) {
try {
this._jsonString = JSON.parse(v);
this._errorMsg = "";
var element = document.getElementsByClassName('json__createButton')[0];
element["disabled"] = false;
element.classList.add('json__createButton--active');
} catch (e) {
this._errorMsg = "error this string not json";
};
}
get errorMsgValue() {
return this._errorMsg;
}
}
|
{
var result = this.jsonDataService.createJsonData(jsonString, describe,this.selectKey);
}
|
conditional_block
|
create-api.component.ts
|
import {
Component
} from '@angular/core';
import {
JsonDataService
} from '../../Services/jsonData.service';
var validator = require('../../../lib/lib-validator');
var selectKeysObj = require('../../../lib/lib-selectKey');
@Component({
selector: 'create-api',
templateUrl: './create-api.component.html',
styleUrls: ['./create-api.component.scss'],
providers: [JsonDataService]
})
export class CreateApiComponent {
private _rapidPage: Object;
private _errorMsg: string;
enablesKeys: [string];
selectKey : string;
private _jsonString = '';
private _describe = '';
constructor(private jsonDataService: JsonDataService) {}
onSaveData(): void {
let jsonString = this.jsonStringValue;
let describe = this.describeValue;
validator.config = {
jsonString: 'isArrayAndHaveData',
describe: 'isNonEmpty'
}
var validResult = validator.validate({
"jsonString": jsonString,
"describe": describe
});
if (validResult) {
var result = this.jsonDataService.createJsonData(jsonString, describe,this.selectKey);
} else {
alert('Please enter correctly data');
}
}
onShowSelectKey(): void {
let jsonString = this.jsonStringValue;
let keys = selectKeysObj.selectEnablesKeys(jsonString);
this.enablesKeys = keys;
}
onSelectKey(event: any): void {
var selectedClass = 'keysModal__keyButton--active';
var element = event.target;
var oldSelectElement = document.getElementsByClassName(selectedClass);
this.selectKey = "";
if (oldSelectElement.length !== 0) {
oldSelectElement[0].classList.remove(selectedClass);
}
if (!element.classList.contains(selectedClass)) {
this.selectKey = element.textContent;
element.classList.add(selectedClass);
}
}
onSelectPrettyJson(): void {
//this.jsonString = JSON.stringify(this.jsonString, null, 2);
}
get describeValue() {
return this._describe;
}
set describeValue(s: string) {
this._describe = s;
}
get jsonStringValue() {
return this._jsonString;
}
set jsonStringValue(v: string)
|
get errorMsgValue() {
return this._errorMsg;
}
}
|
{
try {
this._jsonString = JSON.parse(v);
this._errorMsg = "";
var element = document.getElementsByClassName('json__createButton')[0];
element["disabled"] = false;
element.classList.add('json__createButton--active');
} catch (e) {
this._errorMsg = "error this string not json";
};
}
|
identifier_body
|
create-api.component.ts
|
import {
Component
} from '@angular/core';
import {
JsonDataService
} from '../../Services/jsonData.service';
var validator = require('../../../lib/lib-validator');
var selectKeysObj = require('../../../lib/lib-selectKey');
@Component({
selector: 'create-api',
templateUrl: './create-api.component.html',
styleUrls: ['./create-api.component.scss'],
providers: [JsonDataService]
})
export class CreateApiComponent {
private _rapidPage: Object;
private _errorMsg: string;
enablesKeys: [string];
selectKey : string;
private _jsonString = '';
private _describe = '';
constructor(private jsonDataService: JsonDataService) {}
onSaveData(): void {
let jsonString = this.jsonStringValue;
let describe = this.describeValue;
validator.config = {
jsonString: 'isArrayAndHaveData',
describe: 'isNonEmpty'
}
var validResult = validator.validate({
"jsonString": jsonString,
"describe": describe
});
if (validResult) {
var result = this.jsonDataService.createJsonData(jsonString, describe,this.selectKey);
} else {
alert('Please enter correctly data');
}
}
onShowSelectKey(): void {
let jsonString = this.jsonStringValue;
let keys = selectKeysObj.selectEnablesKeys(jsonString);
this.enablesKeys = keys;
}
onSelectKey(event: any): void {
var selectedClass = 'keysModal__keyButton--active';
var element = event.target;
var oldSelectElement = document.getElementsByClassName(selectedClass);
this.selectKey = "";
if (oldSelectElement.length !== 0) {
oldSelectElement[0].classList.remove(selectedClass);
}
if (!element.classList.contains(selectedClass)) {
this.selectKey = element.textContent;
element.classList.add(selectedClass);
}
}
onSelectPrettyJson(): void {
//this.jsonString = JSON.stringify(this.jsonString, null, 2);
}
get
|
() {
return this._describe;
}
set describeValue(s: string) {
this._describe = s;
}
get jsonStringValue() {
return this._jsonString;
}
set jsonStringValue(v: string) {
try {
this._jsonString = JSON.parse(v);
this._errorMsg = "";
var element = document.getElementsByClassName('json__createButton')[0];
element["disabled"] = false;
element.classList.add('json__createButton--active');
} catch (e) {
this._errorMsg = "error this string not json";
};
}
get errorMsgValue() {
return this._errorMsg;
}
}
|
describeValue
|
identifier_name
|
issue-15381.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main()
|
{
let values: Vec<u8> = vec![1,2,3,4,5,6,7,8];
for
[x,y,z]
//~^ ERROR refutable pattern in `for` loop binding: `[]` not covered
in values.as_slice().chunks(3).filter(|&xs| xs.len() == 3) {
println!("y={}", y);
}
}
|
identifier_body
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.