file_name (large_stringlengths 4–140) | prefix (large_stringlengths 0–39k) | suffix (large_stringlengths 0–36.1k) | middle (large_stringlengths 0–29.4k) | fim_type (large_stringclasses, 4 values)
---|---|---|---|---|
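Each row below pairs a code `prefix` and `suffix` with the held-out `middle` and a `fim_type` label, which takes one of the four classes seen in the rows (conditional_block, identifier_body, identifier_name, random_line_split). As a sketch of how a row might be assembled into a fill-in-the-middle training string — the PSM-style sentinel tokens here are illustrative assumptions, not fields of this dataset:

```python
# Minimal sketch of assembling one row into a FIM training string; the
# PSM-style sentinel tokens are illustrative assumptions, not dataset fields.
FIM_PREFIX, FIM_SUFFIX, FIM_MIDDLE = "<fim_prefix>", "<fim_suffix>", "<fim_middle>"

def to_fim_string(row):
    """Concatenate prefix, suffix, and held-out middle around the sentinels."""
    return (FIM_PREFIX + row["prefix"]
            + FIM_SUFFIX + row["suffix"]
            + FIM_MIDDLE + row["middle"])

example = {"prefix": "def add(a, b):\n    return ", "suffix": "\n", "middle": "a + b"}
print(to_fim_string(example))
```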
util.py | import functools
import logging
import os
import random
import sys
import time
from gym import error
logger = logging.getLogger(__name__)
def utf8(value):
if isinstance(value, unicode) and sys.version_info < (3, 0):
|
else:
return value
def file_size(f):
return os.fstat(f.fileno()).st_size
def retry_exponential_backoff(f, errors, max_retries=5, interval=1):
@functools.wraps(f)
def wrapped(*args, **kwargs):
num_retries = 0
caught_errors = []
while True:
try:
result = f(*args, **kwargs)
except errors as e:
logger.error("Caught error in %s: %s" % (f.__name__, e))
caught_errors.append(e)
if num_retries < max_retries:
backoff = random.randint(1, 2 ** num_retries) * interval
logger.error("Retrying in %.1fs..." % backoff)
time.sleep(backoff)
num_retries += 1
else:
msg = "Exceeded allowed retries. Here are the individual error messages:\n\n"
msg += "\n\n".join("%s: %s" % (type(e).__name__, str(e)) for e in caught_errors)
raise error.RetriesExceededError(msg)
else:
break
return result
return wrapped
| return value.encode('utf-8') | conditional_block |
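The `retry_exponential_backoff` helper in the row above wraps a callable so that the listed error types trigger a randomized, exponentially growing sleep before the next attempt. A hypothetical usage sketch, assuming the `util.py` above is importable (it depends on the `gym` package for its error type); `flaky_fetch` and its failure mode are invented for illustration:

```python
# Hypothetical usage sketch; flaky_fetch and its failure mode are invented.
# Assumes the util.py shown above is importable (it needs the gym package).
from util import retry_exponential_backoff

state = {"attempts": 0}

def flaky_fetch():
    state["attempts"] += 1
    if state["attempts"] < 3:
        raise IOError("transient failure")
    return "payload"

# Retry on IOError, sleeping a jittered, doubling interval between attempts.
fetch = retry_exponential_backoff(flaky_fetch, (IOError,), max_retries=5, interval=1)
print(fetch())  # succeeds on the third attempt after two backoff sleeps
```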
util.py | import functools
import logging
import os
import random
import sys
import time
from gym import error
logger = logging.getLogger(__name__)
def utf8(value):
if isinstance(value, unicode) and sys.version_info < (3, 0):
return value.encode('utf-8')
else:
return value
def file_size(f):
|
def retry_exponential_backoff(f, errors, max_retries=5, interval=1):
@functools.wraps(f)
def wrapped(*args, **kwargs):
num_retries = 0
caught_errors = []
while True:
try:
result = f(*args, **kwargs)
except errors as e:
logger.error("Caught error in %s: %s" % (f.__name__, e))
caught_errors.append(e)
if num_retries < max_retries:
backoff = random.randint(1, 2 ** num_retries) * interval
logger.error("Retrying in %.1fs..." % backoff)
time.sleep(backoff)
num_retries += 1
else:
msg = "Exceeded allowed retries. Here are the individual error messages:\n\n"
msg += "\n\n".join("%s: %s" % (type(e).__name__, str(e)) for e in caught_errors)
raise error.RetriesExceededError(msg)
else:
break
return result
return wrapped
| return os.fstat(f.fileno()).st_size | identifier_body |
util.py | import functools
import logging
import os
import random
import sys
import time
from gym import error
logger = logging.getLogger(__name__)
def utf8(value):
if isinstance(value, unicode) and sys.version_info < (3, 0):
return value.encode('utf-8')
else:
return value
def file_size(f):
return os.fstat(f.fileno()).st_size
def | (f, errors, max_retries=5, interval=1):
@functools.wraps(f)
def wrapped(*args, **kwargs):
num_retries = 0
caught_errors = []
while True:
try:
result = f(*args, **kwargs)
except errors as e:
logger.error("Caught error in %s: %s" % (f.__name__, e))
caught_errors.append(e)
if num_retries < max_retries:
backoff = random.randint(1, 2 ** num_retries) * interval
logger.error("Retrying in %.1fs..." % backoff)
time.sleep(backoff)
num_retries += 1
else:
msg = "Exceeded allowed retries. Here are the individual error messages:\n\n"
msg += "\n\n".join("%s: %s" % (type(e).__name__, str(e)) for e in caught_errors)
raise error.RetriesExceededError(msg)
else:
break
return result
return wrapped
| retry_exponential_backoff | identifier_name |
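The backoff line `random.randint(1, 2 ** num_retries) * interval` draws a jittered wait whose ceiling doubles on every failed attempt. A small worked sketch of that schedule:

```python
# Worked sketch of the backoff bounds used above: attempt k sleeps
# random.randint(1, 2 ** k) * interval seconds, so the ceiling doubles.
interval = 1
for k in range(5):
    print(f"retry {k}: wait between {interval}s and {2 ** k * interval}s")
# retry 0: 1..1s, retry 1: 1..2s, retry 2: 1..4s, retry 3: 1..8s, retry 4: 1..16s
```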
util.py | import functools
import logging
import os
import random
import sys
import time
from gym import error
logger = logging.getLogger(__name__)
def utf8(value):
if isinstance(value, unicode) and sys.version_info < (3, 0):
return value.encode('utf-8')
else:
return value
def file_size(f):
return os.fstat(f.fileno()).st_size
def retry_exponential_backoff(f, errors, max_retries=5, interval=1):
@functools.wraps(f)
def wrapped(*args, **kwargs):
num_retries = 0
caught_errors = []
while True:
try:
result = f(*args, **kwargs)
except errors as e:
logger.error("Caught error in %s: %s" % (f.__name__, e))
caught_errors.append(e)
if num_retries < max_retries: | backoff = random.randint(1, 2 ** num_retries) * interval
logger.error("Retrying in %.1fs..." % backoff)
time.sleep(backoff)
num_retries += 1
else:
msg = "Exceeded allowed retries. Here are the individual error messages:\n\n"
msg += "\n\n".join("%s: %s" % (type(e).__name__, str(e)) for e in caught_errors)
raise error.RetriesExceededError(msg)
else:
break
return result
return wrapped | random_line_split |
|
closure.lib.d.ts | // Workaround for https://github.com/Microsoft/TypeScript/issues/983
// All clutz namespaces are below ಠ_ಠ.clutz, thus
// this acts as global.
declare namespace ಠ_ಠ.clutz {
type GlobalError = Error;
var GlobalError: ErrorConstructor;
type GlobalEvent = Event;
var GlobalEvent: typeof Event;
/** Represents the type returned when goog.require-ing an unknown symbol */
type ClosureSymbolNotGoogProvided = void;
/** Represents a Closure type that is private, represented by an empty interface. */
type PrivateType = void;
/**
* Represents a Closure class that is private. Only used for extending. When in
* type position PrivateType is used.
*/
class PrivateClass {}
/**
* Represents a Closure interface that is private. Only used for extending/implementing. When in
* type position PrivateType is used.
*/
interface PrivateInterface {}
interface IObject<KEY1, VALUE> {} | declare namespace ಠ_ಠ.clutz.goog {
var __namespace_needs_to_be_non_value_empty__: void;
}
/**
* Global variable indicating whether the JavaScript code has been compiled.
* This variable is defined in Closure's base.js, but not on the `goog` namespace, which
 * means it has to be explicitly declared here, similar to `goog` itself above.
*/
declare var COMPILED: boolean; | }
// Will be extended if base.js is a dependency. | random_line_split |
closure.lib.d.ts | // Workaround for https://github.com/Microsoft/TypeScript/issues/983
// All clutz namespaces are below ಠ_ಠ.clutz, thus
// this acts as global.
declare namespace ಠ_ಠ.clutz {
type GlobalError = Error;
var GlobalError: ErrorConstructor;
type GlobalEvent = Event;
var GlobalEvent: typeof Event;
/** Represents the type returned when goog.require-ing an unknown symbol */
type ClosureSymbolNotGoogProvided = void;
/** Represents a Closure type that is private, represented by an empty interface. */
type PrivateType = void;
/**
* Represents a Closure class that is private. Only used for extending. When in
* type position PrivateType is used.
*/
class PrivateC | *
* Represents a Closure interface that is private. Only used for extending/implementing. When in
* type position PrivateType is used.
*/
interface PrivateInterface {}
interface IObject<KEY1, VALUE> {}
}
// Will be extended if base.js is a dependency.
declare namespace ಠ_ಠ.clutz.goog {
var __namespace_needs_to_be_non_value_empty__: void;
}
/**
* Global variable indicating whether the JavaScript code has been compiled.
* This variable is defined in Closure's base.js, but not on the `goog` namespace, which
 * means it has to be explicitly declared here, similar to `goog` itself above.
*/
declare var COMPILED: boolean;
| lass {}
/* | identifier_name |
spawn_task.rs | use super::*;
use crate::ok_or_shutdown;
use crate::state_helper::{pause_on_failure, save_state, LockedState};
impl TaskHandler {
/// See if we can start a new queued task.
pub fn spawn_new(&mut self) {
let cloned_state_mutex = self.state.clone();
let mut state = cloned_state_mutex.lock().unwrap();
// Check whether a new task can be started.
// Spawn tasks until we no longer have free slots available.
while let Some(id) = self.get_next_task_id(&state) {
self.start_process(id, &mut state);
}
}
/// Search and return the next task that can be started.
/// Precondition for a task to be started:
/// - is in Queued state
/// - There are free slots in the task's group
/// - The group is running
/// - has all its dependencies in `Done` state
pub fn get_next_task_id(&mut self, state: &LockedState) -> Option<usize> {
state
.tasks
.iter()
.filter(|(_, task)| task.status == TaskStatus::Queued)
.filter(|(_, task)| {
// Make sure the task is assigned to an existing group.
let group = match state.groups.get(&task.group) {
Some(group) => group,
None => {
error!(
"Got task with unknown group {}. Please report this!",
&task.group
);
return false;
}
};
// Let's check if the group is running. If it isn't, simply return false.
if group.status != GroupStatus::Running {
return false;
}
// Get the currently running tasks by looking at the actually running processes.
// They're sorted by group, which makes this quite convenient.
let running_tasks = match self.children.0.get(&task.group) {
Some(children) => children.len(),
None => {
error!(
"Got valid group {}, but no worker pool has been initialized. This is a bug!",
&task.group
);
return false
}
};
// Make sure there are free slots in the task's group
running_tasks < group.parallel_tasks
})
.find(|(_, task)| {
// Check whether all dependencies for this task are fulfilled.
task.dependencies
.iter()
.flat_map(|id| state.tasks.get(id))
.all(|task| matches!(task.status, TaskStatus::Done(TaskResult::Success)))
})
.map(|(id, _)| *id)
}
/// Actually spawn a new sub process
/// The output of subprocesses is piped into a separate file for easier access
pub fn start_process(&mut self, task_id: usize, state: &mut LockedState) |
}
| {
// Check if the task exists and can actually be spawned. Otherwise do an early return.
match state.tasks.get(&task_id) {
Some(task) => {
if !matches!(
&task.status,
TaskStatus::Stashed { .. } | TaskStatus::Queued | TaskStatus::Paused
) {
info!("Tried to start task with status: {}", task.status);
return;
}
}
None => {
info!("Tried to start non-existing task: {task_id}");
return;
}
};
// Try to get the log file to which the output of the process will be written to.
// Panic if this doesn't work! This is unrecoverable.
let (stdout_log, stderr_log) = match create_log_file_handles(task_id, &self.pueue_directory)
{
Ok((out, err)) => (out, err),
Err(err) => {
panic!("Failed to create child log files: {err:?}");
}
};
// Get all necessary info for starting the task
let (command, path, group, mut envs) = {
let task = state.tasks.get(&task_id).unwrap();
(
task.command.clone(),
task.path.clone(),
task.group.clone(),
task.envs.clone(),
)
};
// Build the shell command that should be executed.
let mut command = compile_shell_command(&command);
// Determine the worker's id depending on the current group.
// Inject that info into the environment.
let worker_id = self.children.get_next_group_worker(&group);
envs.insert("PUEUE_GROUP".into(), group.clone());
envs.insert("PUEUE_WORKER_ID".into(), worker_id.to_string());
// Spawn the actual subprocess
let spawned_command = command
.current_dir(path)
.stdin(Stdio::piped())
.envs(envs.clone())
.stdout(Stdio::from(stdout_log))
.stderr(Stdio::from(stderr_log))
.spawn();
// Check if the task managed to spawn
let child = match spawned_command {
Ok(child) => child,
Err(err) => {
let error = format!("Failed to spawn child {task_id} with err: {err:?}");
error!("{}", error);
clean_log_handles(task_id, &self.pueue_directory);
// Update all necessary fields on the task.
let group = {
let task = state.tasks.get_mut(&task_id).unwrap();
task.status = TaskStatus::Done(TaskResult::FailedToSpawn(error));
task.start = Some(Local::now());
task.end = Some(Local::now());
self.spawn_callback(task);
task.group.clone()
};
pause_on_failure(state, &group);
ok_or_shutdown!(self, save_state(state));
return;
}
};
// Save the process handle in our self.children data structure.
self.children.add_child(&group, worker_id, task_id, child);
let task = state.tasks.get_mut(&task_id).unwrap();
task.start = Some(Local::now());
task.status = TaskStatus::Running;
// Overwrite the task's environment variables with the new ones, containing the
// PUEUE_WORKER_ID and PUEUE_GROUP variables.
task.envs = envs;
info!("Started task: {}", task.command);
ok_or_shutdown!(self, save_state(state));
} | identifier_body |
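`get_next_task_id` in the row above only schedules a task that is queued, whose group exists and is running with a free slot, and whose dependencies all finished successfully. A simplified Python model of that filter — the dict-based state is an illustrative assumption, not pueue's actual types:

```python
# Simplified model of the scheduling filter above; the dict-based state is
# an illustrative assumption, not pueue's actual data structures.
def next_task_id(tasks, groups, running_per_group):
    for tid, task in tasks.items():
        if task["status"] != "queued":
            continue
        group = groups.get(task["group"])
        if group is None or not group["running"]:
            continue  # unknown or paused group
        if running_per_group.get(task["group"], 0) >= group["parallel_tasks"]:
            continue  # no free slot left in this group
        if all(tasks[dep]["status"] == "done" for dep in task["dependencies"]):
            return tid
    return None

tasks = {
    1: {"status": "queued", "group": "default", "dependencies": []},
    2: {"status": "queued", "group": "default", "dependencies": [1]},
}
groups = {"default": {"running": True, "parallel_tasks": 1}}
print(next_task_id(tasks, groups, running_per_group={}))  # -> 1; task 2 waits on 1
```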
spawn_task.rs | use super::*;
use crate::ok_or_shutdown;
use crate::state_helper::{pause_on_failure, save_state, LockedState};
impl TaskHandler {
/// See if we can start a new queued task.
pub fn | (&mut self) {
let cloned_state_mutex = self.state.clone();
let mut state = cloned_state_mutex.lock().unwrap();
// Check whether a new task can be started.
// Spawn tasks until we no longer have free slots available.
while let Some(id) = self.get_next_task_id(&state) {
self.start_process(id, &mut state);
}
}
/// Search and return the next task that can be started.
/// Precondition for a task to be started:
/// - is in Queued state
/// - There are free slots in the task's group
/// - The group is running
/// - has all its dependencies in `Done` state
pub fn get_next_task_id(&mut self, state: &LockedState) -> Option<usize> {
state
.tasks
.iter()
.filter(|(_, task)| task.status == TaskStatus::Queued)
.filter(|(_, task)| {
// Make sure the task is assigned to an existing group.
let group = match state.groups.get(&task.group) {
Some(group) => group,
None => {
error!(
"Got task with unknown group {}. Please report this!",
&task.group
);
return false;
}
};
// Let's check if the group is running. If it isn't, simply return false.
if group.status != GroupStatus::Running {
return false;
}
// Get the currently running tasks by looking at the actually running processes.
// They're sorted by group, which makes this quite convenient.
let running_tasks = match self.children.0.get(&task.group) {
Some(children) => children.len(),
None => {
error!(
"Got valid group {}, but no worker pool has been initialized. This is a bug!",
&task.group
);
return false
}
};
// Make sure there are free slots in the task's group
running_tasks < group.parallel_tasks
})
.find(|(_, task)| {
// Check whether all dependencies for this task are fulfilled.
task.dependencies
.iter()
.flat_map(|id| state.tasks.get(id))
.all(|task| matches!(task.status, TaskStatus::Done(TaskResult::Success)))
})
.map(|(id, _)| *id)
}
/// Actually spawn a new sub process
/// The output of subprocesses is piped into a separate file for easier access
pub fn start_process(&mut self, task_id: usize, state: &mut LockedState) {
// Check if the task exists and can actually be spawned. Otherwise do an early return.
match state.tasks.get(&task_id) {
Some(task) => {
if !matches!(
&task.status,
TaskStatus::Stashed { .. } | TaskStatus::Queued | TaskStatus::Paused
) {
info!("Tried to start task with status: {}", task.status);
return;
}
}
None => {
info!("Tried to start non-existing task: {task_id}");
return;
}
};
// Try to get the log file to which the output of the process will be written to.
// Panic if this doesn't work! This is unrecoverable.
let (stdout_log, stderr_log) = match create_log_file_handles(task_id, &self.pueue_directory)
{
Ok((out, err)) => (out, err),
Err(err) => {
panic!("Failed to create child log files: {err:?}");
}
};
// Get all necessary info for starting the task
let (command, path, group, mut envs) = {
let task = state.tasks.get(&task_id).unwrap();
(
task.command.clone(),
task.path.clone(),
task.group.clone(),
task.envs.clone(),
)
};
// Build the shell command that should be executed.
let mut command = compile_shell_command(&command);
// Determine the worker's id depending on the current group.
// Inject that info into the environment.
let worker_id = self.children.get_next_group_worker(&group);
envs.insert("PUEUE_GROUP".into(), group.clone());
envs.insert("PUEUE_WORKER_ID".into(), worker_id.to_string());
// Spawn the actual subprocess
let spawned_command = command
.current_dir(path)
.stdin(Stdio::piped())
.envs(envs.clone())
.stdout(Stdio::from(stdout_log))
.stderr(Stdio::from(stderr_log))
.spawn();
// Check if the task managed to spawn
let child = match spawned_command {
Ok(child) => child,
Err(err) => {
let error = format!("Failed to spawn child {task_id} with err: {err:?}");
error!("{}", error);
clean_log_handles(task_id, &self.pueue_directory);
// Update all necessary fields on the task.
let group = {
let task = state.tasks.get_mut(&task_id).unwrap();
task.status = TaskStatus::Done(TaskResult::FailedToSpawn(error));
task.start = Some(Local::now());
task.end = Some(Local::now());
self.spawn_callback(task);
task.group.clone()
};
pause_on_failure(state, &group);
ok_or_shutdown!(self, save_state(state));
return;
}
};
// Save the process handle in our self.children data structure.
self.children.add_child(&group, worker_id, task_id, child);
let task = state.tasks.get_mut(&task_id).unwrap();
task.start = Some(Local::now());
task.status = TaskStatus::Running;
// Overwrite the task's environment variables with the new ones, containing the
// PUEUE_WORKER_ID and PUEUE_GROUP variables.
task.envs = envs;
info!("Started task: {}", task.command);
ok_or_shutdown!(self, save_state(state));
}
}
| spawn_new | identifier_name |
spawn_task.rs | use super::*;
use crate::ok_or_shutdown;
use crate::state_helper::{pause_on_failure, save_state, LockedState};
impl TaskHandler {
/// See if we can start a new queued task.
pub fn spawn_new(&mut self) {
let cloned_state_mutex = self.state.clone();
let mut state = cloned_state_mutex.lock().unwrap();
// Check whether a new task can be started.
// Spawn tasks until we no longer have free slots available.
while let Some(id) = self.get_next_task_id(&state) {
self.start_process(id, &mut state);
}
}
/// Search and return the next task that can be started.
/// Precondition for a task to be started:
/// - is in Queued state
/// - There are free slots in the task's group
/// - The group is running
/// - has all its dependencies in `Done` state
pub fn get_next_task_id(&mut self, state: &LockedState) -> Option<usize> {
state
.tasks
.iter()
.filter(|(_, task)| task.status == TaskStatus::Queued)
.filter(|(_, task)| {
// Make sure the task is assigned to an existing group.
let group = match state.groups.get(&task.group) {
Some(group) => group,
None => {
error!(
"Got task with unknown group {}. Please report this!",
&task.group
);
return false; | return false;
}
// Get the currently running tasks by looking at the actually running processes.
// They're sorted by group, which makes this quite convenient.
let running_tasks = match self.children.0.get(&task.group) {
Some(children) => children.len(),
None => {
error!(
"Got valid group {}, but no worker pool has been initialized. This is a bug!",
&task.group
);
return false
}
};
// Make sure there are free slots in the task's group
running_tasks < group.parallel_tasks
})
.find(|(_, task)| {
// Check whether all dependencies for this task are fulfilled.
task.dependencies
.iter()
.flat_map(|id| state.tasks.get(id))
.all(|task| matches!(task.status, TaskStatus::Done(TaskResult::Success)))
})
.map(|(id, _)| *id)
}
/// Actually spawn a new sub process
/// The output of subprocesses is piped into a separate file for easier access
pub fn start_process(&mut self, task_id: usize, state: &mut LockedState) {
// Check if the task exists and can actually be spawned. Otherwise do an early return.
match state.tasks.get(&task_id) {
Some(task) => {
if !matches!(
&task.status,
TaskStatus::Stashed { .. } | TaskStatus::Queued | TaskStatus::Paused
) {
info!("Tried to start task with status: {}", task.status);
return;
}
}
None => {
info!("Tried to start non-existing task: {task_id}");
return;
}
};
// Try to get the log file to which the output of the process will be written to.
// Panic if this doesn't work! This is unrecoverable.
let (stdout_log, stderr_log) = match create_log_file_handles(task_id, &self.pueue_directory)
{
Ok((out, err)) => (out, err),
Err(err) => {
panic!("Failed to create child log files: {err:?}");
}
};
// Get all necessary info for starting the task
let (command, path, group, mut envs) = {
let task = state.tasks.get(&task_id).unwrap();
(
task.command.clone(),
task.path.clone(),
task.group.clone(),
task.envs.clone(),
)
};
// Build the shell command that should be executed.
let mut command = compile_shell_command(&command);
// Determine the worker's id depending on the current group.
// Inject that info into the environment.
let worker_id = self.children.get_next_group_worker(&group);
envs.insert("PUEUE_GROUP".into(), group.clone());
envs.insert("PUEUE_WORKER_ID".into(), worker_id.to_string());
// Spawn the actual subprocess
let spawned_command = command
.current_dir(path)
.stdin(Stdio::piped())
.envs(envs.clone())
.stdout(Stdio::from(stdout_log))
.stderr(Stdio::from(stderr_log))
.spawn();
// Check if the task managed to spawn
let child = match spawned_command {
Ok(child) => child,
Err(err) => {
let error = format!("Failed to spawn child {task_id} with err: {err:?}");
error!("{}", error);
clean_log_handles(task_id, &self.pueue_directory);
// Update all necessary fields on the task.
let group = {
let task = state.tasks.get_mut(&task_id).unwrap();
task.status = TaskStatus::Done(TaskResult::FailedToSpawn(error));
task.start = Some(Local::now());
task.end = Some(Local::now());
self.spawn_callback(task);
task.group.clone()
};
pause_on_failure(state, &group);
ok_or_shutdown!(self, save_state(state));
return;
}
};
// Save the process handle in our self.children data structure.
self.children.add_child(&group, worker_id, task_id, child);
let task = state.tasks.get_mut(&task_id).unwrap();
task.start = Some(Local::now());
task.status = TaskStatus::Running;
// Overwrite the task's environment variables with the new ones, containing the
// PUEUE_WORKER_ID and PUEUE_GROUP variables.
task.envs = envs;
info!("Started task: {}", task.command);
ok_or_shutdown!(self, save_state(state));
}
} | }
};
// Let's check if the group is running. If it isn't, simply return false.
if group.status != GroupStatus::Running { | random_line_split |
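`start_process` in the rows above redirects the child's stdout and stderr into per-task log files instead of pipes. A sketch of the same pattern using Python's `subprocess` module — the log directory is an assumed placeholder, not pueue's layout:

```python
# Sketch of the per-task log-file redirection pattern above; the log
# directory is an assumed placeholder, not pueue's layout.
import subprocess

def start_logged(task_id, command, log_dir="/tmp"):
    stdout_log = open(f"{log_dir}/task_{task_id}.out", "w")
    stderr_log = open(f"{log_dir}/task_{task_id}.err", "w")
    # Child output goes straight into the files, like Stdio::from(stdout_log).
    return subprocess.Popen(command, stdin=subprocess.PIPE,
                            stdout=stdout_log, stderr=stderr_log)

child = start_logged(1, ["echo", "hello"])
child.wait()
print(open("/tmp/task_1.out").read())  # -> hello
```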
classHelix_1_1Logic_1_1admin_1_1Row.js | var classHelix_1_1Logic_1_1admin_1_1Row =
[
[ "Row", "classHelix_1_1Logic_1_1admin_1_1Row.html#a5cca50abc389396e26ec68cbd3fb6596", null ],
[ "Row", "classHelix_1_1Logic_1_1admin_1_1Row.html#a9dbe8548daa0a4887688199e00c25ebe", null ],
[ "~Row", "classHelix_1_1Logic_1_1admin_1_1Row.html#a8cc0869c91dc94ee595069309ba15690", null ],
[ "Row", "classHelix_1_1Logic_1_1admin_1_1Row.html#ab03288f17c220e20c93ad9c0d09061ac", null ],
[ "checkSize", "classHelix_1_1Logic_1_1admin_1_1Row.html#a48bcdc5b9f9538ed41e080803d663678", null ], | [ "init", "classHelix_1_1Logic_1_1admin_1_1Row.html#a2b565af0d1dde980ea1554a9f2f701e2", null ],
[ "Name", "classHelix_1_1Logic_1_1admin_1_1Row.html#a4d274004c656534d5e53e2e71aa56ced", null ],
[ "operator=", "classHelix_1_1Logic_1_1admin_1_1Row.html#a87ba0481b0ca4da736c29902ae99ae16", null ],
[ "readXmlChildren", "classHelix_1_1Logic_1_1admin_1_1Row.html#af67bc7d9b68ddddbaa4c50877307a3eb", null ],
[ "readXmlNode", "classHelix_1_1Logic_1_1admin_1_1Row.html#ad725b5dab419e46fa1756db5723fe056", null ],
[ "unused", "classHelix_1_1Logic_1_1admin_1_1Row.html#aea46573efcd2acb8be86d94f3eeefd75", null ],
[ "unused", "classHelix_1_1Logic_1_1admin_1_1Row.html#a03fa2b5f39fc6e782c6d0bb50ff2fc1c", null ],
[ "unused_getSQL", "classHelix_1_1Logic_1_1admin_1_1Row.html#aa184ff4c1a1e4f0c5d60a423d82acda7", null ],
[ "unused_prepSQL", "classHelix_1_1Logic_1_1admin_1_1Row.html#a68b113538ca9e7713c8db5079acc469d", null ],
[ "Cols", "classHelix_1_1Logic_1_1admin_1_1Row.html#ac018a27345b236d23211f5a2d903b21d", null ],
[ "idx", "classHelix_1_1Logic_1_1admin_1_1Row.html#aa01047fcfb3bed97affba31071c65054", null ]
]; | [ "createXmlChildren", "classHelix_1_1Logic_1_1admin_1_1Row.html#a214f15c6d3d9f8fafb0e0c06eb11b055", null ],
[ "createXmlDoc", "classHelix_1_1Logic_1_1admin_1_1Row.html#a94214dcf9a6b54b4a6e9e29e22c7fdaf", null ],
[ "createXmlNode", "classHelix_1_1Logic_1_1admin_1_1Row.html#a6574128e020e000ed6d8f6237170b6ab", null ],
[ "deleteVector", "classHelix_1_1Logic_1_1admin_1_1Row.html#a0315f22e3e824261c07faa598b2924cb", null ], | random_line_split |
telephone.py | import sys, math
# Auto-generated code below aims at helping you parse |
class Tree(object):
def __repr__(self):
return self.val
def __init__(self, val=None):
self.val = val
self.childs = []
def add_number(self, number):
if not number:
return
for child in self.childs:
if number[0] == child.val:
del number[0]
child.add_number(number)
return
new_child = Tree(number[0])
self.childs.append(new_child)
del number[0]
new_child.add_number(number)
def calculate(self):
plus = 1 if self.val else 0
return plus + sum([child.calculate() for child in self.childs])
def show(self, order=''):
print order + str(self.val)
order += ' '
for child in self.childs:
child.show(order)
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
t = Tree()
t.add_number(list('0123456789'))
t.add_number(list('0123'))
print t.calculate()
t.show() | # the standard input according to the problem statement. | random_line_split |
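The `Tree` in the row above is a digit trie: `calculate` counts one unit per stored node, so numbers sharing a prefix share storage. The snippet is Python 2 (`print` statements, `raw_input`); a Python 3 re-sketch of the same idea:

```python
# Python 3 re-sketch of the digit-trie idea above; behaviour mirrors
# Tree.add_number / Tree.calculate from the snippet.
class Trie:
    def __init__(self):
        self.root = {}

    def add(self, number):
        node = self.root
        for digit in number:
            node = node.setdefault(digit, {})  # reuse shared prefixes

    def count(self, node=None):
        node = self.root if node is None else node
        # one storage unit per node; shared prefixes are counted once
        return sum(1 + self.count(child) for child in node.values())

t = Trie()
t.add("0123456789")
t.add("0123")
print(t.count())  # 10: the second number is a prefix of the first
```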
telephone.py | import sys, math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
class Tree(object):
def __repr__(self):
return self.val
def __init__(self, val=None):
self.val = val
self.childs = []
def add_number(self, number):
if not number:
return
for child in self.childs:
if number[0] == child.val:
del number[0]
child.add_number(number)
return
new_child = Tree(number[0])
self.childs.append(new_child)
del number[0]
new_child.add_number(number)
def calculate(self):
plus = 1 if self.val else 0
return plus + sum([child.calculate() for child in self.childs])
def show(self, order=''):
print order + str(self.val)
order += ' '
for child in self.childs:
|
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
t = Tree()
t.add_number(list('0123456789'))
t.add_number(list('0123'))
print t.calculate()
t.show()
| child.show(order) | conditional_block |
telephone.py | import sys, math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
class Tree(object):
def __repr__(self):
|
def __init__(self, val=None):
self.val = val
self.childs = []
def add_number(self, number):
if not number:
return
for child in self.childs:
if number[0] == child.val:
del number[0]
child.add_number(number)
return
new_child = Tree(number[0])
self.childs.append(new_child)
del number[0]
new_child.add_number(number)
def calculate(self):
plus = 1 if self.val else 0
return plus + sum([child.calculate() for child in self.childs])
def show(self, order=''):
print order + str(self.val)
order += ' '
for child in self.childs:
child.show(order)
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
t = Tree()
t.add_number(list('0123456789'))
t.add_number(list('0123'))
print t.calculate()
t.show()
| return self.val | identifier_body |
telephone.py | import sys, math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
class Tree(object):
def __repr__(self):
return self.val
def | (self, val=None):
self.val = val
self.childs = []
def add_number(self, number):
if not number:
return
for child in self.childs:
if number[0] == child.val:
del number[0]
child.add_number(number)
return
new_child = Tree(number[0])
self.childs.append(new_child)
del number[0]
new_child.add_number(number)
def calculate(self):
plus = 1 if self.val else 0
return plus + sum([child.calculate() for child in self.childs])
def show(self, order=''):
print order + str(self.val)
order += ' '
for child in self.childs:
child.show(order)
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
t = Tree()
t.add_number(list('0123456789'))
t.add_number(list('0123'))
print t.calculate()
t.show()
| __init__ | identifier_name |
actorRegistry.ts | /**
 * Holds references to Actor classes for instantiation
*/
import { ActorType } from '../../actor/actor';
/* tslint:disable-next-line interface-over-type-literal */
type ActorRegister = { [actorClassName: string]: ActorType };
export class ActorRegistry {
private static self: ActorRegistry = new ActorRegistry();
private registry: ActorRegister = {};
public static getInstance(): ActorRegistry {
return ActorRegistry.self;
}
constructor() {
if (ActorRegistry.self) {
throw new Error('Use getInstance to retrieve registry');
}
ActorRegistry.self = this;
}
/**
* Register an actor with the registry
* @param {ActorType} actor - The actor to register with the registry
*/
public register(actor: ActorType): void {
if (!this.registry[actor.name]) {
this.registry[actor.name] = actor;
}
}
/**
* Check to see if an actor exists in the registry
* @param {string} actor - The string name of the actor class
* @return {boolean} - Whether the actor exists in the registry
*/
public exists(actor: string): boolean {
return !!this.registry[actor];
}
/**
* Get the specified ActorType from the registry, or throw
* @param {string} actor - The string name of the actor class
* @return {ActorType} - The actor class, or throw
*/
public get(actor: string): ActorType {
if (this.exists(actor)) {
return this.registry[actor];
} else {
throw new Error(`Actor ${actor} is not registered`);
}
}
/**
 * Clear the registry
* NOTE: This is mostly for testing
*/
public clear(): void { | this.registry = {};
}
} | random_line_split |
|
actorRegistry.ts | /**
 * Holds references to Actor classes for instantiation
*/
import { ActorType } from '../../actor/actor';
/* tslint:disable-next-line interface-over-type-literal */
type ActorRegister = { [actorClassName: string]: ActorType };
export class ActorRegistry {
private static self: ActorRegistry = new ActorRegistry();
private registry: ActorRegister = {};
public static getInstance(): ActorRegistry {
return ActorRegistry.self;
}
constructor() {
if (ActorRegistry.self) {
throw new Error('Use getInstance to retrieve registry');
}
ActorRegistry.self = this;
}
/**
* Register an actor with the registry
* @param {ActorType} actor - The actor to register with the registry
*/
public register(actor: ActorType): void {
if (!this.registry[actor.name]) |
}
/**
* Check to see if an actor exists in the registry
* @param {string} actor - The string name of the actor class
* @return {boolean} - Whether the actor exists in the registry
*/
public exists(actor: string): boolean {
return !!this.registry[actor];
}
/**
* Get the specified ActorType from the registry, or throw
* @param {string} actor - The string name of the actor class
* @return {ActorType} - The actor class, or throw
*/
public get(actor: string): ActorType {
if (this.exists(actor)) {
return this.registry[actor];
} else {
throw new Error(`Actor ${actor} is not registered`);
}
}
/**
 * Clear the registry
* NOTE: This is mostly for testing
*/
public clear(): void {
this.registry = {};
}
}
| {
this.registry[actor.name] = actor;
} | conditional_block |
actorRegistry.ts | /**
 * Holds references to Actor classes for instantiation
*/
import { ActorType } from '../../actor/actor';
/* tslint:disable-next-line interface-over-type-literal */
type ActorRegister = { [actorClassName: string]: ActorType };
export class ActorRegistry {
private static self: ActorRegistry = new ActorRegistry();
private registry: ActorRegister = {};
public static getInstance(): ActorRegistry {
return ActorRegistry.self;
}
constructor() {
if (ActorRegistry.self) {
throw new Error('Use getInstance to retrieve registry');
}
ActorRegistry.self = this;
}
/**
* Register an actor with the registry
* @param {ActorType} actor - The actor to register with the registry
*/
public register(actor: ActorType): void {
if (!this.registry[actor.name]) {
this.registry[actor.name] = actor;
}
}
/**
* Check to see if an actor exists in the registry
* @param {string} actor - The string name of the actor class
* @return {boolean} - Whether the actor exists in the registry
*/
public | (actor: string): boolean {
return !!this.registry[actor];
}
/**
* Get the specified ActorType from the registry, or throw
* @param {string} actor - The string name of the actor class
* @return {ActorType} - The actor class, or throw
*/
public get(actor: string): ActorType {
if (this.exists(actor)) {
return this.registry[actor];
} else {
throw new Error(`Actor ${actor} is not registered`);
}
}
/**
 * Clear the registry
* NOTE: This is mostly for testing
*/
public clear(): void {
this.registry = {};
}
}
| exists | identifier_name |
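`ActorRegistry` in the rows above combines a singleton (a private static instance guarded by the constructor) with a name-keyed class registry. A compact Python sketch of the same pattern; the `Walker` class is a hypothetical stand-in for a registered actor type:

```python
# Python sketch of the singleton-plus-registry pattern above; the Walker
# class is a hypothetical stand-in for a registered actor type.
class Registry:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance._classes = {}
        return cls._instance  # every call yields the one shared instance

    def register(self, klass):
        self._classes.setdefault(klass.__name__, klass)

    def get(self, name):
        if name not in self._classes:
            raise KeyError(f"Actor {name} is not registered")
        return self._classes[name]

class Walker:
    pass

Registry().register(Walker)
assert Registry().get("Walker") is Walker  # both calls hit the same instance
```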
web-request-event.ts | /* eslint-disable no-bitwise */
import { ipcRenderer } from 'electron';
(String.prototype as any).hashCode = function hashCode() {
let hash = 0;
let i;
let chr;
if (this.length === 0) {
return hash;
}
for (i = 0; i < this.length; i += 1) {
chr = this.charCodeAt(i);
hash = ((hash << 5) - hash) + chr;
hash |= 0; // convert to 32bit integer
}
return hash;
};
class Event {
name: string;
scope: string;
event: string;
listeners: ((details: any) => any)[];
constructor(name: string, scope: string, event: string) |
addListener(callback: (details: any) => any, filter = {}): void {
const digest = (callback.toString() as any).hashCode();
this.listeners.push(digest);
ipcRenderer.on(
`lulumi-${this.scope}-${this.event}-intercepted-${digest}`, (event, requestId, details) => {
ipcRenderer.send(
`lulumi-${this.scope}-${this.event}-response-${digest}-${requestId}`, callback(details)
);
}
);
ipcRenderer.send(
`lulumi-${this.scope}-add-listener-${this.event}`,
this.name,
this.event,
digest,
filter,
);
}
removeListener(callback: (details: any) => any): void {
const digest = (callback.toString() as any).hashCode();
this.listeners = this.listeners.filter(c => (c !== digest));
ipcRenderer.removeAllListeners(`lulumi-${this.scope}-${this.event}-intercepted-${digest}`);
ipcRenderer.send(`lulumi-${this.scope}-remove-listener-${this.event}`, this.name, this.event);
}
removeAllListeners(): void {
this.listeners.forEach(l => ipcRenderer.removeAllListeners(
`lulumi-${this.scope}-${this.event}-intercepted-${l}`
));
ipcRenderer.send(
`lulumi-${this.scope}-remove-listener-${this.event}`,
this.name,
this.event,
);
this.listeners = [];
}
}
export default Event;
| {
this.name = name; // extension's name
this.scope = scope;
this.event = event;
this.listeners = [];
} | identifier_body |
web-request-event.ts | /* eslint-disable no-bitwise */
import { ipcRenderer } from 'electron';
(String.prototype as any).hashCode = function hashCode() {
let hash = 0;
let i;
let chr;
if (this.length === 0) {
return hash;
}
for (i = 0; i < this.length; i += 1) {
chr = this.charCodeAt(i);
hash = ((hash << 5) - hash) + chr;
hash |= 0; // convert to 32bit integer
}
return hash;
};
class Event {
name: string;
scope: string;
event: string;
listeners: ((details: any) => any)[];
constructor(name: string, scope: string, event: string) {
this.name = name; // extension's name
this.scope = scope;
this.event = event;
this.listeners = [];
}
addListener(callback: (details: any) => any, filter = {}): void {
const digest = (callback.toString() as any).hashCode();
this.listeners.push(digest);
ipcRenderer.on(
`lulumi-${this.scope}-${this.event}-intercepted-${digest}`, (event, requestId, details) => {
ipcRenderer.send(
`lulumi-${this.scope}-${this.event}-response-${digest}-${requestId}`, callback(details)
);
}
); | filter,
);
}
removeListener(callback: (details: any) => any): void {
const digest = (callback.toString() as any).hashCode();
this.listeners = this.listeners.filter(c => (c !== digest));
ipcRenderer.removeAllListeners(`lulumi-${this.scope}-${this.event}-intercepted-${digest}`);
ipcRenderer.send(`lulumi-${this.scope}-remove-listener-${this.event}`, this.name, this.event);
}
removeAllListeners(): void {
this.listeners.forEach(l => ipcRenderer.removeAllListeners(
`lulumi-${this.scope}-${this.event}-intercepted-${l}`
));
ipcRenderer.send(
`lulumi-${this.scope}-remove-listener-${this.event}`,
this.name,
this.event,
);
this.listeners = [];
}
}
export default Event; | ipcRenderer.send(
`lulumi-${this.scope}-add-listener-${this.event}`,
this.name,
this.event,
digest, | random_line_split |
web-request-event.ts | /* eslint-disable no-bitwise */
import { ipcRenderer } from 'electron';
(String.prototype as any).hashCode = function hashCode() {
let hash = 0;
let i;
let chr;
if (this.length === 0) {
return hash;
}
for (i = 0; i < this.length; i += 1) {
chr = this.charCodeAt(i);
hash = ((hash << 5) - hash) + chr;
hash |= 0; // convert to 32bit integer
}
return hash;
};
class Event {
name: string;
scope: string;
event: string;
listeners: ((details: any) => any)[];
| (name: string, scope: string, event: string) {
this.name = name; // extension's name
this.scope = scope;
this.event = event;
this.listeners = [];
}
addListener(callback: (details: any) => any, filter = {}): void {
const digest = (callback.toString() as any).hashCode();
this.listeners.push(digest);
ipcRenderer.on(
`lulumi-${this.scope}-${this.event}-intercepted-${digest}`, (event, requestId, details) => {
ipcRenderer.send(
`lulumi-${this.scope}-${this.event}-response-${digest}-${requestId}`, callback(details)
);
}
);
ipcRenderer.send(
`lulumi-${this.scope}-add-listener-${this.event}`,
this.name,
this.event,
digest,
filter,
);
}
removeListener(callback: (details: any) => any): void {
const digest = (callback.toString() as any).hashCode();
this.listeners = this.listeners.filter(c => (c !== digest));
ipcRenderer.removeAllListeners(`lulumi-${this.scope}-${this.event}-intercepted-${digest}`);
ipcRenderer.send(`lulumi-${this.scope}-remove-listener-${this.event}`, this.name, this.event);
}
removeAllListeners(): void {
this.listeners.forEach(l => ipcRenderer.removeAllListeners(
`lulumi-${this.scope}-${this.event}-intercepted-${l}`
));
ipcRenderer.send(
`lulumi-${this.scope}-remove-listener-${this.event}`,
this.name,
this.event,
);
this.listeners = [];
}
}
export default Event;
| constructor | identifier_name |
web-request-event.ts | /* eslint-disable no-bitwise */
import { ipcRenderer } from 'electron';
(String.prototype as any).hashCode = function hashCode() {
let hash = 0;
let i;
let chr;
if (this.length === 0) {
return hash;
}
for (i = 0; i < this.length; i += 1) |
return hash;
};
class Event {
name: string;
scope: string;
event: string;
listeners: ((details: any) => any)[];
constructor(name: string, scope: string, event: string) {
this.name = name; // extension's name
this.scope = scope;
this.event = event;
this.listeners = [];
}
addListener(callback: (details: any) => any, filter = {}): void {
const digest = (callback.toString() as any).hashCode();
this.listeners.push(digest);
ipcRenderer.on(
`lulumi-${this.scope}-${this.event}-intercepted-${digest}`, (event, requestId, details) => {
ipcRenderer.send(
`lulumi-${this.scope}-${this.event}-response-${digest}-${requestId}`, callback(details)
);
}
);
ipcRenderer.send(
`lulumi-${this.scope}-add-listener-${this.event}`,
this.name,
this.event,
digest,
filter,
);
}
removeListener(callback: (details: any) => any): void {
const digest = (callback.toString() as any).hashCode();
this.listeners = this.listeners.filter(c => (c !== digest));
ipcRenderer.removeAllListeners(`lulumi-${this.scope}-${this.event}-intercepted-${digest}`);
ipcRenderer.send(`lulumi-${this.scope}-remove-listener-${this.event}`, this.name, this.event);
}
removeAllListeners(): void {
this.listeners.forEach(l => ipcRenderer.removeAllListeners(
`lulumi-${this.scope}-${this.event}-intercepted-${l}`
));
ipcRenderer.send(
`lulumi-${this.scope}-remove-listener-${this.event}`,
this.name,
this.event,
);
this.listeners = [];
}
}
export default Event;
| {
chr = this.charCodeAt(i);
hash = ((hash << 5) - hash) + chr;
hash |= 0; // convert to 32bit integer
} | conditional_block |
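The `hashCode` shim in the rows above is the classic 31-based string hash: `(hash << 5) - hash` equals `hash * 31`, and `hash |= 0` folds the value back into a signed 32-bit integer each step. A Python sketch that should reproduce the same values for BMP strings:

```python
# Python sketch of the 31-based hash above; masking to 32 bits and folding
# the sign emulates JavaScript's `hash |= 0` signed-overflow behaviour.
def hash_code(s):
    h = 0
    for ch in s:
        h = (31 * h + ord(ch)) & 0xFFFFFFFF  # (h << 5) - h == 31 * h
    return h - 0x100000000 if h >= 0x80000000 else h  # reinterpret as int32

print(hash_code("callback"))  # should match what the JS shim computes
```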
const-contents.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #570
static lsl : int = 1 << 2;
static add : int = 1 + 2;
static addf : f64 = 1.0 + 2.0;
static not : int = !0;
static notb : bool = !true;
static neg : int = -(1);
pub fn | () {
assert_eq!(lsl, 4);
assert_eq!(add, 3);
assert_eq!(addf, 3.0);
assert_eq!(not, -1);
assert_eq!(notb, false);
assert_eq!(neg, -1);
}
| main | identifier_name |
const-contents.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #570
static lsl : int = 1 << 2;
static add : int = 1 + 2;
static addf : f64 = 1.0 + 2.0;
static not : int = !0;
static notb : bool = !true;
static neg : int = -(1);
pub fn main() | {
assert_eq!(lsl, 4);
assert_eq!(add, 3);
assert_eq!(addf, 3.0);
assert_eq!(not, -1);
assert_eq!(notb, false);
assert_eq!(neg, -1);
} | identifier_body |
|
const-contents.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #570
static lsl : int = 1 << 2;
static add : int = 1 + 2;
static addf : f64 = 1.0 + 2.0;
static not : int = !0;
static notb : bool = !true;
static neg : int = -(1);
pub fn main() {
assert_eq!(lsl, 4);
assert_eq!(add, 3);
assert_eq!(addf, 3.0);
assert_eq!(not, -1);
assert_eq!(notb, false);
assert_eq!(neg, -1);
} | random_line_split |
|
container.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Anne Archibald <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
class ContainerError(ValueError):
"""Error signaling something went wrong with container handling"""
pass
class Container(object):
|
class ContainerList(list,Container):
"""A ContainerList is a list whose children know they're in it.
Each element in the ContainerList has a .container attribute which points
to the ContainerList itself. This container pointer is maintained automatically.
"""
def __init__(self, items=[], owner=None):
list.__init__(self, items)
self._set_container_multi(items)
self.owner = owner
def __repr__(self):
return "<CL %s>" % list.__repr__(self)
def append(self, item):
self._set_container(item)
list.append(self,item)
def extend(self, items):
self._set_container_multi(items)
list.extend(self,items)
def insert(self, i, item):
self._set_container(item)
list.insert(self,i,item)
def remove(self, item):
self._unset_container(item)
list.remove(self,item)
def pop(self, i=-1):
self._unset_container(self[i])
return list.pop(self,i)
# These don't work because they make the elements part of more than one list, or one list more than once
def __add__(self, other):
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __imul__(self,other):
raise NotImplementedError
def __mul__(self, other):
raise NotImplementedError
def __rmul__(self,other):
raise NotImplementedError
# only works if other is not also a Container
def __iadd__(self, other):
self.extend(other)
return self
def __setitem__(self, key, value):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
try:
self._set_container_multi(value)
except ContainerError:
self._set_container_multi(self[key])
raise
else:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
self._set_container(self[key])
raise
list.__setitem__(self,key,value)
def __delitem__(self, key):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
else:
self._unset_container(self[key])
list.__delitem__(self,key)
# Needed for python2, forbidden for python3
def __delslice__(self,i,j):
del self[slice(i,j,None)]
class ContainerDict(dict,Container):
"""A ContainerDict is a dict whose children know they're in it.
Each element in the ContainerDict has a .container attribute which points
to the ContainerDict itself. This container pointer is maintained automatically.
"""
def __init__(self, contents=None, **kwargs):
if contents is None:
dict.__init__(self, **kwargs)
else:
dict.__init__(self, contents, **kwargs)
self._set_container_multi(list(self.values()))
def __repr__(self):
return "<CD %s>" % dict.__repr__(self)
def __setitem__(self, key, value):
if key in self:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
if key in self:
self._set_container(self[key])
raise
dict.__setitem__(self,key,value)
def __delitem__(self, key):
if key in self:
self._unset_container(self[key])
dict.__delitem__(self,key)
def pop(self, key):
if key in self:
self._unset_container(self[key])
return dict.pop(self,key)
def popitem(self):
key, value = dict.popitem(self)
self._unset_container(value)
return key, value
def setdefault(self, key, default=None):
if key not in self:
self._set_container(default)
dict.setdefault(self, key, default)
def update(self, other):
for (k,v) in list(other.items()):
self[k] = v
if __name__=='__main__':
class Gear(object):
def __init__(self, name, container=None):
self.name = name
self.container = container
def __repr__(self):
return "<G "+str(self.name)+">"
gears = [Gear(n) for n in range(10)]
a = Gear("A")
b = Gear("B")
c = Gear("C")
d = Gear("D")
e = Gear("E")
p = ContainerList([a,b,c])
print(p)
try:
p.append(a)
except ContainerError as err:
print(err)
else:
raise AssertionError
print(p[1])
print(p[::2])
p[1] = d
print(p)
p[1] = b
p[::2] = [d,e]
print(p)
del p[:]
p2 = ContainerList([a,b,c])
print(p2)
p2.extend([d,e])
print(p2)
print(p2.pop())
print(p2)
p2.remove(d)
print(p2)
p2 += [d,e]
print(p2)
try:
d = ContainerDict(a=a, b=b, c=c)
except ContainerError as err:
print(err)
else:
raise AssertionError
del p2[:]
d = ContainerDict(a=a, b=b, c=c)
print(d)
print(d["a"])
d["a"] = a
try:
d["a"] = b
except ContainerError as err:
print(err)
else:
raise AssertionError
del d["a"]
d["a"] = a
d.pop("a")
print(d)
d["a"] = a
k,v = d.popitem()
d[k] = v
d.setdefault("e",e)
d.setdefault("e",e)
print(d)
del d["e"]
d.update(dict(e=e))
print(d)
| """A container is an object that manages objects it contains.
The objects in a container each have a .container attribute that
points to the container. This attribute is managed by the container
itself.
This class is a base class that provides common container functionality,
to be used to simplify implementation of list and dict containers.
"""
def _set_container(self, item):
if hasattr( item, "container" ) and item.container not in (None,self):
# raise ContainerError("Item %s was added to container %s but was already in container %s" % (item, self, item.container))
item.container.remove( item )
item.container = self
def _unset_container(self, item):
if item.container is not self:
raise ContainerError("Item %s was removed from container %s but was not in it" % (item, self))
item.container = None
def _set_container_multi(self, items):
"""Put items in the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._set_container(i)
r.append(i)
r = None
finally: # Make sure items don't get added to this if any fail
if r is not None:
for i in r:
try:
self._unset_container(i)
except ContainerError:
pass
def _unset_container_multi(self, items):
"""Remove items from the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._unset_container(i)
r.append(i)
r = None
finally:
if r is not None:
for i in r:
try:
self._set_container(i)
except ContainerError:
pass | identifier_body |
container.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Anne Archibald <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
class ContainerError(ValueError):
"""Error signaling something went wrong with container handling"""
pass
class Container(object):
"""A container is an object that manages objects it contains.
The objects in a container each have a .container attribute that
points to the container. This attribute is managed by the container
itself.
This class is a base class that provides common container functionality,
to be used to simplify implementation of list and dict containers.
"""
def _set_container(self, item):
if hasattr( item, "container" ) and item.container not in (None,self):
# raise ContainerError("Item %s was added to container %s but was already in container %s" % (item, self, item.container))
item.container.remove( item )
item.container = self
def _unset_container(self, item):
if item.container is not self:
raise ContainerError("Item %s was removed from container %s but was not in it" % (item, self))
item.container = None
def _set_container_multi(self, items):
"""Put items in the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._set_container(i)
r.append(i)
r = None
finally: # Make sure items don't get added to this if any fail
if r is not None:
for i in r:
try:
self._unset_container(i)
except ContainerError:
pass
def _unset_container_multi(self, items):
"""Remove items from the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._unset_container(i)
r.append(i)
r = None
finally:
if r is not None:
for i in r:
try:
self._set_container(i)
except ContainerError:
pass
class ContainerList(list,Container):
"""A ContainerList is a list whose children know they're in it.
Each element in the ContainerList has a .container attribute which points
to the ContainerList itself. This container pointer is maintained automatically.
"""
def __init__(self, items=[], owner=None):
list.__init__(self, items)
self._set_container_multi(items)
self.owner = owner
def __repr__(self):
return "<CL %s>" % list.__repr__(self)
def append(self, item):
self._set_container(item)
list.append(self,item)
def extend(self, items):
self._set_container_multi(items)
list.extend(self,items)
def | (self, i, item):
self._set_container(item)
list.insert(self,i,item)
def remove(self, item):
self._unset_container(item)
list.remove(self,item)
def pop(self, i=-1):
self._unset_container(self[i])
return list.pop(self,i)
# These don't work because they make the elements part of more than one list, or one list more than once
def __add__(self, other):
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __imul__(self,other):
raise NotImplementedError
def __mul__(self, other):
raise NotImplementedError
def __rmul__(self,other):
raise NotImplementedError
# only works if other is not also a Container
def __iadd__(self, other):
self.extend(other)
return self
def __setitem__(self, key, value):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
try:
self._set_container_multi(value)
except ContainerError:
self._set_container_multi(self[key])
raise
else:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
self._set_container(self[key])
raise
list.__setitem__(self,key,value)
def __delitem__(self, key):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
else:
self._unset_container(self[key])
list.__delitem__(self,key)
# Needed for python2, forbidden for python3
def __delslice__(self,i,j):
del self[slice(i,j,None)]
class ContainerDict(dict,Container):
"""A ContainerDict is a dict whose children know they're in it.
Each element in the ContainerDict has a .container attribute which points
to the ContainerDict itself. This container pointer is maintained automatically.
"""
def __init__(self, contents=None, **kwargs):
if contents is None:
dict.__init__(self, **kwargs)
else:
dict.__init__(self, contents, **kwargs)
self._set_container_multi(list(self.values()))
def __repr__(self):
return "<CD %s>" % dict.__repr__(self)
def __setitem__(self, key, value):
if key in self:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
if key in self:
self._set_container(self[key])
raise
dict.__setitem__(self,key,value)
def __delitem__(self, key):
if key in self:
self._unset_container(self[key])
dict.__delitem__(self,key)
def pop(self, key):
if key in self:
self._unset_container(self[key])
return dict.pop(self,key)
def popitem(self):
key, value = dict.popitem(self)
self._unset_container(value)
return key, value
def setdefault(self, key, default=None):
if key not in self:
self._set_container(default)
dict.setdefault(self, key, default)
def update(self, other):
for (k,v) in list(other.items()):
self[k] = v
if __name__=='__main__':
class Gear(object):
def __init__(self, name, container=None):
self.name = name
self.container = container
def __repr__(self):
return "<G "+str(self.name)+">"
gears = [Gear(n) for n in range(10)]
a = Gear("A")
b = Gear("B")
c = Gear("C")
d = Gear("D")
e = Gear("E")
p = ContainerList([a,b,c])
print(p)
try:
p.append(a)
except ContainerError as err:
print(err)
else:
raise AssertionError
print(p[1])
print(p[::2])
p[1] = d
print(p)
p[1] = b
p[::2] = [d,e]
print(p)
del p[:]
p2 = ContainerList([a,b,c])
print(p2)
p2.extend([d,e])
print(p2)
print(p2.pop())
print(p2)
p2.remove(d)
print(p2)
p2 += [d,e]
print(p2)
try:
d = ContainerDict(a=a, b=b, c=c)
except ContainerError as err:
print(err)
else:
raise AssertionError
del p2[:]
d = ContainerDict(a=a, b=b, c=c)
print(d)
print(d["a"])
d["a"] = a
try:
d["a"] = b
except ContainerError as err:
print(err)
else:
raise AssertionError
del d["a"]
d["a"] = a
d.pop("a")
print(d)
d["a"] = a
k,v = d.popitem()
d[k] = v
d.setdefault("e",e)
d.setdefault("e",e)
print(d)
del d["e"]
d.update(dict(e=e))
print(d)
| insert | identifier_name |
container.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Anne Archibald <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
class ContainerError(ValueError):
"""Error signaling something went wrong with container handling"""
pass
class Container(object):
"""A container is an object that manages objects it contains.
The objects in a container each have a .container attribute that
points to the container. This attribute is managed by the container
itself.
This class is a base class that provides common container functionality,
to be used to simplify implementation of list and dict containers.
"""
def _set_container(self, item):
if hasattr( item, "container" ) and item.container not in (None,self):
# raise ContainerError("Item %s was added to container %s but was already in container %s" % (item, self, item.container))
item.container.remove( item )
item.container = self
def _unset_container(self, item):
if item.container is not self:
raise ContainerError("Item %s was removed from container %s but was not in it" % (item, self))
item.container = None
def _set_container_multi(self, items):
"""Put items in the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._set_container(i)
r.append(i)
r = None
finally: # Make sure items don't get added to this if any fail
if r is not None:
for i in r:
try:
self._unset_container(i)
except ContainerError:
pass
def _unset_container_multi(self, items):
"""Remove items from the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._unset_container(i)
r.append(i)
r = None
finally:
if r is not None:
for i in r:
try:
self._set_container(i)
except ContainerError:
pass
class ContainerList(list,Container):
"""A ContainerList is a list whose children know they're in it.
Each element in the ContainerList has a .container attribute which points
to the ContainerList itself. This container pointer is maintained automatically.
"""
def __init__(self, items=[], owner=None):
list.__init__(self, items)
self._set_container_multi(items)
self.owner = owner
def __repr__(self):
return "<CL %s>" % list.__repr__(self)
def append(self, item):
self._set_container(item)
list.append(self,item)
def extend(self, items):
self._set_container_multi(items)
list.extend(self,items)
def insert(self, i, item):
self._set_container(item)
list.insert(self,i,item)
def remove(self, item):
self._unset_container(item)
list.remove(self,item)
def pop(self, i=-1):
self._unset_container(self[i])
return list.pop(self,i)
# These don't work because they make the elements part of more than one list, or one list more than once
def __add__(self, other):
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __imul__(self,other):
raise NotImplementedError
def __mul__(self, other):
raise NotImplementedError
def __rmul__(self,other):
raise NotImplementedError
# only works if other is not also a Container
def __iadd__(self, other):
self.extend(other)
return self
def __setitem__(self, key, value):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
try:
self._set_container_multi(value)
except ContainerError:
self._set_container_multi(self[key])
raise
else:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
self._set_container(self[key])
raise
list.__setitem__(self,key,value)
def __delitem__(self, key):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
else:
self._unset_container(self[key])
list.__delitem__(self,key)
# Needed for python2, forbidden for python3
def __delslice__(self,i,j):
del self[slice(i,j,None)]
class ContainerDict(dict,Container):
"""A ContainerDict is a dict whose children know they're in it.
Each element in the ContainerDict has a .container attribute which points
to the ContainerDict itself. This container pointer is maintained automatically.
"""
def __init__(self, contents=None, **kwargs):
if contents is None:
dict.__init__(self, **kwargs)
else:
dict.__init__(self, contents, **kwargs)
self._set_container_multi(list(self.values()))
def __repr__(self):
return "<CD %s>" % dict.__repr__(self)
def __setitem__(self, key, value):
if key in self:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
if key in self:
self._set_container(self[key])
raise
dict.__setitem__(self,key,value)
def __delitem__(self, key):
if key in self:
self._unset_container(self[key])
dict.__delitem__(self,key)
def pop(self, key):
if key in self:
self._unset_container(self[key])
return dict.pop(self,key)
def popitem(self):
key, value = dict.popitem(self)
self._unset_container(value)
return key, value
def setdefault(self, key, default=None):
if key not in self:
self._set_container(default)
        return dict.setdefault(self, key, default)
def update(self, other):
for (k,v) in list(other.items()):
self[k] = v
if __name__=='__main__':
class Gear(object):
def __init__(self, name, container=None):
self.name = name
self.container = container
def __repr__(self):
return "<G "+str(self.name)+">"
gears = [Gear(n) for n in range(10)]
a = Gear("A")
b = Gear("B")
c = Gear("C")
d = Gear("D")
e = Gear("E")
p = ContainerList([a,b,c])
print(p)
try:
p.append(a)
except ContainerError as err:
print(err)
else:
raise AssertionError
print(p[1])
print(p[::2])
p[1] = d
print(p)
p[1] = b
p[::2] = [d,e]
print(p)
del p[:]
p2 = ContainerList([a,b,c])
print(p2)
p2.extend([d,e])
print(p2)
print(p2.pop())
print(p2)
p2.remove(d)
print(p2)
p2 += [d,e]
print(p2)
    try:
        d = ContainerDict(a=a, b=b, c=c)
    except ContainerError as err:
        print(err)
    else:
        raise AssertionError
    del p2[:]
d = ContainerDict(a=a, b=b, c=c)
print(d)
print(d["a"])
d["a"] = a
try:
d["a"] = b
except ContainerError as err:
print(err)
else:
raise AssertionError
del d["a"]
d["a"] = a
d.pop("a")
print(d)
d["a"] = a
k,v = d.popitem()
d[k] = v
d.setdefault("e",e)
d.setdefault("e",e)
print(d)
del d["e"]
d.update(dict(e=e))
    print(d)
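    # Illustrative usage sketch (assumes the Gear helper defined above):
    # because _set_container() moves an item out of its previous container
    # instead of raising, appending to a second list re-parents the item.
    g = Gear("G")
    src = ContainerList([g])
    dst = ContainerList([])
    dst.append(g)
    assert g.container is dst
    assert list(src) == []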
reprojection.js
goog.require('ol.Map');
goog.require('ol.View');
goog.require('ol.extent');
goog.require('ol.format.WMTSCapabilities');
goog.require('ol.layer.Tile');
goog.require('ol.proj');
goog.require('ol.source.OSM');
goog.require('ol.source.TileImage');
goog.require('ol.source.TileWMS');
goog.require('ol.source.WMTS');
goog.require('ol.source.XYZ');
goog.require('ol.tilegrid.TileGrid');
proj4.defs('EPSG:27700', '+proj=tmerc +lat_0=49 +lon_0=-2 +k=0.9996012717 ' +
'+x_0=400000 +y_0=-100000 +ellps=airy ' +
'+towgs84=446.448,-125.157,542.06,0.15,0.247,0.842,-20.489 ' +
'+units=m +no_defs');
var proj27700 = ol.proj.get('EPSG:27700');
proj27700.setExtent([0, 0, 700000, 1300000]);
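// The pattern repeated below: register the projection string with proj4,
// look up the resulting ol projection object, and call setExtent() so
// OpenLayers can clip the view and derive tile resolutions for it.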
proj4.defs('EPSG:23032', '+proj=utm +zone=32 +ellps=intl ' +
'+towgs84=-87,-98,-121,0,0,0,0 +units=m +no_defs');
var proj23032 = ol.proj.get('EPSG:23032');
proj23032.setExtent([-1206118.71, 4021309.92, 1295389.00, 8051813.28]);
proj4.defs('EPSG:5479', '+proj=lcc +lat_1=-76.66666666666667 +lat_2=' +
'-79.33333333333333 +lat_0=-78 +lon_0=163 +x_0=7000000 +y_0=5000000 ' +
'+ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs');
var proj5479 = ol.proj.get('EPSG:5479');
proj5479.setExtent([6825737.53, 4189159.80, 9633741.96, 5782472.71]);
proj4.defs('EPSG:21781', '+proj=somerc +lat_0=46.95240555555556 ' +
'+lon_0=7.439583333333333 +k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel ' +
'+towgs84=674.4,15.1,405.3,0,0,0,0 +units=m +no_defs');
var proj21781 = ol.proj.get('EPSG:21781');
proj21781.setExtent([485071.54, 75346.36, 828515.78, 299941.84]);
proj4.defs('EPSG:3413', '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 ' +
'+x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs');
var proj3413 = ol.proj.get('EPSG:3413');
proj3413.setExtent([-4194304, -4194304, 4194304, 4194304]);
proj4.defs('EPSG:2163', '+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 ' +
'+a=6370997 +b=6370997 +units=m +no_defs');
var proj2163 = ol.proj.get('EPSG:2163');
proj2163.setExtent([-8040784.5135, -2577524.9210, 3668901.4484, 4785105.1096]);
proj4.defs('ESRI:54009', '+proj=moll +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 ' +
'+units=m +no_defs');
var proj54009 = ol.proj.get('ESRI:54009');
proj54009.setExtent([-18e6, -9e6, 18e6, 9e6]);
var layers = {};
layers['bng'] = new ol.layer.Tile({
source: new ol.source.XYZ({
projection: 'EPSG:27700',
url: 'https://tileserver.maptiler.com/miniscale/{z}/{x}/{y}.png',
crossOrigin: '',
maxZoom: 6
})
});
layers['osm'] = new ol.layer.Tile({
source: new ol.source.OSM()
});
layers['wms4326'] = new ol.layer.Tile({
source: new ol.source.TileWMS({
url: 'https://ahocevar.com/geoserver/wms',
crossOrigin: '',
params: {
'LAYERS': 'ne:NE1_HR_LC_SR_W_DR',
'TILED': true
},
projection: 'EPSG:4326'
})
});
layers['wms21781'] = new ol.layer.Tile({
source: new ol.source.TileWMS({
attributions: '© <a href="http://www.geo.admin.ch/internet/geoportal/' +
'en/home.html">Pixelmap 1:1000000 / geo.admin.ch</a>',
crossOrigin: 'anonymous',
params: {
'LAYERS': 'ch.swisstopo.pixelkarte-farbe-pk1000.noscale',
'FORMAT': 'image/jpeg'
},
url: 'https://wms.geo.admin.ch/',
projection: 'EPSG:21781'
})
});
var parser = new ol.format.WMTSCapabilities();
var url = 'https://map1.vis.earthdata.nasa.gov/wmts-arctic/' +
'wmts.cgi?SERVICE=WMTS&request=GetCapabilities';
fetch(url).then(function(response) {
return response.text();
}).then(function(text) {
var result = parser.read(text);
var options = ol.source.WMTS.optionsFromCapabilities(result, {
layer: 'OSM_Land_Mask',
matrixSet: 'EPSG3413_250m'
});
options.crossOrigin = '';
options.projection = 'EPSG:3413';
options.wrapX = false;
layers['wmts3413'] = new ol.layer.Tile({
source: new ol.source.WMTS(/** @type {!olx.source.WMTSOptions} */ (options))
});
});
layers['grandcanyon'] = new ol.layer.Tile({
source: new ol.source.XYZ({
url: 'https://tileserver.maptiler.com/grandcanyon@2x/{z}/{x}/{y}.png',
crossOrigin: '',
tilePixelRatio: 2,
maxZoom: 15,
attributions: 'Tiles © USGS, rendered with ' +
'<a href="http://www.maptiler.com/">MapTiler</a>'
})
});
var startResolution =
ol.extent.getWidth(ol.proj.get('EPSG:3857').getExtent()) / 256;
var resolutions = new Array(22);
for (var i = 0, ii = resolutions.length; i < ii; ++i) {
resolutions[i] = startResolution / Math.pow(2, i);
}
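// Each zoom level halves the resolution, matching the standard 256px
// web-mercator pyramid that the custom tile grid below builds on.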
layers['states'] = new ol.layer.Tile({
source: new ol.source.TileWMS({
url: 'https://ahocevar.com/geoserver/wms',
crossOrigin: '',
params: {'LAYERS': 'topp:states'},
serverType: 'geoserver',
tileGrid: new ol.tilegrid.TileGrid({
extent: [-13884991, 2870341, -7455066, 6338219],
resolutions: resolutions,
tileSize: [512, 256]
}),
projection: 'EPSG:3857'
})
});
var map = new ol.Map({
layers: [
layers['osm'],
layers['bng']
],
target: 'map',
view: new ol.View({
projection: 'EPSG:3857',
center: [0, 0],
zoom: 2
})
});
var baseLayerSelect = document.getElementById('base-layer');
var overlayLayerSelect = document.getElementById('overlay-layer');
var viewProjSelect = document.getElementById('view-projection');
var renderEdgesCheckbox = document.getElementById('render-edges');
var renderEdges = false;
function updateViewProjection() {
var newProj = ol.proj.get(viewProjSelect.value);
var newProjExtent = newProj.getExtent();
var newView = new ol.View({
projection: newProj,
center: ol.extent.getCenter(newProjExtent || [0, 0, 0, 0]),
zoom: 0,
extent: newProjExtent || undefined
});
map.setView(newView);
// Example how to prevent double occurrence of map by limiting layer extent
if (newProj == ol.proj.get('EPSG:3857')) {
layers['bng'].setExtent([-1057216, 6405988, 404315, 8759696]);
} else {
layers['bng'].setExtent(undefined);
}
}
/**
* Handle change event.
*/
viewProjSelect.onchange = function() {
updateViewProjection();
};
updateViewProjection();
var updateRenderEdgesOnLayer = function(layer) {
if (layer instanceof ol.layer.Tile) {
var source = layer.getSource();
if (source instanceof ol.source.TileImage) {
source.setRenderReprojectionEdges(renderEdges);
}
}
};
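// Reprojection edge rendering only exists on raster sources, hence the
// TileImage instanceof guard above.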
/**
* Handle change event.
*/
baseLayerSelect.onchange = function() {
var layer = layers[baseLayerSelect.value];
if (layer) {
layer.setOpacity(1);
updateRenderEdgesOnLayer(layer);
map.getLayers().setAt(0, layer);
}
};
/**
* Handle change event.
*/
overlayLayerSelect.onchange = function() {
var layer = layers[overlayLayerSelect.value];
if (layer) {
layer.setOpacity(0.7);
updateRenderEdgesOnLayer(layer);
map.getLayers().setAt(1, layer);
}
};
/**
 * Handle change event.
*/
renderEdgesCheckbox.onchange = function() {
renderEdges = renderEdgesCheckbox.checked;
map.getLayers().forEach(function(layer) {
updateRenderEdgesOnLayer(layer);
});
};
common.rs
use byteorder::{BigEndian, WriteBytesExt, ReadBytesExt};
use std::io::{self, Read, Write};
pub enum SimpleFwdOp {
Connect(u32),
Connected(u32),
Data(u32, Vec<u8>),
Disconnect(u32),
Disconnected(u32), // Really want FIN ACK?
}
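// Sketch of an intended wire encoding (an assumption -- the write_to impl
// below was left commented out): one tag byte per variant, the u32 stream
// id in big-endian, and for Data the payload sent via write_frame.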
/*
impl SimpleFwdOp {
pub fn write_to<A: Write + Sized>(&self, os: &mut A) -> io::Result<()> {
use SimpleFwdOp::*;
match self {
&Connect(id) => {
try!(os.write_byte(1));
try!(os.write_all(1));
}
}
}
}
*/
pub fn read_exact<A: Read + Sized>(stream: &mut A, buf: &mut [u8]) -> io::Result<()> {
    // A single read() may return fewer bytes than requested, so loop until
    // the buffer is full, failing with UnexpectedEof on a short stream.
    let mut pos = 0;
    while pos < buf.len() {
        let n = try!(stream.read(&mut buf[pos..]));
        if n == 0 {
            return Err(io::Error::new(io::ErrorKind::UnexpectedEof,
                                      "stream ended before buffer was filled"));
        }
        pos += n;
    }
    Ok(())
}
pub fn read_frame<A: Read + Sized>(stream: &mut A) -> io::Result<Vec<u8>> {
let length = try!(stream.read_u32::<BigEndian>());
let mut buf = vec![0; length as usize];
try!(read_exact(stream, &mut buf));
Ok(buf)
}
pub fn write_frame<A: Write + Sized>(stream: &mut A, buf: &[u8]) -> io::Result<()> {
try!(stream.write_u32::<BigEndian>(buf.len() as u32));
try!(stream.write_all(buf));
stream.flush()
}
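// Framing convention: each frame is a big-endian u32 byte count followed by
// exactly that many payload bytes, so read_frame and write_frame round-trip
// arbitrary binary blobs across any Read/Write pair.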
#[cfg(test)]
mod tests {
use super::*;
use std::io::Cursor;
fn check_write_read(bs: &[u8]) {
let mut buf = vec![];
write_frame(&mut buf, &bs).unwrap();
let bs2 = read_frame(&mut Cursor::new(buf)).unwrap();
assert_eq!(bs2, bs);
}
#[test]
fn test_write_read() {
check_write_read(&[]);
check_write_read(b"a");
check_write_read(b"asdf");
check_write_read(b"asdf5");
}
}
state_defs.py
# Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
import apiutil
apiutil.CopyrightDef()
print """DESCRIPTION ""
EXPORTS
"""
keys = apiutil.GetDispatchedFunctions(sys.argv[1]+"/APIspec.txt")
for func_name in apiutil.AllSpecials( 'state' ):
print "crState%s" % func_name
for func_name in apiutil.AllSpecials( 'state_feedback' ):
    print "crStateFeedback%s" % func_name
for func_name in apiutil.AllSpecials( 'state_select' ):
print "crStateSelect%s" % func_name
print """crStateInit
crStateReadPixels
crStateGetChromiumParametervCR
crStateCreateContext
crStateCreateContextEx
crStateDestroyContext
crStateDiffContext
crStateSwitchContext
crStateMakeCurrent
crStateSetCurrent
crStateFlushFunc
crStateFlushArg
crStateDiffAPI
crStateSetCurrentPointers
crStateResetCurrentPointers
crStateCurrentRecover
crStateTransformUpdateTransform
crStateColorMaterialRecover
crStateError
crStateUpdateColorBits
crStateClientInit
crStateGetCurrent
crStateLimitsInit
crStateMergeExtensions
crStateRasterPosUpdate
crStateTextureCheckDirtyImages
crStateExtensionsInit
crStateSetExtensionString
crStateUseServerArrays
crStateUseServerArrayElements
crStateComputeVersion
crStateTransformXformPointMatrixf
crStateTransformXformPointMatrixd
crStateInitMatrixStack
crStateLoadMatrix
__currentBits
"""
| print "crStateFeedback%s" % func_name | conditional_block |
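# Illustrative sketch of the module-definition output this script emits
# (the function names here are placeholders; the real list is generated
# from APIspec.txt):
#
#   DESCRIPTION ""
#   EXPORTS
#   crStateAlphaFunc
#   crStateFeedbackVertex3f
#   crStateSelectColor4f
#   crStateInit
#   ...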
state_defs.py | # Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
import apiutil
apiutil.CopyrightDef()
print """DESCRIPTION ""
EXPORTS
"""
keys = apiutil.GetDispatchedFunctions(sys.argv[1]+"/APIspec.txt")
for func_name in apiutil.AllSpecials( 'state' ):
print "crState%s" % func_name
for func_name in apiutil.AllSpecials( 'state_feedback' ):
print "crStateFeedback%s" % func_name
for func_name in apiutil.AllSpecials( 'state_select' ):
print "crStateSelect%s" % func_name
print """crStateInit
crStateReadPixels
crStateGetChromiumParametervCR
crStateCreateContext
crStateCreateContextEx
crStateDestroyContext
crStateDiffContext
crStateSwitchContext
crStateMakeCurrent
crStateSetCurrent
crStateFlushFunc
crStateFlushArg
crStateDiffAPI
crStateSetCurrentPointers
crStateResetCurrentPointers
crStateCurrentRecover
crStateTransformUpdateTransform
crStateColorMaterialRecover
crStateError
crStateUpdateColorBits
crStateClientInit
crStateGetCurrent
crStateLimitsInit
crStateMergeExtensions
crStateRasterPosUpdate | crStateTextureCheckDirtyImages
crStateExtensionsInit
crStateSetExtensionString
crStateUseServerArrays
crStateUseServerArrayElements
crStateComputeVersion
crStateTransformXformPointMatrixf
crStateTransformXformPointMatrixd
crStateInitMatrixStack
crStateLoadMatrix
__currentBits
""" | random_line_split |
|
logger.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2017 Ivo Tzvetkov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function, unicode_literals, absolute_import
import sys
from redmsg import Subscriber
from .handlers.sqlalchemy import SQLAlchemyHandler
handlers = {
'sqlalchemy': SQLAlchemyHandler
}
class Logger(object):
def __init__(self, config):
self.channel = config['channel']
self.subscriber = Subscriber(**config['redmsg'])
self.handler = handlers[config['handler']](config[config['handler']])
def start(self):
self.subscriber.subscribe(self.channel)
latest_txid = self.handler.get_latest_txid(self.channel)
generator = self.subscriber.listen() if latest_txid is None else \
self.subscriber.listen_from(latest_txid + 1, ignore_missing=True)
        for message in generator:
            try:
                self.handler.handle(message)
            except Exception as e:
                sys.stderr.write('{0}: {1}: {2}\n'.format(e.__class__.__name__, e, message).encode('utf-8'))
def main():
import yaml
from argparse import ArgumentParser
arg_parser = ArgumentParser()
arg_parser.description = 'RedMsg logging service.'
arg_parser.add_argument('--config', metavar='FILE', default='config.yaml',
help='path to config file (default: %(default)s)')
args = arg_parser.parse_args()
with open(args.config, 'r') as file:
        config = yaml.safe_load(file)
logger = Logger(config)
logger.start()
if __name__ == '__main__':
main()
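# Illustrative sketch of a custom handler (interface inferred from the
# calls in Logger.start(): get_latest_txid(channel) and handle(message));
# the names below are placeholders, not part of the original module.
class PrintHandler(object):
    def __init__(self, config):
        self.prefix = config.get('prefix', '')

    def get_latest_txid(self, channel):
        # No persistence, so never resume from a stored txid.
        return None

    def handle(self, message):
        print('{0}{1}'.format(self.prefix, message))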
setup.py
import os
import re
from setuptools import setup, find_packages
THIS_DIR = os.path.dirname(os.path.realpath(__name__))
def read(*parts):
with open(os.path.join(THIS_DIR, *parts)) as f:
return f.read()
def get_version():
    return re.findall("__version__ = '([\d\.]+)'",
                      read('marionette', '__init__.py'), re.M)[0]
setup(name='marionette_client',
version=get_version(),
description="Marionette test automation client",
long_description='See http://marionette-client.readthedocs.org/',
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mozilla',
author='Jonathan Griffin',
author_email='[email protected]',
url='https://wiki.mozilla.org/Auto-tools/Projects/Marionette',
license='MPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
package_data={'marionette': ['touch/*.js']},
include_package_data=True,
zip_safe=False,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
marionette = marionette.runtests:cli
""",
install_requires=read('requirements.txt').splitlines(),
)
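# Illustrative note: get_version() above works by scraping
# marionette/__init__.py, which is assumed to contain a line of the form
#
#   __version__ = '1.2.3'
#
# (the version number here is a placeholder).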
setup.py | import os
import re
from setuptools import setup, find_packages
THIS_DIR = os.path.dirname(os.path.realpath(__name__))
def | (*parts):
with open(os.path.join(THIS_DIR, *parts)) as f:
return f.read()
def get_version():
return re.findall("__version__ = '([\d\.]+)'",
read('marionette', '__init__.py'), re.M)[0]
setup(name='marionette_client',
version=get_version(),
description="Marionette test automation client",
long_description='See http://marionette-client.readthedocs.org/',
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mozilla',
author='Jonathan Griffin',
author_email='[email protected]',
url='https://wiki.mozilla.org/Auto-tools/Projects/Marionette',
license='MPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
package_data={'marionette': ['touch/*.js']},
include_package_data=True,
zip_safe=False,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
marionette = marionette.runtests:cli
""",
install_requires=read('requirements.txt').splitlines(),
)
| read | identifier_name |
setup.py | import os
import re
from setuptools import setup, find_packages
THIS_DIR = os.path.dirname(os.path.realpath(__file__))  # __file__, not __name__: __name__ would resolve relative to the caller's cwd
def read(*parts):
with open(os.path.join(THIS_DIR, *parts)) as f:
return f.read()
def get_version():
return re.findall("__version__ = '([\d\.]+)'", | setup(name='marionette_client',
version=get_version(),
description="Marionette test automation client",
long_description='See http://marionette-client.readthedocs.org/',
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mozilla',
author='Jonathan Griffin',
author_email='[email protected]',
url='https://wiki.mozilla.org/Auto-tools/Projects/Marionette',
license='MPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
package_data={'marionette': ['touch/*.js']},
include_package_data=True,
zip_safe=False,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
marionette = marionette.runtests:cli
""",
install_requires=read('requirements.txt').splitlines(),
) | read('marionette', '__init__.py'), re.M)[0]
| random_line_split |
build_projects.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import posixpath
import sys
import urllib2
import buildbot_common
import build_version
import generate_make
import parse_dsc
from build_paths import SDK_SRC_DIR, OUT_DIR, SDK_RESOURCE_DIR
from build_paths import GSTORE
from generate_index import LandingPage
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
MAKE = 'nacl_sdk/make_3.99.90-26-gf80222c/make.exe'
LIB_DICT = {
'linux': [],
'mac': [],
'win': ['x86_32']
}
VALID_TOOLCHAINS = [
'bionic',
'newlib',
'glibc',
'pnacl',
'win',
'linux',
'mac',
]
# Global verbosity setting.
# If set to True (normally via a command line arg) then build_projects will
# add V=1 to all calls to 'make'
verbose = False
def Trace(msg):
if verbose:
sys.stderr.write(str(msg) + '\n')
def CopyFilesFromTo(filelist, srcdir, dstdir):
for filename in filelist:
srcpath = os.path.join(srcdir, filename)
dstpath = os.path.join(dstdir, filename)
buildbot_common.CopyFile(srcpath, dstpath)
def | (pepperdir, clobber=False):
tools_dir = os.path.join(pepperdir, 'tools')
if not os.path.exists(tools_dir):
buildbot_common.ErrorExit('SDK tools dir is missing: %s' % tools_dir)
exampledir = os.path.join(pepperdir, 'examples')
if clobber:
buildbot_common.RemoveDir(exampledir)
buildbot_common.MakeDir(exampledir)
# Copy files for individual build and landing page
files = ['favicon.ico', 'httpd.cmd', 'index.css', 'index.js',
'button_close.png', 'button_close_hover.png']
CopyFilesFromTo(files, SDK_RESOURCE_DIR, exampledir)
# Copy tools scripts and make includes
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
tools_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.mk'),
tools_dir)
# Copy tools/lib scripts
tools_lib_dir = os.path.join(pepperdir, 'tools', 'lib')
buildbot_common.MakeDir(tools_lib_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', 'lib', '*.py'),
tools_lib_dir)
# On Windows add a prebuilt make
if getos.GetPlatform() == 'win':
buildbot_common.BuildStep('Add MAKE')
make_url = posixpath.join(GSTORE, MAKE)
make_exe = os.path.join(tools_dir, 'make.exe')
with open(make_exe, 'wb') as f:
f.write(urllib2.urlopen(make_url).read())
def ValidateToolchains(toolchains):
invalid_toolchains = set(toolchains) - set(VALID_TOOLCHAINS)
if invalid_toolchains:
buildbot_common.ErrorExit('Invalid toolchain(s): %s' % (
', '.join(invalid_toolchains)))
def GetDeps(projects):
out = {}
# Build list of all project names
localtargets = [proj['NAME'] for proj in projects]
# For each project
for proj in projects:
deplist = []
# generate a list of dependencies
for targ in proj.get('TARGETS', []):
deplist.extend(targ.get('DEPS', []) + targ.get('LIBS', []))
# and add dependencies to targets built in this subtree
localdeps = [dep for dep in deplist if dep in localtargets]
if localdeps:
out[proj['NAME']] = localdeps
return out
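# Worked example (hypothetical input): for
# projects = [{'NAME': 'core', 'TARGETS': []},
#             {'NAME': 'app', 'TARGETS': [{'DEPS': ['core'], 'LIBS': ['ppapi']}]}]
# this returns {'app': ['core']}: 'ppapi' is dropped because it is not a
# target built in this subtree, and 'core' has no local deps so it gets
# no entry at all.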
def UpdateProjects(pepperdir, project_tree, toolchains,
clobber=False, configs=None, first_toolchain=False):
if configs is None:
configs = ['Debug', 'Release']
if not os.path.exists(os.path.join(pepperdir, 'tools')):
buildbot_common.ErrorExit('Examples depend on missing tools.')
if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
buildbot_common.ErrorExit('Examples depend on missing toolchains.')
ValidateToolchains(toolchains)
# Create the library output directories
libdir = os.path.join(pepperdir, 'lib')
platform = getos.GetPlatform()
for config in configs:
for arch in LIB_DICT[platform]:
dirpath = os.path.join(libdir, '%s_%s_host' % (platform, arch), config)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
landing_page = None
for branch, projects in project_tree.iteritems():
dirpath = os.path.join(pepperdir, branch)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
targets = [desc['NAME'] for desc in projects]
deps = GetDeps(projects)
# Generate master make for this branch of projects
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch),
targets, deps)
if branch.startswith('examples') and not landing_page:
landing_page = LandingPage()
# Generate individual projects
for desc in projects:
srcroot = os.path.dirname(desc['FILEPATH'])
generate_make.ProcessProject(pepperdir, srcroot, pepperdir, desc,
toolchains, configs=configs,
first_toolchain=first_toolchain)
if branch.startswith('examples'):
landing_page.AddDesc(desc)
if landing_page:
# Generate the landing page HTML file.
index_html = os.path.join(pepperdir, 'examples', 'index.html')
index_template = os.path.join(SDK_RESOURCE_DIR, 'index.html.template')
with open(index_html, 'w') as fh:
out = landing_page.GeneratePage(index_template)
fh.write(out)
# Generate top Make for examples
targets = ['api', 'demo', 'getting_started', 'tutorial']
targets = [x for x in targets if 'examples/'+x in project_tree]
branch_name = 'examples'
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch_name),
targets, {})
def BuildProjectsBranch(pepperdir, branch, deps, clean, config, args=None):
make_dir = os.path.join(pepperdir, branch)
print "\nMake: " + make_dir
if getos.GetPlatform() == 'win':
# We need to modify the environment to build host on Windows.
make = os.path.join(make_dir, 'make.bat')
else:
make = 'make'
env = None
if os.environ.get('USE_GOMA') == '1':
env = dict(os.environ)
env['NACL_COMPILER_PREFIX'] = 'gomacc'
# Add -m32 to the CFLAGS when building using i686-nacl-gcc
# otherwise goma won't recognise it as different to the x86_64
# build.
env['X86_32_CFLAGS'] = '-m32'
env['X86_32_CXXFLAGS'] = '-m32'
jobs = '50'
else:
jobs = str(multiprocessing.cpu_count())
make_cmd = [make, '-j', jobs]
make_cmd.append('CONFIG='+config)
# We always ENABLE_BIONIC in case we need it. If neither --bionic nor
# -t bionic have been provided on the command line, then VALID_TOOLCHAINS
# will not contain a bionic target.
make_cmd.append('ENABLE_BIONIC=1')
if not deps:
make_cmd.append('IGNORE_DEPS=1')
if verbose:
make_cmd.append('V=1')
if args:
make_cmd += args
else:
make_cmd.append('TOOLCHAIN=all')
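# At this point make_cmd looks like, e.g. (illustrative: 8-core non-Windows
# host, deps enabled, non-verbose, no extra args, config='Debug'):
# ['make', '-j', '8', 'CONFIG=Debug', 'ENABLE_BIONIC=1', 'TOOLCHAIN=all']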
buildbot_common.Run(make_cmd, cwd=make_dir, env=env)
if clean:
# Clean to remove temporary files but keep the built artifacts.
buildbot_common.Run(make_cmd + ['clean'], cwd=make_dir, env=env)
def BuildProjects(pepperdir, project_tree, deps=True,
clean=False, config='Debug'):
# Make sure we build libraries (which live in 'src') before
# any of the examples.
build_first = [p for p in project_tree if p == 'src']
build_second = [p for p in project_tree if p != 'src']
for branch in build_first + build_second:
BuildProjectsBranch(pepperdir, branch, deps, clean, config)
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-c', '--clobber',
help='Clobber project directories before copying new files',
action='store_true', default=False)
parser.add_option('-b', '--build',
help='Build the projects. Otherwise the projects are only copied.',
action='store_true')
parser.add_option('--config',
help='Choose configuration to build (Debug or Release). Builds both '
'by default')
parser.add_option('--bionic',
help='Enable bionic projects', action='store_true')
parser.add_option('-x', '--experimental',
help='Build experimental projects', action='store_true')
parser.add_option('-t', '--toolchain',
help='Build using toolchain. Can be passed more than once.',
action='append', default=[])
parser.add_option('-d', '--dest',
help='Select which build destinations (project types) are valid.',
action='append')
parser.add_option('-v', '--verbose', action='store_true')
# To set up bash completion for this command, first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_projects.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options, args = parser.parse_args(argv[1:])
global verbose
if options.verbose:
verbose = True
buildbot_common.verbose = verbose
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
if not options.toolchain:
# Order matters here: the default toolchain for an example's Makefile will
# be the first toolchain in this list that is available in the example.
# e.g. If an example supports newlib and glibc, then the default will be
# newlib.
options.toolchain = ['pnacl', 'newlib', 'glibc', 'host']
if options.experimental or options.bionic:
options.toolchain.append('bionic')
if 'host' in options.toolchain:
options.toolchain.remove('host')
options.toolchain.append(getos.GetPlatform())
Trace('Adding platform: ' + getos.GetPlatform())
ValidateToolchains(options.toolchain)
filters = {}
if options.toolchain:
filters['TOOLS'] = options.toolchain
Trace('Filter by toolchain: ' + str(options.toolchain))
if not options.experimental:
filters['EXPERIMENTAL'] = False
if options.dest:
filters['DEST'] = options.dest
Trace('Filter by type: ' + str(options.dest))
if args:
filters['NAME'] = args
Trace('Filter by name: ' + str(args))
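# e.g. after 'build_projects.py -t pnacl hello_world' on Linux, filters is
# (project name illustrative):
# {'TOOLS': ['pnacl'], 'EXPERIMENTAL': False, 'NAME': ['hello_world']}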
try:
project_tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit(str(e))
if verbose:
parse_dsc.PrintProjectTree(project_tree)
UpdateHelpers(pepperdir, clobber=options.clobber)
UpdateProjects(pepperdir, project_tree, options.toolchain,
clobber=options.clobber)
if options.build:
if options.config:
configs = [options.config]
else:
configs = ['Debug', 'Release']
for config in configs:
BuildProjects(pepperdir, project_tree, config=config, deps=False)
return 0
if __name__ == '__main__':
script_name = os.path.basename(sys.argv[0])
try:
sys.exit(main(sys.argv))
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit('%s: %s' % (script_name, e))
except KeyboardInterrupt:
buildbot_common.ErrorExit('%s: interrupted' % script_name)
| UpdateHelpers | identifier_name |
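For context, a typical invocation of the build_projects.py script above (flags taken from its optparse setup; the toolchain and project name are illustrative) would be:
python build_projects.py -v -t pnacl -b --config Debug hello_world
which copies the filtered projects into the pepper_<version> output tree and then builds them with the pnacl toolchain in the Debug configuration.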
build_projects.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import posixpath
import sys
import urllib2
import buildbot_common
import build_version
import generate_make
import parse_dsc
from build_paths import SDK_SRC_DIR, OUT_DIR, SDK_RESOURCE_DIR
from build_paths import GSTORE
from generate_index import LandingPage
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
MAKE = 'nacl_sdk/make_3.99.90-26-gf80222c/make.exe'
LIB_DICT = {
'linux': [],
'mac': [],
'win': ['x86_32']
}
VALID_TOOLCHAINS = [
'bionic',
'newlib',
'glibc',
'pnacl',
'win',
'linux',
'mac',
]
# Global verbosity setting.
# If set to True (normally via a command line arg) then build_projects will
# add V=1 to all calls to 'make'
verbose = False
def Trace(msg):
if verbose:
sys.stderr.write(str(msg) + '\n')
def CopyFilesFromTo(filelist, srcdir, dstdir):
for filename in filelist:
srcpath = os.path.join(srcdir, filename)
dstpath = os.path.join(dstdir, filename)
buildbot_common.CopyFile(srcpath, dstpath)
def UpdateHelpers(pepperdir, clobber=False):
tools_dir = os.path.join(pepperdir, 'tools')
if not os.path.exists(tools_dir):
buildbot_common.ErrorExit('SDK tools dir is missing: %s' % tools_dir)
exampledir = os.path.join(pepperdir, 'examples')
if clobber:
buildbot_common.RemoveDir(exampledir)
buildbot_common.MakeDir(exampledir)
# Copy files for individual build and landing page
files = ['favicon.ico', 'httpd.cmd', 'index.css', 'index.js',
'button_close.png', 'button_close_hover.png']
CopyFilesFromTo(files, SDK_RESOURCE_DIR, exampledir)
# Copy tools scripts and make includes
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
tools_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.mk'),
tools_dir)
# Copy tools/lib scripts
tools_lib_dir = os.path.join(pepperdir, 'tools', 'lib')
buildbot_common.MakeDir(tools_lib_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', 'lib', '*.py'),
tools_lib_dir)
# On Windows add a prebuilt make
if getos.GetPlatform() == 'win':
buildbot_common.BuildStep('Add MAKE')
make_url = posixpath.join(GSTORE, MAKE)
make_exe = os.path.join(tools_dir, 'make.exe')
with open(make_exe, 'wb') as f:
f.write(urllib2.urlopen(make_url).read())
def ValidateToolchains(toolchains):
invalid_toolchains = set(toolchains) - set(VALID_TOOLCHAINS)
if invalid_toolchains:
buildbot_common.ErrorExit('Invalid toolchain(s): %s' % (
', '.join(invalid_toolchains)))
def GetDeps(projects):
out = {}
# Build list of all project names
localtargets = [proj['NAME'] for proj in projects]
# For each project
for proj in projects:
deplist = []
# generate a list of dependencies
for targ in proj.get('TARGETS', []):
deplist.extend(targ.get('DEPS', []) + targ.get('LIBS', []))
# and add dependencies to targets built in this subtree
localdeps = [dep for dep in deplist if dep in localtargets]
if localdeps:
out[proj['NAME']] = localdeps
return out
def UpdateProjects(pepperdir, project_tree, toolchains,
clobber=False, configs=None, first_toolchain=False):
if configs is None:
configs = ['Debug', 'Release']
if not os.path.exists(os.path.join(pepperdir, 'tools')):
buildbot_common.ErrorExit('Examples depend on missing tools.')
if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
buildbot_common.ErrorExit('Examples depend on missing toolchains.')
ValidateToolchains(toolchains)
# Create the library output directories
libdir = os.path.join(pepperdir, 'lib')
platform = getos.GetPlatform()
for config in configs:
for arch in LIB_DICT[platform]:
dirpath = os.path.join(libdir, '%s_%s_host' % (platform, arch), config)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
landing_page = None
for branch, projects in project_tree.iteritems():
dirpath = os.path.join(pepperdir, branch)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
targets = [desc['NAME'] for desc in projects]
deps = GetDeps(projects)
# Generate master make for this branch of projects
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch),
targets, deps)
if branch.startswith('examples') and not landing_page:
landing_page = LandingPage()
# Generate individual projects
for desc in projects:
srcroot = os.path.dirname(desc['FILEPATH'])
generate_make.ProcessProject(pepperdir, srcroot, pepperdir, desc,
toolchains, configs=configs,
first_toolchain=first_toolchain)
if branch.startswith('examples'):
landing_page.AddDesc(desc)
if landing_page:
# Generate the landing page HTML file.
index_html = os.path.join(pepperdir, 'examples', 'index.html')
index_template = os.path.join(SDK_RESOURCE_DIR, 'index.html.template')
with open(index_html, 'w') as fh:
out = landing_page.GeneratePage(index_template)
fh.write(out)
# Generate top Make for examples
targets = ['api', 'demo', 'getting_started', 'tutorial']
targets = [x for x in targets if 'examples/'+x in project_tree]
branch_name = 'examples'
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch_name),
targets, {})
def BuildProjectsBranch(pepperdir, branch, deps, clean, config, args=None):
make_dir = os.path.join(pepperdir, branch)
print "\nMake: " + make_dir
if getos.GetPlatform() == 'win':
# We need to modify the environment to build host on Windows.
make = os.path.join(make_dir, 'make.bat')
else:
make = 'make'
env = None
if os.environ.get('USE_GOMA') == '1':
env = dict(os.environ)
env['NACL_COMPILER_PREFIX'] = 'gomacc'
# Add -m32 to the CFLAGS when building using i686-nacl-gcc
# otherwise goma won't recognise it as different to the x86_64
# build.
env['X86_32_CFLAGS'] = '-m32'
env['X86_32_CXXFLAGS'] = '-m32'
jobs = '50'
else:
jobs = str(multiprocessing.cpu_count())
make_cmd = [make, '-j', jobs]
make_cmd.append('CONFIG='+config)
# We always ENABLE_BIONIC in case we need it. If neither --bionic nor
# -t bionic have been provided on the command line, then VALID_TOOLCHAINS
# will not contain a bionic target.
make_cmd.append('ENABLE_BIONIC=1')
if not deps:
make_cmd.append('IGNORE_DEPS=1')
if verbose:
make_cmd.append('V=1')
if args:
make_cmd += args
else:
make_cmd.append('TOOLCHAIN=all')
buildbot_common.Run(make_cmd, cwd=make_dir, env=env)
if clean:
# Clean to remove temporary files but keep the built artifacts.
buildbot_common.Run(make_cmd + ['clean'], cwd=make_dir, env=env)
def BuildProjects(pepperdir, project_tree, deps=True,
clean=False, config='Debug'):
# Make sure we build libraries (which live in 'src') before
# any of the examples.
build_first = [p for p in project_tree if p == 'src']
build_second = [p for p in project_tree if p != 'src']
for branch in build_first + build_second:
BuildProjectsBranch(pepperdir, branch, deps, clean, config)
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-c', '--clobber',
help='Clobber project directories before copying new files',
action='store_true', default=False)
parser.add_option('-b', '--build',
help='Build the projects. Otherwise the projects are only copied.',
action='store_true')
parser.add_option('--config',
help='Choose configuration to build (Debug or Release). Builds both '
'by default')
parser.add_option('--bionic',
help='Enable bionic projects', action='store_true')
parser.add_option('-x', '--experimental',
help='Build experimental projects', action='store_true')
parser.add_option('-t', '--toolchain',
help='Build using toolchain. Can be passed more than once.',
action='append', default=[])
parser.add_option('-d', '--dest',
help='Select which build destinations (project types) are valid.',
action='append')
parser.add_option('-v', '--verbose', action='store_true')
# To set up bash completion for this command, first install optcomplete
# and then add this line to your .bashrc: | try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options, args = parser.parse_args(argv[1:])
global verbose
if options.verbose:
verbose = True
buildbot_common.verbose = verbose
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
if not options.toolchain:
# Order matters here: the default toolchain for an example's Makefile will
# be the first toolchain in this list that is available in the example.
# e.g. If an example supports newlib and glibc, then the default will be
# newlib.
options.toolchain = ['pnacl', 'newlib', 'glibc', 'host']
if options.experimental or options.bionic:
options.toolchain.append('bionic')
if 'host' in options.toolchain:
options.toolchain.remove('host')
options.toolchain.append(getos.GetPlatform())
Trace('Adding platform: ' + getos.GetPlatform())
ValidateToolchains(options.toolchain)
filters = {}
if options.toolchain:
filters['TOOLS'] = options.toolchain
Trace('Filter by toolchain: ' + str(options.toolchain))
if not options.experimental:
filters['EXPERIMENTAL'] = False
if options.dest:
filters['DEST'] = options.dest
Trace('Filter by type: ' + str(options.dest))
if args:
filters['NAME'] = args
Trace('Filter by name: ' + str(args))
try:
project_tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit(str(e))
if verbose:
parse_dsc.PrintProjectTree(project_tree)
UpdateHelpers(pepperdir, clobber=options.clobber)
UpdateProjects(pepperdir, project_tree, options.toolchain,
clobber=options.clobber)
if options.build:
if options.config:
configs = [options.config]
else:
configs = ['Debug', 'Release']
for config in configs:
BuildProjects(pepperdir, project_tree, config=config, deps=False)
return 0
if __name__ == '__main__':
script_name = os.path.basename(sys.argv[0])
try:
sys.exit(main(sys.argv))
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit('%s: %s' % (script_name, e))
except KeyboardInterrupt:
buildbot_common.ErrorExit('%s: interrupted' % script_name) | # complete -F _optcomplete build_projects.py | random_line_split |
build_projects.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import posixpath
import sys
import urllib2
import buildbot_common
import build_version
import generate_make
import parse_dsc
from build_paths import SDK_SRC_DIR, OUT_DIR, SDK_RESOURCE_DIR
from build_paths import GSTORE
from generate_index import LandingPage
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
MAKE = 'nacl_sdk/make_3.99.90-26-gf80222c/make.exe'
LIB_DICT = {
'linux': [],
'mac': [],
'win': ['x86_32']
}
VALID_TOOLCHAINS = [
'bionic',
'newlib',
'glibc',
'pnacl',
'win',
'linux',
'mac',
]
# Global verbosity setting.
# If set to True (normally via a command line arg) then build_projects will
# add V=1 to all calls to 'make'
verbose = False
def Trace(msg):
if verbose:
sys.stderr.write(str(msg) + '\n')
def CopyFilesFromTo(filelist, srcdir, dstdir):
for filename in filelist:
srcpath = os.path.join(srcdir, filename)
dstpath = os.path.join(dstdir, filename)
buildbot_common.CopyFile(srcpath, dstpath)
def UpdateHelpers(pepperdir, clobber=False):
tools_dir = os.path.join(pepperdir, 'tools')
if not os.path.exists(tools_dir):
buildbot_common.ErrorExit('SDK tools dir is missing: %s' % tools_dir)
exampledir = os.path.join(pepperdir, 'examples')
if clobber:
buildbot_common.RemoveDir(exampledir)
buildbot_common.MakeDir(exampledir)
# Copy files for individual build and landing page
files = ['favicon.ico', 'httpd.cmd', 'index.css', 'index.js',
'button_close.png', 'button_close_hover.png']
CopyFilesFromTo(files, SDK_RESOURCE_DIR, exampledir)
# Copy tools scripts and make includes
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
tools_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.mk'),
tools_dir)
# Copy tools/lib scripts
tools_lib_dir = os.path.join(pepperdir, 'tools', 'lib')
buildbot_common.MakeDir(tools_lib_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', 'lib', '*.py'),
tools_lib_dir)
# On Windows add a prebuilt make
if getos.GetPlatform() == 'win':
buildbot_common.BuildStep('Add MAKE')
make_url = posixpath.join(GSTORE, MAKE)
make_exe = os.path.join(tools_dir, 'make.exe')
with open(make_exe, 'wb') as f:
f.write(urllib2.urlopen(make_url).read())
def ValidateToolchains(toolchains):
invalid_toolchains = set(toolchains) - set(VALID_TOOLCHAINS)
if invalid_toolchains:
buildbot_common.ErrorExit('Invalid toolchain(s): %s' % (
', '.join(invalid_toolchains)))
def GetDeps(projects):
|
def UpdateProjects(pepperdir, project_tree, toolchains,
clobber=False, configs=None, first_toolchain=False):
if configs is None:
configs = ['Debug', 'Release']
if not os.path.exists(os.path.join(pepperdir, 'tools')):
buildbot_common.ErrorExit('Examples depend on missing tools.')
if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
buildbot_common.ErrorExit('Examples depend on missing toolchains.')
ValidateToolchains(toolchains)
# Create the library output directories
libdir = os.path.join(pepperdir, 'lib')
platform = getos.GetPlatform()
for config in configs:
for arch in LIB_DICT[platform]:
dirpath = os.path.join(libdir, '%s_%s_host' % (platform, arch), config)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
landing_page = None
for branch, projects in project_tree.iteritems():
dirpath = os.path.join(pepperdir, branch)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
targets = [desc['NAME'] for desc in projects]
deps = GetDeps(projects)
# Generate master make for this branch of projects
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch),
targets, deps)
if branch.startswith('examples') and not landing_page:
landing_page = LandingPage()
# Generate individual projects
for desc in projects:
srcroot = os.path.dirname(desc['FILEPATH'])
generate_make.ProcessProject(pepperdir, srcroot, pepperdir, desc,
toolchains, configs=configs,
first_toolchain=first_toolchain)
if branch.startswith('examples'):
landing_page.AddDesc(desc)
if landing_page:
# Generate the landing page HTML file.
index_html = os.path.join(pepperdir, 'examples', 'index.html')
index_template = os.path.join(SDK_RESOURCE_DIR, 'index.html.template')
with open(index_html, 'w') as fh:
out = landing_page.GeneratePage(index_template)
fh.write(out)
# Generate top Make for examples
targets = ['api', 'demo', 'getting_started', 'tutorial']
targets = [x for x in targets if 'examples/'+x in project_tree]
branch_name = 'examples'
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch_name),
targets, {})
def BuildProjectsBranch(pepperdir, branch, deps, clean, config, args=None):
make_dir = os.path.join(pepperdir, branch)
print "\nMake: " + make_dir
if getos.GetPlatform() == 'win':
# We need to modify the environment to build host on Windows.
make = os.path.join(make_dir, 'make.bat')
else:
make = 'make'
env = None
if os.environ.get('USE_GOMA') == '1':
env = dict(os.environ)
env['NACL_COMPILER_PREFIX'] = 'gomacc'
# Add -m32 to the CFLAGS when building using i686-nacl-gcc
# otherwise goma won't recognise it as different to the x86_64
# build.
env['X86_32_CFLAGS'] = '-m32'
env['X86_32_CXXFLAGS'] = '-m32'
jobs = '50'
else:
jobs = str(multiprocessing.cpu_count())
make_cmd = [make, '-j', jobs]
make_cmd.append('CONFIG='+config)
# We always ENABLE_BIONIC in case we need it. If neither --bionic nor
# -t bionic have been provided on the command line, then VALID_TOOLCHAINS
# will not contain a bionic target.
make_cmd.append('ENABLE_BIONIC=1')
if not deps:
make_cmd.append('IGNORE_DEPS=1')
if verbose:
make_cmd.append('V=1')
if args:
make_cmd += args
else:
make_cmd.append('TOOLCHAIN=all')
buildbot_common.Run(make_cmd, cwd=make_dir, env=env)
if clean:
# Clean to remove temporary files but keep the built artifacts.
buildbot_common.Run(make_cmd + ['clean'], cwd=make_dir, env=env)
def BuildProjects(pepperdir, project_tree, deps=True,
clean=False, config='Debug'):
# Make sure we build libraries (which live in 'src') before
# any of the examples.
build_first = [p for p in project_tree if p == 'src']
build_second = [p for p in project_tree if p != 'src']
for branch in build_first + build_second:
BuildProjectsBranch(pepperdir, branch, deps, clean, config)
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-c', '--clobber',
help='Clobber project directories before copying new files',
action='store_true', default=False)
parser.add_option('-b', '--build',
help='Build the projects. Otherwise the projects are only copied.',
action='store_true')
parser.add_option('--config',
help='Choose configuration to build (Debug or Release). Builds both '
'by default')
parser.add_option('--bionic',
help='Enable bionic projects', action='store_true')
parser.add_option('-x', '--experimental',
help='Build experimental projects', action='store_true')
parser.add_option('-t', '--toolchain',
help='Build using toolchain. Can be passed more than once.',
action='append', default=[])
parser.add_option('-d', '--dest',
help='Select which build destinations (project types) are valid.',
action='append')
parser.add_option('-v', '--verbose', action='store_true')
# To set up bash completion for this command, first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_projects.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options, args = parser.parse_args(argv[1:])
global verbose
if options.verbose:
verbose = True
buildbot_common.verbose = verbose
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
if not options.toolchain:
# Order matters here: the default toolchain for an example's Makefile will
# be the first toolchain in this list that is available in the example.
# e.g. If an example supports newlib and glibc, then the default will be
# newlib.
options.toolchain = ['pnacl', 'newlib', 'glibc', 'host']
if options.experimental or options.bionic:
options.toolchain.append('bionic')
if 'host' in options.toolchain:
options.toolchain.remove('host')
options.toolchain.append(getos.GetPlatform())
Trace('Adding platform: ' + getos.GetPlatform())
ValidateToolchains(options.toolchain)
filters = {}
if options.toolchain:
filters['TOOLS'] = options.toolchain
Trace('Filter by toolchain: ' + str(options.toolchain))
if not options.experimental:
filters['EXPERIMENTAL'] = False
if options.dest:
filters['DEST'] = options.dest
Trace('Filter by type: ' + str(options.dest))
if args:
filters['NAME'] = args
Trace('Filter by name: ' + str(args))
try:
project_tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit(str(e))
if verbose:
parse_dsc.PrintProjectTree(project_tree)
UpdateHelpers(pepperdir, clobber=options.clobber)
UpdateProjects(pepperdir, project_tree, options.toolchain,
clobber=options.clobber)
if options.build:
if options.config:
configs = [options.config]
else:
configs = ['Debug', 'Release']
for config in configs:
BuildProjects(pepperdir, project_tree, config=config, deps=False)
return 0
if __name__ == '__main__':
script_name = os.path.basename(sys.argv[0])
try:
sys.exit(main(sys.argv))
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit('%s: %s' % (script_name, e))
except KeyboardInterrupt:
buildbot_common.ErrorExit('%s: interrupted' % script_name)
| out = {}
# Build list of all project names
localtargets = [proj['NAME'] for proj in projects]
# For each project
for proj in projects:
deplist = []
# generate a list of dependencies
for targ in proj.get('TARGETS', []):
deplist.extend(targ.get('DEPS', []) + targ.get('LIBS', []))
# and add dependencies to targets built in this subtree
localdeps = [dep for dep in deplist if dep in localtargets]
if localdeps:
out[proj['NAME']] = localdeps
return out | identifier_body |
build_projects.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import posixpath
import sys
import urllib2
import buildbot_common
import build_version
import generate_make
import parse_dsc
from build_paths import SDK_SRC_DIR, OUT_DIR, SDK_RESOURCE_DIR
from build_paths import GSTORE
from generate_index import LandingPage
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
MAKE = 'nacl_sdk/make_3.99.90-26-gf80222c/make.exe'
LIB_DICT = {
'linux': [],
'mac': [],
'win': ['x86_32']
}
VALID_TOOLCHAINS = [
'bionic',
'newlib',
'glibc',
'pnacl',
'win',
'linux',
'mac',
]
# Global verbosity setting.
# If set to True (normally via a command line arg) then build_projects will
# add V=1 to all calls to 'make'
verbose = False
def Trace(msg):
if verbose:
sys.stderr.write(str(msg) + '\n')
def CopyFilesFromTo(filelist, srcdir, dstdir):
for filename in filelist:
srcpath = os.path.join(srcdir, filename)
dstpath = os.path.join(dstdir, filename)
buildbot_common.CopyFile(srcpath, dstpath)
def UpdateHelpers(pepperdir, clobber=False):
tools_dir = os.path.join(pepperdir, 'tools')
if not os.path.exists(tools_dir):
buildbot_common.ErrorExit('SDK tools dir is missing: %s' % tools_dir)
exampledir = os.path.join(pepperdir, 'examples')
if clobber:
buildbot_common.RemoveDir(exampledir)
buildbot_common.MakeDir(exampledir)
# Copy files for individual build and landing page
files = ['favicon.ico', 'httpd.cmd', 'index.css', 'index.js',
'button_close.png', 'button_close_hover.png']
CopyFilesFromTo(files, SDK_RESOURCE_DIR, exampledir)
# Copy tools scripts and make includes
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
tools_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.mk'),
tools_dir)
# Copy tools/lib scripts
tools_lib_dir = os.path.join(pepperdir, 'tools', 'lib')
buildbot_common.MakeDir(tools_lib_dir)
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', 'lib', '*.py'),
tools_lib_dir)
# On Windows add a prebuilt make
if getos.GetPlatform() == 'win':
buildbot_common.BuildStep('Add MAKE')
make_url = posixpath.join(GSTORE, MAKE)
make_exe = os.path.join(tools_dir, 'make.exe')
with open(make_exe, 'wb') as f:
f.write(urllib2.urlopen(make_url).read())
def ValidateToolchains(toolchains):
invalid_toolchains = set(toolchains) - set(VALID_TOOLCHAINS)
if invalid_toolchains:
buildbot_common.ErrorExit('Invalid toolchain(s): %s' % (
', '.join(invalid_toolchains)))
def GetDeps(projects):
out = {}
# Build list of all project names
localtargets = [proj['NAME'] for proj in projects]
# For each project
for proj in projects:
deplist = []
# generate a list of dependencies
for targ in proj.get('TARGETS', []):
deplist.extend(targ.get('DEPS', []) + targ.get('LIBS', []))
# and add dependencies to targets built in this subtree
localdeps = [dep for dep in deplist if dep in localtargets]
if localdeps:
out[proj['NAME']] = localdeps
return out
def UpdateProjects(pepperdir, project_tree, toolchains,
clobber=False, configs=None, first_toolchain=False):
if configs is None:
configs = ['Debug', 'Release']
if not os.path.exists(os.path.join(pepperdir, 'tools')):
buildbot_common.ErrorExit('Examples depend on missing tools.')
if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
buildbot_common.ErrorExit('Examples depend on missing toolchains.')
ValidateToolchains(toolchains)
# Create the library output directories
libdir = os.path.join(pepperdir, 'lib')
platform = getos.GetPlatform()
for config in configs:
for arch in LIB_DICT[platform]:
dirpath = os.path.join(libdir, '%s_%s_host' % (platform, arch), config)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
landing_page = None
for branch, projects in project_tree.iteritems():
dirpath = os.path.join(pepperdir, branch)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
targets = [desc['NAME'] for desc in projects]
deps = GetDeps(projects)
# Generate master make for this branch of projects
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch),
targets, deps)
if branch.startswith('examples') and not landing_page:
landing_page = LandingPage()
# Generate individual projects
for desc in projects:
srcroot = os.path.dirname(desc['FILEPATH'])
generate_make.ProcessProject(pepperdir, srcroot, pepperdir, desc,
toolchains, configs=configs,
first_toolchain=first_toolchain)
if branch.startswith('examples'):
landing_page.AddDesc(desc)
if landing_page:
# Generate the landing page HTML file.
index_html = os.path.join(pepperdir, 'examples', 'index.html')
index_template = os.path.join(SDK_RESOURCE_DIR, 'index.html.template')
with open(index_html, 'w') as fh:
out = landing_page.GeneratePage(index_template)
fh.write(out)
# Generate top Make for examples
targets = ['api', 'demo', 'getting_started', 'tutorial']
targets = [x for x in targets if 'examples/'+x in project_tree]
branch_name = 'examples'
generate_make.GenerateMasterMakefile(pepperdir,
os.path.join(pepperdir, branch_name),
targets, {})
def BuildProjectsBranch(pepperdir, branch, deps, clean, config, args=None):
make_dir = os.path.join(pepperdir, branch)
print "\nMake: " + make_dir
if getos.GetPlatform() == 'win':
# We need to modify the environment to build host on Windows.
make = os.path.join(make_dir, 'make.bat')
else:
make = 'make'
env = None
if os.environ.get('USE_GOMA') == '1':
env = dict(os.environ)
env['NACL_COMPILER_PREFIX'] = 'gomacc'
# Add -m32 to the CFLAGS when building using i686-nacl-gcc
# otherwise goma won't recognise it as different to the x86_64
# build.
env['X86_32_CFLAGS'] = '-m32'
env['X86_32_CXXFLAGS'] = '-m32'
jobs = '50'
else:
jobs = str(multiprocessing.cpu_count())
make_cmd = [make, '-j', jobs]
make_cmd.append('CONFIG='+config)
# We always ENABLE_BIONIC in case we need it. If neither --bionic nor
# -t bionic have been provided on the command line, then VALID_TOOLCHAINS
# will not contain a bionic target.
make_cmd.append('ENABLE_BIONIC=1')
if not deps:
make_cmd.append('IGNORE_DEPS=1')
if verbose:
make_cmd.append('V=1')
if args:
make_cmd += args
else:
make_cmd.append('TOOLCHAIN=all')
buildbot_common.Run(make_cmd, cwd=make_dir, env=env)
if clean:
# Clean to remove temporary files but keep the built artifacts.
buildbot_common.Run(make_cmd + ['clean'], cwd=make_dir, env=env)
def BuildProjects(pepperdir, project_tree, deps=True,
clean=False, config='Debug'):
# Make sure we build libraries (which live in 'src') before
# any of the examples.
build_first = [p for p in project_tree if p == 'src']
build_second = [p for p in project_tree if p != 'src']
for branch in build_first + build_second:
BuildProjectsBranch(pepperdir, branch, deps, clean, config)
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-c', '--clobber',
help='Clobber project directories before copying new files',
action='store_true', default=False)
parser.add_option('-b', '--build',
help='Build the projects. Otherwise the projects are only copied.',
action='store_true')
parser.add_option('--config',
help='Choose configuration to build (Debug or Release). Builds both '
'by default')
parser.add_option('--bionic',
help='Enable bionic projects', action='store_true')
parser.add_option('-x', '--experimental',
help='Build experimental projects', action='store_true')
parser.add_option('-t', '--toolchain',
help='Build using toolchain. Can be passed more than once.',
action='append', default=[])
parser.add_option('-d', '--dest',
help='Select which build destinations (project types) are valid.',
action='append')
parser.add_option('-v', '--verbose', action='store_true')
# To set up bash completion for this command, first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_projects.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options, args = parser.parse_args(argv[1:])
global verbose
if options.verbose:
verbose = True
buildbot_common.verbose = verbose
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
if not options.toolchain:
# Order matters here: the default toolchain for an example's Makefile will
# be the first toolchain in this list that is available in the example.
# e.g. If an example supports newlib and glibc, then the default will be
# newlib.
options.toolchain = ['pnacl', 'newlib', 'glibc', 'host']
if options.experimental or options.bionic:
options.toolchain.append('bionic')
if 'host' in options.toolchain:
options.toolchain.remove('host')
options.toolchain.append(getos.GetPlatform())
Trace('Adding platform: ' + getos.GetPlatform())
ValidateToolchains(options.toolchain)
filters = {}
if options.toolchain:
filters['TOOLS'] = options.toolchain
Trace('Filter by toolchain: ' + str(options.toolchain))
if not options.experimental:
filters['EXPERIMENTAL'] = False
if options.dest:
|
if args:
filters['NAME'] = args
Trace('Filter by name: ' + str(args))
try:
project_tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit(str(e))
if verbose:
parse_dsc.PrintProjectTree(project_tree)
UpdateHelpers(pepperdir, clobber=options.clobber)
UpdateProjects(pepperdir, project_tree, options.toolchain,
clobber=options.clobber)
if options.build:
if options.config:
configs = [options.config]
else:
configs = ['Debug', 'Release']
for config in configs:
BuildProjects(pepperdir, project_tree, config=config, deps=False)
return 0
if __name__ == '__main__':
script_name = os.path.basename(sys.argv[0])
try:
sys.exit(main(sys.argv))
except parse_dsc.ValidationError as e:
buildbot_common.ErrorExit('%s: %s' % (script_name, e))
except KeyboardInterrupt:
buildbot_common.ErrorExit('%s: interrupted' % script_name)
| filters['DEST'] = options.dest
Trace('Filter by type: ' + str(options.dest)) | conditional_block |
scan_dihedral.py | #!/usr/bin/env python
import sys
import string
import subprocess
#----------------------------------------------------------------------
# Define some handy functions
#----------------------------------------------------------------------
def | (file):
f = open(file,'r')
fc = f.readlines()
f.close()
return fc
def write_to_file(file,fc):
f = open(file,'w')
f.writelines(fc)
f.close()
#----------------------------------------------------------------------
# The main function
#----------------------------------------------------------------------
def main():
# Read in identities of dihedrals to change, step size and number of steps per dihedral
# Note that we will take one extra step per dihedral to capture the initial conformation
# Read in name of original pdb file, and store the base file name (without the pdb)
pdb_file = sys.argv[1]
base = pdb_file.split('/')[-1].split('.')[0]
gzmat_file = base + ".gzmat"
diheds = []
stepsizes = []
nsteps = []
for i in range(0,n_dihed):
diheds.append(sys.argv[3*i+2])
stepsizes.append(float(sys.argv[3*i+3]))
nsteps.append(int(sys.argv[3*i+4])+1)
#----------------------------------------------------------------------
# Generate gzmat file from pdb file
#----------------------------------------------------------------------
process = subprocess.Popen("babel -ipdb {0} -ogzmat {1}".format(pdb_file, gzmat_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
gzmat = get_contents(gzmat_file)
#----------------------------------------------------------------------
# Find the line numbers and initial values for each dihedral angle in the gzmat file
#----------------------------------------------------------------------
lines = []
values = []
for i in range(0,n_dihed):
dihed = diheds[i]
for j in range(0,len(gzmat)):
if string.find(gzmat[j],dihed+"=") != -1:
lines.append(j)
value = gzmat[j].split()[1]
values.append(float(value))
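# For reference, the matched gzmat[j] is a z-matrix variable line such as
# (illustrative): d8= 179.98
# so split()[1] is the current value of that dihedral.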
#----------------------------------------------------------------------
# The main bit of code that actually drives the process
# of generating new input files with altered dihedrals
# Note: the "for x in range(0,y)" is essentially a do loop,
# using different values of x at each iteration x = 0,1,2,...,y-1
# Also note that python starts counting at 0 rather than 1
#----------------------------------------------------------------------
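# Example of the output this produces (hypothetical): for base 'mol' and
# two dihedrals scanned with '... d8 30.0 2 d12 60.0 1', nsteps is [3, 2]
# (the +1 captures the starting conformation), giving
# mol_000_000.pdb through mol_002_001.pdb -- six files in total.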
for i0 in range(0,nsteps[0]):
# for first dihedral, set new value to initial + step number*step size
newvalue0 = values[0] + float(i0)*stepsizes[0]
gzmat[lines[0]] = diheds[0] + "= " + str(newvalue0) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + ".pdb"
if n_dihed == 1:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i1 in range(0,nsteps[1]):
# for second dihedral, set new value to initial + step number*step size
newvalue1 = values[1] + float(i1)*stepsizes[1]
gzmat[lines[1]] = diheds[1] + "= " + str(newvalue1) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".pdb"
if n_dihed == 2:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i2 in range(0,nsteps[2]):
# for third dihedral, set new value to initial + step number*step size
newvalue2 = values[2] + float(i2)*stepsizes[2]
gzmat[lines[2]] = diheds[2] + "= " + str(newvalue2) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".pdb"
if n_dihed == 3:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
print 'Error: more than 3 dihedrals, should not be able to get here'
sys.exit()
if __name__ == '__main__':
#----------------------------------------------------------------------
# Read in arguments from command line
#----------------------------------------------------------------------
if (len(sys.argv)-2)%3 != 0 or len(sys.argv) == 2:
print 'Usage: scan_dihedral.py <pdb_file_name> <dihedral name 1> <step size 1> <number of steps 1>'
print ' ... ... ... ... <dihedral name N> <step size N> <number of steps N>'
else:
# Calculate the number of dihedrals to change based on the number of arguments supplied
n_dihed = (len(sys.argv)-2)/3
if (n_dihed > 3):
print 'Changing more than 3 dihedrals at once, are you sure?'
print 'If so, you will need to edit the python script to remove the sys.exit() statement'
print 'And write some more do loops in the main part of the code'
sys.exit()
main()
| get_contents | identifier_name |
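A representative run of the scan_dihedral.py script above, following its own usage string (file and dihedral names are illustrative), would be:
python scan_dihedral.py mol.pdb d8 30.0 11
which converts mol.pdb to a z-matrix and writes twelve rotated structures, mol_000.pdb through mol_011.pdb (the extra file is the starting conformation).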
scan_dihedral.py | #!/usr/bin/env python
import sys
import string
import subprocess
#----------------------------------------------------------------------
# Define some handy functions
#----------------------------------------------------------------------
def get_contents(file):
f = open(file,'r')
fc = f.readlines()
f.close()
return fc
def write_to_file(file,fc):
f = open(file,'w')
f.writelines(fc)
f.close()
#----------------------------------------------------------------------
# The main function
#----------------------------------------------------------------------
def main():
# Read in identities of dihedrals to change, step size and number of steps per dihedral
# Note that we will take one extra step per dihedral to capture the initial conformation
# Read in name of original pdb file, and store the base file name (without the pdb)
|
if __name__ == '__main__':
#----------------------------------------------------------------------
# Read in arguments from command line
#----------------------------------------------------------------------
if (len(sys.argv)-2)%3 != 0 or len(sys.argv) == 2:
print 'Usage: scan_dihedral.py <pdb_file_name> <dihedral name 1> <step size 1> <number of steps 1>'
print ' ... ... ... ... <dihedral name N> <step size N> <number of steps N>'
else:
# Calculate the number of dihedrals to change based on the number of arguments supplied
n_dihed = (len(sys.argv)-2)/3
if (n_dihed > 3):
print 'Changing more than 3 dihedrals at once, are you sure?'
print 'If so, you will need to edit the python script to remove the sys.exit() statement'
print 'And write some more do loops in the main part of the code'
sys.exit()
main()
| pdb_file = sys.argv[1]
base = pdb_file.split('/')[-1].split('.')[0]
gzmat_file = base + ".gzmat"
diheds = []
stepsizes = []
nsteps = []
for i in range(0,n_dihed):
diheds.append(sys.argv[3*i+2])
stepsizes.append(float(sys.argv[3*i+3]))
nsteps.append(int(sys.argv[3*i+4])+1)
#----------------------------------------------------------------------
# Generate gzmat file from pdb file
#----------------------------------------------------------------------
process = subprocess.Popen("babel -ipdb {0} -ogzmat {1}".format(pdb_file, gzmat_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
gzmat = get_contents(gzmat_file)
#----------------------------------------------------------------------
# Find the line numbers and initial values for each dihedral angle in the gzmat file
#----------------------------------------------------------------------
lines = []
values = []
for i in range(0,n_dihed):
dihed = diheds[i]
for j in range(0,len(gzmat)):
if string.find(gzmat[j],dihed+"=") != -1:
lines.append(j)
value = gzmat[j].split()[1]
values.append(float(value))
#----------------------------------------------------------------------
# The main bit of code that actually drives the process
# of generating new input files with altered dihedrals
# Note: the "for x in range(0,y)" is essentially a do loop,
# using different values of x at each iteration x = 0,1,2,...,y-1
# Also note that python starts counting at 0 rather than 1
#----------------------------------------------------------------------
for i0 in range(0,nsteps[0]):
# for first dihedral, set new value to initial + step number*step size
newvalue0 = values[0] + float(i0)*stepsizes[0]
gzmat[lines[0]] = diheds[0] + "= " + str(newvalue0) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + ".pdb"
if n_dihed == 1:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i1 in range(0,nsteps[1]):
# for second dihedral, set new value to initial + step number*step size
newvalue1 = values[1] + float(i1)*stepsizes[1]
gzmat[lines[1]] = diheds[1] + "= " + str(newvalue1) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".pdb"
if n_dihed == 2:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i2 in range(0,nsteps[2]):
# for third dihedral, set new value to initial + step number*step size
newvalue2 = values[2] + float(i2)*stepsizes[2]
gzmat[lines[2]] = diheds[2] + "= " + str(newvalue2) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".pdb"
if n_dihed == 3:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
print 'Error: more than 3 dihedrals, should not be able to get here'
sys.exit() | identifier_body |
scan_dihedral.py | #!/usr/bin/env python
import sys
import string
import subprocess
#----------------------------------------------------------------------
# Define some handy functions
#----------------------------------------------------------------------
def get_contents(file):
f = open(file,'r')
fc = f.readlines()
f.close()
return fc
def write_to_file(file,fc):
f = open(file,'w')
f.writelines(fc)
f.close()
#----------------------------------------------------------------------
# The main function
#----------------------------------------------------------------------
def main():
# Read in identities of dihedrals to change, step size and number of steps per dihedral
# Note that we will take one extra step per dihedral to capture the initial conformation
# Read in name of original pdb file, and store the base file name (without the pdb)
pdb_file = sys.argv[1]
base = pdb_file.split('/')[-1].split('.')[0]
gzmat_file = base + ".gzmat"
diheds = []
stepsizes = []
nsteps = []
for i in range(0,n_dihed):
diheds.append(sys.argv[3*i+2])
stepsizes.append(float(sys.argv[3*i+3]))
nsteps.append(int(sys.argv[3*i+4])+1)
#----------------------------------------------------------------------
# Generate gzmat file from pdb file
#----------------------------------------------------------------------
process = subprocess.Popen("babel -ipdb {0} -ogzmat {1}".format(pdb_file, gzmat_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
gzmat = get_contents(gzmat_file)
#----------------------------------------------------------------------
# Find the line numbers and initial values for each dihedral angle in the gzmat file
#----------------------------------------------------------------------
lines = []
values = []
for i in range(0,n_dihed):
dihed = diheds[i]
for j in range(0,len(gzmat)):
if string.find(gzmat[j],dihed+"=") != -1:
lines.append(j)
value = gzmat[j].split()[1]
values.append(float(value))
#----------------------------------------------------------------------
# The main bit of code that actually drives the process
# of generating new input files with altered dihedrals
# Note: the "for x in range(0,y)" is essentially a do loop,
# using different values of x at each iteration x = 0,1,2,...,y-1
# Also note that python starts counting at 0 rather than 1
#----------------------------------------------------------------------
for i0 in range(0,nsteps[0]):
# for first dihedral, set new value to initial + step number*step size
newvalue0 = values[0] + float(i0)*stepsizes[0]
gzmat[lines[0]] = diheds[0] + "= " + str(newvalue0) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + ".pdb"
if n_dihed == 1:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i1 in range(0,nsteps[1]):
# for second dihedral, set new value to initial + step number*step size
|
if __name__ == '__main__':
#----------------------------------------------------------------------
# Read in arguments from command line
#----------------------------------------------------------------------
if (len(sys.argv)-2)%3 != 0 or len(sys.argv) == 2:
print 'Usage: scan_dihedral.py <pdb_file_name> <dihedral name 1> <step size 1> <number of steps 1>'
print ' ... ... ... ... <dihedral name N> <step size N> <number of steps N>'
else:
# Calculate the number of dihedrals to change based on the number of arguments supplied
n_dihed = (len(sys.argv)-2)/3
if (n_dihed > 3):
print 'Changing more than 3 dihedrals at once, are you sure?'
print 'If so, you will need to edit the python script to remove the sys.exit() statement'
print 'And write some more do loops in the main part of the code'
sys.exit()
main()
| newvalue1 = values[1] + float(i1)*stepsizes[1]
gzmat[lines[1]] = diheds[1] + "= " + str(newvalue1) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".pdb"
if n_dihed == 2:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i2 in range(0,nsteps[2]):
# for third dihedral, set new value to initial + step number*step size
newvalue2 = values[2] + float(i2)*stepsizes[2]
gzmat[lines[2]] = diheds[2] + "= " + str(newvalue2) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".pdb"
if n_dihed == 3:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
print 'Error: more than 3 dihedrals, should not be able to get here'
sys.exit() | conditional_block |
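# A minimal sketch of how scan_dihedral.py is meant to be invoked, based only on
# the usage text above; the molecule file name and dihedral label are
# hypothetical, not taken from the dataset.
import subprocess

# Scan dihedral "d8" in 10-degree increments over 36 steps (one full turn),
# producing butane_000.pdb ... butane_036.pdb alongside the input file.
subprocess.call(
    "python scan_dihedral.py butane.pdb d8 10.0 36".split()
)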
scan_dihedral.py | #!/usr/bin/env python
import sys
import string
import subprocess
#----------------------------------------------------------------------
# Define some handy functions
#----------------------------------------------------------------------
def get_contents(file):
f = open(file,'r')
fc = f.readlines()
f.close()
return fc
def write_to_file(file,fc):
f = open(file,'w')
f.writelines(fc)
f.close()
#----------------------------------------------------------------------
# The main function
#----------------------------------------------------------------------
def main():
# Read in identities of dihedrals to change, step size and number of steps per dihedral
# Note that we will take one extra step per dihedral to capture the initial conformation
# Read in name of original pdb file, and store the base file name (without the .pdb extension)
pdb_file = sys.argv[1]
base = pdb_file.split('/')[-1].split('.')[0]
gzmat_file = base + ".gzmat"
diheds = []
stepsizes = []
nsteps = []
for i in range(0,n_dihed):
diheds.append(sys.argv[3*i+2])
stepsizes.append(float(sys.argv[3*i+3]))
nsteps.append(int(sys.argv[3*i+4])+1)
#----------------------------------------------------------------------
# Generate gzmat file from pdb file
#----------------------------------------------------------------------
process = subprocess.Popen("babel -ipdb {0} -ogzmat {1}".format(pdb_file, gzmat_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
gzmat = get_contents(gzmat_file)
#----------------------------------------------------------------------
# Find the line numbers and initial values for each dihedral angle in the gzmat file
#----------------------------------------------------------------------
lines = []
values = []
for i in range(0,n_dihed):
dihed = diheds[i]
for j in range(0,len(gzmat)):
if string.find(gzmat[j],dihed+"=") != -1:
lines.append(j)
value = gzmat[j].split()[1]
values.append(float(value))
#----------------------------------------------------------------------
# The main bit of code that actually drives the process
# of generating new input files with altered dihedrals
# Note: the "for x in range(0,y)" is essentially a do loop,
# using different values of x at each iteration x = 0,1,2,...,y-1
# Also note that python starts counting at 0 rather than 1
#----------------------------------------------------------------------
for i0 in range(0,nsteps[0]):
# for first dihedral, set new value to initial + step number*step size
newvalue0 = values[0] + float(i0)*stepsizes[0]
gzmat[lines[0]] = diheds[0] + "= " + str(newvalue0) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + ".pdb"
if n_dihed == 1:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate() | newvalue1 = values[1] + float(i1)*stepsizes[1]
gzmat[lines[1]] = diheds[1] + "= " + str(newvalue1) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + ".pdb"
if n_dihed == 2:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
for i2 in range(0,nsteps[2]):
# for third dihedral, set new value to initial + step number*step size
newvalue2 = values[2] + float(i2)*stepsizes[2]
gzmat[lines[2]] = diheds[2] + "= " + str(newvalue2) + "\n"
new_gzmat_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".gzmat"
new_pdb_file = base + "_" + str(i0).zfill(3) + "_" + str(i1).zfill(3) + "_" + str(i2).zfill(3) + ".pdb"
if n_dihed == 3:
# generate file and convert back to pdb format
write_to_file(new_gzmat_file,gzmat)
process = subprocess.Popen("babel -igzmat {0} -opdb {1}".format(new_gzmat_file, new_pdb_file).split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.communicate()
else:
print 'Error: more than 3 dihedrals, should not be able to get here'
sys.exit()
if __name__ == '__main__':
#----------------------------------------------------------------------
# Read in arguments from command line
#----------------------------------------------------------------------
if (len(sys.argv)-2)%3 != 0 or len(sys.argv) == 2:
print 'Usage: scan_dihedral.py <pdb_file_name> <dihedral name 1> <step size 1> <number of steps 1>'
print ' ... ... ... ... <dihedral name N> <step size N> <number of steps N>'
else:
# Calculate the number of dihedrals to change based on the number of arguments supplied
n_dihed = (len(sys.argv)-2)/3
if (n_dihed > 3):
print 'Changing more than 3 dihedrals at once, are you sure?'
print 'If so, you will need to edit the python script to remove the sys.exit() statement'
print 'And write some more do loops in the main part of the code'
sys.exit()
main() | else:
for i1 in range(0,nsteps[1]):
# for second dihedral, set new value to initial + step number*step size | random_line_split |
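# The nested loops above hard-code a maximum of three dihedrals. A sketch of the
# same grid walk for any number of dihedrals, using itertools.product; variable
# names mirror the script, but this is an illustration, not part of the original.
import itertools

def grid_points(values, stepsizes, nsteps):
    """Yield one tuple of dihedral angles per conformation."""
    for steps in itertools.product(*[range(n) for n in nsteps]):
        yield tuple(v + i * s for v, i, s in zip(values, steps, stepsizes))

for angles in grid_points([60.0, 180.0], [10.0, 10.0], [3, 3]):
    print(angles)  # (60.0, 180.0), (60.0, 190.0), ...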
vec_delete_left.rs | use malachite_base::vecs::vec_delete_left;
use malachite_base_test_util::bench::bucketers::pair_1_vec_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::unsigned_vec_unsigned_pair_gen_var_1;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_vec_delete_left);
register_bench!(runner, benchmark_vec_delete_left);
}
fn demo_vec_delete_left(gm: GenMode, config: GenConfig, limit: usize) {
for (mut xs, amount) in unsigned_vec_unsigned_pair_gen_var_1::<u8>()
.get(gm, &config)
.take(limit)
{
let old_xs = xs.clone();
vec_delete_left(&mut xs, amount);
println!(
"xs := {:?}; vec_delete_left(&mut xs, {}); xs = {:?}",
old_xs, amount, xs
);
}
}
fn benchmark_vec_delete_left(gm: GenMode, config: GenConfig, limit: usize, file_name: &str) | {
run_benchmark(
"vec_delete_left(&mut [T], usize)",
BenchmarkType::Single,
unsigned_vec_unsigned_pair_gen_var_1::<u8>().get(gm, &config),
gm.name(),
limit,
file_name,
&pair_1_vec_len_bucketer("xs"),
&mut [("Malachite", &mut |(mut xs, amount)| {
vec_delete_left(&mut xs, amount)
})],
);
} | identifier_body |
|
vec_delete_left.rs | use malachite_base::vecs::vec_delete_left;
use malachite_base_test_util::bench::bucketers::pair_1_vec_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::unsigned_vec_unsigned_pair_gen_var_1;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_vec_delete_left);
register_bench!(runner, benchmark_vec_delete_left);
}
fn demo_vec_delete_left(gm: GenMode, config: GenConfig, limit: usize) { | vec_delete_left(&mut xs, amount);
println!(
"xs := {:?}; vec_delete_left(&mut xs, {}); xs = {:?}",
old_xs, amount, xs
);
}
}
fn benchmark_vec_delete_left(gm: GenMode, config: GenConfig, limit: usize, file_name: &str) {
run_benchmark(
"vec_delete_left(&mut [T], usize)",
BenchmarkType::Single,
unsigned_vec_unsigned_pair_gen_var_1::<u8>().get(gm, &config),
gm.name(),
limit,
file_name,
&pair_1_vec_len_bucketer("xs"),
&mut [("Malachite", &mut |(mut xs, amount)| {
vec_delete_left(&mut xs, amount)
})],
);
} | for (mut xs, amount) in unsigned_vec_unsigned_pair_gen_var_1::<u8>()
.get(gm, &config)
.take(limit)
{
let old_xs = xs.clone(); | random_line_split |
vec_delete_left.rs | use malachite_base::vecs::vec_delete_left;
use malachite_base_test_util::bench::bucketers::pair_1_vec_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::unsigned_vec_unsigned_pair_gen_var_1;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_vec_delete_left);
register_bench!(runner, benchmark_vec_delete_left);
}
fn demo_vec_delete_left(gm: GenMode, config: GenConfig, limit: usize) {
for (mut xs, amount) in unsigned_vec_unsigned_pair_gen_var_1::<u8>()
.get(gm, &config)
.take(limit)
{
let old_xs = xs.clone();
vec_delete_left(&mut xs, amount);
println!(
"xs := {:?}; vec_delete_left(&mut xs, {}); xs = {:?}",
old_xs, amount, xs
);
}
}
fn | (gm: GenMode, config: GenConfig, limit: usize, file_name: &str) {
run_benchmark(
"vec_delete_left(&mut [T], usize)",
BenchmarkType::Single,
unsigned_vec_unsigned_pair_gen_var_1::<u8>().get(gm, &config),
gm.name(),
limit,
file_name,
&pair_1_vec_len_bucketer("xs"),
&mut [("Malachite", &mut |(mut xs, amount)| {
vec_delete_left(&mut xs, amount)
})],
);
}
| benchmark_vec_delete_left | identifier_name |
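# vec_delete_left removes the first `amount` elements of a vector in place.
# A Python sketch of the same semantics (the Rust crate above only benchmarks
# and demos it; this list version is purely for illustration):
def vec_delete_left(xs, amount):
    del xs[:amount]

xs = [1, 2, 3, 4, 5]
vec_delete_left(xs, 2)
assert xs == [3, 4, 5]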
del_cluster_systemlist.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains the logic for `aq del cluster systemlist --hostname`. """
from aquilon.aqdb.model import SystemList
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.del_cluster_member_priority import \
CommandDelClusterMemberPriority
| required_parameters = ["cluster", "hostname"]
resource_class = SystemList
def render(self, hostname, **kwargs):
super(CommandDelClusterSystemList, self).render(hostname=None,
metacluster=None,
comments=None,
member=hostname,
**kwargs) | class CommandDelClusterSystemList(CommandDelClusterMemberPriority):
| random_line_split |
del_cluster_systemlist.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains the logic for `aq del cluster systemlist --hostname`. """
from aquilon.aqdb.model import SystemList
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.del_cluster_member_priority import \
CommandDelClusterMemberPriority
class CommandDelClusterSystemList(CommandDelClusterMemberPriority):
required_parameters = ["cluster", "hostname"]
resource_class = SystemList
def | (self, hostname, **kwargs):
super(CommandDelClusterSystemList, self).render(hostname=None,
metacluster=None,
comments=None,
member=hostname,
**kwargs)
| render | identifier_name |
del_cluster_systemlist.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains the logic for `aq del cluster systemlist --hostname`. """
from aquilon.aqdb.model import SystemList
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.del_cluster_member_priority import \
CommandDelClusterMemberPriority
class CommandDelClusterSystemList(CommandDelClusterMemberPriority):
required_parameters = ["cluster", "hostname"]
resource_class = SystemList
def render(self, hostname, **kwargs):
| super(CommandDelClusterSystemList, self).render(hostname=None,
metacluster=None,
comments=None,
member=hostname,
**kwargs) | identifier_body |
|
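# CommandDelClusterSystemList above reuses its parent's render() and only remaps
# the broker arguments (hostname becomes member). A self-contained sketch of that
# override-and-delegate pattern; the class names here are stand-ins, not Aquilon's.
class DelMemberPriority(object):
    def render(self, member=None, **kwargs):
        print("deleting priority for member", member)

class DelSystemList(DelMemberPriority):
    def render(self, hostname, **kwargs):
        # Translate the command-specific argument into the generic one.
        super(DelSystemList, self).render(member=hostname, **kwargs)

DelSystemList().render(hostname="node1.example.com")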
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(plugin)]
#![cfg_attr(test, feature(core_intrinsics))]
#![plugin(plugins)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
extern crate selectors;
#[macro_use(atom, ns)] extern crate string_cache;
extern crate style;
extern crate style_traits;
extern crate url;
extern crate util;
#[cfg(test)] mod stylesheets;
#[cfg(test)] mod media_queries;
#[cfg(test)] mod viewport;
#[cfg(test)] mod writing_modes {
use style::properties::{INITIAL_VALUES, get_writing_mode};
use util::logical_geometry::WritingMode;
#[test]
fn initial_writing_mode_is_empty() |
}
| {
assert_eq!(get_writing_mode(INITIAL_VALUES.get_inheritedbox()), WritingMode::empty())
} | identifier_body |
lib.rs | #![cfg_attr(test, feature(core_intrinsics))]
#![plugin(plugins)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
extern crate selectors;
#[macro_use(atom, ns)] extern crate string_cache;
extern crate style;
extern crate style_traits;
extern crate url;
extern crate util;
#[cfg(test)] mod stylesheets;
#[cfg(test)] mod media_queries;
#[cfg(test)] mod viewport;
#[cfg(test)] mod writing_modes {
use style::properties::{INITIAL_VALUES, get_writing_mode};
use util::logical_geometry::WritingMode;
#[test]
fn initial_writing_mode_is_empty() {
assert_eq!(get_writing_mode(INITIAL_VALUES.get_inheritedbox()), WritingMode::empty())
}
} | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(plugin)] | random_line_split |
|
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(plugin)]
#![cfg_attr(test, feature(core_intrinsics))]
#![plugin(plugins)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
extern crate selectors;
#[macro_use(atom, ns)] extern crate string_cache;
extern crate style;
extern crate style_traits;
extern crate url;
extern crate util;
#[cfg(test)] mod stylesheets;
#[cfg(test)] mod media_queries;
#[cfg(test)] mod viewport;
#[cfg(test)] mod writing_modes {
use style::properties::{INITIAL_VALUES, get_writing_mode};
use util::logical_geometry::WritingMode;
#[test]
fn | () {
assert_eq!(get_writing_mode(INITIAL_VALUES.get_inheritedbox()), WritingMode::empty())
}
}
| initial_writing_mode_is_empty | identifier_name |
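# The single test above checks that the initial style values map to an empty
# WritingMode flag set. The same invariant check in Python, purely illustrative
# (the flag values are invented for the sketch, not taken from Servo):
VERTICAL, RTL = 0x1, 0x2

def get_writing_mode(inherited_box):
    flags = 0
    if inherited_box.get("vertical"):
        flags |= VERTICAL
    if inherited_box.get("rtl"):
        flags |= RTL
    return flags

INITIAL_VALUES = {}  # defaults: horizontal, left-to-right
assert get_writing_mode(INITIAL_VALUES) == 0  # the "empty" writing mode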
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTTP streaming toolbox with flow control, written in Python.
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
__title__ = 'tidehunter'
__version__ = '1.0.1'
VERSION = tuple(map(int, __version__.split('.')))
__author__ = 'Runzhou Li (Leo)'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Runzhou Li (Leo)'
from tidehunter.stream import (
Hunter, SimpleStateCounter
)
__all__ = [
'Hunter', 'SimpleStateCounter'
]
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError: # pragma: no cover
class | (logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| NullHandler | identifier_name |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" | :copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
__title__ = 'tidehunter'
__version__ = '1.0.1'
VERSION = tuple(map(int, __version__.split('.')))
__author__ = 'Runzhou Li (Leo)'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Runzhou Li (Leo)'
from tidehunter.stream import (
Hunter, SimpleStateCounter
)
__all__ = [
'Hunter', 'SimpleStateCounter'
]
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError: # pragma: no cover
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler()) | HTTP streaming toolbox with flow control, written in Python.
| random_line_split |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTTP streaming toolbox with flow control, written in Python.
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
__title__ = 'tidehunter'
__version__ = '1.0.1'
VERSION = tuple(map(int, __version__.split('.')))
__author__ = 'Runzhou Li (Leo)'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Runzhou Li (Leo)'
from tidehunter.stream import (
Hunter, SimpleStateCounter
)
__all__ = [
'Hunter', 'SimpleStateCounter'
]
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError: # pragma: no cover
class NullHandler(logging.Handler):
|
logging.getLogger(__name__).addHandler(NullHandler())
| def emit(self, record):
pass | identifier_body |
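# The NullHandler fallback above is the standard idiom for library loggers: the
# package stays silent until an application opts in. A usage sketch (the logger
# name is the package's own; the application side is assumed):
import logging

logging.getLogger("tidehunter").addHandler(logging.NullHandler())  # library side

# Application side: opt in to see the library's logs.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("tidehunter").debug("now visible")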
agents_vm.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from "lodash";
import Stream from "mithril/stream";
import {Agent, AgentConfigState, Agents} from "models/agents/agents";
import {AgentComparator} from "models/agents/agent_comparator";
import {SortOrder, TableSortHandler} from "views/components/table";
export abstract class BaseVM {
protected agents: Agents;
readonly filterText: Stream<string> = Stream();
readonly showBuildDetailsForAgent: Stream<string> = Stream();
readonly agentsSortHandler: AgentSortHandler;
protected constructor(agents: Agents) {
this.agents = agents;
this.agentsSortHandler = this.getAgentSortHandler();
}
sync(agents: Agents) {
this.agents = _.cloneDeep(agents);
this.agentsSortHandler.sort();
}
list(): Agent[] {
if (!this.filterText()) {
return this.agents;
}
return this.agents.filter((agent) => this.searchPredicate(agent));
}
filterBy(agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
all() {
return this.agents;
}
protected abstract getAgentSortHandler(): AgentSortHandler;
private static getOrEmpty(str: string | number) {
return str ? str.toString().toLowerCase() : "";
}
private searchPredicate(agent: Agent) {
const lowercaseFilterText = this.filterText().toLowerCase();
return _.includes(BaseVM.getOrEmpty(agent.hostname), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.operatingSystem), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.sandbox), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.ipAddress), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.freeSpace.toString()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.status()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.resources.join(", ")), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.environmentNames().join(", ")), lowercaseFilterText);
}
}
export class StaticAgentsVM extends BaseVM {
readonly showResources: Stream<boolean> = Stream();
readonly showEnvironments: Stream<boolean> = Stream();
private readonly _selectedAgentsUUID: Stream<string[]> = Stream([] as string[]);
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
StaticAgentsVM.syncAgentSelection(this._selectedAgentsUUID, this.selectedAgentsUUID(), this.agents);
}
all() {
return this.agents;
}
totalCount() {
return this.agents.length;
}
selectedAgentsUUID() {
return this._selectedAgentsUUID();
}
selectAgent(uuid: string) {
if (!this.isAgentSelected(uuid)) {
this._selectedAgentsUUID().push(uuid);
}
}
toggleAgentSelection(uuid: string) {
if (this.isAgentSelected(uuid)) {
this.selectedAgentsUUID().splice(this.selectedAgentsUUID().indexOf(uuid), 1);
} else {
this.selectedAgentsUUID().push(uuid);
}
}
isAgentSelected(uuid: string) {
return this.selectedAgentsUUID().indexOf(uuid) !== -1;
}
isAllStaticAgentSelected(): boolean {
return this.selectedAgentsUUID().length === this.list().length;
}
toggleAgentsSelection() {
this.isAllStaticAgentSelected() ? this.unselectAll() : this.list().forEach((agent) => this.selectAgent(agent.uuid));
}
filterBy(agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
unselectAll() {
this._selectedAgentsUUID([]);
}
list() {
return super.list().filter((agent) => !agent.isElastic());
}
protected getAgentSortHandler() {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "resources"], [8, "environments"],
])
);
}
private static syncAgentSelection(resultStream: Stream<string[]>,
currentSelection: string[],
agentsFromServer: Agents) {
const reducerFn = (accumulator: string[], agent: Agent): string[] => {
const indexOfUUID = currentSelection.indexOf(agent.uuid);
if (indexOfUUID !== -1) {
accumulator.push(agent.uuid);
currentSelection.splice(indexOfUUID, 1);
}
return accumulator;
};
resultStream(agentsFromServer.reduce(reducerFn, []));
}
}
export class ElasticAgentVM extends BaseVM {
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
}
list() {
return super.list().filter((agent) => agent.isElastic());
}
protected getAgentSortHandler(): AgentSortHandler {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "environments"],
])
);
}
}
export class AgentSortHandler implements TableSortHandler {
private readonly sortableColumns: Map<number, string>;
private readonly agentsVM: ElasticAgentVM | StaticAgentsVM;
private sortOnColumn: number = 5;
private sortOrder: SortOrder = SortOrder.ASC;
constructor(agentsVM: ElasticAgentVM | StaticAgentsVM, sortableColumns: Map<number, string>) {
this.sortableColumns = sortableColumns;
this.agentsVM = agentsVM;
}
getSortableColumns(): number[] {
return Array.from(this.sortableColumns.keys()).sort();
}
onColumnClick(columnIndex: number): void {
if (this.sortOnColumn === columnIndex) | else {
this.sortOrder = SortOrder.ASC;
this.sortOnColumn = columnIndex;
}
this.sort();
}
sort() {
const agentComparator = new AgentComparator(this.sortableColumns.get(this.sortOnColumn) as string);
this.agentsVM.all().sort(agentComparator.compare.bind(agentComparator));
if (this.sortOrder === SortOrder.DESC) {
this.agentsVM.all().reverse();
}
}
currentSortedColumnIndex(): number {
return this.sortOnColumn;
}
getCurrentSortOrder(): SortOrder {
return this.sortOrder;
}
}
| {
this.sortOrder = this.sortOrder === SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC;
} | conditional_block |
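# searchPredicate above lower-cases every displayed field and keeps an agent if
# any field contains the filter text. The same idea in Python (the field names
# are illustrative, not the TypeScript model's full list):
def matches(agent, filter_text):
    needle = filter_text.lower()
    fields = (agent.get("hostname"), agent.get("os"), agent.get("ip"))
    return any(needle in (f or "").lower() for f in fields)

agent = {"hostname": "Build-Agent-7", "os": "Linux", "ip": "10.0.0.7"}
assert matches(agent, "build")
assert not matches(agent, "windows")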
agents_vm.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from "lodash";
import Stream from "mithril/stream";
import {Agent, AgentConfigState, Agents} from "models/agents/agents";
import {AgentComparator} from "models/agents/agent_comparator";
import {SortOrder, TableSortHandler} from "views/components/table";
export abstract class BaseVM {
protected agents: Agents;
readonly filterText: Stream<string> = Stream();
readonly showBuildDetailsForAgent: Stream<string> = Stream();
readonly agentsSortHandler: AgentSortHandler;
protected constructor(agents: Agents) {
this.agents = agents;
this.agentsSortHandler = this.getAgentSortHandler();
}
sync(agents: Agents) {
this.agents = _.cloneDeep(agents);
this.agentsSortHandler.sort();
}
list(): Agent[] {
if (!this.filterText()) {
return this.agents;
}
return this.agents.filter((agent) => this.searchPredicate(agent));
}
filterBy(agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
all() {
return this.agents;
}
protected abstract getAgentSortHandler(): AgentSortHandler;
private static getOrEmpty(str: string | number) {
return str ? str.toString().toLowerCase() : "";
}
private searchPredicate(agent: Agent) {
const lowercaseFilterText = this.filterText().toLowerCase();
return _.includes(BaseVM.getOrEmpty(agent.hostname), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.operatingSystem), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.sandbox), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.ipAddress), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.freeSpace.toString()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.status()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.resources.join(", ")), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.environmentNames().join(", ")), lowercaseFilterText);
}
}
export class StaticAgentsVM extends BaseVM {
readonly showResources: Stream<boolean> = Stream();
readonly showEnvironments: Stream<boolean> = Stream();
private readonly _selectedAgentsUUID: Stream<string[]> = Stream([] as string[]);
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
StaticAgentsVM.syncAgentSelection(this._selectedAgentsUUID, this.selectedAgentsUUID(), this.agents);
}
all() {
return this.agents;
}
totalCount() {
return this.agents.length;
}
selectedAgentsUUID() {
return this._selectedAgentsUUID();
}
selectAgent(uuid: string) {
if (!this.isAgentSelected(uuid)) {
this._selectedAgentsUUID().push(uuid);
}
}
toggleAgentSelection(uuid: string) {
if (this.isAgentSelected(uuid)) {
this.selectedAgentsUUID().splice(this.selectedAgentsUUID().indexOf(uuid), 1);
} else {
this.selectedAgentsUUID().push(uuid);
}
}
isAgentSelected(uuid: string) {
return this.selectedAgentsUUID().indexOf(uuid) !== -1;
}
isAllStaticAgentSelected(): boolean {
return this.selectedAgentsUUID().length === this.list().length;
}
toggleAgentsSelection() {
this.isAllStaticAgentSelected() ? this.unselectAll() : this.list().forEach((agent) => this.selectAgent(agent.uuid));
}
| (agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
unselectAll() {
this._selectedAgentsUUID([]);
}
list() {
return super.list().filter((agent) => !agent.isElastic());
}
protected getAgentSortHandler() {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "resources"], [8, "environments"],
])
);
}
private static syncAgentSelection(resultStream: Stream<string[]>,
currentSelection: string[],
agentsFromServer: Agents) {
const reducerFn = (accumulator: string[], agent: Agent): string[] => {
const indexOfUUID = currentSelection.indexOf(agent.uuid);
if (indexOfUUID !== -1) {
accumulator.push(agent.uuid);
currentSelection.splice(indexOfUUID, 1);
}
return accumulator;
};
resultStream(agentsFromServer.reduce(reducerFn, []));
}
}
export class ElasticAgentVM extends BaseVM {
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
}
list() {
return super.list().filter((agent) => agent.isElastic());
}
protected getAgentSortHandler(): AgentSortHandler {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "environments"],
])
);
}
}
export class AgentSortHandler implements TableSortHandler {
private readonly sortableColumns: Map<number, string>;
private readonly agentsVM: ElasticAgentVM | StaticAgentsVM;
private sortOnColumn: number = 5;
private sortOrder: SortOrder = SortOrder.ASC;
constructor(agentsVM: ElasticAgentVM | StaticAgentsVM, sortableColumns: Map<number, string>) {
this.sortableColumns = sortableColumns;
this.agentsVM = agentsVM;
}
getSortableColumns(): number[] {
return Array.from(this.sortableColumns.keys()).sort();
}
onColumnClick(columnIndex: number): void {
if (this.sortOnColumn === columnIndex) {
this.sortOrder = this.sortOrder === SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC;
} else {
this.sortOrder = SortOrder.ASC;
this.sortOnColumn = columnIndex;
}
this.sort();
}
sort() {
const agentComparator = new AgentComparator(this.sortableColumns.get(this.sortOnColumn) as string);
this.agentsVM.all().sort(agentComparator.compare.bind(agentComparator));
if (this.sortOrder === SortOrder.DESC) {
this.agentsVM.all().reverse();
}
}
currentSortedColumnIndex(): number {
return this.sortOnColumn;
}
getCurrentSortOrder(): SortOrder {
return this.sortOrder;
}
}
| filterBy | identifier_name |
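# syncAgentSelection above keeps only the previously selected UUIDs that still
# exist in the fresh server list, preserving server order. A Python sketch of
# that reconciliation (the data shapes are assumptions for the example):
def sync_selection(current_selection, agents_from_server):
    remaining = set(current_selection)
    result = []
    for agent in agents_from_server:
        if agent["uuid"] in remaining:
            result.append(agent["uuid"])
            remaining.discard(agent["uuid"])
    return result

server = [{"uuid": "a"}, {"uuid": "b"}, {"uuid": "c"}]
assert sync_selection(["c", "gone", "a"], server) == ["a", "c"]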
agents_vm.ts | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); | *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from "lodash";
import Stream from "mithril/stream";
import {Agent, AgentConfigState, Agents} from "models/agents/agents";
import {AgentComparator} from "models/agents/agent_comparator";
import {SortOrder, TableSortHandler} from "views/components/table";
export abstract class BaseVM {
protected agents: Agents;
readonly filterText: Stream<string> = Stream();
readonly showBuildDetailsForAgent: Stream<string> = Stream();
readonly agentsSortHandler: AgentSortHandler;
protected constructor(agents: Agents) {
this.agents = agents;
this.agentsSortHandler = this.getAgentSortHandler();
}
sync(agents: Agents) {
this.agents = _.cloneDeep(agents);
this.agentsSortHandler.sort();
}
list(): Agent[] {
if (!this.filterText()) {
return this.agents;
}
return this.agents.filter((agent) => this.searchPredicate(agent));
}
filterBy(agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
all() {
return this.agents;
}
protected abstract getAgentSortHandler(): AgentSortHandler;
private static getOrEmpty(str: string | number) {
return str ? str.toString().toLowerCase() : "";
}
private searchPredicate(agent: Agent) {
const lowercaseFilterText = this.filterText().toLowerCase();
return _.includes(BaseVM.getOrEmpty(agent.hostname), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.operatingSystem), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.sandbox), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.ipAddress), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.freeSpace.toString()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.status()), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.resources.join(", ")), lowercaseFilterText) ||
_.includes(BaseVM.getOrEmpty(agent.environmentNames().join(", ")), lowercaseFilterText);
}
}
export class StaticAgentsVM extends BaseVM {
readonly showResources: Stream<boolean> = Stream();
readonly showEnvironments: Stream<boolean> = Stream();
private readonly _selectedAgentsUUID: Stream<string[]> = Stream([] as string[]);
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
StaticAgentsVM.syncAgentSelection(this._selectedAgentsUUID, this.selectedAgentsUUID(), this.agents);
}
all() {
return this.agents;
}
totalCount() {
return this.agents.length;
}
selectedAgentsUUID() {
return this._selectedAgentsUUID();
}
selectAgent(uuid: string) {
if (!this.isAgentSelected(uuid)) {
this._selectedAgentsUUID().push(uuid);
}
}
toggleAgentSelection(uuid: string) {
if (this.isAgentSelected(uuid)) {
this.selectedAgentsUUID().splice(this.selectedAgentsUUID().indexOf(uuid), 1);
} else {
this.selectedAgentsUUID().push(uuid);
}
}
isAgentSelected(uuid: string) {
return this.selectedAgentsUUID().indexOf(uuid) !== -1;
}
isAllStaticAgentSelected(): boolean {
return this.selectedAgentsUUID().length === this.list().length;
}
toggleAgentsSelection() {
this.isAllStaticAgentSelected() ? this.unselectAll() : this.list().forEach((agent) => this.selectAgent(agent.uuid));
}
filterBy(agentConfigState: AgentConfigState): Agent[] {
return this.list().filter((agent) => agent.agentConfigState === agentConfigState);
}
unselectAll() {
this._selectedAgentsUUID([]);
}
list() {
return super.list().filter((agent) => !agent.isElastic());
}
protected getAgentSortHandler() {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "resources"], [8, "environments"],
])
);
}
private static syncAgentSelection(resultStream: Stream<string[]>,
currentSelection: string[],
agentsFromServer: Agents) {
const reducerFn = (accumulator: string[], agent: Agent): string[] => {
const indexOfUUID = currentSelection.indexOf(agent.uuid);
if (indexOfUUID !== -1) {
accumulator.push(agent.uuid);
currentSelection.splice(indexOfUUID, 1);
}
return accumulator;
};
resultStream(agentsFromServer.reduce(reducerFn, []));
}
}
export class ElasticAgentVM extends BaseVM {
constructor(agents: Agents = new Agents()) {
super(agents);
}
sync(agents: Agents) {
super.sync(agents);
}
list() {
return super.list().filter((agent) => agent.isElastic());
}
protected getAgentSortHandler(): AgentSortHandler {
return new AgentSortHandler(this, new Map(
[
[1, "hostname"], [2, "sandbox"], [3, "operatingSystem"], [4, "ipAddress"],
[5, "agentState"], [6, "freeSpace"], [7, "environments"],
])
);
}
}
export class AgentSortHandler implements TableSortHandler {
private readonly sortableColumns: Map<number, string>;
private readonly agentsVM: ElasticAgentVM | StaticAgentsVM;
private sortOnColumn: number = 5;
private sortOrder: SortOrder = SortOrder.ASC;
constructor(agentsVM: ElasticAgentVM | StaticAgentsVM, sortableColumns: Map<number, string>) {
this.sortableColumns = sortableColumns;
this.agentsVM = agentsVM;
}
getSortableColumns(): number[] {
return Array.from(this.sortableColumns.keys()).sort();
}
onColumnClick(columnIndex: number): void {
if (this.sortOnColumn === columnIndex) {
this.sortOrder = this.sortOrder === SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC;
} else {
this.sortOrder = SortOrder.ASC;
this.sortOnColumn = columnIndex;
}
this.sort();
}
sort() {
const agentComparator = new AgentComparator(this.sortableColumns.get(this.sortOnColumn) as string);
this.agentsVM.all().sort(agentComparator.compare.bind(agentComparator));
if (this.sortOrder === SortOrder.DESC) {
this.agentsVM.all().reverse();
}
}
currentSortedColumnIndex(): number {
return this.sortOnColumn;
}
getCurrentSortOrder(): SortOrder {
return this.sortOrder;
}
} | * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at | random_line_split |
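# onColumnClick above toggles ASC/DESC when the same column is clicked twice and
# resets to ASC on a new column; sort() then reverses a stable ascending sort for
# DESC. A compact Python sketch of that state machine:
class SortHandler(object):
    def __init__(self):
        self.column, self.ascending = "agentState", True

    def on_column_click(self, column):
        if self.column == column:
            self.ascending = not self.ascending
        else:
            self.column, self.ascending = column, True

    def sort(self, rows):
        rows.sort(key=lambda r: r[self.column], reverse=not self.ascending)

h = SortHandler()
h.on_column_click("hostname")  # new column -> ascending
h.on_column_click("hostname")  # same column -> flips to descending
rows = [{"hostname": "a"}, {"hostname": "b"}]
h.sort(rows)
assert [r["hostname"] for r in rows] == ["b", "a"]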
vcloud_handler.py | # Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cosmo_tester.framework.handlers import (
BaseHandler,
BaseCloudifyInputsConfigReader)
from pyvcloud.schema.vcd.v1_5.schemas.vcloud import taskType
from pyvcloud import vcloudair
import time
import requests
TEST_VDC = "systest"
class VcloudCleanupContext(BaseHandler.CleanupContext):
def __init__(self, context_name, env):
super(VcloudCleanupContext, self).__init__(context_name, env)
@classmethod
def clean_all(cls, env):
"""
Cleans *all* resources, including resources that were not
created by the test
"""
super(VcloudCleanupContext, cls).clean_all(env)
class | (BaseCloudifyInputsConfigReader):
def __init__(self, cloudify_config, manager_blueprint_path, **kwargs):
super(CloudifyVcloudInputsConfigReader, self).__init__(
cloudify_config, manager_blueprint_path=manager_blueprint_path,
**kwargs)
@property
def vcloud_username(self):
return self.config['vcloud_username']
@property
def vcloud_password(self):
return self.config['vcloud_password']
@property
def vcloud_url(self):
return self.config['vcloud_url']
@property
def vcloud_service(self):
return self.config['vcloud_service']
@property
def vcloud_org(self):
return self.config['vcloud_org']
@property
def vcloud_vdc(self):
return self.config['vcloud_vdc']
@property
def manager_server_name(self):
return self.config['server_name']
@property
def manager_server_catalog(self):
return self.config['catalog']
@property
def manager_server_template(self):
return self.config['template']
@property
def management_network_use_existing(self):
return self.config['management_network_use_existing']
@property
def management_network_name(self):
return self.config['management_network_name']
@property
def edge_gateway(self):
return self.config['edge_gateway']
@property
def floating_ip_public_ip(self):
return self.config['floating_ip_public_ip']
@property
def ssh_key_filename(self):
return self.config['ssh_key_filename']
@property
def agent_private_key_path(self):
return self.config['agent_private_key_path']
@property
def user_public_key(self):
return self.config['user_public_key']
@property
def agent_public_key(self):
return self.config['user_public_key']
@property
def management_port_ip_allocation_mode(self):
return self.config['management_port_ip_allocation_mode']
@property
def vcloud_service_type(self):
return self.config['vcloud_service_type']
@property
def vcloud_region(self):
return self.config['vcloud_region']
@property
def public_catalog(self):
return 'Public Catalog'
@property
def ubuntu_precise_template(self):
return 'Ubuntu Server 12.04 LTS (amd64 20150127)'
class VcloudHandler(BaseHandler):
CleanupContext = VcloudCleanupContext
CloudifyConfigReader = CloudifyVcloudInputsConfigReader
def before_bootstrap(self):
super(VcloudHandler, self).before_bootstrap()
vca = login(self.env.cloudify_config)
if not vca:
    raise RuntimeError("Can't login to vCloud")
if vca.get_vdc(TEST_VDC):
status, task = vca.delete_vdc(TEST_VDC)
if status:
wait_for_task(vca, task)
else:
raise RuntimeError("Can't delete test VDC")
if vca:
task = vca.create_vdc(TEST_VDC)
wait_for_task(vca, task)
else:
raise RuntimeError("Can't create test VDC")
handler = VcloudHandler
def login(env):
vca = vcloudair.VCA(
host=env['vcloud_url'],
username=env['vcloud_username'],
service_type=env['vcloud_service_type'],
version="5.7",
verify=False)
logined = (vca.login(env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], None,
vca.vcloud_session.token, vca.vcloud_session.org_url))
if logined:
return vca
else:
return None
def wait_for_task(vca_client, task):
TASK_RECHECK_TIMEOUT = 5
TASK_STATUS_SUCCESS = 'success'
TASK_STATUS_ERROR = 'error'
WAIT_TIME_MAX_MINUTES = 30
MAX_ATTEMPTS = WAIT_TIME_MAX_MINUTES * 60 / TASK_RECHECK_TIMEOUT
status = task.get_status()
for attempt in xrange(MAX_ATTEMPTS):
if status == TASK_STATUS_SUCCESS:
return
if status == TASK_STATUS_ERROR:
error = task.get_Error()
raise RuntimeError(
"Error during task execution: {0}".format(error.get_message()))
time.sleep(TASK_RECHECK_TIMEOUT)
response = requests.get(
task.get_href(),
headers=vca_client.vcloud_session.get_vcloud_headers(),
verify=False)
task = taskType.parseString(response.content, True)
status = task.get_status()
raise RuntimeError("Wait for task timeout.")
| CloudifyVcloudInputsConfigReader | identifier_name |
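# CloudifyVcloudInputsConfigReader above exposes one property per config key.
# When the mapping is 1:1 like that, __getattr__ can stand in for the
# boilerplate; a sketch of that alternative (not how the test framework does it):
class ConfigReader(object):
    def __init__(self, config):
        self._config = config

    def __getattr__(self, name):
        try:
            return self._config[name]
        except KeyError:
            raise AttributeError(name)

reader = ConfigReader({"vcloud_url": "https://vchs.example.com"})
assert reader.vcloud_url == "https://vchs.example.com"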
vcloud_handler.py | # Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cosmo_tester.framework.handlers import (
BaseHandler,
BaseCloudifyInputsConfigReader)
from pyvcloud.schema.vcd.v1_5.schemas.vcloud import taskType
from pyvcloud import vcloudair
import time
import requests
TEST_VDC = "systest"
class VcloudCleanupContext(BaseHandler.CleanupContext):
def __init__(self, context_name, env):
super(VcloudCleanupContext, self).__init__(context_name, env)
@classmethod
def clean_all(cls, env):
"""
Cleans *all* resources, including resources that were not
created by the test
"""
super(VcloudCleanupContext, cls).clean_all(env)
class CloudifyVcloudInputsConfigReader(BaseCloudifyInputsConfigReader):
def __init__(self, cloudify_config, manager_blueprint_path, **kwargs):
super(CloudifyVcloudInputsConfigReader, self).__init__(
cloudify_config, manager_blueprint_path=manager_blueprint_path,
**kwargs)
@property
def vcloud_username(self):
return self.config['vcloud_username']
@property
def vcloud_password(self):
return self.config['vcloud_password']
@property
def vcloud_url(self):
return self.config['vcloud_url']
@property
def vcloud_service(self):
return self.config['vcloud_service']
@property
def vcloud_org(self):
return self.config['vcloud_org']
@property
def vcloud_vdc(self):
return self.config['vcloud_vdc']
@property
def manager_server_name(self):
return self.config['server_name']
@property
def manager_server_catalog(self):
return self.config['catalog']
@property
def manager_server_template(self):
return self.config['template']
@property
def management_network_use_existing(self):
return self.config['management_network_use_existing']
@property
def management_network_name(self):
return self.config['management_network_name']
@property
def edge_gateway(self):
return self.config['edge_gateway']
@property
def floating_ip_public_ip(self):
return self.config['floating_ip_public_ip']
@property
def ssh_key_filename(self):
return self.config['ssh_key_filename']
@property
def agent_private_key_path(self):
return self.config['agent_private_key_path']
@property
def user_public_key(self):
return self.config['user_public_key']
@property
def agent_public_key(self):
return self.config['user_public_key']
@property
def management_port_ip_allocation_mode(self):
return self.config['management_port_ip_allocation_mode']
@property
def vcloud_service_type(self):
return self.config['vcloud_service_type']
@property
def vcloud_region(self):
return self.config['vcloud_region']
@property
def public_catalog(self):
return 'Public Catalog'
@property
def ubuntu_precise_template(self):
return 'Ubuntu Server 12.04 LTS (amd64 20150127)'
class VcloudHandler(BaseHandler):
|
handler = VcloudHandler
def login(env):
vca = vcloudair.VCA(
host=env['vcloud_url'],
username=env['vcloud_username'],
service_type=env['vcloud_service_type'],
version="5.7",
verify=False)
logined = (vca.login(env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], None,
vca.vcloud_session.token, vca.vcloud_session.org_url))
if logined:
return vca
else:
return None
def wait_for_task(vca_client, task):
TASK_RECHECK_TIMEOUT = 5
TASK_STATUS_SUCCESS = 'success'
TASK_STATUS_ERROR = 'error'
WAIT_TIME_MAX_MINUTES = 30
MAX_ATTEMPTS = WAIT_TIME_MAX_MINUTES * 60 / TASK_RECHECK_TIMEOUT
status = task.get_status()
for attempt in xrange(MAX_ATTEMPTS):
if status == TASK_STATUS_SUCCESS:
return
if status == TASK_STATUS_ERROR:
error = task.get_Error()
raise RuntimeError(
"Error during task execution: {0}".format(error.get_message()))
time.sleep(TASK_RECHECK_TIMEOUT)
response = requests.get(
task.get_href(),
headers=vca_client.vcloud_session.get_vcloud_headers(),
verify=False)
task = taskType.parseString(response.content, True)
status = task.get_status()
raise RuntimeError("Wait for task timeout.")
| CleanupContext = VcloudCleanupContext
CloudifyConfigReader = CloudifyVcloudInputsConfigReader
def before_bootstrap(self):
super(VcloudHandler, self).before_bootstrap()
vca = login(self.env.cloudify_config)
if vca.get_vdc(TEST_VDC):
status, task = vca.delete_vdc(TEST_VDC)
if status:
wait_for_task(vca, task)
else:
raise RuntimeError("Can't delete test VDC")
if vca:
task = vca.create_vdc(TEST_VDC)
wait_for_task(vca, task)
else:
raise RuntimeError("Can't create test VDC") | identifier_body |
vcloud_handler.py | # Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cosmo_tester.framework.handlers import (
BaseHandler,
BaseCloudifyInputsConfigReader)
from pyvcloud.schema.vcd.v1_5.schemas.vcloud import taskType
from pyvcloud import vcloudair
import time
import requests
TEST_VDC = "systest"
class VcloudCleanupContext(BaseHandler.CleanupContext):
def __init__(self, context_name, env):
super(VcloudCleanupContext, self).__init__(context_name, env)
@classmethod
def clean_all(cls, env):
"""
Cleans *all* resources, including resources that were not
created by the test
"""
super(VcloudCleanupContext, cls).clean_all(env)
class CloudifyVcloudInputsConfigReader(BaseCloudifyInputsConfigReader):
def __init__(self, cloudify_config, manager_blueprint_path, **kwargs):
super(CloudifyVcloudInputsConfigReader, self).__init__(
cloudify_config, manager_blueprint_path=manager_blueprint_path,
**kwargs)
@property
def vcloud_username(self):
return self.config['vcloud_username']
@property
def vcloud_password(self):
return self.config['vcloud_password']
@property
def vcloud_url(self):
return self.config['vcloud_url']
@property
def vcloud_service(self):
return self.config['vcloud_service']
@property
def vcloud_org(self):
return self.config['vcloud_org']
@property
def vcloud_vdc(self):
return self.config['vcloud_vdc']
@property
def manager_server_name(self):
return self.config['server_name']
@property
def manager_server_catalog(self):
return self.config['catalog']
@property
def manager_server_template(self):
return self.config['template']
@property
def management_network_use_existing(self):
return self.config['management_network_use_existing']
@property
def management_network_name(self):
return self.config['management_network_name']
@property
def edge_gateway(self):
return self.config['edge_gateway']
@property
def floating_ip_public_ip(self):
return self.config['floating_ip_public_ip']
@property
def ssh_key_filename(self):
return self.config['ssh_key_filename']
@property
def agent_private_key_path(self):
return self.config['agent_private_key_path']
@property
def user_public_key(self):
return self.config['user_public_key']
@property
def agent_public_key(self):
return self.config['user_public_key']
@property
def management_port_ip_allocation_mode(self):
return self.config['management_port_ip_allocation_mode']
@property
def vcloud_service_type(self):
return self.config['vcloud_service_type']
@property
def vcloud_region(self):
return self.config['vcloud_region']
@property
def public_catalog(self):
return 'Public Catalog'
@property
def ubuntu_precise_template(self):
return 'Ubuntu Server 12.04 LTS (amd64 20150127)'
class VcloudHandler(BaseHandler):
CleanupContext = VcloudCleanupContext
CloudifyConfigReader = CloudifyVcloudInputsConfigReader
def before_bootstrap(self):
super(VcloudHandler, self).before_bootstrap()
vca = login(self.env.cloudify_config)
if not vca:
    raise RuntimeError("Can't login to vCloud")
if vca.get_vdc(TEST_VDC):
status, task = vca.delete_vdc(TEST_VDC)
if status:
wait_for_task(vca, task)
else:
raise RuntimeError("Can't delete test VDC")
if vca:
task = vca.create_vdc(TEST_VDC)
wait_for_task(vca, task)
else:
raise RuntimeError("Can't create test VDC")
handler = VcloudHandler
def login(env):
vca = vcloudair.VCA(
host=env['vcloud_url'],
username=env['vcloud_username'],
service_type=env['vcloud_service_type'],
version="5.7",
verify=False)
logined = (vca.login(env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], None,
vca.vcloud_session.token, vca.vcloud_session.org_url))
if logined:
|
else:
return None
def wait_for_task(vca_client, task):
TASK_RECHECK_TIMEOUT = 5
TASK_STATUS_SUCCESS = 'success'
TASK_STATUS_ERROR = 'error'
WAIT_TIME_MAX_MINUTES = 30
MAX_ATTEMPTS = WAIT_TIME_MAX_MINUTES * 60 / TASK_RECHECK_TIMEOUT
status = task.get_status()
for attempt in xrange(MAX_ATTEMPTS):
if status == TASK_STATUS_SUCCESS:
return
if status == TASK_STATUS_ERROR:
error = task.get_Error()
raise RuntimeError(
"Error during task execution: {0}".format(error.get_message()))
time.sleep(TASK_RECHECK_TIMEOUT)
response = requests.get(
task.get_href(),
headers=vca_client.vcloud_session.get_vcloud_headers(),
verify=False)
task = taskType.parseString(response.content, True)
status = task.get_status()
raise RuntimeError("Wait for task timeout.")
| return vca | conditional_block |
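# login() above returns None on failure, so callers need a guard before touching
# the session; dereferencing first would raise AttributeError instead of a clear
# error. A sketch of the safer ordering (names mirror the handler, illustrative):
def before_bootstrap(env):
    vca = login(env)
    if not vca:
        raise RuntimeError("Can't login to vCloud")
    # Only now is it safe to touch the session.
    if vca.get_vdc("systest"):
        pass  # delete-and-recreate logic goes here

def login(env):
    return None  # stand-in for a failed login

try:
    before_bootstrap({})
except RuntimeError as e:
    print(e)  # "Can't login to vCloud"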
vcloud_handler.py | # Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cosmo_tester.framework.handlers import (
BaseHandler,
BaseCloudifyInputsConfigReader)
from pyvcloud.schema.vcd.v1_5.schemas.vcloud import taskType
from pyvcloud import vcloudair
import time
import requests
TEST_VDC = "systest"
|
@classmethod
def clean_all(cls, env):
"""
Cleans *all* resources, including resources that were not
created by the test
"""
super(VcloudCleanupContext, cls).clean_all(env)
class CloudifyVcloudInputsConfigReader(BaseCloudifyInputsConfigReader):
def __init__(self, cloudify_config, manager_blueprint_path, **kwargs):
super(CloudifyVcloudInputsConfigReader, self).__init__(
cloudify_config, manager_blueprint_path=manager_blueprint_path,
**kwargs)
@property
def vcloud_username(self):
return self.config['vcloud_username']
@property
def vcloud_password(self):
return self.config['vcloud_password']
@property
def vcloud_url(self):
return self.config['vcloud_url']
@property
def vcloud_service(self):
return self.config['vcloud_service']
@property
def vcloud_org(self):
return self.config['vcloud_org']
@property
def vcloud_vdc(self):
return self.config['vcloud_vdc']
@property
def manager_server_name(self):
return self.config['server_name']
@property
def manager_server_catalog(self):
return self.config['catalog']
@property
def manager_server_template(self):
return self.config['template']
@property
def management_network_use_existing(self):
return self.config['management_network_use_existing']
@property
def management_network_name(self):
return self.config['management_network_name']
@property
def edge_gateway(self):
return self.config['edge_gateway']
@property
def floating_ip_public_ip(self):
return self.config['floating_ip_public_ip']
@property
def ssh_key_filename(self):
return self.config['ssh_key_filename']
@property
def agent_private_key_path(self):
return self.config['agent_private_key_path']
@property
def user_public_key(self):
return self.config['user_public_key']
@property
def agent_public_key(self):
return self.config['user_public_key']
@property
def management_port_ip_allocation_mode(self):
return self.config['management_port_ip_allocation_mode']
@property
def vcloud_service_type(self):
return self.config['vcloud_service_type']
@property
def vcloud_region(self):
return self.config['vcloud_region']
@property
def public_catalog(self):
return 'Public Catalog'
@property
def ubuntu_precise_template(self):
return 'Ubuntu Server 12.04 LTS (amd64 20150127)'
class VcloudHandler(BaseHandler):
CleanupContext = VcloudCleanupContext
CloudifyConfigReader = CloudifyVcloudInputsConfigReader
def before_bootstrap(self):
super(VcloudHandler, self).before_bootstrap()
vca = login(self.env.cloudify_config)
if not vca:
    raise RuntimeError("Can't login to vCloud")
if vca.get_vdc(TEST_VDC):
status, task = vca.delete_vdc(TEST_VDC)
if status:
wait_for_task(vca, task)
else:
raise RuntimeError("Can't delete test VDC")
if vca:
task = vca.create_vdc(TEST_VDC)
wait_for_task(vca, task)
else:
raise RuntimeError("Can't create test VDC")
handler = VcloudHandler
def login(env):
vca = vcloudair.VCA(
host=env['vcloud_url'],
username=env['vcloud_username'],
service_type=env['vcloud_service_type'],
version="5.7",
verify=False)
logined = (vca.login(env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], None,
vca.vcloud_session.token, vca.vcloud_session.org_url))
    if logged_in:
return vca
else:
return None
def wait_for_task(vca_client, task):
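    # Poll the task until it reports success, raising on error or after
    # WAIT_TIME_MAX_MINUTES; the task document is re-fetched on every pass.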
TASK_RECHECK_TIMEOUT = 5
TASK_STATUS_SUCCESS = 'success'
TASK_STATUS_ERROR = 'error'
WAIT_TIME_MAX_MINUTES = 30
MAX_ATTEMPTS = WAIT_TIME_MAX_MINUTES * 60 / TASK_RECHECK_TIMEOUT
status = task.get_status()
for attempt in xrange(MAX_ATTEMPTS):
if status == TASK_STATUS_SUCCESS:
return
if status == TASK_STATUS_ERROR:
error = task.get_Error()
raise RuntimeError(
"Error during task execution: {0}".format(error.get_message()))
time.sleep(TASK_RECHECK_TIMEOUT)
response = requests.get(
task.get_href(),
headers=vca_client.vcloud_session.get_vcloud_headers(),
verify=False)
task = taskType.parseString(response.content, True)
status = task.get_status()
raise RuntimeError("Wait for task timeout.") |
class VcloudCleanupContext(BaseHandler.CleanupContext):
def __init__(self, context_name, env):
super(VcloudCleanupContext, self).__init__(context_name, env) | random_line_split |
example.py | from Robinhood import Robinhood
#Setup
my_trader = Robinhood(username="YOUR_USERNAME", password="YOUR_PASSWORD")
#Get stock information
#Note: Sometimes more than one instrument may be returned for a given stock symbol
stock_instrument = my_trader.instruments("GEVO")[0]
#Get a stock's quote
my_trader.print_quote("AAPL")
#Prompt for a symbol
my_trader.print_quote()
#Print multiple symbols
my_trader.print_quotes(stocks=["BBRY", "FB", "MSFT"])
#View all data for a given stock, i.e. ask price and size, bid price and size, previous close, adjusted previous close, etc.
quote_info = my_trader.quote_data("GEVO")
print(quote_info)
#Place a buy order (uses market bid price)
buy_order = my_trader.place_buy_order(stock_instrument, 1)
| sell_order = my_trader.place_sell_order(stock_instrument, 1) | #Place a sell order | random_line_split |
build.py | # -*- coding: utf-8 -*-
import logging
import os
from django.conf import settings
from django.core.management import BaseCommand
from django.apps import apps
from django.core.management import CommandError
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.db import models
logger = logging.getLogger(__name__)
class BaseGenerator(object):
template_names = []
def __init__(self, context, path):
self.context = context
self.path = path
def get_destination(self, template_name, app_name="", model_name=""):
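        # Map a template path to its output path, e.g. (hypothetical app/model)
        # 'scaffold/templates/APP_NAME/MODEL_NAME_list.html' becomes
        # '<app_path>/templates/blog/post_list.html'.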
destination = self.path + template_name.replace(
'scaffold/', '/'
).replace(
'.py.html', '.py'
).replace(
'APP_NAME', app_name
).replace(
'MODEL_NAME', model_name
)
# Create the directory if it does not exist.
directory = os.path.dirname(destination)
if not os.path.exists(directory):
os.makedirs(directory)
return destination
def | (self):
for template_name in self.template_names:
template = get_template(template_name)
data = template.render(self.context)
destination = self.get_destination(template_name, app_name=self.context['app_name'])
with open(destination, 'wb') as out:
out.write(data.encode('utf-8'))
logger.info(u"Write %s", destination)
class SingleFileGenerator(BaseGenerator):
"""SingeFileGenerator uses the complete context (all models) per template."""
template_names = [
'scaffold/admin.py.html',
'scaffold/context_processors.py.html',
'scaffold/model_mixins.py.html',
'scaffold/static/APP_NAME/styles.css',
'scaffold/templates/APP_NAME/index.html',
'scaffold/templates/APP_NAME/pagination.html',
'scaffold/templates/base.html',
'scaffold/templatetags/__init__.py',
'scaffold/templatetags/APP_NAME_tags.py',
'scaffold/urls.py.html',
'scaffold/views.py.html',
]
class MultiFileGenerator(BaseGenerator):
"""MultiFileGenerator splits the context into a context for each model. It generates multiple files per model."""
template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_base.html',
'scaffold/templates/APP_NAME/MODEL_NAME_confirm_delete.html',
'scaffold/templates/APP_NAME/MODEL_NAME_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_form.html',
'scaffold/templates/APP_NAME/MODEL_NAME_list.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_list.html',
]
def generate(self):
for obj in self.context['items']:
date_template_names = []
if obj['date_fields']:
date_template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_archive.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_day.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_month.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_week.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_year.html',
]
for template_name in self.template_names + date_template_names:
template = get_template(template_name)
data = template.render(obj)
destination = self.get_destination(template_name, obj['app_name'], obj['url_name'])
with open(destination, 'w') as out:
out.write(data)
logger.debug("Write %s", destination)
class Command(BaseCommand):
"""The handle method is executed by the `./manage.py build app_name` command.
    It introspects all models in the given app and calls the generators.
The get fields methods are loosely based on:
https://docs.djangoproject.com/en/1.10/ref/models/meta/
"""
def add_arguments(self, parser):
parser.add_argument('app_name', nargs='+', type=str)
def get_fields(self, model):
"""All model fields, fields dynamically added from the other end excluded.
`include_hidden` is False by default. If set to True, get_fields() will include fields that are used to
back other field’s functionality. This will also include any fields that have a related_name (such as
ManyToManyField, or ForeignKey) that start with a `+`."""
return [field.name for field in model._meta.get_fields(include_hidden=False)]
def get_concrete_fields(self, model):
"""All model fields, like get_fields but NO backward related fields."""
fields = [
(f, f.model if f.model != model else None)
for f in model._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
return [field.name for field, model in fields]
def get_related_fields(self, model):
"""Related fields like ForeignKey, OneToOne fields."""
return [
field.name
for field in model._meta.get_fields()
if (field.one_to_many or field.one_to_one)
and field.auto_created and not field.concrete
]
def get_many_to_many_fields(self, model):
"""ManyToMany fields"""
return [
field.name
for field in model._meta.get_fields()
if field.many_to_many and not field.auto_created
]
def get_date_fields(self, model):
"""Date or datetime fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.DateField, models.DateTimeField)
]
def get_text_fields(self, model):
"""Text fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.CharField, models.TextField)
]
def get_related_with_models(self, model):
fields = [
(f.related_model.__name__, f.model if f.model != model else None)
for f in model._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete
]
return list(set([model_name for model_name, _ in fields]))
def handle(self, *args, **options):
"""Handle the command"""
# Raise error if app is not in INSTALLED_APPS.
app_name = options['app_name'][0]
if app_name not in settings.INSTALLED_APPS:
raise CommandError('Add {} to installed apps'.format(app_name))
# Build one big context of all models and their fields.
context = {'items': [], 'app_name': app_name}
all_models = apps.all_models[app_name]
for name, model in all_models.items():
if "_" not in name: # Django auto generated cross tables do have `_`. Exclude them.
context['items'].append({
'app_name': app_name,
'model': model,
'model_name': model.__name__,
'url_name': slugify(model._meta.verbose_name).replace('-', ''),
'model_slug': slugify(model._meta.verbose_name).replace('-', ''),
'verbose_name': model._meta.verbose_name,
                'verbose_plural': model._meta.verbose_name_plural,
'table_name': model._meta.db_table,
'slug': slugify(model._meta.verbose_name),
                'slug_plural': slugify(model._meta.verbose_name_plural),
'fields': self.get_fields(model),
'concrete_fields': self.get_concrete_fields(model),
'related_fields': self.get_related_fields(model),
'many_to_many_fields': self.get_many_to_many_fields(model),
'date_fields': self.get_date_fields(model),
'text_fields': self.get_text_fields(model),
                'related_with_models': self.get_related_with_models(model),
})
        logger.info(context)
path = apps.app_configs[app_name].path
for generator in [
SingleFileGenerator,
MultiFileGenerator,
]:
generator(context=context, path=path).generate()
logger.info('Success!')
| generate | identifier_name |
build.py | # -*- coding: utf-8 -*-
import logging
import os
from django.conf import settings
from django.core.management import BaseCommand
from django.apps import apps
from django.core.management import CommandError
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.db import models
logger = logging.getLogger(__name__)
class BaseGenerator(object):
template_names = []
def __init__(self, context, path):
self.context = context
self.path = path
def get_destination(self, template_name, app_name="", model_name=""):
|
def generate(self):
for template_name in self.template_names:
template = get_template(template_name)
data = template.render(self.context)
destination = self.get_destination(template_name, app_name=self.context['app_name'])
with open(destination, 'wb') as out:
out.write(data.encode('utf-8'))
logger.info(u"Write %s", destination)
class SingleFileGenerator(BaseGenerator):
"""SingeFileGenerator uses the complete context (all models) per template."""
template_names = [
'scaffold/admin.py.html',
'scaffold/context_processors.py.html',
'scaffold/model_mixins.py.html',
'scaffold/static/APP_NAME/styles.css',
'scaffold/templates/APP_NAME/index.html',
'scaffold/templates/APP_NAME/pagination.html',
'scaffold/templates/base.html',
'scaffold/templatetags/__init__.py',
'scaffold/templatetags/APP_NAME_tags.py',
'scaffold/urls.py.html',
'scaffold/views.py.html',
]
class MultiFileGenerator(BaseGenerator):
"""MultiFileGenerator splits the context into a context for each model. It generates multiple files per model."""
template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_base.html',
'scaffold/templates/APP_NAME/MODEL_NAME_confirm_delete.html',
'scaffold/templates/APP_NAME/MODEL_NAME_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_form.html',
'scaffold/templates/APP_NAME/MODEL_NAME_list.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_list.html',
]
def generate(self):
for obj in self.context['items']:
date_template_names = []
if obj['date_fields']:
date_template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_archive.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_day.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_month.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_week.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_year.html',
]
for template_name in self.template_names + date_template_names:
template = get_template(template_name)
data = template.render(obj)
destination = self.get_destination(template_name, obj['app_name'], obj['url_name'])
with open(destination, 'w') as out:
out.write(data)
logger.debug("Write %s", destination)
class Command(BaseCommand):
"""The handle method is executed by the `./manage.py build app_name` command.
    It introspects all models in the given app and calls the generators.
The get fields methods are loosely based on:
https://docs.djangoproject.com/en/1.10/ref/models/meta/
"""
def add_arguments(self, parser):
parser.add_argument('app_name', nargs='+', type=str)
def get_fields(self, model):
"""All model fields, fields dynamically added from the other end excluded.
`include_hidden` is False by default. If set to True, get_fields() will include fields that are used to
back other field’s functionality. This will also include any fields that have a related_name (such as
ManyToManyField, or ForeignKey) that start with a `+`."""
return [field.name for field in model._meta.get_fields(include_hidden=False)]
def get_concrete_fields(self, model):
"""All model fields, like get_fields but NO backward related fields."""
fields = [
(f, f.model if f.model != model else None)
for f in model._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
return [field.name for field, model in fields]
def get_related_fields(self, model):
"""Related fields like ForeignKey, OneToOne fields."""
return [
field.name
for field in model._meta.get_fields()
if (field.one_to_many or field.one_to_one)
and field.auto_created and not field.concrete
]
def get_many_to_many_fields(self, model):
"""ManyToMany fields"""
return [
field.name
for field in model._meta.get_fields()
if field.many_to_many and not field.auto_created
]
def get_date_fields(self, model):
"""Date or datetime fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.DateField, models.DateTimeField)
]
def get_text_fields(self, model):
"""Text fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.CharField, models.TextField)
]
def get_related_with_models(self, model):
fields = [
(f.related_model.__name__, f.model if f.model != model else None)
for f in model._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete
]
return list(set([model_name for model_name, _ in fields]))
def handle(self, *args, **options):
"""Handle the command"""
# Raise error if app is not in INSTALLED_APPS.
app_name = options['app_name'][0]
if app_name not in settings.INSTALLED_APPS:
raise CommandError('Add {} to installed apps'.format(app_name))
# Build one big context of all models and their fields.
context = {'items': [], 'app_name': app_name}
all_models = apps.all_models[app_name]
for name, model in all_models.items():
if "_" not in name: # Django auto generated cross tables do have `_`. Exclude them.
context['items'].append({
'app_name': app_name,
'model': model,
'model_name': model.__name__,
'url_name': slugify(model._meta.verbose_name).replace('-', ''),
'model_slug': slugify(model._meta.verbose_name).replace('-', ''),
'verbose_name': model._meta.verbose_name,
                'verbose_plural': model._meta.verbose_name_plural,
'table_name': model._meta.db_table,
'slug': slugify(model._meta.verbose_name),
                'slug_plural': slugify(model._meta.verbose_name_plural),
'fields': self.get_fields(model),
'concrete_fields': self.get_concrete_fields(model),
'related_fields': self.get_related_fields(model),
'many_to_many_fields': self.get_many_to_many_fields(model),
'date_fields': self.get_date_fields(model),
'text_fields': self.get_text_fields(model),
                'related_with_models': self.get_related_with_models(model),
})
        logger.info(context)
path = apps.app_configs[app_name].path
for generator in [
SingleFileGenerator,
MultiFileGenerator,
]:
generator(context=context, path=path).generate()
logger.info('Success!')
| destination = self.path + template_name.replace(
'scaffold/', '/'
).replace(
'.py.html', '.py'
).replace(
'APP_NAME', app_name
).replace(
'MODEL_NAME', model_name
)
# Create the directory if it does not exist.
directory = os.path.dirname(destination)
if not os.path.exists(directory):
os.makedirs(directory)
return destination | identifier_body |
build.py | # -*- coding: utf-8 -*-
import logging
import os
from django.conf import settings
from django.core.management import BaseCommand
from django.apps import apps
from django.core.management import CommandError
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.db import models
logger = logging.getLogger(__name__)
class BaseGenerator(object):
template_names = []
def __init__(self, context, path):
self.context = context
self.path = path
def get_destination(self, template_name, app_name="", model_name=""):
destination = self.path + template_name.replace(
'scaffold/', '/'
).replace( | )
# Create the directory if it does not exist.
directory = os.path.dirname(destination)
if not os.path.exists(directory):
os.makedirs(directory)
return destination
def generate(self):
for template_name in self.template_names:
template = get_template(template_name)
data = template.render(self.context)
destination = self.get_destination(template_name, app_name=self.context['app_name'])
with open(destination, 'wb') as out:
out.write(data.encode('utf-8'))
logger.info(u"Write %s", destination)
class SingleFileGenerator(BaseGenerator):
"""SingeFileGenerator uses the complete context (all models) per template."""
template_names = [
'scaffold/admin.py.html',
'scaffold/context_processors.py.html',
'scaffold/model_mixins.py.html',
'scaffold/static/APP_NAME/styles.css',
'scaffold/templates/APP_NAME/index.html',
'scaffold/templates/APP_NAME/pagination.html',
'scaffold/templates/base.html',
'scaffold/templatetags/__init__.py',
'scaffold/templatetags/APP_NAME_tags.py',
'scaffold/urls.py.html',
'scaffold/views.py.html',
]
class MultiFileGenerator(BaseGenerator):
"""MultiFileGenerator splits the context into a context for each model. It generates multiple files per model."""
template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_base.html',
'scaffold/templates/APP_NAME/MODEL_NAME_confirm_delete.html',
'scaffold/templates/APP_NAME/MODEL_NAME_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_form.html',
'scaffold/templates/APP_NAME/MODEL_NAME_list.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_list.html',
]
def generate(self):
for obj in self.context['items']:
date_template_names = []
if obj['date_fields']:
date_template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_archive.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_day.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_month.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_week.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_year.html',
]
for template_name in self.template_names + date_template_names:
template = get_template(template_name)
data = template.render(obj)
destination = self.get_destination(template_name, obj['app_name'], obj['url_name'])
with open(destination, 'w') as out:
out.write(data)
logger.debug("Write %s", destination)
class Command(BaseCommand):
"""The handle method is executed by the `./manage.py build app_name` command.
    It introspects all models in the given app and calls the generators.
The get fields methods are loosely based on:
https://docs.djangoproject.com/en/1.10/ref/models/meta/
"""
def add_arguments(self, parser):
parser.add_argument('app_name', nargs='+', type=str)
def get_fields(self, model):
"""All model fields, fields dynamically added from the other end excluded.
`include_hidden` is False by default. If set to True, get_fields() will include fields that are used to
back other field’s functionality. This will also include any fields that have a related_name (such as
ManyToManyField, or ForeignKey) that start with a `+`."""
return [field.name for field in model._meta.get_fields(include_hidden=False)]
def get_concrete_fields(self, model):
"""All model fields, like get_fields but NO backward related fields."""
fields = [
(f, f.model if f.model != model else None)
for f in model._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
return [field.name for field, model in fields]
def get_related_fields(self, model):
"""Related fields like ForeignKey, OneToOne fields."""
return [
field.name
for field in model._meta.get_fields()
if (field.one_to_many or field.one_to_one)
and field.auto_created and not field.concrete
]
def get_many_to_many_fields(self, model):
"""ManyToMany fields"""
return [
field.name
for field in model._meta.get_fields()
if field.many_to_many and not field.auto_created
]
def get_date_fields(self, model):
"""Date or datetime fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.DateField, models.DateTimeField)
]
def get_text_fields(self, model):
"""Text fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.CharField, models.TextField)
]
def get_related_with_models(self, model):
fields = [
(f.related_model.__name__, f.model if f.model != model else None)
for f in model._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete
]
return list(set([model_name for model_name, _ in fields]))
def handle(self, *args, **options):
"""Handle the command"""
# Raise error if app is not in INSTALLED_APPS.
app_name = options['app_name'][0]
if app_name not in settings.INSTALLED_APPS:
raise CommandError('Add {} to installed apps'.format(app_name))
# Build one big context of all models and their fields.
context = {'items': [], 'app_name': app_name}
all_models = apps.all_models[app_name]
for name, model in all_models.items():
if "_" not in name: # Django auto generated cross tables do have `_`. Exclude them.
context['items'].append({
'app_name': app_name,
'model': model,
'model_name': model.__name__,
'url_name': slugify(model._meta.verbose_name).replace('-', ''),
'model_slug': slugify(model._meta.verbose_name).replace('-', ''),
'verbose_name': model._meta.verbose_name,
                'verbose_plural': model._meta.verbose_name_plural,
'table_name': model._meta.db_table,
'slug': slugify(model._meta.verbose_name),
                'slug_plural': slugify(model._meta.verbose_name_plural),
'fields': self.get_fields(model),
'concrete_fields': self.get_concrete_fields(model),
'related_fields': self.get_related_fields(model),
'many_to_many_fields': self.get_many_to_many_fields(model),
'date_fields': self.get_date_fields(model),
'text_fields': self.get_text_fields(model),
                'related_with_models': self.get_related_with_models(model),
})
        logger.info(context)
path = apps.app_configs[app_name].path
for generator in [
SingleFileGenerator,
MultiFileGenerator,
]:
generator(context=context, path=path).generate()
logger.info('Success!') | '.py.html', '.py'
).replace(
'APP_NAME', app_name
).replace(
'MODEL_NAME', model_name | random_line_split |
build.py | # -*- coding: utf-8 -*-
import logging
import os
from django.conf import settings
from django.core.management import BaseCommand
from django.apps import apps
from django.core.management import CommandError
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.db import models
logger = logging.getLogger(__name__)
class BaseGenerator(object):
template_names = []
def __init__(self, context, path):
self.context = context
self.path = path
def get_destination(self, template_name, app_name="", model_name=""):
destination = self.path + template_name.replace(
'scaffold/', '/'
).replace(
'.py.html', '.py'
).replace(
'APP_NAME', app_name
).replace(
'MODEL_NAME', model_name
)
# Create the directory if it does not exist.
directory = os.path.dirname(destination)
if not os.path.exists(directory):
|
return destination
def generate(self):
for template_name in self.template_names:
template = get_template(template_name)
data = template.render(self.context)
destination = self.get_destination(template_name, app_name=self.context['app_name'])
with open(destination, 'wb') as out:
out.write(data.encode('utf-8'))
logger.info(u"Write %s", destination)
class SingleFileGenerator(BaseGenerator):
"""SingeFileGenerator uses the complete context (all models) per template."""
template_names = [
'scaffold/admin.py.html',
'scaffold/context_processors.py.html',
'scaffold/model_mixins.py.html',
'scaffold/static/APP_NAME/styles.css',
'scaffold/templates/APP_NAME/index.html',
'scaffold/templates/APP_NAME/pagination.html',
'scaffold/templates/base.html',
'scaffold/templatetags/__init__.py',
'scaffold/templatetags/APP_NAME_tags.py',
'scaffold/urls.py.html',
'scaffold/views.py.html',
]
class MultiFileGenerator(BaseGenerator):
"""MultiFileGenerator splits the context into a context for each model. It generates multiple files per model."""
template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_base.html',
'scaffold/templates/APP_NAME/MODEL_NAME_confirm_delete.html',
'scaffold/templates/APP_NAME/MODEL_NAME_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_form.html',
'scaffold/templates/APP_NAME/MODEL_NAME_list.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_list.html',
]
def generate(self):
for obj in self.context['items']:
date_template_names = []
if obj['date_fields']:
date_template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_archive.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_day.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_month.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_week.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_year.html',
]
for template_name in self.template_names + date_template_names:
template = get_template(template_name)
data = template.render(obj)
destination = self.get_destination(template_name, obj['app_name'], obj['url_name'])
with open(destination, 'w') as out:
out.write(data)
logger.debug("Write %s", destination)
class Command(BaseCommand):
"""The handle method is executed by the `./manage.py build app_name` command.
    It introspects all models in the given app and calls the generators.
The get fields methods are loosely based on:
https://docs.djangoproject.com/en/1.10/ref/models/meta/
"""
def add_arguments(self, parser):
parser.add_argument('app_name', nargs='+', type=str)
def get_fields(self, model):
"""All model fields, fields dynamically added from the other end excluded.
`include_hidden` is False by default. If set to True, get_fields() will include fields that are used to
back other field’s functionality. This will also include any fields that have a related_name (such as
ManyToManyField, or ForeignKey) that start with a `+`."""
return [field.name for field in model._meta.get_fields(include_hidden=False)]
def get_concrete_fields(self, model):
"""All model fields, like get_fields but NO backward related fields."""
fields = [
(f, f.model if f.model != model else None)
for f in model._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
return [field.name for field, model in fields]
def get_related_fields(self, model):
"""Related fields like ForeignKey, OneToOne fields."""
return [
field.name
for field in model._meta.get_fields()
if (field.one_to_many or field.one_to_one)
and field.auto_created and not field.concrete
]
def get_many_to_many_fields(self, model):
"""ManyToMany fields"""
return [
field.name
for field in model._meta.get_fields()
if field.many_to_many and not field.auto_created
]
def get_date_fields(self, model):
"""Date or datetime fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.DateField, models.DateTimeField)
]
def get_text_fields(self, model):
"""Text fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.CharField, models.TextField)
]
def get_related_with_models(self, model):
fields = [
(f.related_model.__name__, f.model if f.model != model else None)
for f in model._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete
]
return list(set([model_name for model_name, _ in fields]))
def handle(self, *args, **options):
"""Handle the command"""
# Raise error if app is not in INSTALLED_APPS.
app_name = options['app_name'][0]
if app_name not in settings.INSTALLED_APPS:
raise CommandError('Add {} to installed apps'.format(app_name))
# Build one big context of all models and their fields.
context = {'items': [], 'app_name': app_name}
all_models = apps.all_models[app_name]
for name, model in all_models.items():
if "_" not in name: # Django auto generated cross tables do have `_`. Exclude them.
context['items'].append({
'app_name': app_name,
'model': model,
'model_name': model.__name__,
'url_name': slugify(model._meta.verbose_name).replace('-', ''),
'model_slug': slugify(model._meta.verbose_name).replace('-', ''),
'verbose_name': model._meta.verbose_name,
                'verbose_plural': model._meta.verbose_name_plural,
'table_name': model._meta.db_table,
'slug': slugify(model._meta.verbose_name),
                'slug_plural': slugify(model._meta.verbose_name_plural),
'fields': self.get_fields(model),
'concrete_fields': self.get_concrete_fields(model),
'related_fields': self.get_related_fields(model),
'many_to_many_fields': self.get_many_to_many_fields(model),
'date_fields': self.get_date_fields(model),
'text_fields': self.get_text_fields(model),
                'related_with_models': self.get_related_with_models(model),
})
        logger.info(context)
path = apps.app_configs[app_name].path
for generator in [
SingleFileGenerator,
MultiFileGenerator,
]:
generator(context=context, path=path).generate()
logger.info('Success!')
| os.makedirs(directory) | conditional_block |
issue-33537.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const fn foo() -> *const i8 {
b"foo" as *const _ as *const i8
}
const fn bar() -> i32 {
*&{(1, 2, 3).1}
}
fn main() {
assert_eq!(foo(), b"foo" as *const _ as *const i8); | } | assert_eq!(bar(), 2); | random_line_split |
issue-33537.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const fn foo() -> *const i8 {
b"foo" as *const _ as *const i8
}
const fn | () -> i32 {
*&{(1, 2, 3).1}
}
fn main() {
assert_eq!(foo(), b"foo" as *const _ as *const i8);
assert_eq!(bar(), 2);
}
| bar | identifier_name |
issue-33537.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const fn foo() -> *const i8 {
b"foo" as *const _ as *const i8
}
const fn bar() -> i32 |
fn main() {
assert_eq!(foo(), b"foo" as *const _ as *const i8);
assert_eq!(bar(), 2);
}
| {
*&{(1, 2, 3).1}
} | identifier_body |
globPatternContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import HtmlContent = require('vs/base/common/htmlContent');
import Strings = require('vs/base/common/strings');
import Modes = require('vs/editor/common/modes');
import WinJS = require('vs/base/common/winjs.base');
import nls = require('vs/nls');
import JSONWorker = require('vs/languages/json/common/jsonWorker');
import URI from 'vs/base/common/uri';
import {JSONLocation} from 'vs/languages/json/common/parser/jsonLocation';
var globProperties:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('fileLabel', "Files by Extension"), codeSnippet: '"**/*.{{extension}}": true', documentationLabel: nls.localize('fileDescription', "Match all files of a specific file extension.")},
{ type: 'value', label: nls.localize('filesLabel', "Files with Multiple Extensions"), codeSnippet: '"**/*.{ext1,ext2,ext3}": true', documentationLabel: nls.localize('filesDescription', "Match all files with any of the file extensions.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '"**/*.{{source-extension}}": { "when": "$(basename).{{target-extension}}" }', documentationLabel: nls.localize('derivedDescription', "Match files that have siblings with the same name but a different extension.")},
{ type: 'value', label: nls.localize('topFolderLabel', "Folder by Name (Top Level)"), codeSnippet: '"{{name}}": true', documentationLabel: nls.localize('topFolderDescription', "Match a top level folder with a specific name.")},
{ type: 'value', label: nls.localize('topFoldersLabel', "Folders with Multiple Names (Top Level)"), codeSnippet: '"{folder1,folder2,folder3}": true', documentationLabel: nls.localize('topFoldersDescription', "Match multiple top level folders.")},
{ type: 'value', label: nls.localize('folderLabel', "Folder by Name (Any Location)"), codeSnippet: '"**/{{name}}": true', documentationLabel: nls.localize('folderDescription', "Match a folder with a specific name in any location.")},
];
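// The {{name}} markers in the snippets above are placeholder tab stops in the
// snippet syntax used here; the editor prompts for them once a suggestion is accepted.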
var globValues:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('trueLabel', "True"), codeSnippet: 'true', documentationLabel: nls.localize('trueDescription', "Enable the pattern.")},
{ type: 'value', label: nls.localize('falseLabel', "False"), codeSnippet: 'false', documentationLabel: nls.localize('falseDescription', "Disable the pattern.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '{ "when": "$(basename).{{extension}}" }', documentationLabel: nls.localize('siblingsDescription', "Match files that have siblings with the same name but a different extension.")}
];
export class GlobPatternContribution implements JSONWorker.IJSONWorkerContribution {
constructor() {
}
private isSettingsFile(resource: URI): boolean {
var path = resource.path;
return Strings.endsWith(path, '/settings.json');
}
public collectDefaultSuggestions(resource: URI, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
return null;
}
public collectPropertySuggestions(resource: URI, location: JSONLocation, currentWord: string, addValue: boolean, isLast:boolean, result: JSONWorker.ISuggestionsCollector) : WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) {
globProperties.forEach((e) => result.add(e));
}
return null;
}
public collectValueSuggestions(resource: URI, location: JSONLocation, currentKey: string, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) {
globValues.forEach((e) => result.add(e));
}
return null;
}
public getInfoContribution(resource: URI, location: JSONLocation): WinJS.TPromise<HtmlContent.IHTMLContentElement[]> {
return null; | }
} | random_line_split |
|
globPatternContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import HtmlContent = require('vs/base/common/htmlContent');
import Strings = require('vs/base/common/strings');
import Modes = require('vs/editor/common/modes');
import WinJS = require('vs/base/common/winjs.base');
import nls = require('vs/nls');
import JSONWorker = require('vs/languages/json/common/jsonWorker');
import URI from 'vs/base/common/uri';
import {JSONLocation} from 'vs/languages/json/common/parser/jsonLocation';
var globProperties:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('fileLabel', "Files by Extension"), codeSnippet: '"**/*.{{extension}}": true', documentationLabel: nls.localize('fileDescription', "Match all files of a specific file extension.")},
{ type: 'value', label: nls.localize('filesLabel', "Files with Multiple Extensions"), codeSnippet: '"**/*.{ext1,ext2,ext3}": true', documentationLabel: nls.localize('filesDescription', "Match all files with any of the file extensions.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '"**/*.{{source-extension}}": { "when": "$(basename).{{target-extension}}" }', documentationLabel: nls.localize('derivedDescription', "Match files that have siblings with the same name but a different extension.")},
{ type: 'value', label: nls.localize('topFolderLabel', "Folder by Name (Top Level)"), codeSnippet: '"{{name}}": true', documentationLabel: nls.localize('topFolderDescription', "Match a top level folder with a specific name.")},
{ type: 'value', label: nls.localize('topFoldersLabel', "Folders with Multiple Names (Top Level)"), codeSnippet: '"{folder1,folder2,folder3}": true', documentationLabel: nls.localize('topFoldersDescription', "Match multiple top level folders.")},
{ type: 'value', label: nls.localize('folderLabel', "Folder by Name (Any Location)"), codeSnippet: '"**/{{name}}": true', documentationLabel: nls.localize('folderDescription', "Match a folder with a specific name in any location.")},
];
var globValues:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('trueLabel', "True"), codeSnippet: 'true', documentationLabel: nls.localize('trueDescription', "Enable the pattern.")},
{ type: 'value', label: nls.localize('falseLabel', "False"), codeSnippet: 'false', documentationLabel: nls.localize('falseDescription', "Disable the pattern.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '{ "when": "$(basename).{{extension}}" }', documentationLabel: nls.localize('siblingsDescription', "Match files that have siblings with the same name but a different extension.")}
];
export class GlobPatternContribution implements JSONWorker.IJSONWorkerContribution {
constructor() {
}
private isSettingsFile(resource: URI): boolean {
var path = resource.path;
return Strings.endsWith(path, '/settings.json');
}
public collectDefaultSuggestions(resource: URI, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
return null;
}
public collectPropertySuggestions(resource: URI, location: JSONLocation, currentWord: string, addValue: boolean, isLast:boolean, result: JSONWorker.ISuggestionsCollector) : WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) {
globProperties.forEach((e) => result.add(e));
}
return null;
}
public | (resource: URI, location: JSONLocation, currentKey: string, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) {
globValues.forEach((e) => result.add(e));
}
return null;
}
public getInfoContribution(resource: URI, location: JSONLocation): WinJS.TPromise<HtmlContent.IHTMLContentElement[]> {
return null;
}
} | collectValueSuggestions | identifier_name |
globPatternContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import HtmlContent = require('vs/base/common/htmlContent');
import Strings = require('vs/base/common/strings');
import Modes = require('vs/editor/common/modes');
import WinJS = require('vs/base/common/winjs.base');
import nls = require('vs/nls');
import JSONWorker = require('vs/languages/json/common/jsonWorker');
import URI from 'vs/base/common/uri';
import {JSONLocation} from 'vs/languages/json/common/parser/jsonLocation';
var globProperties:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('fileLabel', "Files by Extension"), codeSnippet: '"**/*.{{extension}}": true', documentationLabel: nls.localize('fileDescription', "Match all files of a specific file extension.")},
{ type: 'value', label: nls.localize('filesLabel', "Files with Multiple Extensions"), codeSnippet: '"**/*.{ext1,ext2,ext3}": true', documentationLabel: nls.localize('filesDescription', "Match all files with any of the file extensions.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '"**/*.{{source-extension}}": { "when": "$(basename).{{target-extension}}" }', documentationLabel: nls.localize('derivedDescription', "Match files that have siblings with the same name but a different extension.")},
{ type: 'value', label: nls.localize('topFolderLabel', "Folder by Name (Top Level)"), codeSnippet: '"{{name}}": true', documentationLabel: nls.localize('topFolderDescription', "Match a top level folder with a specific name.")},
{ type: 'value', label: nls.localize('topFoldersLabel', "Folders with Multiple Names (Top Level)"), codeSnippet: '"{folder1,folder2,folder3}": true', documentationLabel: nls.localize('topFoldersDescription', "Match multiple top level folders.")},
{ type: 'value', label: nls.localize('folderLabel', "Folder by Name (Any Location)"), codeSnippet: '"**/{{name}}": true', documentationLabel: nls.localize('folderDescription', "Match a folder with a specific name in any location.")},
];
var globValues:Modes.ISuggestion[] = [
{ type: 'value', label: nls.localize('trueLabel', "True"), codeSnippet: 'true', documentationLabel: nls.localize('trueDescription', "Enable the pattern.")},
{ type: 'value', label: nls.localize('falseLabel', "False"), codeSnippet: 'false', documentationLabel: nls.localize('falseDescription', "Disable the pattern.")},
{ type: 'value', label: nls.localize('derivedLabel', "Files with Siblings by Name"), codeSnippet: '{ "when": "$(basename).{{extension}}" }', documentationLabel: nls.localize('siblingsDescription', "Match files that have siblings with the same name but a different extension.")}
];
export class GlobPatternContribution implements JSONWorker.IJSONWorkerContribution {
constructor() {
}
private isSettingsFile(resource: URI): boolean {
var path = resource.path;
return Strings.endsWith(path, '/settings.json');
}
public collectDefaultSuggestions(resource: URI, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
return null;
}
public collectPropertySuggestions(resource: URI, location: JSONLocation, currentWord: string, addValue: boolean, isLast:boolean, result: JSONWorker.ISuggestionsCollector) : WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) {
globProperties.forEach((e) => result.add(e));
}
return null;
}
public collectValueSuggestions(resource: URI, location: JSONLocation, currentKey: string, result: JSONWorker.ISuggestionsCollector): WinJS.Promise {
if (this.isSettingsFile(resource) && (location.matches(['files.exclude']) || location.matches(['search.exclude']))) |
return null;
}
public getInfoContribution(resource: URI, location: JSONLocation): WinJS.TPromise<HtmlContent.IHTMLContentElement[]> {
return null;
}
} | {
globValues.forEach((e) => result.add(e));
} | conditional_block |
sample_help.py | #! /usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
os.chdir(sys.path[0])
sys.path.append('/mnt/sda2/github/TSF1KEV/TSFpy')
from TSF_io import *
#from TSF_Forth import *
from TSF_shuffle import *
from TSF_match import *
from TSF_calc import *
from TSF_time import *
TSF_Forth_init(TSF_io_argvs(),[TSF_shuffle_Initwords,TSF_match_Initwords,TSF_calc_Initwords,TSF_time_Initwords])
TSF_Forth_setTSF("TSF_Tab-Separated-Forth:",
"\t".join(["UTF-8","#TSF_encoding","replace:","#TSF_this","help:","#TSF_echothe","0","#TSF_fin."]),
TSF_style="T")
TSF_Forth_setTSF("help:",
"\t".join(["usage: ./TSF.py [command|file.tsf] [argv] ...",
"commands:",
" --help this commands view",
" --about about TSF UTF-8 text (Japanese) view\" ",
" --python TSF.tsf to Python.py view or save\" ",
" --helloworld \"Hello world 1 #TSF_echoN\" sample",
" --quine TSF_Forth_viewthey() Quine (self source) sample",
" --99beer 99 Bottles of Beer sample",
" --fizzbuzz ([0]#3Z1~0)+([0]#5Z2~0) Fizz Buzz Fizz&Buzz sample",
" --zundoko Zun Zun Zun Zun Doko VeronCho sample",
" --fibonacci Fibonacci number 0,1,1,2,3,5,8,13,21,55... sample",
" --prime prime numbers 2,3,5,7,11,13,17,19,23,29... sample",
" --calcFX fractions calculator \"1/3-m1|2\"-> p5|6 sample",
" --calcDC fractions calculator \"1/3-m1|2\"-> 0.8333... sample",
" --calcKN fractions calculator \"1/3-m1|2\"-> 6 bunno 5 sample",
" --calender \"@000y@0m@0dm@wdec@0h@0n@0s\"-> TSF_time_getdaytime() sample"]), | TSF_Forth_setTSF("replaceO:",
"\t".join(["TSF_time_getdaytime()"]),
TSF_style="N")
TSF_Forth_setTSF("replaceN:",
"\t".join(["@000y@0m@0dm@wdec@0h@0n@0s"]),
TSF_style="N")
TSF_Forth_addfin(TSF_io_argvs())
TSF_Forth_argvsleftcut(TSF_io_argvs(),1)
TSF_Forth_run() | TSF_style="N")
TSF_Forth_setTSF("replace:",
"\t".join(["replaceN:","#TSF_carbonthe","#TSF_calender","replaceN:","0","#TSF_pokethe","help:","replaceO:","replaceN:","#TSF_replacestacks"]),
TSF_style="T") | random_line_split |
uart.rs | use core::fmt;
use core::str::StrExt;
use core::result::Result;
use hw::HW;
pub trait Uart<W : HW> {
fn put(&self, &mut W, ch : u8);
}
pub trait UartWriter : fmt::Write { }
pub struct | ;
impl UartWriter for DummyUartWriter { }
impl fmt::Write for DummyUartWriter {
fn write_str(&mut self, _: &str) -> fmt::Result {
Result::Ok(())
}
}
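// BlockingUartWriter forwards every byte of a string to the Uart with put(),
// so write_str returns only after the whole string has been handed over.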
pub struct BlockingUartWriter<H : 'static+HW> {
uart : &'static Uart<H>,
hw : &'static mut H,
}
impl<H : HW> UartWriter for BlockingUartWriter<H> { }
impl<H> BlockingUartWriter<H>
where H : HW {
pub fn new(hw : &'static mut H, uart : &'static Uart<H>) -> BlockingUartWriter<H> {
BlockingUartWriter { uart: uart, hw: hw }
}
}
impl<H> fmt::Write for BlockingUartWriter<H>
where H : HW {
fn write_str(&mut self, s: &str) -> fmt::Result {
for ch in s.bytes() {
self.uart.put(self.hw, ch);
}
Result::Ok(())
}
}
| DummyUartWriter | identifier_name |
uart.rs | use core::fmt;
use core::str::StrExt;
use core::result::Result;
|
pub trait UartWriter : fmt::Write { }
pub struct DummyUartWriter;
impl UartWriter for DummyUartWriter { }
impl fmt::Write for DummyUartWriter {
fn write_str(&mut self, _: &str) -> fmt::Result {
Result::Ok(())
}
}
pub struct BlockingUartWriter<H : 'static+HW> {
uart : &'static Uart<H>,
hw : &'static mut H,
}
impl<H : HW> UartWriter for BlockingUartWriter<H> { }
impl<H> BlockingUartWriter<H>
where H : HW {
pub fn new(hw : &'static mut H, uart : &'static Uart<H>) -> BlockingUartWriter<H> {
BlockingUartWriter { uart: uart, hw: hw }
}
}
impl<H> fmt::Write for BlockingUartWriter<H>
where H : HW {
fn write_str(&mut self, s: &str) -> fmt::Result {
for ch in s.bytes() {
self.uart.put(self.hw, ch);
}
Result::Ok(())
}
} | use hw::HW;
pub trait Uart<W : HW> {
fn put(&self, &mut W, ch : u8);
} | random_line_split |
uart.rs | use core::fmt;
use core::str::StrExt;
use core::result::Result;
use hw::HW;
pub trait Uart<W : HW> {
fn put(&self, &mut W, ch : u8);
}
pub trait UartWriter : fmt::Write { }
pub struct DummyUartWriter;
impl UartWriter for DummyUartWriter { }
impl fmt::Write for DummyUartWriter {
fn write_str(&mut self, _: &str) -> fmt::Result {
Result::Ok(())
}
}
pub struct BlockingUartWriter<H : 'static+HW> {
uart : &'static Uart<H>,
hw : &'static mut H,
}
impl<H : HW> UartWriter for BlockingUartWriter<H> { }
impl<H> BlockingUartWriter<H>
where H : HW {
pub fn new(hw : &'static mut H, uart : &'static Uart<H>) -> BlockingUartWriter<H> |
}
impl<H> fmt::Write for BlockingUartWriter<H>
where H : HW {
fn write_str(&mut self, s: &str) -> fmt::Result {
for ch in s.bytes() {
self.uart.put(self.hw, ch);
}
Result::Ok(())
}
}
| {
BlockingUartWriter { uart: uart, hw: hw }
} | identifier_body |
styles.py | # -*- coding: utf-8 -*-
# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.
# Standard library imports :
import platform
# Third party imports :
from PyQt5.QtGui import QIcon, QFont, QFontDatabase
from PyQt5.QtCore import QSize
class StyleDB(object):
def _ | self):
# ---- frame
self.frame = 22
self.HLine = 52
self.VLine = 53
self.sideBarWidth = 275
# ----- colors
self.red = '#C83737'
self.lightgray = '#E6E6E6'
self.rain = '#0000CC'
self.snow = '0.7'
self.wlvl = '#0000CC' # '#000099'
if platform.system() == 'Windows':
self.font1 = QFont('Segoe UI', 11) # Calibri, Cambria
self.font_console = QFont('Segoe UI', 9)
self.font_menubar = QFont('Segoe UI', 10)
elif platform.system() == 'Linux':
self.font1 = QFont('Ubuntu', 11)
self.font_console = QFont('Ubuntu', 9)
self.font_menubar = QFont('Ubuntu', 10)
# database = QFontDatabase()
# print database.families()
if platform.system() == 'Windows':
self.fontfamily = "Segoe UI" # "Cambria" #"Calibri" #"Segoe UI""
elif platform.system() == 'Linux':
self.fontfamily = "Ubuntu"
# self.fontSize1.setPointSize(11)
# 17 = QtGui.QFrame.Box | QtGui.QFrame.Plain
# 22 = QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain
# 20 = QtGui.QFrame.HLine | QtGui.QFrame.Plain
# 52 = QtGui.QFrame.HLine | QtGui.QFrame.Sunken
# 53 = QtGui.QFrame.VLine | QtGui.QFrame.Sunken
| _init__( | identifier_name |
styles.py | # -*- coding: utf-8 -*-
# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.
# Standard library imports :
import platform
# Third party imports :
from PyQt5.QtGui import QIcon, QFont, QFontDatabase
from PyQt5.QtCore import QSize
class StyleDB(object):
d |
# self.fontSize1.setPointSize(11)
# 17 = QtGui.QFrame.Box | QtGui.QFrame.Plain
# 22 = QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain
# 20 = QtGui.QFrame.HLine | QtGui.QFrame.Plain
# 52 = QtGui.QFrame.HLine | QtGui.QFrame.Sunken
# 53 = QtGui.QFrame.VLine | QtGui.QFrame.Sunken
| ef __init__(self):
# ---- frame
self.frame = 22
self.HLine = 52
self.VLine = 53
self.sideBarWidth = 275
# ----- colors
self.red = '#C83737'
self.lightgray = '#E6E6E6'
self.rain = '#0000CC'
self.snow = '0.7'
self.wlvl = '#0000CC' # '#000099'
if platform.system() == 'Windows':
self.font1 = QFont('Segoe UI', 11) # Calibri, Cambria
self.font_console = QFont('Segoe UI', 9)
self.font_menubar = QFont('Segoe UI', 10)
elif platform.system() == 'Linux':
self.font1 = QFont('Ubuntu', 11)
self.font_console = QFont('Ubuntu', 9)
self.font_menubar = QFont('Ubuntu', 10)
# database = QFontDatabase()
# print database.families()
if platform.system() == 'Windows':
self.fontfamily = "Segoe UI" # "Cambria" #"Calibri" #"Segoe UI""
elif platform.system() == 'Linux':
self.fontfamily = "Ubuntu"
| identifier_body |
styles.py | # -*- coding: utf-8 -*-
# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.
# Standard library imports :
import platform
# Third party imports :
from PyQt5.QtGui import QIcon, QFont, QFontDatabase
from PyQt5.QtCore import QSize | class StyleDB(object):
def __init__(self):
# ---- frame
self.frame = 22
self.HLine = 52
self.VLine = 53
self.sideBarWidth = 275
# ----- colors
self.red = '#C83737'
self.lightgray = '#E6E6E6'
self.rain = '#0000CC'
self.snow = '0.7'
self.wlvl = '#0000CC' # '#000099'
if platform.system() == 'Windows':
self.font1 = QFont('Segoe UI', 11) # Calibri, Cambria
self.font_console = QFont('Segoe UI', 9)
self.font_menubar = QFont('Segoe UI', 10)
elif platform.system() == 'Linux':
self.font1 = QFont('Ubuntu', 11)
self.font_console = QFont('Ubuntu', 9)
self.font_menubar = QFont('Ubuntu', 10)
# database = QFontDatabase()
# print database.families()
if platform.system() == 'Windows':
self.fontfamily = "Segoe UI" # "Cambria" #"Calibri" #"Segoe UI""
elif platform.system() == 'Linux':
self.fontfamily = "Ubuntu"
# self.fontSize1.setPointSize(11)
# 17 = QtGui.QFrame.Box | QtGui.QFrame.Plain
# 22 = QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain
# 20 = QtGui.QFrame.HLine | QtGui.QFrame.Plain
# 52 = QtGui.QFrame.HLine | QtGui.QFrame.Sunken
# 53 = QtGui.QFrame.VLine | QtGui.QFrame.Sunken | random_line_split |
|
styles.py | # -*- coding: utf-8 -*-
# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.
# Standard library imports :
import platform
# Third party imports :
from PyQt5.QtGui import QIcon, QFont, QFontDatabase
from PyQt5.QtCore import QSize
class StyleDB(object):
def __init__(self):
# ---- frame
self.frame = 22
self.HLine = 52
self.VLine = 53
self.sideBarWidth = 275
# ----- colors
self.red = '#C83737'
self.lightgray = '#E6E6E6'
self.rain = '#0000CC'
self.snow = '0.7'
self.wlvl = '#0000CC' # '#000099'
if platform.system() == 'Windows':
self.font1 = QFont('Segoe UI', 11) # Calibri, Cambria
self.font_console = QFont('Segoe UI', 9)
self.font_menubar = QFont('Segoe UI', 10)
elif platform.system() == 'Linux':
self.font1 = QFont('Ubuntu', 11)
self.font_console = QFont('Ubuntu', 9)
self.font_menubar = QFont('Ubuntu', 10)
# database = QFontDatabase()
# print database.families()
if platform.system() == 'Windows':
s | elif platform.system() == 'Linux':
self.fontfamily = "Ubuntu"
# self.fontSize1.setPointSize(11)
# 17 = QtGui.QFrame.Box | QtGui.QFrame.Plain
# 22 = QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain
# 20 = QtGui.QFrame.HLine | QtGui.QFrame.Plain
# 52 = QtGui.QFrame.HLine | QtGui.QFrame.Sunken
# 53 = QtGui.QFrame.VLine | QtGui.QFrame.Sunken
| elf.fontfamily = "Segoe UI" # "Cambria" #"Calibri" #"Segoe UI""
| conditional_block |
node_group.py | #!/usr/bin/env python
# This demonstrates node group configurations.
#
# Node groups can be defined with the syntax "-g N@IP0,IP1-IP2,IP3".
# This says to create a group of N nodes with IPs IP0, IP1, ..., IP2,
# IP3. Running it with deterministic IPs causes lots of gratuitous IP
# reassignments. Running with --nd fixes this.
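# Example invocation (hypothetical): ./node_group.py -g 2@A,B-D --nd
# creates a group of two nodes, each able to host the IPs A, B, C and D.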
import ctdb_takeover
import sys
from optparse import make_option
import string
ctdb_takeover.process_args([
make_option("-g", "--group",
action="append", type="string", dest="groups",
help="define a node group using N@IPs syntax"),
])
def expand_range(r):
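    # e.g. expand_range("B-D") -> ["B", "C", "D"]; a plain "B" comes back as ["B"].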
sr = r.split("-", 1)
if len(sr) == 2:
all = string.ascii_uppercase + string.ascii_lowercase
sr = list(all[all.index(sr[0]):all.index(sr[1])+1])
return sr
def add_node_group(s):
(count, ips_str) = s.split("@", 1)
ips = [i for r in ips_str.split(",") \
for i in expand_range(r) if r != ""]
for i in range(int(count)):
c.add_node(ctdb_takeover.Node(ips))
c = ctdb_takeover.Cluster()
if ctdb_takeover.options.groups is None:
print "Error: no node groups defined."
sys.exit(1)
for g in ctdb_takeover.options.groups:
add_node_group(g)
| c.recover()
c.random_iterations() | random_line_split |
|
node_group.py | #!/usr/bin/env python
# This demonstrates node group configurations.
#
# Node groups can be defined with the syntax "-g N@IP0,IP1-IP2,IP3".
# This says to create a group of N nodes with IPs IP0, IP1, ..., IP2,
# IP3. Running it with deterministic IPs causes lots of gratuitous IP
# reassignments. Running with --nd fixes this.
import ctdb_takeover
import sys
from optparse import make_option
import string
ctdb_takeover.process_args([
make_option("-g", "--group",
action="append", type="string", dest="groups",
help="define a node group using N@IPs syntax"),
])
def expand_range(r):
sr = r.split("-", 1)
if len(sr) == 2:
all = string.ascii_uppercase + string.ascii_lowercase
sr = list(all[all.index(sr[0]):all.index(sr[1])+1])
return sr
def | (s):
(count, ips_str) = s.split("@", 1)
ips = [i for r in ips_str.split(",") \
for i in expand_range(r) if r != ""]
for i in range(int(count)):
c.add_node(ctdb_takeover.Node(ips))
c = ctdb_takeover.Cluster()
if ctdb_takeover.options.groups is None:
print "Error: no node groups defined."
sys.exit(1)
for g in ctdb_takeover.options.groups:
add_node_group(g)
c.recover()
c.random_iterations()
| add_node_group | identifier_name |
node_group.py | #!/usr/bin/env python
# This demonstrates node group configurations.
#
# Node groups can be defined with the syntax "-g N@IP0,IP1-IP2,IP3".
# This says to create a group of N nodes with IPs IP0, IP1, ..., IP2,
# IP3. Running it with deterministic IPs causes lots of gratuitous IP
# reassignments. Running with --nd fixes this.
import ctdb_takeover
import sys
from optparse import make_option
import string
ctdb_takeover.process_args([
make_option("-g", "--group",
action="append", type="string", dest="groups",
help="define a node group using N@IPs syntax"),
])
def expand_range(r):
|
def add_node_group(s):
(count, ips_str) = s.split("@", 1)
ips = [i for r in ips_str.split(",") \
for i in expand_range(r) if r != ""]
for i in range(int(count)):
c.add_node(ctdb_takeover.Node(ips))
c = ctdb_takeover.Cluster()
if ctdb_takeover.options.groups is None:
print "Error: no node groups defined."
sys.exit(1)
for g in ctdb_takeover.options.groups:
add_node_group(g)
c.recover()
c.random_iterations()
| sr = r.split("-", 1)
if len(sr) == 2:
all = string.ascii_uppercase + string.ascii_lowercase
sr = list(all[all.index(sr[0]):all.index(sr[1])+1])
return sr | identifier_body |
node_group.py | #!/usr/bin/env python
# This demonstrates node group configurations.
#
# Node groups can be defined with the syntax "-g N@IP0,IP1-IP2,IP3".
# This says to create a group of N nodes with IPs IP0, IP1, ..., IP2,
# IP3. Running it with deterministic IPs causes lots of gratuitous IP
# reassignments. Running with --nd fixes this.
import ctdb_takeover
import sys
from optparse import make_option
import string
ctdb_takeover.process_args([
make_option("-g", "--group",
action="append", type="string", dest="groups",
help="define a node group using N@IPs syntax"),
])
def expand_range(r):
sr = r.split("-", 1)
if len(sr) == 2:
all = string.ascii_uppercase + string.ascii_lowercase
sr = list(all[all.index(sr[0]):all.index(sr[1])+1])
return sr
def add_node_group(s):
(count, ips_str) = s.split("@", 1)
ips = [i for r in ips_str.split(",") \
for i in expand_range(r) if r != ""]
for i in range(int(count)):
c.add_node(ctdb_takeover.Node(ips))
c = ctdb_takeover.Cluster()
if ctdb_takeover.options.groups is None:
print "Error: no node groups defined."
sys.exit(1)
for g in ctdb_takeover.options.groups:
|
c.recover()
c.random_iterations()
| add_node_group(g) | conditional_block |
sky_box_actor.js | (function (exports) {
exports.SkyBoxActor = SkyBoxActor;
// The instance of this class is sent through the network.
function SkyBoxActor(actorInfo, actorManager) | ;
SkyBoxActor.prototype = Object.create(exports.Actor.prototype);
SkyBoxActor.prototype.constructor = exports.Actor;
exports.ActorManager.setCreator(
exports.SkyBoxActorInfo.prototype.type,
function(actorInfo, actorManager){
return new exports.SkyBoxActor(actorInfo, actorManager);
}
);
})(typeof teien === 'undefined' ? module.exports : teien); | {
exports.Actor.call(this, actorInfo, actorManager);
var cShape = new Ammo.btSphereShape(1);
var inertia = new Ammo.btVector3();
cShape.calculateLocalInertia(0, inertia);
this.physicsState = new exports.PhysicsState(actorManager.physics);
this.physicsState.rigidBody = new Ammo.btRigidBody(0, this, cShape, inertia);
} | identifier_body |
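The `setCreator` call near the end of this file registers a factory keyed by the actor-info type, so the manager can rebuild the correct actor class from data received over the network. A rough Python analogue of that registry pattern (all names below are illustrative, not part of the teien API):

# Illustrative factory registry mirroring ActorManager.setCreator above.
# Hypothetical names -- a pattern sketch, not the actual teien API.
_creators = {}

def set_creator(actor_type, factory):
    """Register a factory callable under a type key."""
    _creators[actor_type] = factory

def create_actor(actor_info, actor_manager):
    """Look up the factory for this actor's type and build it."""
    return _creators[actor_info["type"]](actor_info, actor_manager)

set_creator("sky_box", lambda info, mgr: ("SkyBoxActor", info, mgr))
print(create_actor({"type": "sky_box"}, None))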
sky_box_actor.js | (function (exports) {
exports.SkyBoxActor = SkyBoxActor;
// The instance of this class is sent through the network.
function | (actorInfo, actorManager) {
exports.Actor.call(this, actorInfo, actorManager);
var cShape = new Ammo.btSphereShape(1);
var inertia = new Ammo.btVector3();
cShape.calculateLocalInertia(0, inertia);
this.physicsState = new exports.PhysicsState(actorManager.physics);
this.physicsState.rigidBody = new Ammo.btRigidBody(0, this, cShape, inertia);
};
SkyBoxActor.prototype = Object.create(exports.Actor.prototype);
SkyBoxActor.prototype.constructor = exports.Actor;
exports.ActorManager.setCreator(
exports.SkyBoxActorInfo.prototype.type,
function(actorInfo, actorManager){
return new exports.SkyBoxActor(actorInfo, actorManager);
}
);
})(typeof teien === 'undefined' ? module.exports : teien); | SkyBoxActor | identifier_name |
sky_box_actor.js | (function (exports) {
exports.SkyBoxActor = SkyBoxActor;
// The instance of this class is sent through the network.
function SkyBoxActor(actorInfo, actorManager) {
exports.Actor.call(this, actorInfo, actorManager);
var cShape = new Ammo.btSphereShape(1);
var inertia = new Ammo.btVector3();
cShape.calculateLocalInertia(0, inertia);
this.physicsState = new exports.PhysicsState(actorManager.physics);
this.physicsState.rigidBody = new Ammo.btRigidBody(0, this, cShape, inertia);
};
SkyBoxActor.prototype = Object.create(exports.Actor.prototype);
SkyBoxActor.prototype.constructor = exports.Actor;
exports.ActorManager.setCreator(
exports.SkyBoxActorInfo.prototype.type,
function(actorInfo, actorManager){ | );
})(typeof teien === 'undefined' ? module.exports : teien); | return new exports.SkyBoxActor(actorInfo, actorManager);
} | random_line_split |
connecting.rs | extern crate ftp;
use std::str;
use std::io::Cursor;
use ftp::FtpStream;
fn | () {
let mut ftp_stream = match FtpStream::connect("127.0.0.1", 21) {
Ok(s) => s,
Err(e) => panic!("{}", e)
};
match ftp_stream.login("username", "password") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
match ftp_stream.current_dir() {
Ok(dir) => println!("{}", dir),
Err(e) => panic!("{}", e)
}
match ftp_stream.change_dir("test_data") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
// An easy way to retrieve a file
let remote_file = match ftp_stream.simple_retr("ftpext-charter.txt") {
Ok(file) => file,
Err(e) => panic!("{}", e)
};
match str::from_utf8(&remote_file.into_inner()) {
Ok(s) => print!("{}", s),
Err(e) => panic!("Error reading file data: {}", e)
};
//Store a file
let file_data = format!("Some awesome file data man!!");
let reader: &mut Cursor<Vec<u8>> = &mut Cursor::new(file_data.into_bytes());
match ftp_stream.stor("my_random_file.txt", reader) {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
let _ = ftp_stream.quit();
}
| main | identifier_name |
connecting.rs | extern crate ftp;
use std::str;
use std::io::Cursor;
use ftp::FtpStream;
fn main() | {
let mut ftp_stream = match FtpStream::connect("127.0.0.1", 21) {
Ok(s) => s,
Err(e) => panic!("{}", e)
};
match ftp_stream.login("username", "password") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
match ftp_stream.current_dir() {
Ok(dir) => println!("{}", dir),
Err(e) => panic!("{}", e)
}
match ftp_stream.change_dir("test_data") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
// An easy way to retrieve a file
let remote_file = match ftp_stream.simple_retr("ftpext-charter.txt") {
Ok(file) => file,
Err(e) => panic!("{}", e)
};
match str::from_utf8(&remote_file.into_inner()) {
Ok(s) => print!("{}", s),
Err(e) => panic!("Error reading file data: {}", e)
};
//Store a file
let file_data = format!("Some awesome file data man!!");
let reader: &mut Cursor<Vec<u8>> = &mut Cursor::new(file_data.into_bytes());
match ftp_stream.stor("my_random_file.txt", reader) {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
let _ = ftp_stream.quit();
} | identifier_body |
|
connecting.rs | extern crate ftp;
use std::str;
use std::io::Cursor;
use ftp::FtpStream;
fn main() {
let mut ftp_stream = match FtpStream::connect("127.0.0.1", 21) {
Ok(s) => s,
Err(e) => panic!("{}", e)
};
match ftp_stream.login("username", "password") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
match ftp_stream.current_dir() {
Ok(dir) => println!("{}", dir),
Err(e) => panic!("{}", e)
}
match ftp_stream.change_dir("test_data") {
Ok(_) => (),
Err(e) => panic!("{}", e)
}
// An easy way to retrieve a file
let remote_file = match ftp_stream.simple_retr("ftpext-charter.txt") {
Ok(file) => file,
Err(e) => panic!("{}", e)
};
match str::from_utf8(&remote_file.into_inner()) {
Ok(s) => print!("{}", s),
Err(e) => panic!("Error reading file data: {}", e)
};
//Store a file
let file_data = format!("Some awesome file data man!!");
let reader: &mut Cursor<Vec<u8>> = &mut Cursor::new(file_data.into_bytes());
match ftp_stream.stor("my_random_file.txt", reader) {
Ok(_) => (),
Err(e) => panic!("{}", e) | let _ = ftp_stream.quit();
} | }
| random_line_split |