file_name (large_string, lengths 4 to 140) | prefix (large_string, lengths 0 to 39k) | suffix (large_string, lengths 0 to 36.1k) | middle (large_string, lengths 0 to 29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
import_visits.py | import csv, os
from Products.CMFCore.utils import getToolByName
def get_folder(self, type, name):
folder_brains = self.queryCatalog({'portal_type':type, 'title':name})[0]
return folder_brains.getObject()
def create_object_in_directory(self, container, type):
id = container.generateUniqueId(type)
container.invokeFactory(id=id, type_name=type)
return container[id]
def get_type_or_create(self, type, folder, cmp, val):
brains = self.queryCatalog({'portal_type':type, cmp:val})
if len(brains) > 0:
return brains[0].getObject()
else:
return create_object_in_directory(self, folder, type)
def set_reference(self, object, visit):
existing_visits = object.getVisits()
if visit not in existing_visits:
existing_visits.append(visit)
object.setVisits(existing_visits) | for row in reader:
if not row: continue
header = ['School', 'Student Name', 'Instrument', 'Student Email', 'Student Phone', 'Student Address', 'Student City',
'Student Zip', 'Contact Name', 'Contact Title', 'Contact Phone', 'Contact Email', 'Is Contact Alumni', 'Date']
school_name = row[0].strip().strip('"').strip("'")
student_name = row[1].strip().strip('"').strip("'")
instrument = row[2].strip().strip('"').strip("'")
student_email = row[3].strip().strip('"').strip("'")
student_phone = row[4].strip().strip('"').strip("'")
student_address = row[5].strip().strip('"').strip("'")
student_city = row[6].strip().strip('"').strip("'")
student_zip = row[7].strip().strip('"').strip("'")
contact_name = row[8].strip().strip('"').strip("'")
contact_title = row[9].strip().strip('"').strip("'")
contact_phone = row[10].strip().strip('"').strip("'")
contact_email = row[11].strip().strip('"').strip("'")
is_contact_alumni = row[12].strip().upper() == 'TRUE'
date = row[13].strip().strip('"').strip("'")
user_id = self.portal_membership.getAuthenticatedMember().id
student = get_type_or_create(self, 'Student', get_folder(self, 'StudentFolder', 'Students'), 'title', student_name)
contact = get_type_or_create(self, 'Contact', get_folder(self, 'ContactFolder', 'Contacts'), 'title', contact_name)
faculty = get_type_or_create(self, 'FacultyMember', get_folder(self, 'FacultyMemberFolder', 'FacultyMembers'), 'title', user_id)
school = get_type_or_create(self, 'School', get_folder(self, 'SchoolFolder', 'Schools'), 'title', school_name)
visit = create_object_in_directory(self, get_folder(self,'VisitFolder', 'Visits'), 'Visit')
set_reference(student, visit)
set_reference(contact, visit)
set_reference(faculty, visit)
set_reference(school, visit)
school.edit(title = school_name)
student.edit(title=student_name, instrument=instrument, email=student_email, phone=student_phone, address=student_address, city=student_city, zip=student_zip)
contact.edit(title=contact_name, type=contact_title, phone=contact_phone, email=contact_email, isAlumni=is_contact_alumni)
faculty.edit(title=user_id)
visit_title = "%s-%s-%s-%s" % (school_name, student_name, contact_name, user_id)
visit.edit(title=visit_title, dateOfVisit = date, schools = school, contacts = contact, students = student, facultymembers = faculty) |
def import_visits(self):
reader = csv.reader(self.REQUEST.get('csv-file-contents').split(os.linesep), delimiter="\t")
| random_line_split |
import_visits.py | import csv, os
from Products.CMFCore.utils import getToolByName
def get_folder(self, type, name):
folder_brains = self.queryCatalog({'portal_type':type, 'title':name})[0]
return folder_brains.getObject()
def create_object_in_directory(self, container, type):
id = container.generateUniqueId(type)
container.invokeFactory(id=id, type_name=type)
return container[id]
def get_type_or_create(self, type, folder, cmp, val):
brains = self.queryCatalog({'portal_type':type, cmp:val})
if len(brains) > 0:
return brains[0].getObject()
else:
|
def set_reference(self, object, visit):
existing_visits = object.getVisits()
if visit not in existing_visits:
existing_visits.append(visit)
object.setVisits(existing_visits)
def import_visits(self):
reader = csv.reader(self.REQUEST.get('csv-file-contents').split(os.linesep), delimiter="\t")
for row in reader:
if not row: continue
header = ['School', 'Student Name', 'Instrument', 'Student Email', 'Student Phone', 'Student Address', 'Student City',
'Student Zip', 'Contact Name', 'Contact Title', 'Contact Phone', 'Contact Email', 'Is Contact Alumni', 'Date']
school_name = row[0].strip().strip('"').strip("'")
student_name = row[1].strip().strip('"').strip("'")
instrument = row[2].strip().strip('"').strip("'")
student_email = row[3].strip().strip('"').strip("'")
student_phone = row[4].strip().strip('"').strip("'")
student_address = row[5].strip().strip('"').strip("'")
student_city = row[6].strip().strip('"').strip("'")
student_zip = row[7].strip().strip('"').strip("'")
contact_name = row[8].strip().strip('"').strip("'")
contact_title = row[9].strip().strip('"').strip("'")
contact_phone = row[10].strip().strip('"').strip("'")
contact_email = row[11].strip().strip('"').strip("'")
is_contact_alumni = row[12].strip().upper() == 'TRUE'
date = row[13].strip().strip('"').strip("'")
user_id = self.portal_membership.getAuthenticatedMember().id
student = get_type_or_create(self, 'Student', get_folder(self, 'StudentFolder', 'Students'), 'title', student_name)
contact = get_type_or_create(self, 'Contact', get_folder(self, 'ContactFolder', 'Contacts'), 'title', contact_name)
faculty = get_type_or_create(self, 'FacultyMember', get_folder(self, 'FacultyMemberFolder', 'FacultyMembers'), 'title', user_id)
school = get_type_or_create(self, 'School', get_folder(self, 'SchoolFolder', 'Schools'), 'title', school_name)
visit = create_object_in_directory(self, get_folder(self,'VisitFolder', 'Visits'), 'Visit')
set_reference(student, visit)
set_reference(contact, visit)
set_reference(faculty, visit)
set_reference(school, visit)
school.edit(title = school_name)
student.edit(title=student_name, instrument=instrument, email=student_email, phone=student_phone, address=student_address, city=student_city, zip=student_zip)
contact.edit(title=contact_name, type=contact_title, phone=contact_phone, email=contact_email, isAlumni=is_contact_alumni)
faculty.edit(title=user_id)
visit_title = "%s-%s-%s-%s" % (school_name, student_name, contact_name, user_id)
visit.edit(title=visit_title, dateOfVisit = date, schools = school, contacts = contact, students = student, facultymembers = faculty)
| return create_object_in_directory(self, folder, type) | conditional_block |
import_visits.py | import csv, os
from Products.CMFCore.utils import getToolByName
def get_folder(self, type, name):
folder_brains = self.queryCatalog({'portal_type':type, 'title':name})[0]
return folder_brains.getObject()
def create_object_in_directory(self, container, type):
id = container.generateUniqueId(type)
container.invokeFactory(id=id, type_name=type)
return container[id]
def get_type_or_create(self, type, folder, cmp, val):
brains = self.queryCatalog({'portal_type':type, cmp:val})
if len(brains) > 0:
return brains[0].getObject()
else:
return create_object_in_directory(self, folder, type)
def set_reference(self, object, visit):
existing_visits = object.getVisits()
if visit not in existing_visits:
existing_visits.append(visit)
object.setVisits(existing_visits)
def import_visits(self):
| reader = csv.reader(self.REQUEST.get('csv-file-contents').split(os.linesep), delimiter="\t")
for row in reader:
if not row: continue
header = ['School', 'Student Name', 'Instrument', 'Student Email', 'Student Phone', 'Student Address', 'Student City',
'Student Zip', 'Contact Name', 'Contact Title', 'Contact Phone', 'Contact Email', 'Is Contact Alumni', 'Date']
school_name = row[0].strip().strip('"').strip("'")
student_name = row[1].strip().strip('"').strip("'")
instrument = row[2].strip().strip('"').strip("'")
student_email = row[3].strip().strip('"').strip("'")
student_phone = row[4].strip().strip('"').strip("'")
student_address = row[5].strip().strip('"').strip("'")
student_city = row[6].strip().strip('"').strip("'")
student_zip = row[7].strip().strip('"').strip("'")
contact_name = row[8].strip().strip('"').strip("'")
contact_title = row[9].strip().strip('"').strip("'")
contact_phone = row[10].strip().strip('"').strip("'")
contact_email = row[11].strip().strip('"').strip("'")
is_contact_alumni = row[12].strip().upper() == 'TRUE'
date = row[13].strip().strip('"').strip("'")
user_id = self.portal_membership.getAuthenticatedMember().id
student = get_type_or_create(self, 'Student', get_folder(self, 'StudentFolder', 'Students'), 'title', student_name)
contact = get_type_or_create(self, 'Contact', get_folder(self, 'ContactFolder', 'Contacts'), 'title', contact_name)
faculty = get_type_or_create(self, 'FacultyMember', get_folder(self, 'FacultyMemberFolder', 'FacultyMembers'), 'title', user_id)
school = get_type_or_create(self, 'School', get_folder(self, 'SchoolFolder', 'Schools'), 'title', school_name)
visit = create_object_in_directory(self, get_folder(self,'VisitFolder', 'Visits'), 'Visit')
set_reference(student, visit)
set_reference(contact, visit)
set_reference(faculty, visit)
set_reference(school, visit)
school.edit(title = school_name)
student.edit(title=student_name, instrument=instrument, email=student_email, phone=student_phone, address=student_address, city=student_city, zip=student_zip)
contact.edit(title=contact_name, type=contact_title, phone=contact_phone, email=contact_email, isAlumni=is_contact_alumni)
faculty.edit(title=user_id)
visit_title = "%s-%s-%s-%s" % (school_name, student_name, contact_name, user_id)
visit.edit(title=visit_title, dateOfVisit = date, schools = school, contacts = contact, students = student, facultymembers = faculty) | identifier_body |
ffi.rs | //! Useful tools for C FFI
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
use std::ptr;
/// Call a main-like function with an argument vector
///
/// # Safety
///
/// Safety depends on the safety of the target FFI function.
pub unsafe fn run_with_args<S: AsRef<str>, Argv: IntoIterator<Item = S>>(
func: unsafe extern "C" fn(c_int, *mut *mut c_char) -> c_int,
args: Argv,
) -> i32 {
// 1. First clone the string values into safe storage.
let cstring_buffer: Vec<_> = args
.into_iter()
.map(|arg| CString::new(arg.as_ref()).expect("String to CString conversion failed"))
.collect();
// 2. Total number of args is fixed.
let argc = cstring_buffer.len() as c_int;
// 3. Prepare raw vector
let mut c_char_buffer: Vec<*mut c_char> = Vec::new();
for cstring in &cstring_buffer { | }
c_char_buffer.push(ptr::null_mut());
let c_argv = c_char_buffer.as_mut_ptr();
// 4. Now call the function
func(argc, c_argv) as i32
} | c_char_buffer.push(cstring.as_bytes_with_nul().as_ptr() as *mut c_char); | random_line_split |
ffi.rs | //! Useful tools for C FFI
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
use std::ptr;
/// Call a main-like function with an argument vector
///
/// # Safety
///
/// Safety depends on the safety of the target FFI function.
pub unsafe fn run_with_args<S: AsRef<str>, Argv: IntoIterator<Item = S>>(
func: unsafe extern "C" fn(c_int, *mut *mut c_char) -> c_int,
args: Argv,
) -> i32 | {
// 1. First clone the string values into safe storage.
let cstring_buffer: Vec<_> = args
.into_iter()
.map(|arg| CString::new(arg.as_ref()).expect("String to CString conversion failed"))
.collect();
// 2. Total number of args is fixed.
let argc = cstring_buffer.len() as c_int;
// 3. Prepare raw vector
let mut c_char_buffer: Vec<*mut c_char> = Vec::new();
for cstring in &cstring_buffer {
c_char_buffer.push(cstring.as_bytes_with_nul().as_ptr() as *mut c_char);
}
c_char_buffer.push(ptr::null_mut());
let c_argv = c_char_buffer.as_mut_ptr();
// 4. Now call the function
func(argc, c_argv) as i32
} | identifier_body |
ffi.rs | //! Useful tools for C FFI
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
use std::ptr;
/// Call a main-like function with an argument vector
///
/// # Safety
///
/// Safety depends on the safety of the target FFI function.
pub unsafe fn | <S: AsRef<str>, Argv: IntoIterator<Item = S>>(
func: unsafe extern "C" fn(c_int, *mut *mut c_char) -> c_int,
args: Argv,
) -> i32 {
// 1. First clone the string values into safe storage.
let cstring_buffer: Vec<_> = args
.into_iter()
.map(|arg| CString::new(arg.as_ref()).expect("String to CString conversion failed"))
.collect();
// 2. Total number of args is fixed.
let argc = cstring_buffer.len() as c_int;
// 3. Prepare raw vector
let mut c_char_buffer: Vec<*mut c_char> = Vec::new();
for cstring in &cstring_buffer {
c_char_buffer.push(cstring.as_bytes_with_nul().as_ptr() as *mut c_char);
}
c_char_buffer.push(ptr::null_mut());
let c_argv = c_char_buffer.as_mut_ptr();
// 4. Now call the function
func(argc, c_argv) as i32
}
| run_with_args | identifier_name |
shorten_path.py | #!/usr/bin/env python
import sys
import os
import re
try:
path = sys.argv[1]
length = int(sys.argv[2])
except:
print >>sys.stderr, "Usage: $0 <path> <length>"
sys.exit(1)
path = re.sub(os.getenv('HOME'), '~', path)
while len(path) > length:
dirs = path.split("/");
# Find the longest directory in the path.
max_index = -1
max_length = 3
for i in range(len(dirs) - 1):
if len(dirs[i]) > max_length:
|
# Shorten it by one character.
if max_index >= 0:
dirs[max_index] = dirs[max_index][:max_length-3] + ".."
path = "/".join(dirs)
# Didn't find anything to shorten. This is as good as it gets.
else:
break
print(path)
| max_index = i
max_length = len(dirs[i]) | conditional_block |
shorten_path.py | #!/usr/bin/env python
import sys
import os
import re
try:
path = sys.argv[1]
length = int(sys.argv[2])
except:
print >>sys.stderr, "Usage: $0 <path> <length>"
sys.exit(1)
path = re.sub(os.getenv('HOME'), '~', path)
while len(path) > length:
dirs = path.split("/");
# Find the longest directory in the path.
max_index = -1
max_length = 3
for i in range(len(dirs) - 1):
if len(dirs[i]) > max_length:
max_index = i
max_length = len(dirs[i])
# Shorten it by one character.
if max_index >= 0: | else:
break
print(path) | dirs[max_index] = dirs[max_index][:max_length-3] + ".."
path = "/".join(dirs)
# Didn't find anything to shorten. This is as good as it gets. | random_line_split |
landmarks_file.py | # ID-Fits
# Copyright (c) 2015 Institut National de l'Audiovisuel, INA, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import numpy as np
def readPtsLandmarkFile(filename, landmarks_number):
f = open(filename)
# Skip first 3 lines
for i in range(3):
|
# Read landmarks position
landmarks = np.empty((landmarks_number, 2), dtype=np.float)
for i in range(landmarks_number):
landmarks[i] = np.array([float(x) for x in f.readline().split()])
return landmarks
| f.readline() | conditional_block |
landmarks_file.py | # ID-Fits
# Copyright (c) 2015 Institut National de l'Audiovisuel, INA, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version. | # but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import numpy as np
def readPtsLandmarkFile(filename, landmarks_number):
f = open(filename)
# Skip first 3 lines
for i in range(3):
f.readline()
# Read landmarks position
landmarks = np.empty((landmarks_number, 2), dtype=np.float)
for i in range(landmarks_number):
landmarks[i] = np.array([float(x) for x in f.readline().split()])
return landmarks | #
# This library is distributed in the hope that it will be useful, | random_line_split |
landmarks_file.py | # ID-Fits
# Copyright (c) 2015 Institut National de l'Audiovisuel, INA, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import numpy as np
def readPtsLandmarkFile(filename, landmarks_number):
| f = open(filename)
# Skip first 3 lines
for i in range(3):
f.readline()
# Read landmarks position
landmarks = np.empty((landmarks_number, 2), dtype=np.float)
for i in range(landmarks_number):
landmarks[i] = np.array([float(x) for x in f.readline().split()])
return landmarks | identifier_body |
landmarks_file.py | # ID-Fits
# Copyright (c) 2015 Institut National de l'Audiovisuel, INA, All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import numpy as np
def | (filename, landmarks_number):
f = open(filename)
# Skip first 3 lines
for i in range(3):
f.readline()
# Read landmarks position
landmarks = np.empty((landmarks_number, 2), dtype=np.float)
for i in range(landmarks_number):
landmarks[i] = np.array([float(x) for x in f.readline().split()])
return landmarks
| readPtsLandmarkFile | identifier_name |
translate_to_glib.rs | use crate::{
analysis::{function_parameters::TransformationType, ref_mode::RefMode},
library::Transfer,
};
pub trait TranslateToGlib {
fn translate_to_glib(&self) -> String;
}
impl TranslateToGlib for TransformationType {
fn translate_to_glib(&self) -> String {
use self::TransformationType::*;
match *self {
ToGlibDirect { ref name } => name.clone(),
ToGlibScalar { ref name, .. } => format!("{}{}", name, ".to_glib()"),
ToGlibPointer {
ref name,
instance_parameter,
transfer,
ref_mode,
ref to_glib_extra,
ref pointer_cast,
ref explicit_target_type,
in_trait,
nullable,
} => {
let (left, right) = to_glib_xxx(transfer, ref_mode, explicit_target_type);
let to_glib_extra = if nullable && !to_glib_extra.is_empty() {
format!(".map(|p| p{})", to_glib_extra)
} else {
to_glib_extra.clone()
};
if instance_parameter {
format!(
"{}self{}{}{}",
left,
if in_trait { to_glib_extra } else { "".into() },
right,
pointer_cast
)
} else {
format!("{}{}{}{}{}", left, name, to_glib_extra, right, pointer_cast)
}
}
ToGlibBorrow => "/*Not applicable conversion Borrow*/".to_owned(),
ToGlibUnknown { ref name } => format!("/*Unknown conversion*/{}", name),
ToSome(ref name) => format!("Some({})", name),
IntoRaw(ref name) => format!("Box_::into_raw({}) as *mut _", name),
_ => unreachable!("Unexpected transformation type {:?}", self),
}
}
}
fn to_glib_xxx(
transfer: Transfer,
ref_mode: RefMode,
explicit_target_type: &str,
) -> (String, &'static str) | {
use self::Transfer::*;
match transfer {
None => {
match ref_mode {
RefMode::None => ("".into(), ".to_glib_none_mut().0"), //unreachable!(),
RefMode::ByRef => {
if explicit_target_type.is_empty() {
("".into(), ".to_glib_none().0")
} else {
(
format!("ToGlibPtr::<{}>::to_glib_none(", explicit_target_type),
").0",
)
}
}
RefMode::ByRefMut => ("".into(), ".to_glib_none_mut().0"),
RefMode::ByRefImmut => ("mut_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefConst => ("const_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefFake => ("".into(), ""), //unreachable!(),
}
}
Full => ("".into(), ".to_glib_full()"),
Container => ("".into(), ".to_glib_container().0"),
}
} | identifier_body |
translate_to_glib.rs | use crate::{
analysis::{function_parameters::TransformationType, ref_mode::RefMode},
library::Transfer,
};
pub trait TranslateToGlib {
fn translate_to_glib(&self) -> String;
}
impl TranslateToGlib for TransformationType {
fn translate_to_glib(&self) -> String {
use self::TransformationType::*;
match *self {
ToGlibDirect { ref name } => name.clone(),
ToGlibScalar { ref name, .. } => format!("{}{}", name, ".to_glib()"),
ToGlibPointer {
ref name,
instance_parameter,
transfer,
ref_mode,
ref to_glib_extra,
ref pointer_cast,
ref explicit_target_type,
in_trait,
nullable,
} => { | format!(".map(|p| p{})", to_glib_extra)
} else {
to_glib_extra.clone()
};
if instance_parameter {
format!(
"{}self{}{}{}",
left,
if in_trait { to_glib_extra } else { "".into() },
right,
pointer_cast
)
} else {
format!("{}{}{}{}{}", left, name, to_glib_extra, right, pointer_cast)
}
}
ToGlibBorrow => "/*Not applicable conversion Borrow*/".to_owned(),
ToGlibUnknown { ref name } => format!("/*Unknown conversion*/{}", name),
ToSome(ref name) => format!("Some({})", name),
IntoRaw(ref name) => format!("Box_::into_raw({}) as *mut _", name),
_ => unreachable!("Unexpected transformation type {:?}", self),
}
}
}
fn to_glib_xxx(
transfer: Transfer,
ref_mode: RefMode,
explicit_target_type: &str,
) -> (String, &'static str) {
use self::Transfer::*;
match transfer {
None => {
match ref_mode {
RefMode::None => ("".into(), ".to_glib_none_mut().0"), //unreachable!(),
RefMode::ByRef => {
if explicit_target_type.is_empty() {
("".into(), ".to_glib_none().0")
} else {
(
format!("ToGlibPtr::<{}>::to_glib_none(", explicit_target_type),
").0",
)
}
}
RefMode::ByRefMut => ("".into(), ".to_glib_none_mut().0"),
RefMode::ByRefImmut => ("mut_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefConst => ("const_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefFake => ("".into(), ""), //unreachable!(),
}
}
Full => ("".into(), ".to_glib_full()"),
Container => ("".into(), ".to_glib_container().0"),
}
} | let (left, right) = to_glib_xxx(transfer, ref_mode, explicit_target_type);
let to_glib_extra = if nullable && !to_glib_extra.is_empty() { | random_line_split |
translate_to_glib.rs | use crate::{
analysis::{function_parameters::TransformationType, ref_mode::RefMode},
library::Transfer,
};
pub trait TranslateToGlib {
fn translate_to_glib(&self) -> String;
}
impl TranslateToGlib for TransformationType {
fn translate_to_glib(&self) -> String {
use self::TransformationType::*;
match *self {
ToGlibDirect { ref name } => name.clone(),
ToGlibScalar { ref name, .. } => format!("{}{}", name, ".to_glib()"),
ToGlibPointer {
ref name,
instance_parameter,
transfer,
ref_mode,
ref to_glib_extra,
ref pointer_cast,
ref explicit_target_type,
in_trait,
nullable,
} => {
let (left, right) = to_glib_xxx(transfer, ref_mode, explicit_target_type);
let to_glib_extra = if nullable && !to_glib_extra.is_empty() {
format!(".map(|p| p{})", to_glib_extra)
} else {
to_glib_extra.clone()
};
if instance_parameter {
format!(
"{}self{}{}{}",
left,
if in_trait { to_glib_extra } else { "".into() },
right,
pointer_cast
)
} else {
format!("{}{}{}{}{}", left, name, to_glib_extra, right, pointer_cast)
}
}
ToGlibBorrow => "/*Not applicable conversion Borrow*/".to_owned(),
ToGlibUnknown { ref name } => format!("/*Unknown conversion*/{}", name),
ToSome(ref name) => format!("Some({})", name),
IntoRaw(ref name) => format!("Box_::into_raw({}) as *mut _", name),
_ => unreachable!("Unexpected transformation type {:?}", self),
}
}
}
fn | (
transfer: Transfer,
ref_mode: RefMode,
explicit_target_type: &str,
) -> (String, &'static str) {
use self::Transfer::*;
match transfer {
None => {
match ref_mode {
RefMode::None => ("".into(), ".to_glib_none_mut().0"), //unreachable!(),
RefMode::ByRef => {
if explicit_target_type.is_empty() {
("".into(), ".to_glib_none().0")
} else {
(
format!("ToGlibPtr::<{}>::to_glib_none(", explicit_target_type),
").0",
)
}
}
RefMode::ByRefMut => ("".into(), ".to_glib_none_mut().0"),
RefMode::ByRefImmut => ("mut_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefConst => ("const_override(".into(), ".to_glib_none().0)"),
RefMode::ByRefFake => ("".into(), ""), //unreachable!(),
}
}
Full => ("".into(), ".to_glib_full()"),
Container => ("".into(), ".to_glib_container().0"),
}
}
| to_glib_xxx | identifier_name |
weak.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Task-local reference counted smart pointers with weak pointer support
use mem::transmute;
use ops::Drop;
use cmp::{Eq, Ord};
use clone::{Clone, DeepClone};
use heap::free;
use ptr::read_ptr;
use option::{Option, Some, None};
use kinds::marker::NoSend;
struct RcBox<T> {
value: T,
strong: uint,
weak: uint,
no_send: NoSend
}
#[unsafe_no_drop_flag]
pub struct Strong<T> {
ptr: *mut RcBox<T>
}
impl<T> Strong<T> {
pub fn new(value: T) -> Strong<T> {
unsafe {
// The `Strong` pointers share a single `weak` reference count. This prevents the
// premature deallocation of the box when the last weak pointer is freed.
Strong { ptr: transmute(~RcBox { value: value, strong: 1, weak: 1, no_send: NoSend }) }
}
}
#[inline(always)]
pub fn borrow<'a>(&'a self) -> &'a T {
unsafe { &(*self.ptr).value }
}
pub fn downgrade(&self) -> Weak<T> |
}
#[unsafe_destructor]
impl<T> Drop for Strong<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).strong -= 1;
if (*self.ptr).strong == 0 {
read_ptr(self.borrow()); // destroy the contained object
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
}
impl<T> Clone for Strong<T> {
#[inline]
fn clone(&self) -> Strong<T> {
unsafe {
(*self.ptr).strong += 1;
Strong { ptr: self.ptr }
}
}
}
impl<T: DeepClone> DeepClone for Strong<T> {
#[inline]
fn deep_clone(&self) -> Strong<T> {
Strong::new(self.borrow().deep_clone())
}
}
impl<T: Eq> Eq for Strong<T> {
#[inline(always)]
fn eq(&self, other: &Strong<T>) -> bool { *self.borrow() == *other.borrow() }
#[inline(always)]
fn ne(&self, other: &Strong<T>) -> bool { *self.borrow() != *other.borrow() }
}
impl<T: Ord> Ord for Strong<T> {
#[inline(always)]
fn lt(&self, other: &Strong<T>) -> bool { *self.borrow() < *other.borrow() }
#[inline(always)]
fn le(&self, other: &Strong<T>) -> bool { *self.borrow() <= *other.borrow() }
#[inline(always)]
fn gt(&self, other: &Strong<T>) -> bool { *self.borrow() > *other.borrow() }
#[inline(always)]
fn ge(&self, other: &Strong<T>) -> bool { *self.borrow() >= *other.borrow() }
}
#[unsafe_no_drop_flag]
pub struct Weak<T> {
ptr: *mut RcBox<T>
}
impl<T> Weak<T> {
pub fn upgrade(&self) -> Option<Strong<T>> {
unsafe {
if (*self.ptr).strong == 0 {
None
} else {
(*self.ptr).strong += 1;
Some(Strong { ptr: self.ptr })
}
}
}
}
#[unsafe_destructor]
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
| {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
} | identifier_body |
weak.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Task-local reference counted smart pointers with weak pointer support
use mem::transmute;
use ops::Drop;
use cmp::{Eq, Ord};
use clone::{Clone, DeepClone};
use heap::free;
use ptr::read_ptr;
use option::{Option, Some, None};
use kinds::marker::NoSend;
struct RcBox<T> {
value: T,
strong: uint,
weak: uint,
no_send: NoSend
}
#[unsafe_no_drop_flag]
pub struct Strong<T> {
ptr: *mut RcBox<T>
}
impl<T> Strong<T> {
pub fn new(value: T) -> Strong<T> {
unsafe {
// The `Strong` pointers share a single `weak` reference count. This prevents the
// premature deallocation of the box when the last weak pointer is freed.
Strong { ptr: transmute(~RcBox { value: value, strong: 1, weak: 1, no_send: NoSend }) }
}
}
#[inline(always)]
pub fn borrow<'a>(&'a self) -> &'a T {
unsafe { &(*self.ptr).value }
}
pub fn downgrade(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
#[unsafe_destructor]
impl<T> Drop for Strong<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).strong -= 1;
if (*self.ptr).strong == 0 {
read_ptr(self.borrow()); // destroy the contained object
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
}
impl<T> Clone for Strong<T> {
#[inline]
fn clone(&self) -> Strong<T> {
unsafe {
(*self.ptr).strong += 1;
Strong { ptr: self.ptr }
}
}
}
impl<T: DeepClone> DeepClone for Strong<T> {
#[inline]
fn deep_clone(&self) -> Strong<T> {
Strong::new(self.borrow().deep_clone())
}
}
impl<T: Eq> Eq for Strong<T> {
#[inline(always)]
fn eq(&self, other: &Strong<T>) -> bool { *self.borrow() == *other.borrow() }
#[inline(always)]
fn ne(&self, other: &Strong<T>) -> bool { *self.borrow() != *other.borrow() }
}
impl<T: Ord> Ord for Strong<T> {
#[inline(always)]
fn lt(&self, other: &Strong<T>) -> bool { *self.borrow() < *other.borrow() }
#[inline(always)]
fn le(&self, other: &Strong<T>) -> bool { *self.borrow() <= *other.borrow() }
#[inline(always)]
fn gt(&self, other: &Strong<T>) -> bool { *self.borrow() > *other.borrow() }
#[inline(always)]
fn ge(&self, other: &Strong<T>) -> bool { *self.borrow() >= *other.borrow() }
}
#[unsafe_no_drop_flag]
pub struct Weak<T> {
ptr: *mut RcBox<T>
}
impl<T> Weak<T> {
pub fn upgrade(&self) -> Option<Strong<T>> {
unsafe {
if (*self.ptr).strong == 0 {
None
} else |
}
}
}
#[unsafe_destructor]
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
| {
(*self.ptr).strong += 1;
Some(Strong { ptr: self.ptr })
} | conditional_block |
weak.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Task-local reference counted smart pointers with weak pointer support
use mem::transmute;
use ops::Drop;
use cmp::{Eq, Ord};
use clone::{Clone, DeepClone};
use heap::free;
use ptr::read_ptr;
use option::{Option, Some, None};
use kinds::marker::NoSend;
struct RcBox<T> {
value: T,
strong: uint,
weak: uint,
no_send: NoSend
}
#[unsafe_no_drop_flag]
pub struct Strong<T> {
ptr: *mut RcBox<T>
}
impl<T> Strong<T> {
pub fn new(value: T) -> Strong<T> {
unsafe {
// The `Strong` pointers share a single `weak` reference count. This prevents the
// premature deallocation of the box when the last weak pointer is freed.
Strong { ptr: transmute(~RcBox { value: value, strong: 1, weak: 1, no_send: NoSend }) }
}
}
#[inline(always)]
pub fn borrow<'a>(&'a self) -> &'a T {
unsafe { &(*self.ptr).value }
}
pub fn downgrade(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
#[unsafe_destructor]
impl<T> Drop for Strong<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).strong -= 1;
if (*self.ptr).strong == 0 {
read_ptr(self.borrow()); // destroy the contained object
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
}
impl<T> Clone for Strong<T> {
#[inline]
fn clone(&self) -> Strong<T> {
unsafe {
(*self.ptr).strong += 1;
Strong { ptr: self.ptr }
}
} | #[inline]
fn deep_clone(&self) -> Strong<T> {
Strong::new(self.borrow().deep_clone())
}
}
impl<T: Eq> Eq for Strong<T> {
#[inline(always)]
fn eq(&self, other: &Strong<T>) -> bool { *self.borrow() == *other.borrow() }
#[inline(always)]
fn ne(&self, other: &Strong<T>) -> bool { *self.borrow() != *other.borrow() }
}
impl<T: Ord> Ord for Strong<T> {
#[inline(always)]
fn lt(&self, other: &Strong<T>) -> bool { *self.borrow() < *other.borrow() }
#[inline(always)]
fn le(&self, other: &Strong<T>) -> bool { *self.borrow() <= *other.borrow() }
#[inline(always)]
fn gt(&self, other: &Strong<T>) -> bool { *self.borrow() > *other.borrow() }
#[inline(always)]
fn ge(&self, other: &Strong<T>) -> bool { *self.borrow() >= *other.borrow() }
}
#[unsafe_no_drop_flag]
pub struct Weak<T> {
ptr: *mut RcBox<T>
}
impl<T> Weak<T> {
pub fn upgrade(&self) -> Option<Strong<T>> {
unsafe {
if (*self.ptr).strong == 0 {
None
} else {
(*self.ptr).strong += 1;
Some(Strong { ptr: self.ptr })
}
}
}
}
#[unsafe_destructor]
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
} | }
impl<T: DeepClone> DeepClone for Strong<T> { | random_line_split |
weak.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Task-local reference counted smart pointers with weak pointer support
use mem::transmute;
use ops::Drop;
use cmp::{Eq, Ord};
use clone::{Clone, DeepClone};
use heap::free;
use ptr::read_ptr;
use option::{Option, Some, None};
use kinds::marker::NoSend;
struct RcBox<T> {
value: T,
strong: uint,
weak: uint,
no_send: NoSend
}
#[unsafe_no_drop_flag]
pub struct Strong<T> {
ptr: *mut RcBox<T>
}
impl<T> Strong<T> {
pub fn new(value: T) -> Strong<T> {
unsafe {
// The `Strong` pointers share a single `weak` reference count. This prevents the
// premature deallocation of the box when the last weak pointer is freed.
Strong { ptr: transmute(~RcBox { value: value, strong: 1, weak: 1, no_send: NoSend }) }
}
}
#[inline(always)]
pub fn borrow<'a>(&'a self) -> &'a T {
unsafe { &(*self.ptr).value }
}
pub fn downgrade(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
#[unsafe_destructor]
impl<T> Drop for Strong<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).strong -= 1;
if (*self.ptr).strong == 0 {
read_ptr(self.borrow()); // destroy the contained object
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
}
impl<T> Clone for Strong<T> {
#[inline]
fn clone(&self) -> Strong<T> {
unsafe {
(*self.ptr).strong += 1;
Strong { ptr: self.ptr }
}
}
}
impl<T: DeepClone> DeepClone for Strong<T> {
#[inline]
fn deep_clone(&self) -> Strong<T> {
Strong::new(self.borrow().deep_clone())
}
}
impl<T: Eq> Eq for Strong<T> {
#[inline(always)]
fn eq(&self, other: &Strong<T>) -> bool { *self.borrow() == *other.borrow() }
#[inline(always)]
fn ne(&self, other: &Strong<T>) -> bool { *self.borrow() != *other.borrow() }
}
impl<T: Ord> Ord for Strong<T> {
#[inline(always)]
fn lt(&self, other: &Strong<T>) -> bool { *self.borrow() < *other.borrow() }
#[inline(always)]
fn le(&self, other: &Strong<T>) -> bool { *self.borrow() <= *other.borrow() }
#[inline(always)]
fn | (&self, other: &Strong<T>) -> bool { *self.borrow() > *other.borrow() }
#[inline(always)]
fn ge(&self, other: &Strong<T>) -> bool { *self.borrow() >= *other.borrow() }
}
#[unsafe_no_drop_flag]
pub struct Weak<T> {
ptr: *mut RcBox<T>
}
impl<T> Weak<T> {
pub fn upgrade(&self) -> Option<Strong<T>> {
unsafe {
if (*self.ptr).strong == 0 {
None
} else {
(*self.ptr).strong += 1;
Some(Strong { ptr: self.ptr })
}
}
}
}
#[unsafe_destructor]
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
if self.ptr != 0 as *mut RcBox<T> {
(*self.ptr).weak -= 1;
if (*self.ptr).weak == 0 {
free(self.ptr as *mut u8)
}
}
}
}
}
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
unsafe {
(*self.ptr).weak += 1;
Weak { ptr: self.ptr }
}
}
}
| gt | identifier_name |
service.py | """
System services
===============
This module provides low-level tools for managing system services,
using the ``service`` command. It supports both `upstart`_ services
and traditional SysV-style ``/etc/init.d/`` scripts.
.. _upstart: http://upstart.ubuntu.com/
"""
from __future__ import with_statement
from fabric.api import *
def is_running(service):
"""
Check if a service is running.
::
import fabtools
if fabtools.service.is_running('foo'):
print "Service foo is running!"
"""
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = sudo('service %(service)s status' % locals())
return res.succeeded
def start(service):
"""
Start a service.
::
import fabtools
# Start service if it is not running
if not fabtools.service.is_running('foo'):
fabtools.service.start('foo')
"""
sudo('service %(service)s start' % locals())
def stop(service):
"""
Stop a service.
::
import fabtools
# Stop service if it is running
if fabtools.service.is_running('foo'):
fabtools.service.stop('foo')
"""
sudo('service %(service)s stop' % locals())
def restart(service):
"""
Restart a service.
::
import fabtools
# Start service, or restart it if it is already running
if fabtools.service.is_running('foo'):
fabtools.service.restart('foo')
else:
fabtools.service.start('foo')
"""
sudo('service %(service)s restart' % locals())
def reload(service):
"""
Reload a service.
::
import fabtools
# Reload service
fabtools.service.reload('foo')
.. warning::
The service needs to support the ``reload`` operation.
"""
sudo('service %(service)s reload' % locals())
def force_reload(service):
| """
Force reload a service.
::
import fabtools
# Force reload service
fabtools.service.force_reload('foo')
.. warning::
The service needs to support the ``force-reload`` operation.
"""
sudo('service %(service)s force-reload' % locals()) | identifier_body |
service.py | """
System services
===============
This module provides low-level tools for managing system services,
using the ``service`` command. It supports both `upstart`_ services
and traditional SysV-style ``/etc/init.d/`` scripts.
.. _upstart: http://upstart.ubuntu.com/
"""
from __future__ import with_statement
from fabric.api import *
def is_running(service):
"""
Check if a service is running.
::
import fabtools
if fabtools.service.is_running('foo'):
print "Service foo is running!"
"""
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = sudo('service %(service)s status' % locals())
return res.succeeded
def start(service):
"""
Start a service.
::
import fabtools
# Start service if it is not running
if not fabtools.service.is_running('foo'):
fabtools.service.start('foo')
"""
sudo('service %(service)s start' % locals())
def | (service):
"""
Stop a service.
::
import fabtools
# Stop service if it is running
if fabtools.service.is_running('foo'):
fabtools.service.stop('foo')
"""
sudo('service %(service)s stop' % locals())
def restart(service):
"""
Restart a service.
::
import fabtools
# Start service, or restart it if it is already running
if fabtools.service.is_running('foo'):
fabtools.service.restart('foo')
else:
fabtools.service.start('foo')
"""
sudo('service %(service)s restart' % locals())
def reload(service):
"""
Reload a service.
::
import fabtools
# Reload service
fabtools.service.reload('foo')
.. warning::
The service needs to support the ``reload`` operation.
"""
sudo('service %(service)s reload' % locals())
def force_reload(service):
"""
Force reload a service.
::
import fabtools
# Force reload service
fabtools.service.force_reload('foo')
.. warning::
The service needs to support the ``force-reload`` operation.
"""
sudo('service %(service)s force-reload' % locals())
| stop | identifier_name |
service.py | """
System services
===============
This module provides low-level tools for managing system services,
using the ``service`` command. It supports both `upstart`_ services
and traditional SysV-style ``/etc/init.d/`` scripts.
.. _upstart: http://upstart.ubuntu.com/
"""
from __future__ import with_statement
from fabric.api import *
def is_running(service):
"""
Check if a service is running.
::
import fabtools
if fabtools.service.is_running('foo'):
print "Service foo is running!"
"""
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = sudo('service %(service)s status' % locals())
return res.succeeded
def start(service):
"""
Start a service.
::
import fabtools
# Start service if it is not running | fabtools.service.start('foo')
"""
sudo('service %(service)s start' % locals())
def stop(service):
"""
Stop a service.
::
import fabtools
# Stop service if it is running
if fabtools.service.is_running('foo'):
fabtools.service.stop('foo')
"""
sudo('service %(service)s stop' % locals())
def restart(service):
"""
Restart a service.
::
import fabtools
# Start service, or restart it if it is already running
if fabtools.service.is_running('foo'):
fabtools.service.restart('foo')
else:
fabtools.service.start('foo')
"""
sudo('service %(service)s restart' % locals())
def reload(service):
"""
Reload a service.
::
import fabtools
# Reload service
fabtools.service.reload('foo')
.. warning::
The service needs to support the ``reload`` operation.
"""
sudo('service %(service)s reload' % locals())
def force_reload(service):
"""
Force reload a service.
::
import fabtools
# Force reload service
fabtools.service.force_reload('foo')
.. warning::
The service needs to support the ``force-reload`` operation.
"""
sudo('service %(service)s force-reload' % locals()) | if not fabtools.service.is_running('foo'): | random_line_split |
import_onnx.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
from .... import symbol
from .... import ndarray as nd
from ....base import string_types
from .import_helper import _convert_map as convert_map
class | (object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._num_input = 0
self._num_param = 0
def _convert_operator(self, node_name, op_name, attrs, inputs):
"""Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
:param node_name : str
name of the node to be translated.
:param op_name : str
Operator name, such as Convolution, FullyConnected
:param attrs : dict
Dict of operator attributes
:param inputs: list
list of inputs to the operator
Returns
-------
:return mxnet_sym
Converted mxnet symbol
"""
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name, string_types):
new_op = getattr(symbol, op_name, None)
if not new_op:
raise RuntimeError("Unable to map op_name {} to sym".format(op_name))
if node_name is None:
mxnet_sym = new_op(*inputs, **new_attrs)
else:
mxnet_sym = new_op(name=node_name, *inputs, **new_attrs)
return mxnet_sym
return op_name
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :symbol.Symbol
The returned mxnet symbol
params : dict
A dict of name: nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
self._nodes[i.name] = symbol.Variable(name=i.name,
shape=self._params[i.name].shape)
else:
self._nodes[i.name] = symbol.Variable(name=i.name)
# For storing arg and aux params for the graph.
auxDict = {}
argDict = {}
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
inputs = [self._nodes[i] for i in node.input]
mxnet_sym = self._convert_operator(node_name, op_name, onnx_attr, inputs)
for k, i in zip(list(node.output), range(len(mxnet_sym.list_outputs()))):
self._nodes[k] = mxnet_sym[i]
# splitting params into args and aux params
for args in mxnet_sym.list_arguments():
if args in self._params:
argDict.update({args: nd.array(self._params[args])})
for aux in mxnet_sym.list_auxiliary_states():
if aux in self._params:
auxDict.update({aux: nd.array(self._params[aux])})
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = symbol.Group(out)
else:
out = out[0]
return out, argDict, auxDict
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. "
+ "Instructions to install - https://github.com/onnx/onnx")
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
# Needed for supporting python version > 3.5
if isinstance(attrs[a.name], bytes):
attrs[a.name] = attrs[a.name].decode(encoding='utf-8')
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
| GraphProto | identifier_name |
import_onnx.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
from .... import symbol
from .... import ndarray as nd
from ....base import string_types
from .import_helper import _convert_map as convert_map
class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._num_input = 0
self._num_param = 0
def _convert_operator(self, node_name, op_name, attrs, inputs):
"""Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
:param node_name : str
name of the node to be translated.
:param op_name : str
Operator name, such as Convolution, FullyConnected
:param attrs : dict
Dict of operator attributes
:param inputs: list
list of inputs to the operator
Returns
-------
:return mxnet_sym
Converted mxnet symbol
"""
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name, string_types):
new_op = getattr(symbol, op_name, None)
if not new_op:
raise RuntimeError("Unable to map op_name {} to sym".format(op_name))
if node_name is None:
mxnet_sym = new_op(*inputs, **new_attrs)
else:
mxnet_sym = new_op(name=node_name, *inputs, **new_attrs)
return mxnet_sym
return op_name
def from_onnx(self, graph):
|
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. "
+ "Instructions to install - https://github.com/onnx/onnx")
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
# Needed for supporting python version > 3.5
if isinstance(attrs[a.name], bytes):
attrs[a.name] = attrs[a.name].decode(encoding='utf-8')
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
| """Construct symbol from onnx graph.
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :symbol.Symbol
The returned mxnet symbol
params : dict
A dict of name: nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
self._nodes[i.name] = symbol.Variable(name=i.name,
shape=self._params[i.name].shape)
else:
self._nodes[i.name] = symbol.Variable(name=i.name)
# For storing arg and aux params for the graph.
auxDict = {}
argDict = {}
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
inputs = [self._nodes[i] for i in node.input]
mxnet_sym = self._convert_operator(node_name, op_name, onnx_attr, inputs)
for k, i in zip(list(node.output), range(len(mxnet_sym.list_outputs()))):
self._nodes[k] = mxnet_sym[i]
# splitting params into args and aux params
for args in mxnet_sym.list_arguments():
if args in self._params:
argDict.update({args: nd.array(self._params[args])})
for aux in mxnet_sym.list_auxiliary_states():
if aux in self._params:
auxDict.update({aux: nd.array(self._params[aux])})
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = symbol.Group(out)
else:
out = out[0]
return out, argDict, auxDict | identifier_body |
import_onnx.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
from .... import symbol
from .... import ndarray as nd
from ....base import string_types
from .import_helper import _convert_map as convert_map
class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._num_input = 0
self._num_param = 0
def _convert_operator(self, node_name, op_name, attrs, inputs):
"""Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
:param node_name : str
name of the node to be translated.
:param op_name : str
Operator name, such as Convolution, FullyConnected
:param attrs : dict
Dict of operator attributes
:param inputs: list
list of inputs to the operator
Returns
-------
:return mxnet_sym
Converted mxnet symbol
"""
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name, string_types):
new_op = getattr(symbol, op_name, None)
if not new_op:
raise RuntimeError("Unable to map op_name {} to sym".format(op_name))
if node_name is None:
mxnet_sym = new_op(*inputs, **new_attrs)
else:
mxnet_sym = new_op(name=node_name, *inputs, **new_attrs)
return mxnet_sym
return op_name
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :symbol.Symbol
The returned mxnet symbol
params : dict
A dict of name: nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
|
else:
self._nodes[i.name] = symbol.Variable(name=i.name)
# For storing arg and aux params for the graph.
auxDict = {}
argDict = {}
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
inputs = [self._nodes[i] for i in node.input]
mxnet_sym = self._convert_operator(node_name, op_name, onnx_attr, inputs)
for k, i in zip(list(node.output), range(len(mxnet_sym.list_outputs()))):
self._nodes[k] = mxnet_sym[i]
# splitting params into args and aux params
for args in mxnet_sym.list_arguments():
if args in self._params:
argDict.update({args: nd.array(self._params[args])})
for aux in mxnet_sym.list_auxiliary_states():
if aux in self._params:
auxDict.update({aux: nd.array(self._params[aux])})
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = symbol.Group(out)
else:
out = out[0]
return out, argDict, auxDict
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. "
+ "Instructions to install - https://github.com/onnx/onnx")
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
# Needed for supporting python version > 3.5
if isinstance(attrs[a.name], bytes):
attrs[a.name] = attrs[a.name].decode(encoding='utf-8')
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
| self._nodes[i.name] = symbol.Variable(name=i.name,
shape=self._params[i.name].shape) | conditional_block |
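A minimal usage sketch for the GraphProto converter in these rows (a hedged illustration: it assumes the onnx package is installed, and the model path and node below are hypothetical):
import onnx
from onnx import helper
# onnx.load() returns a ModelProto; its .graph attribute is the GraphProto
# message that from_onnx() expects.
model = onnx.load("model.onnx")  # hypothetical path
converter = GraphProto()
sym, arg_params, aux_params = converter.from_onnx(model.graph)
# sym is the converted MXNet symbol; arg_params/aux_params are the
# name -> nd.array dicts returned as argDict/auxDict above.
# _parse_attr() flattens AttributeProto messages into a plain dict:
node = helper.make_node("LeakyRelu", ["x"], ["y"], alpha=0.1)
attrs = converter._parse_attr(node.attribute)
# attrs is approximately {"alpha": 0.1} (up to float32 rounding)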
import_onnx.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
from .... import symbol
from .... import ndarray as nd
from ....base import string_types
from .import_helper import _convert_map as convert_map
class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._num_input = 0
self._num_param = 0
def _convert_operator(self, node_name, op_name, attrs, inputs):
"""Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
:param node_name : str
name of the node to be translated.
:param op_name : str
Operator name, such as Convolution, FullyConnected
:param attrs : dict
Dict of operator attributes
:param inputs: list
list of inputs to the operator
Returns
-------
:return mxnet_sym
Converted mxnet symbol
"""
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name, string_types):
new_op = getattr(symbol, op_name, None)
if not new_op: | if node_name is None:
mxnet_sym = new_op(*inputs, **new_attrs)
else:
mxnet_sym = new_op(name=node_name, *inputs, **new_attrs)
return mxnet_sym
return op_name
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :symbol.Symbol
The returned mxnet symbol
params : dict
A dict of name: nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
self._nodes[i.name] = symbol.Variable(name=i.name,
shape=self._params[i.name].shape)
else:
self._nodes[i.name] = symbol.Variable(name=i.name)
# For storing arg and aux params for the graph.
auxDict = {}
argDict = {}
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
inputs = [self._nodes[i] for i in node.input]
mxnet_sym = self._convert_operator(node_name, op_name, onnx_attr, inputs)
for k, i in zip(list(node.output), range(len(mxnet_sym.list_outputs()))):
self._nodes[k] = mxnet_sym[i]
# splitting params into args and aux params
for args in mxnet_sym.list_arguments():
if args in self._params:
argDict.update({args: nd.array(self._params[args])})
for aux in mxnet_sym.list_auxiliary_states():
if aux in self._params:
auxDict.update({aux: nd.array(self._params[aux])})
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = symbol.Group(out)
else:
out = out[0]
return out, argDict, auxDict
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError:
raise ImportError("Onnx and protobuf need to be installed. "
+ "Instructions to install - https://github.com/onnx/onnx")
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
# Needed for supporting python version > 3.5
if isinstance(attrs[a.name], bytes):
attrs[a.name] = attrs[a.name].decode(encoding='utf-8')
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs | raise RuntimeError("Unable to map op_name {} to sym".format(op_name)) | random_line_split |
VProgressLinear.js | import {
VFadeTransition,
VSlideXTransition
} from '~components/transitions'
export default {
name: 'v-progress-linear',
components: {
VFadeTransition,
VSlideXTransition
},
props: {
active: {
type: Boolean,
default: true
},
buffer: Boolean,
bufferValue: Number,
error: Boolean,
height: {
type: [Number, String],
default: 7
},
indeterminate: Boolean,
info: Boolean,
secondary: Boolean,
success: Boolean,
query: Boolean,
warning: Boolean,
value: {
type: [Number, String],
default: 0
},
colorFront: {
type: String,
default: null
},
colorBack: {
type: String,
default: null
}
},
computed: {
classes () {
return {
'progress-linear--query': this.query,
'progress-linear--secondary': this.secondary,
'progress-linear--success': this.success,
'progress-linear--info': this.info,
'progress-linear--warning': this.warning,
'progress-linear--error': this.error
}
},
styles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
if (this.buffer) {
styles.width = `${this.bufferValue}%`
}
return styles
},
bufferStyles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
return styles
}
},
methods: {
genDeterminate (h) {
return h('div', {
ref: 'front',
class: ['progress-linear__bar__determinate', this.colorFront],
style: { width: `${this.value}%` }
})
},
genBar (h, name) {
return h('div', {
class: [
'progress-linear__bar__indeterminate',
name,
this.colorFront
]
})
},
| (h) {
return h('div', {
ref: 'front',
class: {
'progress-linear__bar__indeterminate': true,
'progress-linear__bar__indeterminate--active': this.active
}
}, [
this.genBar(h, 'long'),
this.genBar(h, 'short')
])
}
},
render (h) {
const fade = h('v-fade-transition', [this.indeterminate && this.genIndeterminate(h)])
const slide = h('v-slide-x-transition', [!this.indeterminate && this.genDeterminate(h)])
const bar = h('div', { class: ['progress-linear__bar', this.colorBack], style: this.styles }, [fade, slide])
return h('div', {
class: ['progress-linear', this.classes],
style: { height: `${this.height}px` },
on: this.$listeners
}, [bar])
}
}
| genIndeterminate | identifier_name |
VProgressLinear.js | import {
VFadeTransition,
VSlideXTransition
} from '~components/transitions'
export default {
name: 'v-progress-linear',
components: {
VFadeTransition,
VSlideXTransition
},
props: {
active: {
type: Boolean,
default: true
},
buffer: Boolean,
bufferValue: Number,
error: Boolean,
height: {
type: [Number, String],
default: 7
},
indeterminate: Boolean,
info: Boolean,
secondary: Boolean,
success: Boolean,
query: Boolean,
warning: Boolean,
value: {
type: [Number, String],
default: 0
},
colorFront: {
type: String,
default: null
},
colorBack: {
type: String,
default: null
}
},
computed: {
classes () {
return {
'progress-linear--query': this.query,
'progress-linear--secondary': this.secondary,
'progress-linear--success': this.success,
'progress-linear--info': this.info,
'progress-linear--warning': this.warning,
'progress-linear--error': this.error
}
},
styles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
if (this.buffer) {
styles.width = `${this.bufferValue}%`
}
return styles
},
bufferStyles () {
const styles = {}
if (!this.active) |
return styles
}
},
methods: {
genDeterminate (h) {
return h('div', {
ref: 'front',
class: ['progress-linear__bar__determinate', this.colorFront],
style: { width: `${this.value}%` }
})
},
genBar (h, name) {
return h('div', {
class: [
'progress-linear__bar__indeterminate',
name,
this.colorFront
]
})
},
genIndeterminate (h) {
return h('div', {
ref: 'front',
class: {
'progress-linear__bar__indeterminate': true,
'progress-linear__bar__indeterminate--active': this.active
}
}, [
this.genBar(h, 'long'),
this.genBar(h, 'short')
])
}
},
render (h) {
const fade = h('v-fade-transition', [this.indeterminate && this.genIndeterminate(h)])
const slide = h('v-slide-x-transition', [!this.indeterminate && this.genDeterminate(h)])
const bar = h('div', { class: ['progress-linear__bar', this.colorBack], style: this.styles }, [fade, slide])
return h('div', {
class: ['progress-linear', this.classes],
style: { height: `${this.height}px` },
on: this.$listeners
}, [bar])
}
}
| {
styles.height = 0
} | conditional_block |
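A hypothetical template usage of the component defined in these rows (prop names follow the props block above; the colour class values are illustrative assumptions):
<!-- determinate bar at 40%, 10px tall -->
<v-progress-linear :value="40" :height="10"
  color-front="blue" color-back="blue lighten-3" />
<!-- indeterminate variant -->
<v-progress-linear indeterminate />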
VProgressLinear.js | import {
VFadeTransition,
VSlideXTransition
} from '~components/transitions'
export default {
name: 'v-progress-linear',
components: {
VFadeTransition,
VSlideXTransition
},
props: {
active: {
type: Boolean,
default: true
}, | bufferValue: Number,
error: Boolean,
height: {
type: [Number, String],
default: 7
},
indeterminate: Boolean,
info: Boolean,
secondary: Boolean,
success: Boolean,
query: Boolean,
warning: Boolean,
value: {
type: [Number, String],
default: 0
},
colorFront: {
type: String,
default: null
},
colorBack: {
type: String,
default: null
}
},
computed: {
classes () {
return {
'progress-linear--query': this.query,
'progress-linear--secondary': this.secondary,
'progress-linear--success': this.success,
'progress-linear--info': this.info,
'progress-linear--warning': this.warning,
'progress-linear--error': this.error
}
},
styles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
if (this.buffer) {
styles.width = `${this.bufferValue}%`
}
return styles
},
bufferStyles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
return styles
}
},
methods: {
genDeterminate (h) {
return h('div', {
ref: 'front',
class: ['progress-linear__bar__determinate', this.colorFront],
style: { width: `${this.value}%` }
})
},
genBar (h, name) {
return h('div', {
class: [
'progress-linear__bar__indeterminate',
name,
this.colorFront
]
})
},
genIndeterminate (h) {
return h('div', {
ref: 'front',
class: {
'progress-linear__bar__indeterminate': true,
'progress-linear__bar__indeterminate--active': this.active
}
}, [
this.genBar(h, 'long'),
this.genBar(h, 'short')
])
}
},
render (h) {
const fade = h('v-fade-transition', [this.indeterminate && this.genIndeterminate(h)])
const slide = h('v-slide-x-transition', [!this.indeterminate && this.genDeterminate(h)])
const bar = h('div', { class: ['progress-linear__bar', this.colorBack], style: this.styles }, [fade, slide])
return h('div', {
class: ['progress-linear', this.classes],
style: { height: `${this.height}px` },
on: this.$listeners
}, [bar])
}
} | buffer: Boolean, | random_line_split |
VProgressLinear.js | import {
VFadeTransition,
VSlideXTransition
} from '~components/transitions'
export default {
name: 'v-progress-linear',
components: {
VFadeTransition,
VSlideXTransition
},
props: {
active: {
type: Boolean,
default: true
},
buffer: Boolean,
bufferValue: Number,
error: Boolean,
height: {
type: [Number, String],
default: 7
},
indeterminate: Boolean,
info: Boolean,
secondary: Boolean,
success: Boolean,
query: Boolean,
warning: Boolean,
value: {
type: [Number, String],
default: 0
},
colorFront: {
type: String,
default: null
},
colorBack: {
type: String,
default: null
}
},
computed: {
classes () {
return {
'progress-linear--query': this.query,
'progress-linear--secondary': this.secondary,
'progress-linear--success': this.success,
'progress-linear--info': this.info,
'progress-linear--warning': this.warning,
'progress-linear--error': this.error
}
},
styles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
if (this.buffer) {
styles.width = `${this.bufferValue}%`
}
return styles
},
bufferStyles () {
const styles = {}
if (!this.active) {
styles.height = 0
}
return styles
}
},
methods: {
genDeterminate (h) {
return h('div', {
ref: 'front',
class: ['progress-linear__bar__determinate', this.colorFront],
style: { width: `${this.value}%` }
})
},
genBar (h, name) {
return h('div', {
class: [
'progress-linear__bar__indeterminate',
name,
this.colorFront
]
})
},
genIndeterminate (h) |
},
render (h) {
const fade = h('v-fade-transition', [this.indeterminate && this.genIndeterminate(h)])
const slide = h('v-slide-x-transition', [!this.indeterminate && this.genDeterminate(h)])
const bar = h('div', { class: ['progress-linear__bar', this.colorBack], style: this.styles }, [fade, slide])
return h('div', {
class: ['progress-linear', this.classes],
style: { height: `${this.height}px` },
on: this.$listeners
}, [bar])
}
}
| {
return h('div', {
ref: 'front',
class: {
'progress-linear__bar__indeterminate': true,
'progress-linear__bar__indeterminate--active': this.active
}
}, [
this.genBar(h, 'long'),
this.genBar(h, 'short')
])
} | identifier_body |
test_context.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation. | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import openstack.common.context
from openstack.common.middleware import context
from openstack.common import test
class ContextMiddlewareTest(test.BaseTestCase):
def test_process_request(self):
req = mock.Mock()
app = mock.Mock()
options = mock.MagicMock()
ctx = mock.sentinel.context
with mock.patch.object(context.ContextMiddleware,
'make_context',
mock.Mock(return_value=ctx)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.process_request(req)
self.assertEqual(req.context, ctx)
def test_make_context(self):
app = mock.Mock()
options = mock.MagicMock()
with mock.patch.object(openstack.common.context.RequestContext,
'__init__',
mock.Mock(return_value=None)) as init:
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
init.assert_called_with(mock.sentinel.arg)
def test_make_explicit_context(self):
app = mock.Mock()
import_class = mock.Mock()
options = {'context_class': mock.sentinel.context_class}
with mock.patch('openstack.common.importutils.import_class',
mock.Mock(return_value=import_class)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
import_class.assert_called_with(mock.sentinel.arg)
class FilterFactoryTest(test.BaseTestCase):
def test_filter_factory(self):
global_conf = dict(sentinel=mock.sentinel.global_conf)
app = mock.sentinel.app
target = 'openstack.common.middleware.context.ContextMiddleware'
def check_ctx_middleware(arg_app, arg_conf):
self.assertEqual(app, arg_app)
self.assertEqual(global_conf['sentinel'], arg_conf['sentinel'])
return mock.DEFAULT
with mock.patch(target,
mock.Mock(return_value=mock.sentinel.ctx)) as mid:
mid.side_effect = check_ctx_middleware
filter = context.filter_factory(global_conf)
self.assertEqual(filter(app), mock.sentinel.ctx) | # All Rights Reserved. | random_line_split |
test_context.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import openstack.common.context
from openstack.common.middleware import context
from openstack.common import test
class ContextMiddlewareTest(test.BaseTestCase):
def test_process_request(self):
req = mock.Mock()
app = mock.Mock()
options = mock.MagicMock()
ctx = mock.sentinel.context
with mock.patch.object(context.ContextMiddleware,
'make_context',
mock.Mock(return_value=ctx)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.process_request(req)
self.assertEqual(req.context, ctx)
def test_make_context(self):
app = mock.Mock()
options = mock.MagicMock()
with mock.patch.object(openstack.common.context.RequestContext,
'__init__',
mock.Mock(return_value=None)) as init:
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
init.assert_called_with(mock.sentinel.arg)
def test_make_explicit_context(self):
|
class FilterFactoryTest(test.BaseTestCase):
def test_filter_factory(self):
global_conf = dict(sentinel=mock.sentinel.global_conf)
app = mock.sentinel.app
target = 'openstack.common.middleware.context.ContextMiddleware'
def check_ctx_middleware(arg_app, arg_conf):
self.assertEqual(app, arg_app)
self.assertEqual(global_conf['sentinel'], arg_conf['sentinel'])
return mock.DEFAULT
with mock.patch(target,
mock.Mock(return_value=mock.sentinel.ctx)) as mid:
mid.side_effect = check_ctx_middleware
filter = context.filter_factory(global_conf)
self.assertEqual(filter(app), mock.sentinel.ctx)
| app = mock.Mock()
import_class = mock.Mock()
options = {'context_class': mock.sentinel.context_class}
with mock.patch('openstack.common.importutils.import_class',
mock.Mock(return_value=import_class)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
import_class.assert_called_with(mock.sentinel.arg) | identifier_body |
test_context.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import openstack.common.context
from openstack.common.middleware import context
from openstack.common import test
class ContextMiddlewareTest(test.BaseTestCase):
def test_process_request(self):
req = mock.Mock()
app = mock.Mock()
options = mock.MagicMock()
ctx = mock.sentinel.context
with mock.patch.object(context.ContextMiddleware,
'make_context',
mock.Mock(return_value=ctx)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.process_request(req)
self.assertEqual(req.context, ctx)
def test_make_context(self):
app = mock.Mock()
options = mock.MagicMock()
with mock.patch.object(openstack.common.context.RequestContext,
'__init__',
mock.Mock(return_value=None)) as init:
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
init.assert_called_with(mock.sentinel.arg)
def | (self):
app = mock.Mock()
import_class = mock.Mock()
options = {'context_class': mock.sentinel.context_class}
with mock.patch('openstack.common.importutils.import_class',
mock.Mock(return_value=import_class)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
import_class.assert_called_with(mock.sentinel.arg)
class FilterFactoryTest(test.BaseTestCase):
def test_filter_factory(self):
global_conf = dict(sentinel=mock.sentinel.global_conf)
app = mock.sentinel.app
target = 'openstack.common.middleware.context.ContextMiddleware'
def check_ctx_middleware(arg_app, arg_conf):
self.assertEqual(app, arg_app)
self.assertEqual(global_conf['sentinel'], arg_conf['sentinel'])
return mock.DEFAULT
with mock.patch(target,
mock.Mock(return_value=mock.sentinel.ctx)) as mid:
mid.side_effect = check_ctx_middleware
filter = context.filter_factory(global_conf)
self.assertEqual(filter(app), mock.sentinel.ctx)
| test_make_explicit_context | identifier_name |
variables_15.js | var searchData=
[
['scroll',['scroll',['../select2_8js.html#a6e3896ca7181e81b7757bfcca39055ec',1,'select2.js']]],
['scrollbardimensions',['scrollBarDimensions',['../select2_8js.html#a30eb565a7710bd54761b6bfbcfd9d3fd',1,'select2.js']]],
['select',['select',['../select2_8js.html#ac07257b5178416a2fbbba7365d2703a6',1,'select2.js']]],
['select2',['Select2',['../select2_8js.html#affb4af66e7784ac044be37289c148953',1,'Select2(): select2.js'],['../select2_8js.html#a30a3359bfdd9ad6a698d6fd69a38a76a',1,'select2(): select2.js']]], | ]; | ['self',['self',['../select2_8js.html#ab3e74e211b41d329801623ae131d2585',1,'select2.js']]],
['singleselect2',['SingleSelect2',['../select2_8js.html#ab661105ae7b811b84224646465ea5c63',1,'select2.js']]],
['sizer',['sizer',['../select2_8js.html#a9db90058e07b269a04a8990251dab3c4',1,'select2.js']]],
['sn',['sn',['../j_query_8js.html#a70ece1b3f74db2cb3cb7e4b72d59b226',1,'jQuery.js']]] | random_line_split |
adt-tuple-struct.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Unit test for the "user substitutions" that are annotated on each
// node.
#![feature(nll)]
struct SomeStruct<T>(T);
fn no_annot() {
let c = 66;
SomeStruct(&c); | }
fn annot_underscore() {
let c = 66;
SomeStruct::<_>(&c);
}
fn annot_reference_any_lifetime() {
let c = 66;
SomeStruct::<&u32>(&c);
}
fn annot_reference_static_lifetime() {
let c = 66;
SomeStruct::<&'static u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime_ok<'a>(c: &'a u32) {
SomeStruct::<&'a u32>(c);
}
fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
let _closure = || {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
};
}
fn annot_reference_named_lifetime_in_closure_ok<'a>(c: &'a u32) {
let _closure = || {
SomeStruct::<&'a u32>(c);
};
}
fn main() { } | random_line_split |
|
adt-tuple-struct.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Unit test for the "user substitutions" that are annotated on each
// node.
#![feature(nll)]
struct SomeStruct<T>(T);
fn no_annot() {
let c = 66;
SomeStruct(&c);
}
fn annot_underscore() |
fn annot_reference_any_lifetime() {
let c = 66;
SomeStruct::<&u32>(&c);
}
fn annot_reference_static_lifetime() {
let c = 66;
SomeStruct::<&'static u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime_ok<'a>(c: &'a u32) {
SomeStruct::<&'a u32>(c);
}
fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
let _closure = || {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
};
}
fn annot_reference_named_lifetime_in_closure_ok<'a>(c: &'a u32) {
let _closure = || {
SomeStruct::<&'a u32>(c);
};
}
fn main() { }
| {
let c = 66;
SomeStruct::<_>(&c);
} | identifier_body |
adt-tuple-struct.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Unit test for the "user substitutions" that are annotated on each
// node.
#![feature(nll)]
struct | <T>(T);
fn no_annot() {
let c = 66;
SomeStruct(&c);
}
fn annot_underscore() {
let c = 66;
SomeStruct::<_>(&c);
}
fn annot_reference_any_lifetime() {
let c = 66;
SomeStruct::<&u32>(&c);
}
fn annot_reference_static_lifetime() {
let c = 66;
SomeStruct::<&'static u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
}
fn annot_reference_named_lifetime_ok<'a>(c: &'a u32) {
SomeStruct::<&'a u32>(c);
}
fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
let _closure = || {
let c = 66;
SomeStruct::<&'a u32>(&c); //~ ERROR
};
}
fn annot_reference_named_lifetime_in_closure_ok<'a>(c: &'a u32) {
let _closure = || {
SomeStruct::<&'a u32>(c);
};
}
fn main() { }
| SomeStruct | identifier_name |
menu.py | # GNU Enterprise Forms - wx 2.6 UI Driver - Menu widget
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: menu.py,v 1.6 2011/07/01 20:08:23 oleg Exp $
from src.gnue.forms.uidrivers.java.widgets._base import UIWidget
from src.gnue.forms.uidrivers.java.widgets._remote import MenuBar, Menu, PopupMenu
# =============================================================================
# Wrap an UI layer around a wxMenu widget
# =============================================================================
class UIMenu(UIWidget):
"""
Implements a menu object.
"""
# -------------------------------------------------------------------------
# Constructor
# -------------------------------------------------------------------------
def __init__(self, event):
UIWidget.__init__(self, event)
# -------------------------------------------------------------------------
# Create a menu widget
# -------------------------------------------------------------------------
def _create_widget_(self, event):
"""
Creates a new Menu widget.
"""
if self._gfObject.name == '__main_menu__':
if not self._form._features['GUI:MENUBAR:SUPPRESS'] and not self._uiForm.isEmbeded() and self._form.style != 'dialog':
assert self._uiForm.menuBar is None
# We do not set the menubar using main_window.SetMenuBar() here,
# because on OS X some menu items will get rearranged (like Quit,
# About, Help-menu ...). This rearrangement won't work if the
# menubar is set before the items are created.
self._container = self._uiForm.menuBar = MenuBar(self)
elif event.container is not None:
# Submenu
|
elif hasattr(self._gfObject.getParent().uiWidget, '_ui_set_context_menu_'):
self._container = PopupMenu(self, self._gfObject.label or '')
# table, tree and button widgets support the _ui_set_context_menu_() interface
self._gfObject.getParent().uiWidget._ui_set_context_menu_(self._container, self._gfObject.name)
return self._container
#def onPostInit(self):
"""
Sets up key accelerators for the context menu;
must run after the menu items have been created
"""
#parentUiWidget = self._gfObject.getParent().uiWidget
#
#if hasattr(parentUiWidget, '_ui_set_context_menu_'):
#
# accelerators = []
#
# # for context menu i should connect all accelerators manually @oleg
# for mi in self._gfObject.findChildrenOfType('GFMenuItem', includeSelf=False, allowAllChildren=True):
# hotkey = getattr(mi, 'hotkey', '')
# if hotkey:
# accel = wx.GetAccelFromString("dummy\t" + hotkey)
# if accel:
# accel.Set(accel.GetFlags(), accel.GetKeyCode(), mi.uiWidget.getId())
# accelerators.append(accel)
# if accelerators:
# aTable = wx.AcceleratorTable(accelerators)
# if parentUiWidget.widget.GetAcceleratorTable() != aTable:
# parentUiWidget.widget.SetAcceleratorTable(aTable)
#super(UIMenu, self).onPostInit()
# =============================================================================
# Configuration data
# =============================================================================
configuration = {
'baseClass': UIMenu,
'provides' : 'GFMenu',
'container': 1,
}
| self._container = Menu(self, self._gfObject.label or '')
if isinstance(event.container, (MenuBar, Menu)):
event.container.uiAddMenu(self._container) | conditional_block |
menu.py | # GNU Enterprise Forms - wx 2.6 UI Driver - Menu widget
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: menu.py,v 1.6 2011/07/01 20:08:23 oleg Exp $
from src.gnue.forms.uidrivers.java.widgets._base import UIWidget
from src.gnue.forms.uidrivers.java.widgets._remote import MenuBar, Menu, PopupMenu
# =============================================================================
# Wrap an UI layer around a wxMenu widget
# =============================================================================
class UIMenu(UIWidget):
"""
Implements a menu object.
"""
# -------------------------------------------------------------------------
# Constructor
# -------------------------------------------------------------------------
def __init__(self, event):
UIWidget.__init__(self, event)
# -------------------------------------------------------------------------
# Create a menu widget
# -------------------------------------------------------------------------
def _create_widget_(self, event):
"""
Creates a new Menu widget.
"""
if self._gfObject.name == '__main_menu__':
if not self._form._features['GUI:MENUBAR:SUPPRESS'] and not self._uiForm.isEmbeded() and self._form.style != 'dialog':
assert self._uiForm.menuBar is None
# We do not set the menubar using main_window.SetMenuBar() here,
# because on OS X some menu items will get rearranged (like Quit,
# About, Help-menu ...). This rearrangement won't work if the
# menubar is set before the items are created.
self._container = self._uiForm.menuBar = MenuBar(self)
elif event.container is not None:
# Submenu
self._container = Menu(self, self._gfObject.label or '')
if isinstance(event.container, (MenuBar, Menu)):
event.container.uiAddMenu(self._container)
elif hasattr(self._gfObject.getParent().uiWidget, '_ui_set_context_menu_'):
self._container = PopupMenu(self, self._gfObject.label or '')
# table, tree and button widgets support the _ui_set_context_menu_() interface
self._gfObject.getParent().uiWidget._ui_set_context_menu_(self._container, self._gfObject.name)
return self._container
#def onPostInit(self):
"""
Sets up key accelerators for the context menu;
must run after the menu items have been created
"""
#parentUiWidget = self._gfObject.getParent().uiWidget
#
#if hasattr(parentUiWidget, '_ui_set_context_menu_'):
#
# accelerators = []
#
# # for context menu i should connect all accelerators manually @oleg
# for mi in self._gfObject.findChildrenOfType('GFMenuItem', includeSelf=False, allowAllChildren=True):
# hotkey = getattr(mi, 'hotkey', '')
# if hotkey:
# accel = wx.GetAccelFromString("dummy\t" + hotkey)
# if accel:
# accel.Set(accel.GetFlags(), accel.GetKeyCode(), mi.uiWidget.getId())
# accelerators.append(accel)
# if accelerators:
# aTable = wx.AcceleratorTable(accelerators)
# if parentUiWidget.widget.GetAcceleratorTable() != aTable:
# parentUiWidget.widget.SetAcceleratorTable(aTable)
#super(UIMenu, self).onPostInit() | # =============================================================================
# Configuration data
# =============================================================================
configuration = {
'baseClass': UIMenu,
'provides' : 'GFMenu',
'container': 1,
} | random_line_split |
|
menu.py | # GNU Enterprise Forms - wx 2.6 UI Driver - Menu widget
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: menu.py,v 1.6 2011/07/01 20:08:23 oleg Exp $
from src.gnue.forms.uidrivers.java.widgets._base import UIWidget
from src.gnue.forms.uidrivers.java.widgets._remote import MenuBar, Menu, PopupMenu
# =============================================================================
# Wrap an UI layer around a wxMenu widget
# =============================================================================
class UIMenu(UIWidget):
"""
Implements a menu object.
"""
# -------------------------------------------------------------------------
# Constructor
# -------------------------------------------------------------------------
def __init__(self, event):
UIWidget.__init__(self, event)
# -------------------------------------------------------------------------
# Create a menu widget
# -------------------------------------------------------------------------
def _create_widget_(self, event):
|
# =============================================================================
# Configuration data
# =============================================================================
configuration = {
'baseClass': UIMenu,
'provides' : 'GFMenu',
'container': 1,
}
| """
Creates a new Menu widget.
"""
if self._gfObject.name == '__main_menu__':
if not self._form._features['GUI:MENUBAR:SUPPRESS'] and not self._uiForm.isEmbeded() and self._form.style != 'dialog':
assert self._uiForm.menuBar is None
# We do not set the menubar using main_window.SetMenuBar() here,
# because on OS X some menu items will get rearranged (like Quit,
# About, Help-menu ...). This rearrangement won't work if the
# menubar is set before the items are created.
self._container = self._uiForm.menuBar = MenuBar(self)
elif event.container is not None:
# Submenu
self._container = Menu(self, self._gfObject.label or '')
if isinstance(event.container, (MenuBar, Menu)):
event.container.uiAddMenu(self._container)
elif hasattr(self._gfObject.getParent().uiWidget, '_ui_set_context_menu_'):
self._container = PopupMenu(self, self._gfObject.label or '')
# table, tree and button widgets support the _ui_set_context_menu_() interface
self._gfObject.getParent().uiWidget._ui_set_context_menu_(self._container, self._gfObject.name)
return self._container
#def onPostInit(self):
"""
Sets up key accelerators for the context menu;
must run after the menu items have been created
"""
#parentUiWidget = self._gfObject.getParent().uiWidget
#
#if hasattr(parentUiWidget, '_ui_set_context_menu_'):
#
# accelerators = []
#
# # for context menu i should connect all accelerators manually @oleg
# for mi in self._gfObject.findChildrenOfType('GFMenuItem', includeSelf=False, allowAllChildren=True):
# hotkey = getattr(mi, 'hotkey', '')
# if hotkey:
# accel = wx.GetAccelFromString("dummy\t" + hotkey)
# if accel:
# accel.Set(accel.GetFlags(), accel.GetKeyCode(), mi.uiWidget.getId())
# accelerators.append(accel)
# if accelerators:
# aTable = wx.AcceleratorTable(accelerators)
# if parentUiWidget.widget.GetAcceleratorTable() != aTable:
# parentUiWidget.widget.SetAcceleratorTable(aTable)
#super(UIMenu, self).onPostInit() | identifier_body |
menu.py | # GNU Enterprise Forms - wx 2.6 UI Driver - Menu widget
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: menu.py,v 1.6 2011/07/01 20:08:23 oleg Exp $
from src.gnue.forms.uidrivers.java.widgets._base import UIWidget
from src.gnue.forms.uidrivers.java.widgets._remote import MenuBar, Menu, PopupMenu
# =============================================================================
# Wrap an UI layer around a wxMenu widget
# =============================================================================
class | (UIWidget):
"""
Implements a menu object.
"""
# -------------------------------------------------------------------------
# Constructor
# -------------------------------------------------------------------------
def __init__(self, event):
UIWidget.__init__(self, event)
# -------------------------------------------------------------------------
# Create a menu widget
# -------------------------------------------------------------------------
def _create_widget_(self, event):
"""
Creates a new Menu widget.
"""
if self._gfObject.name == '__main_menu__':
if not self._form._features['GUI:MENUBAR:SUPPRESS'] and not self._uiForm.isEmbeded() and self._form.style != 'dialog':
assert self._uiForm.menuBar is None
# We do not set the menubar using main_window.SetMenuBar() here,
# because on OS X some menu items will get rearranged (like Quit,
# About, Help-menu ...). This rearrangement won't work if the
# menubar is set before the items are created.
self._container = self._uiForm.menuBar = MenuBar(self)
elif event.container is not None:
# Submenu
self._container = Menu(self, self._gfObject.label or '')
if isinstance(event.container, (MenuBar, Menu)):
event.container.uiAddMenu(self._container)
elif hasattr(self._gfObject.getParent().uiWidget, '_ui_set_context_menu_'):
self._container = PopupMenu(self, self._gfObject.label or '')
# table, tree and button widgets support the _ui_set_context_menu_() interface
self._gfObject.getParent().uiWidget._ui_set_context_menu_(self._container, self._gfObject.name)
return self._container
#def onPostInit(self):
"""
Sets up key accelerators for the context menu;
must run after the menu items have been created
"""
#parentUiWidget = self._gfObject.getParent().uiWidget
#
#if hasattr(parentUiWidget, '_ui_set_context_menu_'):
#
# accelerators = []
#
# # for context menu i should connect all accelerators manually @oleg
# for mi in self._gfObject.findChildrenOfType('GFMenuItem', includeSelf=False, allowAllChildren=True):
# hotkey = getattr(mi, 'hotkey', '')
# if hotkey:
# accel = wx.GetAccelFromString("dummy\t" + hotkey)
# if accel:
# accel.Set(accel.GetFlags(), accel.GetKeyCode(), mi.uiWidget.getId())
# accelerators.append(accel)
# if accelerators:
# aTable = wx.AcceleratorTable(accelerators)
# if parentUiWidget.widget.GetAcceleratorTable() != aTable:
# parentUiWidget.widget.SetAcceleratorTable(aTable)
#super(UIMenu, self).onPostInit()
# =============================================================================
# Configuration data
# =============================================================================
configuration = {
'baseClass': UIMenu,
'provides' : 'GFMenu',
'container': 1,
}
| UIMenu | identifier_name |
axis_input_box_group.tsx | import * as React from "react";
import { AxisInputBox } from "./axis_input_box";
import { Row, Col } from "../ui/index";
import { AxisInputBoxGroupProps, AxisInputBoxGroupState } from "./interfaces";
import { isNumber } from "lodash";
import { Vector3 } from "farmbot";
import { t } from "../i18next_wrapper";
/** Coordinate input and GO button for Move widget. */
export class AxisInputBoxGroup extends
React.Component<AxisInputBoxGroupProps, Partial<AxisInputBoxGroupState>> {
| (props: AxisInputBoxGroupProps) {
super(props);
this.state = {};
}
change = (axis: keyof Vector3, val: number) => {
this.setState({ [axis]: val });
}
get vector() {
const { x, y, z } = this.state;
const p = this.props.position;
const x2 = p.x,
y2 = p.y,
z2 = p.z;
return {
x: isNumber(x) ? x : (x2 || 0),
y: isNumber(y) ? y : (y2 || 0),
z: isNumber(z) ? z : (z2 || 0)
};
}
clicked = () => {
this.props.onCommit(this.vector);
this.setState({ x: undefined, y: undefined, z: undefined });
}
render() {
const { x, y, z } = this.state;
return <Row>
<AxisInputBox
onChange={this.change}
axis={"x"}
value={x} />
<AxisInputBox
onChange={this.change}
axis={"y"}
value={y} />
<AxisInputBox
onChange={this.change}
axis={"z"}
value={z} />
<Col xs={3}>
<button
onClick={this.clicked}
disabled={this.props.disabled || false}
title={t("Move to chosen location")}
className="full-width green go fb-button">
{t("GO")}
</button>
</Col>
</Row>;
}
}
| constructor | identifier_name |
axis_input_box_group.tsx | import * as React from "react";
import { AxisInputBox } from "./axis_input_box";
import { Row, Col } from "../ui/index";
import { AxisInputBoxGroupProps, AxisInputBoxGroupState } from "./interfaces";
import { isNumber } from "lodash";
import { Vector3 } from "farmbot";
import { t } from "../i18next_wrapper";
/** Coordinate input and GO button for Move widget. */
export class AxisInputBoxGroup extends
React.Component<AxisInputBoxGroupProps, Partial<AxisInputBoxGroupState>> {
constructor(props: AxisInputBoxGroupProps) |
change = (axis: keyof Vector3, val: number) => {
this.setState({ [axis]: val });
}
get vector() {
const { x, y, z } = this.state;
const p = this.props.position;
const x2 = p.x,
y2 = p.y,
z2 = p.z;
return {
x: isNumber(x) ? x : (x2 || 0),
y: isNumber(y) ? y : (y2 || 0),
z: isNumber(z) ? z : (z2 || 0)
};
}
clicked = () => {
this.props.onCommit(this.vector);
this.setState({ x: undefined, y: undefined, z: undefined });
}
render() {
const { x, y, z } = this.state;
return <Row>
<AxisInputBox
onChange={this.change}
axis={"x"}
value={x} />
<AxisInputBox
onChange={this.change}
axis={"y"}
value={y} />
<AxisInputBox
onChange={this.change}
axis={"z"}
value={z} />
<Col xs={3}>
<button
onClick={this.clicked}
disabled={this.props.disabled || false}
title={t("Move to chosen location")}
className="full-width green go fb-button">
{t("GO")}
</button>
</Col>
</Row>;
}
}
| {
super(props);
this.state = {};
} | identifier_body |
axis_input_box_group.tsx | import * as React from "react";
import { AxisInputBox } from "./axis_input_box";
import { Row, Col } from "../ui/index";
import { AxisInputBoxGroupProps, AxisInputBoxGroupState } from "./interfaces";
import { isNumber } from "lodash";
import { Vector3 } from "farmbot";
import { t } from "../i18next_wrapper";
/** Coordinate input and GO button for Move widget. */
export class AxisInputBoxGroup extends
React.Component<AxisInputBoxGroupProps, Partial<AxisInputBoxGroupState>> {
constructor(props: AxisInputBoxGroupProps) {
super(props);
this.state = {};
}
change = (axis: keyof Vector3, val: number) => { |
get vector() {
const { x, y, z } = this.state;
const p = this.props.position;
const x2 = p.x,
y2 = p.y,
z2 = p.z;
return {
x: isNumber(x) ? x : (x2 || 0),
y: isNumber(y) ? y : (y2 || 0),
z: isNumber(z) ? z : (z2 || 0)
};
}
clicked = () => {
this.props.onCommit(this.vector);
this.setState({ x: undefined, y: undefined, z: undefined });
}
render() {
const { x, y, z } = this.state;
return <Row>
<AxisInputBox
onChange={this.change}
axis={"x"}
value={x} />
<AxisInputBox
onChange={this.change}
axis={"y"}
value={y} />
<AxisInputBox
onChange={this.change}
axis={"z"}
value={z} />
<Col xs={3}>
<button
onClick={this.clicked}
disabled={this.props.disabled || false}
title={t("Move to chosen location")}
className="full-width green go fb-button">
{t("GO")}
</button>
</Col>
</Row>;
}
} | this.setState({ [axis]: val });
} | random_line_split |
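A hypothetical usage sketch for the component in these rows (the position value and handler are illustrative; the props match how the component reads them above):
{/* somewhere inside a parent component's render() */}
<AxisInputBoxGroup
  position={{ x: 0, y: 0, z: 0 }}
  disabled={false}
  onCommit={vector => console.log("moving to", vector)} />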
main.rs | /*
--- Day 5: How About a Nice Game of Chess? ---
You are faced with a security door designed by Easter Bunny engineers that seem to have acquired most of their security knowledge by watching hacking movies.
The eight-character password for the door is generated one character at a time by finding the MD5 hash of some Door ID (your puzzle input) and an increasing integer index (starting with 0).
A hash indicates the next character in the password if its hexadecimal representation starts with five zeroes. If it does, the sixth character in the hash is the next character of the password.
For example, if the Door ID is abc:
The first index which produces a hash that starts with five zeroes is 3231929, which we find by hashing abc3231929; the sixth character of the hash, and thus the first character of the password, is 1.
5017308 produces the next interesting hash, which starts with 000008f82..., so the second character of the password is 8.
The third time a hash starts with five zeroes is for abc5278568, discovering the character f.
In this example, after continuing this search a total of eight times, the password is 18f47a30.
Given the actual Door ID, what is the password?
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism. Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position 4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a cinematic "decrypting" animation.
*/
extern crate crypto;
use crypto::md5::Md5;
use crypto::digest::Digest;
fn hash(input : &str) -> String {
let mut hash = Md5::new();
hash.input_str(&input);
hash.result_str()
}
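// A quick sanity check against the worked examples quoted in the problem
// statement above (a minimal sketch: it only exercises the `hash` helper
// just defined, and the expected prefixes/characters are taken verbatim
// from the statement, not independently derived).
#[cfg(test)]
mod tests {
    use super::hash;
    #[test]
    fn documented_examples_hold() {
        // Part one: "abc3231929" hashes to 0000015..., so the sixth
        // character is '1'.
        let h = hash("abc3231929");
        assert!(h.starts_with("00000"));
        assert_eq!(h.chars().nth(5), Some('1'));
        // Part two: index 5357525 produces 000004e..., i.e. position 4,
        // character 'e'.
        let h = hash("abc5357525");
        assert!(h.starts_with("00000"));
        assert_eq!(h.chars().nth(5), Some('4'));
        assert_eq!(h.chars().nth(6), Some('e'));
    }
}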
fn solve_part_a(puzzle_input : &str){
let mut pass = Vec::new();
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
pass.push(result.chars().nth(5).unwrap());
// once the pass reaches 8 chars, break
if pass.len() == 8 {
break;
}
}
}
// we got em. 😏
println!("[PART A] cracked pass: {:?}", pass);
}
fn solve_part_b(puzzle_input : &str){
let mut pass = vec![None; 8];
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
if let Some(location) = result.chars().nth(5).unwrap().to_digit(10) {
let location = location as usize;
if location > 7 { continue };
if pass.get(location).unwrap().is_some() { continue; }
let character = result.chars().nth(6).unwrap();
pass[location] = result.chars().nth(6);
println!("found character {} for location {}", character, location);
}
}
// once all characters are filled in, break
if pass.iter().all(|c| c.is_some()) {
| }
// we got em. 😏
let pass_string : String = pass.iter().map(|c| c.unwrap()).collect();
println!("[PART B] cracked the pass: {:?}", pass_string);
}
fn main() {
const PUZZLE_INPUT : &'static str = "reyedfim";
solve_part_a(PUZZLE_INPUT);
solve_part_b(PUZZLE_INPUT);
}
| break;
}
| conditional_block |
main.rs | /*
--- Day 5: How About a Nice Game of Chess? ---
You are faced with a security door designed by Easter Bunny engineers that seem to have acquired most of their security knowledge by watching hacking movies.
| The eight-character password for the door is generated one character at a time by finding the MD5 hash of some Door ID (your puzzle input) and an increasing integer index (starting with 0).
A hash indicates the next character in the password if its hexadecimal representation starts with five zeroes. If it does, the sixth character in the hash is the next character of the password.
For example, if the Door ID is abc:
The first index which produces a hash that starts with five zeroes is 3231929, which we find by hashing abc3231929; the sixth character of the hash, and thus the first character of the password, is 1.
5017308 produces the next interesting hash, which starts with 000008f82..., so the second character of the password is 8.
The third time a hash starts with five zeroes is for abc5278568, discovering the character f.
In this example, after continuing this search a total of eight times, the password is 18f47a30.
Given the actual Door ID, what is the password?
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism. Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position 4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a cinematic "decrypting" animation.
*/
extern crate crypto;
use crypto::md5::Md5;
use crypto::digest::Digest;
fn hash(input : &str) -> String {
let mut hash = Md5::new();
hash.input_str(&input);
hash.result_str()
}
fn solve_part_a(puzzle_input : &str){
let mut pass = Vec::new();
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
pass.push(result.chars().nth(5).unwrap());
// once the pass reaches 8 chars, break
if pass.len() == 8 {
break;
}
}
}
// we got em. 😏
println!("[PART A] cracked pass: {:?}", pass);
}
fn solve_part_b(puzzle_input : &str){
let mut pass = vec![None; 8];
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
if let Some(location) = result.chars().nth(5).unwrap().to_digit(10) {
let location = location as usize;
if location > 7 { continue };
if pass.get(location).unwrap().is_some() { continue; }
let character = result.chars().nth(6).unwrap();
pass[location] = result.chars().nth(6);
println!("found character {} for location {}", character, location);
}
}
// once all characters are filled in, break
if pass.iter().all(|c| c.is_some()) {
break;
}
}
// we got em. 😏
let pass_string : String = pass.iter().map(|c| c.unwrap()).collect();
println!("[PART B] cracked the pass: {:?}", pass_string);
}
fn main() {
const PUZZLE_INPUT : &'static str = "reyedfim";
solve_part_a(PUZZLE_INPUT);
solve_part_b(PUZZLE_INPUT);
} | random_line_split |
|
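The brute-force search described in the Day 5 rows above, restated as a minimal Python sketch for comparison (hashlib is assumed; door_id is a placeholder argument):

import hashlib

def crack_part_a(door_id):
    # Take the 6th hex digit of every MD5 digest that starts with five zeros,
    # hashing door_id + an increasing index, until 8 characters are collected.
    password = []
    index = 0
    while len(password) < 8:
        digest = hashlib.md5(("%s%d" % (door_id, index)).encode()).hexdigest()
        if digest.startswith("00000"):
            password.append(digest[5])
        index += 1
    return "".join(password)

# Per the puzzle statement, crack_part_a("abc") yields "18f47a30".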
main.rs | /*
--- Day 5: How About a Nice Game of Chess? ---
You are faced with a security door designed by Easter Bunny engineers that seem to have acquired most of their security knowledge by watching hacking movies.
The eight-character password for the door is generated one character at a time by finding the MD5 hash of some Door ID (your puzzle input) and an increasing integer index (starting with 0).
A hash indicates the next character in the password if its hexadecimal representation starts with five zeroes. If it does, the sixth character in the hash is the next character of the password.
For example, if the Door ID is abc:
The first index which produces a hash that starts with five zeroes is 3231929, which we find by hashing abc3231929; the sixth character of the hash, and thus the first character of the password, is 1.
5017308 produces the next interesting hash, which starts with 000008f82..., so the second character of the password is 8.
The third time a hash starts with five zeroes is for abc5278568, discovering the character f.
In this example, after continuing this search a total of eight times, the password is 18f47a30.
Given the actual Door ID, what is the password?
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism. Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position 4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a cinematic "decrypting" animation.
*/
extern crate crypto;
use crypto::md5::Md5;
use crypto::digest::Digest;
fn hash(input : &str) -> String {
let mut hash = Md5::new();
hash.input_str(&input);
hash.result_str()
}
fn solve_part_a(puzzle_input : &str){
let mut pass = Vec::new();
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
pass.push(result.chars().nth(5).unwrap());
// once the pass reaches 8 chars, break
if pass.len() == 8 {
break;
}
}
}
// we got em. 😏
println!("[PART A] cracked pass: {:?}", pass);
}
fn solve_part_b(puzzle_input : &str){
let mut pass = vec![None; 8];
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
if let Some(location) = result.chars().nth(5).unwrap().to_digit(10) {
let location = location as usize;
if location > 7 { continue };
if pass.get(location).unwrap().is_some() { continue; }
let character = result.chars().nth(6).unwrap();
pass[location] = result.chars().nth(6);
println!("found character {} for location {}", character, location);
}
}
// once all characters are filled in, break
if pass.iter().all(|c| c.is_some()) {
break;
}
}
// we got em. 😏
let pass_string : String = pass.iter().map(|c| c.unwrap()).collect();
println!("[PART B] cracked the pass: {:?}", pass_string);
}
fn main() {
| const PUZZLE_INPUT : &'static str = "reyedfim";
solve_part_a(PUZZLE_INPUT);
solve_part_b(PUZZLE_INPUT);
}
| identifier_body |
|
main.rs | /*
--- Day 5: How About a Nice Game of Chess? ---
You are faced with a security door designed by Easter Bunny engineers that seem to have acquired most of their security knowledge by watching hacking movies.
The eight-character password for the door is generated one character at a time by finding the MD5 hash of some Door ID (your puzzle input) and an increasing integer index (starting with 0).
A hash indicates the next character in the password if its hexadecimal representation starts with five zeroes. If it does, the sixth character in the hash is the next character of the password.
For example, if the Door ID is abc:
The first index which produces a hash that starts with five zeroes is 3231929, which we find by hashing abc3231929; the sixth character of the hash, and thus the first character of the password, is 1.
5017308 produces the next interesting hash, which starts with 000008f82..., so the second character of the password is 8.
The third time a hash starts with five zeroes is for abc5278568, discovering the character f.
In this example, after continuing this search a total of eight times, the password is 18f47a30.
Given the actual Door ID, what is the password?
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism. Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position 4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a cinematic "decrypting" animation.
*/
extern crate crypto;
use crypto::md5::Md5;
use crypto::digest::Digest;
fn hash(input : &str) -> String {
let mut hash = Md5::new();
hash.input_str(&input);
hash.result_str()
}
fn solve_part_a(puzzle_input : &str){
let mut pass = Vec::new();
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
pass.push(result.chars().nth(5).unwrap());
// once the pass reaches 8 chars, break
if pass.len() == 8 {
break;
}
}
}
// we got em. 😏
println!("[PART A] cracked pass: {:?}", pass);
}
fn solve_part_b(puzzle_input : &str){
let mut pass = vec![None; 8];
// iterate forever until we break
for i in 0..std::u64::MAX {
let input : &str = &format!("{}{}", puzzle_input, i);
let result = hash(&input);
if result.starts_with("00000") {
if let Some(location) = result.chars().nth(5).unwrap().to_digit(10) {
let location = location as usize;
if location > 7 { continue };
if pass.get(location).unwrap().is_some() { continue; }
let character = result.chars().nth(6).unwrap();
pass[location] = result.chars().nth(6);
println!("found character {} for location {}", character, location);
}
}
// once all characters are filled in, break
if pass.iter().all(|c| c.is_some()) {
break;
}
}
// we got em. 😏
let pass_string : String = pass.iter().map(|c| c.unwrap()).collect();
println!("[PART B] cracked the pass: {:?}", pass_string);
}
fn main() | const PUZZLE_INPUT : &'static str = "reyedfim";
solve_part_a(PUZZLE_INPUT);
solve_part_b(PUZZLE_INPUT);
}
| {
| identifier_name |
htmlheadelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLHeadElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHeadElementDerived;
use dom::bindings::js::JS;
use dom::document::Document;
use dom::element::HTMLHeadElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct HTMLHeadElement {
htmlelement: HTMLElement
}
impl HTMLHeadElementDerived for EventTarget {
fn is_htmlheadelement(&self) -> bool {
match self.type_id {
NodeTargetTypeId(ElementNodeTypeId(HTMLHeadElementTypeId)) => true,
_ => false
}
}
}
impl HTMLHeadElement {
pub fn new_inherited(localName: DOMString, document: JS<Document>) -> HTMLHeadElement {
HTMLHeadElement {
htmlelement: HTMLElement::new_inherited(HTMLHeadElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JS<Document>) -> JS<HTMLHeadElement> {
let element = HTMLHeadElement::new_inherited(localName, document.clone());
Node::reflect_node(~element, document, HTMLHeadElementBinding::Wrap)
} | } | random_line_split |
|
htmlheadelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLHeadElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHeadElementDerived;
use dom::bindings::js::JS;
use dom::document::Document;
use dom::element::HTMLHeadElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
pub struct | {
htmlelement: HTMLElement
}
impl HTMLHeadElementDerived for EventTarget {
fn is_htmlheadelement(&self) -> bool {
match self.type_id {
NodeTargetTypeId(ElementNodeTypeId(HTMLHeadElementTypeId)) => true,
_ => false
}
}
}
impl HTMLHeadElement {
pub fn new_inherited(localName: DOMString, document: JS<Document>) -> HTMLHeadElement {
HTMLHeadElement {
htmlelement: HTMLElement::new_inherited(HTMLHeadElementTypeId, localName, document)
}
}
pub fn new(localName: DOMString, document: &JS<Document>) -> JS<HTMLHeadElement> {
let element = HTMLHeadElement::new_inherited(localName, document.clone());
Node::reflect_node(~element, document, HTMLHeadElementBinding::Wrap)
}
}
| HTMLHeadElement | identifier_name |
const.py | """Freebox component constants."""
from __future__ import annotations
import socket
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.const import DATA_RATE_KILOBYTES_PER_SECOND, PERCENTAGE, Platform
DOMAIN = "freebox"
SERVICE_REBOOT = "reboot"
APP_DESC = {
"app_id": "hass",
"app_name": "Home Assistant",
"app_version": "0.106", |
DEFAULT_DEVICE_NAME = "Unknown device"
# to store the cookie
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CONNECTION_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="rate_down",
name="Freebox download speed",
native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND,
icon="mdi:download-network",
),
SensorEntityDescription(
key="rate_up",
name="Freebox upload speed",
native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND,
icon="mdi:upload-network",
),
)
CONNECTION_SENSORS_KEYS: list[str] = [desc.key for desc in CONNECTION_SENSORS]
CALL_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="missed",
name="Freebox missed calls",
icon="mdi:phone-missed",
),
)
DISK_PARTITION_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="partition_free_space",
name="free space",
native_unit_of_measurement=PERCENTAGE,
icon="mdi:harddisk",
),
)
# Icons
DEVICE_ICONS = {
"freebox_delta": "mdi:television-guide",
"freebox_hd": "mdi:television-guide",
"freebox_mini": "mdi:television-guide",
"freebox_player": "mdi:television-guide",
"ip_camera": "mdi:cctv",
"ip_phone": "mdi:phone-voip",
"laptop": "mdi:laptop",
"multimedia_device": "mdi:play-network",
"nas": "mdi:nas",
"networking_device": "mdi:network",
"printer": "mdi:printer",
"router": "mdi:router-wireless",
"smartphone": "mdi:cellphone",
"tablet": "mdi:tablet",
"television": "mdi:television",
"vg_console": "mdi:gamepad-variant",
"workstation": "mdi:desktop-tower-monitor",
} | "device_name": socket.gethostname(),
}
API_VERSION = "v6"
PLATFORMS = [Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR, Platform.SWITCH] | random_line_split |
sort.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! In-place sorting.
fn quicksort_helper<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T], left: int, right: int) {
if right <= left {
return
}
let mut i: int = left - 1;
let mut j: int = right;
let mut p: int = i;
let mut q: int = j;
unsafe {
let v: *mut T = &mut arr[right as uint];
loop {
i += 1;
while arr[i as uint] < (*v) {
i += 1
}
j -= 1;
while (*v) < arr[j as uint] {
if j == left {
break
}
j -= 1;
}
if i >= j {
break
}
arr.swap(i as uint, j as uint);
if arr[i as uint] == (*v) {
p += 1;
arr.swap(p as uint, i as uint)
}
if (*v) == arr[j as uint] {
q -= 1;
arr.swap(j as uint, q as uint)
}
}
}
arr.swap(i as uint, right as uint);
j = i - 1;
i += 1;
let mut k: int = left;
while k < p {
arr.swap(k as uint, j as uint);
k += 1;
j -= 1;
assert!(k < arr.len() as int);
}
k = right - 1;
while k > q {
arr.swap(i as uint, k as uint);
k -= 1;
i += 1;
assert!(k != 0);
}
quicksort_helper(arr, left, j);
quicksort_helper(arr, i, right);
}
/// An in-place quicksort.
///
/// The algorithm is from Sedgewick and Bentley, "Quicksort is Optimal":
/// http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf
pub fn quicksort<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T]) |
#[cfg(test)]
pub mod test {
use std::rand;
use std::rand::Rng;
use sort;
#[test]
pub fn random() {
let mut rng = rand::task_rng();
for _ in range(0, 50000) {
let len: uint = rng.gen();
let mut v: Vec<int> = rng.gen_iter::<int>().take((len % 32) + 1).collect();
sort::quicksort(v.as_mut_slice());
for i in range(0, v.len() - 1) {
assert!(v.get(i) <= v.get(i + 1))
}
}
}
}
| {
if arr.len() <= 1 {
return
}
let len = arr.len();
quicksort_helper(arr, 0, (len - 1) as int);
} | identifier_body |
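The sort.rs rows implement the Bentley-McIlroy three-way partition cited in their doc comment; the simpler Dijkstra-style partition below conveys the same idea (elements equal to the pivot are grouped once and never repartitioned) in a short Python sketch:

def quicksort3(arr, lo=0, hi=None):
    if hi is None:
        hi = len(arr) - 1
    if hi <= lo:
        return
    pivot = arr[lo]
    lt, i, gt = lo, lo + 1, hi
    while i <= gt:
        if arr[i] < pivot:        # grow the < region on the left
            arr[lt], arr[i] = arr[i], arr[lt]
            lt += 1
            i += 1
        elif arr[i] > pivot:      # grow the > region from the right
            arr[i], arr[gt] = arr[gt], arr[i]
            gt -= 1
        else:                     # equal to the pivot: leave in place
            i += 1
    quicksort3(arr, lo, lt - 1)   # recurse only on the strict < and > regions
    quicksort3(arr, gt + 1, hi)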
sort.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! In-place sorting.
fn | <T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T], left: int, right: int) {
if right <= left {
return
}
let mut i: int = left - 1;
let mut j: int = right;
let mut p: int = i;
let mut q: int = j;
unsafe {
let v: *mut T = &mut arr[right as uint];
loop {
i += 1;
while arr[i as uint] < (*v) {
i += 1
}
j -= 1;
while (*v) < arr[j as uint] {
if j == left {
break
}
j -= 1;
}
if i >= j {
break
}
arr.swap(i as uint, j as uint);
if arr[i as uint] == (*v) {
p += 1;
arr.swap(p as uint, i as uint)
}
if (*v) == arr[j as uint] {
q -= 1;
arr.swap(j as uint, q as uint)
}
}
}
arr.swap(i as uint, right as uint);
j = i - 1;
i += 1;
let mut k: int = left;
while k < p {
arr.swap(k as uint, j as uint);
k += 1;
j -= 1;
assert!(k < arr.len() as int);
}
k = right - 1;
while k > q {
arr.swap(i as uint, k as uint);
k -= 1;
i += 1;
assert!(k != 0);
}
quicksort_helper(arr, left, j);
quicksort_helper(arr, i, right);
}
/// An in-place quicksort.
///
/// The algorithm is from Sedgewick and Bentley, "Quicksort is Optimal":
/// http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf
pub fn quicksort<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T]) {
if arr.len() <= 1 {
return
}
let len = arr.len();
quicksort_helper(arr, 0, (len - 1) as int);
}
#[cfg(test)]
pub mod test {
use std::rand;
use std::rand::Rng;
use sort;
#[test]
pub fn random() {
let mut rng = rand::task_rng();
for _ in range(0, 50000) {
let len: uint = rng.gen();
let mut v: Vec<int> = rng.gen_iter::<int>().take((len % 32) + 1).collect();
sort::quicksort(v.as_mut_slice());
for i in range(0, v.len() - 1) {
assert!(v.get(i) <= v.get(i + 1))
}
}
}
}
| quicksort_helper | identifier_name |
sort.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! In-place sorting.
fn quicksort_helper<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T], left: int, right: int) {
if right <= left {
return
}
let mut i: int = left - 1;
let mut j: int = right;
let mut p: int = i;
let mut q: int = j;
unsafe {
let v: *mut T = &mut arr[right as uint];
loop {
i += 1;
while arr[i as uint] < (*v) {
i += 1
}
j -= 1;
while (*v) < arr[j as uint] {
if j == left {
break
}
j -= 1;
}
if i >= j |
arr.swap(i as uint, j as uint);
if arr[i as uint] == (*v) {
p += 1;
arr.swap(p as uint, i as uint)
}
if (*v) == arr[j as uint] {
q -= 1;
arr.swap(j as uint, q as uint)
}
}
}
arr.swap(i as uint, right as uint);
j = i - 1;
i += 1;
let mut k: int = left;
while k < p {
arr.swap(k as uint, j as uint);
k += 1;
j -= 1;
assert!(k < arr.len() as int);
}
k = right - 1;
while k > q {
arr.swap(i as uint, k as uint);
k -= 1;
i += 1;
assert!(k != 0);
}
quicksort_helper(arr, left, j);
quicksort_helper(arr, i, right);
}
/// An in-place quicksort.
///
/// The algorithm is from Sedgewick and Bentley, "Quicksort is Optimal":
/// http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf
pub fn quicksort<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T]) {
if arr.len() <= 1 {
return
}
let len = arr.len();
quicksort_helper(arr, 0, (len - 1) as int);
}
#[cfg(test)]
pub mod test {
use std::rand;
use std::rand::Rng;
use sort;
#[test]
pub fn random() {
let mut rng = rand::task_rng();
for _ in range(0, 50000) {
let len: uint = rng.gen();
let mut v: Vec<int> = rng.gen_iter::<int>().take((len % 32) + 1).collect();
sort::quicksort(v.as_mut_slice());
for i in range(0, v.len() - 1) {
assert!(v.get(i) <= v.get(i + 1))
}
}
}
}
| {
break
} | conditional_block |
sort.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
fn quicksort_helper<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T], left: int, right: int) {
if right <= left {
return
}
let mut i: int = left - 1;
let mut j: int = right;
let mut p: int = i;
let mut q: int = j;
unsafe {
let v: *mut T = &mut arr[right as uint];
loop {
i += 1;
while arr[i as uint] < (*v) {
i += 1
}
j -= 1;
while (*v) < arr[j as uint] {
if j == left {
break
}
j -= 1;
}
if i >= j {
break
}
arr.swap(i as uint, j as uint);
if arr[i as uint] == (*v) {
p += 1;
arr.swap(p as uint, i as uint)
}
if (*v) == arr[j as uint] {
q -= 1;
arr.swap(j as uint, q as uint)
}
}
}
arr.swap(i as uint, right as uint);
j = i - 1;
i += 1;
let mut k: int = left;
while k < p {
arr.swap(k as uint, j as uint);
k += 1;
j -= 1;
assert!(k < arr.len() as int);
}
k = right - 1;
while k > q {
arr.swap(i as uint, k as uint);
k -= 1;
i += 1;
assert!(k != 0);
}
quicksort_helper(arr, left, j);
quicksort_helper(arr, i, right);
}
/// An in-place quicksort.
///
/// The algorithm is from Sedgewick and Bentley, "Quicksort is Optimal":
/// http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf
pub fn quicksort<T:Ord + Eq + PartialOrd + PartialEq>(arr: &mut [T]) {
if arr.len() <= 1 {
return
}
let len = arr.len();
quicksort_helper(arr, 0, (len - 1) as int);
}
#[cfg(test)]
pub mod test {
use std::rand;
use std::rand::Rng;
use sort;
#[test]
pub fn random() {
let mut rng = rand::task_rng();
for _ in range(0, 50000) {
let len: uint = rng.gen();
let mut v: Vec<int> = rng.gen_iter::<int>().take((len % 32) + 1).collect();
sort::quicksort(v.as_mut_slice());
for i in range(0, v.len() - 1) {
assert!(v.get(i) <= v.get(i + 1))
}
}
}
} |
//! In-place sorting. | random_line_split |
projections.py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Test mixed layer, projections and operators.
'''
from paddle.trainer_config_helpers import *
settings(batch_size=1000, learning_rate=1e-4)
din = data_layer(name='test', size=100)
din = embedding_layer(input=din, size=256)
with mixed_layer(size=100) as m1:
m1 += full_matrix_projection(input=din)
with mixed_layer(size=100) as m2:
m2 += table_projection(input=m1)
with mixed_layer(size=100) as m3:
m3 += identity_projection(input=m2)
with mixed_layer(size=100) as m4:
m4 += dotmul_projection(input=m3)
with mixed_layer() as m5:
m5 += context_projection(input=m4, context_len=3)
with mixed_layer() as m6:
m6 += dotmul_operator(a=m3, b=m4)
m6 += scaling_projection(m3)
img = data_layer(name='img', size=32 * 32)
flt = data_layer(name='filter', size=3 * 3 * 1 * 64)
with mixed_layer() as m7:
m7 += conv_operator(
img=img, filter=flt, num_filters=64, num_channels=1, filter_size=3)
m7 += conv_projection(img, filter_size=3, num_filters=64, num_channels=1)
with mixed_layer() as m8:
m8 += conv_operator(
img=img,
filter=flt,
num_filters=64,
num_channels=1,
filter_size=3,
stride=2,
padding=1,
trans=True)
m8 += conv_projection(
img,
filter_size=3,
num_filters=64,
num_channels=1,
stride=2,
padding=1, | input=[
full_matrix_projection(input=m5),
trans_full_matrix_projection(input=m6),
full_matrix_projection(input=m7), full_matrix_projection(input=m8)
],
size=100,
layer_attr=ExtraAttr(
drop_rate=0.5, error_clipping_threshold=40))
outputs(end) | trans=True)
end = mixed_layer( | random_line_split |
membership-add-on.module.ts | import {Server} from "../../../core/server";
import {PassportService} from "../../passport/service/passport.service"; | import {MembershipAddOnRepository} from "./repository/membership-add-on.repository";
import {MembershipAddOnSocketComponent} from "./component/membership-add-on-socket.component";
export class MembershipAddOnModule {
service: MembershipAddOnService;
component: MembershipAddOnComponent;
routes: MembershipAddOnSocketRoutes;
constructor(passportService: PassportService,
MembershipAddOnRepository: MembershipAddOnRepository,
server: Server) {
this.service = new MembershipAddOnService(passportService, MembershipAddOnRepository);
this.component = new MembershipAddOnSocketComponent(this.service);
this.routes = new MembershipAddOnSocketRoutes(server, this.component);
}
} | import {MembershipAddOnService} from "./service/membership-add-on.service";
import {MembershipAddOnComponent} from "./component/membership-add-on.component";
import {MembershipAddOnSocketRoutes} from "./routes/membership-add-on-socket.routes"; | random_line_split |
membership-add-on.module.ts | import {Server} from "../../../core/server";
import {PassportService} from "../../passport/service/passport.service";
import {MembershipAddOnService} from "./service/membership-add-on.service";
import {MembershipAddOnComponent} from "./component/membership-add-on.component";
import {MembershipAddOnSocketRoutes} from "./routes/membership-add-on-socket.routes";
import {MembershipAddOnRepository} from "./repository/membership-add-on.repository";
import {MembershipAddOnSocketComponent} from "./component/membership-add-on-socket.component";
export class | {
service: MembershipAddOnService;
component: MembershipAddOnComponent;
routes: MembershipAddOnSocketRoutes;
constructor(passportService: PassportService,
MembershipAddOnRepository: MembershipAddOnRepository,
server: Server) {
this.service = new MembershipAddOnService(passportService, MembershipAddOnRepository);
this.component = new MembershipAddOnSocketComponent(this.service);
this.routes = new MembershipAddOnSocketRoutes(server, this.component);
}
}
| MembershipAddOnModule | identifier_name |
group.py | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <[email protected]>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from booki.editor import models
from booki.utils import security
def remote_get_status_messages(request, message, groupid):
from booki.statusnet.models import searchMessages
group = models.BookiGroup.objects.get(url_name=groupid)
mess = searchMessages('%%23%s' % group.url_name)
# TODO: remove this hard-coded URL
messages = ['<a href="http://status.flossmanuals.net/notice/%s">%s: %s</a>' % (m['id'], m['from_user'], m['text']) for m in mess['results']]
return {"list": messages}
def remote_init_group(request, message, groupid):
import sputnik
## get online users
try:
_onlineUsers = sputnik.smembers("sputnik:channel:%s:users" % message["channel"])
except:
_onlineUsers = []
if request.user.username not in _onlineUsers:
try:
sputnik.sadd("sputnik:channel:%s:users" % message["channel"], request.user.username)
except:
pass
return {}
def remote_leave_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.remove(request.user)
transaction.commit()
return {"result": True} | def remote_join_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.add(request.user)
transaction.commit()
return {"result": True} | random_line_split |
|
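The online-user bookkeeping in the group.py row above (sputnik.smembers and sputnik.sadd keyed by channel) is a set-based presence pattern; a generic redis-py sketch of the same idea, with the connection and key layout assumed rather than taken from Booktype:

import redis

r = redis.Redis()  # assumed local Redis instance

def join_channel(channel, username):
    # Idempotently mark the user online for this channel.
    r.sadd("sputnik:channel:%s:users" % channel, username)

def online_users(channel):
    # smembers returns bytes; decode for display.
    return {m.decode() for m in r.smembers("sputnik:channel:%s:users" % channel)}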
group.py | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <[email protected]>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from booki.editor import models
from booki.utils import security
def | (request, message, groupid):
from booki.statusnet.models import searchMessages
group = models.BookiGroup.objects.get(url_name=groupid)
mess = searchMessages('%%23%s' % group.url_name)
# TODO: remove this hard-coded URL
messages = ['<a href="http://status.flossmanuals.net/notice/%s">%s: %s</a>' % (m['id'], m['from_user'], m['text']) for m in mess['results']]
return {"list": messages}
def remote_init_group(request, message, groupid):
import sputnik
## get online users
try:
_onlineUsers = sputnik.smembers("sputnik:channel:%s:users" % message["channel"])
except:
_onlineUsers = []
if request.user.username not in _onlineUsers:
try:
sputnik.sadd("sputnik:channel:%s:users" % message["channel"], request.user.username)
except:
pass
return {}
def remote_leave_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.remove(request.user)
transaction.commit()
return {"result": True}
def remote_join_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.add(request.user)
transaction.commit()
return {"result": True}
| remote_get_status_messages | identifier_name |
group.py | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <[email protected]>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from booki.editor import models
from booki.utils import security
def remote_get_status_messages(request, message, groupid):
from booki.statusnet.models import searchMessages
group = models.BookiGroup.objects.get(url_name=groupid)
mess = searchMessages('%%23%s' % group.url_name)
# TODO: remove this hard-coded URL
messages = ['<a href="http://status.flossmanuals.net/notice/%s">%s: %s</a>' % (m['id'], m['from_user'], m['text']) for m in mess['results']]
return {"list": messages}
def remote_init_group(request, message, groupid):
import sputnik
## get online users
try:
_onlineUsers = sputnik.smembers("sputnik:channel:%s:users" % message["channel"])
except:
_onlineUsers = []
if request.user.username not in _onlineUsers:
|
return {}
def remote_leave_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.remove(request.user)
transaction.commit()
return {"result": True}
def remote_join_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.add(request.user)
transaction.commit()
return {"result": True}
| try:
sputnik.sadd("sputnik:channel:%s:users" % message["channel"], request.user.username)
except:
pass | conditional_block |
group.py | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <[email protected]>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from booki.editor import models
from booki.utils import security
def remote_get_status_messages(request, message, groupid):
from booki.statusnet.models import searchMessages
group = models.BookiGroup.objects.get(url_name=groupid)
mess = searchMessages('%%23%s' % group.url_name)
# TODO: remove this hard-coded URL
messages = ['<a href="http://status.flossmanuals.net/notice/%s">%s: %s</a>' % (m['id'], m['from_user'], m['text']) for m in mess['results']]
return {"list": messages}
def remote_init_group(request, message, groupid):
import sputnik
## get online users
try:
_onlineUsers = sputnik.smembers("sputnik:channel:%s:users" % message["channel"])
except:
_onlineUsers = []
if request.user.username not in _onlineUsers:
try:
sputnik.sadd("sputnik:channel:%s:users" % message["channel"], request.user.username)
except:
pass
return {}
def remote_leave_group(request, message, groupid):
group = models.BookiGroup.objects.get(url_name=groupid)
group.members.remove(request.user)
transaction.commit()
return {"result": True}
def remote_join_group(request, message, groupid):
| group = models.BookiGroup.objects.get(url_name=groupid)
group.members.add(request.user)
transaction.commit()
return {"result": True} | identifier_body |
|
task-killjoin.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32
// Create a task that is supervised by another task, join the supervised task
// from the supervising task, then fail the supervised task. The supervised
// task will kill the supervising task, waking it up. The supervising task no
// longer needs to be wakened when the supervised task exits.
fn supervised() {
// Yield to make sure the supervisor joins before we fail. This is
// currently not needed because the supervisor runs first, but I can
// imagine that changing.
task::yield();
fail!();
}
fn supervisor() {
// Unsupervise this task so the process doesn't return a failure status as
// a result of the main task being killed.
let f = supervised;
task::try(f);
}
pub fn main() |
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
| {
task::spawn_unlinked(supervisor)
} | identifier_body |
task-killjoin.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32
// Create a task that is supervised by another task, join the supervised task
// from the supervising task, then fail the supervised task. The supervised
// task will kill the supervising task, waking it up. The supervising task no
// longer needs to be wakened when the supervised task exits.
| // currently not needed because the supervisor runs first, but I can
// imagine that changing.
task::yield();
fail!();
}
fn supervisor() {
// Unsupervise this task so the process doesn't return a failure status as
// a result of the main task being killed.
let f = supervised;
task::try(f);
}
pub fn main() {
task::spawn_unlinked(supervisor)
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End: | fn supervised() {
// Yield to make sure the supervisor joins before we fail. This is | random_line_split |
task-killjoin.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32
// Create a task that is supervised by another task, join the supervised task
// from the supervising task, then fail the supervised task. The supervised
// task will kill the supervising task, waking it up. The supervising task no
// longer needs to be wakened when the supervised task exits.
fn | () {
// Yield to make sure the supervisor joins before we fail. This is
// currently not needed because the supervisor runs first, but I can
// imagine that changing.
task::yield();
fail!();
}
fn supervisor() {
// Unsupervise this task so the process doesn't return a failure status as
// a result of the main task being killed.
let f = supervised;
task::try(f);
}
pub fn main() {
task::spawn_unlinked(supervisor)
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
| supervised | identifier_name |
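The three task-killjoin.rs rows above exercise old Rust's failure isolation: task::try runs a function that may fail without taking the caller down, and spawn_unlinked detaches the supervisor from main. A rough Python analogue of that semantics, sketched with multiprocessing (process exit codes stand in for task failure; all names here are illustrative):

import multiprocessing as mp

def supervised():
    raise RuntimeError("simulated failure")  # plays the role of fail!()

def supervisor():
    # Run the failing child in its own process so its failure does not
    # propagate to us; this is the moral equivalent of task::try().
    child = mp.Process(target=supervised)
    child.start()
    child.join()
    return child.exitcode  # nonzero means the child failed while we survived

if __name__ == "__main__":
    print("child exit code:", supervisor())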
interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.js | var interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore =
[
[ "getDevices", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#a31af140046e965889b1e344e53ab37e1", null ],
[ "getMaster", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#a2bed6e1e4895ca63cb4e5bd55c15e48f", null ], | [ "getTermFor", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#af1f35746cf3838fc0b143fb8c8baae5d", null ],
[ "relinquishAllRole", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#ab1d5f9847d612b2884a86ce70f476404", null ],
[ "relinquishRole", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#a2e80344c4c9d4225937db2fa9ead93b7", null ],
[ "requestRole", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#a0835c16a1d78211cec93c738450470ab", null ],
[ "setMaster", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#aefe91986d2d154a58b4a2bef73ec1c7a", null ],
[ "setStandby", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#ae6569bb66221e4fbd3c585f309035744", null ]
]; | [ "getNodes", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#afb48638578c77456adf13f1cd46614c7", null ],
[ "getRole", "interfaceorg_1_1onosproject_1_1mastership_1_1MastershipStore.html#ac2d2cf6f0fc0057267ce7b285bdb91e7", null ], | random_line_split |
eval_assignment.rs | use crate::helpers::{values::*, *};
use ostrov::errors::RuntimeError::*;
#[test]
fn returns_expression() {
assert_eval_val(
"(define x 0)
(set! x (+ x 1))",
unspecified(),
);
}
#[test]
fn overwrites_variables() {
assert_eval(
"(define x 0)
(set! x (+ x 1))
x",
"1",
);
}
#[test]
fn overwrites_variables_on_upper_scopes() {
assert_eval(
"(define x 0)
(define (f)
(set! x (+ x 1)))
(f)
(f)
(f)
x",
"3",
);
}
#[test]
fn | () {
assert_eval(
"(define (gen-counter)
(define counter 0)
(lambda ()
(set! counter (+ counter 1))
counter))
(define count (gen-counter))
(count)
(count)
(count)",
"3",
);
}
#[test]
fn malformed_variable_name() {
assert_eval_err("(set! 3 3)", MalformedExpression);
}
#[test]
fn unknown_variable() {
assert_eval_err("(set! x 3)", UnboundVariable("x".into()));
}
#[test]
fn wrong_arguments_number() {
assert_eval_err("(set!)", BadArity(Some("set!".into())));
assert_eval_err("(set! x)", BadArity(Some("set!".into())));
assert_eval_err("(set! x 2 3)", BadArity(Some("set!".into())));
}
| overwrites_variables_in_captured_scopes | identifier_name |
eval_assignment.rs | use crate::helpers::{values::*, *};
use ostrov::errors::RuntimeError::*;
#[test]
fn returns_expression() {
assert_eval_val(
"(define x 0)
(set! x (+ x 1))",
unspecified(),
);
}
#[test]
fn overwrites_variables() {
assert_eval(
"(define x 0)
(set! x (+ x 1))
x",
"1",
);
}
#[test]
fn overwrites_variables_on_upper_scopes() {
assert_eval(
"(define x 0)
(define (f)
(set! x (+ x 1)))
(f)
(f)
(f)
x",
"3",
);
}
#[test]
fn overwrites_variables_in_captured_scopes() {
assert_eval(
"(define (gen-counter)
(define counter 0)
(lambda ()
(set! counter (+ counter 1))
counter))
(define count (gen-counter))
(count)
(count)
(count)", | "3",
);
}
#[test]
fn malformed_variable_name() {
assert_eval_err("(set! 3 3)", MalformedExpression);
}
#[test]
fn unknown_variable() {
assert_eval_err("(set! x 3)", UnboundVariable("x".into()));
}
#[test]
fn wrong_arguments_number() {
assert_eval_err("(set!)", BadArity(Some("set!".into())));
assert_eval_err("(set! x)", BadArity(Some("set!".into())));
assert_eval_err("(set! x 2 3)", BadArity(Some("set!".into())));
} | random_line_split |
|
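The captured-scope behavior eval_assignment.rs asserts for Scheme's set! matches Python closure semantics; the gen-counter test case transcribes almost directly (nonlocal plays the role of set!):

def gen_counter():
    counter = 0
    def count():
        nonlocal counter  # rebinds the captured variable, like (set! counter ...)
        counter += 1
        return counter
    return count

count = gen_counter()
count()
count()
assert count() == 3  # mirrors the "3" the Rust test expects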
eval_assignment.rs | use crate::helpers::{values::*, *};
use ostrov::errors::RuntimeError::*;
#[test]
fn returns_expression() {
assert_eval_val(
"(define x 0)
(set! x (+ x 1))",
unspecified(),
);
}
#[test]
fn overwrites_variables() {
assert_eval(
"(define x 0)
(set! x (+ x 1))
x",
"1",
);
}
#[test]
fn overwrites_variables_on_upper_scopes() {
assert_eval(
"(define x 0)
(define (f)
(set! x (+ x 1)))
(f)
(f)
(f)
x",
"3",
);
}
#[test]
fn overwrites_variables_in_captured_scopes() {
assert_eval(
"(define (gen-counter)
(define counter 0)
(lambda ()
(set! counter (+ counter 1))
counter))
(define count (gen-counter))
(count)
(count)
(count)",
"3",
);
}
#[test]
fn malformed_variable_name() |
#[test]
fn unknown_variable() {
assert_eval_err("(set! x 3)", UnboundVariable("x".into()));
}
#[test]
fn wrong_arguments_number() {
assert_eval_err("(set!)", BadArity(Some("set!".into())));
assert_eval_err("(set! x)", BadArity(Some("set!".into())));
assert_eval_err("(set! x 2 3)", BadArity(Some("set!".into())));
}
| {
assert_eval_err("(set! 3 3)", MalformedExpression);
} | identifier_body |
production.py | # In production set the environment variable like this:
# DJANGO_SETTINGS_MODULE=my_proj.settings.production
from .base import * # NOQA
import logging.config
# For security and performance reasons, DEBUG is turned off
DEBUG = False
# Must mention ALLOWED_HOSTS in production!
ALLOWED_HOSTS = ['172.16.0.66', 'cellexpress.cgm.ntu.edu.tw']
# Cache the templates in memory for speed-up
loaders = [
(
'django.template.loaders.cached.Loader',
[
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
),
]
TEMPLATES[0]['OPTIONS'].update({"loaders": loaders})
TEMPLATES[0]['OPTIONS'].update({"debug": False})
TEMPLATES[0]['APP_DIRS'] = False
# Email settings
EMAIL_BACKEND = env.str('EMAIL_BACKEND')
EMAIL_HOST = env.str('EMAIL_HOST')
EMAIL_HOST_USER = env.str('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env.str('EMAIL_HOST_PASSWORD')
EMAIL_PORT = env.int('EMAIL_PORT')
EMAIL_USE_SSL = env.bool('EMAIL_USE_SSL')
EMAIL_USE_TLS = env.bool('EMAIL_USE_TLS')
DEFAULT_FROM_EMAIL = SERVER_EMAIL = '{name} <{addr}>'.format(
name='BioCloud Dev',
addr='[email protected]',
)
# Security-related settings
# SECURE_HSTS_SECONDS = 2592000
# SECURE_BROWSER_XSS_FILTER = True
# SECURE_CONTENT_TYPE_NOSNIFF=True
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = True
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# X_FRAME_OPTIONS = 'DENY'
# Log everything to the logs directory at the top
LOGFILE_ROOT = join(BASE_DIR, 'logs')
# Reset logging
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': (
'[%(asctime)s] %(levelname)s '
'[%(pathname)s:%(lineno)s] %(message)s'
),
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django': {
'handlers': ['django_log_file', ],
'propagate': True,
'level': 'DEBUG',
},
}
}
for app in LOCAL_APPS:
|
logging.config.dictConfig(LOGGING)
| app_handler = '%s_log_file' % app
app_log_filepath = '%s.log' % app
LOGGING['loggers'][app] = {
'handlers': [app_handler, 'console', ],
'level': 'DEBUG',
}
LOGGING['handlers'][app_handler] = {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, app_log_filepath),
'formatter': 'verbose',
} | conditional_block |
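With the dictConfig assembled in the row above, loggers resolve by name: each app in LOCAL_APPS gets its own file handler plus the console, while the django logger writes only to logs/django.log. A minimal usage sketch (the app name "myapp" is assumed, not taken from the project):

import logging

log = logging.getLogger("myapp")   # any name that appears in LOCAL_APPS
log.debug("pipeline started")      # goes to logs/myapp.log and the console
logging.getLogger("django").info("request handled")  # goes to logs/django.log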
production.py | # In production set the environment variable like this:
# DJANGO_SETTINGS_MODULE=my_proj.settings.production
from .base import * # NOQA
import logging.config
# For security and performance reasons, DEBUG is turned off
DEBUG = False
# Must mention ALLOWED_HOSTS in production!
ALLOWED_HOSTS = ['172.16.0.66', 'cellexpress.cgm.ntu.edu.tw']
# Cache the templates in memory for speed-up
loaders = [
(
'django.template.loaders.cached.Loader',
[
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
),
]
TEMPLATES[0]['OPTIONS'].update({"loaders": loaders})
TEMPLATES[0]['OPTIONS'].update({"debug": False})
TEMPLATES[0]['APP_DIRS'] = False
# Email settings
EMAIL_BACKEND = env.str('EMAIL_BACKEND')
EMAIL_HOST = env.str('EMAIL_HOST')
EMAIL_HOST_USER = env.str('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env.str('EMAIL_HOST_PASSWORD')
EMAIL_PORT = env.int('EMAIL_PORT')
EMAIL_USE_SSL = env.bool('EMAIL_USE_SSL')
EMAIL_USE_TLS = env.bool('EMAIL_USE_TLS')
DEFAULT_FROM_EMAIL = SERVER_EMAIL = '{name} <{addr}>'.format(
name='BioCloud Dev',
addr='[email protected]',
)
# Security-related settings
# SECURE_HSTS_SECONDS = 2592000
# SECURE_BROWSER_XSS_FILTER = True
# SECURE_CONTENT_TYPE_NOSNIFF=True
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = True | # X_FRAME_OPTIONS = 'DENY'
# Log everything to the logs directory at the top
LOGFILE_ROOT = join(BASE_DIR, 'logs')
# Reset logging
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': (
'[%(asctime)s] %(levelname)s '
'[%(pathname)s:%(lineno)s] %(message)s'
),
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django': {
'handlers': ['django_log_file', ],
'propagate': True,
'level': 'DEBUG',
},
}
}
for app in LOCAL_APPS:
app_handler = '%s_log_file' % app
app_log_filepath = '%s.log' % app
LOGGING['loggers'][app] = {
'handlers': [app_handler, 'console', ],
'level': 'DEBUG',
}
LOGGING['handlers'][app_handler] = {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, app_log_filepath),
'formatter': 'verbose',
}
logging.config.dictConfig(LOGGING) | # SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') | random_line_split |
DaqDevDiscovery01.py | """
File: DaqDevDiscovery01.py
Library Call Demonstrated: mcculw.ul.get_daq_device_inventory()
mcculw.ul.create_daq_device()
mcculw.ul.release_daq_device()
Purpose: Discovers DAQ devices and assigns board number to
the detected devices.
Demonstration: Displays the detected DAQ devices and flashes the
LED of the selected device.
Other Library Calls: mcculw.ul.ignore_instacal()
mcculw.ul.flash_led()
"""
from __future__ import absolute_import, division, print_function
from builtins import * # @UnusedWildImport
import tkinter as tk
from tkinter import StringVar
from tkinter.ttk import Combobox # @UnresolvedImport
from mcculw import ul
from mcculw.enums import InterfaceType
from mcculw.ul import ULError
try:
from ui_examples_util import UIExample, show_ul_error
except ImportError:
from .ui_examples_util import UIExample, show_ul_error
class DaqDevDiscovery01(UIExample):
def __init__(self, master):
super(DaqDevDiscovery01, self).__init__(master)
self.board_num = 0
self.device_created = False
# Tell the UL to ignore any boards configured in InstaCal
ul.ignore_instacal()
self.create_widgets()
def discover_devices(self):
self.inventory = ul.get_daq_device_inventory(InterfaceType.ANY)
if len(self.inventory) > 0:
combobox_values = []
for device in self.inventory:
combobox_values.append(str(device))
self.devices_combobox["values"] = combobox_values
self.devices_combobox.current(0)
self.status_label["text"] = (str(len(self.inventory))
+ " DAQ Device(s) Discovered")
self.devices_combobox["state"] = "readonly"
self.flash_led_button["state"] = "normal"
else:
self.devices_combobox["values"] = [""]
self.devices_combobox.current(0)
self.status_label["text"] = "No Devices Discovered"
self.devices_combobox["state"] = "disabled"
self.flash_led_button["state"] = "disabled"
def flash_led(self):
try:
# Flash the device LED
ul.flash_led(self.board_num)
except ULError as e:
show_ul_error(e)
def selected_device_changed(self, *args): # @UnusedVariable
selected_index = self.devices_combobox.current()
inventory_count = len(self.inventory)
if self.device_created:
# Release any previously configured DAQ device from the UL.
ul.release_daq_device(self.board_num)
self.device_created = False
if inventory_count > 0 and selected_index < inventory_count:
|
def create_widgets(self):
'''Create the tkinter UI'''
main_frame = tk.Frame(self)
main_frame.pack(fill=tk.X, anchor=tk.NW)
discover_button = tk.Button(main_frame)
discover_button["text"] = "Discover DAQ Devices"
discover_button["command"] = self.discover_devices
discover_button.pack(padx=3, pady=3)
self.status_label = tk.Label(main_frame)
self.status_label["text"] = "Status"
self.status_label.pack(anchor=tk.NW, padx=3, pady=3)
results_group = tk.LabelFrame(self, text="Discovered Devices")
results_group.pack(fill=tk.X, anchor=tk.NW, padx=3, pady=3)
self.selected_device_textvar = StringVar()
self.selected_device_textvar.trace('w', self.selected_device_changed)
self.devices_combobox = Combobox(
results_group, textvariable=self.selected_device_textvar)
self.devices_combobox["state"] = "disabled"
self.devices_combobox.pack(fill=tk.X, padx=3, pady=3)
device_id_frame = tk.Frame(results_group)
device_id_frame.pack(anchor=tk.NW)
device_id_left_label = tk.Label(device_id_frame)
device_id_left_label["text"] = "Device Identifier:"
device_id_left_label.grid(row=0, column=0, sticky=tk.W, padx=3, pady=3)
self.device_id_label = tk.Label(device_id_frame)
self.device_id_label.grid(row=0, column=1, sticky=tk.W, padx=3, pady=3)
self.flash_led_button = tk.Button(results_group)
self.flash_led_button["text"] = "Flash LED"
self.flash_led_button["command"] = self.flash_led
self.flash_led_button["state"] = "disabled"
self.flash_led_button.pack(padx=3, pady=3)
button_frame = tk.Frame(self)
button_frame.pack(fill=tk.X, side=tk.RIGHT, anchor=tk.SE)
quit_button = tk.Button(button_frame)
quit_button["text"] = "Quit"
quit_button["command"] = self.master.destroy
quit_button.grid(row=0, column=1, padx=3, pady=3)
# Start the example if this module is being run
if __name__ == "__main__":
# Start the example
DaqDevDiscovery01(master=tk.Tk()).mainloop()
| descriptor = self.inventory[selected_index]
# Update the device ID label
self.device_id_label["text"] = descriptor.unique_id
# Create the DAQ device from the descriptor
# For performance reasons, it is not recommended to create
# and release the device every time hardware communication is
# required. Instead, create the device once and do not release
# it until no additional library calls will be made for this
# device
ul.create_daq_device(self.board_num, descriptor)
self.device_created = True | conditional_block |
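The same discover / create / flash / release sequence the Tk example wraps, as a headless sketch using only the mcculw calls already demonstrated above (BOARD_NUM is an assumed free board number):

from mcculw import ul
from mcculw.enums import InterfaceType
from mcculw.ul import ULError

BOARD_NUM = 0  # assumed free board number

ul.ignore_instacal()  # ignore any boards configured in InstaCal
devices = ul.get_daq_device_inventory(InterfaceType.ANY)
if devices:
    ul.create_daq_device(BOARD_NUM, devices[0])  # bind the first device found
    try:
        ul.flash_led(BOARD_NUM)                  # blink its LED
    except ULError as e:
        print("UL error:", e)
    finally:
        ul.release_daq_device(BOARD_NUM)         # release when done
else:
    print("No DAQ devices discovered")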
DaqDevDiscovery01.py | """
File: DaqDevDiscovery01.py
Library Call Demonstrated: mcculw.ul.get_daq_device_inventory()
mcculw.ul.create_daq_device()
mcculw.ul.release_daq_device()
Purpose: Discovers DAQ devices and assigns board number to
the detected devices.
Demonstration: Displays the detected DAQ devices and flashes the
LED of the selected device.
Other Library Calls: mcculw.ul.ignore_instacal()
mcculw.ul.flash_led()
"""
from __future__ import absolute_import, division, print_function
from builtins import * # @UnusedWildImport
import tkinter as tk
from tkinter import StringVar
from tkinter.ttk import Combobox # @UnresolvedImport
from mcculw import ul
from mcculw.enums import InterfaceType
from mcculw.ul import ULError
try:
from ui_examples_util import UIExample, show_ul_error
except ImportError:
from .ui_examples_util import UIExample, show_ul_error
class DaqDevDiscovery01(UIExample):
def __init__(self, master):
super(DaqDevDiscovery01, self).__init__(master)
self.board_num = 0
self.device_created = False
# Tell the UL to ignore any boards configured in InstaCal
ul.ignore_instacal()
self.create_widgets()
def discover_devices(self):
self.inventory = ul.get_daq_device_inventory(InterfaceType.ANY)
if len(self.inventory) > 0:
combobox_values = []
for device in self.inventory:
combobox_values.append(str(device))
self.devices_combobox["values"] = combobox_values
self.devices_combobox.current(0)
self.status_label["text"] = (str(len(self.inventory))
+ " DAQ Device(s) Discovered")
self.devices_combobox["state"] = "readonly"
self.flash_led_button["state"] = "normal"
else:
self.devices_combobox["values"] = [""]
self.devices_combobox.current(0)
self.status_label["text"] = "No Devices Discovered"
self.devices_combobox["state"] = "disabled"
self.flash_led_button["state"] = "disabled"
def flash_led(self):
try:
# Flash the device LED
ul.flash_led(self.board_num)
except ULError as e:
show_ul_error(e)
def selected_device_changed(self, *args): # @UnusedVariable
selected_index = self.devices_combobox.current()
inventory_count = len(self.inventory)
if self.device_created:
# Release any previously configured DAQ device from the UL. |
if inventory_count > 0 and selected_index < inventory_count:
descriptor = self.inventory[selected_index]
# Update the device ID label
self.device_id_label["text"] = descriptor.unique_id
# Create the DAQ device from the descriptor
# For performance reasons, it is not recommended to create
# and release the device every time hardware communication is
# required. Instead, create the device once and do not release
# it until no additional library calls will be made for this
# device
ul.create_daq_device(self.board_num, descriptor)
self.device_created = True
def create_widgets(self):
'''Create the tkinter UI'''
main_frame = tk.Frame(self)
main_frame.pack(fill=tk.X, anchor=tk.NW)
discover_button = tk.Button(main_frame)
discover_button["text"] = "Discover DAQ Devices"
discover_button["command"] = self.discover_devices
discover_button.pack(padx=3, pady=3)
self.status_label = tk.Label(main_frame)
self.status_label["text"] = "Status"
self.status_label.pack(anchor=tk.NW, padx=3, pady=3)
results_group = tk.LabelFrame(self, text="Discovered Devices")
results_group.pack(fill=tk.X, anchor=tk.NW, padx=3, pady=3)
self.selected_device_textvar = StringVar()
self.selected_device_textvar.trace('w', self.selected_device_changed)
self.devices_combobox = Combobox(
results_group, textvariable=self.selected_device_textvar)
self.devices_combobox["state"] = "disabled"
self.devices_combobox.pack(fill=tk.X, padx=3, pady=3)
device_id_frame = tk.Frame(results_group)
device_id_frame.pack(anchor=tk.NW)
device_id_left_label = tk.Label(device_id_frame)
device_id_left_label["text"] = "Device Identifier:"
device_id_left_label.grid(row=0, column=0, sticky=tk.W, padx=3, pady=3)
self.device_id_label = tk.Label(device_id_frame)
self.device_id_label.grid(row=0, column=1, sticky=tk.W, padx=3, pady=3)
self.flash_led_button = tk.Button(results_group)
self.flash_led_button["text"] = "Flash LED"
self.flash_led_button["command"] = self.flash_led
self.flash_led_button["state"] = "disabled"
self.flash_led_button.pack(padx=3, pady=3)
button_frame = tk.Frame(self)
button_frame.pack(fill=tk.X, side=tk.RIGHT, anchor=tk.SE)
quit_button = tk.Button(button_frame)
quit_button["text"] = "Quit"
quit_button["command"] = self.master.destroy
quit_button.grid(row=0, column=1, padx=3, pady=3)
# Start the example if this module is being run
if __name__ == "__main__":
# Start the example
DaqDevDiscovery01(master=tk.Tk()).mainloop() | ul.release_daq_device(self.board_num)
self.device_created = False | random_line_split |
DaqDevDiscovery01.py | """
File: DaqDevDiscovery01.py
Library Call Demonstrated: mcculw.ul.get_daq_device_inventory()
mcculw.ul.create_daq_device()
mcculw.ul.release_daq_device()
Purpose: Discovers DAQ devices and assigns board number to
the detected devices.
Demonstration: Displays the detected DAQ devices and flashes the
LED of the selected device.
Other Library Calls: mcculw.ul.ignore_instacal()
mcculw.ul.flash_led()
"""
from __future__ import absolute_import, division, print_function
from builtins import * # @UnusedWildImport
import tkinter as tk
from tkinter import StringVar
from tkinter.ttk import Combobox # @UnresolvedImport
from mcculw import ul
from mcculw.enums import InterfaceType
from mcculw.ul import ULError
try:
from ui_examples_util import UIExample, show_ul_error
except ImportError:
from .ui_examples_util import UIExample, show_ul_error
class DaqDevDiscovery01(UIExample):
def __init__(self, master):
super(DaqDevDiscovery01, self).__init__(master)
self.board_num = 0
self.device_created = False
# Tell the UL to ignore any boards configured in InstaCal
ul.ignore_instacal()
self.create_widgets()
def discover_devices(self):
self.inventory = ul.get_daq_device_inventory(InterfaceType.ANY)
if len(self.inventory) > 0:
combobox_values = []
for device in self.inventory:
combobox_values.append(str(device))
self.devices_combobox["values"] = combobox_values
self.devices_combobox.current(0)
self.status_label["text"] = (str(len(self.inventory))
+ " DAQ Device(s) Discovered")
self.devices_combobox["state"] = "readonly"
self.flash_led_button["state"] = "normal"
else:
self.devices_combobox["values"] = [""]
self.devices_combobox.current(0)
self.status_label["text"] = "No Devices Discovered"
self.devices_combobox["state"] = "disabled"
self.flash_led_button["state"] = "disabled"
def | (self):
try:
# Flash the device LED
ul.flash_led(self.board_num)
except ULError as e:
show_ul_error(e)
def selected_device_changed(self, *args): # @UnusedVariable
selected_index = self.devices_combobox.current()
inventory_count = len(self.inventory)
if self.device_created:
# Release any previously configured DAQ device from the UL.
ul.release_daq_device(self.board_num)
self.device_created = False
if inventory_count > 0 and selected_index < inventory_count:
descriptor = self.inventory[selected_index]
# Update the device ID label
self.device_id_label["text"] = descriptor.unique_id
# Create the DAQ device from the descriptor
# For performance reasons, it is not recommended to create
# and release the device every time hardware communication is
# required. Instead, create the device once and do not release
# it until no additional library calls will be made for this
# device
ul.create_daq_device(self.board_num, descriptor)
self.device_created = True
def create_widgets(self):
'''Create the tkinter UI'''
main_frame = tk.Frame(self)
main_frame.pack(fill=tk.X, anchor=tk.NW)
discover_button = tk.Button(main_frame)
discover_button["text"] = "Discover DAQ Devices"
discover_button["command"] = self.discover_devices
discover_button.pack(padx=3, pady=3)
self.status_label = tk.Label(main_frame)
self.status_label["text"] = "Status"
self.status_label.pack(anchor=tk.NW, padx=3, pady=3)
results_group = tk.LabelFrame(self, text="Discovered Devices")
results_group.pack(fill=tk.X, anchor=tk.NW, padx=3, pady=3)
self.selected_device_textvar = StringVar()
self.selected_device_textvar.trace('w', self.selected_device_changed)
self.devices_combobox = Combobox(
results_group, textvariable=self.selected_device_textvar)
self.devices_combobox["state"] = "disabled"
self.devices_combobox.pack(fill=tk.X, padx=3, pady=3)
device_id_frame = tk.Frame(results_group)
device_id_frame.pack(anchor=tk.NW)
device_id_left_label = tk.Label(device_id_frame)
device_id_left_label["text"] = "Device Identifier:"
device_id_left_label.grid(row=0, column=0, sticky=tk.W, padx=3, pady=3)
self.device_id_label = tk.Label(device_id_frame)
self.device_id_label.grid(row=0, column=1, sticky=tk.W, padx=3, pady=3)
self.flash_led_button = tk.Button(results_group)
self.flash_led_button["text"] = "Flash LED"
self.flash_led_button["command"] = self.flash_led
self.flash_led_button["state"] = "disabled"
self.flash_led_button.pack(padx=3, pady=3)
button_frame = tk.Frame(self)
button_frame.pack(fill=tk.X, side=tk.RIGHT, anchor=tk.SE)
quit_button = tk.Button(button_frame)
quit_button["text"] = "Quit"
quit_button["command"] = self.master.destroy
quit_button.grid(row=0, column=1, padx=3, pady=3)
# Start the example if this module is being run
if __name__ == "__main__":
# Start the example
DaqDevDiscovery01(master=tk.Tk()).mainloop()
| flash_led | identifier_name |
DaqDevDiscovery01.py | """
File: DaqDevDiscovery01.py
Library Call Demonstrated: mcculw.ul.get_daq_device_inventory()
mcculw.ul.create_daq_device()
mcculw.ul.release_daq_device()
Purpose: Discovers DAQ devices and assigns board number to
the detected devices.
Demonstration: Displays the detected DAQ devices and flashes the
LED of the selected device.
Other Library Calls: mcculw.ul.ignore_instacal()
mcculw.ul.flash_led()
"""
from __future__ import absolute_import, division, print_function
from builtins import * # @UnusedWildImport
import tkinter as tk
from tkinter import StringVar
from tkinter.ttk import Combobox # @UnresolvedImport
from mcculw import ul
from mcculw.enums import InterfaceType
from mcculw.ul import ULError
try:
from ui_examples_util import UIExample, show_ul_error
except ImportError:
from .ui_examples_util import UIExample, show_ul_error
class DaqDevDiscovery01(UIExample):
|
# Start the example if this module is being run
if __name__ == "__main__":
# Start the example
DaqDevDiscovery01(master=tk.Tk()).mainloop()
| def __init__(self, master):
super(DaqDevDiscovery01, self).__init__(master)
self.board_num = 0
self.device_created = False
# Tell the UL to ignore any boards configured in InstaCal
ul.ignore_instacal()
self.create_widgets()
def discover_devices(self):
self.inventory = ul.get_daq_device_inventory(InterfaceType.ANY)
if len(self.inventory) > 0:
combobox_values = []
for device in self.inventory:
combobox_values.append(str(device))
self.devices_combobox["values"] = combobox_values
self.devices_combobox.current(0)
self.status_label["text"] = (str(len(self.inventory))
+ " DAQ Device(s) Discovered")
self.devices_combobox["state"] = "readonly"
self.flash_led_button["state"] = "normal"
else:
self.devices_combobox["values"] = [""]
self.devices_combobox.current(0)
self.status_label["text"] = "No Devices Discovered"
self.devices_combobox["state"] = "disabled"
self.flash_led_button["state"] = "disabled"
def flash_led(self):
try:
# Flash the device LED
ul.flash_led(self.board_num)
except ULError as e:
show_ul_error(e)
def selected_device_changed(self, *args): # @UnusedVariable
selected_index = self.devices_combobox.current()
inventory_count = len(self.inventory)
if self.device_created:
# Release any previously configured DAQ device from the UL.
ul.release_daq_device(self.board_num)
self.device_created = False
if inventory_count > 0 and selected_index < inventory_count:
descriptor = self.inventory[selected_index]
# Update the device ID label
self.device_id_label["text"] = descriptor.unique_id
# Create the DAQ device from the descriptor
# For performance reasons, it is not recommended to create
# and release the device every time hardware communication is
# required. Instead, create the device once and do not release
# it until no additional library calls will be made for this
# device
ul.create_daq_device(self.board_num, descriptor)
self.device_created = True
def create_widgets(self):
'''Create the tkinter UI'''
main_frame = tk.Frame(self)
main_frame.pack(fill=tk.X, anchor=tk.NW)
discover_button = tk.Button(main_frame)
discover_button["text"] = "Discover DAQ Devices"
discover_button["command"] = self.discover_devices
discover_button.pack(padx=3, pady=3)
self.status_label = tk.Label(main_frame)
self.status_label["text"] = "Status"
self.status_label.pack(anchor=tk.NW, padx=3, pady=3)
results_group = tk.LabelFrame(self, text="Discovered Devices")
results_group.pack(fill=tk.X, anchor=tk.NW, padx=3, pady=3)
self.selected_device_textvar = StringVar()
self.selected_device_textvar.trace('w', self.selected_device_changed)
self.devices_combobox = Combobox(
results_group, textvariable=self.selected_device_textvar)
self.devices_combobox["state"] = "disabled"
self.devices_combobox.pack(fill=tk.X, padx=3, pady=3)
device_id_frame = tk.Frame(results_group)
device_id_frame.pack(anchor=tk.NW)
device_id_left_label = tk.Label(device_id_frame)
device_id_left_label["text"] = "Device Identifier:"
device_id_left_label.grid(row=0, column=0, sticky=tk.W, padx=3, pady=3)
self.device_id_label = tk.Label(device_id_frame)
self.device_id_label.grid(row=0, column=1, sticky=tk.W, padx=3, pady=3)
self.flash_led_button = tk.Button(results_group)
self.flash_led_button["text"] = "Flash LED"
self.flash_led_button["command"] = self.flash_led
self.flash_led_button["state"] = "disabled"
self.flash_led_button.pack(padx=3, pady=3)
button_frame = tk.Frame(self)
button_frame.pack(fill=tk.X, side=tk.RIGHT, anchor=tk.SE)
quit_button = tk.Button(button_frame)
quit_button["text"] = "Quit"
quit_button["command"] = self.master.destroy
quit_button.grid(row=0, column=1, padx=3, pady=3) | identifier_body |
urlmap.py | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import urllib2
from oslo_log import log
import paste.urlmap
from manila.api.openstack import wsgi
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(
r';\s*([^\s;=]+|%s)\s*'
r'(?:=\s*([^;]+|%s))?\s*' %
(_quoted_string_re, _quoted_string_re))
LOG = log.getLogger(__name__)
def unquote_header_value(value):
"""Unquotes a header value.
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
return value
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in urllib2.parse_http_list(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
def parse_options_header(value):
"""Parse header into content type and options.
Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('Content-Type: text/html; mimetype=text/html')
('Content-Type:', {'mimetype': 'text/html'})
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value)
yield key, value
if not value:
return '', {}
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
class Accept(object):
def __init__(self, value):
self._content_types = [parse_options_header(v) for v in
parse_list_header(value)]
def best_match(self, supported_content_types):
# FIXME: Should we have a more sophisticated matching algorithm that
# takes into account the version as well?
best_quality = -1
best_content_type = None
best_params = {}
best_match = '*/*'
for content_type in supported_content_types:
for content_mask, params in self._content_types:
try:
quality = float(params.get('q', 1))
except ValueError:
continue
if quality < best_quality:
continue
elif best_quality == quality:
if best_match.count('*') <= content_mask.count('*'):
continue
if self._match_mask(content_mask, content_type):
best_quality = quality
best_content_type = content_type
best_params = params
best_match = content_mask
return best_content_type, best_params
def content_type_params(self, best_content_type):
"""Find parameters in Accept header for given content type."""
for content_type, params in self._content_types:
if best_content_type == content_type:
return params
return {}
def _match_mask(self, mask, content_type):
if '*' not in mask:
return content_type == mask
if mask == '*/*':
return True
mask_major = mask[:-2]
content_type_major = content_type.split('/', 1)[0]
return content_type_major == mask_major
def urlmap_factory(loader, global_conf, **local_conf):
if 'not_found_app' in local_conf:
not_found_app = local_conf.pop('not_found_app')
else:
not_found_app = global_conf.get('not_found_app')
if not_found_app:
not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
urlmap = URLMap(not_found_app=not_found_app)
for path, app_name in local_conf.items():
path = paste.urlmap.parse_path_expression(path)
app = loader.get_app(app_name, global_conf=global_conf)
urlmap[path] = app
return urlmap
class URLMap(paste.urlmap.URLMap):
def _match(self, host, port, path_info):
"""Find longest match for a given URL path."""
for (domain, app_url), app in self.applications:
if domain and domain != host and domain != host + ':' + port:
continue
if (path_info == app_url or path_info.startswith(app_url + '/')):
return app, app_url
return None, None
def | (self, app, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
return app(environ, start_response)
return wrap
def _munge_path(self, app, path_info, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
environ['PATH_INFO'] = path_info[len(app_url):]
return app(environ, start_response)
return wrap
def _path_strategy(self, host, port, path_info):
"""Check path suffix for MIME type and path prefix for API version."""
mime_type = app = app_url = None
parts = path_info.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in wsgi.SUPPORTED_CONTENT_TYPES:
mime_type = possible_type
parts = path_info.split('/')
if len(parts) > 1:
possible_app, possible_app_url = self._match(host, port, path_info)
# Don't use prefix if it ends up matching default
if possible_app and possible_app_url:
app_url = possible_app_url
app = self._munge_path(possible_app, path_info, app_url)
return mime_type, app, app_url
def _content_type_strategy(self, host, port, environ):
"""Check Content-Type header for API version."""
app = None
params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return app
def _accept_strategy(self, host, port, environ, supported_content_types):
"""Check Accept header for best matching MIME type and API version."""
accept = Accept(environ.get('HTTP_ACCEPT', ''))
app = None
# Find the best match in the Accept header
mime_type, params = accept.best_match(supported_content_types)
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return mime_type, app
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
if ':' in host:
host, port = host.split(':', 1)
else:
if environ['wsgi.url_scheme'] == 'http':
port = '80'
else:
port = '443'
path_info = environ['PATH_INFO']
path_info = self.normalize_url(path_info, False)[1]
# The API version is determined in one of three ways:
# 1) URL path prefix (eg /v1.1/tenant/servers/detail)
# 2) Content-Type header (eg application/json;version=1.1)
# 3) Accept header (eg application/json;q=0.8;version=1.1)
# Manila supports only application/json as MIME type for the responses.
supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)
mime_type, app, app_url = self._path_strategy(host, port, path_info)
if not app:
app = self._content_type_strategy(host, port, environ)
if not mime_type or not app:
possible_mime_type, possible_app = self._accept_strategy(
host, port, environ, supported_content_types)
if possible_mime_type and not mime_type:
mime_type = possible_mime_type
if possible_app and not app:
app = possible_app
if not mime_type:
mime_type = 'application/json'
if not app:
# Didn't match a particular version, probably matches default
app, app_url = self._match(host, port, path_info)
if app:
app = self._munge_path(app, path_info, app_url)
if app:
environ['manila.best_content_type'] = mime_type
return app(environ, start_response)
environ['paste.urlmap_object'] = self
return self.not_found_application(environ, start_response)
| _set_script_name | identifier_name |
urlmap.py | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import urllib2
from oslo_log import log
import paste.urlmap
from manila.api.openstack import wsgi
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(
r';\s*([^\s;=]+|%s)\s*'
r'(?:=\s*([^;]+|%s))?\s*' %
(_quoted_string_re, _quoted_string_re))
LOG = log.getLogger(__name__)
def unquote_header_value(value):
"""Unquotes a header value.
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
return value
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in urllib2.parse_http_list(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
| """Parse header into content type and options.
Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('Content-Type: text/html; mimetype=text/html')
('Content-Type:', {'mimetype': 'text/html'})
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value)
yield key, value
if not value:
return '', {}
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
class Accept(object):
def __init__(self, value):
self._content_types = [parse_options_header(v) for v in
parse_list_header(value)]
def best_match(self, supported_content_types):
# FIXME: Should we have a more sophisticated matching algorithm that
# takes into account the version as well?
best_quality = -1
best_content_type = None
best_params = {}
best_match = '*/*'
for content_type in supported_content_types:
for content_mask, params in self._content_types:
try:
quality = float(params.get('q', 1))
except ValueError:
continue
if quality < best_quality:
continue
elif best_quality == quality:
if best_match.count('*') <= content_mask.count('*'):
continue
if self._match_mask(content_mask, content_type):
best_quality = quality
best_content_type = content_type
best_params = params
best_match = content_mask
return best_content_type, best_params
def content_type_params(self, best_content_type):
"""Find parameters in Accept header for given content type."""
for content_type, params in self._content_types:
if best_content_type == content_type:
return params
return {}
def _match_mask(self, mask, content_type):
if '*' not in mask:
return content_type == mask
if mask == '*/*':
return True
mask_major = mask[:-2]
content_type_major = content_type.split('/', 1)[0]
return content_type_major == mask_major
def urlmap_factory(loader, global_conf, **local_conf):
if 'not_found_app' in local_conf:
not_found_app = local_conf.pop('not_found_app')
else:
not_found_app = global_conf.get('not_found_app')
if not_found_app:
not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
urlmap = URLMap(not_found_app=not_found_app)
for path, app_name in local_conf.items():
path = paste.urlmap.parse_path_expression(path)
app = loader.get_app(app_name, global_conf=global_conf)
urlmap[path] = app
return urlmap
class URLMap(paste.urlmap.URLMap):
def _match(self, host, port, path_info):
"""Find longest match for a given URL path."""
for (domain, app_url), app in self.applications:
if domain and domain != host and domain != host + ':' + port:
continue
if (path_info == app_url or path_info.startswith(app_url + '/')):
return app, app_url
return None, None
def _set_script_name(self, app, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
return app(environ, start_response)
return wrap
def _munge_path(self, app, path_info, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
environ['PATH_INFO'] = path_info[len(app_url):]
return app(environ, start_response)
return wrap
def _path_strategy(self, host, port, path_info):
"""Check path suffix for MIME type and path prefix for API version."""
mime_type = app = app_url = None
parts = path_info.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in wsgi.SUPPORTED_CONTENT_TYPES:
mime_type = possible_type
parts = path_info.split('/')
if len(parts) > 1:
possible_app, possible_app_url = self._match(host, port, path_info)
# Don't use prefix if it ends up matching default
if possible_app and possible_app_url:
app_url = possible_app_url
app = self._munge_path(possible_app, path_info, app_url)
return mime_type, app, app_url
def _content_type_strategy(self, host, port, environ):
"""Check Content-Type header for API version."""
app = None
params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return app
def _accept_strategy(self, host, port, environ, supported_content_types):
"""Check Accept header for best matching MIME type and API version."""
accept = Accept(environ.get('HTTP_ACCEPT', ''))
app = None
# Find the best match in the Accept header
mime_type, params = accept.best_match(supported_content_types)
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return mime_type, app
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
if ':' in host:
host, port = host.split(':', 1)
else:
if environ['wsgi.url_scheme'] == 'http':
port = '80'
else:
port = '443'
path_info = environ['PATH_INFO']
path_info = self.normalize_url(path_info, False)[1]
# The API version is determined in one of three ways:
# 1) URL path prefix (eg /v1.1/tenant/servers/detail)
# 2) Content-Type header (eg application/json;version=1.1)
# 3) Accept header (eg application/json;q=0.8;version=1.1)
# Manila supports only application/json as MIME type for the responses.
supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)
mime_type, app, app_url = self._path_strategy(host, port, path_info)
if not app:
app = self._content_type_strategy(host, port, environ)
if not mime_type or not app:
possible_mime_type, possible_app = self._accept_strategy(
host, port, environ, supported_content_types)
if possible_mime_type and not mime_type:
mime_type = possible_mime_type
if possible_app and not app:
app = possible_app
if not mime_type:
mime_type = 'application/json'
if not app:
# Didn't match a particular version, probably matches default
app, app_url = self._match(host, port, path_info)
if app:
app = self._munge_path(app, path_info, app_url)
if app:
environ['manila.best_content_type'] = mime_type
return app(environ, start_response)
environ['paste.urlmap_object'] = self
return self.not_found_application(environ, start_response) | def parse_options_header(value): | random_line_split |
urlmap.py | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import urllib2
from oslo_log import log
import paste.urlmap
from manila.api.openstack import wsgi
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(
r';\s*([^\s;=]+|%s)\s*'
r'(?:=\s*([^;]+|%s))?\s*' %
(_quoted_string_re, _quoted_string_re))
LOG = log.getLogger(__name__)
def unquote_header_value(value):
"""Unquotes a header value.
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
return value
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in urllib2.parse_http_list(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
def parse_options_header(value):
"""Parse header into content type and options.
Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('Content-Type: text/html; mimetype=text/html')
('Content-Type:', {'mimetype': 'text/html'})
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value)
yield key, value
if not value:
|
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
class Accept(object):
def __init__(self, value):
self._content_types = [parse_options_header(v) for v in
parse_list_header(value)]
def best_match(self, supported_content_types):
# FIXME: Should we have a more sophisticated matching algorithm that
# takes into account the version as well?
best_quality = -1
best_content_type = None
best_params = {}
best_match = '*/*'
for content_type in supported_content_types:
for content_mask, params in self._content_types:
try:
quality = float(params.get('q', 1))
except ValueError:
continue
if quality < best_quality:
continue
elif best_quality == quality:
if best_match.count('*') <= content_mask.count('*'):
continue
if self._match_mask(content_mask, content_type):
best_quality = quality
best_content_type = content_type
best_params = params
best_match = content_mask
return best_content_type, best_params
def content_type_params(self, best_content_type):
"""Find parameters in Accept header for given content type."""
for content_type, params in self._content_types:
if best_content_type == content_type:
return params
return {}
def _match_mask(self, mask, content_type):
if '*' not in mask:
return content_type == mask
if mask == '*/*':
return True
mask_major = mask[:-2]
content_type_major = content_type.split('/', 1)[0]
return content_type_major == mask_major
def urlmap_factory(loader, global_conf, **local_conf):
if 'not_found_app' in local_conf:
not_found_app = local_conf.pop('not_found_app')
else:
not_found_app = global_conf.get('not_found_app')
if not_found_app:
not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
urlmap = URLMap(not_found_app=not_found_app)
for path, app_name in local_conf.items():
path = paste.urlmap.parse_path_expression(path)
app = loader.get_app(app_name, global_conf=global_conf)
urlmap[path] = app
return urlmap
class URLMap(paste.urlmap.URLMap):
def _match(self, host, port, path_info):
"""Find longest match for a given URL path."""
for (domain, app_url), app in self.applications:
if domain and domain != host and domain != host + ':' + port:
continue
if (path_info == app_url or path_info.startswith(app_url + '/')):
return app, app_url
return None, None
def _set_script_name(self, app, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
return app(environ, start_response)
return wrap
def _munge_path(self, app, path_info, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
environ['PATH_INFO'] = path_info[len(app_url):]
return app(environ, start_response)
return wrap
def _path_strategy(self, host, port, path_info):
"""Check path suffix for MIME type and path prefix for API version."""
mime_type = app = app_url = None
parts = path_info.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in wsgi.SUPPORTED_CONTENT_TYPES:
mime_type = possible_type
parts = path_info.split('/')
if len(parts) > 1:
possible_app, possible_app_url = self._match(host, port, path_info)
# Don't use prefix if it ends up matching default
if possible_app and possible_app_url:
app_url = possible_app_url
app = self._munge_path(possible_app, path_info, app_url)
return mime_type, app, app_url
def _content_type_strategy(self, host, port, environ):
"""Check Content-Type header for API version."""
app = None
params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return app
def _accept_strategy(self, host, port, environ, supported_content_types):
"""Check Accept header for best matching MIME type and API version."""
accept = Accept(environ.get('HTTP_ACCEPT', ''))
app = None
# Find the best match in the Accept header
mime_type, params = accept.best_match(supported_content_types)
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return mime_type, app
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
if ':' in host:
host, port = host.split(':', 1)
else:
if environ['wsgi.url_scheme'] == 'http':
port = '80'
else:
port = '443'
path_info = environ['PATH_INFO']
path_info = self.normalize_url(path_info, False)[1]
# The API version is determined in one of three ways:
# 1) URL path prefix (eg /v1.1/tenant/servers/detail)
# 2) Content-Type header (eg application/json;version=1.1)
# 3) Accept header (eg application/json;q=0.8;version=1.1)
# Manila supports only application/json as MIME type for the responses.
supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)
mime_type, app, app_url = self._path_strategy(host, port, path_info)
if not app:
app = self._content_type_strategy(host, port, environ)
if not mime_type or not app:
possible_mime_type, possible_app = self._accept_strategy(
host, port, environ, supported_content_types)
if possible_mime_type and not mime_type:
mime_type = possible_mime_type
if possible_app and not app:
app = possible_app
if not mime_type:
mime_type = 'application/json'
if not app:
# Didn't match a particular version, probably matches default
app, app_url = self._match(host, port, path_info)
if app:
app = self._munge_path(app, path_info, app_url)
if app:
environ['manila.best_content_type'] = mime_type
return app(environ, start_response)
environ['paste.urlmap_object'] = self
return self.not_found_application(environ, start_response)
| return '', {} | conditional_block |
urlmap.py | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import urllib2
from oslo_log import log
import paste.urlmap
from manila.api.openstack import wsgi
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'
_option_header_piece_re = re.compile(
r';\s*([^\s;=]+|%s)\s*'
r'(?:=\s*([^;]+|%s))?\s*' %
(_quoted_string_re, _quoted_string_re))
LOG = log.getLogger(__name__)
def unquote_header_value(value):
"""Unquotes a header value.
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
return value
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Quotes are removed automatically after parsing.
The return value is a standard :class:`list`:
>>> parse_list_header('token, "quoted value"')
['token', 'quoted value']
:param value: a string with a list header.
:return: :class:`list`
"""
result = []
for item in urllib2.parse_http_list(value):
if item[:1] == item[-1:] == '"':
item = unquote_header_value(item[1:-1])
result.append(item)
return result
def parse_options_header(value):
"""Parse header into content type and options.
Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('Content-Type: text/html; mimetype=text/html')
('Content-Type:', {'mimetype': 'text/html'})
:param value: the header to parse.
:return: (str, options)
"""
def _tokenize(string):
for match in _option_header_piece_re.finditer(string):
key, value = match.groups()
key = unquote_header_value(key)
if value is not None:
value = unquote_header_value(value)
yield key, value
if not value:
return '', {}
parts = _tokenize(';' + value)
name = next(parts)[0]
extra = dict(parts)
return name, extra
class Accept(object):
def __init__(self, value):
self._content_types = [parse_options_header(v) for v in
parse_list_header(value)]
def best_match(self, supported_content_types):
# FIXME: Should we have a more sophisticated matching algorithm that
# takes into account the version as well?
best_quality = -1
best_content_type = None
best_params = {}
best_match = '*/*'
for content_type in supported_content_types:
for content_mask, params in self._content_types:
try:
quality = float(params.get('q', 1))
except ValueError:
continue
if quality < best_quality:
continue
elif best_quality == quality:
if best_match.count('*') <= content_mask.count('*'):
continue
if self._match_mask(content_mask, content_type):
best_quality = quality
best_content_type = content_type
best_params = params
best_match = content_mask
return best_content_type, best_params
def content_type_params(self, best_content_type):
"""Find parameters in Accept header for given content type."""
for content_type, params in self._content_types:
if best_content_type == content_type:
return params
return {}
def _match_mask(self, mask, content_type):
if '*' not in mask:
return content_type == mask
if mask == '*/*':
return True
mask_major = mask[:-2]
content_type_major = content_type.split('/', 1)[0]
return content_type_major == mask_major
def urlmap_factory(loader, global_conf, **local_conf):
if 'not_found_app' in local_conf:
not_found_app = local_conf.pop('not_found_app')
else:
not_found_app = global_conf.get('not_found_app')
if not_found_app:
not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
urlmap = URLMap(not_found_app=not_found_app)
for path, app_name in local_conf.items():
path = paste.urlmap.parse_path_expression(path)
app = loader.get_app(app_name, global_conf=global_conf)
urlmap[path] = app
return urlmap
class URLMap(paste.urlmap.URLMap):
def _match(self, host, port, path_info):
"""Find longest match for a given URL path."""
for (domain, app_url), app in self.applications:
if domain and domain != host and domain != host + ':' + port:
continue
if (path_info == app_url or path_info.startswith(app_url + '/')):
return app, app_url
return None, None
def _set_script_name(self, app, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
return app(environ, start_response)
return wrap
def _munge_path(self, app, path_info, app_url):
def wrap(environ, start_response):
environ['SCRIPT_NAME'] += app_url
environ['PATH_INFO'] = path_info[len(app_url):]
return app(environ, start_response)
return wrap
def _path_strategy(self, host, port, path_info):
|
def _content_type_strategy(self, host, port, environ):
"""Check Content-Type header for API version."""
app = None
params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return app
def _accept_strategy(self, host, port, environ, supported_content_types):
"""Check Accept header for best matching MIME type and API version."""
accept = Accept(environ.get('HTTP_ACCEPT', ''))
app = None
# Find the best match in the Accept header
mime_type, params = accept.best_match(supported_content_types)
if 'version' in params:
app, app_url = self._match(host, port, '/v' + params['version'])
if app:
app = self._set_script_name(app, app_url)
return mime_type, app
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
if ':' in host:
host, port = host.split(':', 1)
else:
if environ['wsgi.url_scheme'] == 'http':
port = '80'
else:
port = '443'
path_info = environ['PATH_INFO']
path_info = self.normalize_url(path_info, False)[1]
# The API version is determined in one of three ways:
# 1) URL path prefix (eg /v1.1/tenant/servers/detail)
# 2) Content-Type header (eg application/json;version=1.1)
# 3) Accept header (eg application/json;q=0.8;version=1.1)
# Manila supports only application/json as MIME type for the responses.
supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)
mime_type, app, app_url = self._path_strategy(host, port, path_info)
if not app:
app = self._content_type_strategy(host, port, environ)
if not mime_type or not app:
possible_mime_type, possible_app = self._accept_strategy(
host, port, environ, supported_content_types)
if possible_mime_type and not mime_type:
mime_type = possible_mime_type
if possible_app and not app:
app = possible_app
if not mime_type:
mime_type = 'application/json'
if not app:
# Didn't match a particular version, probably matches default
app, app_url = self._match(host, port, path_info)
if app:
app = self._munge_path(app, path_info, app_url)
if app:
environ['manila.best_content_type'] = mime_type
return app(environ, start_response)
environ['paste.urlmap_object'] = self
return self.not_found_application(environ, start_response)
| """Check path suffix for MIME type and path prefix for API version."""
mime_type = app = app_url = None
parts = path_info.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in wsgi.SUPPORTED_CONTENT_TYPES:
mime_type = possible_type
parts = path_info.split('/')
if len(parts) > 1:
possible_app, possible_app_url = self._match(host, port, path_info)
# Don't use prefix if it ends up matching default
if possible_app and possible_app_url:
app_url = possible_app_url
app = self._munge_path(possible_app, path_info, app_url)
return mime_type, app, app_url | identifier_body |
run-ci-e2e-tests.js | /**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';
/**
* This script tests that React Native end to end installation/bootstrap works for different platforms
* Available arguments:
* --ios - 'react-native init' and check iOS app doesn't redbox
* --tvos - 'react-native init' and check tvOS app doesn't redbox
* --android - 'react-native init' and check Android app doesn't redbox
* --js - 'react-native init' and only check the packager returns a bundle
* --skip-cli-install - to skip react-native-cli global installation (for local debugging)
* --retries [num] - how many times to retry possible flaky commands: yarn add and running tests, default 1
*/
/*eslint-disable no-undef */
require('shelljs/global');
const spawn = require('child_process').spawn;
const argv = require('yargs').argv;
const path = require('path');
const SCRIPTS = __dirname;
const ROOT = path.normalize(path.join(__dirname, '..'));
const tryExecNTimes = require('./try-n-times');
const TEMP = exec('mktemp -d /tmp/react-native-XXXXXXXX').stdout.trim();
// To make sure we actually installed the local version
// of react-native, we will create a temp file inside the template
// and check that it exists after `react-native init
const MARKER_IOS = exec(`mktemp ${ROOT}/local-cli/templates/HelloWorld/ios/HelloWorld/XXXXXXXX`).stdout.trim();
const MARKER_ANDROID = exec(`mktemp ${ROOT}/local-cli/templates/HelloWorld/android/XXXXXXXX`).stdout.trim();
const numberOfRetries = argv.retries || 1;
let SERVER_PID;
let APPIUM_PID;
let exitCode;
try {
// install CLI
cd('react-native-cli');
exec('yarn pack');
const CLI_PACKAGE = path.join(ROOT, 'react-native-cli', 'react-native-cli-*.tgz');
cd('..');
if (!argv['skip-cli-install']) {
if (exec(`sudo yarn global add ${CLI_PACKAGE}`).code) {
echo('Could not install react-native-cli globally.');
echo('Run with --skip-cli-install to skip this step');
exitCode = 1;
throw Error(exitCode);
}
}
if (argv.android) {
if (exec('./gradlew :ReactAndroid:installArchives -Pjobs=1 -Dorg.gradle.jvmargs="-Xmx512m -XX:+HeapDumpOnOutOfMemoryError"').code) {
echo('Failed to compile Android binaries');
exitCode = 1;
throw Error(exitCode);
}
}
if (exec('yarn pack').code) {
echo('Failed to pack react-native');
exitCode = 1;
throw Error(exitCode);
}
const PACKAGE = path.join(ROOT, 'react-native-*.tgz');
cd(TEMP);
if (tryExecNTimes(
() => {
exec('sleep 10s');
return exec(`react-native init EndToEndTest --version ${PACKAGE}`).code;
},
numberOfRetries,
() => rm('-rf', 'EndToEndTest'))) {
echo('Failed to execute react-native init');
echo('Most common reason is npm registry connectivity, try again');
exitCode = 1;
throw Error(exitCode);
}
cd('EndToEndTest');
if (argv.android) {
echo('Running an Android e2e test');
echo('Installing e2e framework');
if (tryExecNTimes(
() => exec('yarn add --dev [email protected] [email protected] [email protected] [email protected] [email protected]', { silent: true }).code,
numberOfRetries)) |
cp(`${SCRIPTS}/android-e2e-test.js`, 'android-e2e-test.js');
cd('android');
echo('Downloading Maven deps');
exec('./gradlew :app:copyDownloadableDepsToLibs');
// Make sure we installed local version of react-native
if (!test('-e', path.basename(MARKER_ANDROID))) {
echo('Android marker was not found, react native init command failed?');
exitCode = 1;
throw Error(exitCode);
}
cd('..');
exec('keytool -genkey -v -keystore android/keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"');
echo(`Starting appium server, ${APPIUM_PID}`);
const appiumProcess = spawn('node', ['./node_modules/.bin/appium']);
APPIUM_PID = appiumProcess.pid;
echo('Building the app');
if (exec('buck build android/app').code) {
echo('could not execute Buck build, is it installed and in PATH?');
exitCode = 1;
throw Error(exitCode);
}
echo(`Starting packager server, ${SERVER_PID}`);
// shelljs exec('', {async: true}) does not emit stdout events, so we rely on good old spawn
const packagerProcess = spawn('yarn', ['start', '--max-workers 1'], {
env: process.env
});
SERVER_PID = packagerProcess.pid;
// wait a bit to allow packager to startup
exec('sleep 15s');
echo('Executing android e2e test');
if (tryExecNTimes(
() => {
exec('sleep 10s');
return exec('node node_modules/.bin/_mocha android-e2e-test.js').code;
},
numberOfRetries)) {
echo('Failed to run Android e2e tests');
echo('Most likely the code is broken');
exitCode = 1;
throw Error(exitCode);
}
}
if (argv.ios || argv.tvos) {
var iosTestType = (argv.tvos ? 'tvOS' : 'iOS');
echo('Running the ' + iosTestType + 'app');
cd('ios');
// Make sure we installed local version of react-native
if (!test('-e', path.join('EndToEndTest', path.basename(MARKER_IOS)))) {
echo('iOS marker was not found, `react-native init` command failed?');
exitCode = 1;
throw Error(exitCode);
}
// shelljs exec('', {async: true}) does not emit stdout events, so we rely on good old spawn
const packagerEnv = Object.create(process.env);
packagerEnv.REACT_NATIVE_MAX_WORKERS = 1;
const packagerProcess = spawn('yarn', ['start', '--nonPersistent'],
{
stdio: 'inherit',
env: packagerEnv
});
SERVER_PID = packagerProcess.pid;
exec('sleep 15s');
// prepare cache to reduce chances of possible red screen "Can't fibd variable __fbBatchedBridge..."
exec('response=$(curl --write-out %{http_code} --silent --output /dev/null localhost:8081/index.bundle?platform=ios&dev=true)');
echo(`Starting packager server, ${SERVER_PID}`);
echo('Executing ' + iosTestType + ' e2e test');
if (tryExecNTimes(
() => {
exec('sleep 10s');
if (argv.tvos) {
return exec('xcodebuild -destination "platform=tvOS Simulator,name=Apple TV 1080p,OS=10.0" -scheme EndToEndTest-tvOS -sdk appletvsimulator test | xcpretty && exit ${PIPESTATUS[0]}').code;
} else {
return exec('xcodebuild -destination "platform=iOS Simulator,name=iPhone 5s,OS=10.3.1" -scheme EndToEndTest -sdk iphonesimulator test | xcpretty && exit ${PIPESTATUS[0]}').code;
}
},
numberOfRetries)) {
echo('Failed to run ' + iosTestType + ' e2e tests');
echo('Most likely the code is broken');
exitCode = 1;
throw Error(exitCode);
}
cd('..');
}
if (argv.js) {
// Check the packager produces a bundle (doesn't throw an error)
if (exec('react-native bundle --max-workers 1 --platform android --dev true --entry-file index.js --bundle-output android-bundle.js').code) {
echo('Could not build Android bundle');
exitCode = 1;
throw Error(exitCode);
}
if (exec('react-native --max-workers 1 bundle --platform ios --dev true --entry-file index.js --bundle-output ios-bundle.js').code) {
echo('Could not build iOS bundle');
exitCode = 1;
throw Error(exitCode);
}
if (exec(`${ROOT}/node_modules/.bin/flow check`).code) {
echo('Flow check does not pass');
exitCode = 1;
throw Error(exitCode);
}
if (exec('yarn test').code) {
echo('Jest test failure');
exitCode = 1;
throw Error(exitCode);
}
}
exitCode = 0;
} finally {
cd(ROOT);
rm(MARKER_IOS);
rm(MARKER_ANDROID);
if (SERVER_PID) {
echo(`Killing packager ${SERVER_PID}`);
exec(`kill -9 ${SERVER_PID}`);
// this is quite drastic but packager starts a daemon that we can't kill by killing the parent process
// it will be fixed in April (quote David Aurelio), so until then we will kill the zombie by the port number
exec("lsof -i tcp:8081 | awk 'NR!=1 {print $2}' | xargs kill");
}
if (APPIUM_PID) {
echo(`Killing appium ${APPIUM_PID}`);
exec(`kill -9 ${APPIUM_PID}`);
}
}
exit(exitCode);
/*eslint-enable no-undef */
| {
echo('Failed to install appium');
echo('Most common reason is npm registry connectivity, try again');
exitCode = 1;
throw Error(exitCode);
} | conditional_block |
run-ci-e2e-tests.js | /**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';
/**
* This script tests that React Native end to end installation/bootstrap works for different platforms
* Available arguments:
* --ios - 'react-native init' and check iOS app doesn't redbox
* --tvos - 'react-native init' and check tvOS app doesn't redbox
* --android - 'react-native init' and check Android app doesn't redbox
* --js - 'react-native init' and only check the packager returns a bundle
* --skip-cli-install - to skip react-native-cli global installation (for local debugging)
* --retries [num] - how many times to retry possible flaky commands: yarn add and running tests, default 1
*/
/*eslint-disable no-undef */
require('shelljs/global');
const spawn = require('child_process').spawn;
const argv = require('yargs').argv;
const path = require('path');
const SCRIPTS = __dirname;
const ROOT = path.normalize(path.join(__dirname, '..'));
const tryExecNTimes = require('./try-n-times');
const TEMP = exec('mktemp -d /tmp/react-native-XXXXXXXX').stdout.trim();
// To make sure we actually installed the local version
// of react-native, we will create a temp file inside the template
// and check that it exists after `react-native init
const MARKER_IOS = exec(`mktemp ${ROOT}/local-cli/templates/HelloWorld/ios/HelloWorld/XXXXXXXX`).stdout.trim();
const MARKER_ANDROID = exec(`mktemp ${ROOT}/local-cli/templates/HelloWorld/android/XXXXXXXX`).stdout.trim();
const numberOfRetries = argv.retries || 1;
let SERVER_PID;
let APPIUM_PID;
let exitCode;
try {
// install CLI
cd('react-native-cli');
exec('yarn pack');
const CLI_PACKAGE = path.join(ROOT, 'react-native-cli', 'react-native-cli-*.tgz');
cd('..');
if (!argv['skip-cli-install']) {
if (exec(`sudo yarn global add ${CLI_PACKAGE}`).code) {
echo('Could not install react-native-cli globally.');
echo('Run with --skip-cli-install to skip this step');
exitCode = 1;
throw Error(exitCode);
}
}
if (argv.android) {
if (exec('./gradlew :ReactAndroid:installArchives -Pjobs=1 -Dorg.gradle.jvmargs="-Xmx512m -XX:+HeapDumpOnOutOfMemoryError"').code) {
echo('Failed to compile Android binaries');
exitCode = 1;
throw Error(exitCode);
}
}
if (exec('yarn pack').code) {
echo('Failed to pack react-native');
exitCode = 1;
throw Error(exitCode);
}
const PACKAGE = path.join(ROOT, 'react-native-*.tgz');
cd(TEMP);
if (tryExecNTimes(
() => {
exec('sleep 10s');
return exec(`react-native init EndToEndTest --version ${PACKAGE}`).code;
},
numberOfRetries,
() => rm('-rf', 'EndToEndTest'))) {
echo('Failed to execute react-native init');
echo('Most common reason is npm registry connectivity, try again');
exitCode = 1;
throw Error(exitCode);
}
cd('EndToEndTest');
if (argv.android) {
echo('Running an Android e2e test');
echo('Installing e2e framework');
if (tryExecNTimes(
() => exec('yarn add --dev [email protected] [email protected] [email protected] [email protected] [email protected]', { silent: true }).code,
numberOfRetries)) {
echo('Failed to install appium');
echo('Most common reason is npm registry connectivity, try again');
exitCode = 1;
throw Error(exitCode);
}
cp(`${SCRIPTS}/android-e2e-test.js`, 'android-e2e-test.js');
cd('android');
echo('Downloading Maven deps');
exec('./gradlew :app:copyDownloadableDepsToLibs');
// Make sure we installed local version of react-native
if (!test('-e', path.basename(MARKER_ANDROID))) {
echo('Android marker was not found, react native init command failed?');
exitCode = 1;
throw Error(exitCode);
}
cd('..');
exec('keytool -genkey -v -keystore android/keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"');
echo(`Starting appium server, ${APPIUM_PID}`);
const appiumProcess = spawn('node', ['./node_modules/.bin/appium']);
APPIUM_PID = appiumProcess.pid;
echo('Building the app');
if (exec('buck build android/app').code) {
echo('could not execute Buck build, is it installed and in PATH?');
exitCode = 1;
throw Error(exitCode);
}
echo(`Starting packager server, ${SERVER_PID}`);
// shelljs exec('', {async: true}) does not emit stdout events, so we rely on good old spawn
const packagerProcess = spawn('yarn', ['start', '--max-workers 1'], {
env: process.env
});
SERVER_PID = packagerProcess.pid;
// wait a bit to allow packager to startup
exec('sleep 15s');
echo('Executing android e2e test');
if (tryExecNTimes(
() => {
exec('sleep 10s');
return exec('node node_modules/.bin/_mocha android-e2e-test.js').code;
},
numberOfRetries)) {
echo('Failed to run Android e2e tests');
echo('Most likely the code is broken');
exitCode = 1;
throw Error(exitCode);
}
}
if (argv.ios || argv.tvos) {
var iosTestType = (argv.tvos ? 'tvOS' : 'iOS');
echo('Running the ' + iosTestType + 'app');
cd('ios');
// Make sure we installed local version of react-native
if (!test('-e', path.join('EndToEndTest', path.basename(MARKER_IOS)))) {
echo('iOS marker was not found, `react-native init` command failed?');
exitCode = 1;
throw Error(exitCode);
}
// shelljs exec('', {async: true}) does not emit stdout events, so we rely on good old spawn
const packagerEnv = Object.create(process.env);
packagerEnv.REACT_NATIVE_MAX_WORKERS = 1;
const packagerProcess = spawn('yarn', ['start', '--nonPersistent'],
{
stdio: 'inherit',
env: packagerEnv
});
SERVER_PID = packagerProcess.pid;
exec('sleep 15s');
// prepare cache to reduce chances of possible red screen "Can't fibd variable __fbBatchedBridge..."
exec('response=$(curl --write-out %{http_code} --silent --output /dev/null localhost:8081/index.bundle?platform=ios&dev=true)');
echo(`Starting packager server, ${SERVER_PID}`); | if (tryExecNTimes(
() => {
exec('sleep 10s');
if (argv.tvos) {
return exec('xcodebuild -destination "platform=tvOS Simulator,name=Apple TV 1080p,OS=10.0" -scheme EndToEndTest-tvOS -sdk appletvsimulator test | xcpretty && exit ${PIPESTATUS[0]}').code;
} else {
return exec('xcodebuild -destination "platform=iOS Simulator,name=iPhone 5s,OS=10.3.1" -scheme EndToEndTest -sdk iphonesimulator test | xcpretty && exit ${PIPESTATUS[0]}').code;
}
},
numberOfRetries)) {
echo('Failed to run ' + iosTestType + ' e2e tests');
echo('Most likely the code is broken');
exitCode = 1;
throw Error(exitCode);
}
cd('..');
}
if (argv.js) {
// Check the packager produces a bundle (doesn't throw an error)
if (exec('react-native bundle --max-workers 1 --platform android --dev true --entry-file index.js --bundle-output android-bundle.js').code) {
echo('Could not build Android bundle');
exitCode = 1;
throw Error(exitCode);
}
if (exec('react-native --max-workers 1 bundle --platform ios --dev true --entry-file index.js --bundle-output ios-bundle.js').code) {
echo('Could not build iOS bundle');
exitCode = 1;
throw Error(exitCode);
}
if (exec(`${ROOT}/node_modules/.bin/flow check`).code) {
echo('Flow check does not pass');
exitCode = 1;
throw Error(exitCode);
}
if (exec('yarn test').code) {
echo('Jest test failure');
exitCode = 1;
throw Error(exitCode);
}
}
exitCode = 0;
} finally {
cd(ROOT);
rm(MARKER_IOS);
rm(MARKER_ANDROID);
if (SERVER_PID) {
echo(`Killing packager ${SERVER_PID}`);
exec(`kill -9 ${SERVER_PID}`);
// this is quite drastic but packager starts a daemon that we can't kill by killing the parent process
// it will be fixed in April (quote David Aurelio), so until then we will kill the zombie by the port number
exec("lsof -i tcp:8081 | awk 'NR!=1 {print $2}' | xargs kill");
}
if (APPIUM_PID) {
echo(`Killing appium ${APPIUM_PID}`);
exec(`kill -9 ${APPIUM_PID}`);
}
}
exit(exitCode);
/*eslint-enable no-undef */ | echo('Executing ' + iosTestType + ' e2e test'); | random_line_split |
author.py | from whiffle import wikidotapi
from util import hook
@hook.command
def | (inp):
".author <Author Name> -- Will return details regarding the author"
if firstrefresh == 0:#make sure the cache actually exists
return "Cache has not yet updated, please wait a minute and search again."
api = wikidotapi.connection()
api.Site = "wanderers-library"
pages = api.refresh_pages()
authpages = []
totalrating = 0
pagetotal = 0
pagerating = 0
author = "None"
multimatch = []
authorpage = ""
for page in pages:
for item in pagecache: #these two for loops iterate through every item within each page dictionary, the proper syntax for accessing a specific item is item[page][itemname],
try:
if "entry" in item[page]["tags"]: #makes sure only articles are counted
if author == item[page]["created_by"]:
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1
if inp.lower() in item[page]["created_by"].lower() and author == "None": #this just matches the author with the first author match
author = item[page]["created_by"]
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1 #all lines above provide page data, math is pretty easy and self-explanatory
else:
if "author" in item[page]["tags"]:
if author == item[page]["created_by"]:
authorpage = "http://wanderers-library.wikidot.com/"+item[page]["fullname"] +" - "
except KeyError: #must do error handling for code to be valid, iterates through incorrect keys multiple times, do not print things in the except clause, slows down program immensely
pass
for page in pages: #this loop checks to see if multiple authors match input
for item in pagecache:
try:
if "entry" in item[page]["tags"]:
if inp.lower() in item[page]["created_by"].lower():
multimatch.append(item[page]["created_by"])
except KeyError:
pass
for authors in multimatch: #checks to see if multiple authors found
if authors != author:
return "There are "+ str(len(multimatch)) + " authors matching you query. Please be more specifc. "
avgrating = 0
if pagetotal is not 0: #just so no division by zero
avgrating = totalrating/pagetotal
if not authpages: #if no author pages are added
return "Author not found."
return "nonick::"+ authorpage+""+author +" has written " + str(pagetotal) + " pages. They have " + str(totalrating)+ " net upvotes with an average rating of " + str(avgrating) + ". Their most recent article is " + pagetitle + "(Rating:" + str(pagerating) + ")"#+"- http://wanderers-library.wikidot.com/" + authpages[-1].lower() | author | identifier_name |
author.py | from whiffle import wikidotapi
from util import hook
@hook.command
def author(inp):
".author <Author Name> -- Will return details regarding the author"
if firstrefresh == 0:#make sure the cache actually exists
return "Cache has not yet updated, please wait a minute and search again."
api = wikidotapi.connection()
api.Site = "wanderers-library"
pages = api.refresh_pages()
authpages = []
totalrating = 0
pagetotal = 0
pagerating = 0
author = "None"
multimatch = []
authorpage = ""
for page in pages:
for item in pagecache: #these two for loops iterate through every item within each page dictionary, the proper syntax for accessing a specific item is item[page][itemname],
try:
if "entry" in item[page]["tags"]: #makes sure only articles are counted
if author == item[page]["created_by"]:
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1
if inp.lower() in item[page]["created_by"].lower() and author == "None": #this just matches the author with the first author match
author = item[page]["created_by"]
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1 #all lines above provide page data, math is pretty easy and self-explanatory
else:
if "author" in item[page]["tags"]:
if author == item[page]["created_by"]:
authorpage = "http://wanderers-library.wikidot.com/"+item[page]["fullname"] +" - "
except KeyError: #must do error handling for code to be valid, iterates through incorrect keys multiple times, do not print things in the except clause, slows down program immensely
pass
for page in pages: #this loop checks to see if multiple authors match input
for item in pagecache:
try:
if "entry" in item[page]["tags"]:
if inp.lower() in item[page]["created_by"].lower():
multimatch.append(item[page]["created_by"])
except KeyError:
pass
for authors in multimatch: #checks to see if multiple authors found
if authors != author:
return "There are "+ str(len(multimatch)) + " authors matching you query. Please be more specifc. "
avgrating = 0
if pagetotal is not 0: #just so no division by zero
avgrating = totalrating/pagetotal
if not authpages: #if no author pages are added
|
return "nonick::"+ authorpage+""+author +" has written " + str(pagetotal) + " pages. They have " + str(totalrating)+ " net upvotes with an average rating of " + str(avgrating) + ". Their most recent article is " + pagetitle + "(Rating:" + str(pagerating) + ")"#+"- http://wanderers-library.wikidot.com/" + authpages[-1].lower() | return "Author not found." | conditional_block |
author.py | from whiffle import wikidotapi
from util import hook
@hook.command
def author(inp):
| ".author <Author Name> -- Will return details regarding the author"
if firstrefresh == 0:#make sure the cache actually exists
return "Cache has not yet updated, please wait a minute and search again."
api = wikidotapi.connection()
api.Site = "wanderers-library"
pages = api.refresh_pages()
authpages = []
totalrating = 0
pagetotal = 0
pagerating = 0
author = "None"
multimatch = []
authorpage = ""
for page in pages:
for item in pagecache: #these two for loops iterate through every item within each page dictionary, the proper syntax for accessing a specific item is item[page][itemname],
try:
if "entry" in item[page]["tags"]: #makes sure only articles are counted
if author == item[page]["created_by"]:
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1
if inp.lower() in item[page]["created_by"].lower() and author == "None": #this just matches the author with the first author match
author = item[page]["created_by"]
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1 #all lines above provide page data, math is pretty easy and self-explanatory
else:
if "author" in item[page]["tags"]:
if author == item[page]["created_by"]:
authorpage = "http://wanderers-library.wikidot.com/"+item[page]["fullname"] +" - "
except KeyError: #must do error handling for code to be valid, iterates through incorrect keys multiple times, do not print things in the except clause, slows down program immensely
pass
for page in pages: #this loop checks to see if multiple authors match input
for item in pagecache:
try:
if "entry" in item[page]["tags"]:
if inp.lower() in item[page]["created_by"].lower():
multimatch.append(item[page]["created_by"])
except KeyError:
pass
for authors in multimatch: #checks to see if multiple authors found
if authors != author:
return "There are "+ str(len(multimatch)) + " authors matching you query. Please be more specifc. "
avgrating = 0
if pagetotal is not 0: #just so no division by zero
avgrating = totalrating/pagetotal
if not authpages: #if no author pages are added
return "Author not found."
return "nonick::"+ authorpage+""+author +" has written " + str(pagetotal) + " pages. They have " + str(totalrating)+ " net upvotes with an average rating of " + str(avgrating) + ". Their most recent article is " + pagetitle + "(Rating:" + str(pagerating) + ")"#+"- http://wanderers-library.wikidot.com/" + authpages[-1].lower() | identifier_body |
|
author.py | from whiffle import wikidotapi
from util import hook
@hook.command
def author(inp):
".author <Author Name> -- Will return details regarding the author"
if firstrefresh == 0:#make sure the cache actually exists
return "Cache has not yet updated, please wait a minute and search again."
api = wikidotapi.connection()
api.Site = "wanderers-library"
pages = api.refresh_pages()
authpages = []
totalrating = 0
pagetotal = 0
pagerating = 0
author = "None"
multimatch = []
authorpage = ""
for page in pages:
for item in pagecache: #these two for loops iterate through every item within each page dictionary, the proper syntax for accessing a specific item is item[page][itemname],
try:
if "entry" in item[page]["tags"]: #makes sure only articles are counted
if author == item[page]["created_by"]:
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1
if inp.lower() in item[page]["created_by"].lower() and author == "None": #this just matches the author with the first author match
author = item[page]["created_by"]
authpages.append(page)
pagetitle = item[page]["title"]
pagerating = item[page]["rating"]
totalrating = totalrating + pagerating
print page
pagetotal = pagetotal + 1 #all lines above provide page data, math is pretty easy and self-explanatory
else:
if "author" in item[page]["tags"]:
if author == item[page]["created_by"]:
authorpage = "http://wanderers-library.wikidot.com/"+item[page]["fullname"] +" - "
except KeyError: #must do error handling for code to be valid, iterates through incorrect keys multiple times, do not print things in the except clause, slows down program immensely
pass
for page in pages: #this loop checks to see if multiple authors match input
for item in pagecache:
try:
if "entry" in item[page]["tags"]:
if inp.lower() in item[page]["created_by"].lower():
multimatch.append(item[page]["created_by"])
except KeyError:
pass
for authors in multimatch: #checks to see if multiple authors found
if authors != author:
return "There are "+ str(len(multimatch)) + " authors matching you query. Please be more specifc. "
avgrating = 0
if pagetotal is not 0: #just so no division by zero | avgrating = totalrating/pagetotal
if not authpages: #if no author pages are added
return "Author not found."
return "nonick::"+ authorpage+""+author +" has written " + str(pagetotal) + " pages. They have " + str(totalrating)+ " net upvotes with an average rating of " + str(avgrating) + ". Their most recent article is " + pagetitle + "(Rating:" + str(pagerating) + ")"#+"- http://wanderers-library.wikidot.com/" + authpages[-1].lower() | random_line_split |
|
dom.ts | import {isBoolean, isString, isArray, isPlainObject} from "./util/types"
export type HTMLAttrs = {[name: string]: any}
export type HTMLChild = string | HTMLElement | (string | HTMLElement)[]
const _createElement = <T extends keyof HTMLElementTagNameMap>(tag: T) =>
(attrs: HTMLAttrs = {}, ...children: HTMLChild[]): HTMLElementTagNameMap[T] => {
const element = document.createElement(tag)
for (const attr in attrs) {
const value = attrs[attr]
if (value == null || isBoolean(value) && !value)
continue
if (attr === "class" && isArray(value)) {
for (const cls of (value as string[])) {
if (cls != null) element.classList.add(cls)
}
continue
}
if (attr === "style" && isPlainObject(value)) {
for (const prop in value) {
(element.style as any)[prop] = value[prop]
}
continue
}
if (attr === "data" && isPlainObject(value)) {
for (const key in value) {
element.dataset[key] = value[key] as string | undefined // XXX: attrs needs a better type
}
continue
}
element.setAttribute(attr, value)
}
function append(child: HTMLElement | string) |
for (const child of children) {
if (isArray(child)) {
for (const _child of child)
append(_child)
} else
append(child)
}
return element
}
export function createElement<T extends keyof HTMLElementTagNameMap>(tag: T,
attrs: HTMLAttrs, ...children: HTMLChild[]): HTMLElementTagNameMap[T] {
return _createElement(tag)(attrs, ...children)
}
export const
div = _createElement("div"),
span = _createElement("span"),
link = _createElement("link"),
style = _createElement("style"),
a = _createElement("a"),
p = _createElement("p"),
i = _createElement("i"),
pre = _createElement("pre"),
button = _createElement("button"),
label = _createElement("label"),
input = _createElement("input"),
select = _createElement("select"),
option = _createElement("option"),
optgroup = _createElement("optgroup"),
textarea = _createElement("textarea"),
canvas = _createElement("canvas"),
ul = _createElement("ul"),
ol = _createElement("ol"),
li = _createElement("li");
export const nbsp = document.createTextNode("\u00a0")
export function removeElement(element: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.removeChild(element)
}
}
export function replaceWith(element: HTMLElement, replacement: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.replaceChild(replacement, element)
}
}
export function prepend(element: HTMLElement, ...nodes: Node[]): void {
const first = element.firstChild
for (const node of nodes) {
element.insertBefore(node, first)
}
}
export function empty(element: HTMLElement): void {
let child
while (child = element.firstChild) {
element.removeChild(child)
}
}
export function show(element: HTMLElement): void {
element.style.display = ""
}
export function hide(element: HTMLElement): void {
element.style.display = "none"
}
export function position(element: HTMLElement) {
return {
top: element.offsetTop,
left: element.offsetLeft,
}
}
export function offset(element: HTMLElement) {
const rect = element.getBoundingClientRect()
return {
top: rect.top + window.pageYOffset - document.documentElement.clientTop,
left: rect.left + window.pageXOffset - document.documentElement.clientLeft,
}
}
export function matches(el: HTMLElement, selector: string): boolean {
const p: any = Element.prototype
const f = p.matches || p.webkitMatchesSelector || p.mozMatchesSelector || p.msMatchesSelector
return f.call(el, selector)
}
export function parent(el: HTMLElement, selector: string): HTMLElement | null {
let node: HTMLElement | null = el
while (node = node.parentElement) {
if (matches(node, selector))
return node
}
return null
}
export type Sizing = {top: number, bottom: number, left: number, right: number}
export function margin(el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.marginTop!) || 0,
bottom: parseFloat(style.marginBottom!) || 0,
left: parseFloat(style.marginLeft!) || 0,
right: parseFloat(style.marginRight!) || 0,
}
}
export function padding(el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.paddingTop!) || 0,
bottom: parseFloat(style.paddingBottom!) || 0,
left: parseFloat(style.paddingLeft!) || 0,
right: parseFloat(style.paddingRight!) || 0,
}
}
export enum Keys {
Backspace = 8,
Tab = 9,
Enter = 13,
Esc = 27,
PageUp = 33,
PageDown = 34,
Left = 37,
Up = 38,
Right = 39,
Down = 40,
Delete = 46,
}
| {
if (child instanceof HTMLElement)
element.appendChild(child)
else if (isString(child))
element.appendChild(document.createTextNode(child))
else if (child != null && child !== false)
throw new Error(`expected an HTMLElement, string, false or null, got ${JSON.stringify(child)}`)
} | identifier_body |
dom.ts | import {isBoolean, isString, isArray, isPlainObject} from "./util/types"
export type HTMLAttrs = {[name: string]: any}
export type HTMLChild = string | HTMLElement | (string | HTMLElement)[]
const _createElement = <T extends keyof HTMLElementTagNameMap>(tag: T) =>
(attrs: HTMLAttrs = {}, ...children: HTMLChild[]): HTMLElementTagNameMap[T] => {
const element = document.createElement(tag)
for (const attr in attrs) {
const value = attrs[attr]
if (value == null || isBoolean(value) && !value)
continue
if (attr === "class" && isArray(value)) {
for (const cls of (value as string[])) {
if (cls != null) element.classList.add(cls)
}
continue
}
if (attr === "style" && isPlainObject(value)) {
for (const prop in value) {
(element.style as any)[prop] = value[prop]
}
continue
}
if (attr === "data" && isPlainObject(value)) {
for (const key in value) {
element.dataset[key] = value[key] as string | undefined // XXX: attrs needs a better type
}
continue
}
element.setAttribute(attr, value)
}
function append(child: HTMLElement | string) {
if (child instanceof HTMLElement)
element.appendChild(child)
else if (isString(child))
element.appendChild(document.createTextNode(child))
else if (child != null && child !== false)
throw new Error(`expected an HTMLElement, string, false or null, got ${JSON.stringify(child)}`)
}
for (const child of children) {
if (isArray(child)) {
for (const _child of child)
append(_child)
} else
append(child)
}
return element
}
export function createElement<T extends keyof HTMLElementTagNameMap>(tag: T,
attrs: HTMLAttrs, ...children: HTMLChild[]): HTMLElementTagNameMap[T] {
return _createElement(tag)(attrs, ...children)
}
export const
div = _createElement("div"),
span = _createElement("span"),
link = _createElement("link"),
style = _createElement("style"),
a = _createElement("a"),
p = _createElement("p"),
i = _createElement("i"),
pre = _createElement("pre"),
button = _createElement("button"),
label = _createElement("label"),
input = _createElement("input"),
select = _createElement("select"),
option = _createElement("option"),
optgroup = _createElement("optgroup"),
textarea = _createElement("textarea"),
canvas = _createElement("canvas"),
ul = _createElement("ul"),
ol = _createElement("ol"),
li = _createElement("li");
export const nbsp = document.createTextNode("\u00a0")
export function removeElement(element: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.removeChild(element)
}
}
export function replaceWith(element: HTMLElement, replacement: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.replaceChild(replacement, element)
}
}
export function prepend(element: HTMLElement, ...nodes: Node[]): void {
const first = element.firstChild
for (const node of nodes) {
element.insertBefore(node, first)
}
}
export function empty(element: HTMLElement): void {
let child
while (child = element.firstChild) {
element.removeChild(child)
}
}
export function show(element: HTMLElement): void {
element.style.display = ""
}
export function hide(element: HTMLElement): void {
element.style.display = "none"
}
export function position(element: HTMLElement) {
return {
top: element.offsetTop,
left: element.offsetLeft,
}
}
export function offset(element: HTMLElement) {
const rect = element.getBoundingClientRect()
return {
top: rect.top + window.pageYOffset - document.documentElement.clientTop,
left: rect.left + window.pageXOffset - document.documentElement.clientLeft,
}
}
export function matches(el: HTMLElement, selector: string): boolean {
const p: any = Element.prototype
const f = p.matches || p.webkitMatchesSelector || p.mozMatchesSelector || p.msMatchesSelector
return f.call(el, selector)
}
export function parent(el: HTMLElement, selector: string): HTMLElement | null {
let node: HTMLElement | null = el
while (node = node.parentElement) {
if (matches(node, selector))
return node
}
return null
}
export type Sizing = {top: number, bottom: number, left: number, right: number}
export function margin(el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.marginTop!) || 0,
bottom: parseFloat(style.marginBottom!) || 0,
left: parseFloat(style.marginLeft!) || 0,
right: parseFloat(style.marginRight!) || 0,
}
}
export function | (el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.paddingTop!) || 0,
bottom: parseFloat(style.paddingBottom!) || 0,
left: parseFloat(style.paddingLeft!) || 0,
right: parseFloat(style.paddingRight!) || 0,
}
}
export enum Keys {
Backspace = 8,
Tab = 9,
Enter = 13,
Esc = 27,
PageUp = 33,
PageDown = 34,
Left = 37,
Up = 38,
Right = 39,
Down = 40,
Delete = 46,
}
| padding | identifier_name |
dom.ts | import {isBoolean, isString, isArray, isPlainObject} from "./util/types"
export type HTMLAttrs = {[name: string]: any}
export type HTMLChild = string | HTMLElement | (string | HTMLElement)[]
const _createElement = <T extends keyof HTMLElementTagNameMap>(tag: T) =>
(attrs: HTMLAttrs = {}, ...children: HTMLChild[]): HTMLElementTagNameMap[T] => {
const element = document.createElement(tag)
for (const attr in attrs) {
const value = attrs[attr]
if (value == null || isBoolean(value) && !value)
continue
if (attr === "class" && isArray(value)) {
for (const cls of (value as string[])) {
if (cls != null) element.classList.add(cls)
}
continue
}
if (attr === "style" && isPlainObject(value)) {
for (const prop in value) {
(element.style as any)[prop] = value[prop]
}
continue
}
if (attr === "data" && isPlainObject(value)) {
for (const key in value) {
element.dataset[key] = value[key] as string | undefined // XXX: attrs needs a better type
}
continue
}
element.setAttribute(attr, value)
}
function append(child: HTMLElement | string) {
if (child instanceof HTMLElement)
element.appendChild(child)
else if (isString(child))
element.appendChild(document.createTextNode(child))
else if (child != null && child !== false)
throw new Error(`expected an HTMLElement, string, false or null, got ${JSON.stringify(child)}`)
}
for (const child of children) {
if (isArray(child)) {
for (const _child of child)
append(_child)
} else
append(child)
}
return element
}
export function createElement<T extends keyof HTMLElementTagNameMap>(tag: T,
attrs: HTMLAttrs, ...children: HTMLChild[]): HTMLElementTagNameMap[T] {
return _createElement(tag)(attrs, ...children)
}
export const
div = _createElement("div"),
span = _createElement("span"),
link = _createElement("link"),
style = _createElement("style"),
a = _createElement("a"),
p = _createElement("p"),
i = _createElement("i"),
pre = _createElement("pre"),
button = _createElement("button"),
label = _createElement("label"),
input = _createElement("input"),
select = _createElement("select"),
option = _createElement("option"),
optgroup = _createElement("optgroup"),
textarea = _createElement("textarea"),
canvas = _createElement("canvas"),
ul = _createElement("ul"),
ol = _createElement("ol"),
li = _createElement("li");
export const nbsp = document.createTextNode("\u00a0")
export function removeElement(element: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.removeChild(element)
}
}
export function replaceWith(element: HTMLElement, replacement: HTMLElement): void {
const parent = element.parentNode
if (parent != null) {
parent.replaceChild(replacement, element)
}
}
export function prepend(element: HTMLElement, ...nodes: Node[]): void {
const first = element.firstChild
for (const node of nodes) {
element.insertBefore(node, first)
}
}
export function empty(element: HTMLElement): void {
let child
while (child = element.firstChild) {
element.removeChild(child)
}
}
export function show(element: HTMLElement): void {
element.style.display = ""
}
export function hide(element: HTMLElement): void {
element.style.display = "none"
}
export function position(element: HTMLElement) {
return {
top: element.offsetTop,
left: element.offsetLeft,
}
}
export function offset(element: HTMLElement) {
const rect = element.getBoundingClientRect()
return {
top: rect.top + window.pageYOffset - document.documentElement.clientTop,
left: rect.left + window.pageXOffset - document.documentElement.clientLeft,
}
}
export function matches(el: HTMLElement, selector: string): boolean {
const p: any = Element.prototype
const f = p.matches || p.webkitMatchesSelector || p.mozMatchesSelector || p.msMatchesSelector
return f.call(el, selector)
}
export function parent(el: HTMLElement, selector: string): HTMLElement | null {
let node: HTMLElement | null = el
while (node = node.parentElement) {
if (matches(node, selector))
return node
}
return null
}
export type Sizing = {top: number, bottom: number, left: number, right: number}
export function margin(el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.marginTop!) || 0,
bottom: parseFloat(style.marginBottom!) || 0,
left: parseFloat(style.marginLeft!) || 0, |
export function padding(el: HTMLElement): Sizing {
const style = getComputedStyle(el)
return {
top: parseFloat(style.paddingTop!) || 0,
bottom: parseFloat(style.paddingBottom!) || 0,
left: parseFloat(style.paddingLeft!) || 0,
right: parseFloat(style.paddingRight!) || 0,
}
}
export enum Keys {
Backspace = 8,
Tab = 9,
Enter = 13,
Esc = 27,
PageUp = 33,
PageDown = 34,
Left = 37,
Up = 38,
Right = 39,
Down = 40,
Delete = 46,
} | right: parseFloat(style.marginRight!) || 0,
}
} | random_line_split |
models.py | """
Models used by the block structure framework.
"""
from __future__ import absolute_import
import errno
from contextlib import contextmanager
from datetime import datetime
from logging import getLogger
import six
from six.moves import map
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.core.files.base import ContentFile
from django.db import models, transaction
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from openedx.core.djangoapps.xmodule_django.models import UsageKeyWithRunField
from openedx.core.storage import get_storage
from . import config
from .exceptions import BlockStructureNotFound
log = getLogger(__name__)
def _create_path(directory, filename):
"""
Returns the full path for the given directory and filename.
"""
return '{}/{}'.format(directory, filename)
def _directory_name(data_usage_key):
"""
Returns the directory name for the given
data_usage_key.
"""
# replace any '/' in the usage key so they aren't interpreted
# as folder separators.
encoded_usage_key = six.text_type(data_usage_key).replace('/', '_')
return '{}{}'.format(
settings.BLOCK_STRUCTURES_SETTINGS.get('DIRECTORY_PREFIX', ''),
encoded_usage_key,
)
def _path_name(bs_model, _filename):
"""
Returns path name to use for the given
BlockStructureModel instance.
"""
filename = datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S-%f')
return _create_path(
_directory_name(bs_model.data_usage_key),
filename,
)
def _bs_model_storage():
"""
Get django Storage object for BlockStructureModel.
"""
return get_storage(
settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_CLASS'),
**settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_KWARGS', {})
)
class CustomizableFileField(models.FileField):
"""
Subclass of FileField that allows custom settings to not
be serialized (hard-coded) in migrations. Otherwise,
migrations include optional settings for storage (such as
the storage class and bucket name); we don't want to
create new migration files for each configuration change.
"""
def __init__(self, *args, **kwargs):
kwargs.update(dict(
upload_to=_path_name,
storage=_bs_model_storage(),
max_length=500, # allocate enough for base path + prefix + usage_key + timestamp in filepath
))
super(CustomizableFileField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(CustomizableFileField, self).deconstruct()
del kwargs['upload_to']
del kwargs['storage']
del kwargs['max_length']
return name, path, args, kwargs
@contextmanager
def _storage_error_handling(bs_model, operation, is_read_operation=False):
"""
Helpful context manager that handles various errors
from the backend storage.
Typical errors at read time on configuration changes:
IOError:
- File not found (S3 or FS)
- Bucket name changed (S3)
SuspiciousOperation
- Path mismatches when changing backends
Other known errors:
OSError
- Access issues in creating files (FS)
S3ResponseError
- Incorrect credentials with 403 status (S3)
- Non-existent bucket with 404 status (S3)
"""
try:
yield
except Exception as error: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception %s on store %s; %s.', error.__class__, operation, bs_model)
if isinstance(error, OSError) and error.errno in (errno.EACCES, errno.EPERM): # pylint: disable=no-member
raise
elif is_read_operation and isinstance(error, (IOError, SuspiciousOperation)):
# May have been caused by one of the possible error
# situations listed above. Raise BlockStructureNotFound
# so the block structure can be regenerated and restored.
raise BlockStructureNotFound(bs_model.data_usage_key)
else:
raise
@python_2_unicode_compatible
class BlockStructureModel(TimeStampedModel):
"""
Model for storing Block Structure information.
.. no_pii:
"""
VERSION_FIELDS = [
u'data_version',
u'data_edit_timestamp',
u'transformers_schema_version',
u'block_structure_schema_version',
]
UNIQUENESS_FIELDS = [u'data_usage_key'] + VERSION_FIELDS
class Meta(object):
db_table = 'block_structure'
data_usage_key = UsageKeyWithRunField(
u'Identifier of the data being collected.',
blank=False,
max_length=255,
unique=True,
)
data_version = models.CharField(
u'Version of the data at the time of collection.',
blank=True,
null=True,
max_length=255,
)
data_edit_timestamp = models.DateTimeField(
u'Edit timestamp of the data at the time of collection.',
blank=True,
null=True,
)
transformers_schema_version = models.CharField(
u'Representation of the schema version of the transformers used during collection.',
blank=False,
max_length=255,
)
block_structure_schema_version = models.CharField(
u'Version of the block structure schema at the time of collection.',
blank=False,
max_length=255,
)
data = CustomizableFileField()
def get_serialized_data(self):
"""
Returns the collected data for this instance.
"""
operation = u'Read'
with _storage_error_handling(self, operation, is_read_operation=True):
serialized_data = self.data.read()
self._log(self, operation, serialized_data)
return serialized_data
@classmethod
def get(cls, data_usage_key):
"""
Returns the entry associated with the given data_usage_key.
Raises:
BlockStructureNotFound if an entry for data_usage_key is not found.
"""
try:
return cls.objects.get(data_usage_key=data_usage_key)
except cls.DoesNotExist:
log.info(u'BlockStructure: Not found in table; %s.', data_usage_key)
raise BlockStructureNotFound(data_usage_key)
@classmethod
def update_or_create(cls, serialized_data, data_usage_key, **kwargs):
"""
Updates or creates the BlockStructureModel entry
for the given data_usage_key in the kwargs,
uploading serialized_data as the content data.
"""
# Use an atomic transaction so the model isn't updated
# unless the file is successfully persisted.
with transaction.atomic():
bs_model, created = cls.objects.update_or_create(defaults=kwargs, data_usage_key=data_usage_key)
operation = u'Created' if created else u'Updated'
with _storage_error_handling(bs_model, operation):
bs_model.data.save('', ContentFile(serialized_data))
cls._log(bs_model, operation, serialized_data)
if not created:
cls._prune_files(data_usage_key)
return bs_model, created
def __str__(self):
"""
Returns a string representation of this model.
"""
return u', '.join(
u'{}: {}'.format(field_name, six.text_type(getattr(self, field_name)))
for field_name in self.UNIQUENESS_FIELDS
)
@classmethod
def _prune_files(cls, data_usage_key, num_to_keep=None):
"""
Deletes previous file versions for data_usage_key.
"""
if not settings.BLOCK_STRUCTURES_SETTINGS.get('PRUNING_ACTIVE', False):
return
if num_to_keep is None:
num_to_keep = config.num_versions_to_keep()
try:
all_files_by_date = sorted(cls._get_all_files(data_usage_key))
files_to_delete = all_files_by_date[:-num_to_keep] if num_to_keep > 0 else all_files_by_date
cls._delete_files(files_to_delete)
log.info(
u'BlockStructure: Deleted %d out of total %d files in store; data_usage_key: %s, num_to_keep: %d.',
len(files_to_delete),
len(all_files_by_date),
data_usage_key,
num_to_keep,
)
except Exception: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception when deleting old files; data_usage_key: %s.', data_usage_key)
@classmethod
def | (cls, files):
"""
Deletes the given files from storage.
"""
storage = _bs_model_storage()
list(map(storage.delete, files))
@classmethod
def _get_all_files(cls, data_usage_key):
"""
Returns all filenames that exist for the given key.
"""
directory = _directory_name(data_usage_key)
_, filenames = _bs_model_storage().listdir(directory)
return [
_create_path(directory, filename)
for filename in filenames
if filename and not filename.startswith('.')
]
@classmethod
def _log(cls, bs_model, operation, serialized_data):
"""
Writes log information for the given values.
"""
log.info(
u'BlockStructure: %s in store %s at %s%s; %s, size: %d',
operation,
bs_model.data.storage.__class__,
getattr(bs_model.data.storage, 'bucket_name', ''),
getattr(bs_model.data.storage, 'location', ''),
bs_model,
len(serialized_data),
)
| _delete_files | identifier_name |
models.py | """
Models used by the block structure framework.
"""
from __future__ import absolute_import
import errno
from contextlib import contextmanager
from datetime import datetime
from logging import getLogger
import six
from six.moves import map
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.core.files.base import ContentFile
from django.db import models, transaction
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from openedx.core.djangoapps.xmodule_django.models import UsageKeyWithRunField
from openedx.core.storage import get_storage
from . import config
from .exceptions import BlockStructureNotFound
log = getLogger(__name__)
def _create_path(directory, filename):
"""
Returns the full path for the given directory and filename.
"""
return '{}/{}'.format(directory, filename)
def _directory_name(data_usage_key):
"""
Returns the directory name for the given
data_usage_key.
"""
# replace any '/' in the usage key so they aren't interpreted
# as folder separators.
encoded_usage_key = six.text_type(data_usage_key).replace('/', '_')
return '{}{}'.format(
settings.BLOCK_STRUCTURES_SETTINGS.get('DIRECTORY_PREFIX', ''),
encoded_usage_key,
)
def _path_name(bs_model, _filename):
"""
Returns path name to use for the given
BlockStructureModel instance.
"""
filename = datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S-%f')
return _create_path(
_directory_name(bs_model.data_usage_key), | filename,
)
def _bs_model_storage():
"""
Get django Storage object for BlockStructureModel.
"""
return get_storage(
settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_CLASS'),
**settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_KWARGS', {})
)
class CustomizableFileField(models.FileField):
"""
Subclass of FileField that allows custom settings to not
be serialized (hard-coded) in migrations. Otherwise,
migrations include optional settings for storage (such as
the storage class and bucket name); we don't want to
create new migration files for each configuration change.
"""
def __init__(self, *args, **kwargs):
kwargs.update(dict(
upload_to=_path_name,
storage=_bs_model_storage(),
max_length=500, # allocate enough for base path + prefix + usage_key + timestamp in filepath
))
super(CustomizableFileField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(CustomizableFileField, self).deconstruct()
del kwargs['upload_to']
del kwargs['storage']
del kwargs['max_length']
return name, path, args, kwargs
@contextmanager
def _storage_error_handling(bs_model, operation, is_read_operation=False):
"""
Helpful context manager that handles various errors
from the backend storage.
Typical errors at read time on configuration changes:
IOError:
- File not found (S3 or FS)
- Bucket name changed (S3)
SuspiciousOperation
- Path mismatches when changing backends
Other known errors:
OSError
- Access issues in creating files (FS)
S3ResponseError
- Incorrect credentials with 403 status (S3)
- Non-existent bucket with 404 status (S3)
"""
try:
yield
except Exception as error: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception %s on store %s; %s.', error.__class__, operation, bs_model)
if isinstance(error, OSError) and error.errno in (errno.EACCES, errno.EPERM): # pylint: disable=no-member
raise
elif is_read_operation and isinstance(error, (IOError, SuspiciousOperation)):
# May have been caused by one of the possible error
# situations listed above. Raise BlockStructureNotFound
# so the block structure can be regenerated and restored.
raise BlockStructureNotFound(bs_model.data_usage_key)
else:
raise
@python_2_unicode_compatible
class BlockStructureModel(TimeStampedModel):
"""
Model for storing Block Structure information.
.. no_pii:
"""
VERSION_FIELDS = [
u'data_version',
u'data_edit_timestamp',
u'transformers_schema_version',
u'block_structure_schema_version',
]
UNIQUENESS_FIELDS = [u'data_usage_key'] + VERSION_FIELDS
class Meta(object):
db_table = 'block_structure'
data_usage_key = UsageKeyWithRunField(
u'Identifier of the data being collected.',
blank=False,
max_length=255,
unique=True,
)
data_version = models.CharField(
u'Version of the data at the time of collection.',
blank=True,
null=True,
max_length=255,
)
data_edit_timestamp = models.DateTimeField(
u'Edit timestamp of the data at the time of collection.',
blank=True,
null=True,
)
transformers_schema_version = models.CharField(
u'Representation of the schema version of the transformers used during collection.',
blank=False,
max_length=255,
)
block_structure_schema_version = models.CharField(
u'Version of the block structure schema at the time of collection.',
blank=False,
max_length=255,
)
data = CustomizableFileField()
def get_serialized_data(self):
"""
Returns the collected data for this instance.
"""
operation = u'Read'
with _storage_error_handling(self, operation, is_read_operation=True):
serialized_data = self.data.read()
self._log(self, operation, serialized_data)
return serialized_data
@classmethod
def get(cls, data_usage_key):
"""
Returns the entry associated with the given data_usage_key.
Raises:
BlockStructureNotFound if an entry for data_usage_key is not found.
"""
try:
return cls.objects.get(data_usage_key=data_usage_key)
except cls.DoesNotExist:
log.info(u'BlockStructure: Not found in table; %s.', data_usage_key)
raise BlockStructureNotFound(data_usage_key)
@classmethod
def update_or_create(cls, serialized_data, data_usage_key, **kwargs):
"""
Updates or creates the BlockStructureModel entry
for the given data_usage_key in the kwargs,
uploading serialized_data as the content data.
"""
# Use an atomic transaction so the model isn't updated
# unless the file is successfully persisted.
with transaction.atomic():
bs_model, created = cls.objects.update_or_create(defaults=kwargs, data_usage_key=data_usage_key)
operation = u'Created' if created else u'Updated'
with _storage_error_handling(bs_model, operation):
bs_model.data.save('', ContentFile(serialized_data))
cls._log(bs_model, operation, serialized_data)
if not created:
cls._prune_files(data_usage_key)
return bs_model, created
def __str__(self):
"""
Returns a string representation of this model.
"""
return u', '.join(
u'{}: {}'.format(field_name, six.text_type(getattr(self, field_name)))
for field_name in self.UNIQUENESS_FIELDS
)
@classmethod
def _prune_files(cls, data_usage_key, num_to_keep=None):
"""
Deletes previous file versions for data_usage_key.
"""
if not settings.BLOCK_STRUCTURES_SETTINGS.get('PRUNING_ACTIVE', False):
return
if num_to_keep is None:
num_to_keep = config.num_versions_to_keep()
try:
all_files_by_date = sorted(cls._get_all_files(data_usage_key))
files_to_delete = all_files_by_date[:-num_to_keep] if num_to_keep > 0 else all_files_by_date
cls._delete_files(files_to_delete)
log.info(
u'BlockStructure: Deleted %d out of total %d files in store; data_usage_key: %s, num_to_keep: %d.',
len(files_to_delete),
len(all_files_by_date),
data_usage_key,
num_to_keep,
)
except Exception: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception when deleting old files; data_usage_key: %s.', data_usage_key)
@classmethod
def _delete_files(cls, files):
"""
Deletes the given files from storage.
"""
storage = _bs_model_storage()
list(map(storage.delete, files))
@classmethod
def _get_all_files(cls, data_usage_key):
"""
Returns all filenames that exist for the given key.
"""
directory = _directory_name(data_usage_key)
_, filenames = _bs_model_storage().listdir(directory)
return [
_create_path(directory, filename)
for filename in filenames
if filename and not filename.startswith('.')
]
@classmethod
def _log(cls, bs_model, operation, serialized_data):
"""
Writes log information for the given values.
"""
log.info(
u'BlockStructure: %s in store %s at %s%s; %s, size: %d',
operation,
bs_model.data.storage.__class__,
getattr(bs_model.data.storage, 'bucket_name', ''),
getattr(bs_model.data.storage, 'location', ''),
bs_model,
len(serialized_data),
) | random_line_split |
|
models.py | """
Models used by the block structure framework.
"""
from __future__ import absolute_import
import errno
from contextlib import contextmanager
from datetime import datetime
from logging import getLogger
import six
from six.moves import map
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.core.files.base import ContentFile
from django.db import models, transaction
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from openedx.core.djangoapps.xmodule_django.models import UsageKeyWithRunField
from openedx.core.storage import get_storage
from . import config
from .exceptions import BlockStructureNotFound
log = getLogger(__name__)
def _create_path(directory, filename):
"""
Returns the full path for the given directory and filename.
"""
return '{}/{}'.format(directory, filename)
def _directory_name(data_usage_key):
"""
Returns the directory name for the given
data_usage_key.
"""
# replace any '/' in the usage key so they aren't interpreted
# as folder separators.
encoded_usage_key = six.text_type(data_usage_key).replace('/', '_')
return '{}{}'.format(
settings.BLOCK_STRUCTURES_SETTINGS.get('DIRECTORY_PREFIX', ''),
encoded_usage_key,
)
def _path_name(bs_model, _filename):
"""
Returns path name to use for the given
BlockStructureModel instance.
"""
filename = datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S-%f')
return _create_path(
_directory_name(bs_model.data_usage_key),
filename,
)
def _bs_model_storage():
"""
Get django Storage object for BlockStructureModel.
"""
return get_storage(
settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_CLASS'),
**settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_KWARGS', {})
)
class CustomizableFileField(models.FileField):
"""
Subclass of FileField that allows custom settings to not
be serialized (hard-coded) in migrations. Otherwise,
migrations include optional settings for storage (such as
the storage class and bucket name); we don't want to
create new migration files for each configuration change.
"""
def __init__(self, *args, **kwargs):
kwargs.update(dict(
upload_to=_path_name,
storage=_bs_model_storage(),
max_length=500, # allocate enough for base path + prefix + usage_key + timestamp in filepath
))
super(CustomizableFileField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(CustomizableFileField, self).deconstruct()
del kwargs['upload_to']
del kwargs['storage']
del kwargs['max_length']
return name, path, args, kwargs
@contextmanager
def _storage_error_handling(bs_model, operation, is_read_operation=False):
"""
Helpful context manager that handles various errors
from the backend storage.
Typical errors at read time on configuration changes:
IOError:
- File not found (S3 or FS)
- Bucket name changed (S3)
SuspiciousOperation
- Path mismatches when changing backends
Other known errors:
OSError
- Access issues in creating files (FS)
S3ResponseError
- Incorrect credentials with 403 status (S3)
- Non-existent bucket with 404 status (S3)
"""
try:
yield
except Exception as error: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception %s on store %s; %s.', error.__class__, operation, bs_model)
if isinstance(error, OSError) and error.errno in (errno.EACCES, errno.EPERM): # pylint: disable=no-member
raise
elif is_read_operation and isinstance(error, (IOError, SuspiciousOperation)):
# May have been caused by one of the possible error
# situations listed above. Raise BlockStructureNotFound
# so the block structure can be regenerated and restored.
raise BlockStructureNotFound(bs_model.data_usage_key)
else:
|
@python_2_unicode_compatible
class BlockStructureModel(TimeStampedModel):
"""
Model for storing Block Structure information.
.. no_pii:
"""
VERSION_FIELDS = [
u'data_version',
u'data_edit_timestamp',
u'transformers_schema_version',
u'block_structure_schema_version',
]
UNIQUENESS_FIELDS = [u'data_usage_key'] + VERSION_FIELDS
class Meta(object):
db_table = 'block_structure'
data_usage_key = UsageKeyWithRunField(
u'Identifier of the data being collected.',
blank=False,
max_length=255,
unique=True,
)
data_version = models.CharField(
u'Version of the data at the time of collection.',
blank=True,
null=True,
max_length=255,
)
data_edit_timestamp = models.DateTimeField(
u'Edit timestamp of the data at the time of collection.',
blank=True,
null=True,
)
transformers_schema_version = models.CharField(
u'Representation of the schema version of the transformers used during collection.',
blank=False,
max_length=255,
)
block_structure_schema_version = models.CharField(
u'Version of the block structure schema at the time of collection.',
blank=False,
max_length=255,
)
data = CustomizableFileField()
def get_serialized_data(self):
"""
Returns the collected data for this instance.
"""
operation = u'Read'
with _storage_error_handling(self, operation, is_read_operation=True):
serialized_data = self.data.read()
self._log(self, operation, serialized_data)
return serialized_data
@classmethod
def get(cls, data_usage_key):
"""
Returns the entry associated with the given data_usage_key.
Raises:
BlockStructureNotFound if an entry for data_usage_key is not found.
"""
try:
return cls.objects.get(data_usage_key=data_usage_key)
except cls.DoesNotExist:
log.info(u'BlockStructure: Not found in table; %s.', data_usage_key)
raise BlockStructureNotFound(data_usage_key)
@classmethod
def update_or_create(cls, serialized_data, data_usage_key, **kwargs):
"""
Updates or creates the BlockStructureModel entry
for the given data_usage_key in the kwargs,
uploading serialized_data as the content data.
"""
# Use an atomic transaction so the model isn't updated
# unless the file is successfully persisted.
with transaction.atomic():
bs_model, created = cls.objects.update_or_create(defaults=kwargs, data_usage_key=data_usage_key)
operation = u'Created' if created else u'Updated'
with _storage_error_handling(bs_model, operation):
bs_model.data.save('', ContentFile(serialized_data))
cls._log(bs_model, operation, serialized_data)
if not created:
cls._prune_files(data_usage_key)
return bs_model, created
def __str__(self):
"""
Returns a string representation of this model.
"""
return u', '.join(
u'{}: {}'.format(field_name, six.text_type(getattr(self, field_name)))
for field_name in self.UNIQUENESS_FIELDS
)
@classmethod
def _prune_files(cls, data_usage_key, num_to_keep=None):
"""
Deletes previous file versions for data_usage_key.
"""
if not settings.BLOCK_STRUCTURES_SETTINGS.get('PRUNING_ACTIVE', False):
return
if num_to_keep is None:
num_to_keep = config.num_versions_to_keep()
try:
all_files_by_date = sorted(cls._get_all_files(data_usage_key))
files_to_delete = all_files_by_date[:-num_to_keep] if num_to_keep > 0 else all_files_by_date
cls._delete_files(files_to_delete)
log.info(
u'BlockStructure: Deleted %d out of total %d files in store; data_usage_key: %s, num_to_keep: %d.',
len(files_to_delete),
len(all_files_by_date),
data_usage_key,
num_to_keep,
)
except Exception: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception when deleting old files; data_usage_key: %s.', data_usage_key)
@classmethod
def _delete_files(cls, files):
"""
Deletes the given files from storage.
"""
storage = _bs_model_storage()
list(map(storage.delete, files))
@classmethod
def _get_all_files(cls, data_usage_key):
"""
Returns all filenames that exist for the given key.
"""
directory = _directory_name(data_usage_key)
_, filenames = _bs_model_storage().listdir(directory)
return [
_create_path(directory, filename)
for filename in filenames
if filename and not filename.startswith('.')
]
@classmethod
def _log(cls, bs_model, operation, serialized_data):
"""
Writes log information for the given values.
"""
log.info(
u'BlockStructure: %s in store %s at %s%s; %s, size: %d',
operation,
bs_model.data.storage.__class__,
getattr(bs_model.data.storage, 'bucket_name', ''),
getattr(bs_model.data.storage, 'location', ''),
bs_model,
len(serialized_data),
)
| raise | conditional_block |
models.py | """
Models used by the block structure framework.
"""
from __future__ import absolute_import
import errno
from contextlib import contextmanager
from datetime import datetime
from logging import getLogger
import six
from six.moves import map
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.core.files.base import ContentFile
from django.db import models, transaction
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from openedx.core.djangoapps.xmodule_django.models import UsageKeyWithRunField
from openedx.core.storage import get_storage
from . import config
from .exceptions import BlockStructureNotFound
log = getLogger(__name__)
def _create_path(directory, filename):
"""
Returns the full path for the given directory and filename.
"""
return '{}/{}'.format(directory, filename)
def _directory_name(data_usage_key):
"""
Returns the directory name for the given
data_usage_key.
"""
# replace any '/' in the usage key so they aren't interpreted
# as folder separators.
encoded_usage_key = six.text_type(data_usage_key).replace('/', '_')
return '{}{}'.format(
settings.BLOCK_STRUCTURES_SETTINGS.get('DIRECTORY_PREFIX', ''),
encoded_usage_key,
)
def _path_name(bs_model, _filename):
"""
Returns path name to use for the given
BlockStructureModel instance.
"""
filename = datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S-%f')
return _create_path(
_directory_name(bs_model.data_usage_key),
filename,
)
def _bs_model_storage():
"""
Get django Storage object for BlockStructureModel.
"""
return get_storage(
settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_CLASS'),
**settings.BLOCK_STRUCTURES_SETTINGS.get('STORAGE_KWARGS', {})
)
class CustomizableFileField(models.FileField):
"""
Subclass of FileField that allows custom settings to not
be serialized (hard-coded) in migrations. Otherwise,
migrations include optional settings for storage (such as
the storage class and bucket name); we don't want to
create new migration files for each configuration change.
"""
def __init__(self, *args, **kwargs):
kwargs.update(dict(
upload_to=_path_name,
storage=_bs_model_storage(),
max_length=500, # allocate enough for base path + prefix + usage_key + timestamp in filepath
))
super(CustomizableFileField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(CustomizableFileField, self).deconstruct()
del kwargs['upload_to']
del kwargs['storage']
del kwargs['max_length']
return name, path, args, kwargs
@contextmanager
def _storage_error_handling(bs_model, operation, is_read_operation=False):
"""
Helpful context manager that handles various errors
from the backend storage.
Typical errors at read time on configuration changes:
IOError:
- File not found (S3 or FS)
- Bucket name changed (S3)
SuspiciousOperation
- Path mismatches when changing backends
Other known errors:
OSError
- Access issues in creating files (FS)
S3ResponseError
- Incorrect credentials with 403 status (S3)
- Non-existent bucket with 404 status (S3)
"""
try:
yield
except Exception as error: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception %s on store %s; %s.', error.__class__, operation, bs_model)
if isinstance(error, OSError) and error.errno in (errno.EACCES, errno.EPERM): # pylint: disable=no-member
raise
elif is_read_operation and isinstance(error, (IOError, SuspiciousOperation)):
# May have been caused by one of the possible error
# situations listed above. Raise BlockStructureNotFound
# so the block structure can be regenerated and restored.
raise BlockStructureNotFound(bs_model.data_usage_key)
else:
raise
@python_2_unicode_compatible
class BlockStructureModel(TimeStampedModel):
"""
Model for storing Block Structure information.
.. no_pii:
"""
VERSION_FIELDS = [
u'data_version',
u'data_edit_timestamp',
u'transformers_schema_version',
u'block_structure_schema_version',
]
UNIQUENESS_FIELDS = [u'data_usage_key'] + VERSION_FIELDS
class Meta(object):
db_table = 'block_structure'
data_usage_key = UsageKeyWithRunField(
u'Identifier of the data being collected.',
blank=False,
max_length=255,
unique=True,
)
data_version = models.CharField(
u'Version of the data at the time of collection.',
blank=True,
null=True,
max_length=255,
)
data_edit_timestamp = models.DateTimeField(
u'Edit timestamp of the data at the time of collection.',
blank=True,
null=True,
)
transformers_schema_version = models.CharField(
u'Representation of the schema version of the transformers used during collection.',
blank=False,
max_length=255,
)
block_structure_schema_version = models.CharField(
u'Version of the block structure schema at the time of collection.',
blank=False,
max_length=255,
)
data = CustomizableFileField()
def get_serialized_data(self):
"""
Returns the collected data for this instance.
"""
operation = u'Read'
with _storage_error_handling(self, operation, is_read_operation=True):
serialized_data = self.data.read()
self._log(self, operation, serialized_data)
return serialized_data
@classmethod
def get(cls, data_usage_key):
|
@classmethod
def update_or_create(cls, serialized_data, data_usage_key, **kwargs):
"""
Updates or creates the BlockStructureModel entry
for the given data_usage_key in the kwargs,
uploading serialized_data as the content data.
"""
# Use an atomic transaction so the model isn't updated
# unless the file is successfully persisted.
with transaction.atomic():
bs_model, created = cls.objects.update_or_create(defaults=kwargs, data_usage_key=data_usage_key)
operation = u'Created' if created else u'Updated'
with _storage_error_handling(bs_model, operation):
bs_model.data.save('', ContentFile(serialized_data))
cls._log(bs_model, operation, serialized_data)
if not created:
cls._prune_files(data_usage_key)
return bs_model, created
def __str__(self):
"""
Returns a string representation of this model.
"""
return u', '.join(
u'{}: {}'.format(field_name, six.text_type(getattr(self, field_name)))
for field_name in self.UNIQUENESS_FIELDS
)
@classmethod
def _prune_files(cls, data_usage_key, num_to_keep=None):
"""
Deletes previous file versions for data_usage_key.
"""
if not settings.BLOCK_STRUCTURES_SETTINGS.get('PRUNING_ACTIVE', False):
return
if num_to_keep is None:
num_to_keep = config.num_versions_to_keep()
try:
all_files_by_date = sorted(cls._get_all_files(data_usage_key))
files_to_delete = all_files_by_date[:-num_to_keep] if num_to_keep > 0 else all_files_by_date
cls._delete_files(files_to_delete)
log.info(
u'BlockStructure: Deleted %d out of total %d files in store; data_usage_key: %s, num_to_keep: %d.',
len(files_to_delete),
len(all_files_by_date),
data_usage_key,
num_to_keep,
)
except Exception: # pylint: disable=broad-except
log.exception(u'BlockStructure: Exception when deleting old files; data_usage_key: %s.', data_usage_key)
@classmethod
def _delete_files(cls, files):
"""
Deletes the given files from storage.
"""
storage = _bs_model_storage()
list(map(storage.delete, files))
@classmethod
def _get_all_files(cls, data_usage_key):
"""
Returns all filenames that exist for the given key.
"""
directory = _directory_name(data_usage_key)
_, filenames = _bs_model_storage().listdir(directory)
return [
_create_path(directory, filename)
for filename in filenames
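# skip empty names and hidden dot-files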
if filename and not filename.startswith('.')
]
@classmethod
def _log(cls, bs_model, operation, serialized_data):
"""
Writes log information for the given values.
"""
log.info(
u'BlockStructure: %s in store %s at %s%s; %s, size: %d',
operation,
bs_model.data.storage.__class__,
getattr(bs_model.data.storage, 'bucket_name', ''),
getattr(bs_model.data.storage, 'location', ''),
bs_model,
len(serialized_data),
)
| """
Returns the entry associated with the given data_usage_key.
Raises:
BlockStructureNotFound if an entry for data_usage_key is not found.
"""
try:
return cls.objects.get(data_usage_key=data_usage_key)
except cls.DoesNotExist:
log.info(u'BlockStructure: Not found in table; %s.', data_usage_key)
raise BlockStructureNotFound(data_usage_key) | identifier_body |
mcts.rs | use crate::actions::Action;
use crate::Role;
use mcts::{statistics, SearchSettings};
use rand::Rng;
use search_graph;
use std::{cmp, mem};
#[derive(Clone, Debug)]
pub struct Game {}
impl statistics::two_player::PlayerMapping for Role {
fn player_one() -> Self {
Role::Dwarf
}
fn player_two() -> Self {
Role::Troll
}
fn resolve_player(&self) -> statistics::two_player::Player {
match *self {
Role::Dwarf => statistics::two_player::Player::One,
Role::Troll => statistics::two_player::Player::Two,
}
}
}
impl mcts::game::State for crate::state::State {
type Action = Action;
type PlayerId = Role;
fn active_player(&self) -> &Role {
&self.active_role()
}
fn actions<'s>(&'s self) -> Box<dyn Iterator<Item = Action> + 's> {
Box::new(self.actions())
}
fn do_action(&mut self, action: &Action) {
self.do_action(action);
}
}
impl mcts::game::Game for Game {
type Action = Action;
type PlayerId = Role;
type Payoff = statistics::two_player::ScoredPayoff;
type State = crate::state::State;
type Statistics = statistics::two_player::ScoredStatistics<Role>;
fn payoff_of(state: &Self::State) -> Option<Self::Payoff> {
if state.terminated() {
Some(statistics::two_player::ScoredPayoff {
visits: 1,
score_one: state.score(Role::Dwarf) as u32,
score_two: state.score(Role::Troll) as u32,
})
} else {
None
}
}
}
/// Controls how a game action is selected by the [MCTS
/// agent](struct.Agent.html) after MCTS search has terminated and all
/// statistics have been gathered.
#[derive(Debug, Clone, Copy)]
pub enum ActionSelect {
/// Select the action that was visited the most times.
VisitCount,
/// Select the action with the best UCB score.
Ucb,
}
/// Controls how graph compaction is done by the [MCTS agent](struct.Agent.html)
/// before each round of MCTS search.
#[derive(Debug, Clone, Copy)]
pub enum GraphCompact {
/// Prune the search graph so that the current game state and all its
/// descendants are retained, but game states that are not reachable from the
/// current game state are removed.
Prune,
/// Clear the entire search graph.
Clear,
/// Retain the entire contents of the search graph.
Retain,
}
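// Illustrative sketch (not part of the original source): wiring these options
// into an agent, assuming a `SearchSettings` value and an RNG are in scope.
// This selects the most-visited action and prunes the graph between searches:
//
//     let mut agent = Agent::new(settings, 10_000, rng,
//                                ActionSelect::VisitCount, GraphCompact::Prune);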
type SearchGraph =
search_graph::Graph<crate::state::State, mcts::graph::VertexData, mcts::graph::EdgeData<Game>>;
pub struct Agent<R: Rng> {
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
graph: SearchGraph,
}
impl<R: Rng> Agent<R> {
pub fn new(
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
) -> Self {
Agent {
settings,
iterations,
rng,
action_select,
graph_compact,
graph: SearchGraph::new(),
}
}
}
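/// Returns the edge leading to the most-visited child of `root`. Ties are
/// broken uniformly at random via reservoir sampling: the k-th candidate
/// observed with the same best visit count replaces the current pick with
/// probability 1/k.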
fn find_most_visited_child<'a, 'id, R: Rng>(
view: &search_graph::view::View<
'a, | crate::state::State,
mcts::graph::VertexData,
mcts::graph::EdgeData<Game>,
>,
root: search_graph::view::NodeRef<'id>,
mut rng: R,
) -> search_graph::view::EdgeRef<'id> {
let mut children = view.children(root);
let mut best_child = children.next().unwrap();
let mut best_child_visits = view[best_child].statistics.visits();
let mut reservoir_count = 1u32;
for child in children {
let visits = view[child].statistics.visits();
match visits.cmp(&best_child_visits) {
cmp::Ordering::Less => continue,
cmp::Ordering::Equal => {
reservoir_count += 1;
if !rng.gen_bool(1.0f64 / (reservoir_count as f64)) {
continue;
}
}
cmp::Ordering::Greater => reservoir_count = 1,
}
best_child = child;
best_child_visits = visits;
}
best_child
}
impl<R: Rng + Send> crate::agent::Agent for Agent<R> {
fn propose_action(&mut self, state: &crate::state::State) -> crate::agent::Result {
match self.graph_compact {
GraphCompact::Prune => {
if let Some(node) = self.graph.find_node_mut(state) {
search_graph::view::of_node(node, |view, node| {
view.retain_reachable_from(Some(node).into_iter());
});
} else {
mem::swap(&mut self.graph, &mut SearchGraph::new());
}
}
GraphCompact::Clear => mem::swap(&mut self.graph, &mut SearchGraph::new()),
GraphCompact::Retain => (),
}
// Borrow/copy stuff out of self because the closure passed to of_graph
// can't borrow self.
let (rng, graph, settings, iterations, action_select) = (
&mut self.rng,
&mut self.graph,
self.settings.clone(),
self.iterations,
self.action_select,
);
search_graph::view::of_graph(graph, |view| -> crate::agent::Result {
let mut rollout = mcts::RolloutPhase::initialize(rng, settings, state.clone(), view);
for _ in 0..iterations {
let scoring = match rollout.rollout::<mcts::ucb::Rollout>() {
Ok(s) => s,
Err(e) => return Err(Box::new(e)),
};
let backprop = match scoring.score::<mcts::simulation::RandomSimulator>() {
Ok(b) => b,
Err(e) => return Err(Box::new(e)),
};
rollout = backprop
.backprop::<mcts::ucb::BestParentBackprop>()
.expand();
}
let (rng, view) = rollout.recover_components();
let root = view.find_node(state).unwrap();
let child_edge = match action_select {
ActionSelect::Ucb => {
match mcts::ucb::find_best_child(&view, root, settings.explore_bias, rng) {
Ok(child) => child,
Err(e) => return Err(Box::new(e)),
}
}
ActionSelect::VisitCount => find_most_visited_child(&view, root, rng),
};
// Because search graph de-duplication maps each set of equivalent game
// states to a single "canonical" game state, the state in the search graph
// that corresponds to `state` may not actually be the game state at `root`. As
// a result, actions on the root game state need to be mapped back into the
// set of actions on `state`.
let transposed_to_state = view.node_state(view.edge_target(child_edge));
for action in state.actions() {
let mut actual_to_state = state.clone();
actual_to_state.do_action(&action);
if actual_to_state == *transposed_to_state {
return Ok(action);
}
}
unreachable!()
})
}
} | 'id, | random_line_split |
mcts.rs | use crate::actions::Action;
use crate::Role;
use mcts::{statistics, SearchSettings};
use rand::Rng;
use search_graph;
use std::{cmp, mem};
#[derive(Clone, Debug)]
pub struct Game {}
impl statistics::two_player::PlayerMapping for Role {
fn player_one() -> Self {
Role::Dwarf
}
fn player_two() -> Self {
Role::Troll
}
fn resolve_player(&self) -> statistics::two_player::Player {
match *self {
Role::Dwarf => statistics::two_player::Player::One,
Role::Troll => statistics::two_player::Player::Two,
}
}
}
impl mcts::game::State for crate::state::State {
type Action = Action;
type PlayerId = Role;
fn active_player(&self) -> &Role {
&self.active_role()
}
fn actions<'s>(&'s self) -> Box<dyn Iterator<Item = Action> + 's> {
Box::new(self.actions())
}
fn do_action(&mut self, action: &Action) {
self.do_action(action);
}
}
impl mcts::game::Game for Game {
type Action = Action;
type PlayerId = Role;
type Payoff = statistics::two_player::ScoredPayoff;
type State = crate::state::State;
type Statistics = statistics::two_player::ScoredStatistics<Role>;
fn payoff_of(state: &Self::State) -> Option<Self::Payoff> {
if state.terminated() {
Some(statistics::two_player::ScoredPayoff {
visits: 1,
score_one: state.score(Role::Dwarf) as u32,
score_two: state.score(Role::Troll) as u32,
})
} else {
None
}
}
}
/// Controls how a game action is selected by the [MCTS
/// agent](struct.Agent.html) after MCTS search has terminated and all
/// statistics have been gathered.
#[derive(Debug, Clone, Copy)]
pub enum ActionSelect {
/// Select the action that was visited the most times.
VisitCount,
/// Select the action with the best UCB score.
Ucb,
}
/// Controls how graph compaction is done by the [MCTS agent](struct.Agent.html)
/// before each round of MCTS search.
#[derive(Debug, Clone, Copy)]
pub enum GraphCompact {
/// Prune the search graph so that the current game state and all its
/// descendants are retained, but game states that are not reachable from the
/// current game state are removed.
Prune,
/// Clear the entire search graph.
Clear,
/// Retain the entire contents of the search graph.
Retain,
}
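// Illustrative sketch (not part of the original source): wiring these options
// into an agent, assuming a `SearchSettings` value and an RNG are in scope.
// This selects the most-visited action and prunes the graph between searches:
//
//     let mut agent = Agent::new(settings, 10_000, rng,
//                                ActionSelect::VisitCount, GraphCompact::Prune);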
type SearchGraph =
search_graph::Graph<crate::state::State, mcts::graph::VertexData, mcts::graph::EdgeData<Game>>;
pub struct Agent<R: Rng> {
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
graph: SearchGraph,
}
impl<R: Rng> Agent<R> {
pub fn new(
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
) -> Self {
Agent {
settings,
iterations,
rng,
action_select,
graph_compact,
graph: SearchGraph::new(),
}
}
}
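/// Returns the edge leading to the most-visited child of `root`. Ties are
/// broken uniformly at random via reservoir sampling: the k-th candidate
/// observed with the same best visit count replaces the current pick with
/// probability 1/k.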
fn find_most_visited_child<'a, 'id, R: Rng>(
view: &search_graph::view::View<
'a,
'id,
crate::state::State,
mcts::graph::VertexData,
mcts::graph::EdgeData<Game>,
>,
root: search_graph::view::NodeRef<'id>,
mut rng: R,
) -> search_graph::view::EdgeRef<'id> {
let mut children = view.children(root);
let mut best_child = children.next().unwrap();
let mut best_child_visits = view[best_child].statistics.visits();
let mut reservoir_count = 1u32;
for child in children {
let visits = view[child].statistics.visits();
match visits.cmp(&best_child_visits) {
cmp::Ordering::Less => continue,
cmp::Ordering::Equal => {
reservoir_count += 1;
if !rng.gen_bool(1.0f64 / (reservoir_count as f64)) {
continue;
}
}
cmp::Ordering::Greater => reservoir_count = 1,
}
best_child = child;
best_child_visits = visits;
}
best_child
}
impl<R: Rng + Send> crate::agent::Agent for Agent<R> {
fn propose_action(&mut self, state: &crate::state::State) -> crate::agent::Result |
}
| {
match self.graph_compact {
GraphCompact::Prune => {
if let Some(node) = self.graph.find_node_mut(state) {
search_graph::view::of_node(node, |view, node| {
view.retain_reachable_from(Some(node).into_iter());
});
} else {
mem::swap(&mut self.graph, &mut SearchGraph::new());
}
}
GraphCompact::Clear => mem::swap(&mut self.graph, &mut SearchGraph::new()),
GraphCompact::Retain => (),
}
// Borrow/copy stuff out of self because the closure passed to of_graph
// can't borrow self.
let (rng, graph, settings, iterations, action_select) = (
&mut self.rng,
&mut self.graph,
self.settings.clone(),
self.iterations,
self.action_select,
);
search_graph::view::of_graph(graph, |view| -> crate::agent::Result {
let mut rollout = mcts::RolloutPhase::initialize(rng, settings, state.clone(), view);
for _ in 0..iterations {
let scoring = match rollout.rollout::<mcts::ucb::Rollout>() {
Ok(s) => s,
Err(e) => return Err(Box::new(e)),
};
let backprop = match scoring.score::<mcts::simulation::RandomSimulator>() {
Ok(b) => b,
Err(e) => return Err(Box::new(e)),
};
rollout = backprop
.backprop::<mcts::ucb::BestParentBackprop>()
.expand();
}
let (rng, view) = rollout.recover_components();
let root = view.find_node(state).unwrap();
let child_edge = match action_select {
ActionSelect::Ucb => {
match mcts::ucb::find_best_child(&view, root, settings.explore_bias, rng) {
Ok(child) => child,
Err(e) => return Err(Box::new(e)),
}
}
ActionSelect::VisitCount => find_most_visited_child(&view, root, rng),
};
// Because search graph de-duplication maps each set of equivalent game
// states to a single "canonical" game state, the state in the search graph
// that corresponds to `state` may not actually be the game state at `root`. As
// a result, actions on the root game state need to be mapped back into the
// set of actions on `state`.
let transposed_to_state = view.node_state(view.edge_target(child_edge));
for action in state.actions() {
let mut actual_to_state = state.clone();
actual_to_state.do_action(&action);
if actual_to_state == *transposed_to_state {
return Ok(action);
}
}
unreachable!()
})
} | identifier_body |
mcts.rs | use crate::actions::Action;
use crate::Role;
use mcts::{statistics, SearchSettings};
use rand::Rng;
use search_graph;
use std::{cmp, mem};
#[derive(Clone, Debug)]
pub struct Game {}
impl statistics::two_player::PlayerMapping for Role {
fn | () -> Self {
Role::Dwarf
}
fn player_two() -> Self {
Role::Troll
}
fn resolve_player(&self) -> statistics::two_player::Player {
match *self {
Role::Dwarf => statistics::two_player::Player::One,
Role::Troll => statistics::two_player::Player::Two,
}
}
}
impl mcts::game::State for crate::state::State {
type Action = Action;
type PlayerId = Role;
fn active_player(&self) -> &Role {
&self.active_role()
}
fn actions<'s>(&'s self) -> Box<dyn Iterator<Item = Action> + 's> {
Box::new(self.actions())
}
fn do_action(&mut self, action: &Action) {
self.do_action(action);
}
}
impl mcts::game::Game for Game {
type Action = Action;
type PlayerId = Role;
type Payoff = statistics::two_player::ScoredPayoff;
type State = crate::state::State;
type Statistics = statistics::two_player::ScoredStatistics<Role>;
fn payoff_of(state: &Self::State) -> Option<Self::Payoff> {
if state.terminated() {
Some(statistics::two_player::ScoredPayoff {
visits: 1,
score_one: state.score(Role::Dwarf) as u32,
score_two: state.score(Role::Troll) as u32,
})
} else {
None
}
}
}
/// Controls how a game action is selected by the [MCTS
/// agent](struct.Agent.html) after MCTS search has terminated and all
/// statistics have been gathered.
#[derive(Debug, Clone, Copy)]
pub enum ActionSelect {
/// Select the action that was visited the most times.
VisitCount,
/// Select the action with the best UCB score.
Ucb,
}
/// Controls how graph compaction is done by the [MCTS agent](struct.Agent.html)
/// before each round of MCTS search.
#[derive(Debug, Clone, Copy)]
pub enum GraphCompact {
/// Prune the search graph so that the current game state and all its
/// descendants are retained, but game states that are not reachable from the
/// current game state are removed.
Prune,
/// Clear the entire search graph.
Clear,
/// Retain the entire contents of the search graph.
Retain,
}
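// Illustrative sketch (not part of the original source): wiring these options
// into an agent, assuming a `SearchSettings` value and an RNG are in scope.
// This selects the most-visited action and prunes the graph between searches:
//
//     let mut agent = Agent::new(settings, 10_000, rng,
//                                ActionSelect::VisitCount, GraphCompact::Prune);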
type SearchGraph =
search_graph::Graph<crate::state::State, mcts::graph::VertexData, mcts::graph::EdgeData<Game>>;
pub struct Agent<R: Rng> {
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
graph: SearchGraph,
}
impl<R: Rng> Agent<R> {
pub fn new(
settings: SearchSettings,
iterations: u32,
rng: R,
action_select: ActionSelect,
graph_compact: GraphCompact,
) -> Self {
Agent {
settings,
iterations,
rng,
action_select,
graph_compact,
graph: SearchGraph::new(),
}
}
}
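/// Returns the edge leading to the most-visited child of `root`. Ties are
/// broken uniformly at random via reservoir sampling: the k-th candidate
/// observed with the same best visit count replaces the current pick with
/// probability 1/k.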
fn find_most_visited_child<'a, 'id, R: Rng>(
view: &search_graph::view::View<
'a,
'id,
crate::state::State,
mcts::graph::VertexData,
mcts::graph::EdgeData<Game>,
>,
root: search_graph::view::NodeRef<'id>,
mut rng: R,
) -> search_graph::view::EdgeRef<'id> {
let mut children = view.children(root);
let mut best_child = children.next().unwrap();
let mut best_child_visits = view[best_child].statistics.visits();
let mut reservoir_count = 1u32;
for child in children {
let visits = view[child].statistics.visits();
match visits.cmp(&best_child_visits) {
cmp::Ordering::Less => continue,
cmp::Ordering::Equal => {
reservoir_count += 1;
if !rng.gen_bool(1.0f64 / (reservoir_count as f64)) {
continue;
}
}
cmp::Ordering::Greater => reservoir_count = 1,
}
best_child = child;
best_child_visits = visits;
}
best_child
}
impl<R: Rng + Send> crate::agent::Agent for Agent<R> {
fn propose_action(&mut self, state: &crate::state::State) -> crate::agent::Result {
match self.graph_compact {
GraphCompact::Prune => {
if let Some(node) = self.graph.find_node_mut(state) {
search_graph::view::of_node(node, |view, node| {
view.retain_reachable_from(Some(node).into_iter());
});
} else {
mem::swap(&mut self.graph, &mut SearchGraph::new());
}
}
GraphCompact::Clear => mem::swap(&mut self.graph, &mut SearchGraph::new()),
GraphCompact::Retain => (),
}
// Borrow/copy stuff out of self because the closure passed to of_graph
// can't borrow self.
let (rng, graph, settings, iterations, action_select) = (
&mut self.rng,
&mut self.graph,
self.settings.clone(),
self.iterations,
self.action_select,
);
search_graph::view::of_graph(graph, |view| -> crate::agent::Result {
let mut rollout = mcts::RolloutPhase::initialize(rng, settings, state.clone(), view);
for _ in 0..iterations {
let scoring = match rollout.rollout::<mcts::ucb::Rollout>() {
Ok(s) => s,
Err(e) => return Err(Box::new(e)),
};
let backprop = match scoring.score::<mcts::simulation::RandomSimulator>() {
Ok(b) => b,
Err(e) => return Err(Box::new(e)),
};
rollout = backprop
.backprop::<mcts::ucb::BestParentBackprop>()
.expand();
}
let (rng, view) = rollout.recover_components();
let root = view.find_node(state).unwrap();
let child_edge = match action_select {
ActionSelect::Ucb => {
match mcts::ucb::find_best_child(&view, root, settings.explore_bias, rng) {
Ok(child) => child,
Err(e) => return Err(Box::new(e)),
}
}
ActionSelect::VisitCount => find_most_visited_child(&view, root, rng),
};
// Because search graph de-duplication maps each set of equivalent game
// states to a single "canonical" game state, the state in the search graph
// that corresponds to `state` may not actually be the game state at `root`. As
// a result, actions on the root game state need to be mapped back into the
// set of actions on `state`.
let transposed_to_state = view.node_state(view.edge_target(child_edge));
for action in state.actions() {
let mut actual_to_state = state.clone();
actual_to_state.do_action(&action);
if actual_to_state == *transposed_to_state {
return Ok(action);
}
}
unreachable!()
})
}
}
| player_one | identifier_name |
setup.py | from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name="fant_sizer",
version="0.7",
author="Rypiuk Oleksandr",
author_email="[email protected]",
description="fant_sizer command-line file-information",
url="https://github.com/ripiuk/fant_sizer",
keywords="file command-line information size tool recursively",
license="MIT",
classifiers=[
'Topic :: Utilities', | 'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.6'
],
packages=find_packages(),
long_description=open(join(dirname(__file__), "README.rst")).read(),
entry_points={
"console_scripts":
['fant_sizer = fant_sizer.fant_sizer:_main'],
},
) | 'Environment :: Console', | random_line_split |
jvector.js | /**
* jVectorMap demo page
*/
(function ($) {
'use strict';
$('.world-map').vectorMap({
map: 'world_mill_en',
backgroundColor: 'transparent',
zoomOnScroll: false,
strokeWidth: 1,
regionStyle: {
initial: {
fill: $.staticApp.dark,
'fill-opacity': 0.2
},
hover: {
'fill-opacity': 0.3
}
},
markerStyle: {
initial: {
fill: $.staticApp.primary,
stroke: $.staticApp.primary,
'fill-opacity': 1,
'stroke-width': 8,
'stroke-opacity': 0.3,
r: 5
},
hover: {
r: 8,
stroke: $.staticApp.primary, | },
markers: [{
latLng: [41.90, 12.45],
name: 'Vatican City'
}, {
latLng: [43.73, 7.41],
name: 'Monaco'
}, {
latLng: [-0.52, 166.93],
name: 'Nauru'
}, {
latLng: [-8.51, 179.21],
name: 'Tuvalu'
}, {
latLng: [43.93, 12.46],
name: 'San Marino'
}, {
latLng: [47.14, 9.52],
name: 'Liechtenstein'
}, {
latLng: [35.88, 14.5],
name: 'Malta'
}, {
latLng: [13.16, -61.23],
name: 'Saint Vincent and the Grenadines'
}, {
latLng: [-4.61, 55.45],
name: 'Seychelles'
}, {
latLng: [7.35, 134.46],
name: 'Palau'
}, {
latLng: [42.5, 1.51],
name: 'Andorra'
}, {
latLng: [6.91, 158.18],
name: 'Federated States of Micronesia'
}, {
latLng: [1.3, 103.8],
name: 'Singapore'
}, {
latLng: [1.46, 173.03],
name: 'Kiribati'
}, {
latLng: [-21.13, -175.2],
name: 'Tonga'
}, {
latLng: [-20.2, 57.5],
name: 'Mauritius'
}, {
latLng: [26.02, 50.55],
name: 'Bahrain'
}]
});
})(jQuery); | 'stroke-width': 10
} | random_line_split |
klopfer.py | import directory
import scanner
import mapper
import board
import os
class Klopfer(object):
| def __init__(self, import_dir, export_dir):
self.import_dir = import_dir
self.export_dir = export_dir
print("Klopfer class")
def run(self):
# open dir and get oldest file with the given extension
dir = directory.Directory(os, self.import_dir, ['jpg', 'jpeg'])
self.imagefile = dir.get_oldest_file()
# open image
scan = scanner.Scanner(self.imagefile.name)
self.remove_image()
informations = scan.scan()
# load board_id and cards
mapping = mapper.Mapper(informations)
board_id = mapping.board_id
cards = mapping.get_cards()
# create board
current_board = board.Board(board_id, cards)
# write board to json
current_board.export_json(self.export_dir)
# remove old image
def remove_image(self):
# Uncomment in production version when multiple input files are present
# os.remove(self.imagefile.name)
pass | identifier_body |
|
klopfer.py | import directory
import scanner
import mapper
import board
import os
class Klopfer(object):
def __init__(self, import_dir, export_dir):
self.import_dir = import_dir
self.export_dir = export_dir
print("Klopfer class")
def | (self):
# open dir and get oldest file with the given extension
dir = directory.Directory(os, self.import_dir, ['jpg', 'jpeg'])
self.imagefile = dir.get_oldest_file()
# open image
scan = scanner.Scanner(self.imagefile.name)
self.remove_image()
informations = scan.scan()
# load board_id and cards
mapping = mapper.Mapper(informations)
board_id = mapping.board_id
cards = mapping.get_cards()
# create board
current_board = board.Board(board_id, cards)
# write board to json
current_board.export_json(self.export_dir)
# remove old image
def remove_image(self):
# Uncomment in production version when multiple input files are present
# os.remove(self.imagefile.name)
pass
| run | identifier_name |
klopfer.py | import directory
import scanner
import mapper
import board
import os
class Klopfer(object):
def __init__(self, import_dir, export_dir):
self.import_dir = import_dir
self.export_dir = export_dir
print("Klopfer class")
def run(self):
# open dir and get oldest file with the given extension
dir = directory.Directory(os, self.import_dir, ['jpg', 'jpeg'])
self.imagefile = dir.get_oldest_file()
# open image | # load board_id and cards
mapping = mapper.Mapper(informations)
board_id = mapping.board_id
cards = mapping.get_cards()
# create board
current_board = board.Board(board_id, cards)
# write board to json
current_board.export_json(self.export_dir)
# remove old image
def remove_image(self):
# Uncomment in production version when multiple input files are present
# os.remove(self.imagefile.name)
pass | scan = scanner.Scanner(self.imagefile.name)
self.remove_image()
informations = scan.scan() | random_line_split |