Dataset columns (string-length statistics from the source viewer):

    file_name   large_string   lengths 4 - 140
    prefix      large_string   lengths 0 - 39k
    suffix      large_string   lengths 0 - 36.1k
    middle      large_string   lengths 0 - 29.4k
    fim_type    large_string   4 classes
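Each row below pairs a source file with a prefix/suffix split and the held-out middle, labeled by fim_type. As a rough illustration of how such a row is used, the sketch below reassembles the original file and builds a fill-in-the-middle prompt; the helper names and the <fim_*> sentinel strings are assumptions for illustration only, not markers defined by this dataset.

def reconstruct_source(row):
    """The original file text is simply prefix + middle + suffix."""
    return row["prefix"] + row["middle"] + row["suffix"]

def to_fim_prompt(row):
    """Prefix/suffix/middle ordering with placeholder sentinel strings."""
    return ("<fim_prefix>" + row["prefix"] +
            "<fim_suffix>" + row["suffix"] +
            "<fim_middle>" + row["middle"])

if __name__ == "__main__":
    # Row values abbreviated from the log_file.rs example below.
    example = {
        "file_name": "log_file.rs",
        "prefix": "impl fmt::Display for LogFile { fn ",
        "middle": "fmt",
        "suffix": "(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { /* ... */ }",
        "fim_type": "identifier_name",
    }
    print(reconstruct_source(example))
    print(to_fim_prompt(example))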
info.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from __future__ import print_function import inspect import textwrap from six.moves import zip_longest import llnl.util.tty as tty import llnl.util.tty.color as color from llnl.util.tty.colify import colify import spack.cmd.common.arguments as arguments import spack.fetch_strategy as fs import spack.repo import spack.spec from spack.package_base import has_test_method, preferred_version description = "get detailed information on a particular package" section = "basic" level = "short" header_color = "@*b" plain_format = "@." def padder(str_list, extra=0): """Return a function to pad elements of a list.""" length = max(len(str(s)) for s in str_list) + extra def pad(string): string = str(string) padding = max(0, length - len(string)) return string + (padding * " ") return pad def setup_parser(subparser): subparser.add_argument( "-a", "--all", action="store_true", default=False, help="output all package information" ) options = [ ("--detectable", print_detectable.__doc__), ("--maintainers", print_maintainers.__doc__), ("--no-dependencies", "do not " + print_dependencies.__doc__), ("--no-variants", "do not " + print_variants.__doc__), ("--no-versions", "do not " + print_versions.__doc__), ("--phases", print_phases.__doc__), ("--tags", print_tags.__doc__), ("--tests", print_tests.__doc__), ("--virtuals", print_virtuals.__doc__), ] for opt, help_comment in options: subparser.add_argument(opt, action="store_true", help=help_comment) arguments.add_common_arguments(subparser, ["package"]) def section_title(s): return header_color + s + plain_format def version(s): return spack.spec.version_color + s + plain_format def variant(s): return spack.spec.enabled_variant_color + s + plain_format class VariantFormatter(object): def __init__(self, variants): self.variants = variants self.headers = ("Name [Default]", "When", "Allowed values", "Description") # Formats fmt_name = "{0} [{1}]" # Initialize column widths with the length of the # corresponding headers, as they cannot be shorter # than that self.column_widths = [len(x) for x in self.headers] # Expand columns based on max line lengths for k, e in variants.items(): v, w = e candidate_max_widths = ( len(fmt_name.format(k, self.default(v))), # Name [Default] len(str(w)), len(v.allowed_values), # Allowed values len(v.description), # Description ) self.column_widths = ( max(self.column_widths[0], candidate_max_widths[0]), max(self.column_widths[1], candidate_max_widths[1]), max(self.column_widths[2], candidate_max_widths[2]), max(self.column_widths[3], candidate_max_widths[3]), ) # Don't let name or possible values be less than max widths _, cols = tty.terminal_size() max_name = min(self.column_widths[0], 30) max_when = min(self.column_widths[1], 30) max_vals = min(self.column_widths[2], 20) # allow the description column to extend as wide as the terminal. max_description = min( self.column_widths[3], # min width 70 cols, 14 cols of margins and column spacing max(cols, 70) - max_name - max_vals - 14, ) self.column_widths = (max_name, max_when, max_vals, max_description) # Compute the format self.fmt = "%%-%ss%%-%ss%%-%ss%%s" % ( self.column_widths[0] + 4, self.column_widths[1] + 4, self.column_widths[2] + 4, ) def default(self, v): s = "on" if v.default is True else "off" if not isinstance(v.default, bool): s = v.default
return s @property def lines(self): if not self.variants: yield " None" else: yield " " + self.fmt % self.headers underline = tuple([w * "=" for w in self.column_widths]) yield " " + self.fmt % underline yield "" for k, e in sorted(self.variants.items()): v, w = e name = textwrap.wrap( "{0} [{1}]".format(k, self.default(v)), width=self.column_widths[0] ) if all(spec == spack.spec.Spec() for spec in w): w = "--" when = textwrap.wrap(str(w), width=self.column_widths[1]) allowed = v.allowed_values.replace("True, False", "on, off") allowed = textwrap.wrap(allowed, width=self.column_widths[2]) description = [] for d_line in v.description.split("\n"): description += textwrap.wrap(d_line, width=self.column_widths[3]) for t in zip_longest(name, when, allowed, description, fillvalue=""): yield " " + self.fmt % t def print_dependencies(pkg): """output build, link, and run package dependencies""" for deptype in ("build", "link", "run"): color.cprint("") color.cprint(section_title("%s Dependencies:" % deptype.capitalize())) deps = sorted(pkg.dependencies_of_type(deptype)) if deps: colify(deps, indent=4) else: color.cprint(" None") def print_detectable(pkg): """output information on external detection""" color.cprint("") color.cprint(section_title("Externally Detectable: ")) # If the package has an 'executables' of 'libraries' field, it # can detect an installation if hasattr(pkg, "executables") or hasattr(pkg, "libraries"): find_attributes = [] if hasattr(pkg, "determine_version"): find_attributes.append("version") if hasattr(pkg, "determine_variants"): find_attributes.append("variants") # If the package does not define 'determine_version' nor # 'determine_variants', then it must use some custom detection # mechanism. In this case, just inform the user it's detectable somehow. color.cprint( " True{0}".format( " (" + ", ".join(find_attributes) + ")" if find_attributes else "" ) ) else: color.cprint(" False") def print_maintainers(pkg): """output package maintainers""" if len(pkg.maintainers) > 0: mnt = " ".join(["@@" + m for m in pkg.maintainers]) color.cprint("") color.cprint(section_title("Maintainers: ") + mnt) def print_phases(pkg): """output installation phases""" if hasattr(pkg, "phases") and pkg.phases: color.cprint("") color.cprint(section_title("Installation Phases:")) phase_str = "" for phase in pkg.phases: phase_str += " {0}".format(phase) color.cprint(phase_str) def print_tags(pkg): """output package tags""" color.cprint("") color.cprint(section_title("Tags: ")) if hasattr(pkg, "tags"): tags = sorted(pkg.tags) colify(tags, indent=4) else: color.cprint(" None") def print_tests(pkg): """output relevant build-time and stand-alone tests""" # Some built-in base packages (e.g., Autotools) define callback (e.g., # check) inherited by descendant packages. These checks may not result # in build-time testing if the package's build does not implement the # expected functionality (e.g., a 'check' or 'test' targets). # # So the presence of a callback in Spack does not necessarily correspond # to the actual presence of built-time tests for a package. 
for callbacks, phase in [ (pkg.build_time_test_callbacks, "Build"), (pkg.install_time_test_callbacks, "Install"), ]: color.cprint("") color.cprint(section_title("Available {0} Phase Test Methods:".format(phase))) names = [] if callbacks: for name in callbacks: if getattr(pkg, name, False): names.append(name) if names: colify(sorted(names), indent=4) else: color.cprint(" None") # PackageBase defines an empty install/smoke test but we want to know # if it has been overridden and, therefore, assumed to be implemented. color.cprint("") color.cprint(section_title("Stand-Alone/Smoke Test Methods:")) names = [] pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__ if has_test_method(pkg_cls): pkg_base = spack.package_base.PackageBase test_pkgs = [ str(cls.test) for cls in inspect.getmro(pkg_cls) if issubclass(cls, pkg_base) and cls.test != pkg_base.test ] test_pkgs = list(set(test_pkgs)) names.extend([(test.split()[1]).lower() for test in test_pkgs]) # TODO Refactor START # Use code from package_base.py's test_process IF this functionality is # accepted. v_names = list(set([vspec.name for vspec in pkg.virtuals_provided])) # hack for compilers that are not dependencies (yet) # TODO: this all eventually goes away c_names = ("gcc", "intel", "intel-parallel-studio", "pgi") if pkg.name in c_names: v_names.extend(["c", "cxx", "fortran"]) if pkg.spec.satisfies("llvm+clang"): v_names.extend(["c", "cxx"]) # TODO Refactor END v_specs = [spack.spec.Spec(v_name) for v_name in v_names] for v_spec in v_specs: try: pkg_cls = spack.repo.path.get_pkg_class(v_spec.name) if has_test_method(pkg_cls): names.append("{0}.test".format(pkg_cls.name.lower())) except spack.repo.UnknownPackageError: pass if names: colify(sorted(names), indent=4) else: color.cprint(" None") def print_variants(pkg): """output variants""" color.cprint("") color.cprint(section_title("Variants:")) formatter = VariantFormatter(pkg.variants) for line in formatter.lines: color.cprint(color.cescape(line)) def print_versions(pkg): """output versions""" color.cprint("") color.cprint(section_title("Preferred version: ")) if not pkg.versions: color.cprint(version(" None")) color.cprint("") color.cprint(section_title("Safe versions: ")) color.cprint(version(" None")) color.cprint("") color.cprint(section_title("Deprecated versions: ")) color.cprint(version(" None")) else: pad = padder(pkg.versions, 4) preferred = preferred_version(pkg) url = "" if pkg.has_code: url = fs.for_package_version(pkg, preferred) line = version(" {0}".format(pad(preferred))) + color.cescape(url) color.cprint(line) safe = [] deprecated = [] for v in reversed(sorted(pkg.versions)): if pkg.has_code: url = fs.for_package_version(pkg, v) if pkg.versions[v].get("deprecated", False): deprecated.append((v, url)) else: safe.append((v, url)) for title, vers in [("Safe", safe), ("Deprecated", deprecated)]: color.cprint("") color.cprint(section_title("{0} versions: ".format(title))) if not vers: color.cprint(version(" None")) continue for v, url in vers: line = version(" {0}".format(pad(v))) + color.cescape(url) color.cprint(line) def print_virtuals(pkg): """output virtual packages""" color.cprint("") color.cprint(section_title("Virtual Packages: ")) if pkg.provided: inverse_map = {} for spec, whens in pkg.provided.items(): for when in whens: if when not in inverse_map: inverse_map[when] = set() inverse_map[when].add(spec) for when, specs in reversed(sorted(inverse_map.items())): line = " %s provides %s" % ( when.colorized(), ", ".join(s.colorized() for s in specs), ) print(line) else: 
color.cprint(" None") def info(parser, args): spec = spack.spec.Spec(args.package) pkg_cls = spack.repo.path.get_pkg_class(spec.name) pkg = pkg_cls(spec) # Output core package information header = section_title("{0}: ").format(pkg.build_system_class) + pkg.name color.cprint(header) color.cprint("") color.cprint(section_title("Description:")) if pkg.__doc__: color.cprint(color.cescape(pkg.format_doc(indent=4))) else: color.cprint(" None") color.cprint(section_title("Homepage: ") + pkg.homepage) # Now output optional information in expected order sections = [ (args.all or args.maintainers, print_maintainers), (args.all or args.detectable, print_detectable), (args.all or args.tags, print_tags), (args.all or not args.no_versions, print_versions), (args.all or not args.no_variants, print_variants), (args.all or args.phases, print_phases), (args.all or not args.no_dependencies, print_dependencies), (args.all or args.virtuals, print_virtuals), (args.all or args.tests, print_tests), ] for print_it, func in sections: if print_it: func(pkg) color.cprint("")
random_line_split
log_file.rs
use std::collections::{HashMap, HashSet}; use std::fmt; use std::ffi::CString; use std::io::{Result, Error, ErrorKind}; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use libc; use log::{error, info, warn}; use crate::{Data, ArcDataSlice}; use super::*; pub(super) struct LogFile { file_path: Box<PathBuf>, pub file_id: FileId, fd: libc::c_int, pub len: usize, pub max_size: usize, pub file_uuid: uuid::Uuid } impl Drop for LogFile { fn drop(&mut self) { unsafe { libc::close(self.fd); } } } impl fmt::Display for LogFile { fn
(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CRL Sweeper File: {}", self.file_path.as_path().display()) } } #[cfg(target_os="linux")] fn open_synchronous_fd(path: &CString) -> libc::c_int { const O_DIRECT: libc::c_int = 0x4000; const O_DSYNC: libc::c_int = 4000; unsafe { libc::open(path.as_ptr(), libc::O_CREAT | libc::O_RDWR | O_DIRECT | O_DSYNC) } } #[cfg(target_os="macos")] fn open_synchronous_fd(path: &CString) -> libc::c_int { const F_NOCACHE: libc::c_int = 0x30; unsafe { let mut fd = libc::open(path.as_ptr(), libc::O_CREAT | libc::O_RDWR); if fd > 0 { if libc::fchmod(fd, 0o644) < 0 { fd = -1; } } if fd > 0 { if libc::fcntl(fd, F_NOCACHE, 1) < 0 { fd = -1; } } fd } } #[cfg(not(any(target_os="linux", target_os="macos")))] fn open_synchronous_fd(path: &CString) -> libc::c_int { unsafe { libc::open(path.as_ptr(), libc::O_CREAT | libc::O_RDWR) } } impl LogFile { fn new( directory: &Path, file_id: FileId, max_file_size: usize) -> Result<(LogFile, Option<(LogEntrySerialNumber, usize)>)> { let f = format!("{}", file_id.0); let p = directory.join(f); let fp = p.as_path(); let fd = open_synchronous_fd(&CString::new(fp.as_os_str().as_bytes()).unwrap()); if fd < 0 { error!("Failed to open CRL file {}", fp.display()); return Err(Error::last_os_error()); } let mut size = seek(fd, 0, libc::SEEK_END)?; if size < (16 + STATIC_ENTRY_SIZE as usize) { // Initialize seek(fd, 0, libc::SEEK_SET)?; unsafe { libc::ftruncate(fd, 0); } let u = uuid::Uuid::new_v4(); write_bytes(fd, &u.as_bytes()[..])?; } let file_uuid = pread_uuid(fd, 0)?; size = seek(fd, 0, libc::SEEK_END)?; let last = find_last_valid_entry(fd, size, &file_uuid)?; let lf = LogFile{ file_path: Box::new(p), file_id, fd, len: size as usize, max_size: max_file_size, file_uuid }; Ok((lf, last)) } fn read(&self, offset: usize, nbytes: usize) -> Result<Data> { let mut v = Vec::<u8>::with_capacity(nbytes); if nbytes > 0 { v.resize(nbytes, 0); pread_bytes(self.fd, &mut v[..], offset)?; } Ok(Data::new(v)) } pub(super) fn write(&mut self, data: &Vec<ArcDataSlice>) -> Result<()> { let wsize: usize = data.iter().map(|d| d.len()).sum(); unsafe { let iov: Vec<libc::iovec> = data.iter().map( |d| { let p: *const u8 = &d.as_bytes()[0]; libc::iovec { iov_base: p as *mut libc::c_void, iov_len: d.len() } }).collect(); loop { if libc::writev(self.fd, &iov[0], data.len() as libc::c_int) >= 0 { break; } else { let err = Error::last_os_error(); match err.kind() { ErrorKind::Interrupted => (), _ => return { warn!("Unexpected error occured during CRL file write: {}", err); Err(err) } } } } if !( cfg!(target_os="linux") || cfg!(target_os="macos") ) { libc::fsync(self.fd); } } self.len += wsize; Ok(()) } pub(super) fn recycle(&mut self) -> Result<()> { info!("Recycling {}", self); seek(self.fd, 0, libc::SEEK_SET)?; unsafe { libc::ftruncate(self.fd, 0); } self.file_uuid = uuid::Uuid::new_v4(); self.len = 16; write_bytes(self.fd, &self.file_uuid.as_bytes()[..])?; Ok(()) } } fn pread_bytes(fd: libc::c_int, s: &mut [u8], offset: usize) -> Result<()> { if s.len() == 0 { Ok(()) } else { let p: *mut u8 = &mut s[0]; unsafe { if libc::pread(fd, p as *mut libc::c_void, s.len(), offset as libc::off_t) < 0 { Err(Error::last_os_error()) } else { Ok(()) } } } } fn pread_uuid(fd: libc::c_int, offset: usize) -> Result<uuid::Uuid> { let mut buf: [u8; 16] = [0; 16]; pread_bytes(fd, &mut buf[..], offset)?; Ok(uuid::Uuid::from_bytes(buf)) } fn write_bytes(fd: libc::c_int, s: &[u8]) -> Result<()> { let p: *const u8 = &s[0]; unsafe { if libc::write(fd, p as *const 
libc::c_void, s.len()) < 0 { return Err(Error::last_os_error()); } libc::fsync(fd); } Ok(()) } fn seek(fd: libc::c_int, offset: i64, whence: libc::c_int) -> Result<usize> { unsafe { let sz = libc::lseek(fd, offset, whence); if sz < 0 { Err(Error::last_os_error()) } else { Ok(sz as usize) } } } fn find_last_valid_entry( fd: libc::c_int, file_size: usize, file_uuid: &uuid::Uuid) -> Result<Option<(LogEntrySerialNumber, usize)>> { let mut offset = file_size - (file_size % 4096); let mut last = None; while offset > 32 && last.is_none() { let test_uuid = pread_uuid(fd, offset - 16)?; if test_uuid == *file_uuid { let entry_offset = offset - STATIC_ENTRY_SIZE as usize; let mut serial_bytes: [u8; 8] = [0; 8]; pread_bytes(fd, &mut serial_bytes[..], entry_offset)?; let serial = u64::from_le_bytes(serial_bytes); last = Some((LogEntrySerialNumber(serial), entry_offset)); break; } offset -= 4096; } //println!("LAST: {:?} file size {} offset {}", last, file_size, (file_size - (file_size % 4096))); Ok(last) } pub(super) fn recover( crl_directory: &Path, max_file_size: usize, num_streams: usize) -> Result<RecoveredCrlState> { let mut raw_files = Vec::<(LogFile, Option<(LogEntrySerialNumber, usize)>)>::new(); for i in 0 .. num_streams * 3 { let f = LogFile::new(crl_directory, FileId(i as u16), max_file_size)?; raw_files.push(f); } let mut last: Option<(FileId, LogEntrySerialNumber, usize)> = None; for t in &raw_files { if let Some((serial, offset)) = &t.1 { if let Some((_, cur_serial, _)) = &last { if serial > cur_serial { last = Some((t.0.file_id, *serial, *offset)); } } else { last = Some((t.0.file_id, *serial, *offset)) } } } let mut files: Vec<(LogFile, Option<LogEntrySerialNumber>)> = Vec::new(); for t in raw_files { files.push((t.0, t.1.map(|x| x.0))); } let mut tx: Vec<RecoveredTx> = Vec::new(); let mut alloc: Vec<RecoveredAlloc> = Vec::new(); let mut last_entry_serial = LogEntrySerialNumber(0); let mut last_entry_location = FileLocation { file_id: FileId(0), offset: 0, length: 0 }; if let Some((last_file_id, last_serial, last_offset)) = last { last_entry_serial = last_serial; last_entry_location = FileLocation { file_id: last_file_id, offset: last_offset as u64, length: STATIC_ENTRY_SIZE as u32 }; let mut transactions: HashMap<TxId, RecoveringTx> = HashMap::new(); let mut allocations: HashMap<TxId, RecoveringAlloc> = HashMap::new(); let mut deleted_tx: HashSet<TxId> = HashSet::new(); let mut deleted_alloc: HashSet<TxId> = HashSet::new(); let mut file_id = last_file_id; let mut entry_serial = last_serial; let mut entry_block_offset = last_offset; let earliest_serial_needed = { let mut d = files[last_file_id.0 as usize].0.read(last_offset, STATIC_ENTRY_SIZE as usize)?; let entry = encoding::decode_entry(&mut d)?; LogEntrySerialNumber(entry.earliest_needed) }; while entry_serial >= earliest_serial_needed { let file = &files[file_id.0 as usize].0; let mut d = file.read(entry_block_offset, STATIC_ENTRY_SIZE as usize)?; let mut entry = encoding::decode_entry(&mut d)?; entry_serial = entry.serial; //println!("Reading Entry {:?} entry_block_offset {} entry offset {}", entry_serial, entry_block_offset, entry.entry_offset); let entry_data_size = entry_block_offset - entry.entry_offset as usize; let mut entry_data = file.read(entry.entry_offset as usize, entry_data_size)?; encoding::load_entry_data(&mut entry_data, &mut entry, entry_serial)?; for txid in &entry.tx_deletions { deleted_tx.insert(*txid); } for txid in &entry.alloc_deletions { deleted_alloc.insert(*txid); } for rtx in entry.transactions { if ! 
deleted_tx.contains(&rtx.id) && ! transactions.contains_key(&rtx.id) { transactions.insert(rtx.id, rtx); } } for ra in entry.allocations { if ! deleted_alloc.contains(&ra.id) && ! allocations.contains_key(&ra.id) { allocations.insert(ra.id, ra); } } if entry.previous_entry_location.offset < 16 { break; // Cannot have an offset of 0 (first 16 bytes of the file are the UUID) } file_id = entry.previous_entry_location.file_id; entry_block_offset = entry.previous_entry_location.offset as usize; } let get_data = |file_location: &FileLocation| -> Result<ArcData> { let d = files[file_location.file_id.0 as usize].0.read(file_location.offset as usize, file_location.length as usize)?; Ok(d.into()) }; let get_slice = |file_location: &FileLocation| -> Result<ArcDataSlice> { let d = get_data(file_location)?; Ok(d.into()) }; for (txid, rtx) in transactions { let mut ou: Vec<transaction::ObjectUpdate> = Vec::with_capacity(rtx.object_updates.len()); for t in &rtx.object_updates { ou.push(transaction::ObjectUpdate { object_id: object::Id(t.0), data: get_slice(&t.1)? }); } tx.push( RecoveredTx { id: txid, txd_location: rtx.serialized_transaction_description, serialized_transaction_description: get_data(&rtx.serialized_transaction_description)?, object_updates: ou, update_locations: rtx.object_updates, tx_disposition: rtx.tx_disposition, paxos_state: rtx.paxos_state, last_entry_serial: rtx.last_entry_serial }); } for (txid, ra) in allocations { alloc.push(RecoveredAlloc{ id: txid, store_pointer: ra.store_pointer, object_id: ra.object_id, kind: ra.kind, size: ra.size, data_location: ra.data, data: get_data(&ra.data)?, refcount: ra.refcount, timestamp: ra.timestamp, serialized_revision_guard: ra.serialized_revision_guard, last_entry_serial: ra.last_entry_serial }); } }; Ok(RecoveredCrlState { log_files: files, transactions: tx, allocations: alloc, last_entry_serial, last_entry_location }) } #[cfg(test)] mod tests { use tempdir::TempDir; use super::*; #[test] fn initialization() { let t = TempDir::new("test").unwrap(); let (l, o) = LogFile::new(t.path(), FileId(0), 50).unwrap(); let u = l.file_uuid; assert!(o.is_none()); assert_eq!(l.len, 16); let ru = pread_uuid(l.fd, 0).unwrap(); assert_eq!(u, ru); unsafe { let n = libc::lseek(l.fd, 0, libc::SEEK_END); assert_eq!(n, 16); } } #[test] fn recycle() { let t = TempDir::new("test").unwrap(); let (mut l, o) = LogFile::new(t.path(), FileId(0), 50).unwrap(); let u = l.file_uuid; assert!(o.is_none()); assert_eq!(l.len, 16); let ru = pread_uuid(l.fd, 0).unwrap(); assert_eq!(u, ru); write_bytes(l.fd, &[1u8,2u8,3u8,4u8]).unwrap(); unsafe { let n = libc::lseek(l.fd, 0, libc::SEEK_END); assert_eq!(n, 20); } l.recycle().unwrap(); let ru = pread_uuid(l.fd, 0).unwrap(); assert_ne!(u, ru); assert_eq!(l.file_uuid, ru); unsafe { let n = libc::lseek(l.fd, 0, libc::SEEK_END); assert_eq!(n, 16); } } }
fmt
identifier_name
sketch.js
const NUM_PARTICLES = 0; const SIZE = 5; const SIZE_D2 = SIZE / 2.0; const STEPS = 4; const TTYPE_DRAG = 0; const TTYPE_TRIANGLE = 1; const TTYPE_SQUARE = 2; const GRID_SIZE = 40; var grid_w, grid_h; var grid = null; var particles = null; var constraints = null; var bodies = null; var physics = null; var initGravityX = 0; var initGravityY = 0.1; var gravity = null; var pointDragging = false; var dragDist = 150; var currP = null; var delta = null; var drawFill = true; var drawPoints = false; var showDebugText = true; var mouseInsideSketch = true; var demoType = 'CLOTH'; var isPaused = false; var toolType = TTYPE_DRAG; let clothWidth = 25; let clothHeight = 20; let clothSpacing = 16; let clothConstraintLength = 20; let clothAttachPoints = 2; let clothXMargin = null; let webPoints = 40; let webRings = 12; let webSize = 200; let webSpacing = 12; let angleStep = 0.5; let canTear = false; let tearMult = 5; let tearStr = clothConstraintLength * tearMult; let tearStrSq = tearStr * tearStr; function setup() { let canvas = createCanvas(windowWidth, windowHeight); canvas.parent("#sketch"); canvas.attribute('oncontextmenu', 'return false;'); init(); initSettingsUI(); } function init() { grid = [] particles = []; constraints = []; bodies = []; physics = new Physics(); gravity = createVector(initGravityX, initGravityY); clothXMargin = (width - (clothWidth * clothSpacing)) / 2; // createSpiderWebSim(); createClothSim(); // Random particles for (let i = 0; i < NUM_PARTICLES; i++) { let p = new Particle(random() * width, random() * height); p.px += random() * 2 - 1; p.py += random() * 2 - 1; particles.push(p); } constrainPoints(); } function draw() { background(125); updateParticles(); for (let i = 0; i < STEPS; i++) { updateConstraints(); for (let body1 of bodies) { body1.calculateBBox(); for (let body2 of bodies) { if (body1 === body2) continue; if (physics.detectCollision(body1, body2)) physics.processCollision(); } } constrainPoints(); } buildGrid(); if (pointDragging) { if (currP) { currP.x = mouseX; currP.y = mouseY; } else { currP = getParticleAt(mouseX, mouseY); } } else { currP = null; } stroke(100); for (let x = 0; x < grid_w; x++) { line(x * GRID_SIZE, 0, x * GRID_SIZE, height); } for (let y = 0; y < grid_h; y++) { line(0, y * GRID_SIZE, width, y * GRID_SIZE); } if (drawFill) { for (let i = 0; i < bodies.length; i++) { let body = bodies[i]; fill((i * 10) % 255, (i * 5) % 255, (254 - i * 5) % 255); beginShape(); for (let point of body.vertices) { vertex(point.x, point.y); } endShape(); } } // Draw the constraints stroke(0); for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; line(c.p1.x, c.p1.y, c.p2.x, c.p2.y); } noStroke(); // Draw the points if (drawPoints) { fill(255, 255, 0); for (let i = 0; i < particles.length; i++) { rect(particles[i].x - SIZE_D2, particles[i].y - SIZE_D2, SIZE, SIZE); } } if (showDebugText) { fill(255); text('Particles: ' + particles.length + ' | Constraints: ' + constraints.length, 12, 12); text('Gravity: ' + gravity.x + ', ' + gravity.y, 12, 24); text('FPS: ' + frameRate(), 12, 38); text('Delta: ' + deltaTime, 12, 50); text('Dragging: ' + pointDragging, 12, 64); } } function mousePressed() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } else if (toolType == TTYPE_TRIANGLE) { createTriangle(mouseX, mouseY, 25 + random(100)); } else if (toolType == TTYPE_SQUARE) { createBox(mouseX, mouseY, 25 + random(100)); } if (isPaused) redraw(); // let p = 
new Particle(mouseX, mouseY); // p.px += random() * 2 - 1; // p.py += random() * 2 - 1; // constraints.push(new Constraint(particles[particles.length - 1], p, random() * 10 + 10)); // particles.push(p); } function mouseDragged() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } } function mouseReleased() { mouseInsideSketch = true; pointDragging = false; } function windowResized() { resizeCanvas(windowWidth, windowHeight); buildGrid(); } function buildGrid() { grid = []; grid_w = Math.ceil(width / GRID_SIZE); grid_h = Math.ceil(height / GRID_SIZE); for (let i = 0; i < grid_w * grid_h; i++) grid.push([]); for (let i = 0; i < particles.length; i++) { let cx = floor(particles[i].x / GRID_SIZE); let cy = floor(particles[i].y / GRID_SIZE); if (cx < 0 || cx >= grid_w || cy < 0 || cy >= grid_h) continue; grid[cx + cy * grid_w].push(particles[i]); } } function getParticleAt(x, y) { let cx = floor(x / GRID_SIZE); let cy = floor(y / GRID_SIZE); for (let x0 = cx - 1; x0 < cx + 1; x0++) { for (let y0 = cy - 1; y0 < cy + 1; y0++) { if (x0 < 0 || x0 >= grid_w || y0 < 0 || y0 >= grid_h) continue; let cell = grid[x0 + y0 * grid_w]; for (let i = 0; i < cell.length; i++) { let pDistX = (cell[i].x - x); let pDistY = (cell[i].y - y); if (pDistX * pDistX + pDistY * pDistY < dragDist) return cell[i]; } } } return null; } function updateParticles() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; let old_x = p.x; let old_y = p.y; if (p.invmass > 0) { p.x += gravity.x; p.y += gravity.y; p.x += (p.x - p.px); p.y += (p.y - p.py); } p.px = old_x; p.py = old_y; } } function
() { let constToBeRemoved = []; for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; if (!c.p1 || !c.p2) continue; let dx = c.p1.x - c.p2.x; let dy = c.p1.y - c.p2.y; if (dx == 0 && dy == 0) { dx += Math.random() * 0.1; dy += Math.random() * 0.1; } // let d = Math.sqrt((dx * dx) + (dy * dy)); // if (!c.pushing && d < c.l) // continue; // if (canTear) { // let tearStr = c.l * tearMult; // if (d > tearStr) { // constraints[i] = constraints[constraints.length - 1]; // i--; // constraints.pop(); // continue; // } // } // let percent = ((d - c.l) * // (c.p1.invmass + c.p2.invmass)) / // d; // Squared dist for optimization let dSq = (dx * dx) + (dy * dy); if (!c.pushing && dSq < c.lSq) continue; if (canTear && c.canTear) { // let tearStrSq = c.lSq * tearMult; if (dSq > tearStrSq) { constraints[i] = constraints[constraints.length - 1]; i--; constraints.pop(); continue; } } let percent = ((dSq - c.lSq) * (c.p1.invmass + c.p2.invmass)) / dSq; dx *= percent; dy *= percent; c.p1.x -= dx * c.p1.invmass;; c.p1.y -= dy * c.p1.invmass;; c.p2.x += dx * c.p2.invmass;; c.p2.y += dy * c.p2.invmass;; } } function constrainPoints() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; if (p.x < SIZE) { p.x = SIZE; } else if (p.x >= width - SIZE) { p.x = width - SIZE; } if (p.y < SIZE) { p.y = SIZE; } else if (p.y >= height - SIZE) { p.x -= (p.y - height + SIZE) * (p.x - p.px) * this.physics.friction; p.y = height - SIZE; } } } function Particle(x, y) { this.x = x; this.y = y; this.px = x; this.py = y; this.invmass = 0.3; } function Constraint(p1, p2, l, pushing = true, canTear = false, tearMult = 1) { this.p1 = p1; this.p2 = p2; this.l = l; this.lSq = l * l; this.pushing = pushing; this.canTear = canTear; this.tearStr = l * tearMult; this.tearStrSq = this.lSq * tearMult; } function createTriangle(x, y, size) { let body = new Body(); let a = 0; let l = 3; let astep = TWO_PI / l; for (let i = 0; i < l; i++) { p = new Particle(x + Math.sin(a) * size, y + Math.cos(a) * size); a += astep; if (i > 0) { let c = new Constraint( particles[particles.length - 1], p, size, true, false); constraints.push(c); body.constraints.push(c); } particles.push(p); body.vertices.push(p); } // Join ends of polygon let end = new Constraint( particles[particles.length - 1], particles[particles.length - l], size, true, false); constraints.push(end); body.constraints.push(end); body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createBox(x, y, size) { let body = new Body(); let hsize = size * 0.5; let vertices = []; vertices.push(new Particle(x - hsize, y - hsize)); vertices.push(new Particle(x + hsize, y - hsize)); vertices.push(new Particle(x + hsize, y + hsize)); vertices.push(new Particle(x - hsize, y + hsize)); particles.push(...vertices); body.vertices.push(...vertices); for (let i = 0; i < vertices.length; i++) { let c = new Constraint( vertices[(i + 1) % vertices.length], vertices[i], size); constraints.push(c); body.constraints.push(c); if (i > 1) { let d = new Constraint(vertices[(i + 2) % vertices.length], vertices[i], size * sqrt(2.0)); constraints.push(d); body.constraints.push(d); } } body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createClothSim() { for (let y = 0; y < clothHeight; y += 1) { for (let x = 0; x < clothWidth; x += 1) { let p = new Particle(x * clothSpacing + clothXMargin, y + 50); p.px += random() * 5 - 2.5; if (x > 0) { constraints.push(new 
Constraint( particles[x - 1 + y * clothWidth], p, clothConstraintLength, false, true, tearMult)); } if (y > 0) { constraints.push(new Constraint( particles[x + (y - 1) * clothWidth], p, clothConstraintLength, false, true, tearMult)); } else { if (y == 0 && x % clothAttachPoints == 0) p.invmass = 0; } particles.push(p); } } } function createSpiderWebSim() { let angleStep = TWO_PI / webPoints; for (let i = 0; i < webPoints; i++) { for (let j = 0; j < webRings; j++) { let a = i * angleStep; let s = ((webRings - j) / webRings) * webSize; let p = new Particle(width/2 + s * sin(a), height/2 + s * cos(a)); let spacing = webSpacing; if (particles.length > 0) { if (j > 0) { constraints.push(new Constraint( particles[particles.length - 1], p, spacing)); } if (i > 0) { constraints.push(new Constraint( particles[particles.length - webRings], p, spacing)); } if (i == webPoints - 1) { constraints.push(new Constraint( particles[j], p, spacing)); } } if (j == 0) p.invmass = 0; particles.push(p); } } }
updateConstraints
identifier_name
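The identifier masked in the example above is updateConstraints, a Jakobsen-style relaxation pass over distance constraints: it measures how far each particle pair is from its rest length and nudges both endpoints back in proportion to their inverse mass, using squared distances to avoid one square root per constraint. A minimal sketch of the exact (square-root) form of that step, reusing the Constraint fields p1, p2, l and invmass from sketch.js; the helper name relaxConstraint is illustrative, not part of the original code.

function relaxConstraint(c) {
  let dx = c.p1.x - c.p2.x;
  let dy = c.p1.y - c.p2.y;
  // Guard against a zero-length constraint so the division below stays finite.
  let d = Math.sqrt(dx * dx + dy * dy) || 1e-4;
  let invMassSum = c.p1.invmass + c.p2.invmass;
  if (invMassSum === 0) return;            // both endpoints are pinned
  let diff = (d - c.l) / d;                // relative violation of the rest length
  // Each endpoint absorbs a share of the correction proportional to its inverse mass.
  c.p1.x -= dx * diff * (c.p1.invmass / invMassSum);
  c.p1.y -= dy * diff * (c.p1.invmass / invMassSum);
  c.p2.x += dx * diff * (c.p2.invmass / invMassSum);
  c.p2.y += dy * diff * (c.p2.invmass / invMassSum);
}

Running this once per constraint, several times per frame (the STEPS loop in draw), converges the cloth toward satisfying all rest lengths simultaneously.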
sketch.js
const NUM_PARTICLES = 0; const SIZE = 5; const SIZE_D2 = SIZE / 2.0; const STEPS = 4; const TTYPE_DRAG = 0; const TTYPE_TRIANGLE = 1; const TTYPE_SQUARE = 2; const GRID_SIZE = 40; var grid_w, grid_h; var grid = null; var particles = null; var constraints = null; var bodies = null; var physics = null; var initGravityX = 0; var initGravityY = 0.1; var gravity = null; var pointDragging = false; var dragDist = 150; var currP = null; var delta = null; var drawFill = true; var drawPoints = false; var showDebugText = true; var mouseInsideSketch = true; var demoType = 'CLOTH'; var isPaused = false; var toolType = TTYPE_DRAG; let clothWidth = 25; let clothHeight = 20; let clothSpacing = 16; let clothConstraintLength = 20; let clothAttachPoints = 2; let clothXMargin = null; let webPoints = 40; let webRings = 12; let webSize = 200; let webSpacing = 12; let angleStep = 0.5; let canTear = false; let tearMult = 5; let tearStr = clothConstraintLength * tearMult; let tearStrSq = tearStr * tearStr; function setup() { let canvas = createCanvas(windowWidth, windowHeight); canvas.parent("#sketch"); canvas.attribute('oncontextmenu', 'return false;'); init(); initSettingsUI(); } function init() { grid = [] particles = []; constraints = []; bodies = []; physics = new Physics(); gravity = createVector(initGravityX, initGravityY); clothXMargin = (width - (clothWidth * clothSpacing)) / 2; // createSpiderWebSim(); createClothSim(); // Random particles for (let i = 0; i < NUM_PARTICLES; i++) { let p = new Particle(random() * width, random() * height); p.px += random() * 2 - 1; p.py += random() * 2 - 1; particles.push(p); } constrainPoints(); } function draw() { background(125); updateParticles(); for (let i = 0; i < STEPS; i++) { updateConstraints(); for (let body1 of bodies) { body1.calculateBBox(); for (let body2 of bodies) { if (body1 === body2) continue; if (physics.detectCollision(body1, body2)) physics.processCollision(); } } constrainPoints(); } buildGrid(); if (pointDragging) { if (currP) { currP.x = mouseX; currP.y = mouseY; } else { currP = getParticleAt(mouseX, mouseY); } } else { currP = null; } stroke(100); for (let x = 0; x < grid_w; x++) { line(x * GRID_SIZE, 0, x * GRID_SIZE, height); } for (let y = 0; y < grid_h; y++) { line(0, y * GRID_SIZE, width, y * GRID_SIZE); } if (drawFill) { for (let i = 0; i < bodies.length; i++) { let body = bodies[i]; fill((i * 10) % 255, (i * 5) % 255, (254 - i * 5) % 255); beginShape(); for (let point of body.vertices) { vertex(point.x, point.y); } endShape(); } } // Draw the constraints stroke(0); for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; line(c.p1.x, c.p1.y, c.p2.x, c.p2.y); } noStroke(); // Draw the points if (drawPoints) { fill(255, 255, 0); for (let i = 0; i < particles.length; i++) { rect(particles[i].x - SIZE_D2, particles[i].y - SIZE_D2, SIZE, SIZE); } } if (showDebugText) { fill(255); text('Particles: ' + particles.length + ' | Constraints: ' + constraints.length, 12, 12); text('Gravity: ' + gravity.x + ', ' + gravity.y, 12, 24); text('FPS: ' + frameRate(), 12, 38); text('Delta: ' + deltaTime, 12, 50); text('Dragging: ' + pointDragging, 12, 64); } } function mousePressed() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } else if (toolType == TTYPE_TRIANGLE) { createTriangle(mouseX, mouseY, 25 + random(100)); } else if (toolType == TTYPE_SQUARE) { createBox(mouseX, mouseY, 25 + random(100)); } if (isPaused) redraw(); // let p = 
new Particle(mouseX, mouseY); // p.px += random() * 2 - 1; // p.py += random() * 2 - 1; // constraints.push(new Constraint(particles[particles.length - 1], p, random() * 10 + 10)); // particles.push(p); } function mouseDragged() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } } function mouseReleased() { mouseInsideSketch = true; pointDragging = false; } function windowResized() { resizeCanvas(windowWidth, windowHeight); buildGrid(); } function buildGrid() { grid = []; grid_w = Math.ceil(width / GRID_SIZE); grid_h = Math.ceil(height / GRID_SIZE); for (let i = 0; i < grid_w * grid_h; i++) grid.push([]); for (let i = 0; i < particles.length; i++) { let cx = floor(particles[i].x / GRID_SIZE); let cy = floor(particles[i].y / GRID_SIZE); if (cx < 0 || cx >= grid_w || cy < 0 || cy >= grid_h) continue; grid[cx + cy * grid_w].push(particles[i]); } } function getParticleAt(x, y) { let cx = floor(x / GRID_SIZE); let cy = floor(y / GRID_SIZE); for (let x0 = cx - 1; x0 < cx + 1; x0++) { for (let y0 = cy - 1; y0 < cy + 1; y0++) { if (x0 < 0 || x0 >= grid_w || y0 < 0 || y0 >= grid_h) continue; let cell = grid[x0 + y0 * grid_w]; for (let i = 0; i < cell.length; i++) { let pDistX = (cell[i].x - x); let pDistY = (cell[i].y - y); if (pDistX * pDistX + pDistY * pDistY < dragDist) return cell[i]; } } } return null; } function updateParticles() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; let old_x = p.x; let old_y = p.y; if (p.invmass > 0) { p.x += gravity.x; p.y += gravity.y; p.x += (p.x - p.px); p.y += (p.y - p.py); } p.px = old_x; p.py = old_y; } } function updateConstraints() { let constToBeRemoved = []; for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; if (!c.p1 || !c.p2) continue; let dx = c.p1.x - c.p2.x; let dy = c.p1.y - c.p2.y; if (dx == 0 && dy == 0) { dx += Math.random() * 0.1; dy += Math.random() * 0.1; } // let d = Math.sqrt((dx * dx) + (dy * dy)); // if (!c.pushing && d < c.l) // continue; // if (canTear) { // let tearStr = c.l * tearMult; // if (d > tearStr) { // constraints[i] = constraints[constraints.length - 1]; // i--; // constraints.pop(); // continue; // } // } // let percent = ((d - c.l) * // (c.p1.invmass + c.p2.invmass)) / // d; // Squared dist for optimization let dSq = (dx * dx) + (dy * dy); if (!c.pushing && dSq < c.lSq) continue; if (canTear && c.canTear) { // let tearStrSq = c.lSq * tearMult; if (dSq > tearStrSq) { constraints[i] = constraints[constraints.length - 1]; i--; constraints.pop(); continue; } } let percent = ((dSq - c.lSq) * (c.p1.invmass + c.p2.invmass)) / dSq; dx *= percent; dy *= percent; c.p1.x -= dx * c.p1.invmass;; c.p1.y -= dy * c.p1.invmass;; c.p2.x += dx * c.p2.invmass;; c.p2.y += dy * c.p2.invmass;; } } function constrainPoints() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; if (p.x < SIZE) { p.x = SIZE; } else if (p.x >= width - SIZE) { p.x = width - SIZE; } if (p.y < SIZE) { p.y = SIZE; } else if (p.y >= height - SIZE) { p.x -= (p.y - height + SIZE) * (p.x - p.px) * this.physics.friction; p.y = height - SIZE; } } } function Particle(x, y)
function Constraint(p1, p2, l, pushing = true, canTear = false, tearMult = 1) { this.p1 = p1; this.p2 = p2; this.l = l; this.lSq = l * l; this.pushing = pushing; this.canTear = canTear; this.tearStr = l * tearMult; this.tearStrSq = this.lSq * tearMult; } function createTriangle(x, y, size) { let body = new Body(); let a = 0; let l = 3; let astep = TWO_PI / l; for (let i = 0; i < l; i++) { p = new Particle(x + Math.sin(a) * size, y + Math.cos(a) * size); a += astep; if (i > 0) { let c = new Constraint( particles[particles.length - 1], p, size, true, false); constraints.push(c); body.constraints.push(c); } particles.push(p); body.vertices.push(p); } // Join ends of polygon let end = new Constraint( particles[particles.length - 1], particles[particles.length - l], size, true, false); constraints.push(end); body.constraints.push(end); body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createBox(x, y, size) { let body = new Body(); let hsize = size * 0.5; let vertices = []; vertices.push(new Particle(x - hsize, y - hsize)); vertices.push(new Particle(x + hsize, y - hsize)); vertices.push(new Particle(x + hsize, y + hsize)); vertices.push(new Particle(x - hsize, y + hsize)); particles.push(...vertices); body.vertices.push(...vertices); for (let i = 0; i < vertices.length; i++) { let c = new Constraint( vertices[(i + 1) % vertices.length], vertices[i], size); constraints.push(c); body.constraints.push(c); if (i > 1) { let d = new Constraint(vertices[(i + 2) % vertices.length], vertices[i], size * sqrt(2.0)); constraints.push(d); body.constraints.push(d); } } body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createClothSim() { for (let y = 0; y < clothHeight; y += 1) { for (let x = 0; x < clothWidth; x += 1) { let p = new Particle(x * clothSpacing + clothXMargin, y + 50); p.px += random() * 5 - 2.5; if (x > 0) { constraints.push(new Constraint( particles[x - 1 + y * clothWidth], p, clothConstraintLength, false, true, tearMult)); } if (y > 0) { constraints.push(new Constraint( particles[x + (y - 1) * clothWidth], p, clothConstraintLength, false, true, tearMult)); } else { if (y == 0 && x % clothAttachPoints == 0) p.invmass = 0; } particles.push(p); } } } function createSpiderWebSim() { let angleStep = TWO_PI / webPoints; for (let i = 0; i < webPoints; i++) { for (let j = 0; j < webRings; j++) { let a = i * angleStep; let s = ((webRings - j) / webRings) * webSize; let p = new Particle(width/2 + s * sin(a), height/2 + s * cos(a)); let spacing = webSpacing; if (particles.length > 0) { if (j > 0) { constraints.push(new Constraint( particles[particles.length - 1], p, spacing)); } if (i > 0) { constraints.push(new Constraint( particles[particles.length - webRings], p, spacing)); } if (i == webPoints - 1) { constraints.push(new Constraint( particles[j], p, spacing)); } } if (j == 0) p.invmass = 0; particles.push(p); } } }
{ this.x = x; this.y = y; this.px = x; this.py = y; this.invmass = 0.3; }
identifier_body
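The body filled in above is the Particle constructor: (x, y) is the current position, (px, py) the previous one, and invmass the inverse mass (0 means pinned). Velocity is never stored explicitly; it is implied by the difference between the two positions, which is what updateParticles exploits. A minimal sketch of that position-Verlet update under the same field names; verletStep is a hypothetical helper, and the original folds gravity in slightly differently (it adds gravity before taking the position difference).

function verletStep(p, ax, ay) {
  if (p.invmass === 0) return;            // pinned particles do not move
  let newX = 2 * p.x - p.px + ax;         // x + (x - px) + acceleration
  let newY = 2 * p.y - p.py + ay;
  p.px = p.x;                             // current position becomes the previous one
  p.py = p.y;
  p.x = newX;
  p.y = newY;
}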
sketch.js
const NUM_PARTICLES = 0; const SIZE = 5; const SIZE_D2 = SIZE / 2.0; const STEPS = 4; const TTYPE_DRAG = 0; const TTYPE_TRIANGLE = 1; const TTYPE_SQUARE = 2; const GRID_SIZE = 40; var grid_w, grid_h; var grid = null; var particles = null; var constraints = null; var bodies = null; var physics = null; var initGravityX = 0; var initGravityY = 0.1; var gravity = null; var pointDragging = false; var dragDist = 150; var currP = null; var delta = null; var drawFill = true; var drawPoints = false; var showDebugText = true; var mouseInsideSketch = true; var demoType = 'CLOTH'; var isPaused = false; var toolType = TTYPE_DRAG; let clothWidth = 25; let clothHeight = 20; let clothSpacing = 16; let clothConstraintLength = 20; let clothAttachPoints = 2; let clothXMargin = null; let webPoints = 40; let webRings = 12; let webSize = 200; let webSpacing = 12; let angleStep = 0.5; let canTear = false; let tearMult = 5; let tearStr = clothConstraintLength * tearMult; let tearStrSq = tearStr * tearStr; function setup() { let canvas = createCanvas(windowWidth, windowHeight); canvas.parent("#sketch"); canvas.attribute('oncontextmenu', 'return false;'); init(); initSettingsUI(); } function init() { grid = [] particles = []; constraints = []; bodies = []; physics = new Physics(); gravity = createVector(initGravityX, initGravityY); clothXMargin = (width - (clothWidth * clothSpacing)) / 2; // createSpiderWebSim(); createClothSim(); // Random particles for (let i = 0; i < NUM_PARTICLES; i++) { let p = new Particle(random() * width, random() * height); p.px += random() * 2 - 1; p.py += random() * 2 - 1; particles.push(p); } constrainPoints(); } function draw() { background(125); updateParticles(); for (let i = 0; i < STEPS; i++) { updateConstraints(); for (let body1 of bodies) { body1.calculateBBox(); for (let body2 of bodies) { if (body1 === body2) continue; if (physics.detectCollision(body1, body2)) physics.processCollision(); } } constrainPoints(); } buildGrid(); if (pointDragging) { if (currP) { currP.x = mouseX; currP.y = mouseY; } else { currP = getParticleAt(mouseX, mouseY); } } else { currP = null; } stroke(100); for (let x = 0; x < grid_w; x++) { line(x * GRID_SIZE, 0, x * GRID_SIZE, height); } for (let y = 0; y < grid_h; y++) { line(0, y * GRID_SIZE, width, y * GRID_SIZE); } if (drawFill) { for (let i = 0; i < bodies.length; i++) { let body = bodies[i]; fill((i * 10) % 255, (i * 5) % 255, (254 - i * 5) % 255); beginShape(); for (let point of body.vertices) { vertex(point.x, point.y); } endShape(); } } // Draw the constraints stroke(0); for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; line(c.p1.x, c.p1.y, c.p2.x, c.p2.y); } noStroke(); // Draw the points if (drawPoints) { fill(255, 255, 0); for (let i = 0; i < particles.length; i++) { rect(particles[i].x - SIZE_D2, particles[i].y - SIZE_D2, SIZE, SIZE); } } if (showDebugText) { fill(255); text('Particles: ' + particles.length + ' | Constraints: ' + constraints.length, 12, 12); text('Gravity: ' + gravity.x + ', ' + gravity.y, 12, 24); text('FPS: ' + frameRate(), 12, 38); text('Delta: ' + deltaTime, 12, 50); text('Dragging: ' + pointDragging, 12, 64); } } function mousePressed() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } else if (toolType == TTYPE_TRIANGLE) { createTriangle(mouseX, mouseY, 25 + random(100)); } else if (toolType == TTYPE_SQUARE) { createBox(mouseX, mouseY, 25 + random(100)); } if (isPaused) redraw(); // let p = 
new Particle(mouseX, mouseY); // p.px += random() * 2 - 1; // p.py += random() * 2 - 1; // constraints.push(new Constraint(particles[particles.length - 1], p, random() * 10 + 10)); // particles.push(p); } function mouseDragged() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } } function mouseReleased() { mouseInsideSketch = true; pointDragging = false; } function windowResized() { resizeCanvas(windowWidth, windowHeight); buildGrid(); } function buildGrid() { grid = []; grid_w = Math.ceil(width / GRID_SIZE); grid_h = Math.ceil(height / GRID_SIZE); for (let i = 0; i < grid_w * grid_h; i++) grid.push([]); for (let i = 0; i < particles.length; i++) { let cx = floor(particles[i].x / GRID_SIZE); let cy = floor(particles[i].y / GRID_SIZE); if (cx < 0 || cx >= grid_w || cy < 0 || cy >= grid_h) continue; grid[cx + cy * grid_w].push(particles[i]); } } function getParticleAt(x, y) { let cx = floor(x / GRID_SIZE); let cy = floor(y / GRID_SIZE); for (let x0 = cx - 1; x0 < cx + 1; x0++) { for (let y0 = cy - 1; y0 < cy + 1; y0++) { if (x0 < 0 || x0 >= grid_w || y0 < 0 || y0 >= grid_h) continue; let cell = grid[x0 + y0 * grid_w]; for (let i = 0; i < cell.length; i++) { let pDistX = (cell[i].x - x); let pDistY = (cell[i].y - y); if (pDistX * pDistX + pDistY * pDistY < dragDist) return cell[i]; } } } return null; } function updateParticles() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; let old_x = p.x; let old_y = p.y; if (p.invmass > 0) { p.x += gravity.x; p.y += gravity.y; p.x += (p.x - p.px); p.y += (p.y - p.py); } p.px = old_x; p.py = old_y; } } function updateConstraints() { let constToBeRemoved = []; for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; if (!c.p1 || !c.p2) continue; let dx = c.p1.x - c.p2.x; let dy = c.p1.y - c.p2.y; if (dx == 0 && dy == 0) { dx += Math.random() * 0.1; dy += Math.random() * 0.1; } // let d = Math.sqrt((dx * dx) + (dy * dy)); // if (!c.pushing && d < c.l) // continue; // if (canTear) { // let tearStr = c.l * tearMult; // if (d > tearStr) { // constraints[i] = constraints[constraints.length - 1]; // i--; // constraints.pop(); // continue; // } // } // let percent = ((d - c.l) * // (c.p1.invmass + c.p2.invmass)) / // d; // Squared dist for optimization let dSq = (dx * dx) + (dy * dy); if (!c.pushing && dSq < c.lSq) continue; if (canTear && c.canTear) { // let tearStrSq = c.lSq * tearMult; if (dSq > tearStrSq) { constraints[i] = constraints[constraints.length - 1]; i--; constraints.pop(); continue; } } let percent = ((dSq - c.lSq) * (c.p1.invmass + c.p2.invmass)) / dSq; dx *= percent; dy *= percent; c.p1.x -= dx * c.p1.invmass;; c.p1.y -= dy * c.p1.invmass;; c.p2.x += dx * c.p2.invmass;; c.p2.y += dy * c.p2.invmass;; } } function constrainPoints() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; if (p.x < SIZE) { p.x = SIZE; } else if (p.x >= width - SIZE) { p.x = width - SIZE; } if (p.y < SIZE) { p.y = SIZE; } else if (p.y >= height - SIZE) { p.x -= (p.y - height + SIZE) * (p.x - p.px) * this.physics.friction; p.y = height - SIZE; } } }
this.x = x; this.y = y; this.px = x; this.py = y; this.invmass = 0.3; } function Constraint(p1, p2, l, pushing = true, canTear = false, tearMult = 1) { this.p1 = p1; this.p2 = p2; this.l = l; this.lSq = l * l; this.pushing = pushing; this.canTear = canTear; this.tearStr = l * tearMult; this.tearStrSq = this.lSq * tearMult; } function createTriangle(x, y, size) { let body = new Body(); let a = 0; let l = 3; let astep = TWO_PI / l; for (let i = 0; i < l; i++) { p = new Particle(x + Math.sin(a) * size, y + Math.cos(a) * size); a += astep; if (i > 0) { let c = new Constraint( particles[particles.length - 1], p, size, true, false); constraints.push(c); body.constraints.push(c); } particles.push(p); body.vertices.push(p); } // Join ends of polygon let end = new Constraint( particles[particles.length - 1], particles[particles.length - l], size, true, false); constraints.push(end); body.constraints.push(end); body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createBox(x, y, size) { let body = new Body(); let hsize = size * 0.5; let vertices = []; vertices.push(new Particle(x - hsize, y - hsize)); vertices.push(new Particle(x + hsize, y - hsize)); vertices.push(new Particle(x + hsize, y + hsize)); vertices.push(new Particle(x - hsize, y + hsize)); particles.push(...vertices); body.vertices.push(...vertices); for (let i = 0; i < vertices.length; i++) { let c = new Constraint( vertices[(i + 1) % vertices.length], vertices[i], size); constraints.push(c); body.constraints.push(c); if (i > 1) { let d = new Constraint(vertices[(i + 2) % vertices.length], vertices[i], size * sqrt(2.0)); constraints.push(d); body.constraints.push(d); } } body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createClothSim() { for (let y = 0; y < clothHeight; y += 1) { for (let x = 0; x < clothWidth; x += 1) { let p = new Particle(x * clothSpacing + clothXMargin, y + 50); p.px += random() * 5 - 2.5; if (x > 0) { constraints.push(new Constraint( particles[x - 1 + y * clothWidth], p, clothConstraintLength, false, true, tearMult)); } if (y > 0) { constraints.push(new Constraint( particles[x + (y - 1) * clothWidth], p, clothConstraintLength, false, true, tearMult)); } else { if (y == 0 && x % clothAttachPoints == 0) p.invmass = 0; } particles.push(p); } } } function createSpiderWebSim() { let angleStep = TWO_PI / webPoints; for (let i = 0; i < webPoints; i++) { for (let j = 0; j < webRings; j++) { let a = i * angleStep; let s = ((webRings - j) / webRings) * webSize; let p = new Particle(width/2 + s * sin(a), height/2 + s * cos(a)); let spacing = webSpacing; if (particles.length > 0) { if (j > 0) { constraints.push(new Constraint( particles[particles.length - 1], p, spacing)); } if (i > 0) { constraints.push(new Constraint( particles[particles.length - webRings], p, spacing)); } if (i == webPoints - 1) { constraints.push(new Constraint( particles[j], p, spacing)); } } if (j == 0) p.invmass = 0; particles.push(p); } } }
function Particle(x, y) {
random_line_split
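getParticleAt in the examples above performs a spatial-hash lookup: buildGrid buckets every particle into a uniform grid of GRID_SIZE cells, so the drag tool only searches the cells around the mouse instead of every particle. A minimal sketch of that lookup, assuming the globals grid, grid_w, grid_h and GRID_SIZE from sketch.js; it scans the full 3x3 neighbourhood and compares squared distances against an explicit squared radius, whereas the original stops one cell early and compares against dragDist as-is, so the radius parameter here is an assumption.

function findNearbyParticle(x, y, radiusSq) {
  let cx = Math.floor(x / GRID_SIZE);
  let cy = Math.floor(y / GRID_SIZE);
  for (let gx = cx - 1; gx <= cx + 1; gx++) {
    for (let gy = cy - 1; gy <= cy + 1; gy++) {
      if (gx < 0 || gx >= grid_w || gy < 0 || gy >= grid_h) continue;
      let cell = grid[gx + gy * grid_w];
      for (let i = 0; i < cell.length; i++) {
        let dx = cell[i].x - x;
        let dy = cell[i].y - y;
        if (dx * dx + dy * dy < radiusSq) return cell[i];  // first hit wins
      }
    }
  }
  return null;
}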
sketch.js
const NUM_PARTICLES = 0; const SIZE = 5; const SIZE_D2 = SIZE / 2.0; const STEPS = 4; const TTYPE_DRAG = 0; const TTYPE_TRIANGLE = 1; const TTYPE_SQUARE = 2; const GRID_SIZE = 40; var grid_w, grid_h; var grid = null; var particles = null; var constraints = null; var bodies = null; var physics = null; var initGravityX = 0; var initGravityY = 0.1; var gravity = null; var pointDragging = false; var dragDist = 150; var currP = null; var delta = null; var drawFill = true; var drawPoints = false; var showDebugText = true; var mouseInsideSketch = true; var demoType = 'CLOTH'; var isPaused = false; var toolType = TTYPE_DRAG; let clothWidth = 25; let clothHeight = 20; let clothSpacing = 16; let clothConstraintLength = 20; let clothAttachPoints = 2; let clothXMargin = null; let webPoints = 40; let webRings = 12; let webSize = 200; let webSpacing = 12; let angleStep = 0.5; let canTear = false; let tearMult = 5; let tearStr = clothConstraintLength * tearMult; let tearStrSq = tearStr * tearStr; function setup() { let canvas = createCanvas(windowWidth, windowHeight); canvas.parent("#sketch"); canvas.attribute('oncontextmenu', 'return false;'); init(); initSettingsUI(); } function init() { grid = [] particles = []; constraints = []; bodies = []; physics = new Physics(); gravity = createVector(initGravityX, initGravityY); clothXMargin = (width - (clothWidth * clothSpacing)) / 2; // createSpiderWebSim(); createClothSim(); // Random particles for (let i = 0; i < NUM_PARTICLES; i++) { let p = new Particle(random() * width, random() * height); p.px += random() * 2 - 1; p.py += random() * 2 - 1; particles.push(p); } constrainPoints(); } function draw() { background(125); updateParticles(); for (let i = 0; i < STEPS; i++) { updateConstraints(); for (let body1 of bodies) { body1.calculateBBox(); for (let body2 of bodies) { if (body1 === body2) continue; if (physics.detectCollision(body1, body2)) physics.processCollision(); } } constrainPoints(); } buildGrid(); if (pointDragging) { if (currP) { currP.x = mouseX; currP.y = mouseY; } else { currP = getParticleAt(mouseX, mouseY); } } else { currP = null; } stroke(100); for (let x = 0; x < grid_w; x++) { line(x * GRID_SIZE, 0, x * GRID_SIZE, height); } for (let y = 0; y < grid_h; y++) { line(0, y * GRID_SIZE, width, y * GRID_SIZE); } if (drawFill) { for (let i = 0; i < bodies.length; i++) { let body = bodies[i]; fill((i * 10) % 255, (i * 5) % 255, (254 - i * 5) % 255); beginShape(); for (let point of body.vertices) { vertex(point.x, point.y); } endShape(); } } // Draw the constraints stroke(0); for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; line(c.p1.x, c.p1.y, c.p2.x, c.p2.y); } noStroke(); // Draw the points if (drawPoints)
if (showDebugText) { fill(255); text('Particles: ' + particles.length + ' | Constraints: ' + constraints.length, 12, 12); text('Gravity: ' + gravity.x + ', ' + gravity.y, 12, 24); text('FPS: ' + frameRate(), 12, 38); text('Delta: ' + deltaTime, 12, 50); text('Dragging: ' + pointDragging, 12, 64); } } function mousePressed() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } else if (toolType == TTYPE_TRIANGLE) { createTriangle(mouseX, mouseY, 25 + random(100)); } else if (toolType == TTYPE_SQUARE) { createBox(mouseX, mouseY, 25 + random(100)); } if (isPaused) redraw(); // let p = new Particle(mouseX, mouseY); // p.px += random() * 2 - 1; // p.py += random() * 2 - 1; // constraints.push(new Constraint(particles[particles.length - 1], p, random() * 10 + 10)); // particles.push(p); } function mouseDragged() { if (!mouseInsideSketch || mouseX < 0 || mouseX >= width || mouseY < 0 || mouseY >= height) return; if (toolType == TTYPE_DRAG) { pointDragging = true; } } function mouseReleased() { mouseInsideSketch = true; pointDragging = false; } function windowResized() { resizeCanvas(windowWidth, windowHeight); buildGrid(); } function buildGrid() { grid = []; grid_w = Math.ceil(width / GRID_SIZE); grid_h = Math.ceil(height / GRID_SIZE); for (let i = 0; i < grid_w * grid_h; i++) grid.push([]); for (let i = 0; i < particles.length; i++) { let cx = floor(particles[i].x / GRID_SIZE); let cy = floor(particles[i].y / GRID_SIZE); if (cx < 0 || cx >= grid_w || cy < 0 || cy >= grid_h) continue; grid[cx + cy * grid_w].push(particles[i]); } } function getParticleAt(x, y) { let cx = floor(x / GRID_SIZE); let cy = floor(y / GRID_SIZE); for (let x0 = cx - 1; x0 < cx + 1; x0++) { for (let y0 = cy - 1; y0 < cy + 1; y0++) { if (x0 < 0 || x0 >= grid_w || y0 < 0 || y0 >= grid_h) continue; let cell = grid[x0 + y0 * grid_w]; for (let i = 0; i < cell.length; i++) { let pDistX = (cell[i].x - x); let pDistY = (cell[i].y - y); if (pDistX * pDistX + pDistY * pDistY < dragDist) return cell[i]; } } } return null; } function updateParticles() { for (let i = 0; i < particles.length; i++) { let p = particles[i]; let old_x = p.x; let old_y = p.y; if (p.invmass > 0) { p.x += gravity.x; p.y += gravity.y; p.x += (p.x - p.px); p.y += (p.y - p.py); } p.px = old_x; p.py = old_y; } } function updateConstraints() { let constToBeRemoved = []; for (let i = 0; i < constraints.length; i++) { let c = constraints[i]; if (!c.p1 || !c.p2) continue; let dx = c.p1.x - c.p2.x; let dy = c.p1.y - c.p2.y; if (dx == 0 && dy == 0) { dx += Math.random() * 0.1; dy += Math.random() * 0.1; } // let d = Math.sqrt((dx * dx) + (dy * dy)); // if (!c.pushing && d < c.l) // continue; // if (canTear) { // let tearStr = c.l * tearMult; // if (d > tearStr) { // constraints[i] = constraints[constraints.length - 1]; // i--; // constraints.pop(); // continue; // } // } // let percent = ((d - c.l) * // (c.p1.invmass + c.p2.invmass)) / // d; // Squared dist for optimization let dSq = (dx * dx) + (dy * dy); if (!c.pushing && dSq < c.lSq) continue; if (canTear && c.canTear) { // let tearStrSq = c.lSq * tearMult; if (dSq > tearStrSq) { constraints[i] = constraints[constraints.length - 1]; i--; constraints.pop(); continue; } } let percent = ((dSq - c.lSq) * (c.p1.invmass + c.p2.invmass)) / dSq; dx *= percent; dy *= percent; c.p1.x -= dx * c.p1.invmass;; c.p1.y -= dy * c.p1.invmass;; c.p2.x += dx * c.p2.invmass;; c.p2.y += dy * c.p2.invmass;; } } function constrainPoints() { 
for (let i = 0; i < particles.length; i++) { let p = particles[i]; if (p.x < SIZE) { p.x = SIZE; } else if (p.x >= width - SIZE) { p.x = width - SIZE; } if (p.y < SIZE) { p.y = SIZE; } else if (p.y >= height - SIZE) { p.x -= (p.y - height + SIZE) * (p.x - p.px) * this.physics.friction; p.y = height - SIZE; } } } function Particle(x, y) { this.x = x; this.y = y; this.px = x; this.py = y; this.invmass = 0.3; } function Constraint(p1, p2, l, pushing = true, canTear = false, tearMult = 1) { this.p1 = p1; this.p2 = p2; this.l = l; this.lSq = l * l; this.pushing = pushing; this.canTear = canTear; this.tearStr = l * tearMult; this.tearStrSq = this.lSq * tearMult; } function createTriangle(x, y, size) { let body = new Body(); let a = 0; let l = 3; let astep = TWO_PI / l; for (let i = 0; i < l; i++) { p = new Particle(x + Math.sin(a) * size, y + Math.cos(a) * size); a += astep; if (i > 0) { let c = new Constraint( particles[particles.length - 1], p, size, true, false); constraints.push(c); body.constraints.push(c); } particles.push(p); body.vertices.push(p); } // Join ends of polygon let end = new Constraint( particles[particles.length - 1], particles[particles.length - l], size, true, false); constraints.push(end); body.constraints.push(end); body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createBox(x, y, size) { let body = new Body(); let hsize = size * 0.5; let vertices = []; vertices.push(new Particle(x - hsize, y - hsize)); vertices.push(new Particle(x + hsize, y - hsize)); vertices.push(new Particle(x + hsize, y + hsize)); vertices.push(new Particle(x - hsize, y + hsize)); particles.push(...vertices); body.vertices.push(...vertices); for (let i = 0; i < vertices.length; i++) { let c = new Constraint( vertices[(i + 1) % vertices.length], vertices[i], size); constraints.push(c); body.constraints.push(c); if (i > 1) { let d = new Constraint(vertices[(i + 2) % vertices.length], vertices[i], size * sqrt(2.0)); constraints.push(d); body.constraints.push(d); } } body.vertexCount = body.vertices.length; body.constraintCount = body.constraints.length; bodies.push(body); } function createClothSim() { for (let y = 0; y < clothHeight; y += 1) { for (let x = 0; x < clothWidth; x += 1) { let p = new Particle(x * clothSpacing + clothXMargin, y + 50); p.px += random() * 5 - 2.5; if (x > 0) { constraints.push(new Constraint( particles[x - 1 + y * clothWidth], p, clothConstraintLength, false, true, tearMult)); } if (y > 0) { constraints.push(new Constraint( particles[x + (y - 1) * clothWidth], p, clothConstraintLength, false, true, tearMult)); } else { if (y == 0 && x % clothAttachPoints == 0) p.invmass = 0; } particles.push(p); } } } function createSpiderWebSim() { let angleStep = TWO_PI / webPoints; for (let i = 0; i < webPoints; i++) { for (let j = 0; j < webRings; j++) { let a = i * angleStep; let s = ((webRings - j) / webRings) * webSize; let p = new Particle(width/2 + s * sin(a), height/2 + s * cos(a)); let spacing = webSpacing; if (particles.length > 0) { if (j > 0) { constraints.push(new Constraint( particles[particles.length - 1], p, spacing)); } if (i > 0) { constraints.push(new Constraint( particles[particles.length - webRings], p, spacing)); } if (i == webPoints - 1) { constraints.push(new Constraint( particles[j], p, spacing)); } } if (j == 0) p.invmass = 0; particles.push(p); } } }
{ fill(255, 255, 0); for (let i = 0; i < particles.length; i++) { rect(particles[i].x - SIZE_D2, particles[i].y - SIZE_D2, SIZE, SIZE); } }
conditional_block
annualSearchPage.js
$(function(){ searchBtn(); $("input[name=btSelectAll]").attr("style","height:16px;width:16px;"); $("input[name=btSelectAll]").css("verticalAlign","middle"); $('#headYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); $('#updateYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); var shi=$('#searchUserName'); shi.selectpicker({ noneSelectedText:'--请选择--', style:'btnSelect', width:'250px', liveSearch:true, liveSearchPlaceholder:'请输入公司名称查询', size:5 }); }); function searchBtn(){ var localhostPath = getRootPath1(); var rootPath = getRootPath(); $('#dt').bootstrapTable('destroy'); $('#dt').bootstrapTable({ method: 'post', contentType: "application/x-www-form-urlencoded", url:rootPath+'/annual/findAllSearch.do', striped: true, //是否显示行间隔色 cache: false, //是否使用缓存,默认为true,所以一般情况下需要设置一下这个属性(*) pagination: true, queryParamsType:'', //默认值为 'limit' ,在默认情况下 传给服务端的参数为:offset,limit,sort // 设置为 '' 在这种情况下传给服务器的参数为:pageSize,pageNumber queryParams:queryParams, singleSelect: false, pageSize: basePage.pageSize, pageList: basePage.pageList, search: false, //不显示 搜索框 showColumns: false, //不显示下拉框(选择显示的列) sidePagination: "server", //服务端请求 clickToSelect: true, //是否启用点击选中行 columns: [ { field: 'Number', title: '行号', width:'1px', formatter: function (value, row, index) { return index+1; }, width:50 }, { field:'id', visible:false },{ field: 'userName', title: '公司名称', valign:'middle' },{ field: 'fileName', title: '文件标题', valign:'middle' }, { field: 'year', title: '年份', valign:'middle' },{ field: 'uploadTime', title: '上传时间', valign:'middle' },{ field: 'resume', title: '内容简述', valign:'middle' },{ field: 'remarks', title: '备注', valign:'middle' },{ field: 'status', title: '是否提交', valign:'middle', formatter:function(value,data,index){ if(data.status==0){ return '未提交'; }else{ return '已提交'; } } },{ field: '', title: '操作', valign:'middle', formatter:function(value,data,index){ var str = '<button style="margin-top: 5px;" onclick="searchRegulations(\''+data.id+'\');" type="button" class="btn btn-info btn-xs" style="margin-right:10px;"><span class="glyphicon glyphicon-search"></span>查看详情</button>'; return str; } } ], onLoadSuccess:function(){}, onLoadError: function () { } }); } function queryParams(params){ var temp = { pageSize:params.pageSize, pageNumber:params.pageNumber, fileName:$('#headNameId').val(), year:$('#headYearId').val(), userCode:$('#searchUserName').val() }; $("#headNameId").val(''); $('#headYearId').val(''); return temp; } //增加通知公告 function addInfo(){ window.location.href="<%=basePath%>regulatory/toAdd.do" } //删除 function delInfo(){ rootPath = getRootPath(); var id = getSelectionsStr(); if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(obj) { debugger; if(obj.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } $.commonReq({ url : rootPath + "/annual/deleteRegs.do", async : true, data : {"ids":id}, success : function(parame) { bootbox.alert( "删除成功!"); window.location.href=rootPath+'/annual/toAnnualReviewPage.do'; }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } }); } //修改通知公告 function updateInfo(){ debugger; $('#saveUpdateBtn').show(); $('#urlDiv2').hide(); $('#urlDiv').show(); var rootPath = getRootPath(); var localhostPath = getRootPath1(); var id = 
getSelectionsStr()+""; if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $("#formUpdatesInfo").find("input").val(""); $('#myModalAdd').modal('hide'); $("#uploadFileName").html(''); $("#uploadFileId").val(''); $("#uploadFileId").val(id); $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(parame) { if(parame.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } debugger; var obj = parame.data; $("#updateId").val(obj.id); $("#updateFileName").val(obj.fileName); $("#updateYearId").val(obj.year); $("#updateResume").val(obj.resume); $("#updateRemarks").val(obj.remarks); if(obj.fileUrl==''){ $("#downloadHref").hide(); }else{ $("#uploadFileInput").val(obj.fileUrl); $("#downloadHref").attr("href", localhostPath+"/filePath/"+obj.fileUrl ); } if(obj.fileUrl != null && obj.fileUrl.trim() !=''){ $("#uploadFileId").val(obj.fileUrl); var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim()!=''){ var idAry=nameAry[i].split('^'); var id = idAry[0]; var fileName = idAry[1]; var beforeFileName = $("#uploadFileName").html(); var afterFileName = beforeFileName+"&nbsp;&nbsp;&nbsp;" +'<span id='+id+'>'+fileName+'</span>' +'<a onclick="deleteFile('+id+');" style="color:#ff000096;"> 删除</a>'; $("#uploadFileName").html(afterFileName); } } } $('#myModalUpdate').modal('show'); }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } function saveUpdateInfo(){ debugger; var flag = true; //检查标题 var name=$('#updateFileName').val(); if(checkNullAndEmpty(name)){ bootbox.alert("环评文件标题不能为空!"); flag=false; return; } //检查年份 var year = $("#updateYearId").val(); if(checkNullAndEmpty(year)){ bootbox.alert("年份不能为空!"); flag=false; return; } var rootPath = getRootPath(); if(flag){ $.commonReq({ url : rootPath + "/annual/updateAnnualRevice.do", async : true, data:$("#formUpdateInfo").serialize(), success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("修改成功!"); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } } function searchRegulations(id){ debugger; var localhostPath = getRootPath1(); var rootPath = getRootPath(); $("#formUpdatesInfo").find("input").val(""); $("#submitBtn").show(); $('#myModalUpdate').modal('hide'); $("#searchUploadFileName").html(''); $("#uploadFileId").val(''); $('#urlDiv').hide(); $.commonReq({ url : rootPath + '/annual/selectById.do', data : { "id" : id }, success : function(data) { debugger; var obj = data.data; $("#searchId").val(obj.id); $("#submitBtn").hide();
+ '/annual/findAll.do'}); bootbox.alert("提交成功!"); $('#myModalSearch').modal('hide'); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } }); } } /*获取选中的值*/ function getSelectionsStr(){ var rows = $('#dt').bootstrapTable('getSelections'); var str=""; if(rows!=null){ for(var i=0;i<rows.length;i++){ str+=(rows[i].id+"")+","; } str=str.substring(0,str.lastIndexOf(',')); } return str; } /*验证是否为空*/ function checkNullAndEmpty(value){ if(value==null || value.trim()==''){ return true; }else{ return false; } } //判断字符串是否为数字 function checkNumber(value){ var re = /^[0-9]+.?[0-9]*$/; if(null==value||''==value) { return false; }else if(!re.test(value)){ return true; }else{ return false; } } /*时间格式转化*/ function dataFormat(value){ if(value!=null){ var date = new Date(value); var seperator1 = "-"; //年 var year = date.getFullYear(); //月 var month = date.getMonth() + 1; //日 var strDate = date.getDate(); if (month >= 1 && month <= 9) { month = "0" + month; } if (strDate >= 0 && strDate <= 9) { strDate = "0" + strDate; } var currentdate = year + seperator1 + month + seperator1 + strDate; return currentdate; }else{ return ""; } }
$("#searchFileName").val(obj.fileName); $("#searchYearId").val(obj.year); $("#searchResume").val(obj.resume); $("#searchRemarks").val(obj.remarks); var afterName = ''; if(obj.fileUrl !='' && obj.fileUrl != null){ var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim() != ''){ var idAry=nameAry[i].split('^'); var fileName = idAry[1]; var beforeFileList = $("#searchUploadFileName").html(); var url =localhostPath+'/filePath/'+nameAry[i]; var afterFileList =beforeFileList+'&nbsp;&nbsp;&nbsp;&nbsp;'+ '<a href='+url+' target=_blank>'+fileName+'</a>'; $("#searchUploadFileName").html(afterFileList); } } //$('#urlDiv2').show(); } $('#myModalSearch').modal('show'); } }); } //提交 function submitInfo(id){ debugger; var rootPath = getRootPath(); if(id=='' || id==undefined){ id = $("#searchId").val(); } if(id==""){ bootbox.alert('请选择你要提交的数据!'); }else{ bootbox.confirm("提交后您将无法对该文件进行修改或删除操作,确定要提交该数据吗?",function(result){ if(result){ $.commonReq({ url : rootPath + "/annual/sumbitAnnualReview.do", async : true, data:{"id":id}, success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath
identifier_body
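The annualSearchPage.js examples above repeatedly split the fileUrl field returned by /annual/selectById.do: attachments are stored as '|'-separated entries of the form "<id>^<fileName>", and both the edit and detail modals walk that string to build file-name spans and download links. A minimal helper sketching that parsing convention; parseFileUrl is a hypothetical name, not part of the page.

function parseFileUrl(fileUrl) {
  if (!fileUrl) return [];
  return fileUrl.split('|')
    .filter(function (entry) { return entry.trim() !== ''; })
    .map(function (entry) {
      var parts = entry.split('^');      // "<id>^<fileName>"
      return { id: parts[0], fileName: parts[1] };
    });
}

// Example: parseFileUrl('12^report.pdf|13^photo.jpg|')
//   -> [{ id: '12', fileName: 'report.pdf' }, { id: '13', fileName: 'photo.jpg' }]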
annualSearchPage.js
$(function(){ searchBtn(); $("input[name=btSelectAll]").attr("style","height:16px;width:16px;"); $("input[name=btSelectAll]").css("verticalAlign","middle"); $('#headYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); $('#updateYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); var shi=$('#searchUserName'); shi.selectpicker({ noneSelectedText:'--请选择--', style:'btnSelect', width:'250px', liveSearch:true, liveSearchPlaceholder:'请输入公司名称查询', size:5 }); }); function searchBtn(){ var localhostPath = getRootPath1(); var rootPath = getRootPath(); $('#dt').bootstrapTable('destroy'); $('#dt').bootstrapTable({ method: 'post', contentType: "application/x-www-form-urlencoded", url:rootPath+'/annual/findAllSearch.do', striped: true, //是否显示行间隔色 cache: false, //是否使用缓存,默认为true,所以一般情况下需要设置一下这个属性(*) pagination: true, queryParamsType:'', //默认值为 'limit' ,在默认情况下 传给服务端的参数为:offset,limit,sort // 设置为 '' 在这种情况下传给服务器的参数为:pageSize,pageNumber queryParams:queryParams, singleSelect: false, pageSize: basePage.pageSize, pageList: basePage.pageList, search: false, //不显示 搜索框 showColumns: false, //不显示下拉框(选择显示的列) sidePagination: "server", //服务端请求 clickToSelect: true, //是否启用点击选中行 columns: [ { field: 'Number', title: '行号', width:'1px', formatter: function (value, row, index) { return index+1; }, width:50 }, { field:'id', visible:false },{ field: 'userName', title: '公司名称', valign:'middle' },{ field: 'fileName', title: '文件标题', valign:'middle' }, { field: 'year', title: '年份', valign:'middle' },{ field: 'uploadTime', title: '上传时间', valign:'middle' },{ field: 'resume', title: '内容简述', valign:'middle' },{ field: 'remarks', title: '备注', valign:'middle' },{ field: 'status', title: '是否提交', valign:'middle', formatter:function(value,data,index){ if(data.status==0){ return '未提交'; }else{ return '已提交'; } } },{ field: '', title: '操作', valign:'middle', formatter:function(value,data,index){ var str = '<button style="margin-top: 5px;" onclick="searchRegulations(\''+data.id+'\');" type="button" class="btn btn-info btn-xs" style="margin-right:10px;"><span class="glyphicon glyphicon-search"></span>查看详情</button>'; return str; } } ], onLoadSuccess:function(){}, onLoadError: function () { } }); } function queryParams(params){ var temp = { pageSize:params.pageSize, pageNumber:params.pageNumber, fileName:$('#headNameId').val(), year:$('#headYearId').val(), userCode:$('#searchUserName').val() }; $("#headNameId").val(''); $('#headYearId').val(''); return temp; } //增加通知公告 function addInfo(){ window.location.href="<%=basePath%>regulatory/toAdd.do" } //删除 function delInfo(){ rootPath = getRootPath(); var id = getSelectionsStr(); if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(obj) { debugger; if(obj.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } $.commonReq({ url : rootPath + "/annual/deleteRegs.do", async : true, data : {"ids":id}, success : function(parame) { bootbox.alert( "删除成功!"); window.location.href=rootPath+'/annual/toAnnualReviewPage.do'; }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } }); } //修改通知公告 function updateInfo(){ debugger; $('#saveUpdateBtn').show(); $('#urlDiv2').hide(); $('#urlDiv').show(); var rootPath = getRootPath(); var localhostPath = getRootPath1(); var id = 
getSelectionsStr()+""; if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $("#formUpdatesInfo").find("input").val(""); $('#myModalAdd').modal('hide'); $("#uploadFileName").html(''); $("#uploadFileId").val(''); $("#uploadFileId").val(id); $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(parame) { if(parame.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } debugger; var obj = parame.data; $("#updateId").val(obj.id); $("#updateFileName").val(obj.fileName); $("#updateYearId").val(obj.year); $("#updateResume").val(obj.resume); $("#updateRemarks").val(obj.remarks); if(obj.fileUrl==''){ $("#downloadHref").hide(); }else{ $("#uploadFileInput").val(obj.fileUrl); $("#downloadHref").attr("href", localhostPath+"/filePath/"+obj.fileUrl ); } if(obj.fileUrl != null && obj.fileUrl.trim() !=''){ $("#uploadFileId").val(obj.fileUrl); var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim()!=''){ var idAry=nameAry[i].split('^'); var id = idAry[0]; var fileName = idAry[1]; var beforeFileName = $("#uploadFileName").html(); var afterFileName = beforeFileName+"&nbsp;&nbsp;&nbsp;" +'<span id='+id+'>'+fileName+'</span>' +'<a onclick="deleteFile('+id+');" style="color:#ff000096;"> 删除</a>'; $("#uploadFileName").html(afterFileName); } } } $('#myModalUpdate').modal('show'); }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } function saveUpdateInfo(){ debugger; var flag = true; //检查标题 var name=$('#updateFileName').val(); if(checkNullAndEmpty(name)){ bootbox.alert("环评文件标题不能为空!"); flag=false; return; } //检查年份 var year = $("#updateYearId").val(); if(checkNullAndEmpty(year)){ bootbox.alert("年份不能为空!"); flag=false; return; } var rootPath = getRootPath(); if(flag){ $.commonReq({ url : rootPath + "/annual/updateAnnualRevice.do", async : true, data:$("#formUpdateInfo").serialize(), success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("修改成功!"); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } } function searchRegulations(id){ debugger; var localhostPath = getRootPath1(); var rootPath = getRootPath(); $("#formUpdatesInfo").find("input").val(""); $("#submitBtn").show(); $('#myModalUpdate').modal('hide'); $("#searchUploadFileName").html(''); $("#uploadFileId").val(''); $('#urlDiv').hide(); $.commonReq({ url : rootPath + '/annual/selectById.do', data : { "id" : id }, success : function(data) { debugger; var obj = data.data; $("#searchId").val(obj.id); $("#submitBtn").hide(); $("#searchFileName").val(obj.fileName); $("#searchYearId").val(obj.year); $("#searchResume").val(obj.resume); $("#searchRemarks").val(obj.remarks); var afterName = ''; if(obj.fileUrl !='' && obj.fileUrl != null){ var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim() != ''){ var idAry=nameAry[i].split('^'); var fileName = idAry[1]; var beforeFileList = $("#searchUploadFileName").html(); var url =localhostPath+'/filePath/'+nameAry[i]; var afterFileList =beforeFileList+'&nbsp;&nbsp;&nbsp;&nbsp;'+ '<a href='+url+' target=_blank>'+fileName+'</a>'; $("#searchUploadFileName").html(afterFileList); } } //$('#urlDiv2').show(); } $('#myModalSearch').modal('show'); } }); } //提交 function submitInfo(id){ debugger; var rootPath = getRootPath(); if(id=='' || id==undefined){ id = 
$("#searchId").val(); } if(id==""){ bootbox.alert('请选择你要提交的数据!'); }else{ bootbox.confirm("提交后您将无法对该文件进行修改或删除操作,确定要提交该数据吗?",function(result){ if(result){ $.commonReq({ url : rootPath + "/annual/sumbitAnnualReview.do", async : true, data:{"id":id}, success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("提交成功!"); $('#myModalSearch').modal('hide'); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } }); } } /*获取选中的值*/ function getSelectionsStr(){ var rows = $('#dt').bootstrapTable('getSelections'); var str=""; if(rows!=null){ for(var i=0;i<rows.length;i++){ str+=(rows[i].id+"")+","; } str=str.substring(0,str.lastIndexOf(',')); } return str; } /*验证是否为空*/ function checkNullAndEmpty(value){ if(value==null || value.trim()==''){ return true; }else{ return false; } } //判断字符串是否为数字 function checkNumber(value){ var re = /^[0-9]+.?[0-9]*$/; if(null==value||''==value) { return false; }else if(!re.test(value)){ return true; }else{ return false; } } /*时间格式转化*/ function dataFormat(value){ if(value!=null){ var date = new Date(value); var seperator1 = "-"; //年 var year = date.getFullYear(); //月 var month = date.getMonth() + 1; //日 var strDate = date.getDate(); if (month >= 1 && month <= 9) { month = "0" + month; } if (strDate >= 0 && strDate <= 9) { strDate = "0" + strDate; } var currentdate = year + seperator1 + month + seperator1 + strDate; return currentdate; }else{
}
return ""; }
random_line_split
annualSearchPage.js
$(function(){ searchBtn(); $("input[name=btSelectAll]").attr("style","height:16px;width:16px;"); $("input[name=btSelectAll]").css("verticalAlign","middle"); $('#headYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); $('#updateYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); var shi=$('#searchUserName'); shi.selectpicker({ noneSelectedText:'--请选择--', style:'btnSelect', width:'250px', liveSearch:true, liveSearchPlaceholder:'请输入公司名称查询', size:5 }); }); function searchBtn(){ var localhostPath = getRootPath1(); var rootPath = getRootPath(); $('#dt').bootstrapTable('destroy'); $('#dt').bootstrapTable({ method: 'post', contentType: "application/x-www-form-urlencoded", url:rootPath+'/annual/findAllSearch.do', striped: true, //是否显示行间隔色 cache: false, //是否使用缓存,默认为true,所以一般情况下需要设置一下这个属性(*) pagination: true, queryParamsType:'', //默认值为 'limit' ,在默认情况下 传给服务端的参数为:offset,limit,sort // 设置为 '' 在这种情况下传给服务器的参数为:pageSize,pageNumber queryParams:queryParams, singleSelect: false, pageSize: basePage.pageSize, pageList: basePage.pageList, search: false, //不显示 搜索框 showColumns: false, //不显示下拉框(选择显示的列) sidePagination: "server", //服务端请求 clickToSelect: true, //是否启用点击选中行 columns: [ { field: 'Number', title: '行号', width:'1px', formatter: function (value, row, index) { return index+1; }, width:50 }, { field:'id', visible:false },{ field: 'userName', title: '公司名称', valign:'middle' },{ field: 'fileName', title: '文件标题', valign:'middle' }, { field: 'year', title: '年份', valign:'middle' },{ field: 'uploadTime', title: '上传时间', valign:'middle' },{ field: 'resume', title: '内容简述', valign:'middle' },{ field: 'remarks', title: '备注', valign:'middle' },{ field: 'status', title: '是否提交', valign:'middle', formatter:function(value,data,index){ if(data.status==0){ return '未提交'; }else{ return '已提交'; } } },{ field: '', title: '操作', valign:'middle', formatter:function(value,data,index){ var str = '<button style="margin-top: 5px;" onclick="searchRegulations(\''+data.id+'\');" type="button" class="btn btn-info btn-xs" style="margin-right:10px;"><span class="glyphicon glyphicon-search"></span>查看详情</button>'; return str; } } ], onLoadSuccess:function(){}, onLoadError: function () { } }); } function queryParams(params){ var temp = { pageSize:params.pageSize, pageNumber:params.pageNumber, fileName:$('#headNameId').val(), year:$('#headYearId').val(), userCode:$('#searchUserName').val() }; $("#headNameId").val(''); $('#headYearId').val(''); return temp; } //增加通知公告 function addInfo(){ window.location.href="<%=basePath%>regulatory/toAdd.do" } //删除 function delInfo(){ rootPath = getRootPath(); var id = getSelectionsStr(); if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(obj) { debugger; if(obj.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } $.commonReq({ url : rootPath + "/annual/deleteRegs.do", async : true, data : {"ids":id}, success : function(parame) { bootbox.alert( "删除成功!"); window.location.href=rootPath+'/annual/toAnnualReviewPage.do'; }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } }); } //修改通知公告 function updateInfo(){ debugger; $('#saveUpdateBtn').show(); $('#urlDiv2').hide(); $('#urlDiv').show(); var rootPath = getRootPath(); var localhostPath = getRootPath1(); var id = 
getSelectionsStr()+""; if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $("#formUpdatesInfo").find("input").val(""); $('#myModalAdd').modal('hide'); $("#uploadFileName").html(''); $("#uploadFileId").val(''); $("#uploadFileId").val(id); $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(parame) { if(parame.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } debugger; var obj = parame.data; $("#updateId").val(obj.id); $("#updateFileName").val(obj.fileName); $("#updateYearId").val(obj.year); $("#updateResume").val(obj.resume); $("#updateRemarks").val(obj.remarks); if(obj.fileUrl==''){ $("#downloadHref").hide(); }else{ $("#uploadFileInput").val(obj.fileUrl); $("#downloadHref").attr("href", localhostPath+"/filePath/"+obj.fileUrl ); } if(obj.fileUrl != null && obj.fileUrl.trim() !=''){ $("#uploadFileId").val(obj.fileUrl); var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim()!=''){ var idAry=nameAry[i].split('^'); var id = idAry[0]; var fileName = idAry[1]; var beforeFileName = $("#uploadFileName").html(); var afterFileName = beforeFileName+"&nbsp;&nbsp;&nbsp;" +'<span id='+id+'>'+fileName+'</span>' +'<a onclick="deleteFile('+id+');" style="color:#ff000096;"> 删除</a>'; $("#uploadFileName").html(afterFileName); } } } $('#myModalUpdate').modal('show'); }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } function saveUpdateInfo(){ debugger; var flag = true; //检查标题 var name=$('#updateFileName').val(); if(checkNullAndEmpty(name)){ bootbox.alert("环评文件标题不能为空!"); flag=false; return; } //检查年份 var year = $("#updateYearId").val(); if(checkNullAndEmpty(year)){ bootbox.alert("年份不能为空!"); flag=false; return; } var rootPath = getRootPath(); if(flag){ $.commonReq({ url : rootPath + "/annual/updateAnnualRevice.do", async : true, data:$("#formUpdateInfo").serialize(), success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("修改成功!"); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } } function searchRegulations(id){ debugger; var localhostPath = getRootPath1(); var rootPath = getRootPath(); $("#formUpdatesInfo").find("input").val(""); $("#submitBtn").show(); $('#myModalUpdate').modal('hide'); $("#searchUploadFileName").html(''); $("#uploadFileId").val(''); $('#urlDiv').hide(); $.commonReq({ url : rootPath + '/annual/selectById.do', data : { "id" : id }, success : function(data) { debugger; var obj = data.data; $("#searchId").val(obj.id); $("#submitBtn").hide(); $("#searchFileName").val(obj.fileName); $("#searchYearId").val(obj.year); $("#searchResume").val(obj.resume); $("#searchRemarks").val(obj.remarks); var afterName = ''; if(obj.fileUrl !='' && obj.fileUrl != null){ var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim() != ''){ var idAry=nameAry[i].split('^'); var fileName = idAry[1]; var beforeFileList = $("#searchUploadFileName").html(); var url =localhostPath+'/filePath/'+nameAry[i]; var afterFileList =beforeFileList+'&nbsp;&nbsp;&nbsp;&nbsp;'+ '<a href='+url+' target=_blank>'+fileName+'</a>'; $("#searchUploadFileName").html(afterFileList); } } //$('#urlDiv2').show(); } $('#myModalSearch').modal('show'); } }); } //提交 function submitInfo(id){ debugger; var rootPath = getRootPath(); if(id=='' || id==undefined){ id = 
$("#searchId").val(); } if(id==""){ bootbox.alert('请选择你要提交的数据!'); }else{ bootbox.confirm("提交后您将无法对该文件进行修改或删除操作,确定要提交该数据吗?",function(result){ if(result){ $.commonReq({ url : rootPath + "/annual/sumbitAnnualReview.do", async : true, data:{"id":id}, success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("提交成功!"); $('#myModalSearch').modal('hide'); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } }); } } /*获取选中的值*/ function getSelectionsStr(){ var rows = $('#dt').bootstrapTable('getSelections'); var str=""; if(rows!=null){ for(var i=0;i<rows.length;i++){ str+=(rows[i].id+"")+","; } str=str.substring(0,str.lastIndexOf(',')); } return str; } /*验证是否为空*/ function checkNullAndEmpty(value){ if(value==null || value.trim()==''){ return true; }else{ return false; } } //判断字符串是否为数字 function checkNumber(value){ var re = /^[0-9]+.?[0-9]*$/; if(null==value||''==value) { return false; }else if(!re.test(value)){ return true; }else{ return false; } } /*时间格式转化*/ function dataFormat(value){ if(value!=null){ var date = new Date(value); var seperator1 = "-"; //年 var year = date.getFullYear(); //月 var month = date.getMonth() + 1; //日 var strDate = date.getDate(); if (month >= 1 && month <= 9) { month = "0" + month; } if (strDate >= 0 && strDate <= 9) { strDate = "0" + strDate; } var currentdate = year + seperator1 + month + seperator1 + strDate;
turn currentdate; }else{ return ""; } }
re
identifier_name
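The two fragments above only make sense together: the suffix picks up mid-word because the middle span "re" was cut out of `return currentdate;`, and the label on the previous line names how that span was chosen. Below is a minimal Rust sketch of how such a row reassembles, assuming the spans are stored as plain strings; `FimRow`, `reassemble`, and the shortened prefix literal are illustrative, not part of any real loader.

// Minimal sketch: a FIM row reassembles as prefix + middle + suffix.
// The struct fields and the truncated prefix literal are hypothetical.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String,
}

impl FimRow {
    // Restore the original source text from the three spans.
    fn reassemble(&self) -> String {
        let mut out = String::with_capacity(
            self.prefix.len() + self.middle.len() + self.suffix.len(),
        );
        out.push_str(&self.prefix);
        out.push_str(&self.middle);
        out.push_str(&self.suffix);
        out
    }
}

fn main() {
    // Mirrors the split above: the middle "re" completes the `return`
    // keyword that the suffix fragment starts without.
    let row = FimRow {
        file_name: "annualSearchPage.js".to_string(),
        prefix: "var currentdate = year + seperator1 + month + seperator1 + strDate; ".to_string(),
        suffix: "turn currentdate; }else{ return \"\"; } }".to_string(),
        middle: "re".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    assert!(row.reassemble().contains("return currentdate"));
    println!("{} ({})", row.file_name, row.fim_type);
}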
annualSearchPage.js
$(function(){ searchBtn(); $("input[name=btSelectAll]").attr("style","height:16px;width:16px;"); $("input[name=btSelectAll]").css("verticalAlign","middle"); $('#headYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); $('#updateYearId').datetimepicker({ startView: 'decade', minView: 'decade', format: 'yyyy', maxViewMode: 2, minViewMode:2, autoclose: true }); var shi=$('#searchUserName'); shi.selectpicker({ noneSelectedText:'--请选择--', style:'btnSelect', width:'250px', liveSearch:true, liveSearchPlaceholder:'请输入公司名称查询', size:5 }); }); function searchBtn(){ var localhostPath = getRootPath1(); var rootPath = getRootPath(); $('#dt').bootstrapTable('destroy'); $('#dt').bootstrapTable({ method: 'post', contentType: "application/x-www-form-urlencoded", url:rootPath+'/annual/findAllSearch.do', striped: true, //是否显示行间隔色 cache: false, //是否使用缓存,默认为true,所以一般情况下需要设置一下这个属性(*) pagination: true, queryParamsType:'', //默认值为 'limit' ,在默认情况下 传给服务端的参数为:offset,limit,sort // 设置为 '' 在这种情况下传给服务器的参数为:pageSize,pageNumber queryParams:queryParams, singleSelect: false, pageSize: basePage.pageSize, pageList: basePage.pageList, search: false, //不显示 搜索框 showColumns: false, //不显示下拉框(选择显示的列) sidePagination: "server", //服务端请求 clickToSelect: true, //是否启用点击选中行 columns: [ { field: 'Number', title: '行号', width:'1px', formatter: function (value, row, index) { return index+1; }, width:50 }, { field:'id', visible:false },{ field: 'userName', title: '公司名称', valign:'middle' },{ field: 'fileName', title: '文件标题', valign:'middle' }, { field: 'year', title: '年份', valign:'middle' },{ field: 'uploadTime', title: '上传时间', valign:'middle' },{ field: 'resume', title: '内容简述', valign:'middle' },{ field: 'remarks', title: '备注', valign:'middle' },{ field: 'status', title: '是否提交', valign:'middle', formatter:function(value,data,index){ if(data.status==0){ return '未提交'; }else{ return '已提交'; } } },{ field: '', title: '操作', valign:'middle', formatter:function(value,data,index){ var str = '<button style="margin-top: 5px;" onclick="searchRegulations(\''+data.id+'\');" type="button" class="btn btn-info btn-xs" style="margin-right:10px;"><span class="glyphicon glyphicon-search"></span>查看详情</button>'; return str; } } ], onLoadSuccess:function(){}, onLoadError: function () { } }); } function queryParams(params){ var temp = { pageSize:params.pageSize, pageNumber:params.pageNumber, fileName:$('#headNameId').val(), year:$('#headYearId').val(), userCode:$('#searchUserName').val() }; $("#headNameId").val(''); $('#headYearId').val(''); return temp; } //增加通知公告 function addInfo(){ window.location.href="<%=basePath%>regulatory/toAdd.do" } //删除 function delInfo(){ rootPath = getRootPath(); var id = getSelectionsStr(); if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(obj) { debugger; if(obj.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } $.commonReq({ url : rootPath + "/annual/deleteRegs.do", async : true, data : {"ids":id}, success : function(parame) { bootbox.alert( "删除成功!"); window.location.href=rootPath+'/annual/toAnnualReviewPage.do'; }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } }); } //修改通知公告 function updateInfo(){ debugger; $('#saveUpdateBtn').show(); $('#urlDiv2').hide(); $('#urlDiv').show(); var rootPath = getRootPath(); var localhostPath = getRootPath1(); var id = 
getSelectionsStr()+""; if(id==''){ bootbox.alert('请选择要编辑的行!'); return; }else{ if(id.indexOf(',')!=-1){ bootbox.alert('只能选中一行'); return; } } $("#formUpdatesInfo").find("input").val(""); $('#myModalAdd').modal('hide'); $("#uploadFileName").html(''); $("#uploadFileId").val(''); $("#uploadFileId").val(id); $.commonReq({ url : rootPath + "/annual/selectById.do", async : true, data : {"id":id}, success : function(parame) { if(parame.data.status==1){ bootbox.alert('该文件已提交审核,无法对其操作!'); return; } debugger; var obj = parame.data; $("#updateId").val(obj.id); $("#updateFileName").val(obj.fileName); $("#updateYearId").val(obj.year); $("#updateResume").val(obj.resume); $("#updateRemarks").val(obj.remarks); if(obj.fileUrl==''){ $("#downloadHref").hide(); }else{ $("#uploadFileInput").val(obj.fileUrl); $("#downloadHref").attr("href", localhostPath+"/filePath/"+obj.fileUrl ); } if(obj.fileUrl != null && obj.fileUrl.trim() !=''){ $("#uploadFileId").val(obj.fileUrl); var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim()!=''){ var idAry=nameAry[i].split('^'); var id = idAry[0]; var fileName = idAry[1]; var beforeFileName = $("#uploadFileName").html(); var afterFileName = beforeFileName+"&nbsp;&nbsp;&nbsp;" +'<span id='+id+'>'+fileName+'</span>' +'<a onclick="deleteFile('+id+');" style="color:#ff000096;"> 删除</a>'; $("#uploadFileName").html(afterFileName); } } } $('#myModalUpdate').modal('show'); }, error : function(parame) { bootbox.alert('服务器请求失败!'); } }); } function saveUpdateInfo(){ debugger; var flag = true; //检查标题 var name=$('#updateFileName').val(); if(checkNullAndEmpty(name)){ bootbox.alert("环评文件标题不能为空!"); flag=false; return; } //检查年份 var year = $("#updateYearId").val(); if(checkNullAndEmpty(year)){ bootbox.alert("年份不能为空!"); flag=false; return; } var rootPath = getRootPath(); if(flag){ $.commonReq({ url : rootPath + "/annual/updateAnnualRevice.do", async : true, data:$("#formUpdateInfo").serialize(), success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'});
//服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } } function searchRegulations(id){ debugger; var localhostPath = getRootPath1(); var rootPath = getRootPath(); $("#formUpdatesInfo").find("input").val(""); $("#submitBtn").show(); $('#myModalUpdate').modal('hide'); $("#searchUploadFileName").html(''); $("#uploadFileId").val(''); $('#urlDiv').hide(); $.commonReq({ url : rootPath + '/annual/selectById.do', data : { "id" : id }, success : function(data) { debugger; var obj = data.data; $("#searchId").val(obj.id); $("#submitBtn").hide(); $("#searchFileName").val(obj.fileName); $("#searchYearId").val(obj.year); $("#searchResume").val(obj.resume); $("#searchRemarks").val(obj.remarks); var afterName = ''; if(obj.fileUrl !='' && obj.fileUrl != null){ var nameAry = obj.fileUrl.split('|'); for(var i=0;i<nameAry.length;i++){ if(nameAry[i].trim() != ''){ var idAry=nameAry[i].split('^'); var fileName = idAry[1]; var beforeFileList = $("#searchUploadFileName").html(); var url =localhostPath+'/filePath/'+nameAry[i]; var afterFileList =beforeFileList+'&nbsp;&nbsp;&nbsp;&nbsp;'+ '<a href='+url+' target=_blank>'+fileName+'</a>'; $("#searchUploadFileName").html(afterFileList); } } //$('#urlDiv2').show(); } $('#myModalSearch').modal('show'); } }); } //提交 function submitInfo(id){ debugger; var rootPath = getRootPath(); if(id=='' || id==undefined){ id = $("#searchId").val(); } if(id==""){ bootbox.alert('请选择你要提交的数据!'); }else{ bootbox.confirm("提交后您将无法对该文件进行修改或删除操作,确定要提交该数据吗?",function(result){ if(result){ $.commonReq({ url : rootPath + "/annual/sumbitAnnualReview.do", async : true, data:{"id":id}, success : function(data) { $('#myModalAdd').modal('hide'); $('#myModalUpdate').modal('hide'); $('#dt').bootstrapTable('refresh', {url:rootPath + '/annual/findAll.do'}); bootbox.alert("提交成功!"); $('#myModalSearch').modal('hide'); }, error:function(xhr,status,e){ //服务器响应失败时的处理函数 bootbox.alert('服务器请求失败!'); } }); } }); } } /*获取选中的值*/ function getSelectionsStr(){ var rows = $('#dt').bootstrapTable('getSelections'); var str=""; if(rows!=null){ for(var i=0;i<rows.length;i++){ str+=(rows[i].id+"")+","; } str=str.substring(0,str.lastIndexOf(',')); } return str; } /*验证是否为空*/ function checkNullAndEmpty(value){ if(value==null || value.trim()==''){ return true; }else{ return false; } } //判断字符串是否为数字 function checkNumber(value){ var re = /^[0-9]+.?[0-9]*$/; if(null==value||''==value) { return false; }else if(!re.test(value)){ return true; }else{ return false; } } /*时间格式转化*/ function dataFormat(value){ if(value!=null){ var date = new Date(value); var seperator1 = "-"; //年 var year = date.getFullYear(); //月 var month = date.getMonth() + 1; //日 var strDate = date.getDate(); if (month >= 1 && month <= 9) { month = "0" + month; } if (strDate >= 0 && strDate <= 9) { strDate = "0" + strDate; } var currentdate = year + seperator1 + month + seperator1 + strDate; return currentdate; }else{ return ""; } }
bootbox.alert("修改成功!"); }, error:function(xhr,status,e){
conditional_block
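The JavaScript in this row leans on one convention throughout delInfo, updateInfo, and getSelectionsStr: selected row ids are joined with commas, so an empty string means nothing is selected and any comma means more than one row is. The sketch below restates that check, expressed in Rust only for illustration; join_ids and exactly_one_selected are made-up names, not functions from the page.

// Selection convention used by the page: ids are comma-joined,
// and a comma in the joined result means multiple rows are selected.
fn join_ids(ids: &[&str]) -> String {
    ids.join(",")
}

fn exactly_one_selected(joined: &str) -> bool {
    !joined.is_empty() && !joined.contains(',')
}

fn main() {
    assert!(exactly_one_selected(&join_ids(&["42"])));
    assert!(!exactly_one_selected(&join_ids(&["42", "43"]))); // more than one row selected
    assert!(!exactly_one_selected(&join_ids(&[])));           // nothing selected
}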
symm_icon.rs
// Symmetric Icons #![allow(dead_code)] use array2d::*; // lambda, alpha, beta, gamma, omega, symmetry, scale const PRESETS: [[f32; 7]; 36] = [ [1.56, -1., 0.1, -0.82, -0.3, 3., 1.7], [-1.806, 1.806, 0., 1.5, 0., 7., 1.1], [2.4, -2.5, -0.9, 0.9, 0., 3., 1.5], [-2.7, 5., 1.5, 1., 0., 4., 1.], [-2.5, 8., -0.7, 1., 0., 5., 0.8], [-1.9, 1.806, -0.85, 1.8, 0., 7., 1.2], [2.409, -2.5, 0., 0.9, 0., 4., 1.4], [-1.806, 1.807, -0.07, 1.08, 0., 6., 1.2], [-2.34, 2.2, 0.4, 0.05, 0., 5., 1.2], [-2.57, 3.2, 1.2, -1.75, 0., 36., 1.2], [-2.6, 4., 1.5, 1., 0., 12., 1.1], [-2.2, 2.3, 0.55, -0.90, 0., 3., 1.3], [-2.205, 6.01, 13.5814, -0.2044, 0.011, 5., 0.8], [-2.7, 8.7, 13.86, -0.13, -0.18, 18., 0.8], [-2.52, 8.75, 12., 0.04, 0.18, 5., 0.8], [2.38, -4.18, 19.99, -0.69, 0.095, 17., 1.], [2.33, -8.22, -6.07, -0.52, 0.16, 4., 0.8], [-1.62, 2.049, 1.422, 1.96, 0.56, 6., 1.], [-1.89, 9.62, 1.95, 0.51, 0.21, 3., 0.6], [-1.65, 9.99, 1.57, 1.46, -0.55, 3., 0.8], [-2.7, 5., 1.5, 1., 0., 6., 1.], [-2.08, 1., -0.1, 0.167, 0., 7., 1.3], [1.56, -1., 0.1, -0.82, 0.12, 3., 1.6], [-1.806, 1.806, 0., 1., 0., 5., 1.1], [1.56, -1., 0.1, -0.82, 0., 3., 1.3], [-2.195, 10., -12., 1., 0., 3., 0.7], [-1.86, 2., 0., 1., 0.1, 4., 1.2], [-2.34, 2., 0.2, 0.1, 0., 5., 1.2], [2.6, -2., 0., 0.5, 0., 5., 1.3], [-2.5, 5., -1.9, 1., 0.188, 5., 1.], [2.409, -2.5, 0., 0.9, 0., 23., 1.2], [2.409, -2.5, -0.2, 0.81, 0., 24., 1.2], [-2.05, 3., -16.79, 1., 0., 9., 1.], [-2.32, 2.32, 0., 0.75, 0., 5., 1.2], [2.5, -2.5, 0., 0.9, 0., 3., 1.3], [1.5, -1., 0.1, -0.805, 0., 3., 1.4], ]; const MAX_XY : f32 = 1e5; const DEFAULT_SPEED : u32 = 100; const MAX_COLORS : u32 = 2111; const COLOR_SPEED : u32 = 3071; pub struct SymmetricIcons { lambda : f32, alpha : f32, beta : f32, gamma : f32, omega : f32, symmetry : u32, scale : f32, w : usize, h : usize, color_set : u32, iter : u32, speed : u32, apcx : f32, apcy : f32, rad : f32, color_list: Vec<u32>, icon : Array2D<u32>, image : Array2D<u32>, x : f32, y : f32, k : u32, } impl SymmetricIcons { pub fn new(w : usize, h : usize, color_set : u32) -> Self { let mut s = Self { lambda : 0.0, alpha : 0.0, beta : 0.0, gamma : 0.0, omega : 0.0, symmetry : 0, scale : 0.0, w : w, h : h, color_set : color_set, iter : 0, speed : DEFAULT_SPEED, apcx : 0.0, apcy : 0.0, rad : 0.0, color_list : vec![], icon : Array2D::filled_with(0_u32, w, h), image : Array2D::filled_with(0_u32, w, h), x : 0.0, y : 0.0, k : 0, }; s.set_preset(0); s } pub fn set_size(&mut self, w : usize, h : usize) { self.w = w; self.h = h; self.image = Array2D::filled_with(0_u32, w, h); self.icon = Array2D::filled_with(0_u32, w, h); self.iter = 0; self.color_list = vec![]; self.reset(); } pub fn set_preset(&mut self, i : usize) { let p = PRESETS[i % PRESETS.len()]; self.lambda = p[0]; self.alpha = p[1]; self.beta = p[2]; self.gamma = p[3]; self.omega = p[4]; self.symmetry = p[5] as u32; self.scale = if p[6] == 0. {1.} else {p[6]}; self.reset(); } pub fn set_parameters(&mut self, lambda : f32, alpha: f32, beta : f32, gamma : f32, omega : f32, symmetry : f32, scale : f32)
fn make_color(r : u32, g : u32, b : u32) -> u32 { (b << 16) | (g << 8) | r | 0xff00_0000 } fn make_colora(a : u32, r : u32, g : u32, b : u32) -> u32 { (a << 24) | (b << 16) | (g << 8) | r } fn get_rainbow(x : u32, y : u32) -> u32 { match x { 0 => Self::make_color(0, y, 255), 1 => Self::make_color(0, 255, 255 - y), 2 => Self::make_color(y, 255, 0), 3 => Self::make_color(255, 255 - y, 0), 4 => Self::make_color(255, 0, y), 5 => Self::make_color(255 - y, 0, 255), _ => Self::make_color(0,0,0), // black } } fn set_colors(&mut self, param_int : u32) { let mut colors = vec![0_u32; (MAX_COLORS+1) as usize]; match param_int { 0 => { for i in 0..64 { colors[i] = Self::make_color(0, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 1 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(i, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 2 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 3 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 4 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 0) } for i in 0..256 { let local_color = Self::make_color(255, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 5 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 6 => for i in 0..256 { colors[(i + 64)] = Self::make_colora(255, 255 - i as u32, 255 - i as u32, 255) }, 7 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255) }, 8 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255 - i as u32) }, 9 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255, 255 - i as u32) }, 10 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255 - i as u32)} , 11 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255)}, _ => () } if param_int > 5 { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 4 * i as u32) } for j in 0..5 { for i in 0..256 { colors[(320 + j * 256 + i)] = Self::get_rainbow((param_int + j as u32) % 6, i as u32) } } for i in 0..256 { let local_color = Self::get_rainbow((param_int - 1) % 6, i); colors[(1600 + 2 * i as usize)] = local_color; colors[(1601 + 2 * i as usize)] = local_color; } } else { // <= 5 for j in 0..5 { for i in 0..256 { colors[64 + j * 256 + i] = Self::get_rainbow((param_int + j as u32) % 6, i as u32); } } } self.color_list = colors } fn reset(&mut self) { self.speed = DEFAULT_SPEED; self.apcx = self.w as f32 / 2.; self.apcy = self.h as f32 / 2.; self.rad = if self.apcx > self.apcy {self.apcy} else {self.apcx}; self.k = 0; self.x = 0.01; self.y = 0.003; self.iter = 0; self.icon = Array2D::filled_with(0_u32, self.w, self.h); self.image = Array2D::filled_with(0_u32, self.w, self.h); self.set_colors(self.color_set); for m in 0..self.w { for n in 0..self.h { let color = 
self.get_color(self.icon[(m, n)]); self.set_point_color(m, n, color); } } } fn set_point_color(&mut self, x : usize, y : usize, color : u32) { self.image[(x, y)] = color; } fn get_color(&mut self, col : u32) -> u32 { let col = col & 0x00ffffff; if col * self.speed > MAX_COLORS { while (col * self.speed > COLOR_SPEED) && (self.speed > 3) { self.speed-=1 } self.color_list[MAX_COLORS as usize] } else { self.color_list[(col * self.speed) as usize] } } fn set_point(&mut self, x : usize, y : usize) { let icon = self.icon[(x,y)]; let color = self.get_color(icon); self.image[(x,y)] = color; self.icon[(x,y)] += 1; if icon >= 12288 { self.icon[(x,y)] = 8192 } } pub fn generate(&mut self, mod_disp : u32) -> bool { // geenrate icon, runs in a thread in 'start' self.iter+=1; if self.x.abs() > MAX_XY || self.y.abs() > MAX_XY { self.reset(); // prevent overflow } // generate new x,y let sq = self.x * self.x + self.y * self.y; // sq=x^2+y^2 let mut tx = self.x; let mut ty = self.y; // tx=pow, ty=pow for _m in 1..self.symmetry - 2 + 1 { let sqx = tx * self.x - ty * self.y; let sqy = ty * self.x + tx * self.y; tx = sqx; ty = sqy; } let sqx = self.x * tx - self.y * ty; let tmp = self.lambda + self.alpha * sq + self.beta * sqx; let x_new = tmp * self.x +self.gamma * tx - self.omega * self.y; let y_new = tmp * self.y - self.gamma * ty + self.omega * self.x; self.x = x_new; self.y = y_new; if self.k > 50 { self.set_point((self.apcx + self.x * self.rad / self.scale) as usize, (self.apcy + self.y * self.rad / self.scale) as usize); } else { self.k += 1; } self.iter % mod_disp == 0 } pub fn build(&mut self, preset : usize, n_iters : usize) -> (&[u8], (usize, usize)) { self.set_preset(preset); for _ in 0..n_iters { self.generate(1); } ( self.get_image(), self.get_size() ) } pub fn get_size(&self) -> (usize, usize) { ( self.w, self.h ) } pub fn write(&self, name : &str) { // showbinimage.py 800 800 symm_icon.bin use std::fs::File; use std::io::prelude::*; File::create(name).expect("create failed") .write_all(self.get_image()).expect("write failed"); } pub fn get_image(&self) -> &[u8] { // convert Vec<u32> to [u8] let v = self.image.as_row_major(); unsafe { std::slice::from_raw_parts( v.as_ptr() as *const u8, v.len() * std::mem::size_of::<u32>(), ) } } } pub fn _test_symmetric_icon() { let n = 2048; let mut symicn = SymmetricIcons::new(n, n, 0); symicn.set_preset(9); for _i in 0..900_000 { symicn.generate(5000); } symicn.write("symm_icon.bin"); use std::process::Command; let n = &n.to_string()[..]; Command::new("/usr/local/bin/showbinimage.py") .args(&[n, n, "symm_icon.bin"]) .output().expect("can't execute command"); } pub fn _test_array2d() { // Array2D is faster 1.67 vs 2.14 (1.3 times faster) use std::time::Instant; const N : usize = 100_000_000; const SZ : usize = 1200; let v = vec![0_usize; SZ*SZ]; let a = Array2D::filled_with(0_usize, SZ, SZ); let t = Instant::now(); for _ in 0..N { for i in 0..SZ { for j in 0..SZ { let crd = i*SZ+j; let x = v[crd]; let _xx = x+1; } } } println!("lap vec : {:?}", Instant::now()-t); let t = Instant::now(); for _ in 0..N { for r in 0..SZ { for c in 0..SZ { let x = a[(r,c)]; let _xx = x+1; } } } println!("lap array2d : {:?}", Instant::now()-t); }
{ self.lambda = lambda; self.alpha = alpha; self.beta = beta; self.gamma = gamma; self.omega = omega; self.symmetry = if symmetry < 1. { 1 } else { symmetry as u32 }; self.scale = if scale == 0. {1.} else { scale }; self.reset(); }
identifier_body
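The middle recovered for this row is the body of set_parameters, whose only real logic is input sanitising: symmetry is forced up to at least 1 and a zero scale falls back to 1.0. Here is a standalone sketch of that clamping; clamp_params is an illustrative name rather than anything defined in symm_icon.rs.

// Mirrors the sanitising in the recovered `set_parameters` body:
// symmetry is at least 1, and a scale of 0.0 falls back to 1.0.
fn clamp_params(symmetry: f32, scale: f32) -> (u32, f32) {
    let symmetry = if symmetry < 1.0 { 1 } else { symmetry as u32 };
    let scale = if scale == 0.0 { 1.0 } else { scale };
    (symmetry, scale)
}

fn main() {
    assert_eq!(clamp_params(0.3, 0.0), (1, 1.0));
    assert_eq!(clamp_params(5.0, 1.2), (5, 1.2));
}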
symm_icon.rs
// Symmetric Icons #![allow(dead_code)] use array2d::*; // lambda, alpha, beta, gamma, omega, symmetry, scale const PRESETS: [[f32; 7]; 36] = [ [1.56, -1., 0.1, -0.82, -0.3, 3., 1.7], [-1.806, 1.806, 0., 1.5, 0., 7., 1.1], [2.4, -2.5, -0.9, 0.9, 0., 3., 1.5], [-2.7, 5., 1.5, 1., 0., 4., 1.], [-2.5, 8., -0.7, 1., 0., 5., 0.8], [-1.9, 1.806, -0.85, 1.8, 0., 7., 1.2], [2.409, -2.5, 0., 0.9, 0., 4., 1.4], [-1.806, 1.807, -0.07, 1.08, 0., 6., 1.2], [-2.34, 2.2, 0.4, 0.05, 0., 5., 1.2], [-2.57, 3.2, 1.2, -1.75, 0., 36., 1.2], [-2.6, 4., 1.5, 1., 0., 12., 1.1], [-2.2, 2.3, 0.55, -0.90, 0., 3., 1.3], [-2.205, 6.01, 13.5814, -0.2044, 0.011, 5., 0.8], [-2.7, 8.7, 13.86, -0.13, -0.18, 18., 0.8], [-2.52, 8.75, 12., 0.04, 0.18, 5., 0.8], [2.38, -4.18, 19.99, -0.69, 0.095, 17., 1.], [2.33, -8.22, -6.07, -0.52, 0.16, 4., 0.8], [-1.62, 2.049, 1.422, 1.96, 0.56, 6., 1.], [-1.89, 9.62, 1.95, 0.51, 0.21, 3., 0.6], [-1.65, 9.99, 1.57, 1.46, -0.55, 3., 0.8], [-2.7, 5., 1.5, 1., 0., 6., 1.], [-2.08, 1., -0.1, 0.167, 0., 7., 1.3], [1.56, -1., 0.1, -0.82, 0.12, 3., 1.6], [-1.806, 1.806, 0., 1., 0., 5., 1.1], [1.56, -1., 0.1, -0.82, 0., 3., 1.3], [-2.195, 10., -12., 1., 0., 3., 0.7], [-1.86, 2., 0., 1., 0.1, 4., 1.2], [-2.34, 2., 0.2, 0.1, 0., 5., 1.2], [2.6, -2., 0., 0.5, 0., 5., 1.3], [-2.5, 5., -1.9, 1., 0.188, 5., 1.], [2.409, -2.5, 0., 0.9, 0., 23., 1.2], [2.409, -2.5, -0.2, 0.81, 0., 24., 1.2], [-2.05, 3., -16.79, 1., 0., 9., 1.], [-2.32, 2.32, 0., 0.75, 0., 5., 1.2], [2.5, -2.5, 0., 0.9, 0., 3., 1.3], [1.5, -1., 0.1, -0.805, 0., 3., 1.4], ]; const MAX_XY : f32 = 1e5; const DEFAULT_SPEED : u32 = 100; const MAX_COLORS : u32 = 2111; const COLOR_SPEED : u32 = 3071; pub struct SymmetricIcons { lambda : f32, alpha : f32, beta : f32, gamma : f32, omega : f32, symmetry : u32, scale : f32, w : usize, h : usize, color_set : u32, iter : u32, speed : u32, apcx : f32, apcy : f32, rad : f32, color_list: Vec<u32>, icon : Array2D<u32>, image : Array2D<u32>, x : f32, y : f32, k : u32, } impl SymmetricIcons { pub fn new(w : usize, h : usize, color_set : u32) -> Self { let mut s = Self { lambda : 0.0, alpha : 0.0, beta : 0.0, gamma : 0.0, omega : 0.0, symmetry : 0, scale : 0.0, w : w, h : h, color_set : color_set, iter : 0, speed : DEFAULT_SPEED, apcx : 0.0, apcy : 0.0, rad : 0.0, color_list : vec![], icon : Array2D::filled_with(0_u32, w, h), image : Array2D::filled_with(0_u32, w, h), x : 0.0, y : 0.0, k : 0, }; s.set_preset(0); s } pub fn set_size(&mut self, w : usize, h : usize) { self.w = w; self.h = h;
self.iter = 0; self.color_list = vec![]; self.reset(); } pub fn set_preset(&mut self, i : usize) { let p = PRESETS[i % PRESETS.len()]; self.lambda = p[0]; self.alpha = p[1]; self.beta = p[2]; self.gamma = p[3]; self.omega = p[4]; self.symmetry = p[5] as u32; self.scale = if p[6] == 0. {1.} else {p[6]}; self.reset(); } pub fn set_parameters(&mut self, lambda : f32, alpha: f32, beta : f32, gamma : f32, omega : f32, symmetry : f32, scale : f32) { self.lambda = lambda; self.alpha = alpha; self.beta = beta; self.gamma = gamma; self.omega = omega; self.symmetry = if symmetry < 1. { 1 } else { symmetry as u32 }; self.scale = if scale == 0. {1.} else { scale }; self.reset(); } fn make_color(r : u32, g : u32, b : u32) -> u32 { (b << 16) | (g << 8) | r | 0xff00_0000 } fn make_colora(a : u32, r : u32, g : u32, b : u32) -> u32 { (a << 24) | (b << 16) | (g << 8) | r } fn get_rainbow(x : u32, y : u32) -> u32 { match x { 0 => Self::make_color(0, y, 255), 1 => Self::make_color(0, 255, 255 - y), 2 => Self::make_color(y, 255, 0), 3 => Self::make_color(255, 255 - y, 0), 4 => Self::make_color(255, 0, y), 5 => Self::make_color(255 - y, 0, 255), _ => Self::make_color(0,0,0), // black } } fn set_colors(&mut self, param_int : u32) { let mut colors = vec![0_u32; (MAX_COLORS+1) as usize]; match param_int { 0 => { for i in 0..64 { colors[i] = Self::make_color(0, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 1 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(i, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 2 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 3 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 4 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 0) } for i in 0..256 { let local_color = Self::make_color(255, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 5 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 6 => for i in 0..256 { colors[(i + 64)] = Self::make_colora(255, 255 - i as u32, 255 - i as u32, 255) }, 7 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255) }, 8 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255 - i as u32) }, 9 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255, 255 - i as u32) }, 10 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255 - i as u32)} , 11 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255)}, _ => () } if param_int > 5 { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 4 * i as u32) } for j in 0..5 { for i in 0..256 { colors[(320 + j * 256 + i)] = Self::get_rainbow((param_int + j as u32) % 6, i as u32) } } for i in 0..256 { let local_color = Self::get_rainbow((param_int - 1) % 6, i); colors[(1600 + 2 * i as usize)] = local_color; 
colors[(1601 + 2 * i as usize)] = local_color; } } else { // <= 5 for j in 0..5 { for i in 0..256 { colors[64 + j * 256 + i] = Self::get_rainbow((param_int + j as u32) % 6, i as u32); } } } self.color_list = colors } fn reset(&mut self) { self.speed = DEFAULT_SPEED; self.apcx = self.w as f32 / 2.; self.apcy = self.h as f32 / 2.; self.rad = if self.apcx > self.apcy {self.apcy} else {self.apcx}; self.k = 0; self.x = 0.01; self.y = 0.003; self.iter = 0; self.icon = Array2D::filled_with(0_u32, self.w, self.h); self.image = Array2D::filled_with(0_u32, self.w, self.h); self.set_colors(self.color_set); for m in 0..self.w { for n in 0..self.h { let color = self.get_color(self.icon[(m, n)]); self.set_point_color(m, n, color); } } } fn set_point_color(&mut self, x : usize, y : usize, color : u32) { self.image[(x, y)] = color; } fn get_color(&mut self, col : u32) -> u32 { let col = col & 0x00ffffff; if col * self.speed > MAX_COLORS { while (col * self.speed > COLOR_SPEED) && (self.speed > 3) { self.speed-=1 } self.color_list[MAX_COLORS as usize] } else { self.color_list[(col * self.speed) as usize] } } fn set_point(&mut self, x : usize, y : usize) { let icon = self.icon[(x,y)]; let color = self.get_color(icon); self.image[(x,y)] = color; self.icon[(x,y)] += 1; if icon >= 12288 { self.icon[(x,y)] = 8192 } } pub fn generate(&mut self, mod_disp : u32) -> bool { // geenrate icon, runs in a thread in 'start' self.iter+=1; if self.x.abs() > MAX_XY || self.y.abs() > MAX_XY { self.reset(); // prevent overflow } // generate new x,y let sq = self.x * self.x + self.y * self.y; // sq=x^2+y^2 let mut tx = self.x; let mut ty = self.y; // tx=pow, ty=pow for _m in 1..self.symmetry - 2 + 1 { let sqx = tx * self.x - ty * self.y; let sqy = ty * self.x + tx * self.y; tx = sqx; ty = sqy; } let sqx = self.x * tx - self.y * ty; let tmp = self.lambda + self.alpha * sq + self.beta * sqx; let x_new = tmp * self.x +self.gamma * tx - self.omega * self.y; let y_new = tmp * self.y - self.gamma * ty + self.omega * self.x; self.x = x_new; self.y = y_new; if self.k > 50 { self.set_point((self.apcx + self.x * self.rad / self.scale) as usize, (self.apcy + self.y * self.rad / self.scale) as usize); } else { self.k += 1; } self.iter % mod_disp == 0 } pub fn build(&mut self, preset : usize, n_iters : usize) -> (&[u8], (usize, usize)) { self.set_preset(preset); for _ in 0..n_iters { self.generate(1); } ( self.get_image(), self.get_size() ) } pub fn get_size(&self) -> (usize, usize) { ( self.w, self.h ) } pub fn write(&self, name : &str) { // showbinimage.py 800 800 symm_icon.bin use std::fs::File; use std::io::prelude::*; File::create(name).expect("create failed") .write_all(self.get_image()).expect("write failed"); } pub fn get_image(&self) -> &[u8] { // convert Vec<u32> to [u8] let v = self.image.as_row_major(); unsafe { std::slice::from_raw_parts( v.as_ptr() as *const u8, v.len() * std::mem::size_of::<u32>(), ) } } } pub fn _test_symmetric_icon() { let n = 2048; let mut symicn = SymmetricIcons::new(n, n, 0); symicn.set_preset(9); for _i in 0..900_000 { symicn.generate(5000); } symicn.write("symm_icon.bin"); use std::process::Command; let n = &n.to_string()[..]; Command::new("/usr/local/bin/showbinimage.py") .args(&[n, n, "symm_icon.bin"]) .output().expect("can't execute command"); } pub fn _test_array2d() { // Array2D is faster 1.67 vs 2.14 (1.3 times faster) use std::time::Instant; const N : usize = 100_000_000; const SZ : usize = 1200; let v = vec![0_usize; SZ*SZ]; let a = Array2D::filled_with(0_usize, SZ, SZ); let t = 
Instant::now(); for _ in 0..N { for i in 0..SZ { for j in 0..SZ { let crd = i*SZ+j; let x = v[crd]; let _xx = x+1; } } } println!("lap vec : {:?}", Instant::now()-t); let t = Instant::now(); for _ in 0..N { for r in 0..SZ { for c in 0..SZ { let x = a[(r,c)]; let _xx = x+1; } } } println!("lap array2d : {:?}", Instant::now()-t); }
self.image = Array2D::filled_with(0_u32, w, h); self.icon = Array2D::filled_with(0_u32, w, h);
random_line_split
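The interesting arithmetic in this file sits in generate, which this row carries in full. Read with z = x + iy, the update is the familiar symmetric-icon map z' = (lambda + alpha*|z|^2 + beta*Re(z^m))*z + gamma*conj(z)^(m-1) + i*omega*z, where m is the symmetry. The sketch below re-derives one step from that reading; icon_step is an illustrative name, and the saturating_sub is a defensive tweak of mine, since the original loop bound would underflow for symmetry below 2 (the presets never go below 3).

// One step of the symmetric-icon map, following the arithmetic in `generate`.
fn icon_step(
    x: f32, y: f32,
    lambda: f32, alpha: f32, beta: f32, gamma: f32, omega: f32,
    symmetry: u32,
) -> (f32, f32) {
    // (tx, ty) accumulates z^(symmetry - 1).
    let (mut tx, mut ty) = (x, y);
    for _ in 0..symmetry.saturating_sub(2) {
        let (nx, ny) = (tx * x - ty * y, ty * x + tx * y);
        tx = nx;
        ty = ny;
    }
    let sq = x * x + y * y;      // |z|^2
    let re_zm = x * tx - y * ty; // Re(z^symmetry)
    let tmp = lambda + alpha * sq + beta * re_zm;
    (
        tmp * x + gamma * tx - omega * y, // new x
        tmp * y - gamma * ty + omega * x, // new y
    )
}

fn main() {
    // Preset 0 parameters from the table above, starting near the origin.
    let (mut x, mut y) = (0.01_f32, 0.003);
    for _ in 0..5 {
        let (nx, ny) = icon_step(x, y, 1.56, -1.0, 0.1, -0.82, -0.3, 3);
        x = nx;
        y = ny;
    }
    println!("after 5 steps: ({x}, {y})");
}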
symm_icon.rs
// Symmetric Icons #![allow(dead_code)] use array2d::*; // lambda, alpha, beta, gamma, omega, symmetry, scale const PRESETS: [[f32; 7]; 36] = [ [1.56, -1., 0.1, -0.82, -0.3, 3., 1.7], [-1.806, 1.806, 0., 1.5, 0., 7., 1.1], [2.4, -2.5, -0.9, 0.9, 0., 3., 1.5], [-2.7, 5., 1.5, 1., 0., 4., 1.], [-2.5, 8., -0.7, 1., 0., 5., 0.8], [-1.9, 1.806, -0.85, 1.8, 0., 7., 1.2], [2.409, -2.5, 0., 0.9, 0., 4., 1.4], [-1.806, 1.807, -0.07, 1.08, 0., 6., 1.2], [-2.34, 2.2, 0.4, 0.05, 0., 5., 1.2], [-2.57, 3.2, 1.2, -1.75, 0., 36., 1.2], [-2.6, 4., 1.5, 1., 0., 12., 1.1], [-2.2, 2.3, 0.55, -0.90, 0., 3., 1.3], [-2.205, 6.01, 13.5814, -0.2044, 0.011, 5., 0.8], [-2.7, 8.7, 13.86, -0.13, -0.18, 18., 0.8], [-2.52, 8.75, 12., 0.04, 0.18, 5., 0.8], [2.38, -4.18, 19.99, -0.69, 0.095, 17., 1.], [2.33, -8.22, -6.07, -0.52, 0.16, 4., 0.8], [-1.62, 2.049, 1.422, 1.96, 0.56, 6., 1.], [-1.89, 9.62, 1.95, 0.51, 0.21, 3., 0.6], [-1.65, 9.99, 1.57, 1.46, -0.55, 3., 0.8], [-2.7, 5., 1.5, 1., 0., 6., 1.], [-2.08, 1., -0.1, 0.167, 0., 7., 1.3], [1.56, -1., 0.1, -0.82, 0.12, 3., 1.6], [-1.806, 1.806, 0., 1., 0., 5., 1.1], [1.56, -1., 0.1, -0.82, 0., 3., 1.3], [-2.195, 10., -12., 1., 0., 3., 0.7], [-1.86, 2., 0., 1., 0.1, 4., 1.2], [-2.34, 2., 0.2, 0.1, 0., 5., 1.2], [2.6, -2., 0., 0.5, 0., 5., 1.3], [-2.5, 5., -1.9, 1., 0.188, 5., 1.], [2.409, -2.5, 0., 0.9, 0., 23., 1.2], [2.409, -2.5, -0.2, 0.81, 0., 24., 1.2], [-2.05, 3., -16.79, 1., 0., 9., 1.], [-2.32, 2.32, 0., 0.75, 0., 5., 1.2], [2.5, -2.5, 0., 0.9, 0., 3., 1.3], [1.5, -1., 0.1, -0.805, 0., 3., 1.4], ]; const MAX_XY : f32 = 1e5; const DEFAULT_SPEED : u32 = 100; const MAX_COLORS : u32 = 2111; const COLOR_SPEED : u32 = 3071; pub struct SymmetricIcons { lambda : f32, alpha : f32, beta : f32, gamma : f32, omega : f32, symmetry : u32, scale : f32, w : usize, h : usize, color_set : u32, iter : u32, speed : u32, apcx : f32, apcy : f32, rad : f32, color_list: Vec<u32>, icon : Array2D<u32>, image : Array2D<u32>, x : f32, y : f32, k : u32, } impl SymmetricIcons { pub fn new(w : usize, h : usize, color_set : u32) -> Self { let mut s = Self { lambda : 0.0, alpha : 0.0, beta : 0.0, gamma : 0.0, omega : 0.0, symmetry : 0, scale : 0.0, w : w, h : h, color_set : color_set, iter : 0, speed : DEFAULT_SPEED, apcx : 0.0, apcy : 0.0, rad : 0.0, color_list : vec![], icon : Array2D::filled_with(0_u32, w, h), image : Array2D::filled_with(0_u32, w, h), x : 0.0, y : 0.0, k : 0, }; s.set_preset(0); s } pub fn set_size(&mut self, w : usize, h : usize) { self.w = w; self.h = h; self.image = Array2D::filled_with(0_u32, w, h); self.icon = Array2D::filled_with(0_u32, w, h); self.iter = 0; self.color_list = vec![]; self.reset(); } pub fn set_preset(&mut self, i : usize) { let p = PRESETS[i % PRESETS.len()]; self.lambda = p[0]; self.alpha = p[1]; self.beta = p[2]; self.gamma = p[3]; self.omega = p[4]; self.symmetry = p[5] as u32; self.scale = if p[6] == 0. {1.} else {p[6]}; self.reset(); } pub fn set_parameters(&mut self, lambda : f32, alpha: f32, beta : f32, gamma : f32, omega : f32, symmetry : f32, scale : f32) { self.lambda = lambda; self.alpha = alpha; self.beta = beta; self.gamma = gamma; self.omega = omega; self.symmetry = if symmetry < 1. { 1 } else { symmetry as u32 }; self.scale = if scale == 0. 
{1.} else { scale }; self.reset(); } fn make_color(r : u32, g : u32, b : u32) -> u32 { (b << 16) | (g << 8) | r | 0xff00_0000 } fn make_colora(a : u32, r : u32, g : u32, b : u32) -> u32 { (a << 24) | (b << 16) | (g << 8) | r } fn get_rainbow(x : u32, y : u32) -> u32 { match x { 0 => Self::make_color(0, y, 255), 1 => Self::make_color(0, 255, 255 - y), 2 => Self::make_color(y, 255, 0), 3 => Self::make_color(255, 255 - y, 0), 4 => Self::make_color(255, 0, y), 5 => Self::make_color(255 - y, 0, 255), _ => Self::make_color(0,0,0), // black } } fn set_colors(&mut self, param_int : u32) { let mut colors = vec![0_u32; (MAX_COLORS+1) as usize]; match param_int { 0 => { for i in 0..64 { colors[i] = Self::make_color(0, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 1 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(i, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 2 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 3 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 4 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 0) } for i in 0..256 { let local_color = Self::make_color(255, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 5 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 6 => for i in 0..256 { colors[(i + 64)] = Self::make_colora(255, 255 - i as u32, 255 - i as u32, 255) }, 7 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255) }, 8 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255 - i as u32) }, 9 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255, 255 - i as u32) }, 10 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255 - i as u32)} , 11 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255)}, _ => () } if param_int > 5 { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 4 * i as u32) } for j in 0..5 { for i in 0..256 { colors[(320 + j * 256 + i)] = Self::get_rainbow((param_int + j as u32) % 6, i as u32) } } for i in 0..256 { let local_color = Self::get_rainbow((param_int - 1) % 6, i); colors[(1600 + 2 * i as usize)] = local_color; colors[(1601 + 2 * i as usize)] = local_color; } } else { // <= 5 for j in 0..5 { for i in 0..256 { colors[64 + j * 256 + i] = Self::get_rainbow((param_int + j as u32) % 6, i as u32); } } } self.color_list = colors } fn reset(&mut self) { self.speed = DEFAULT_SPEED; self.apcx = self.w as f32 / 2.; self.apcy = self.h as f32 / 2.; self.rad = if self.apcx > self.apcy {self.apcy} else {self.apcx}; self.k = 0; self.x = 0.01; self.y = 0.003; self.iter = 0; self.icon = Array2D::filled_with(0_u32, self.w, self.h); self.image = Array2D::filled_with(0_u32, self.w, self.h); self.set_colors(self.color_set); for m in 0..self.w { for n in 0..self.h { 
let color = self.get_color(self.icon[(m, n)]); self.set_point_color(m, n, color); } } } fn set_point_color(&mut self, x : usize, y : usize, color : u32) { self.image[(x, y)] = color; } fn get_color(&mut self, col : u32) -> u32 { let col = col & 0x00ffffff; if col * self.speed > MAX_COLORS { while (col * self.speed > COLOR_SPEED) && (self.speed > 3) { self.speed-=1 } self.color_list[MAX_COLORS as usize] } else
} fn set_point(&mut self, x : usize, y : usize) { let icon = self.icon[(x,y)]; let color = self.get_color(icon); self.image[(x,y)] = color; self.icon[(x,y)] += 1; if icon >= 12288 { self.icon[(x,y)] = 8192 } } pub fn generate(&mut self, mod_disp : u32) -> bool { // geenrate icon, runs in a thread in 'start' self.iter+=1; if self.x.abs() > MAX_XY || self.y.abs() > MAX_XY { self.reset(); // prevent overflow } // generate new x,y let sq = self.x * self.x + self.y * self.y; // sq=x^2+y^2 let mut tx = self.x; let mut ty = self.y; // tx=pow, ty=pow for _m in 1..self.symmetry - 2 + 1 { let sqx = tx * self.x - ty * self.y; let sqy = ty * self.x + tx * self.y; tx = sqx; ty = sqy; } let sqx = self.x * tx - self.y * ty; let tmp = self.lambda + self.alpha * sq + self.beta * sqx; let x_new = tmp * self.x +self.gamma * tx - self.omega * self.y; let y_new = tmp * self.y - self.gamma * ty + self.omega * self.x; self.x = x_new; self.y = y_new; if self.k > 50 { self.set_point((self.apcx + self.x * self.rad / self.scale) as usize, (self.apcy + self.y * self.rad / self.scale) as usize); } else { self.k += 1; } self.iter % mod_disp == 0 } pub fn build(&mut self, preset : usize, n_iters : usize) -> (&[u8], (usize, usize)) { self.set_preset(preset); for _ in 0..n_iters { self.generate(1); } ( self.get_image(), self.get_size() ) } pub fn get_size(&self) -> (usize, usize) { ( self.w, self.h ) } pub fn write(&self, name : &str) { // showbinimage.py 800 800 symm_icon.bin use std::fs::File; use std::io::prelude::*; File::create(name).expect("create failed") .write_all(self.get_image()).expect("write failed"); } pub fn get_image(&self) -> &[u8] { // convert Vec<u32> to [u8] let v = self.image.as_row_major(); unsafe { std::slice::from_raw_parts( v.as_ptr() as *const u8, v.len() * std::mem::size_of::<u32>(), ) } } } pub fn _test_symmetric_icon() { let n = 2048; let mut symicn = SymmetricIcons::new(n, n, 0); symicn.set_preset(9); for _i in 0..900_000 { symicn.generate(5000); } symicn.write("symm_icon.bin"); use std::process::Command; let n = &n.to_string()[..]; Command::new("/usr/local/bin/showbinimage.py") .args(&[n, n, "symm_icon.bin"]) .output().expect("can't execute command"); } pub fn _test_array2d() { // Array2D is faster 1.67 vs 2.14 (1.3 times faster) use std::time::Instant; const N : usize = 100_000_000; const SZ : usize = 1200; let v = vec![0_usize; SZ*SZ]; let a = Array2D::filled_with(0_usize, SZ, SZ); let t = Instant::now(); for _ in 0..N { for i in 0..SZ { for j in 0..SZ { let crd = i*SZ+j; let x = v[crd]; let _xx = x+1; } } } println!("lap vec : {:?}", Instant::now()-t); let t = Instant::now(); for _ in 0..N { for r in 0..SZ { for c in 0..SZ { let x = a[(r,c)]; let _xx = x+1; } } } println!("lap array2d : {:?}", Instant::now()-t); }
{ self.color_list[(col * self.speed) as usize] }
conditional_block
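The middle recovered here is the else-branch of get_color, which only makes sense next to the if-branch around it: pixel hit counts become palette indices by multiplying with speed, and once indices would run past the palette, speed is walked down (never below 3) and the last palette entry is returned. A standalone sketch of that throttling follows; palette_index is an illustrative name, and the 0x00ffffff mask from the original is omitted.

// Palette-index throttling as in `get_color`: index = hits * speed, with
// `speed` backed off once indices start running past the palette.
const MAX_COLORS: u32 = 2111;
const COLOR_SPEED: u32 = 3071;

fn palette_index(hits: u32, speed: &mut u32) -> usize {
    if hits * *speed > MAX_COLORS {
        while hits * *speed > COLOR_SPEED && *speed > 3 {
            *speed -= 1;
        }
        MAX_COLORS as usize
    } else {
        (hits * *speed) as usize
    }
}

fn main() {
    let mut speed = 100; // DEFAULT_SPEED in the original
    assert_eq!(palette_index(1, &mut speed), 100);
    let idx = palette_index(50, &mut speed); // forces the speed to back off
    assert_eq!(idx, MAX_COLORS as usize);
    assert!(speed < 100);
}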
symm_icon.rs
// Symmetric Icons #![allow(dead_code)] use array2d::*; // lambda, alpha, beta, gamma, omega, symmetry, scale const PRESETS: [[f32; 7]; 36] = [ [1.56, -1., 0.1, -0.82, -0.3, 3., 1.7], [-1.806, 1.806, 0., 1.5, 0., 7., 1.1], [2.4, -2.5, -0.9, 0.9, 0., 3., 1.5], [-2.7, 5., 1.5, 1., 0., 4., 1.], [-2.5, 8., -0.7, 1., 0., 5., 0.8], [-1.9, 1.806, -0.85, 1.8, 0., 7., 1.2], [2.409, -2.5, 0., 0.9, 0., 4., 1.4], [-1.806, 1.807, -0.07, 1.08, 0., 6., 1.2], [-2.34, 2.2, 0.4, 0.05, 0., 5., 1.2], [-2.57, 3.2, 1.2, -1.75, 0., 36., 1.2], [-2.6, 4., 1.5, 1., 0., 12., 1.1], [-2.2, 2.3, 0.55, -0.90, 0., 3., 1.3], [-2.205, 6.01, 13.5814, -0.2044, 0.011, 5., 0.8], [-2.7, 8.7, 13.86, -0.13, -0.18, 18., 0.8], [-2.52, 8.75, 12., 0.04, 0.18, 5., 0.8], [2.38, -4.18, 19.99, -0.69, 0.095, 17., 1.], [2.33, -8.22, -6.07, -0.52, 0.16, 4., 0.8], [-1.62, 2.049, 1.422, 1.96, 0.56, 6., 1.], [-1.89, 9.62, 1.95, 0.51, 0.21, 3., 0.6], [-1.65, 9.99, 1.57, 1.46, -0.55, 3., 0.8], [-2.7, 5., 1.5, 1., 0., 6., 1.], [-2.08, 1., -0.1, 0.167, 0., 7., 1.3], [1.56, -1., 0.1, -0.82, 0.12, 3., 1.6], [-1.806, 1.806, 0., 1., 0., 5., 1.1], [1.56, -1., 0.1, -0.82, 0., 3., 1.3], [-2.195, 10., -12., 1., 0., 3., 0.7], [-1.86, 2., 0., 1., 0.1, 4., 1.2], [-2.34, 2., 0.2, 0.1, 0., 5., 1.2], [2.6, -2., 0., 0.5, 0., 5., 1.3], [-2.5, 5., -1.9, 1., 0.188, 5., 1.], [2.409, -2.5, 0., 0.9, 0., 23., 1.2], [2.409, -2.5, -0.2, 0.81, 0., 24., 1.2], [-2.05, 3., -16.79, 1., 0., 9., 1.], [-2.32, 2.32, 0., 0.75, 0., 5., 1.2], [2.5, -2.5, 0., 0.9, 0., 3., 1.3], [1.5, -1., 0.1, -0.805, 0., 3., 1.4], ]; const MAX_XY : f32 = 1e5; const DEFAULT_SPEED : u32 = 100; const MAX_COLORS : u32 = 2111; const COLOR_SPEED : u32 = 3071; pub struct SymmetricIcons { lambda : f32, alpha : f32, beta : f32, gamma : f32, omega : f32, symmetry : u32, scale : f32, w : usize, h : usize, color_set : u32, iter : u32, speed : u32, apcx : f32, apcy : f32, rad : f32, color_list: Vec<u32>, icon : Array2D<u32>, image : Array2D<u32>, x : f32, y : f32, k : u32, } impl SymmetricIcons { pub fn new(w : usize, h : usize, color_set : u32) -> Self { let mut s = Self { lambda : 0.0, alpha : 0.0, beta : 0.0, gamma : 0.0, omega : 0.0, symmetry : 0, scale : 0.0, w : w, h : h, color_set : color_set, iter : 0, speed : DEFAULT_SPEED, apcx : 0.0, apcy : 0.0, rad : 0.0, color_list : vec![], icon : Array2D::filled_with(0_u32, w, h), image : Array2D::filled_with(0_u32, w, h), x : 0.0, y : 0.0, k : 0, }; s.set_preset(0); s } pub fn set_size(&mut self, w : usize, h : usize) { self.w = w; self.h = h; self.image = Array2D::filled_with(0_u32, w, h); self.icon = Array2D::filled_with(0_u32, w, h); self.iter = 0; self.color_list = vec![]; self.reset(); } pub fn set_preset(&mut self, i : usize) { let p = PRESETS[i % PRESETS.len()]; self.lambda = p[0]; self.alpha = p[1]; self.beta = p[2]; self.gamma = p[3]; self.omega = p[4]; self.symmetry = p[5] as u32; self.scale = if p[6] == 0. {1.} else {p[6]}; self.reset(); } pub fn set_parameters(&mut self, lambda : f32, alpha: f32, beta : f32, gamma : f32, omega : f32, symmetry : f32, scale : f32) { self.lambda = lambda; self.alpha = alpha; self.beta = beta; self.gamma = gamma; self.omega = omega; self.symmetry = if symmetry < 1. { 1 } else { symmetry as u32 }; self.scale = if scale == 0. 
{1.} else { scale }; self.reset(); } fn make_color(r : u32, g : u32, b : u32) -> u32 { (b << 16) | (g << 8) | r | 0xff00_0000 } fn make_colora(a : u32, r : u32, g : u32, b : u32) -> u32 { (a << 24) | (b << 16) | (g << 8) | r } fn get_rainbow(x : u32, y : u32) -> u32 { match x { 0 => Self::make_color(0, y, 255), 1 => Self::make_color(0, 255, 255 - y), 2 => Self::make_color(y, 255, 0), 3 => Self::make_color(255, 255 - y, 0), 4 => Self::make_color(255, 0, y), 5 => Self::make_color(255 - y, 0, 255), _ => Self::make_color(0,0,0), // black } } fn set_colors(&mut self, param_int : u32) { let mut colors = vec![0_u32; (MAX_COLORS+1) as usize]; match param_int { 0 => { for i in 0..64 { colors[i] = Self::make_color(0, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 1 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(i, i, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 2 => { for i in 0..64 { colors[i] = Self::make_color(0, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, 255); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 3 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 0) } for i in 0..256 { let local_color = Self::make_color(i, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 4 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 0) } for i in 0..256 { let local_color = Self::make_color(255, 255, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 5 => { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 0, 4 * i as u32) } for i in 0..256 { let local_color = Self::make_color(255, i, i); for j in 0..3 { colors[(1344 + j + 3 * i) as usize] = local_color } } } 6 => for i in 0..256 { colors[(i + 64)] = Self::make_colora(255, 255 - i as u32, 255 - i as u32, 255) }, 7 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255) }, 8 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255 - i as u32, 255, 255 - i as u32) }, 9 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255, 255 - i as u32) }, 10 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255 - i as u32)} , 11 => for i in 0..256 { colors[(i + 64)] = Self::make_color(255, 255 - i as u32, 255)}, _ => () } if param_int > 5 { for i in 0..64 { colors[i] = Self::make_color(4 * i as u32, 4 * i as u32, 4 * i as u32) } for j in 0..5 { for i in 0..256 { colors[(320 + j * 256 + i)] = Self::get_rainbow((param_int + j as u32) % 6, i as u32) } } for i in 0..256 { let local_color = Self::get_rainbow((param_int - 1) % 6, i); colors[(1600 + 2 * i as usize)] = local_color; colors[(1601 + 2 * i as usize)] = local_color; } } else { // <= 5 for j in 0..5 { for i in 0..256 { colors[64 + j * 256 + i] = Self::get_rainbow((param_int + j as u32) % 6, i as u32); } } } self.color_list = colors } fn
(&mut self) { self.speed = DEFAULT_SPEED; self.apcx = self.w as f32 / 2.; self.apcy = self.h as f32 / 2.; self.rad = if self.apcx > self.apcy {self.apcy} else {self.apcx}; self.k = 0; self.x = 0.01; self.y = 0.003; self.iter = 0; self.icon = Array2D::filled_with(0_u32, self.w, self.h); self.image = Array2D::filled_with(0_u32, self.w, self.h); self.set_colors(self.color_set); for m in 0..self.w { for n in 0..self.h { let color = self.get_color(self.icon[(m, n)]); self.set_point_color(m, n, color); } } } fn set_point_color(&mut self, x : usize, y : usize, color : u32) { self.image[(x, y)] = color; } fn get_color(&mut self, col : u32) -> u32 { let col = col & 0x00ffffff; if col * self.speed > MAX_COLORS { while (col * self.speed > COLOR_SPEED) && (self.speed > 3) { self.speed-=1 } self.color_list[MAX_COLORS as usize] } else { self.color_list[(col * self.speed) as usize] } } fn set_point(&mut self, x : usize, y : usize) { let icon = self.icon[(x,y)]; let color = self.get_color(icon); self.image[(x,y)] = color; self.icon[(x,y)] += 1; if icon >= 12288 { self.icon[(x,y)] = 8192 } } pub fn generate(&mut self, mod_disp : u32) -> bool { // geenrate icon, runs in a thread in 'start' self.iter+=1; if self.x.abs() > MAX_XY || self.y.abs() > MAX_XY { self.reset(); // prevent overflow } // generate new x,y let sq = self.x * self.x + self.y * self.y; // sq=x^2+y^2 let mut tx = self.x; let mut ty = self.y; // tx=pow, ty=pow for _m in 1..self.symmetry - 2 + 1 { let sqx = tx * self.x - ty * self.y; let sqy = ty * self.x + tx * self.y; tx = sqx; ty = sqy; } let sqx = self.x * tx - self.y * ty; let tmp = self.lambda + self.alpha * sq + self.beta * sqx; let x_new = tmp * self.x +self.gamma * tx - self.omega * self.y; let y_new = tmp * self.y - self.gamma * ty + self.omega * self.x; self.x = x_new; self.y = y_new; if self.k > 50 { self.set_point((self.apcx + self.x * self.rad / self.scale) as usize, (self.apcy + self.y * self.rad / self.scale) as usize); } else { self.k += 1; } self.iter % mod_disp == 0 } pub fn build(&mut self, preset : usize, n_iters : usize) -> (&[u8], (usize, usize)) { self.set_preset(preset); for _ in 0..n_iters { self.generate(1); } ( self.get_image(), self.get_size() ) } pub fn get_size(&self) -> (usize, usize) { ( self.w, self.h ) } pub fn write(&self, name : &str) { // showbinimage.py 800 800 symm_icon.bin use std::fs::File; use std::io::prelude::*; File::create(name).expect("create failed") .write_all(self.get_image()).expect("write failed"); } pub fn get_image(&self) -> &[u8] { // convert Vec<u32> to [u8] let v = self.image.as_row_major(); unsafe { std::slice::from_raw_parts( v.as_ptr() as *const u8, v.len() * std::mem::size_of::<u32>(), ) } } } pub fn _test_symmetric_icon() { let n = 2048; let mut symicn = SymmetricIcons::new(n, n, 0); symicn.set_preset(9); for _i in 0..900_000 { symicn.generate(5000); } symicn.write("symm_icon.bin"); use std::process::Command; let n = &n.to_string()[..]; Command::new("/usr/local/bin/showbinimage.py") .args(&[n, n, "symm_icon.bin"]) .output().expect("can't execute command"); } pub fn _test_array2d() { // Array2D is faster 1.67 vs 2.14 (1.3 times faster) use std::time::Instant; const N : usize = 100_000_000; const SZ : usize = 1200; let v = vec![0_usize; SZ*SZ]; let a = Array2D::filled_with(0_usize, SZ, SZ); let t = Instant::now(); for _ in 0..N { for i in 0..SZ { for j in 0..SZ { let crd = i*SZ+j; let x = v[crd]; let _xx = x+1; } } } println!("lap vec : {:?}", Instant::now()-t); let t = Instant::now(); for _ in 0..N { for r in 0..SZ { for c in 0..SZ 
{ let x = a[(r,c)]; let _xx = x+1; } } } println!("lap array2d : {:?}", Instant::now()-t); }
reset
identifier_name
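get_image in this row hands the pixel buffer to callers by reinterpreting the Vec<u32> as bytes through an unsafe slice cast. If a copy is affordable, the same byte view can be built safely; the sketch below is an alternative I am suggesting, not code from symm_icon.rs, and pixels_to_bytes is a made-up name. to_ne_bytes keeps the native byte order that the pointer cast relies on.

// Safe (copying) alternative to the pointer cast in `get_image`.
fn pixels_to_bytes(pixels: &[u32]) -> Vec<u8> {
    let mut out = Vec::with_capacity(pixels.len() * 4);
    for px in pixels {
        out.extend_from_slice(&px.to_ne_bytes());
    }
    out
}

fn main() {
    let pixels = [0xff00_00ff_u32, 0xff00_ff00];
    let bytes = pixels_to_bytes(&pixels);
    assert_eq!(bytes.len(), pixels.len() * 4);
}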
component.rs
use crate::code::CodeObject; use crate::signatures::SignatureCollection; use crate::{Engine, Module, ResourcesRequired}; use anyhow::{bail, Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; use std::mem; use std::path::Path; use std::ptr::NonNull; use std::sync::Arc; use wasmtime_environ::component::{ AllCallFunc, ComponentTypes, GlobalInitializer, InstantiateModule, StaticModuleIndex, TrampolineIndex, Translator, VMComponentOffsets, }; use wasmtime_environ::{FunctionLoc, HostPtr, ObjectKind, PrimaryMap, ScopeVec}; use wasmtime_jit::{CodeMemory, CompiledModuleInfo}; use wasmtime_runtime::component::ComponentRuntimeInfo; use wasmtime_runtime::{ MmapVec, VMArrayCallFunction, VMFuncRef, VMFunctionBody, VMNativeCallFunction, VMWasmCallFunction, }; /// A compiled WebAssembly Component. // // FIXME: need to write more docs here. #[derive(Clone)] pub struct Component { inner: Arc<ComponentInner>, } struct ComponentInner { /// Core wasm modules that the component defined internally, indexed by the /// compile-time-assigned `ModuleUpvarIndex`. static_modules: PrimaryMap<StaticModuleIndex, Module>, /// Code-related information such as the compiled artifact, type /// information, etc. /// /// Note that the `Arc` here is used to share this allocation with internal /// modules. code: Arc<CodeObject>, /// Metadata produced during compilation. info: CompiledComponentInfo, } #[derive(Serialize, Deserialize)] struct CompiledComponentInfo { /// Type information calculated during translation about this component. component: wasmtime_environ::component::Component, /// Where lowered function trampolines are located within the `text` /// section of `code_memory`. /// /// These are the /// /// 1. Wasm-call, /// 2. array-call, and /// 3. native-call /// /// function pointers that end up in a `VMFuncRef` for each /// lowering. trampolines: PrimaryMap<TrampolineIndex, AllCallFunc<FunctionLoc>>, /// The location of the wasm-to-native trampoline for the `resource.drop` /// intrinsic. resource_drop_wasm_to_native_trampoline: Option<FunctionLoc>, } pub(crate) struct AllCallFuncPointers { pub wasm_call: NonNull<VMWasmCallFunction>, pub array_call: VMArrayCallFunction, pub native_call: NonNull<VMNativeCallFunction>, } #[derive(Serialize, Deserialize)] pub(crate) struct ComponentArtifacts { info: CompiledComponentInfo, types: ComponentTypes, static_modules: PrimaryMap<StaticModuleIndex, CompiledModuleInfo>, } impl Component { /// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn new(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let bytes = bytes.as_ref(); #[cfg(feature = "wat")] let bytes = wat::parse_bytes(bytes)?; Component::from_binary(engine, &bytes) } /// Compiles a new WebAssembly component from a wasm file on disk pointed to /// by `file`. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_file(engine: &Engine, file: impl AsRef<Path>) -> Result<Component> { match Self::new( engine, &fs::read(&file).with_context(|| "failed to read input file")?, ) { Ok(m) => Ok(m), Err(e) => { cfg_if::cfg_if! 
{ if #[cfg(feature = "wat")] { let mut e = e.downcast::<wat::Error>()?; e.set_path(file); bail!(e) } else { Err(e) } } } } } /// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_binary(engine: &Engine, binary: &[u8]) -> Result<Component> { engine .check_compatible_with_native_host() .context("compilation settings are not compatible with the native host")?; let (mmap, artifacts) = Component::build_artifacts(engine, binary)?; let mut code_memory = CodeMemory::new(mmap)?; code_memory.publish()?; Component::from_parts(engine, Arc::new(code_memory), Some(artifacts)) } /// Same as [`Module::deserialize`], but for components. /// /// Note that the file referenced here must contain contents previously /// produced by [`Engine::precompile_component`] or /// [`Component::serialize`]. /// /// For more information see the [`Module::deserialize`] method. /// /// [`Module::deserialize`]: crate::Module::deserialize pub unsafe fn deserialize(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let code = engine.load_code_bytes(bytes.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Same as [`Module::deserialize_file`], but for components. /// /// For more information see the [`Component::deserialize`] and /// [`Module::deserialize_file`] methods. /// /// [`Module::deserialize_file`]: crate::Module::deserialize_file pub unsafe fn
(engine: &Engine, path: impl AsRef<Path>) -> Result<Component> { let code = engine.load_code_file(path.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Performs the compilation phase for a component, translating and /// validating the provided wasm binary to machine code. /// /// This method will compile all nested core wasm binaries in addition to /// any necessary extra functions required for operation with components. /// The output artifact here is the serialized object file contained within /// an owned mmap along with metadata about the compilation itself. #[cfg(any(feature = "cranelift", feature = "winch"))] pub(crate) fn build_artifacts( engine: &Engine, binary: &[u8], ) -> Result<(MmapVec, ComponentArtifacts)> { use crate::compiler::CompileInputs; let tunables = &engine.config().tunables; let compiler = engine.compiler(); let scope = ScopeVec::new(); let mut validator = wasmparser::Validator::new_with_features(engine.config().features.clone()); let mut types = Default::default(); let (component, mut module_translations) = Translator::new(tunables, &mut validator, &mut types, &scope) .translate(binary) .context("failed to parse WebAssembly module")?; let types = types.finish(); let compile_inputs = CompileInputs::for_component( &types, &component, module_translations.iter_mut().map(|(i, translation)| { let functions = mem::take(&mut translation.function_body_inputs); (i, &*translation, functions) }), ); let unlinked_compile_outputs = compile_inputs.compile(&engine)?; let (compiled_funcs, function_indices) = unlinked_compile_outputs.pre_link(); let mut object = compiler.object(ObjectKind::Component)?; engine.append_compiler_info(&mut object); engine.append_bti(&mut object); let (mut object, compilation_artifacts) = function_indices.link_and_append_code( object, &engine.config().tunables, compiler, compiled_funcs, module_translations, )?; let info = CompiledComponentInfo { component: component.component, trampolines: compilation_artifacts.trampolines, resource_drop_wasm_to_native_trampoline: compilation_artifacts .resource_drop_wasm_to_native_trampoline, }; let artifacts = ComponentArtifacts { info, types, static_modules: compilation_artifacts.modules, }; object.serialize_info(&artifacts); let mmap = object.finish()?; Ok((mmap, artifacts)) } /// Final assembly step for a component from its in-memory representation. /// /// If the `artifacts` are specified as `None` here then they will be /// deserialized from `code_memory`. fn from_parts( engine: &Engine, code_memory: Arc<CodeMemory>, artifacts: Option<ComponentArtifacts>, ) -> Result<Component> { let ComponentArtifacts { info, types, static_modules, } = match artifacts { Some(artifacts) => artifacts, None => bincode::deserialize(code_memory.wasmtime_info())?, }; // Validate that the component can be used with the current instance // allocator. engine.allocator().validate_component( &info.component, &VMComponentOffsets::new(HostPtr, &info.component), &|module_index| &static_modules[module_index].module, )?; // Create a signature registration with the `Engine` for all trampolines // and core wasm types found within this component, both for the // component and for all included core wasm modules. let signatures = SignatureCollection::new_for_module(engine.signatures(), types.module_types()); // Assemble the `CodeObject` artifact which is shared by all core wasm // modules as well as the final component. 
let types = Arc::new(types); let code = Arc::new(CodeObject::new(code_memory, signatures, types.into())); // Convert all information about static core wasm modules into actual // `Module` instances by converting each `CompiledModuleInfo`, the // `types` type information, and the code memory to a runtime object. let static_modules = static_modules .into_iter() .map(|(_, info)| Module::from_parts_raw(engine, code.clone(), info, false)) .collect::<Result<_>>()?; Ok(Component { inner: Arc::new(ComponentInner { static_modules, code, info, }), }) } pub(crate) fn env_component(&self) -> &wasmtime_environ::component::Component { &self.inner.info.component } pub(crate) fn static_module(&self, idx: StaticModuleIndex) -> &Module { &self.inner.static_modules[idx] } pub(crate) fn types(&self) -> &Arc<ComponentTypes> { self.inner.component_types() } pub(crate) fn signatures(&self) -> &SignatureCollection { self.inner.code.signatures() } pub(crate) fn text(&self) -> &[u8] { self.inner.code.code_memory().text() } pub(crate) fn trampoline_ptrs(&self, index: TrampolineIndex) -> AllCallFuncPointers { let AllCallFunc { wasm_call, array_call, native_call, } = &self.inner.info.trampolines[index]; AllCallFuncPointers { wasm_call: self.func(wasm_call).cast(), array_call: unsafe { mem::transmute::<NonNull<VMFunctionBody>, VMArrayCallFunction>( self.func(array_call), ) }, native_call: self.func(native_call).cast(), } } fn func(&self, loc: &FunctionLoc) -> NonNull<VMFunctionBody> { let text = self.text(); let trampoline = &text[loc.start as usize..][..loc.length as usize]; NonNull::new(trampoline.as_ptr() as *mut VMFunctionBody).unwrap() } pub(crate) fn code_object(&self) -> &Arc<CodeObject> { &self.inner.code } /// Same as [`Module::serialize`], except for a component. /// /// Note that the artifact produced here must be passed to /// [`Component::deserialize`] and is not compatible for use with /// [`Module`]. /// /// [`Module::serialize`]: crate::Module::serialize /// [`Module`]: crate::Module pub fn serialize(&self) -> Result<Vec<u8>> { Ok(self.code_object().code_memory().mmap().to_vec()) } pub(crate) fn runtime_info(&self) -> Arc<dyn ComponentRuntimeInfo> { self.inner.clone() } /// Creates a new `VMFuncRef` with all fields filled out for the destructor /// specified. /// /// The `dtor`'s own `VMFuncRef` won't have `wasm_call` filled out but this /// component may have `resource_drop_wasm_to_native_trampoline` filled out /// if necessary in which case it's filled in here. pub(crate) fn resource_drop_func_ref(&self, dtor: &crate::func::HostFunc) -> VMFuncRef { // Host functions never have their `wasm_call` filled in at this time. assert!(dtor.func_ref().wasm_call.is_none()); // Note that if `resource_drop_wasm_to_native_trampoline` is not present // then this can't be called by the component, so it's ok to leave it // blank. let wasm_call = self .inner .info .resource_drop_wasm_to_native_trampoline .as_ref() .map(|i| self.func(i).cast()); VMFuncRef { wasm_call, ..*dtor.func_ref() } } /// Returns a summary of the resources required to instantiate this /// [`Component`][crate::component::Component]. /// /// Note that when a component imports and instantiates another component or /// core module, we cannot determine ahead of time how many resources /// instantiating this component will require, and therefore this method /// will return `None` in these scenarios. 
/// /// Potential uses of the returned information: /// /// * Determining whether your pooling allocator configuration supports /// instantiating this component. /// /// * Deciding how many of which `Component` you want to instantiate within /// a fixed amount of resources, e.g. determining whether to create 5 /// instances of component X or 10 instances of component Y. /// /// # Example /// /// ``` /// # fn main() -> wasmtime::Result<()> { /// use wasmtime::{Config, Engine, component::Component}; /// /// let mut config = Config::new(); /// config.wasm_multi_memory(true); /// config.wasm_component_model(true); /// let engine = Engine::new(&config)?; /// /// let component = Component::new(&engine, &r#" /// (component /// ;; Define a core module that uses two memories. /// (core module $m /// (memory 1) /// (memory 6) /// ) /// /// ;; Instantiate that core module three times. /// (core instance $i1 (instantiate (module $m))) /// (core instance $i2 (instantiate (module $m))) /// (core instance $i3 (instantiate (module $m))) /// ) /// "#)?; /// /// let resources = component.resources_required() /// .expect("this component does not import any core modules or instances"); /// /// // Instantiating the component will require allocating two memories per /// // core instance, and there are three instances, so six total memories. /// assert_eq!(resources.num_memories, 6); /// assert_eq!(resources.max_initial_memory_size, Some(6)); /// /// // The component doesn't need any tables. /// assert_eq!(resources.num_tables, 0); /// assert_eq!(resources.max_initial_table_size, None); /// # Ok(()) } /// ``` pub fn resources_required(&self) -> Option<ResourcesRequired> { let mut resources = ResourcesRequired { num_memories: 0, max_initial_memory_size: None, num_tables: 0, max_initial_table_size: None, }; for init in &self.env_component().initializers { match init { GlobalInitializer::InstantiateModule(inst) => match inst { InstantiateModule::Static(index, _) => { let module = self.static_module(*index); resources.add(&module.resources_required()); } InstantiateModule::Import(_, _) => { // We can't statically determine the resources required // to instantiate this component. return None; } }, GlobalInitializer::LowerImport { .. } | GlobalInitializer::ExtractMemory(_) | GlobalInitializer::ExtractRealloc(_) | GlobalInitializer::ExtractPostReturn(_) | GlobalInitializer::Resource(_) => {} } } Some(resources) } } impl ComponentRuntimeInfo for ComponentInner { fn component(&self) -> &wasmtime_environ::component::Component { &self.info.component } fn component_types(&self) -> &Arc<ComponentTypes> { match self.code.types() { crate::code::Types::Component(types) => types, // The only creator of a `Component` is itself which uses the other // variant, so this shouldn't be possible. crate::code::Types::Module(_) => unreachable!(), } } }
deserialize_file
identifier_name
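For illustration, a minimal usage sketch of the ahead-of-time path that the `deserialize_file` completion above belongs to, assuming wasmtime's public `Config`/`Engine`/`Component` API with the component model enabled; the `empty.cwasm` path and the trivial `(component)` source are placeholders, not part of the original file.

```
use wasmtime::component::Component;
use wasmtime::{Config, Engine};

fn main() -> wasmtime::Result<()> {
    // The component model is off by default and must be enabled explicitly.
    let mut config = Config::new();
    config.wasm_component_model(true);
    let engine = Engine::new(&config)?;

    // Ahead-of-time compile a trivial component and persist the artifact.
    let artifact = engine.precompile_component(b"(component)")?;
    std::fs::write("empty.cwasm", &artifact)?;

    // `deserialize_file` maps the artifact back in without recompiling. It is
    // `unsafe` because the bytes must come from a compatible Wasmtime build
    // with compatible compilation settings.
    let component = unsafe { Component::deserialize_file(&engine, "empty.cwasm")? };
    let _ = component;
    Ok(())
}
```

As the doc comments above note, only artifacts produced by `Engine::precompile_component` or `Component::serialize` are valid inputs to the deserialization entry points.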
component.rs
use crate::code::CodeObject; use crate::signatures::SignatureCollection; use crate::{Engine, Module, ResourcesRequired}; use anyhow::{bail, Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; use std::mem; use std::path::Path; use std::ptr::NonNull; use std::sync::Arc; use wasmtime_environ::component::{ AllCallFunc, ComponentTypes, GlobalInitializer, InstantiateModule, StaticModuleIndex, TrampolineIndex, Translator, VMComponentOffsets, }; use wasmtime_environ::{FunctionLoc, HostPtr, ObjectKind, PrimaryMap, ScopeVec}; use wasmtime_jit::{CodeMemory, CompiledModuleInfo}; use wasmtime_runtime::component::ComponentRuntimeInfo; use wasmtime_runtime::{ MmapVec, VMArrayCallFunction, VMFuncRef, VMFunctionBody, VMNativeCallFunction, VMWasmCallFunction, }; /// A compiled WebAssembly Component. // // FIXME: need to write more docs here. #[derive(Clone)] pub struct Component { inner: Arc<ComponentInner>, } struct ComponentInner { /// Core wasm modules that the component defined internally, indexed by the /// compile-time-assigned `ModuleUpvarIndex`. static_modules: PrimaryMap<StaticModuleIndex, Module>, /// Code-related information such as the compiled artifact, type /// information, etc. /// /// Note that the `Arc` here is used to share this allocation with internal /// modules. code: Arc<CodeObject>, /// Metadata produced during compilation. info: CompiledComponentInfo, } #[derive(Serialize, Deserialize)] struct CompiledComponentInfo { /// Type information calculated during translation about this component. component: wasmtime_environ::component::Component, /// Where lowered function trampolines are located within the `text` /// section of `code_memory`. /// /// These are the /// /// 1. Wasm-call, /// 2. array-call, and /// 3. native-call /// /// function pointers that end up in a `VMFuncRef` for each /// lowering. trampolines: PrimaryMap<TrampolineIndex, AllCallFunc<FunctionLoc>>, /// The location of the wasm-to-native trampoline for the `resource.drop` /// intrinsic. resource_drop_wasm_to_native_trampoline: Option<FunctionLoc>, } pub(crate) struct AllCallFuncPointers { pub wasm_call: NonNull<VMWasmCallFunction>, pub array_call: VMArrayCallFunction, pub native_call: NonNull<VMNativeCallFunction>, } #[derive(Serialize, Deserialize)] pub(crate) struct ComponentArtifacts { info: CompiledComponentInfo, types: ComponentTypes, static_modules: PrimaryMap<StaticModuleIndex, CompiledModuleInfo>, } impl Component { /// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn new(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let bytes = bytes.as_ref(); #[cfg(feature = "wat")] let bytes = wat::parse_bytes(bytes)?; Component::from_binary(engine, &bytes) } /// Compiles a new WebAssembly component from a wasm file on disk pointed to /// by `file`. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_file(engine: &Engine, file: impl AsRef<Path>) -> Result<Component>
/// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_binary(engine: &Engine, binary: &[u8]) -> Result<Component> { engine .check_compatible_with_native_host() .context("compilation settings are not compatible with the native host")?; let (mmap, artifacts) = Component::build_artifacts(engine, binary)?; let mut code_memory = CodeMemory::new(mmap)?; code_memory.publish()?; Component::from_parts(engine, Arc::new(code_memory), Some(artifacts)) } /// Same as [`Module::deserialize`], but for components. /// /// Note that the file referenced here must contain contents previously /// produced by [`Engine::precompile_component`] or /// [`Component::serialize`]. /// /// For more information see the [`Module::deserialize`] method. /// /// [`Module::deserialize`]: crate::Module::deserialize pub unsafe fn deserialize(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let code = engine.load_code_bytes(bytes.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Same as [`Module::deserialize_file`], but for components. /// /// For more information see the [`Component::deserialize`] and /// [`Module::deserialize_file`] methods. /// /// [`Module::deserialize_file`]: crate::Module::deserialize_file pub unsafe fn deserialize_file(engine: &Engine, path: impl AsRef<Path>) -> Result<Component> { let code = engine.load_code_file(path.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Performs the compilation phase for a component, translating and /// validating the provided wasm binary to machine code. /// /// This method will compile all nested core wasm binaries in addition to /// any necessary extra functions required for operation with components. /// The output artifact here is the serialized object file contained within /// an owned mmap along with metadata about the compilation itself. 
#[cfg(any(feature = "cranelift", feature = "winch"))] pub(crate) fn build_artifacts( engine: &Engine, binary: &[u8], ) -> Result<(MmapVec, ComponentArtifacts)> { use crate::compiler::CompileInputs; let tunables = &engine.config().tunables; let compiler = engine.compiler(); let scope = ScopeVec::new(); let mut validator = wasmparser::Validator::new_with_features(engine.config().features.clone()); let mut types = Default::default(); let (component, mut module_translations) = Translator::new(tunables, &mut validator, &mut types, &scope) .translate(binary) .context("failed to parse WebAssembly module")?; let types = types.finish(); let compile_inputs = CompileInputs::for_component( &types, &component, module_translations.iter_mut().map(|(i, translation)| { let functions = mem::take(&mut translation.function_body_inputs); (i, &*translation, functions) }), ); let unlinked_compile_outputs = compile_inputs.compile(&engine)?; let (compiled_funcs, function_indices) = unlinked_compile_outputs.pre_link(); let mut object = compiler.object(ObjectKind::Component)?; engine.append_compiler_info(&mut object); engine.append_bti(&mut object); let (mut object, compilation_artifacts) = function_indices.link_and_append_code( object, &engine.config().tunables, compiler, compiled_funcs, module_translations, )?; let info = CompiledComponentInfo { component: component.component, trampolines: compilation_artifacts.trampolines, resource_drop_wasm_to_native_trampoline: compilation_artifacts .resource_drop_wasm_to_native_trampoline, }; let artifacts = ComponentArtifacts { info, types, static_modules: compilation_artifacts.modules, }; object.serialize_info(&artifacts); let mmap = object.finish()?; Ok((mmap, artifacts)) } /// Final assembly step for a component from its in-memory representation. /// /// If the `artifacts` are specified as `None` here then they will be /// deserialized from `code_memory`. fn from_parts( engine: &Engine, code_memory: Arc<CodeMemory>, artifacts: Option<ComponentArtifacts>, ) -> Result<Component> { let ComponentArtifacts { info, types, static_modules, } = match artifacts { Some(artifacts) => artifacts, None => bincode::deserialize(code_memory.wasmtime_info())?, }; // Validate that the component can be used with the current instance // allocator. engine.allocator().validate_component( &info.component, &VMComponentOffsets::new(HostPtr, &info.component), &|module_index| &static_modules[module_index].module, )?; // Create a signature registration with the `Engine` for all trampolines // and core wasm types found within this component, both for the // component and for all included core wasm modules. let signatures = SignatureCollection::new_for_module(engine.signatures(), types.module_types()); // Assemble the `CodeObject` artifact which is shared by all core wasm // modules as well as the final component. let types = Arc::new(types); let code = Arc::new(CodeObject::new(code_memory, signatures, types.into())); // Convert all information about static core wasm modules into actual // `Module` instances by converting each `CompiledModuleInfo`, the // `types` type information, and the code memory to a runtime object. 
let static_modules = static_modules .into_iter() .map(|(_, info)| Module::from_parts_raw(engine, code.clone(), info, false)) .collect::<Result<_>>()?; Ok(Component { inner: Arc::new(ComponentInner { static_modules, code, info, }), }) } pub(crate) fn env_component(&self) -> &wasmtime_environ::component::Component { &self.inner.info.component } pub(crate) fn static_module(&self, idx: StaticModuleIndex) -> &Module { &self.inner.static_modules[idx] } pub(crate) fn types(&self) -> &Arc<ComponentTypes> { self.inner.component_types() } pub(crate) fn signatures(&self) -> &SignatureCollection { self.inner.code.signatures() } pub(crate) fn text(&self) -> &[u8] { self.inner.code.code_memory().text() } pub(crate) fn trampoline_ptrs(&self, index: TrampolineIndex) -> AllCallFuncPointers { let AllCallFunc { wasm_call, array_call, native_call, } = &self.inner.info.trampolines[index]; AllCallFuncPointers { wasm_call: self.func(wasm_call).cast(), array_call: unsafe { mem::transmute::<NonNull<VMFunctionBody>, VMArrayCallFunction>( self.func(array_call), ) }, native_call: self.func(native_call).cast(), } } fn func(&self, loc: &FunctionLoc) -> NonNull<VMFunctionBody> { let text = self.text(); let trampoline = &text[loc.start as usize..][..loc.length as usize]; NonNull::new(trampoline.as_ptr() as *mut VMFunctionBody).unwrap() } pub(crate) fn code_object(&self) -> &Arc<CodeObject> { &self.inner.code } /// Same as [`Module::serialize`], except for a component. /// /// Note that the artifact produced here must be passed to /// [`Component::deserialize`] and is not compatible for use with /// [`Module`]. /// /// [`Module::serialize`]: crate::Module::serialize /// [`Module`]: crate::Module pub fn serialize(&self) -> Result<Vec<u8>> { Ok(self.code_object().code_memory().mmap().to_vec()) } pub(crate) fn runtime_info(&self) -> Arc<dyn ComponentRuntimeInfo> { self.inner.clone() } /// Creates a new `VMFuncRef` with all fields filled out for the destructor /// specified. /// /// The `dtor`'s own `VMFuncRef` won't have `wasm_call` filled out but this /// component may have `resource_drop_wasm_to_native_trampoline` filled out /// if necessary in which case it's filled in here. pub(crate) fn resource_drop_func_ref(&self, dtor: &crate::func::HostFunc) -> VMFuncRef { // Host functions never have their `wasm_call` filled in at this time. assert!(dtor.func_ref().wasm_call.is_none()); // Note that if `resource_drop_wasm_to_native_trampoline` is not present // then this can't be called by the component, so it's ok to leave it // blank. let wasm_call = self .inner .info .resource_drop_wasm_to_native_trampoline .as_ref() .map(|i| self.func(i).cast()); VMFuncRef { wasm_call, ..*dtor.func_ref() } } /// Returns a summary of the resources required to instantiate this /// [`Component`][crate::component::Component]. /// /// Note that when a component imports and instantiates another component or /// core module, we cannot determine ahead of time how many resources /// instantiating this component will require, and therefore this method /// will return `None` in these scenarios. /// /// Potential uses of the returned information: /// /// * Determining whether your pooling allocator configuration supports /// instantiating this component. /// /// * Deciding how many of which `Component` you want to instantiate within /// a fixed amount of resources, e.g. determining whether to create 5 /// instances of component X or 10 instances of component Y. 
/// /// # Example /// /// ``` /// # fn main() -> wasmtime::Result<()> { /// use wasmtime::{Config, Engine, component::Component}; /// /// let mut config = Config::new(); /// config.wasm_multi_memory(true); /// config.wasm_component_model(true); /// let engine = Engine::new(&config)?; /// /// let component = Component::new(&engine, &r#" /// (component /// ;; Define a core module that uses two memories. /// (core module $m /// (memory 1) /// (memory 6) /// ) /// /// ;; Instantiate that core module three times. /// (core instance $i1 (instantiate (module $m))) /// (core instance $i2 (instantiate (module $m))) /// (core instance $i3 (instantiate (module $m))) /// ) /// "#)?; /// /// let resources = component.resources_required() /// .expect("this component does not import any core modules or instances"); /// /// // Instantiating the component will require allocating two memories per /// // core instance, and there are three instances, so six total memories. /// assert_eq!(resources.num_memories, 6); /// assert_eq!(resources.max_initial_memory_size, Some(6)); /// /// // The component doesn't need any tables. /// assert_eq!(resources.num_tables, 0); /// assert_eq!(resources.max_initial_table_size, None); /// # Ok(()) } /// ``` pub fn resources_required(&self) -> Option<ResourcesRequired> { let mut resources = ResourcesRequired { num_memories: 0, max_initial_memory_size: None, num_tables: 0, max_initial_table_size: None, }; for init in &self.env_component().initializers { match init { GlobalInitializer::InstantiateModule(inst) => match inst { InstantiateModule::Static(index, _) => { let module = self.static_module(*index); resources.add(&module.resources_required()); } InstantiateModule::Import(_, _) => { // We can't statically determine the resources required // to instantiate this component. return None; } }, GlobalInitializer::LowerImport { .. } | GlobalInitializer::ExtractMemory(_) | GlobalInitializer::ExtractRealloc(_) | GlobalInitializer::ExtractPostReturn(_) | GlobalInitializer::Resource(_) => {} } } Some(resources) } } impl ComponentRuntimeInfo for ComponentInner { fn component(&self) -> &wasmtime_environ::component::Component { &self.info.component } fn component_types(&self) -> &Arc<ComponentTypes> { match self.code.types() { crate::code::Types::Component(types) => types, // The only creator of a `Component` is itself which uses the other // variant, so this shouldn't be possible. crate::code::Types::Module(_) => unreachable!(), } } }
{ match Self::new( engine, &fs::read(&file).with_context(|| "failed to read input file")?, ) { Ok(m) => Ok(m), Err(e) => { cfg_if::cfg_if! { if #[cfg(feature = "wat")] { let mut e = e.downcast::<wat::Error>()?; e.set_path(file); bail!(e) } else { Err(e) } } } } }
identifier_body
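The body filled in for this row is `Component::from_file`, which reads the file, compiles it like `Component::new`, and, when the `wat` feature is enabled, attaches the file path to text-format parse errors. A hedged usage sketch follows; the `hello.wat` name and contents are invented for the example.

```
use wasmtime::component::Component;
use wasmtime::{Config, Engine};

fn main() -> wasmtime::Result<()> {
    let mut config = Config::new();
    config.wasm_component_model(true);
    let engine = Engine::new(&config)?;

    // With the `wat` feature the text format is accepted directly.
    std::fs::write("hello.wat", "(component)")?;
    let from_disk = Component::from_file(&engine, "hello.wat")?;

    // Equivalent to reading the bytes yourself and calling `Component::new`,
    // except that a parse error from `from_file` also carries the file path.
    let from_memory = Component::new(&engine, "(component)")?;
    let _ = (from_disk, from_memory);
    Ok(())
}
```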
component.rs
use crate::code::CodeObject; use crate::signatures::SignatureCollection; use crate::{Engine, Module, ResourcesRequired}; use anyhow::{bail, Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; use std::mem; use std::path::Path; use std::ptr::NonNull; use std::sync::Arc; use wasmtime_environ::component::{ AllCallFunc, ComponentTypes, GlobalInitializer, InstantiateModule, StaticModuleIndex, TrampolineIndex, Translator, VMComponentOffsets, }; use wasmtime_environ::{FunctionLoc, HostPtr, ObjectKind, PrimaryMap, ScopeVec}; use wasmtime_jit::{CodeMemory, CompiledModuleInfo}; use wasmtime_runtime::component::ComponentRuntimeInfo; use wasmtime_runtime::{ MmapVec, VMArrayCallFunction, VMFuncRef, VMFunctionBody, VMNativeCallFunction, VMWasmCallFunction, }; /// A compiled WebAssembly Component. // // FIXME: need to write more docs here. #[derive(Clone)] pub struct Component { inner: Arc<ComponentInner>, } struct ComponentInner { /// Core wasm modules that the component defined internally, indexed by the /// compile-time-assigned `ModuleUpvarIndex`. static_modules: PrimaryMap<StaticModuleIndex, Module>, /// Code-related information such as the compiled artifact, type /// information, etc. /// /// Note that the `Arc` here is used to share this allocation with internal /// modules. code: Arc<CodeObject>, /// Metadata produced during compilation. info: CompiledComponentInfo, } #[derive(Serialize, Deserialize)] struct CompiledComponentInfo { /// Type information calculated during translation about this component. component: wasmtime_environ::component::Component, /// Where lowered function trampolines are located within the `text` /// section of `code_memory`. /// /// These are the /// /// 1. Wasm-call, /// 2. array-call, and /// 3. native-call /// /// function pointers that end up in a `VMFuncRef` for each /// lowering. trampolines: PrimaryMap<TrampolineIndex, AllCallFunc<FunctionLoc>>, /// The location of the wasm-to-native trampoline for the `resource.drop` /// intrinsic. resource_drop_wasm_to_native_trampoline: Option<FunctionLoc>, } pub(crate) struct AllCallFuncPointers { pub wasm_call: NonNull<VMWasmCallFunction>, pub array_call: VMArrayCallFunction, pub native_call: NonNull<VMNativeCallFunction>, } #[derive(Serialize, Deserialize)] pub(crate) struct ComponentArtifacts { info: CompiledComponentInfo, types: ComponentTypes, static_modules: PrimaryMap<StaticModuleIndex, CompiledModuleInfo>, } impl Component { /// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn new(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let bytes = bytes.as_ref(); #[cfg(feature = "wat")] let bytes = wat::parse_bytes(bytes)?; Component::from_binary(engine, &bytes) } /// Compiles a new WebAssembly component from a wasm file on disk pointed to /// by `file`. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_file(engine: &Engine, file: impl AsRef<Path>) -> Result<Component> { match Self::new( engine, &fs::read(&file).with_context(|| "failed to read input file")?, ) { Ok(m) => Ok(m), Err(e) => { cfg_if::cfg_if! 
{ if #[cfg(feature = "wat")] { let mut e = e.downcast::<wat::Error>()?; e.set_path(file); bail!(e) } else { Err(e) } } } } } /// Compiles a new WebAssembly component from the in-memory wasm image /// provided. // // FIXME: need to write more docs here. #[cfg(any(feature = "cranelift", feature = "winch"))] #[cfg_attr(nightlydoc, doc(cfg(any(feature = "cranelift", feature = "winch"))))] pub fn from_binary(engine: &Engine, binary: &[u8]) -> Result<Component> { engine .check_compatible_with_native_host() .context("compilation settings are not compatible with the native host")?; let (mmap, artifacts) = Component::build_artifacts(engine, binary)?; let mut code_memory = CodeMemory::new(mmap)?; code_memory.publish()?; Component::from_parts(engine, Arc::new(code_memory), Some(artifacts)) } /// Same as [`Module::deserialize`], but for components. /// /// Note that the file referenced here must contain contents previously /// produced by [`Engine::precompile_component`] or /// [`Component::serialize`]. /// /// For more information see the [`Module::deserialize`] method. /// /// [`Module::deserialize`]: crate::Module::deserialize pub unsafe fn deserialize(engine: &Engine, bytes: impl AsRef<[u8]>) -> Result<Component> { let code = engine.load_code_bytes(bytes.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Same as [`Module::deserialize_file`], but for components. /// /// For more information see the [`Component::deserialize`] and /// [`Module::deserialize_file`] methods. /// /// [`Module::deserialize_file`]: crate::Module::deserialize_file pub unsafe fn deserialize_file(engine: &Engine, path: impl AsRef<Path>) -> Result<Component> { let code = engine.load_code_file(path.as_ref(), ObjectKind::Component)?; Component::from_parts(engine, code, None) } /// Performs the compilation phase for a component, translating and /// validating the provided wasm binary to machine code. /// /// This method will compile all nested core wasm binaries in addition to /// any necessary extra functions required for operation with components. /// The output artifact here is the serialized object file contained within /// an owned mmap along with metadata about the compilation itself. 
#[cfg(any(feature = "cranelift", feature = "winch"))] pub(crate) fn build_artifacts( engine: &Engine, binary: &[u8], ) -> Result<(MmapVec, ComponentArtifacts)> { use crate::compiler::CompileInputs; let tunables = &engine.config().tunables; let compiler = engine.compiler(); let scope = ScopeVec::new(); let mut validator = wasmparser::Validator::new_with_features(engine.config().features.clone()); let mut types = Default::default(); let (component, mut module_translations) = Translator::new(tunables, &mut validator, &mut types, &scope) .translate(binary) .context("failed to parse WebAssembly module")?; let types = types.finish(); let compile_inputs = CompileInputs::for_component( &types, &component, module_translations.iter_mut().map(|(i, translation)| { let functions = mem::take(&mut translation.function_body_inputs); (i, &*translation, functions) }), ); let unlinked_compile_outputs = compile_inputs.compile(&engine)?; let (compiled_funcs, function_indices) = unlinked_compile_outputs.pre_link(); let mut object = compiler.object(ObjectKind::Component)?; engine.append_compiler_info(&mut object); engine.append_bti(&mut object); let (mut object, compilation_artifacts) = function_indices.link_and_append_code( object, &engine.config().tunables, compiler, compiled_funcs, module_translations, )?; let info = CompiledComponentInfo { component: component.component, trampolines: compilation_artifacts.trampolines, resource_drop_wasm_to_native_trampoline: compilation_artifacts .resource_drop_wasm_to_native_trampoline, }; let artifacts = ComponentArtifacts { info, types, static_modules: compilation_artifacts.modules, }; object.serialize_info(&artifacts); let mmap = object.finish()?; Ok((mmap, artifacts)) } /// Final assembly step for a component from its in-memory representation. /// /// If the `artifacts` are specified as `None` here then they will be /// deserialized from `code_memory`. fn from_parts( engine: &Engine, code_memory: Arc<CodeMemory>, artifacts: Option<ComponentArtifacts>, ) -> Result<Component> { let ComponentArtifacts { info, types, static_modules, } = match artifacts { Some(artifacts) => artifacts, None => bincode::deserialize(code_memory.wasmtime_info())?, }; // Validate that the component can be used with the current instance // allocator. engine.allocator().validate_component( &info.component, &VMComponentOffsets::new(HostPtr, &info.component), &|module_index| &static_modules[module_index].module, )?; // Create a signature registration with the `Engine` for all trampolines // and core wasm types found within this component, both for the // component and for all included core wasm modules. let signatures = SignatureCollection::new_for_module(engine.signatures(), types.module_types()); // Assemble the `CodeObject` artifact which is shared by all core wasm // modules as well as the final component. let types = Arc::new(types); let code = Arc::new(CodeObject::new(code_memory, signatures, types.into())); // Convert all information about static core wasm modules into actual // `Module` instances by converting each `CompiledModuleInfo`, the // `types` type information, and the code memory to a runtime object. 
let static_modules = static_modules .into_iter() .map(|(_, info)| Module::from_parts_raw(engine, code.clone(), info, false)) .collect::<Result<_>>()?; Ok(Component { inner: Arc::new(ComponentInner { static_modules, code, info, }), }) } pub(crate) fn env_component(&self) -> &wasmtime_environ::component::Component { &self.inner.info.component } pub(crate) fn static_module(&self, idx: StaticModuleIndex) -> &Module { &self.inner.static_modules[idx] } pub(crate) fn types(&self) -> &Arc<ComponentTypes> { self.inner.component_types() } pub(crate) fn signatures(&self) -> &SignatureCollection { self.inner.code.signatures() } pub(crate) fn text(&self) -> &[u8] { self.inner.code.code_memory().text() } pub(crate) fn trampoline_ptrs(&self, index: TrampolineIndex) -> AllCallFuncPointers { let AllCallFunc { wasm_call, array_call, native_call, } = &self.inner.info.trampolines[index]; AllCallFuncPointers { wasm_call: self.func(wasm_call).cast(), array_call: unsafe { mem::transmute::<NonNull<VMFunctionBody>, VMArrayCallFunction>( self.func(array_call), ) }, native_call: self.func(native_call).cast(), } } fn func(&self, loc: &FunctionLoc) -> NonNull<VMFunctionBody> { let text = self.text(); let trampoline = &text[loc.start as usize..][..loc.length as usize]; NonNull::new(trampoline.as_ptr() as *mut VMFunctionBody).unwrap() } pub(crate) fn code_object(&self) -> &Arc<CodeObject> { &self.inner.code } /// Same as [`Module::serialize`], except for a component. /// /// Note that the artifact produced here must be passed to /// [`Component::deserialize`] and is not compatible for use with /// [`Module`]. /// /// [`Module::serialize`]: crate::Module::serialize /// [`Module`]: crate::Module
} pub(crate) fn runtime_info(&self) -> Arc<dyn ComponentRuntimeInfo> { self.inner.clone() } /// Creates a new `VMFuncRef` with all fields filled out for the destructor /// specified. /// /// The `dtor`'s own `VMFuncRef` won't have `wasm_call` filled out but this /// component may have `resource_drop_wasm_to_native_trampoline` filled out /// if necessary in which case it's filled in here. pub(crate) fn resource_drop_func_ref(&self, dtor: &crate::func::HostFunc) -> VMFuncRef { // Host functions never have their `wasm_call` filled in at this time. assert!(dtor.func_ref().wasm_call.is_none()); // Note that if `resource_drop_wasm_to_native_trampoline` is not present // then this can't be called by the component, so it's ok to leave it // blank. let wasm_call = self .inner .info .resource_drop_wasm_to_native_trampoline .as_ref() .map(|i| self.func(i).cast()); VMFuncRef { wasm_call, ..*dtor.func_ref() } } /// Returns a summary of the resources required to instantiate this /// [`Component`][crate::component::Component]. /// /// Note that when a component imports and instantiates another component or /// core module, we cannot determine ahead of time how many resources /// instantiating this component will require, and therefore this method /// will return `None` in these scenarios. /// /// Potential uses of the returned information: /// /// * Determining whether your pooling allocator configuration supports /// instantiating this component. /// /// * Deciding how many of which `Component` you want to instantiate within /// a fixed amount of resources, e.g. determining whether to create 5 /// instances of component X or 10 instances of component Y. /// /// # Example /// /// ``` /// # fn main() -> wasmtime::Result<()> { /// use wasmtime::{Config, Engine, component::Component}; /// /// let mut config = Config::new(); /// config.wasm_multi_memory(true); /// config.wasm_component_model(true); /// let engine = Engine::new(&config)?; /// /// let component = Component::new(&engine, &r#" /// (component /// ;; Define a core module that uses two memories. /// (core module $m /// (memory 1) /// (memory 6) /// ) /// /// ;; Instantiate that core module three times. /// (core instance $i1 (instantiate (module $m))) /// (core instance $i2 (instantiate (module $m))) /// (core instance $i3 (instantiate (module $m))) /// ) /// "#)?; /// /// let resources = component.resources_required() /// .expect("this component does not import any core modules or instances"); /// /// // Instantiating the component will require allocating two memories per /// // core instance, and there are three instances, so six total memories. /// assert_eq!(resources.num_memories, 6); /// assert_eq!(resources.max_initial_memory_size, Some(6)); /// /// // The component doesn't need any tables. /// assert_eq!(resources.num_tables, 0); /// assert_eq!(resources.max_initial_table_size, None); /// # Ok(()) } /// ``` pub fn resources_required(&self) -> Option<ResourcesRequired> { let mut resources = ResourcesRequired { num_memories: 0, max_initial_memory_size: None, num_tables: 0, max_initial_table_size: None, }; for init in &self.env_component().initializers { match init { GlobalInitializer::InstantiateModule(inst) => match inst { InstantiateModule::Static(index, _) => { let module = self.static_module(*index); resources.add(&module.resources_required()); } InstantiateModule::Import(_, _) => { // We can't statically determine the resources required // to instantiate this component. return None; } }, GlobalInitializer::LowerImport { .. 
} | GlobalInitializer::ExtractMemory(_) | GlobalInitializer::ExtractRealloc(_) | GlobalInitializer::ExtractPostReturn(_) | GlobalInitializer::Resource(_) => {} } } Some(resources) } } impl ComponentRuntimeInfo for ComponentInner { fn component(&self) -> &wasmtime_environ::component::Component { &self.info.component } fn component_types(&self) -> &Arc<ComponentTypes> { match self.code.types() { crate::code::Types::Component(types) => types, // The only creator of a `Component` is itself which uses the other // variant, so this shouldn't be possible. crate::code::Types::Module(_) => unreachable!(), } } }
pub fn serialize(&self) -> Result<Vec<u8>> { Ok(self.code_object().code_memory().mmap().to_vec())
random_line_split
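The line restored in this row is the body of `Component::serialize`. A short round-trip sketch, assuming wasmtime's component `Linker` and `Store` for the final instantiation step (both live outside the file shown here):

```
use wasmtime::component::{Component, Linker};
use wasmtime::{Config, Engine, Store};

fn main() -> wasmtime::Result<()> {
    let mut config = Config::new();
    config.wasm_component_model(true);
    let engine = Engine::new(&config)?;

    let component = Component::new(&engine, "(component)")?;

    // The serialized artifact is only valid for `Component::deserialize`;
    // it is not interchangeable with `Module::serialize` output.
    let bytes = component.serialize()?;
    let component = unsafe { Component::deserialize(&engine, &bytes)? };

    // A deserialized component instantiates like a freshly compiled one.
    let mut store = Store::new(&engine, ());
    let linker = Linker::new(&engine);
    let _instance = linker.instantiate(&mut store, &component)?;
    Ok(())
}
```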
utils.ts
import $ from 'jquery' import 'bootstrap' import { Frame } from 'scenejs' import { createPopper, Placement } from '@popperjs/core' import domtoimage from 'dom-to-image' export default { /** * 把 selected作为一组,更新他们的grouped结构体 * @param selectedItems * @param isGrouped */ setSelectedItemAsGroup (designVm:any, selectedItems, isGrouped){ const grouped = {} for (const itemid in selectedItems) { const pageid = selectedItems[itemid].pageid const pageindex = designVm.$store.getters.findPageIndex(pageid) const shadow = designVm.$store.getters.getDesignItemValue(pageindex, itemid, 'shadow') grouped[itemid] = pageid if (shadow){ for (const pageid in shadow) { grouped[shadow[pageid]] = pageid } } } for (const itemid in grouped) { const pageid = grouped[itemid] designVm.$store.commit('setDesignItemValue', { pageid, itemid, props: { grouped: isGrouped ? grouped : undefined }, needSyncShadown: false }) } }, saveJwt (jwt) { window.sessionStorage.setItem('jwt', jwt) }, getJwt () { return window.sessionStorage.getItem('jwt') }, saveDesign (api, design, cb: any = null) { const jwt = this.getJwt() if (!design.pages || design.pages.length === 0) return // console.log(design) const files = {} let fileCount = 0 const promises: any = [] for (const pageindex in design.pages) { const page = design.pages[pageindex] const node = $(`#${page.id} .scaled-content`).get(0) if (!node) continue promises.push(domtoimage.toBlob(node)) } new Promise((resolve) => { if (promises.length === 0) { resolve() return } for (let pageindex = 0; pageindex < promises.length; pageindex++) { promises[pageindex].then(blob => { files[`preview_url[${pageindex}]`] = new File([blob], `preview-${pageindex}.png`) fileCount++ if (fileCount >= design.pages.length) { resolve() } }).catch(err => { console.error('domtoimage oops, something went wrong!', err) fileCount++ if (fileCount >= design.pages.length) { resolve() } }) } }).then((blobs) => { // console.log(files) this.post(api + 'design/save.json', { meta: JSON.stringify(design) }, files, (rst) => { if (cb) { cb(rst) } if (!rst || !rst.success) { this.toast('保存失败', rst.msg || '自动保存失败') } }, 'json') }) }, toast (title, msg) { const dialogId = this.uuid(8, 16, 'tst') $('body').append(`<div class=" d-flex justify-content-center align-items-center"> <div class="toast" role="alert" data-delay="3000" id="${dialogId}" aria-live="assertive" aria-atomic="true" style="position: absolute; top: 10px; right: 10px;z-index:1051;opacity:1"> <div class="toast-header"> <img src="/img/logo.png" class="rounded mr-2" style="height: 16px"> <strong class="mr-auto">${title}</strong> <button type="button" class="ml-2 mb-1 close no-outline" data-dismiss="toast" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="toast-body"> ${msg} </div> </div></div>`) $(`#${dialogId}`).toast('show') }, closeDialog (dialogId) { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() delete window[dialogId + 'okCb'] }, loading (content, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') const loadingCb = function () { $(`#${dialogId}`).modal('hide') } window[dialogId + 'okCb'] = loadingCb $('body').append(` <div class="modal no-user-select" tabindex="-1" data-backdrop="static" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-sm modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-body text-center"> <div class="text-center m-3 text-white text-muted">${content}</div> <div class="progress"> <div class="progress-bar progress-bar-striped 
progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"></div> </div> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, openDialog (title, content, okText, cancelText = '', okCb: any = null, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') if (!okCb) { okCb = function () { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() } } window[dialogId + 'okCb'] = okCb $('body').append(` <div class="modal" tabindex="-1" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-header no-border"> <h5 class="modal-title ${title ? '' : 'd-none'}">${title}</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> ${content} </div> <div class="modal-footer no-border"> <button type="button" class="btn btn-secondary ${cancelText ? '' : 'd-none'}" data-dismiss="modal">${cancelText}</button> <button type="button" class="btn btn-primary" onclick="${dialogId}okCb('${dialogId}')">${okText}</button> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, get (url, data = {}, cb) { $.ajax({ headers: { token: this.getJwt() }, url: url, data: data, crossDomain: true, success: (data) => cb(data), dataType: 'json' }) }, post (url, data = {}, files: Record<string, any>, cb) { const fd: FormData = new FormData() for (const key in data) { fd.append(key, data[key]) } for (const file in files) { fd.append(file, files[file]) } $.ajax({ headers: { token: this.getJwt() }, method: 'post', processData: false, contentType: false, url: url, data: fd, crossDomain: true, success: (data) => cb(data), error: (data) => cb(data), dataType: 'json'
objs.forEach(obj => { if (obj) { Object.keys(obj).forEach(key => { const val = obj[key] if (this.isPlainObject(val)) { // 递归 if (this.isPlainObject(result[key])) { result[key] = this.deepMerge(result[key], val) } else { result[key] = this.deepMerge(val) } } else { result[key] = val } }) } }) // console.log(result) return result }, isPlainObject (val) { return toString.call(val) === '[object Object]' }, /** * 切换显示Popper弹出菜单 * * @param vueObject Vue对象,必须有notifyDismissAllMenu方法 * @param openWhere * @param openForm * @param trigger * @param placement * @param offset * @param dismissAllMenu */ togglePopper: function (vueObject, openWhere, openForm, trigger, placement: Placement = 'bottom-end', offset = [0, 10], dismissAllMenu = true) { const oldState = vueObject[trigger] if (dismissAllMenu) vueObject.notifyDismissAllMenu({ trigger }) vueObject[trigger] = !oldState if (!vueObject[trigger]) return vueObject.$nextTick(function () { const el = this.$refs[openForm].$el || this.$refs[openForm] // 是组件的话,需要用里面的el const popper = createPopper(openWhere, el, { placement, modifiers: [ { name: 'offset', options: { offset: offset } } ] }) }) }, /** * 获取当前在屏幕中显示的页面id * @return Array<string> */ getPageIdInScreen: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const pageInScreen: Array<string> = [] $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const pageid = $(el).attr('data-page-ref') as string pageInScreen.push(pageid) }) return pageInScreen }, /** * 判断给定的元素是否完全进入了指定的页面 * @return boolean */ elementIsInPage (el: HTMLElement | SVGElement, page: HTMLElement) { if (!el || !page) return false const elRect: DOMRect = el.getBoundingClientRect() const pageRect: DOMRect = page.getBoundingClientRect() if (elRect.left >= pageRect.left && elRect.right <= pageRect.right && elRect.top >= pageRect.top && elRect.bottom <= pageRect.bottom) { return true } return false }, /** * 获取当前在屏幕中显示的,在屏幕中心的页面id, 用页面的y坐标和中心点的y坐标的距离差最小的那个,如果距离屏幕中心的页面有被选中的,则返回选中的 */ getPageIdInScreenCenter: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const screenCenterX = clientWidth / 2 const screenCenterY = clientHeight / 2 // console.log(`screen center: ${screenCenterX}x${screenCenterY}`) const pageInScreen = {} let selected = '' let selectedPageDist = 0 $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const centerY = rect.height / 2 + rect.top const centerX = rect.width / 2 + rect.left const dist = Math.sqrt(Math.pow(centerY - screenCenterY, 2) + Math.pow(centerX - screenCenterX, 2)) const pageid = $(el).attr('data-page-ref') as string pageInScreen[dist] = pageid if ($(el).hasClass('selected')) { selectedPageDist = dist selected = pageid } }) // console.log(pageInScreen) const dists = Object.keys(pageInScreen).sort() if (selected) return selected return pageInScreen[dists[0]] || '' }, isEmptyObject: function (e) { for (const t in e) { return !1 } return !0 }, /** * 按默认2页对页显示,根据页面的顺序计算他所在的对页分组 * @param pageIndex */ getGroupIndex: function (pageIndex: number) { if (pageIndex === 0) return 0 // 首页 if (pageIndex % 2 === 0) { // 偶数页 return pageIndex / 2 } else { // 奇数页 return pageIndex - (pageIndex - 1) / 2 } }, 
formatFloat: function (f) { const v = (parseFloat(f) || 0).toFixed(2) if (v.match(/\.00/)) { return parseInt(v) } else { return parseFloat(v) } }, unitName: function (unit) { const map = { px: '像素', mm: '毫米', cm: '厘米', in: '英寸' } return map[unit] || unit }, isDragInCorners: function (direction: number[]) { return (direction[0] === -1 && direction[1] === -1) || (direction[0] === -1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === -1) }, /** * 如果传入assetSide则判断是否拖动指定的边,如果不传入则判断是否拖动任意边 * @param direction * @param assetSide t b l r */ isDragInEdge: function (direction: number[], assertSide = '') { if (assertSide === 't') { return (direction[0] === 0 && direction[1] === -1) } if (assertSide === 'r') { return (direction[0] === 1 && direction[1] === 0) } if (assertSide === 'b') { return (direction[0] === 0 && direction[1] === 1) } if (assertSide === 'l') { return (direction[0] === -1 && direction[1] === 0) } return (direction[0] === 1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === 1) || (direction[0] === -1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === -1) }, /** * TODO 该函数为什么不通过computed来实现呢? * @param el * @param frame */ applyFrameCSS: function (el: any, frame: Frame) { if (el) { // console.log('applyFrameCSS ' + frame.toCSS()) // const el: any = moveable.target el.style.cssText = frame.toCSS() } }, getTransform (el: HTMLElement): Frame { const json: any = { // left: 'left', // top: 'top', // width: '', // height: '', transform: { // translateX: '0px', // translateY: '0px', // rotate: '0deg', // rotateY: '0deg', // rotateX: '0deg', // scaleX: 1, // scaleY: 1, // matrix3d: undefined } } const transform = el.style?.transform let items: any if ((items = transform.match(/translateX\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateX = this.formatFloat(items[0].replace(/translateX\(|px\)/g, '')) + 'px' } if ((items = transform.match(/translateY\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateY = parseFloat(items[0].replace(/translateY\(|px\)/g, '')) + 'px' } if ((items = transform.match(/rotate\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotate = parseFloat(items[0].replace(/rotate\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateX\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateX = parseFloat(items[0].replace(/rotateX\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateY\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateY = parseFloat(items[0].replace(/rotateY\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/scaleX\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleX = parseFloat(items[0].replace(/scaleX\(|\)/g, '')) } if ((items = transform.match(/scaleY\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleY = parseFloat(items[0].replace(/scaleY\(|\)/g, '')) } if ((items = transform.match(/matrix3d\(.+\)/ig))) { json.transform.matrix3d = items[0].replace(/matrix3d\(|\)/g, '') } else { delete json.transform.matrix3d } // console.log(el.style.width) json.width = $(el).width() + 'px' json.height = $(el).height() + 'px' json.top = el.style?.top json.left = el.style?.left return new Frame(json) }, log: (msg) => { if ($('#log-console').length === 0) { $('body').append("<div id='log-console'></div>") } $('#log-console').append('<p>' + JSON.stringify(msg) + '</p>') }, /** * * @param {type} len 长度 * @param {type} radix 进制 * @returns {String} */ uuid: (len, radix, prefix = '') => { const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('') radix = radix || 
chars.length const uuid: any = [] if (len) { // Compact form for (let i = 0; i < len; i++) { uuid[i] = chars[0 | Math.random() * radix] } } else { // rfc4122, version 4 form let r // rfc4122 requires these characters uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-' uuid[14] = '4' // Fill in random data. At i==19 set the high bits of clock sequence as // per rfc4122, sec. 4.1.5 for (let i = 0; i < 36; i++) { if (!uuid[i]) { r = 0 | Math.random() * 16 uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r] } } } return (prefix || '') + uuid.join('') } }
}) }, deepMerge (...objs) { const result = Object.create(null)
random_line_split
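The split line restored in this row is the head of `deepMerge` in utils.ts: plain objects merge key by key, every other value is overwritten by the later one. For illustration only, a rough Rust rendering of the same recursion over `serde_json::Value` (the `serde_json` dependency is an assumption of this sketch, not of the project):

```
use serde_json::{json, Value};

/// Merge `b` into `a`: objects merge recursively, all other values win by
/// replacement -- the same shape as the `deepMerge` helper in utils.ts.
fn deep_merge(a: &mut Value, b: &Value) {
    match (a, b) {
        (Value::Object(a_map), Value::Object(b_map)) => {
            for (k, v) in b_map {
                deep_merge(a_map.entry(k.clone()).or_insert(Value::Null), v);
            }
        }
        (slot, other) => *slot = other.clone(),
    }
}

fn main() {
    let mut base = json!({ "page": { "w": 100, "h": 200 }, "unit": "px" });
    let patch = json!({ "page": { "h": 300 }, "unit": "mm" });
    deep_merge(&mut base, &patch);
    assert_eq!(base, json!({ "page": { "w": 100, "h": 300 }, "unit": "mm" }));
}
```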
utils.ts
import $ from 'jquery' import 'bootstrap' import { Frame } from 'scenejs' import { createPopper, Placement } from '@popperjs/core' import domtoimage from 'dom-to-image' export default { /** * 把 selected作为一组,更新他们的grouped结构体 * @param selectedItems * @param isGrouped */ setSelectedItemAsGroup (designVm:any, selectedItems, isGrouped){ const grouped = {} for (const itemid in selectedItems) { const pageid = selectedItems[itemid].pageid const pageindex = designVm.$store.getters.findPageIndex(pageid) const shadow = designVm.$store.getters.getDesignItemValue(pageindex, itemid, 'shadow') grouped[itemid] = pageid if (shadow){ for (const pageid in shadow) { grouped[shadow[pageid]] = pageid } } } for (const itemid in grouped) { const pageid = grouped[itemid] designVm.$store.commit('setDesignItemValue', { pageid, itemid, props: { grouped: isGrouped ? grouped : undefined }, needSyncShadown: false }) } }, saveJwt (jwt) { window.s
torage.setItem('jwt', jwt) }, getJwt () { return window.sessionStorage.getItem('jwt') }, saveDesign (api, design, cb: any = null) { const jwt = this.getJwt() if (!design.pages || design.pages.length === 0) return // console.log(design) const files = {} let fileCount = 0 const promises: any = [] for (const pageindex in design.pages) { const page = design.pages[pageindex] const node = $(`#${page.id} .scaled-content`).get(0) if (!node) continue promises.push(domtoimage.toBlob(node)) } new Promise((resolve) => { if (promises.length === 0) { resolve() return } for (let pageindex = 0; pageindex < promises.length; pageindex++) { promises[pageindex].then(blob => { files[`preview_url[${pageindex}]`] = new File([blob], `preview-${pageindex}.png`) fileCount++ if (fileCount >= design.pages.length) { resolve() } }).catch(err => { console.error('domtoimage oops, something went wrong!', err) fileCount++ if (fileCount >= design.pages.length) { resolve() } }) } }).then((blobs) => { // console.log(files) this.post(api + 'design/save.json', { meta: JSON.stringify(design) }, files, (rst) => { if (cb) { cb(rst) } if (!rst || !rst.success) { this.toast('保存失败', rst.msg || '自动保存失败') } }, 'json') }) }, toast (title, msg) { const dialogId = this.uuid(8, 16, 'tst') $('body').append(`<div class=" d-flex justify-content-center align-items-center"> <div class="toast" role="alert" data-delay="3000" id="${dialogId}" aria-live="assertive" aria-atomic="true" style="position: absolute; top: 10px; right: 10px;z-index:1051;opacity:1"> <div class="toast-header"> <img src="/img/logo.png" class="rounded mr-2" style="height: 16px"> <strong class="mr-auto">${title}</strong> <button type="button" class="ml-2 mb-1 close no-outline" data-dismiss="toast" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="toast-body"> ${msg} </div> </div></div>`) $(`#${dialogId}`).toast('show') }, closeDialog (dialogId) { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() delete window[dialogId + 'okCb'] }, loading (content, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') const loadingCb = function () { $(`#${dialogId}`).modal('hide') } window[dialogId + 'okCb'] = loadingCb $('body').append(` <div class="modal no-user-select" tabindex="-1" data-backdrop="static" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-sm modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-body text-center"> <div class="text-center m-3 text-white text-muted">${content}</div> <div class="progress"> <div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"></div> </div> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, openDialog (title, content, okText, cancelText = '', okCb: any = null, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') if (!okCb) { okCb = function () { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() } } window[dialogId + 'okCb'] = okCb $('body').append(` <div class="modal" tabindex="-1" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-header no-border"> <h5 class="modal-title ${title ? 
'' : 'd-none'}">${title}</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> ${content} </div> <div class="modal-footer no-border"> <button type="button" class="btn btn-secondary ${cancelText ? '' : 'd-none'}" data-dismiss="modal">${cancelText}</button> <button type="button" class="btn btn-primary" onclick="${dialogId}okCb('${dialogId}')">${okText}</button> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, get (url, data = {}, cb) { $.ajax({ headers: { token: this.getJwt() }, url: url, data: data, crossDomain: true, success: (data) => cb(data), dataType: 'json' }) }, post (url, data = {}, files: Record<string, any>, cb) { const fd: FormData = new FormData() for (const key in data) { fd.append(key, data[key]) } for (const file in files) { fd.append(file, files[file]) } $.ajax({ headers: { token: this.getJwt() }, method: 'post', processData: false, contentType: false, url: url, data: fd, crossDomain: true, success: (data) => cb(data), error: (data) => cb(data), dataType: 'json' }) }, deepMerge (...objs) { const result = Object.create(null) objs.forEach(obj => { if (obj) { Object.keys(obj).forEach(key => { const val = obj[key] if (this.isPlainObject(val)) { // 递归 if (this.isPlainObject(result[key])) { result[key] = this.deepMerge(result[key], val) } else { result[key] = this.deepMerge(val) } } else { result[key] = val } }) } }) // console.log(result) return result }, isPlainObject (val) { return toString.call(val) === '[object Object]' }, /** * 切换显示Popper弹出菜单 * * @param vueObject Vue对象,必须有notifyDismissAllMenu方法 * @param openWhere * @param openForm * @param trigger * @param placement * @param offset * @param dismissAllMenu */ togglePopper: function (vueObject, openWhere, openForm, trigger, placement: Placement = 'bottom-end', offset = [0, 10], dismissAllMenu = true) { const oldState = vueObject[trigger] if (dismissAllMenu) vueObject.notifyDismissAllMenu({ trigger }) vueObject[trigger] = !oldState if (!vueObject[trigger]) return vueObject.$nextTick(function () { const el = this.$refs[openForm].$el || this.$refs[openForm] // 是组件的话,需要用里面的el const popper = createPopper(openWhere, el, { placement, modifiers: [ { name: 'offset', options: { offset: offset } } ] }) }) }, /** * 获取当前在屏幕中显示的页面id * @return Array<string> */ getPageIdInScreen: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const pageInScreen: Array<string> = [] $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const pageid = $(el).attr('data-page-ref') as string pageInScreen.push(pageid) }) return pageInScreen }, /** * 判断给定的元素是否完全进入了指定的页面 * @return boolean */ elementIsInPage (el: HTMLElement | SVGElement, page: HTMLElement) { if (!el || !page) return false const elRect: DOMRect = el.getBoundingClientRect() const pageRect: DOMRect = page.getBoundingClientRect() if (elRect.left >= pageRect.left && elRect.right <= pageRect.right && elRect.top >= pageRect.top && elRect.bottom <= pageRect.bottom) { return true } return false }, /** * 获取当前在屏幕中显示的,在屏幕中心的页面id, 用页面的y坐标和中心点的y坐标的距离差最小的那个,如果距离屏幕中心的页面有被选中的,则返回选中的 */ getPageIdInScreenCenter: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const screenCenterX = clientWidth / 2 const 
screenCenterY = clientHeight / 2 // console.log(`screen center: ${screenCenterX}x${screenCenterY}`) const pageInScreen = {} let selected = '' let selectedPageDist = 0 $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const centerY = rect.height / 2 + rect.top const centerX = rect.width / 2 + rect.left const dist = Math.sqrt(Math.pow(centerY - screenCenterY, 2) + Math.pow(centerX - screenCenterX, 2)) const pageid = $(el).attr('data-page-ref') as string pageInScreen[dist] = pageid if ($(el).hasClass('selected')) { selectedPageDist = dist selected = pageid } }) // console.log(pageInScreen) const dists = Object.keys(pageInScreen).sort() if (selected) return selected return pageInScreen[dists[0]] || '' }, isEmptyObject: function (e) { for (const t in e) { return !1 } return !0 }, /** * 按默认2页对页显示,根据页面的顺序计算他所在的对页分组 * @param pageIndex */ getGroupIndex: function (pageIndex: number) { if (pageIndex === 0) return 0 // 首页 if (pageIndex % 2 === 0) { // 偶数页 return pageIndex / 2 } else { // 奇数页 return pageIndex - (pageIndex - 1) / 2 } }, formatFloat: function (f) { const v = (parseFloat(f) || 0).toFixed(2) if (v.match(/\.00/)) { return parseInt(v) } else { return parseFloat(v) } }, unitName: function (unit) { const map = { px: '像素', mm: '毫米', cm: '厘米', in: '英寸' } return map[unit] || unit }, isDragInCorners: function (direction: number[]) { return (direction[0] === -1 && direction[1] === -1) || (direction[0] === -1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === -1) }, /** * 如果传入assetSide则判断是否拖动指定的边,如果不传入则判断是否拖动任意边 * @param direction * @param assetSide t b l r */ isDragInEdge: function (direction: number[], assertSide = '') { if (assertSide === 't') { return (direction[0] === 0 && direction[1] === -1) } if (assertSide === 'r') { return (direction[0] === 1 && direction[1] === 0) } if (assertSide === 'b') { return (direction[0] === 0 && direction[1] === 1) } if (assertSide === 'l') { return (direction[0] === -1 && direction[1] === 0) } return (direction[0] === 1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === 1) || (direction[0] === -1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === -1) }, /** * TODO 该函数为什么不通过computed来实现呢? 
* @param el * @param frame */ applyFrameCSS: function (el: any, frame: Frame) { if (el) { // console.log('applyFrameCSS ' + frame.toCSS()) // const el: any = moveable.target el.style.cssText = frame.toCSS() } }, getTransform (el: HTMLElement): Frame { const json: any = { // left: 'left', // top: 'top', // width: '', // height: '', transform: { // translateX: '0px', // translateY: '0px', // rotate: '0deg', // rotateY: '0deg', // rotateX: '0deg', // scaleX: 1, // scaleY: 1, // matrix3d: undefined } } const transform = el.style?.transform let items: any if ((items = transform.match(/translateX\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateX = this.formatFloat(items[0].replace(/translateX\(|px\)/g, '')) + 'px' } if ((items = transform.match(/translateY\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateY = parseFloat(items[0].replace(/translateY\(|px\)/g, '')) + 'px' } if ((items = transform.match(/rotate\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotate = parseFloat(items[0].replace(/rotate\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateX\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateX = parseFloat(items[0].replace(/rotateX\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateY\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateY = parseFloat(items[0].replace(/rotateY\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/scaleX\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleX = parseFloat(items[0].replace(/scaleX\(|\)/g, '')) } if ((items = transform.match(/scaleY\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleY = parseFloat(items[0].replace(/scaleY\(|\)/g, '')) } if ((items = transform.match(/matrix3d\(.+\)/ig))) { json.transform.matrix3d = items[0].replace(/matrix3d\(|\)/g, '') } else { delete json.transform.matrix3d } // console.log(el.style.width) json.width = $(el).width() + 'px' json.height = $(el).height() + 'px' json.top = el.style?.top json.left = el.style?.left return new Frame(json) }, log: (msg) => { if ($('#log-console').length === 0) { $('body').append("<div id='log-console'></div>") } $('#log-console').append('<p>' + JSON.stringify(msg) + '</p>') }, /** * * @param {type} len 长度 * @param {type} radix 进制 * @returns {String} */ uuid: (len, radix, prefix = '') => { const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('') radix = radix || chars.length const uuid: any = [] if (len) { // Compact form for (let i = 0; i < len; i++) { uuid[i] = chars[0 | Math.random() * radix] } } else { // rfc4122, version 4 form let r // rfc4122 requires these characters uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-' uuid[14] = '4' // Fill in random data. At i==19 set the high bits of clock sequence as // per rfc4122, sec. 4.1.5 for (let i = 0; i < 36; i++) { if (!uuid[i]) { r = 0 | Math.random() * 16 uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r] } } } return (prefix || '') + uuid.join('') } }
essionS
identifier_name
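As an aside on the `uuid` helper that closes the utils.ts record above: with a length argument it emits a compact random id in the given radix, and without one it builds an RFC 4122 version-4 style UUID, forcing position 14 to '4' and packing the variant bits into position 19 via (r & 0x3) | 0x8. The Python sketch below re-expresses that logic for illustration only; the function name and the module-level CHARS constant are mine, and ordinary Python code would simply call uuid.uuid4().

import random

CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

def uuid_like(length=None, radix=None, prefix=""):
    """Illustrative mirror of the TypeScript `uuid` helper above (sketch only)."""
    radix = radix or len(CHARS)
    if length:
        # Compact form: `length` random digits in the requested radix.
        return prefix + "".join(CHARS[random.randrange(radix)] for _ in range(length))
    # RFC 4122 v4-style form: fixed dashes, version nibble, variant bits at index 19.
    out = [""] * 36
    out[8] = out[13] = out[18] = out[23] = "-"
    out[14] = "4"
    for i in range(36):
        if not out[i]:
            r = random.randrange(16)
            out[i] = CHARS[((r & 0x3) | 0x8) if i == 19 else r]
    return prefix + "".join(out)

# Example: uuid_like(8, 16, "dlg") -> e.g. "dlg3F0A91C2", matching the
# dialog/toast id style (dlg/tst prefixes) used throughout the utilities.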
utils.ts
import $ from 'jquery' import 'bootstrap' import { Frame } from 'scenejs' import { createPopper, Placement } from '@popperjs/core' import domtoimage from 'dom-to-image' export default { /** * 把 selected作为一组,更新他们的grouped结构体 * @param selectedItems * @param isGrouped */ setSelectedItemAsGroup (designVm:any, selectedItems, isGrouped){ const grouped = {} for (const itemid in selectedItems) { const pageid = selectedItems[itemid].pageid const pageindex = designVm.$store.getters.findPageIndex(pageid) const shadow = designVm.$store.getters.getDesignItemValue(pageindex, itemid, 'shadow') grouped[itemid] = pageid if (shadow){ for (const pageid in shadow) { grouped[shadow[pageid]] = pageid } } } for (const itemid in grouped) { const pageid = grouped[itemid] designVm.$store.commit('setDesignItemValue', { pageid, itemid, props: { grouped: isGrouped ? grouped : undefined }, needSyncShadown: false }) } }, saveJwt (jwt) { window.sessionStorage.setItem('jwt', jwt) }, getJwt () { return window.sessionStorage.getItem('jwt') }, saveDesign (api, design, cb: any = null) { const jwt = this.getJwt() if (!design.pages || design.pages.length === 0) return // console.log(design) const files = {} let fileCount = 0 const promises: any = [] for (const pageindex in design.pages) { const page = design.pages[pageindex] const node = $(`#${page.id} .scaled-content`).get(0) if (!node) continue promises.push(domtoimage.toBlob(node)) } new Promise((resolve) => { if (promises.length === 0) { resolve() return } for (let pageindex = 0; pageindex < promises.length; pageindex++) { promises[pageindex].then(blob => { files[`preview_url[${pageindex}]`] = new File([blob], `preview-${pageindex}.png`) fileCount++ if (fileCount >= design.pages.length) { resolve() } }).catch(err => { console.error('domtoimage oops, something went wrong!', err) fileCount++ if (fileCount >= design.pages.length) { resolve() } }) } }).then((blobs) => { // console.log(files) this.post(api + 'design/save.json', { meta: JSON.stringify(design) }, files, (rst) => { if (cb) { cb(rst) } if (!rst || !rst.success) { this.toast('保存失败', rst.msg || '自动保存失败') } }, 'json') }) }, toast (title, msg) { const dialogId = this.uuid(8, 16, 'tst') $('body').append(`<div class=" d-flex justify-content-center align-items-center"> <div class="toast" role="alert" data-delay="3000" id="${dialogId}" aria-live="assertive" aria-atomic="true" style="position: absolute; top: 10px; right: 10px;z-index:1051;opacity:1"> <div class="toast-header"> <img src="/img/logo.png" class="rounded mr-2" style="height: 16px"> <strong class="mr-auto">${title}</strong> <button type="button" class="ml-2 mb-1 close no-outline" data-dismiss="toast" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="toast-body"> ${msg} </div> </div></div>`) $(`#${dialogId}`).toast('show') }, closeDialog (dialogId) { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() delete window[dialogId + 'okCb'] }, loading (content, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') const loadingCb = function () { $(`#${dialogId}`).modal('hide') } window[dialogId + 'okCb'] = loadingCb $('body').append(` <div class="modal no-user-select" tabindex="-1" data-backdrop="static" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-sm modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-body text-center"> <div class="text-center m-3 text-white text-muted">${content}</div> <div class="progress"> <div class="progress-bar progress-bar-striped 
progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"></div> </div> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, openDialog (title, content, okText, cancelText = '', okCb: any = null, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') if (!okCb) { okCb = function () { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() } } window[dialogId + 'okCb'] = okCb $('body').append(` <div class="modal" tabindex="-1" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-header no-border"> <h5 class="modal-title ${title ? '' : 'd-none'}">${title}</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> ${content} </div> <div class="modal-footer no-border"> <button type="button" class="btn btn-secondary ${cancelText ? '' : 'd-none'}" data-dismiss="modal">${cancelText}</button> <button type="button" class="btn btn-primary" onclick="${dialogId}okCb('${dialogId}')">${okText}</button> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, get (url, data = {}, cb) { $.ajax({ headers: { token: this.getJwt() }, url: url, data: data, crossDomain: true, success: (data) => cb(data), dataType: 'json' }) }, post (url, data = {}, files: Record<string, any>, cb) { const fd: FormData = new FormData() fo
ect.create(null) objs.forEach(obj => { if (obj) { Object.keys(obj).forEach(key => { const val = obj[key] if (this.isPlainObject(val)) { // 递归 if (this.isPlainObject(result[key])) { result[key] = this.deepMerge(result[key], val) } else { result[key] = this.deepMerge(val) } } else { result[key] = val } }) } }) // console.log(result) return result }, isPlainObject (val) { return toString.call(val) === '[object Object]' }, /** * 切换显示Popper弹出菜单 * * @param vueObject Vue对象,必须有notifyDismissAllMenu方法 * @param openWhere * @param openForm * @param trigger * @param placement * @param offset * @param dismissAllMenu */ togglePopper: function (vueObject, openWhere, openForm, trigger, placement: Placement = 'bottom-end', offset = [0, 10], dismissAllMenu = true) { const oldState = vueObject[trigger] if (dismissAllMenu) vueObject.notifyDismissAllMenu({ trigger }) vueObject[trigger] = !oldState if (!vueObject[trigger]) return vueObject.$nextTick(function () { const el = this.$refs[openForm].$el || this.$refs[openForm] // 是组件的话,需要用里面的el const popper = createPopper(openWhere, el, { placement, modifiers: [ { name: 'offset', options: { offset: offset } } ] }) }) }, /** * 获取当前在屏幕中显示的页面id * @return Array<string> */ getPageIdInScreen: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const pageInScreen: Array<string> = [] $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const pageid = $(el).attr('data-page-ref') as string pageInScreen.push(pageid) }) return pageInScreen }, /** * 判断给定的元素是否完全进入了指定的页面 * @return boolean */ elementIsInPage (el: HTMLElement | SVGElement, page: HTMLElement) { if (!el || !page) return false const elRect: DOMRect = el.getBoundingClientRect() const pageRect: DOMRect = page.getBoundingClientRect() if (elRect.left >= pageRect.left && elRect.right <= pageRect.right && elRect.top >= pageRect.top && elRect.bottom <= pageRect.bottom) { return true } return false }, /** * 获取当前在屏幕中显示的,在屏幕中心的页面id, 用页面的y坐标和中心点的y坐标的距离差最小的那个,如果距离屏幕中心的页面有被选中的,则返回选中的 */ getPageIdInScreenCenter: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const screenCenterX = clientWidth / 2 const screenCenterY = clientHeight / 2 // console.log(`screen center: ${screenCenterX}x${screenCenterY}`) const pageInScreen = {} let selected = '' let selectedPageDist = 0 $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const centerY = rect.height / 2 + rect.top const centerX = rect.width / 2 + rect.left const dist = Math.sqrt(Math.pow(centerY - screenCenterY, 2) + Math.pow(centerX - screenCenterX, 2)) const pageid = $(el).attr('data-page-ref') as string pageInScreen[dist] = pageid if ($(el).hasClass('selected')) { selectedPageDist = dist selected = pageid } }) // console.log(pageInScreen) const dists = Object.keys(pageInScreen).sort() if (selected) return selected return pageInScreen[dists[0]] || '' }, isEmptyObject: function (e) { for (const t in e) { return !1 } return !0 }, /** * 按默认2页对页显示,根据页面的顺序计算他所在的对页分组 * @param pageIndex */ getGroupIndex: function (pageIndex: number) { if (pageIndex === 0) return 0 // 首页 if (pageIndex % 2 === 0) { // 偶数页 return pageIndex / 2 } else { // 奇数页 return pageIndex - (pageIndex - 
1) / 2 } }, formatFloat: function (f) { const v = (parseFloat(f) || 0).toFixed(2) if (v.match(/\.00/)) { return parseInt(v) } else { return parseFloat(v) } }, unitName: function (unit) { const map = { px: '像素', mm: '毫米', cm: '厘米', in: '英寸' } return map[unit] || unit }, isDragInCorners: function (direction: number[]) { return (direction[0] === -1 && direction[1] === -1) || (direction[0] === -1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === -1) }, /** * 如果传入assetSide则判断是否拖动指定的边,如果不传入则判断是否拖动任意边 * @param direction * @param assetSide t b l r */ isDragInEdge: function (direction: number[], assertSide = '') { if (assertSide === 't') { return (direction[0] === 0 && direction[1] === -1) } if (assertSide === 'r') { return (direction[0] === 1 && direction[1] === 0) } if (assertSide === 'b') { return (direction[0] === 0 && direction[1] === 1) } if (assertSide === 'l') { return (direction[0] === -1 && direction[1] === 0) } return (direction[0] === 1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === 1) || (direction[0] === -1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === -1) }, /** * TODO 该函数为什么不通过computed来实现呢? * @param el * @param frame */ applyFrameCSS: function (el: any, frame: Frame) { if (el) { // console.log('applyFrameCSS ' + frame.toCSS()) // const el: any = moveable.target el.style.cssText = frame.toCSS() } }, getTransform (el: HTMLElement): Frame { const json: any = { // left: 'left', // top: 'top', // width: '', // height: '', transform: { // translateX: '0px', // translateY: '0px', // rotate: '0deg', // rotateY: '0deg', // rotateX: '0deg', // scaleX: 1, // scaleY: 1, // matrix3d: undefined } } const transform = el.style?.transform let items: any if ((items = transform.match(/translateX\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateX = this.formatFloat(items[0].replace(/translateX\(|px\)/g, '')) + 'px' } if ((items = transform.match(/translateY\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateY = parseFloat(items[0].replace(/translateY\(|px\)/g, '')) + 'px' } if ((items = transform.match(/rotate\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotate = parseFloat(items[0].replace(/rotate\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateX\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateX = parseFloat(items[0].replace(/rotateX\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateY\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateY = parseFloat(items[0].replace(/rotateY\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/scaleX\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleX = parseFloat(items[0].replace(/scaleX\(|\)/g, '')) } if ((items = transform.match(/scaleY\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleY = parseFloat(items[0].replace(/scaleY\(|\)/g, '')) } if ((items = transform.match(/matrix3d\(.+\)/ig))) { json.transform.matrix3d = items[0].replace(/matrix3d\(|\)/g, '') } else { delete json.transform.matrix3d } // console.log(el.style.width) json.width = $(el).width() + 'px' json.height = $(el).height() + 'px' json.top = el.style?.top json.left = el.style?.left return new Frame(json) }, log: (msg) => { if ($('#log-console').length === 0) { $('body').append("<div id='log-console'></div>") } $('#log-console').append('<p>' + JSON.stringify(msg) + '</p>') }, /** * * @param {type} len 长度 * @param {type} radix 进制 * @returns {String} */ uuid: (len, radix, prefix = '') => { const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('') radix = 
radix || chars.length const uuid: any = [] if (len) { // Compact form for (let i = 0; i < len; i++) { uuid[i] = chars[0 | Math.random() * radix] } } else { // rfc4122, version 4 form let r // rfc4122 requires these characters uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-' uuid[14] = '4' // Fill in random data. At i==19 set the high bits of clock sequence as // per rfc4122, sec. 4.1.5 for (let i = 0; i < 36; i++) { if (!uuid[i]) { r = 0 | Math.random() * 16 uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r] } } } return (prefix || '') + uuid.join('') } }
r (const key in data) { fd.append(key, data[key]) } for (const file in files) { fd.append(file, files[file]) } $.ajax({ headers: { token: this.getJwt() }, method: 'post', processData: false, contentType: false, url: url, data: fd, crossDomain: true, success: (data) => cb(data), error: (data) => cb(data), dataType: 'json' }) }, deepMerge (...objs) { const result = Obj
identifier_body
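The deepMerge utility in the record above folds any number of objects together, recursing only when both sides are plain objects and otherwise letting later arguments overwrite earlier ones. A minimal Python sketch of the same merge policy, assuming plain dicts play the role of plain objects; the name deep_merge and the example values are mine.

def deep_merge(*objs):
    """Recursive merge in the spirit of `deepMerge` above (sketch only)."""
    result = {}
    for obj in objs:
        if not obj:
            continue
        for key, val in obj.items():
            if isinstance(val, dict):
                # Recurse when both the incoming and existing values are mappings.
                if isinstance(result.get(key), dict):
                    result[key] = deep_merge(result[key], val)
                else:
                    result[key] = deep_merge(val)
            else:
                result[key] = val
    return result

# deep_merge({"a": {"x": 1}}, {"a": {"y": 2}, "b": 3})
# -> {"a": {"x": 1, "y": 2}, "b": 3}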
utils.ts
import $ from 'jquery' import 'bootstrap' import { Frame } from 'scenejs' import { createPopper, Placement } from '@popperjs/core' import domtoimage from 'dom-to-image' export default { /** * 把 selected作为一组,更新他们的grouped结构体 * @param selectedItems * @param isGrouped */ setSelectedItemAsGroup (designVm:any, selectedItems, isGrouped){ const grouped = {} for (const itemid in selectedItems) { const pageid = selectedItems[itemid].pageid const pageindex = designVm.$store.getters.findPageIndex(pageid) const shadow = designVm.$store.getters.getDesignItemValue(pageindex, itemid, 'shadow') grouped[itemid] = pageid if (shadow){ for (const pageid in shadow) { grouped[shadow[pageid]] = pageid } } } for (const itemid in grouped) { const pageid = grouped[itemid] designVm.$store.commit('setDesignItemValue', { pageid, itemid, props: { grouped: isGrouped ? grouped : undefined }, needSyncShadown: false }) } }, saveJwt (jwt) { window.sessionStorage.setItem('jwt', jwt) }, getJwt () { return window.sessionStorage.getItem('jwt') }, saveDesign (api, design, cb: any = null) { const jwt = this.getJwt() if (!design.pages || design.pages.length === 0) return // console.log(design) const files = {} let fileCount = 0 const promises: any = [] for (const pageindex in design.pages) { const page = design.pages[pageindex] const node = $(`#${page.id} .scaled-content`).get(0) if (!node) continue promises.push(domtoimage.toBlob(node)) } new Promise((resolve) => { if (promises.length === 0) { resolve() return } for (let pageindex = 0; pageindex < promises.length; pageindex++) { promises[pageindex].then(blob => { files[`preview_url[${pageindex}]`] = new File([blob], `preview-${pageindex}.png`) fileCount++ if (fileCount >= design.pages.length) { resolve() } }).catch(err => { console.error('domtoimage oops, something went wrong!', err) fileCount++ if (fileCount >= design.pages.length) { resolve() } }) } }).then((blobs) => { // console.log(files) this.post(api + 'design/save.json', { meta: JSON.stringify(design) }, files, (rst) => { if (cb) { cb(rst) } if (!rst || !rst.success) { this.toast('保存失败', rst.msg || '自动保存失败') } }, 'json') }) }, toast (title, msg) { const dialogId = this.uuid(8, 16, 'tst') $('body').append(`<div class=" d-flex justify-content-center align-items-center"> <div class="toast" role="alert" data-delay="3000" id="${dialogId}" aria-live="assertive" aria-atomic="true" style="position: absolute; top: 10px; right: 10px;z-index:1051;opacity:1"> <div class="toast-header"> <img src="/img/logo.png" class="rounded mr-2" style="height: 16px"> <strong class="mr-auto">${title}</strong> <button type="button" class="ml-2 mb-1 close no-outline" data-dismiss="toast" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="toast-body"> ${msg} </div> </div></div>`) $(`#${dialogId}`).toast('show') }, closeDialog (dialogId) { $(`#${dialogId}`).modal('hide') $(`#${dialogId}`).remove() delete window[dialogId + 'okCb'] }, loading (content, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') const loadingCb = function () { $(`#${dialogId}`).modal('hide') } window[dialogId + 'okCb'] = loadingCb $('body').append(` <div class="modal no-user-select" tabindex="-1" data-backdrop="static" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-sm modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-body text-center"> <div class="text-center m-3 text-white text-muted">${content}</div> <div class="progress"> <div class="progress-bar progress-bar-striped 
progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"></div> </div> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, openDialog (title, content, okText, cancelText = '', okCb: any = null, dialogId = '') { if (!dialogId) dialogId = this.uuid(8, 16, 'dlg') if (!okCb) { okCb = function () { $(`#${dialo
dy').append(` <div class="modal" tabindex="-1" role="dialog" id="${dialogId}"> <div class="modal-dialog modal-dialog-centered" role="document"> <div class="modal-content"> <div class="modal-header no-border"> <h5 class="modal-title ${title ? '' : 'd-none'}">${title}</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> ${content} </div> <div class="modal-footer no-border"> <button type="button" class="btn btn-secondary ${cancelText ? '' : 'd-none'}" data-dismiss="modal">${cancelText}</button> <button type="button" class="btn btn-primary" onclick="${dialogId}okCb('${dialogId}')">${okText}</button> </div> </div> </div> </div>`) $(`#${dialogId}`).modal('show') return dialogId }, get (url, data = {}, cb) { $.ajax({ headers: { token: this.getJwt() }, url: url, data: data, crossDomain: true, success: (data) => cb(data), dataType: 'json' }) }, post (url, data = {}, files: Record<string, any>, cb) { const fd: FormData = new FormData() for (const key in data) { fd.append(key, data[key]) } for (const file in files) { fd.append(file, files[file]) } $.ajax({ headers: { token: this.getJwt() }, method: 'post', processData: false, contentType: false, url: url, data: fd, crossDomain: true, success: (data) => cb(data), error: (data) => cb(data), dataType: 'json' }) }, deepMerge (...objs) { const result = Object.create(null) objs.forEach(obj => { if (obj) { Object.keys(obj).forEach(key => { const val = obj[key] if (this.isPlainObject(val)) { // 递归 if (this.isPlainObject(result[key])) { result[key] = this.deepMerge(result[key], val) } else { result[key] = this.deepMerge(val) } } else { result[key] = val } }) } }) // console.log(result) return result }, isPlainObject (val) { return toString.call(val) === '[object Object]' }, /** * 切换显示Popper弹出菜单 * * @param vueObject Vue对象,必须有notifyDismissAllMenu方法 * @param openWhere * @param openForm * @param trigger * @param placement * @param offset * @param dismissAllMenu */ togglePopper: function (vueObject, openWhere, openForm, trigger, placement: Placement = 'bottom-end', offset = [0, 10], dismissAllMenu = true) { const oldState = vueObject[trigger] if (dismissAllMenu) vueObject.notifyDismissAllMenu({ trigger }) vueObject[trigger] = !oldState if (!vueObject[trigger]) return vueObject.$nextTick(function () { const el = this.$refs[openForm].$el || this.$refs[openForm] // 是组件的话,需要用里面的el const popper = createPopper(openWhere, el, { placement, modifiers: [ { name: 'offset', options: { offset: offset } } ] }) }) }, /** * 获取当前在屏幕中显示的页面id * @return Array<string> */ getPageIdInScreen: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const pageInScreen: Array<string> = [] $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const pageid = $(el).attr('data-page-ref') as string pageInScreen.push(pageid) }) return pageInScreen }, /** * 判断给定的元素是否完全进入了指定的页面 * @return boolean */ elementIsInPage (el: HTMLElement | SVGElement, page: HTMLElement) { if (!el || !page) return false const elRect: DOMRect = el.getBoundingClientRect() const pageRect: DOMRect = page.getBoundingClientRect() if (elRect.left >= pageRect.left && elRect.right <= pageRect.right && elRect.top >= pageRect.top && elRect.bottom <= pageRect.bottom) { return true } return false }, /** * 
获取当前在屏幕中显示的,在屏幕中心的页面id, 用页面的y坐标和中心点的y坐标的距离差最小的那个,如果距离屏幕中心的页面有被选中的,则返回选中的 */ getPageIdInScreenCenter: function () { const clientWidth = document.body.clientWidth const clientHeight = document.body.clientHeight const screenCenterX = clientWidth / 2 const screenCenterY = clientHeight / 2 // console.log(`screen center: ${screenCenterX}x${screenCenterY}`) const pageInScreen = {} let selected = '' let selectedPageDist = 0 $('.editor').each(function (idx, el) { const rect = el.getBoundingClientRect() if (rect.bottom <= 83 /* 滚出了workspace区域 */ || rect.right <= 0 || rect.left >= clientWidth || rect.top >= clientHeight) { return } const centerY = rect.height / 2 + rect.top const centerX = rect.width / 2 + rect.left const dist = Math.sqrt(Math.pow(centerY - screenCenterY, 2) + Math.pow(centerX - screenCenterX, 2)) const pageid = $(el).attr('data-page-ref') as string pageInScreen[dist] = pageid if ($(el).hasClass('selected')) { selectedPageDist = dist selected = pageid } }) // console.log(pageInScreen) const dists = Object.keys(pageInScreen).sort() if (selected) return selected return pageInScreen[dists[0]] || '' }, isEmptyObject: function (e) { for (const t in e) { return !1 } return !0 }, /** * 按默认2页对页显示,根据页面的顺序计算他所在的对页分组 * @param pageIndex */ getGroupIndex: function (pageIndex: number) { if (pageIndex === 0) return 0 // 首页 if (pageIndex % 2 === 0) { // 偶数页 return pageIndex / 2 } else { // 奇数页 return pageIndex - (pageIndex - 1) / 2 } }, formatFloat: function (f) { const v = (parseFloat(f) || 0).toFixed(2) if (v.match(/\.00/)) { return parseInt(v) } else { return parseFloat(v) } }, unitName: function (unit) { const map = { px: '像素', mm: '毫米', cm: '厘米', in: '英寸' } return map[unit] || unit }, isDragInCorners: function (direction: number[]) { return (direction[0] === -1 && direction[1] === -1) || (direction[0] === -1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === 1) || (direction[0] === 1 && direction[1] === -1) }, /** * 如果传入assetSide则判断是否拖动指定的边,如果不传入则判断是否拖动任意边 * @param direction * @param assetSide t b l r */ isDragInEdge: function (direction: number[], assertSide = '') { if (assertSide === 't') { return (direction[0] === 0 && direction[1] === -1) } if (assertSide === 'r') { return (direction[0] === 1 && direction[1] === 0) } if (assertSide === 'b') { return (direction[0] === 0 && direction[1] === 1) } if (assertSide === 'l') { return (direction[0] === -1 && direction[1] === 0) } return (direction[0] === 1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === 1) || (direction[0] === -1 && direction[1] === 0) || (direction[0] === 0 && direction[1] === -1) }, /** * TODO 该函数为什么不通过computed来实现呢? 
* @param el * @param frame */ applyFrameCSS: function (el: any, frame: Frame) { if (el) { // console.log('applyFrameCSS ' + frame.toCSS()) // const el: any = moveable.target el.style.cssText = frame.toCSS() } }, getTransform (el: HTMLElement): Frame { const json: any = { // left: 'left', // top: 'top', // width: '', // height: '', transform: { // translateX: '0px', // translateY: '0px', // rotate: '0deg', // rotateY: '0deg', // rotateX: '0deg', // scaleX: 1, // scaleY: 1, // matrix3d: undefined } } const transform = el.style?.transform let items: any if ((items = transform.match(/translateX\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateX = this.formatFloat(items[0].replace(/translateX\(|px\)/g, '')) + 'px' } if ((items = transform.match(/translateY\(-?\d*(\.\d+)?px\)/ig))) { json.transform.translateY = parseFloat(items[0].replace(/translateY\(|px\)/g, '')) + 'px' } if ((items = transform.match(/rotate\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotate = parseFloat(items[0].replace(/rotate\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateX\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateX = parseFloat(items[0].replace(/rotateX\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/rotateY\(-?\d+(\.\d+)?deg\)/ig))) { json.transform.rotateY = parseFloat(items[0].replace(/rotateY\(|deg\)/g, '')) + 'deg' } if ((items = transform.match(/scaleX\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleX = parseFloat(items[0].replace(/scaleX\(|\)/g, '')) } if ((items = transform.match(/scaleY\(-?\d*(\.\d+)?\)/ig))) { json.transform.scaleY = parseFloat(items[0].replace(/scaleY\(|\)/g, '')) } if ((items = transform.match(/matrix3d\(.+\)/ig))) { json.transform.matrix3d = items[0].replace(/matrix3d\(|\)/g, '') } else { delete json.transform.matrix3d } // console.log(el.style.width) json.width = $(el).width() + 'px' json.height = $(el).height() + 'px' json.top = el.style?.top json.left = el.style?.left return new Frame(json) }, log: (msg) => { if ($('#log-console').length === 0) { $('body').append("<div id='log-console'></div>") } $('#log-console').append('<p>' + JSON.stringify(msg) + '</p>') }, /** * * @param {type} len 长度 * @param {type} radix 进制 * @returns {String} */ uuid: (len, radix, prefix = '') => { const chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split('') radix = radix || chars.length const uuid: any = [] if (len) { // Compact form for (let i = 0; i < len; i++) { uuid[i] = chars[0 | Math.random() * radix] } } else { // rfc4122, version 4 form let r // rfc4122 requires these characters uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-' uuid[14] = '4' // Fill in random data. At i==19 set the high bits of clock sequence as // per rfc4122, sec. 4.1.5 for (let i = 0; i < 36; i++) { if (!uuid[i]) { r = 0 | Math.random() * 16 uuid[i] = chars[(i === 19) ? (r & 0x3) | 0x8 : r] } } } return (prefix || '') + uuid.join('') } }
gId}`).modal('hide') $(`#${dialogId}`).remove() } } window[dialogId + 'okCb'] = okCb $('bo
conditional_block
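getGroupIndex in the record above maps a page index to its facing-pair group under the default two-page spread: the cover (index 0) sits alone in group 0, after which pages pair up as (1,2), (3,4), and so on, so an even index maps to index/2 and an odd one to (index+1)/2. A small Python sketch of the same arithmetic; the function name and the assertion are mine.

def get_group_index(page_index: int) -> int:
    """Facing-page grouping, mirroring `getGroupIndex` above (sketch)."""
    if page_index == 0:
        return 0                      # the cover stands alone
    if page_index % 2 == 0:
        return page_index // 2        # even page closes a pair
    return (page_index + 1) // 2      # odd page opens a pair

# Pages 0..5 fall into groups 0, 1, 1, 2, 2, 3.
assert [get_group_index(i) for i in range(6)] == [0, 1, 1, 2, 2, 3]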
kerdenSOM.py
#!/usr/bin/env python """ Kernel Probability Density Estimator Self-Organizing Map """ # python import re import os import sys import glob import time import numpy import shutil import subprocess # appion from appionlib import appionScript from appionlib import apXmipp from appionlib import apDisplay from appionlib import appiondata from appionlib import apEMAN from appionlib import apFile from appionlib import apProject from appionlib import apFourier from appionlib import apImagicFile from appionlib import apImage #====================== #====================== class kerdenSOMScript(appionScript.AppionScript): #====================== def setupParserOptions(self): self.parser.add_option("-a", "--alignid", dest="alignstackid", type="int", help="Alignment stack id", metavar="#") self.parser.add_option("-m", "--maskrad", dest="maskrad", type="float", help="Mask radius in Angstroms", metavar="#") self.parser.add_option("-x", "--xdim", dest="xdim", type="int", default=4, help="X dimension", metavar="#") self.parser.add_option("-y", "--ydim", dest="ydim", type="int", default=3, help="Y dimension", metavar="#") self.parser.add_option("--numpart", dest="numpart", type="int", help="Number of particles, default all in stack", metavar="#") self.convergemodes = ( "normal", "fast", "slow" ) self.parser.add_option("--converge", dest="converge", help="Convergence criteria mode", metavar="MODE", type="choice", choices=self.convergemodes, default="normal" ) #====================== def checkConflicts(self): if self.params['alignstackid'] is None: apDisplay.printError("Please enter an aligned stack id, e.g. --alignstackid=4") if self.params['numpart'] is None: alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) self.params['numpart'] = alignstackdata['num_particles'] if self.params['xdim'] > 16 or self.params['xdim'] > 16: apDisplay.printError("Dimensions must be less than 15") #====================== def setRunDir(self): self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) path = self.alignstackdata['path']['path'] uppath = os.path.abspath(os.path.join(path, "..")) self.params['rundir'] = os.path.join(uppath, self.params['runname']) #====================== def insertKerDenSOM(self, binned=None): ### Preliminary data projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid']) alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) numclass = self.params['xdim']*self.params['ydim'] pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir'])) ### KerDen SOM Params object kerdenq = appiondata.ApKerDenSOMParamsData() kerdenq['mask_diam'] = 2.0*self.params['maskrad'] kerdenq['x_dimension'] = self.params['xdim'] kerdenq['y_dimension'] = self.params['ydim'] kerdenq['convergence'] = self.params['converge'] kerdenq['run_seconds'] = time.time()-self.t0 ### Align Analysis Run object analysisq = appiondata.ApAlignAnalysisRunData() analysisq['runname'] = self.params['runname'] analysisq['path'] = pathdata analysisq['description'] = self.params['description'] analysisq['alignstack'] = alignstackdata analysisq['hidden'] = False ### linked through cluster not analysis #analysisq['kerdenparams'] = kerdenq ### Clustering Run object clusterrunq = appiondata.ApClusteringRunData() clusterrunq['runname'] = self.params['runname'] clusterrunq['description'] = self.params['description'] # what if we binned the aligned stack to get the new one if binned is None: boxsize = 
alignstackdata['boxsize'] pixelsize = alignstackdata['pixelsize'] else: boxsize = alignstackdata['boxsize'] / binned pixelsize = alignstackdata['pixelsize'] * binned clusterrunq['boxsize'] = boxsize clusterrunq['pixelsize'] = pixelsize clusterrunq['num_particles'] = self.params['numpart'] clusterrunq['alignstack'] = alignstackdata clusterrunq['analysisrun'] = analysisq clusterrunq['kerdenparams'] = kerdenq ### Clustering Stack object clusterstackq = appiondata.ApClusteringStackData() clusterstackq['avg_imagicfile'] = "kerdenstack"+self.timestamp+".hed" clusterstackq['num_classes'] = numclass clusterstackq['clusterrun'] = clusterrunq clusterstackq['path'] = pathdata clusterstackq['hidden'] = False imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile']) if not os.path.isfile(imagicfile): apDisplay.printError("could not find average stack file: "+imagicfile) ### looping over clusters apDisplay.printColor("Inserting particle classification data, please wait", "cyan") for i in range(numclass): classnum = i+1 classroot = "%s.%d"% (self.timestamp, classnum-1) classdocfile = os.path.join(self.params['rundir'], classroot) partlist = self.readClassDocFile(classdocfile) ### Clustering Particle object clusterrefq = appiondata.ApClusteringReferenceData() clusterrefq['refnum'] = classnum clusterrefq['avg_mrcfile'] = classroot+".mrc" clusterrefq['clusterrun'] = clusterrunq clusterrefq['path'] = pathdata clusterrefq['num_particles'] = len(partlist) clusterrefq['ssnr_resolution'] = self.cluster_resolution[i] ### looping over particles sys.stderr.write(".") for partnum in partlist: alignpartdata = self.getAlignParticleData(partnum, alignstackdata) ### Clustering Particle objects clusterpartq = appiondata.ApClusteringParticleData() clusterpartq['clusterstack'] = clusterstackq clusterpartq['alignparticle'] = alignpartdata clusterpartq['partnum'] = partnum clusterpartq['refnum'] = classnum clusterpartq['clusterreference'] = clusterrefq ### finally we can insert parameters if self.params['commit'] is True: clusterpartq.insert() #===================== def getAlignParticleData(self, partnum, alignstackdata): alignpartq = appiondata.ApAlignParticleData() alignpartq['alignstack'] = alignstackdata alignpartq['partnum'] = partnum alignparts = alignpartq.query(results=1) return alignparts[0] #===================== def readClassDocFile(self, docfile): if not os.path.isfile(docfile): return [] partlist = [] f = open(docfile, 'r') for line in f: sline = line.strip() if re.match("[0-9]+", sline): # numbers start at zero partnum = int(sline)+1 partlist.append(partnum) f.close() if not partlist: return [] partlist.sort() return partlist #====================== def runKerdenSOM(self, indata): """ From http://xmipp.cnb.csic.es/twiki/bin/view/Xmipp/KerDenSOM KerDenSOM stands for "Kernel Probability Density Estimator Self-Organizing Map". It maps a set of high dimensional input vectors into a two-dimensional grid. 
""" apDisplay.printMsg("Running KerDen SOM") outstamp = os.path.join(self.params['rundir'], self.timestamp) kerdencmd = ( "xmipp_classify_kerdensom -verb 1 -i %s -o %s -xdim %d -ydim %d -saveclusters "% (indata, outstamp, self.params['xdim'], self.params['ydim']) ) ### convergence criteria if self.params['converge'] == "fast": kerdencmd += " -eps 1e-5 " elif self.params['converge'] == "slow": kerdencmd += " -eps 1e-9 " else: kerdencmd += " -eps 1e-7 " apDisplay.printColor(kerdencmd, "cyan") proc = subprocess.Popen(kerdencmd, shell=True) proc.wait() time.sleep(1) return #====================== def fileId(self, fname): ext = os.path.splitext(fname)[1] num = int(ext[1:]) return num #====================== def sortFile(self, a, b): if self.fileId(a) > self.fileId(b): return 1 return -1 #====================== def createMontageByEMAN(self): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### create crappy files emancmd = ( "proc2d "+self.instack+" crap.mrc first=0 last=0 mask=1" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) emancmd = ( "proc2d crap.mrc crap.png" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) stackname = "kerdenstack"+self.timestamp+".hed" count = 0 numclass = self.params['xdim']*self.params['ydim'] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) #listname = self.timestamp+str(i) if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle emancmd = ( "proc2d crap.mrc "+stackname+" " ) sys.stderr.write("skipping "+listname+"\n") apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False)
emancmd = ("proc2d %s %s list=%s average"% (self.instack, stackname, listname)) apEMAN.executeEmanCmd(emancmd, showcmd=True, verbose=False) ### create mrc emancmd = ("proc2d %s %s first=%d last=%d"% (stackname, listname+".mrc", count, count)) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png emancmd = ("proc2d %s %s"% (listname+".mrc", listname+".png")) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### FIX ME: for now fill self.clsuter_sersolution with None, although it ### should be possible to calculate it if particle list exists like in createMontageInMemory self.cluster_resolution.append(None) montagecmd += listname+".png " count +=1 montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFile("crap.mrc") apFile.removeFile("crap.png") apFile.removeFilePattern(self.timestamp+".*.png") #====================== def readListFile(self, listfile): partlist = [] f = open(listfile, "r") for line in f: sline = line.strip() if re.match("[0-9]+$", sline): partnum = int(sline)+1 partlist.append(partnum) f.close() return partlist #====================== def createMontageInMemory(self, apix): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### Set binning of images boxsize = apImagicFile.getBoxsize(self.instack) bin = 1 while boxsize/bin > 200: bin+=1 binboxsize = boxsize/bin ### create averages files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montage = [] montagepngs = [] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) pngfile = listname+".png" if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle sys.stderr.write("skipping "+listname+"\n") blank = numpy.ones((binboxsize, binboxsize), dtype=numpy.float32) ### add to montage stack montage.append(blank) self.cluster_resolution.append(None) ### create png apImage.arrayToPng(blank, pngfile) else: ### read particle list partlist = self.readListFile(listname) ### average particles partdatalist = apImagicFile.readParticleListFromStack(self.instack, partlist, boxsize, msg=False) partdataarray = numpy.asarray(partdatalist) finaldata = partdataarray.mean(0) if bin > 1: finaldata = apImage.binImg(finaldata, bin) ### add to montage stack montage.append(finaldata) res = apFourier.spectralSNR(partdatalist, apix) self.cluster_resolution.append(res) ### create png apImage.arrayToPng(finaldata, pngfile) ### check for png file if os.path.isfile(pngfile): montagepngs.append(pngfile) else: apDisplay.printError("failed to create montage") stackname = "kerdenstack"+self.timestamp+".hed" apImagicFile.writeImagic(montage, stackname) ### create montage montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) for monpng in montagepngs: montagecmd += monpng+" " montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFilePattern(self.timestamp+".*.png") return bin #====================== def start(self): aligndata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) boxsize = aligndata['boxsize'] apix = aligndata['pixelsize'] maskpixrad = self.params['maskrad']/apix if maskpixrad*2 > boxsize-2: apDisplay.printError("Mask radius is too big for boxsize: %d > %d"%(maskpixrad*2,boxsize-2)) apDisplay.printMsg("Mask radius and boxsize: %.1f < %d"%(maskpixrad*2,boxsize-2)) self.instack = os.path.join(aligndata['path']['path'], 
aligndata['imagicfile']) outdata = "stack.data" apXmipp.convertStackToXmippData(self.instack, outdata, maskpixrad, boxsize, numpart=self.params['numpart']-1) self.runKerdenSOM(outdata) if apFile.stackSize(self.instack) > 3.0*(1024**3): # Big stacks use eman self.createMontageByEMAN() binned = None else: binned = self.createMontageInMemory(apix) self.insertKerDenSOM(binned=binned) apFile.removeFile(outdata) apFile.removeFilePattern("*.cod") #====================== #====================== if __name__ == '__main__': kerdenSOM = kerdenSOMScript() kerdenSOM.start() kerdenSOM.close()
### create png shutil.copy("crap.png", listname+".png") else: ### average particles
random_line_split
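runKerdenSOM in the kerdenSOM.py record above shells out to xmipp_classify_kerdensom and lets the --converge option change only the -eps stopping threshold: fast maps to 1e-5, slow to 1e-9, and anything else to 1e-7. The sketch below isolates that command assembly; the helper name and signature are mine, while the program name and flags come from the record.

def kerdensom_command(indata, outstamp, xdim, ydim, converge="normal"):
    """Assemble the Xmipp KerDenSOM command line (sketch of runKerdenSOM above)."""
    eps = {"fast": "1e-5", "slow": "1e-9"}.get(converge, "1e-7")
    return ("xmipp_classify_kerdensom -verb 1 -i %s -o %s "
            "-xdim %d -ydim %d -saveclusters -eps %s"
            % (indata, outstamp, xdim, ydim, eps))

# kerdensom_command("stack.data", "rundir/stamp", 4, 3, "slow")
# -> "xmipp_classify_kerdensom -verb 1 -i stack.data -o rundir/stamp -xdim 4 -ydim 3 -saveclusters -eps 1e-9"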
kerdenSOM.py
#!/usr/bin/env python """ Kernel Probability Density Estimator Self-Organizing Map """ # python import re import os import sys import glob import time import numpy import shutil import subprocess # appion from appionlib import appionScript from appionlib import apXmipp from appionlib import apDisplay from appionlib import appiondata from appionlib import apEMAN from appionlib import apFile from appionlib import apProject from appionlib import apFourier from appionlib import apImagicFile from appionlib import apImage #====================== #====================== class kerdenSOMScript(appionScript.AppionScript): #====================== def setupParserOptions(self): self.parser.add_option("-a", "--alignid", dest="alignstackid", type="int", help="Alignment stack id", metavar="#") self.parser.add_option("-m", "--maskrad", dest="maskrad", type="float", help="Mask radius in Angstroms", metavar="#") self.parser.add_option("-x", "--xdim", dest="xdim", type="int", default=4, help="X dimension", metavar="#") self.parser.add_option("-y", "--ydim", dest="ydim", type="int", default=3, help="Y dimension", metavar="#") self.parser.add_option("--numpart", dest="numpart", type="int", help="Number of particles, default all in stack", metavar="#") self.convergemodes = ( "normal", "fast", "slow" ) self.parser.add_option("--converge", dest="converge", help="Convergence criteria mode", metavar="MODE", type="choice", choices=self.convergemodes, default="normal" ) #====================== def checkConflicts(self): if self.params['alignstackid'] is None: apDisplay.printError("Please enter an aligned stack id, e.g. --alignstackid=4") if self.params['numpart'] is None: alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) self.params['numpart'] = alignstackdata['num_particles'] if self.params['xdim'] > 16 or self.params['xdim'] > 16: apDisplay.printError("Dimensions must be less than 15") #====================== def setRunDir(self): self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) path = self.alignstackdata['path']['path'] uppath = os.path.abspath(os.path.join(path, "..")) self.params['rundir'] = os.path.join(uppath, self.params['runname']) #====================== def insertKerDenSOM(self, binned=None): ### Preliminary data projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid']) alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) numclass = self.params['xdim']*self.params['ydim'] pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir'])) ### KerDen SOM Params object kerdenq = appiondata.ApKerDenSOMParamsData() kerdenq['mask_diam'] = 2.0*self.params['maskrad'] kerdenq['x_dimension'] = self.params['xdim'] kerdenq['y_dimension'] = self.params['ydim'] kerdenq['convergence'] = self.params['converge'] kerdenq['run_seconds'] = time.time()-self.t0 ### Align Analysis Run object analysisq = appiondata.ApAlignAnalysisRunData() analysisq['runname'] = self.params['runname'] analysisq['path'] = pathdata analysisq['description'] = self.params['description'] analysisq['alignstack'] = alignstackdata analysisq['hidden'] = False ### linked through cluster not analysis #analysisq['kerdenparams'] = kerdenq ### Clustering Run object clusterrunq = appiondata.ApClusteringRunData() clusterrunq['runname'] = self.params['runname'] clusterrunq['description'] = self.params['description'] # what if we binned the aligned stack to get the new one if binned is None: boxsize = 
alignstackdata['boxsize'] pixelsize = alignstackdata['pixelsize'] else: boxsize = alignstackdata['boxsize'] / binned pixelsize = alignstackdata['pixelsize'] * binned clusterrunq['boxsize'] = boxsize clusterrunq['pixelsize'] = pixelsize clusterrunq['num_particles'] = self.params['numpart'] clusterrunq['alignstack'] = alignstackdata clusterrunq['analysisrun'] = analysisq clusterrunq['kerdenparams'] = kerdenq ### Clustering Stack object clusterstackq = appiondata.ApClusteringStackData() clusterstackq['avg_imagicfile'] = "kerdenstack"+self.timestamp+".hed" clusterstackq['num_classes'] = numclass clusterstackq['clusterrun'] = clusterrunq clusterstackq['path'] = pathdata clusterstackq['hidden'] = False imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile']) if not os.path.isfile(imagicfile): apDisplay.printError("could not find average stack file: "+imagicfile) ### looping over clusters apDisplay.printColor("Inserting particle classification data, please wait", "cyan") for i in range(numclass): classnum = i+1 classroot = "%s.%d"% (self.timestamp, classnum-1) classdocfile = os.path.join(self.params['rundir'], classroot) partlist = self.readClassDocFile(classdocfile) ### Clustering Particle object clusterrefq = appiondata.ApClusteringReferenceData() clusterrefq['refnum'] = classnum clusterrefq['avg_mrcfile'] = classroot+".mrc" clusterrefq['clusterrun'] = clusterrunq clusterrefq['path'] = pathdata clusterrefq['num_particles'] = len(partlist) clusterrefq['ssnr_resolution'] = self.cluster_resolution[i] ### looping over particles sys.stderr.write(".") for partnum in partlist: alignpartdata = self.getAlignParticleData(partnum, alignstackdata) ### Clustering Particle objects clusterpartq = appiondata.ApClusteringParticleData() clusterpartq['clusterstack'] = clusterstackq clusterpartq['alignparticle'] = alignpartdata clusterpartq['partnum'] = partnum clusterpartq['refnum'] = classnum clusterpartq['clusterreference'] = clusterrefq ### finally we can insert parameters if self.params['commit'] is True: clusterpartq.insert() #===================== def getAlignParticleData(self, partnum, alignstackdata): alignpartq = appiondata.ApAlignParticleData() alignpartq['alignstack'] = alignstackdata alignpartq['partnum'] = partnum alignparts = alignpartq.query(results=1) return alignparts[0] #===================== def readClassDocFile(self, docfile): if not os.path.isfile(docfile): return [] partlist = [] f = open(docfile, 'r') for line in f: sline = line.strip() if re.match("[0-9]+", sline): # numbers start at zero
f.close() if not partlist: return [] partlist.sort() return partlist #====================== def runKerdenSOM(self, indata): """ From http://xmipp.cnb.csic.es/twiki/bin/view/Xmipp/KerDenSOM KerDenSOM stands for "Kernel Probability Density Estimator Self-Organizing Map". It maps a set of high dimensional input vectors into a two-dimensional grid. """ apDisplay.printMsg("Running KerDen SOM") outstamp = os.path.join(self.params['rundir'], self.timestamp) kerdencmd = ( "xmipp_classify_kerdensom -verb 1 -i %s -o %s -xdim %d -ydim %d -saveclusters "% (indata, outstamp, self.params['xdim'], self.params['ydim']) ) ### convergence criteria if self.params['converge'] == "fast": kerdencmd += " -eps 1e-5 " elif self.params['converge'] == "slow": kerdencmd += " -eps 1e-9 " else: kerdencmd += " -eps 1e-7 " apDisplay.printColor(kerdencmd, "cyan") proc = subprocess.Popen(kerdencmd, shell=True) proc.wait() time.sleep(1) return #====================== def fileId(self, fname): ext = os.path.splitext(fname)[1] num = int(ext[1:]) return num #====================== def sortFile(self, a, b): if self.fileId(a) > self.fileId(b): return 1 return -1 #====================== def createMontageByEMAN(self): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### create crappy files emancmd = ( "proc2d "+self.instack+" crap.mrc first=0 last=0 mask=1" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) emancmd = ( "proc2d crap.mrc crap.png" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) stackname = "kerdenstack"+self.timestamp+".hed" count = 0 numclass = self.params['xdim']*self.params['ydim'] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) #listname = self.timestamp+str(i) if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle emancmd = ( "proc2d crap.mrc "+stackname+" " ) sys.stderr.write("skipping "+listname+"\n") apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png shutil.copy("crap.png", listname+".png") else: ### average particles emancmd = ("proc2d %s %s list=%s average"% (self.instack, stackname, listname)) apEMAN.executeEmanCmd(emancmd, showcmd=True, verbose=False) ### create mrc emancmd = ("proc2d %s %s first=%d last=%d"% (stackname, listname+".mrc", count, count)) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png emancmd = ("proc2d %s %s"% (listname+".mrc", listname+".png")) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### FIX ME: for now fill self.clsuter_sersolution with None, although it ### should be possible to calculate it if particle list exists like in createMontageInMemory self.cluster_resolution.append(None) montagecmd += listname+".png " count +=1 montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFile("crap.mrc") apFile.removeFile("crap.png") apFile.removeFilePattern(self.timestamp+".*.png") #====================== def readListFile(self, listfile): partlist = [] f = open(listfile, "r") for line in f: sline = line.strip() if re.match("[0-9]+$", sline): partnum = int(sline)+1 partlist.append(partnum) f.close() return partlist #====================== def createMontageInMemory(self, apix): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### Set binning of images boxsize 
= apImagicFile.getBoxsize(self.instack) bin = 1 while boxsize/bin > 200: bin+=1 binboxsize = boxsize/bin ### create averages files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montage = [] montagepngs = [] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) pngfile = listname+".png" if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle sys.stderr.write("skipping "+listname+"\n") blank = numpy.ones((binboxsize, binboxsize), dtype=numpy.float32) ### add to montage stack montage.append(blank) self.cluster_resolution.append(None) ### create png apImage.arrayToPng(blank, pngfile) else: ### read particle list partlist = self.readListFile(listname) ### average particles partdatalist = apImagicFile.readParticleListFromStack(self.instack, partlist, boxsize, msg=False) partdataarray = numpy.asarray(partdatalist) finaldata = partdataarray.mean(0) if bin > 1: finaldata = apImage.binImg(finaldata, bin) ### add to montage stack montage.append(finaldata) res = apFourier.spectralSNR(partdatalist, apix) self.cluster_resolution.append(res) ### create png apImage.arrayToPng(finaldata, pngfile) ### check for png file if os.path.isfile(pngfile): montagepngs.append(pngfile) else: apDisplay.printError("failed to create montage") stackname = "kerdenstack"+self.timestamp+".hed" apImagicFile.writeImagic(montage, stackname) ### create montage montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) for monpng in montagepngs: montagecmd += monpng+" " montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFilePattern(self.timestamp+".*.png") return bin #====================== def start(self): aligndata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) boxsize = aligndata['boxsize'] apix = aligndata['pixelsize'] maskpixrad = self.params['maskrad']/apix if maskpixrad*2 > boxsize-2: apDisplay.printError("Mask radius is too big for boxsize: %d > %d"%(maskpixrad*2,boxsize-2)) apDisplay.printMsg("Mask radius and boxsize: %.1f < %d"%(maskpixrad*2,boxsize-2)) self.instack = os.path.join(aligndata['path']['path'], aligndata['imagicfile']) outdata = "stack.data" apXmipp.convertStackToXmippData(self.instack, outdata, maskpixrad, boxsize, numpart=self.params['numpart']-1) self.runKerdenSOM(outdata) if apFile.stackSize(self.instack) > 3.0*(1024**3): # Big stacks use eman self.createMontageByEMAN() binned = None else: binned = self.createMontageInMemory(apix) self.insertKerDenSOM(binned=binned) apFile.removeFile(outdata) apFile.removeFilePattern("*.cod") #====================== #====================== if __name__ == '__main__': kerdenSOM = kerdenSOMScript() kerdenSOM.start() kerdenSOM.close()
partnum = int(sline)+1 partlist.append(partnum)
conditional_block
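start() in the record above converts the mask radius from Angstroms to pixels using the aligned stack's pixel size and refuses to run when the resulting mask diameter does not fit inside the box with a two-pixel margin. A standalone sketch of that check; the function name and the ValueError are mine (the original reports the problem through apDisplay.printError).

def check_mask_radius(maskrad_angstroms, apix, boxsize):
    """Angstrom-to-pixel mask radius check, mirroring start() above (sketch)."""
    maskpixrad = maskrad_angstroms / apix
    if maskpixrad * 2 > boxsize - 2:
        raise ValueError("Mask radius is too big for boxsize: %d > %d"
                         % (maskpixrad * 2, boxsize - 2))
    return maskpixrad

# check_mask_radius(100.0, 2.0, 128) -> 50.0  (diameter 100 px fits in 126 px)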
kerdenSOM.py
#!/usr/bin/env python """ Kernel Probability Density Estimator Self-Organizing Map """ # python import re import os import sys import glob import time import numpy import shutil import subprocess # appion from appionlib import appionScript from appionlib import apXmipp from appionlib import apDisplay from appionlib import appiondata from appionlib import apEMAN from appionlib import apFile from appionlib import apProject from appionlib import apFourier from appionlib import apImagicFile from appionlib import apImage #====================== #====================== class kerdenSOMScript(appionScript.AppionScript): #====================== def setupParserOptions(self): self.parser.add_option("-a", "--alignid", dest="alignstackid", type="int", help="Alignment stack id", metavar="#") self.parser.add_option("-m", "--maskrad", dest="maskrad", type="float", help="Mask radius in Angstroms", metavar="#") self.parser.add_option("-x", "--xdim", dest="xdim", type="int", default=4, help="X dimension", metavar="#") self.parser.add_option("-y", "--ydim", dest="ydim", type="int", default=3, help="Y dimension", metavar="#") self.parser.add_option("--numpart", dest="numpart", type="int", help="Number of particles, default all in stack", metavar="#") self.convergemodes = ( "normal", "fast", "slow" ) self.parser.add_option("--converge", dest="converge", help="Convergence criteria mode", metavar="MODE", type="choice", choices=self.convergemodes, default="normal" ) #====================== def checkConflicts(self): if self.params['alignstackid'] is None: apDisplay.printError("Please enter an aligned stack id, e.g. --alignstackid=4") if self.params['numpart'] is None: alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) self.params['numpart'] = alignstackdata['num_particles'] if self.params['xdim'] > 16 or self.params['ydim'] > 16: apDisplay.printError("Dimensions must be 16 or less") #====================== def setRunDir(self):
#====================== def insertKerDenSOM(self, binned=None): ### Preliminary data projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid']) alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) numclass = self.params['xdim']*self.params['ydim'] pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir'])) ### KerDen SOM Params object kerdenq = appiondata.ApKerDenSOMParamsData() kerdenq['mask_diam'] = 2.0*self.params['maskrad'] kerdenq['x_dimension'] = self.params['xdim'] kerdenq['y_dimension'] = self.params['ydim'] kerdenq['convergence'] = self.params['converge'] kerdenq['run_seconds'] = time.time()-self.t0 ### Align Analysis Run object analysisq = appiondata.ApAlignAnalysisRunData() analysisq['runname'] = self.params['runname'] analysisq['path'] = pathdata analysisq['description'] = self.params['description'] analysisq['alignstack'] = alignstackdata analysisq['hidden'] = False ### linked through cluster not analysis #analysisq['kerdenparams'] = kerdenq ### Clustering Run object clusterrunq = appiondata.ApClusteringRunData() clusterrunq['runname'] = self.params['runname'] clusterrunq['description'] = self.params['description'] # what if we binned the aligned stack to get the new one if binned is None: boxsize = alignstackdata['boxsize'] pixelsize = alignstackdata['pixelsize'] else: boxsize = alignstackdata['boxsize'] / binned pixelsize = alignstackdata['pixelsize'] * binned clusterrunq['boxsize'] = boxsize clusterrunq['pixelsize'] = pixelsize clusterrunq['num_particles'] = self.params['numpart'] clusterrunq['alignstack'] = alignstackdata clusterrunq['analysisrun'] = analysisq clusterrunq['kerdenparams'] = kerdenq ### Clustering Stack object clusterstackq = appiondata.ApClusteringStackData() clusterstackq['avg_imagicfile'] = "kerdenstack"+self.timestamp+".hed" clusterstackq['num_classes'] = numclass clusterstackq['clusterrun'] = clusterrunq clusterstackq['path'] = pathdata clusterstackq['hidden'] = False imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile']) if not os.path.isfile(imagicfile): apDisplay.printError("could not find average stack file: "+imagicfile) ### looping over clusters apDisplay.printColor("Inserting particle classification data, please wait", "cyan") for i in range(numclass): classnum = i+1 classroot = "%s.%d"% (self.timestamp, classnum-1) classdocfile = os.path.join(self.params['rundir'], classroot) partlist = self.readClassDocFile(classdocfile) ### Clustering Particle object clusterrefq = appiondata.ApClusteringReferenceData() clusterrefq['refnum'] = classnum clusterrefq['avg_mrcfile'] = classroot+".mrc" clusterrefq['clusterrun'] = clusterrunq clusterrefq['path'] = pathdata clusterrefq['num_particles'] = len(partlist) clusterrefq['ssnr_resolution'] = self.cluster_resolution[i] ### looping over particles sys.stderr.write(".") for partnum in partlist: alignpartdata = self.getAlignParticleData(partnum, alignstackdata) ### Clustering Particle objects clusterpartq = appiondata.ApClusteringParticleData() clusterpartq['clusterstack'] = clusterstackq clusterpartq['alignparticle'] = alignpartdata clusterpartq['partnum'] = partnum clusterpartq['refnum'] = classnum clusterpartq['clusterreference'] = clusterrefq ### finally we can insert parameters if self.params['commit'] is True: clusterpartq.insert() #===================== def getAlignParticleData(self, partnum, alignstackdata): alignpartq = appiondata.ApAlignParticleData() alignpartq['alignstack'] = alignstackdata 
alignpartq['partnum'] = partnum alignparts = alignpartq.query(results=1) return alignparts[0] #===================== def readClassDocFile(self, docfile): if not os.path.isfile(docfile): return [] partlist = [] f = open(docfile, 'r') for line in f: sline = line.strip() if re.match("[0-9]+", sline): # numbers start at zero partnum = int(sline)+1 partlist.append(partnum) f.close() if not partlist: return [] partlist.sort() return partlist #====================== def runKerdenSOM(self, indata): """ From http://xmipp.cnb.csic.es/twiki/bin/view/Xmipp/KerDenSOM KerDenSOM stands for "Kernel Probability Density Estimator Self-Organizing Map". It maps a set of high dimensional input vectors into a two-dimensional grid. """ apDisplay.printMsg("Running KerDen SOM") outstamp = os.path.join(self.params['rundir'], self.timestamp) kerdencmd = ( "xmipp_classify_kerdensom -verb 1 -i %s -o %s -xdim %d -ydim %d -saveclusters "% (indata, outstamp, self.params['xdim'], self.params['ydim']) ) ### convergence criteria if self.params['converge'] == "fast": kerdencmd += " -eps 1e-5 " elif self.params['converge'] == "slow": kerdencmd += " -eps 1e-9 " else: kerdencmd += " -eps 1e-7 " apDisplay.printColor(kerdencmd, "cyan") proc = subprocess.Popen(kerdencmd, shell=True) proc.wait() time.sleep(1) return #====================== def fileId(self, fname): ext = os.path.splitext(fname)[1] num = int(ext[1:]) return num #====================== def sortFile(self, a, b): if self.fileId(a) > self.fileId(b): return 1 return -1 #====================== def createMontageByEMAN(self): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### create crappy files emancmd = ( "proc2d "+self.instack+" crap.mrc first=0 last=0 mask=1" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) emancmd = ( "proc2d crap.mrc crap.png" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) stackname = "kerdenstack"+self.timestamp+".hed" count = 0 numclass = self.params['xdim']*self.params['ydim'] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) #listname = self.timestamp+str(i) if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle emancmd = ( "proc2d crap.mrc "+stackname+" " ) sys.stderr.write("skipping "+listname+"\n") apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png shutil.copy("crap.png", listname+".png") else: ### average particles emancmd = ("proc2d %s %s list=%s average"% (self.instack, stackname, listname)) apEMAN.executeEmanCmd(emancmd, showcmd=True, verbose=False) ### create mrc emancmd = ("proc2d %s %s first=%d last=%d"% (stackname, listname+".mrc", count, count)) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png emancmd = ("proc2d %s %s"% (listname+".mrc", listname+".png")) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### FIX ME: for now fill self.clsuter_sersolution with None, although it ### should be possible to calculate it if particle list exists like in createMontageInMemory self.cluster_resolution.append(None) montagecmd += listname+".png " count +=1 montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFile("crap.mrc") apFile.removeFile("crap.png") apFile.removeFilePattern(self.timestamp+".*.png") #====================== def 
readListFile(self, listfile): partlist = [] f = open(listfile, "r") for line in f: sline = line.strip() if re.match("[0-9]+$", sline): partnum = int(sline)+1 partlist.append(partnum) f.close() return partlist #====================== def createMontageInMemory(self, apix): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### Set binning of images boxsize = apImagicFile.getBoxsize(self.instack) bin = 1 while boxsize/bin > 200: bin+=1 binboxsize = boxsize/bin ### create averages files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montage = [] montagepngs = [] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) pngfile = listname+".png" if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle sys.stderr.write("skipping "+listname+"\n") blank = numpy.ones((binboxsize, binboxsize), dtype=numpy.float32) ### add to montage stack montage.append(blank) self.cluster_resolution.append(None) ### create png apImage.arrayToPng(blank, pngfile) else: ### read particle list partlist = self.readListFile(listname) ### average particles partdatalist = apImagicFile.readParticleListFromStack(self.instack, partlist, boxsize, msg=False) partdataarray = numpy.asarray(partdatalist) finaldata = partdataarray.mean(0) if bin > 1: finaldata = apImage.binImg(finaldata, bin) ### add to montage stack montage.append(finaldata) res = apFourier.spectralSNR(partdatalist, apix) self.cluster_resolution.append(res) ### create png apImage.arrayToPng(finaldata, pngfile) ### check for png file if os.path.isfile(pngfile): montagepngs.append(pngfile) else: apDisplay.printError("failed to create montage") stackname = "kerdenstack"+self.timestamp+".hed" apImagicFile.writeImagic(montage, stackname) ### create montage montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) for monpng in montagepngs: montagecmd += monpng+" " montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFilePattern(self.timestamp+".*.png") return bin #====================== def start(self): aligndata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) boxsize = aligndata['boxsize'] apix = aligndata['pixelsize'] maskpixrad = self.params['maskrad']/apix if maskpixrad*2 > boxsize-2: apDisplay.printError("Mask radius is too big for boxsize: %d > %d"%(maskpixrad*2,boxsize-2)) apDisplay.printMsg("Mask radius and boxsize: %.1f < %d"%(maskpixrad*2,boxsize-2)) self.instack = os.path.join(aligndata['path']['path'], aligndata['imagicfile']) outdata = "stack.data" apXmipp.convertStackToXmippData(self.instack, outdata, maskpixrad, boxsize, numpart=self.params['numpart']-1) self.runKerdenSOM(outdata) if apFile.stackSize(self.instack) > 3.0*(1024**3): # Big stacks use eman self.createMontageByEMAN() binned = None else: binned = self.createMontageInMemory(apix) self.insertKerDenSOM(binned=binned) apFile.removeFile(outdata) apFile.removeFilePattern("*.cod") #====================== #====================== if __name__ == '__main__': kerdenSOM = kerdenSOMScript() kerdenSOM.start() kerdenSOM.close()
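start() above converts the mask radius from Angstroms to pixels using the stack pixel size and refuses masks whose diameter does not fit inside the box. A minimal arithmetic sketch of that check, with made-up values for apix, maskrad and boxsize (none of these come from a real stack):

apix = 2.0         # assumed pixel size in Angstroms per pixel
maskrad = 100.0    # assumed --maskrad value in Angstroms
boxsize = 128      # assumed box size of the aligned stack in pixels

maskpixrad = maskrad / apix             # 50.0 pixels
# the mask diameter (100 px here) must fit inside the box minus a 2 px border
assert maskpixrad * 2 <= boxsize - 2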
self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) path = self.alignstackdata['path']['path'] uppath = os.path.abspath(os.path.join(path, "..")) self.params['rundir'] = os.path.join(uppath, self.params['runname'])
identifier_body
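The class list files written by xmipp_classify_kerdensom are named <timestamp>.<classnumber>, and fileId()/sortFile() above order them with a Python 2 style comparison function. A key-based equivalent, sketched here with invented file names, sorts the same way and also works under Python 3:

import os

def file_id(fname):
	# numeric extension is the class number: "08nov22a.10" -> 10
	return int(os.path.splitext(fname)[1][1:])

files = ["08nov22a.10", "08nov22a.2", "08nov22a.0"]
files.sort(key=file_id)
print(files)   # ['08nov22a.0', '08nov22a.2', '08nov22a.10']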
kerdenSOM.py
#!/usr/bin/env python """ Kernel Probability Density Estimator Self-Organizing Map """ # python import re import os import sys import glob import time import numpy import shutil import subprocess # appion from appionlib import appionScript from appionlib import apXmipp from appionlib import apDisplay from appionlib import appiondata from appionlib import apEMAN from appionlib import apFile from appionlib import apProject from appionlib import apFourier from appionlib import apImagicFile from appionlib import apImage #====================== #====================== class kerdenSOMScript(appionScript.AppionScript): #====================== def setupParserOptions(self): self.parser.add_option("-a", "--alignid", dest="alignstackid", type="int", help="Alignment stack id", metavar="#") self.parser.add_option("-m", "--maskrad", dest="maskrad", type="float", help="Mask radius in Angstroms", metavar="#") self.parser.add_option("-x", "--xdim", dest="xdim", type="int", default=4, help="X dimension", metavar="#") self.parser.add_option("-y", "--ydim", dest="ydim", type="int", default=3, help="Y dimension", metavar="#") self.parser.add_option("--numpart", dest="numpart", type="int", help="Number of particles, default all in stack", metavar="#") self.convergemodes = ( "normal", "fast", "slow" ) self.parser.add_option("--converge", dest="converge", help="Convergence criteria mode", metavar="MODE", type="choice", choices=self.convergemodes, default="normal" ) #====================== def checkConflicts(self): if self.params['alignstackid'] is None: apDisplay.printError("Please enter an aligned stack id, e.g. --alignstackid=4") if self.params['numpart'] is None: alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) self.params['numpart'] = alignstackdata['num_particles'] if self.params['xdim'] > 16 or self.params['xdim'] > 16: apDisplay.printError("Dimensions must be less than 15") #====================== def setRunDir(self): self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) path = self.alignstackdata['path']['path'] uppath = os.path.abspath(os.path.join(path, "..")) self.params['rundir'] = os.path.join(uppath, self.params['runname']) #====================== def insertKerDenSOM(self, binned=None): ### Preliminary data projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid']) alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) numclass = self.params['xdim']*self.params['ydim'] pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir'])) ### KerDen SOM Params object kerdenq = appiondata.ApKerDenSOMParamsData() kerdenq['mask_diam'] = 2.0*self.params['maskrad'] kerdenq['x_dimension'] = self.params['xdim'] kerdenq['y_dimension'] = self.params['ydim'] kerdenq['convergence'] = self.params['converge'] kerdenq['run_seconds'] = time.time()-self.t0 ### Align Analysis Run object analysisq = appiondata.ApAlignAnalysisRunData() analysisq['runname'] = self.params['runname'] analysisq['path'] = pathdata analysisq['description'] = self.params['description'] analysisq['alignstack'] = alignstackdata analysisq['hidden'] = False ### linked through cluster not analysis #analysisq['kerdenparams'] = kerdenq ### Clustering Run object clusterrunq = appiondata.ApClusteringRunData() clusterrunq['runname'] = self.params['runname'] clusterrunq['description'] = self.params['description'] # what if we binned the aligned stack to get the new one if binned is None: boxsize = 
alignstackdata['boxsize'] pixelsize = alignstackdata['pixelsize'] else: boxsize = alignstackdata['boxsize'] / binned pixelsize = alignstackdata['pixelsize'] * binned clusterrunq['boxsize'] = boxsize clusterrunq['pixelsize'] = pixelsize clusterrunq['num_particles'] = self.params['numpart'] clusterrunq['alignstack'] = alignstackdata clusterrunq['analysisrun'] = analysisq clusterrunq['kerdenparams'] = kerdenq ### Clustering Stack object clusterstackq = appiondata.ApClusteringStackData() clusterstackq['avg_imagicfile'] = "kerdenstack"+self.timestamp+".hed" clusterstackq['num_classes'] = numclass clusterstackq['clusterrun'] = clusterrunq clusterstackq['path'] = pathdata clusterstackq['hidden'] = False imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile']) if not os.path.isfile(imagicfile): apDisplay.printError("could not find average stack file: "+imagicfile) ### looping over clusters apDisplay.printColor("Inserting particle classification data, please wait", "cyan") for i in range(numclass): classnum = i+1 classroot = "%s.%d"% (self.timestamp, classnum-1) classdocfile = os.path.join(self.params['rundir'], classroot) partlist = self.readClassDocFile(classdocfile) ### Clustering Particle object clusterrefq = appiondata.ApClusteringReferenceData() clusterrefq['refnum'] = classnum clusterrefq['avg_mrcfile'] = classroot+".mrc" clusterrefq['clusterrun'] = clusterrunq clusterrefq['path'] = pathdata clusterrefq['num_particles'] = len(partlist) clusterrefq['ssnr_resolution'] = self.cluster_resolution[i] ### looping over particles sys.stderr.write(".") for partnum in partlist: alignpartdata = self.getAlignParticleData(partnum, alignstackdata) ### Clustering Particle objects clusterpartq = appiondata.ApClusteringParticleData() clusterpartq['clusterstack'] = clusterstackq clusterpartq['alignparticle'] = alignpartdata clusterpartq['partnum'] = partnum clusterpartq['refnum'] = classnum clusterpartq['clusterreference'] = clusterrefq ### finally we can insert parameters if self.params['commit'] is True: clusterpartq.insert() #===================== def getAlignParticleData(self, partnum, alignstackdata): alignpartq = appiondata.ApAlignParticleData() alignpartq['alignstack'] = alignstackdata alignpartq['partnum'] = partnum alignparts = alignpartq.query(results=1) return alignparts[0] #===================== def readClassDocFile(self, docfile): if not os.path.isfile(docfile): return [] partlist = [] f = open(docfile, 'r') for line in f: sline = line.strip() if re.match("[0-9]+", sline): # numbers start at zero partnum = int(sline)+1 partlist.append(partnum) f.close() if not partlist: return [] partlist.sort() return partlist #====================== def runKerdenSOM(self, indata): """ From http://xmipp.cnb.csic.es/twiki/bin/view/Xmipp/KerDenSOM KerDenSOM stands for "Kernel Probability Density Estimator Self-Organizing Map". It maps a set of high dimensional input vectors into a two-dimensional grid. 
""" apDisplay.printMsg("Running KerDen SOM") outstamp = os.path.join(self.params['rundir'], self.timestamp) kerdencmd = ( "xmipp_classify_kerdensom -verb 1 -i %s -o %s -xdim %d -ydim %d -saveclusters "% (indata, outstamp, self.params['xdim'], self.params['ydim']) ) ### convergence criteria if self.params['converge'] == "fast": kerdencmd += " -eps 1e-5 " elif self.params['converge'] == "slow": kerdencmd += " -eps 1e-9 " else: kerdencmd += " -eps 1e-7 " apDisplay.printColor(kerdencmd, "cyan") proc = subprocess.Popen(kerdencmd, shell=True) proc.wait() time.sleep(1) return #====================== def fileId(self, fname): ext = os.path.splitext(fname)[1] num = int(ext[1:]) return num #====================== def sortFile(self, a, b): if self.fileId(a) > self.fileId(b): return 1 return -1 #====================== def
(self): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### create crappy files emancmd = ( "proc2d "+self.instack+" crap.mrc first=0 last=0 mask=1" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) emancmd = ( "proc2d crap.mrc crap.png" ) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) stackname = "kerdenstack"+self.timestamp+".hed" count = 0 numclass = self.params['xdim']*self.params['ydim'] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) #listname = self.timestamp+str(i) if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle emancmd = ( "proc2d crap.mrc "+stackname+" " ) sys.stderr.write("skipping "+listname+"\n") apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png shutil.copy("crap.png", listname+".png") else: ### average particles emancmd = ("proc2d %s %s list=%s average"% (self.instack, stackname, listname)) apEMAN.executeEmanCmd(emancmd, showcmd=True, verbose=False) ### create mrc emancmd = ("proc2d %s %s first=%d last=%d"% (stackname, listname+".mrc", count, count)) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### create png emancmd = ("proc2d %s %s"% (listname+".mrc", listname+".png")) apEMAN.executeEmanCmd(emancmd, showcmd=False, verbose=False) ### FIX ME: for now fill self.clsuter_sersolution with None, although it ### should be possible to calculate it if particle list exists like in createMontageInMemory self.cluster_resolution.append(None) montagecmd += listname+".png " count +=1 montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFile("crap.mrc") apFile.removeFile("crap.png") apFile.removeFilePattern(self.timestamp+".*.png") #====================== def readListFile(self, listfile): partlist = [] f = open(listfile, "r") for line in f: sline = line.strip() if re.match("[0-9]+$", sline): partnum = int(sline)+1 partlist.append(partnum) f.close() return partlist #====================== def createMontageInMemory(self, apix): self.cluster_resolution = [] apDisplay.printMsg("Converting files") ### Set binning of images boxsize = apImagicFile.getBoxsize(self.instack) bin = 1 while boxsize/bin > 200: bin+=1 binboxsize = boxsize/bin ### create averages files = glob.glob(self.timestamp+".[0-9]*") files.sort(self.sortFile) montage = [] montagepngs = [] i = 0 for listname in files: i += 1 apDisplay.printMsg("%d of %d classes"%(i,len(files))) pngfile = listname+".png" if not os.path.isfile(listname) or apFile.fileSize(listname) < 1: ### create a ghost particle sys.stderr.write("skipping "+listname+"\n") blank = numpy.ones((binboxsize, binboxsize), dtype=numpy.float32) ### add to montage stack montage.append(blank) self.cluster_resolution.append(None) ### create png apImage.arrayToPng(blank, pngfile) else: ### read particle list partlist = self.readListFile(listname) ### average particles partdatalist = apImagicFile.readParticleListFromStack(self.instack, partlist, boxsize, msg=False) partdataarray = numpy.asarray(partdatalist) finaldata = partdataarray.mean(0) if bin > 1: finaldata = apImage.binImg(finaldata, bin) ### add to montage stack montage.append(finaldata) res = apFourier.spectralSNR(partdatalist, apix) self.cluster_resolution.append(res) ### create png apImage.arrayToPng(finaldata, 
pngfile) ### check for png file if os.path.isfile(pngfile): montagepngs.append(pngfile) else: apDisplay.printError("failed to create montage") stackname = "kerdenstack"+self.timestamp+".hed" apImagicFile.writeImagic(montage, stackname) ### create montage montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim'])) for monpng in montagepngs: montagecmd += monpng+" " montagecmd += "montage.png" apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False) time.sleep(1) apFile.removeFilePattern(self.timestamp+".*.png") return bin #====================== def start(self): aligndata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid']) boxsize = aligndata['boxsize'] apix = aligndata['pixelsize'] maskpixrad = self.params['maskrad']/apix if maskpixrad*2 > boxsize-2: apDisplay.printError("Mask radius is too big for boxsize: %d > %d"%(maskpixrad*2,boxsize-2)) apDisplay.printMsg("Mask radius and boxsize: %.1f < %d"%(maskpixrad*2,boxsize-2)) self.instack = os.path.join(aligndata['path']['path'], aligndata['imagicfile']) outdata = "stack.data" apXmipp.convertStackToXmippData(self.instack, outdata, maskpixrad, boxsize, numpart=self.params['numpart']-1) self.runKerdenSOM(outdata) if apFile.stackSize(self.instack) > 3.0*(1024**3): # Big stacks use eman self.createMontageByEMAN() binned = None else: binned = self.createMontageInMemory(apix) self.insertKerDenSOM(binned=binned) apFile.removeFile(outdata) apFile.removeFilePattern("*.cod") #====================== #====================== if __name__ == '__main__': kerdenSOM = kerdenSOMScript() kerdenSOM.start() kerdenSOM.close()
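createMontageInMemory() above picks the smallest integer binning factor that brings the box size down to 200 pixels or less, relying on Python 2 integer division. A short worked sketch of that loop with an assumed 512 pixel box:

boxsize = 512                  # assumed box size in pixels
bin = 1
while boxsize // bin > 200:    # // mirrors the Python 2 behaviour of boxsize/bin
	bin += 1
binboxsize = boxsize // bin
print(bin, binboxsize)         # 3 170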
createMontageByEMAN
identifier_name
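readClassDocFile() and readListFile() above both read one zero-based particle index per line and return sorted one-based particle numbers. A minimal sketch of the same conversion, using an in-memory list in place of a real class file:

def read_class_indices(lines):
	# zero-based indices on disk -> sorted one-based particle numbers
	stripped = (line.strip() for line in lines)
	return sorted(int(s) + 1 for s in stripped if s.isdigit())

print(read_class_indices(["4", "0", "17"]))   # [1, 5, 18]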
tarjetas.js
/*--- PROPIEDADES ---*/ var listaTarjetas = []; var tarjetaActual = null; var existeFoto = false; var existeSonido = false; var fondoActual = 1; // Esta variable contiene el id del fondo que está actualmente mostrándose en el formulario de nueva tarjeta var mostrarFavoritas = false; // Indica si se deben mostrar solamente las tarjetas favoritas o no var traduccionSugerida = ""; // Guarda la traducción que se ha encontrado para la palabra actual var tarjetaEnEdicion = false; // Indica si se ha pulsado sobre el botón de edición de la tarjeta var valorAnteriorTitulo=""; var anchoiPhone3 = 80; var anchoiPhone4 = 160; var anchoTablet = 230; var estadoServidor =false; //Guarda el estado del servidor de traduccion, si es false el servidor no esta disponible /*--- MÉTODOS ---*/ /** * RepresentarListaTarjetas. Realiza la maquetación de la lista de tarjetas que corresponden con la categoría actual. * * @param categoria categoría actual * @param favoritas indica si solamente se deben mostrar las favoritas o no */ function RepresentarListaTarjetas(categoria, favoritas){ $('#lblListaTarjetas').html(""); var texto = ""; var letra = ""; var contador = 0; var listaImagenesACargar = []; if (favoritas) $('#h1NombreCategoria').html(res_Favoritos) mostrarFavoritas = favoritas; if (activarPhoneGap){ switch(tipoDispositivo){ case "iPhone3": ancho = anchoiPhone3; break; case "iPhone4": ancho = anchoiPhone4; break; case "tablet": ancho = anchoTablet; break; } } else { ancho = anchoTablet; } var columna =1; $.each(listaTarjetas, function(i, item) { console.log("Comprobamos esta tarjeta para añadirla a la categoría ("+categoria.id+"): "+item.id+" con la categoria: "+item.categoria); if ( ( (favoritas) && (item.favorita == 1) ) || ( (!favoritas) && (item.categoria == categoria.id) ) ) { // Maquetación de la tabla que llevará cada una de las imágenes relacionadas con la tarjeta if(columna ==1){ texto+="<tr>"; } texto += "<td><div class='contenedorImg'><a href=\'javascript:;\' onClick=\'CargarTarjeta(event," + item.id + ", true)\' onTouchStart=\'(event," + item.id + ")\'><div class= 'divImgTarjeta conSombra'><img id=\'img" + item.id + "\' src=\'img/imagen_no_disponible_230.jpg\' />" + "</div></a></div></td>"; columna++; if (columna ==4){ texto+="</tr>"; columna=1; } listaImagenesACargar.push(item); contador += 1; } }); // Actualización del grid de imágenes //console.log("Este es el texto: " + texto); $('#lblListaTarjetas').html(texto); //ObtenerTarjetasPorCategoria(categoria.id); if (tarjetasPorCategoria.length <=2){ var altoPag =parseFloat($('#PaginaDetalleCategoria').height()); var altoImagen = altoPag-(altoPag*0.4); //navigator.notification.alert("El alto de la pagina es de: "+altoPag+"+px el alto de la imagen es: "+altoImagen+"px"); $('.contenedorImg img').css('max-height',altoImagen.toString()+"px"); } // Una vez que se haya cargado la lista de imágenes, hay que cargar sus rutas $.each(listaImagenesACargar, function(i, item){ CargarFoto("img" + item.id, item.foto); }); } /** * CargarFoto. Intenta cargar la foto pasada como parámetro. Si lo consigue, la redimensiona para que se muestre * correctamente en la lista de tarjetas. Si no consigue cargarla, deja la imagen que esta en el identificador correspondiente. 
* * @param identificador id de la imagen donde cargará la foto * @param rutaFoto ruta en el dispositivo donde se encuentra la foto * @param anchoFoto ancho en pixels de la foto original * @param altoFoto alto en pixels de la foto original */ function CargarFoto(identificador, rutaFoto){ if (activarPhoneGap) { // Solamente se comprueba si exista la fotografía en el caso de que está activado el PhoneGap. if ($.trim(rutaFoto).length > 0) { // Se comprueba que no es la imagen por defecto if (rutaFoto.indexOf('img/imagen_no_disponible') >= 0) { //console.log("La foto a cargar es la de por defecto"); } else { window.resolveLocalFileSystemURI(rutaFoto, function(fileEntry){ $("#" + identificador).attr("src", rutaFoto).on('load', function(){ /* if (anchoFoto < altoFoto){ switch(tipoDispositivo){ case "iPhone3": alto = ((altoFoto * anchoiPhone3) / anchoFoto).toFixed(0); ancho = anchoiPhone3; break; case "iPhone4": alto = ((altoFoto * anchoiPhone4) / anchoFoto).toFixed(0); ancho = anchoiPhone4; break; case "tablet": alto = ((altoFoto * anchoTablet) / anchoFoto).toFixed(0); ancho = anchoTablet; break; } // En el caso de que la altura sea mayor que el ancho, hay que desplazar la imagen para que quede centrada // en altura //$("#" + identificador).css("position", "relative").css("top", "-" + ((alto - ancho) / 2).toFixed(0).toString() + "px"); } else { switch(tipoDispositivo){ case "iPhone3": ancho = ((anchoFoto * anchoiPhone3) / altoFoto).toFixed(0); alto = anchoiPhone3; break; case "iPhone4": ancho = ((anchoFoto * anchoiPhone4) / altoFoto).toFixed(0); alto = anchoiPhone4; break; case "tablet": ancho = ((anchoFoto * anchoTablet) / altoFoto).toFixed(0); alto = anchoTablet; break; } // En el caso de que la anchura sea mayor que la altura, hay que desplazar la imagen para que quede // centrada en anchura $('#' + identificador).css("position", "relative").css("left", "-" + ((ancho - alto)/2).toFixed(0).toString() + "px"); } $('#' + identificador).attr("width", ancho); $('#' + identificador).attr("height", alto); */ //console.log("Ancho: " + anchoFoto + ", alto: " + altoFoto); }); }, function(error){ console.log("Ha fallado la carga del archivo " + rutaFoto); }); } } } } /** * NuevaTarjeta. Inserta una nueva tarjeta con los datos pasados como parámetros. * * @param categoria identificador de la categoría a la que pertenece la tarjeta * @param titulo1 título 1 de la tarjeta * @param titulo2 título 2 de la tarjeta * @param fondo nombre de la imagen del fondo de la tarjeta * @param foto nombre de la imagen principal de la tarjeta * @param sonido nombre del sonido de la tarjeta * @param ancho ancho en pixels de la foto * @param alto alto en pixels de la imagen * @param fuente tipografía asociada a la tarjeta * @param tamanioFuente tamaño en pixels de la fuente utilizada */ function NuevaTarjeta(categoria, titulo1, titulo2, fondo, foto, sonido, ancho, alto, fuente){ var maxId = 0; //console.log("llego a NuevaTarjeta"); try{ // obtención del último identificador utilizado $.each(listaTarjetas, function(i, item){ if (item.id > maxId) { maxId = item.id; } }); // Inserción de la tarjeta en la lista de tarjetas actuales (para la categoría actual) ... listaTarjetas.push({ 'id': (maxId+1), 'categoria': categoria, 'titulo1': titulo1, 'titulo2': titulo2, 'fondo': fondo, 'foto': foto, 'sonido': sonido, 'favorita': 0, 'anchoFoto': ancho, 'altoFoto': alto, 'fuente':fuente, 'idiomaA':idiomaSecundario.toLowerCase(), 'idiomaDe':idiomaPrincipal.toLowerCase() }); // ... 
e inserción de la tarjeta en la base de datos var sql = "insert into Tarjetas(id, categoria, titulo1, titulo2, fondo, foto, sonido, favorita, anchoFoto, altoFoto, fuente, idiomaA, idiomaDe ) values(" + (maxId+1) + "," + categoria + ",\'" + titulo1 + "\',\'" + titulo2 + "\',\'" + fondo + "\',\'" + foto + "\',\'" + sonido + "\',0," + ancho + "," + alto + ",'" + fuente + "','"+idiomaSecundario.toLowerCase()+"','"+idiomaPrincipal.toLowerCase()+"')"; console.log("El fondo es el numero: "+fondo); console.log("Creamos una nueva tarjeta, SQL: "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); //Refrescamos el array con las tarjetas de la categoria actual //console.log("la categoria es: "+categoria); //console.log("Llamamos a obtenertarjetasporcategoria"); ObtenerTarjetasPorCategoria(categoria); //console.log("Llamamos a reoresentarlistatarrjetas con el parametro categoriaActual = "+categoria); // ... y actualización de la lista de categorías RepresentarCategorias(); //Metemos el id de la categoría en un objeto debido a que RepresentarlistaTarjetas recibe un objeto y no un entero var obj = new Object(); obj.id=categoria // Actualización de la visualización de la lista de tarjetas ... RepresentarListaTarjetas(obj,false); tarjetaActual = null; } catch(e){ console.log(e); } } /** * SeleccionarTarjetaPorId. Devuelve la tarjeta cuyo identificador es pasado como parámetro. * * @param id identificador de la tarjeta * @result Tarjeta que corresponde con el identificador pasado. */ function SeleccionarTarjetaPorId(id){ var resultado = null; $.each(listaTarjetas, function(i, item){ if (item.id == id) { resultado = item; } }); return resultado; } /** * ContarTarjetasPorCategoria. Devuelve el número de tarjetas que están relacionadas con la categoría * pasada como parámetro. * * @param categoria identificador de la categoría * @result número de tarjetas relacionadas con la categoría */ function ContarTarjetasPorCategoria(categoria){ var resultado = 0; try{ $.each(listaTarjetas, function(i, item){ if (item.categoria == categoria){ resultado += 1; } }); } catch(e){ console.log(e.message); } return resultado; } /** * CargarTarjeta. Prepara todos los componentes de la página donde se muestra la tarjeta. Carga la tarjeta cuyo identificador * es pasado como parámetro. 
* * @param event Evento que se dispara al llamar a esta función * @param id identificador de la tarjeta * @param cambiarPagina booleano que indica si se debe cambiar a la página de 'PaginaDetalleTarjeta' */ function CargarTarjeta(event, id, cambiarPagina){ tarjetaActual = SeleccionarTarjetaPorId(id); //console.log("Foto actual: " + tarjetaActual.foto); /* switch(tipoDispositivo){ case 'iPhone3': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr("width", "250px"); break; case 'iPhone4': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '580px'); break; case 'tablet': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '700px'); break; } */ //Eliminamos las clases antiguas $('#lblTituloTarjeta').removeClass(); $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto); $('#lblTituloTarjeta').html(tarjetaActual.titulo1).addClass('fuente-' + tarjetaActual.fuente); // Fondo try{ for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); $('#PaginaDetalleTarjeta').addClass('fondo'+tarjetaActual.fondo); /* console.log("Fondo actual: " + tarjetaActual.fondo); console.log("EL alto de la imagen es: "+$('#imgGrandeTarjeta').css("height")); console.log("EL ancho de la imagen es: "+$('#imgGrandeTarjeta').css("width")); */ } catch (e){ console.log(e.message); } if (tarjetaActual.favorita == 1) { $('#btnCambiarTarjetaFavorita').addClass("ui-btn-favorito"); /* TODO actualizar la tarjeta en la base de datos */ } else { $('#btnCambiarTarjetaFavorita').removeClass("ui-btn-favorito"); } // Se carga la página con la tarjeta if (cambiarPagina){ $.mobile.changePage($('#PaginaDetalleTarjeta')); } PararEvento(event); } function ReversoTarjeta(event){ //console.log("Entra en ReversoTarjeta"); try{ //Eliminamos las clases antiguas $('#lblTituloTarjetaReverso').removeClass(); for(i=1;i<=15;i++){ $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //Añadimos las clases nuevas $('#imgGrandeTarjetaReverso').attr('src', tarjetaActual.foto); $('#lblTituloTarjetaReverso').html(tarjetaActual.titulo2).addClass('fuente-' + tarjetaActual.fuente); $('#PaginaReversoTarjeta').addClass('fondo'+tarjetaActual.fondo); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); // Se cambia a la página del reverso $.mobile.changePage($('#PaginaReversoTarjeta'), {transition: 'flip',changeHash:'false'}); ReproducirSonidoEstatico(); PararEvento(event); } catch (e){ console.log(e.message); } } /** * ActualizarTarjeta. 
Actualiza los datos de la tarjeta actual, con los datos pasados a la función * *@param tarjeta Datos de la tarjeta actualizada */ function ActualizarTarjeta(event, tarjeta){ var listaTemp = []; var datosAntiguos; $.each(listaTarjetas, function(i, item) { if (tarjeta.id == item.id) { datosAntiguos = item; listaTemp.push(tarjeta); } else { listaTemp.push(item); } }); listaTarjetas = listaTemp; // Actualización en la base de datos var sql = "UPDATE Tarjetas SET titulo1='" + $.trim(tarjeta.titulo1) + "', titulo2='" + $.trim(tarjeta.titulo2) + "', fondo='" + $.trim(tarjeta.fondo) + "', foto='" + $.trim(tarjeta.foto) + "', sonido='" + $.trim(tarjeta.sonido) + "', favorita=" + tarjeta.favorita + ", anchoFoto=" + tarjeta.anchoFoto + ", altoFoto=" + tarjeta.altoFoto + ", fuente='" + tarjeta.fuente + "', idiomaA='" +idiomaSecundario.toLowerCase() + "', idiomaDe='" +idiomaPrincipal.toLowerCase() + "' WHERE id=" + tarjeta.id; console.log("Actualizamos una tarjeta--> "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // Eliminación de las clases de los elementos antiguos (fondo, fuente, tamaño de la fuente) for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); $('#lblTituloTarjeta').removeClass(); $('#lblTituloTarjetaReverso').removeClass(); // Actualizar los datos de la tarjeta actual, con los nuevos datos de la tarjeta actualizada CargarTarjeta(event, tarjeta.id, false); } /** * ComprobarEliminarTarjeta. Comprueba si el usuario ha seleccionado la eliminación de la tarjeta actual. */ function ComprobarEliminarTarjeta(event){ if (parseInt(event) == 1){ EliminaTarjetaActual(event) } } /** * EliminaTarjetaActual. Realiza la eliminación de la tarjeta actualmente seleccionada, tanto de la lista de tarjetas como de la base de datos. */ function EliminaTarjetaActual(event){ var listaTemp = []; try{ // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.id != tarjetaActual.id) {! listaTemp.push(item); } }); listaTarjetas = listaTemp; // Eliminación de la tarjeta de la BD var sql = "delete from Tarjetas where id=" + tarjetaActual.id; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // ... actualización de la lista de tarjetas ... RepresentarListaTarjetas(categoriaActual); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual history.back(); } catch (e){ console.log("Error en EliminarTarjetaActual: " + e.message); } } function EliminarTarjetasPorCategoria(categoria){ var listaTemp = []; // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.categoria != categoria) { listaTemp.push(item); } }); listaTarjetas = listaTemp; var sql = "delete from Tarjetas where categoria=" + categoria; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual $.mobile.changePage($('#PaginaCategorias')); } /** * EliminarListaTarjetas. Vacía completamente la lista de las tarjetas. 
*/ function EliminarListaTarjetas(){ listaTarjetas = []; } function LimpiarTraduccion(){ $('#pnlResultadoTraduccion').removeClass("in"); } /* * Obtiene un Token de acceso al servidor de Microsoft Translate a través del servicio web */ function getAccessToken(){ //Editado--> Pedro var urlObtenerAccesToken = 'http://www.bubblewords.info/WSTraducciones/GetAccessToken.asmx/getToken'; $.ajax({ url: urlObtenerAccesToken + '?callback=?', type: "GET", dataType: 'jsonp', success: function(data){ console.log("AccessToken Recibido"); accessToken=data[0]; estadoServidor=true; //navigator.notification.confirm("Hemos obtenido el token de acceso: "+accessToken) if (intervaloSinConexion){ clearInterval(intervaloSinConexion); intervaloSinConexion=undefined; console.warn("El servidor esta disponible, cambiamos o establecemos el intervalo a 9 minutos"); intervaloNormal = setInterval(getAccessToken, 9 * 60 * 1000); } hayConexion=true; }, timeout:5000, error: function(x, t, m) { console.log("AccessToken No recibido"); if (hayConexion ==true){ /* *En caso de que se tenga conexion de red, pero no sea accesible el servicio web que nos devuelve el token de acceso *solicitamos un token de acceso cada 30 segundos, hasta que el servidor responda, en cuyo caso se para el intervalo *sin conexión y comienza el intervalo normal de 9 minutos */ if (intervaloNormal){ clearInterval(intervaloNormal); intervaloNormal=undefined; console.warn("El servidor no esta disponible, cambiamos el intervalo a 30 segundos"); intervaloSinConexion = setInterval(getAccessToken, 30 * 1000); } if(t==="timeout") { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } else { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible+" Er
ne la traducción de un texto proporcionandole un idioma de origen y destino */ function TraduccionSugerida(event){ console.log("Hay conexion = "+hayConexion); console.log("Se ha traducido "+numTraducciones+" veces"); if (valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && (!liteVersion || (liteVersion && numTraducciones < 5))){ var texto=$('#inputTituloTarjeta').attr('value'); var destino =$('#lstIdiomaSecundario').attr('value'); var origen = $('#lstIdiomaPrincipal').attr('value'); if(origen != destino){ console.log("traduccion> de: "+origen+" destino "+destino+" palabra "+texto); var p = new Object; p.text = texto; p.from = origen; p.to = destino; p.oncomplete = 'ajaxTranslateCallback'; p.appId = "Bearer " + accessToken; var requestStr = "http://api.microsofttranslator.com/V2/Ajax.svc/Translate"; //navigator.notification.confirm("Solicitamos la traduccion: texto="+p.text+" origen="+p.from+" destino="+p.to+" token="+p.appId); $.ajax({ url: requestStr, type: "GET", data: p, dataType: 'jsonp', cache: true, }); } valorAnteriorTitulo=texto; PararEvento(event); }else if(valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && numTraducciones >=maxTraducciones && alertMostrado==false){ console.log("entramos por el mensaje de limitacion de trad."); $('#inputTitulo2Tarjeta').focus(); mensajeActualizar(res_lite_traducciones); alertMostrado=true; }else{ console.log("entramos por el ultimo else"); /* * En caso de que no haya conexion o que la palabra sea la misma que la anterior, * establecemos el foco en el segundo titulo. */ $('#inputTitulo2Tarjeta').focus(); } } /* * Callback que controla la respuesta del servidor de Microsoft Translator */ function ajaxTranslateCallback(response) { if (response.length > 0) { traduccionSugerida = response; //navigator.notification.confirm("La traduccion se ha recibido con exito: "+traduccionSugerida); $('#lblTraduccionObtenida').html(response.toString()); $('#pnlResultadoTraduccion').addClass("in").css('zIndex', 300); $('.tooltip-inner').textfill({maxFontPixels: 200, minFontPixels:4}); if (liteVersion){ numTraducciones++; } } } /** * AplicarTraduccion. Se ha pulsado sobre la opción de aplicar la traducción sugerida. */ function AplicarTraduccion(event){ $('#inputTitulo2Tarjeta').attr('value', traduccionSugerida); $('#pnlResultadoTraduccion').css('zIndex', -200); LimpiarTraduccion(); PararEvento(event); } /** * SeleccionarFondoTarjeta. Se ha seleccionado un fondo para la tarjeta que se desea crear. Cambia * la imagen del formulario 'Nueva tarjeta' y guarda el fondo actualmente seleccionado. * * @param numero número del fondo elegido * @param tipoDispositivo indica el tipo de dispositivo */ function SeleccionarFondoTarjeta(event, numero, tipoDispositivo, debeVolver){ //console.log("Fondo seleccionado: " + numero.toString() + ", tipo dispositivo: " + tipoDispositivo); try{ fondoActual = numero; $('#imgFondoTarjeta').attr('src', 'img/texturas/muestras/textura' + numero + '.jpg') // Mostrar y ocultar capas $('#pnlMostrarImagenFondo').addClass("in").show(); //console.log("SeleccionarFontoTarjeta. DebeVolver: " + debeVolver); if (debeVolver){ Volver(event); } } catch(e){ console.log(e.message); } } /** * MostrarImagenDeGaleria. Se quiere mostrar la imagen que ha seleccionado el usuario desde la galería. */ function MostrarImagenDeGaleria(imageData, tipoDispositivo){ var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; //console.log("MostrarImagenDeGaleria. 
Ancho: " + anchoFoto + ", alto: " + altoFoto); // Ocultar y mostrar capas $('#pnlMostrarTextoFotoGaleria').removeClass("in").hide(); $("#imgPrincipalTarjetaGaleria").attr("src", imageData); $('#pnlMostrarImagenGaleria').addClass("in").show(); $('#pnlMostrarImagenCamara').removeClass("in").hide(); $('#pnlMostrarTextoFotoCamara').addClass("in").show(); // Quitar cualquier foto de la cámara anterior $('#imgPrincipalTarjetaCamara').attr('src',''); } /** * MostrarImagenDeCamara. Se quiere mostrar la imagen que se ha tomado desde la cámara. */ function MostrarImagenDeCamara(imageData, tipoDispositivo){ console.log("Establecemos la imagen: "+imageData); var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; // Ocultar y mostrar capas $('#pnlMostrarTextoFotoCamara').removeClass("in").hide(); $("#imgPrincipalTarjetaCamara").attr("src", imageData); $('#pnlMostrarImagenCamara').addClass("in").show(); $('#pnlMostrarImagenGaleria').removeClass("in").hide(); $('#pnlMostrarTextoFotoGaleria').addClass("in").show(); // Quitar cualquier foto anterior de la galería $('#imgPrincipalTarjetaGaleria').attr('src',''); }
ror: "+t,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } } } }); } /* * Obtie
conditional_block
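TraduccionSugerida() in tarjetas.js above sends a JSONP request to the Microsoft Translator V2 Ajax endpoint, passing text, from, to, oncomplete and an appId of the form "Bearer <token>". A minimal Python sketch that builds the same request URL from those parameters (the token and words are placeholders, and the V2 service has since been retired, so this is illustrative only):

try:
	from urllib.parse import urlencode   # Python 3
except ImportError:
	from urllib import urlencode         # Python 2

params = {
	"appId": "Bearer " + "ACCESS_TOKEN_PLACEHOLDER",
	"text": "casa",
	"from": "es",
	"to": "en",
	"oncomplete": "ajaxTranslateCallback",
}
url = "http://api.microsofttranslator.com/V2/Ajax.svc/Translate?" + urlencode(params)
print(url)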
tarjetas.js
/*--- PROPIEDADES ---*/ var listaTarjetas = []; var tarjetaActual = null; var existeFoto = false; var existeSonido = false; var fondoActual = 1; // Esta variable contiene el id del fondo que está actualmente mostrándose en el formulario de nueva tarjeta var mostrarFavoritas = false; // Indica si se deben mostrar solamente las tarjetas favoritas o no var traduccionSugerida = ""; // Guarda la traducción que se ha encontrado para la palabra actual var tarjetaEnEdicion = false; // Indica si se ha pulsado sobre el botón de edición de la tarjeta var valorAnteriorTitulo=""; var anchoiPhone3 = 80; var anchoiPhone4 = 160; var anchoTablet = 230; var estadoServidor =false; //Guarda el estado del servidor de traduccion, si es false el servidor no esta disponible /*--- MÉTODOS ---*/ /** * RepresentarListaTarjetas. Realiza la maquetación de la lista de tarjetas que corresponden con la categoría actual. * * @param categoria categoría actual * @param favoritas indica si solamente se deben mostrar las favoritas o no */ function RepresentarListaTarjetas(categoria, favoritas){ $('#lb
to. Intenta cargar la foto pasada como parámetro. Si lo consigue, la redimensiona para que se muestre * correctamente en la lista de tarjetas. Si no consigue cargarla, deja la imagen que esta en el identificador correspondiente. * * @param identificador id de la imagen donde cargará la foto * @param rutaFoto ruta en el dispositivo donde se encuentra la foto * @param anchoFoto ancho en pixels de la foto original * @param altoFoto alto en pixels de la foto original */ function CargarFoto(identificador, rutaFoto){ if (activarPhoneGap) { // Solamente se comprueba si exista la fotografía en el caso de que está activado el PhoneGap. if ($.trim(rutaFoto).length > 0) { // Se comprueba que no es la imagen por defecto if (rutaFoto.indexOf('img/imagen_no_disponible') >= 0) { //console.log("La foto a cargar es la de por defecto"); } else { window.resolveLocalFileSystemURI(rutaFoto, function(fileEntry){ $("#" + identificador).attr("src", rutaFoto).on('load', function(){ /* if (anchoFoto < altoFoto){ switch(tipoDispositivo){ case "iPhone3": alto = ((altoFoto * anchoiPhone3) / anchoFoto).toFixed(0); ancho = anchoiPhone3; break; case "iPhone4": alto = ((altoFoto * anchoiPhone4) / anchoFoto).toFixed(0); ancho = anchoiPhone4; break; case "tablet": alto = ((altoFoto * anchoTablet) / anchoFoto).toFixed(0); ancho = anchoTablet; break; } // En el caso de que la altura sea mayor que el ancho, hay que desplazar la imagen para que quede centrada // en altura //$("#" + identificador).css("position", "relative").css("top", "-" + ((alto - ancho) / 2).toFixed(0).toString() + "px"); } else { switch(tipoDispositivo){ case "iPhone3": ancho = ((anchoFoto * anchoiPhone3) / altoFoto).toFixed(0); alto = anchoiPhone3; break; case "iPhone4": ancho = ((anchoFoto * anchoiPhone4) / altoFoto).toFixed(0); alto = anchoiPhone4; break; case "tablet": ancho = ((anchoFoto * anchoTablet) / altoFoto).toFixed(0); alto = anchoTablet; break; } // En el caso de que la anchura sea mayor que la altura, hay que desplazar la imagen para que quede // centrada en anchura $('#' + identificador).css("position", "relative").css("left", "-" + ((ancho - alto)/2).toFixed(0).toString() + "px"); } $('#' + identificador).attr("width", ancho); $('#' + identificador).attr("height", alto); */ //console.log("Ancho: " + anchoFoto + ", alto: " + altoFoto); }); }, function(error){ console.log("Ha fallado la carga del archivo " + rutaFoto); }); } } } } /** * NuevaTarjeta. Inserta una nueva tarjeta con los datos pasados como parámetros. * * @param categoria identificador de la categoría a la que pertenece la tarjeta * @param titulo1 título 1 de la tarjeta * @param titulo2 título 2 de la tarjeta * @param fondo nombre de la imagen del fondo de la tarjeta * @param foto nombre de la imagen principal de la tarjeta * @param sonido nombre del sonido de la tarjeta * @param ancho ancho en pixels de la foto * @param alto alto en pixels de la imagen * @param fuente tipografía asociada a la tarjeta * @param tamanioFuente tamaño en pixels de la fuente utilizada */ function NuevaTarjeta(categoria, titulo1, titulo2, fondo, foto, sonido, ancho, alto, fuente){ var maxId = 0; //console.log("llego a NuevaTarjeta"); try{ // obtención del último identificador utilizado $.each(listaTarjetas, function(i, item){ if (item.id > maxId) { maxId = item.id; } }); // Inserción de la tarjeta en la lista de tarjetas actuales (para la categoría actual) ... 
listaTarjetas.push({ 'id': (maxId+1), 'categoria': categoria, 'titulo1': titulo1, 'titulo2': titulo2, 'fondo': fondo, 'foto': foto, 'sonido': sonido, 'favorita': 0, 'anchoFoto': ancho, 'altoFoto': alto, 'fuente':fuente, 'idiomaA':idiomaSecundario.toLowerCase(), 'idiomaDe':idiomaPrincipal.toLowerCase() }); // ... e inserción de la tarjeta en la base de datos var sql = "insert into Tarjetas(id, categoria, titulo1, titulo2, fondo, foto, sonido, favorita, anchoFoto, altoFoto, fuente, idiomaA, idiomaDe ) values(" + (maxId+1) + "," + categoria + ",\'" + titulo1 + "\',\'" + titulo2 + "\',\'" + fondo + "\',\'" + foto + "\',\'" + sonido + "\',0," + ancho + "," + alto + ",'" + fuente + "','"+idiomaSecundario.toLowerCase()+"','"+idiomaPrincipal.toLowerCase()+"')"; console.log("El fondo es el numero: "+fondo); console.log("Creamos una nueva tarjeta, SQL: "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); //Refrescamos el array con las tarjetas de la categoria actual //console.log("la categoria es: "+categoria); //console.log("Llamamos a obtenertarjetasporcategoria"); ObtenerTarjetasPorCategoria(categoria); //console.log("Llamamos a reoresentarlistatarrjetas con el parametro categoriaActual = "+categoria); // ... y actualización de la lista de categorías RepresentarCategorias(); //Metemos el id de la categoría en un objeto debido a que RepresentarlistaTarjetas recibe un objeto y no un entero var obj = new Object(); obj.id=categoria // Actualización de la visualización de la lista de tarjetas ... RepresentarListaTarjetas(obj,false); tarjetaActual = null; } catch(e){ console.log(e); } } /** * SeleccionarTarjetaPorId. Devuelve la tarjeta cuyo identificador es pasado como parámetro. * * @param id identificador de la tarjeta * @result Tarjeta que corresponde con el identificador pasado. */ function SeleccionarTarjetaPorId(id){ var resultado = null; $.each(listaTarjetas, function(i, item){ if (item.id == id) { resultado = item; } }); return resultado; } /** * ContarTarjetasPorCategoria. Devuelve el número de tarjetas que están relacionadas con la categoría * pasada como parámetro. * * @param categoria identificador de la categoría * @result número de tarjetas relacionadas con la categoría */ function ContarTarjetasPorCategoria(categoria){ var resultado = 0; try{ $.each(listaTarjetas, function(i, item){ if (item.categoria == categoria){ resultado += 1; } }); } catch(e){ console.log(e.message); } return resultado; } /** * CargarTarjeta. Prepara todos los componentes de la página donde se muestra la tarjeta. Carga la tarjeta cuyo identificador * es pasado como parámetro. 
* * @param event Evento que se dispara al llamar a esta función * @param id identificador de la tarjeta * @param cambiarPagina booleano que indica si se debe cambiar a la página de 'PaginaDetalleTarjeta' */ function CargarTarjeta(event, id, cambiarPagina){ tarjetaActual = SeleccionarTarjetaPorId(id); //console.log("Foto actual: " + tarjetaActual.foto); /* switch(tipoDispositivo){ case 'iPhone3': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr("width", "250px"); break; case 'iPhone4': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '580px'); break; case 'tablet': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '700px'); break; } */ //Eliminamos las clases antiguas $('#lblTituloTarjeta').removeClass(); $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto); $('#lblTituloTarjeta').html(tarjetaActual.titulo1).addClass('fuente-' + tarjetaActual.fuente); // Fondo try{ for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); $('#PaginaDetalleTarjeta').addClass('fondo'+tarjetaActual.fondo); /* console.log("Fondo actual: " + tarjetaActual.fondo); console.log("EL alto de la imagen es: "+$('#imgGrandeTarjeta').css("height")); console.log("EL ancho de la imagen es: "+$('#imgGrandeTarjeta').css("width")); */ } catch (e){ console.log(e.message); } if (tarjetaActual.favorita == 1) { $('#btnCambiarTarjetaFavorita').addClass("ui-btn-favorito"); /* TODO actualizar la tarjeta en la base de datos */ } else { $('#btnCambiarTarjetaFavorita').removeClass("ui-btn-favorito"); } // Se carga la página con la tarjeta if (cambiarPagina){ $.mobile.changePage($('#PaginaDetalleTarjeta')); } PararEvento(event); } function ReversoTarjeta(event){ //console.log("Entra en ReversoTarjeta"); try{ //Eliminamos las clases antiguas $('#lblTituloTarjetaReverso').removeClass(); for(i=1;i<=15;i++){ $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //Añadimos las clases nuevas $('#imgGrandeTarjetaReverso').attr('src', tarjetaActual.foto); $('#lblTituloTarjetaReverso').html(tarjetaActual.titulo2).addClass('fuente-' + tarjetaActual.fuente); $('#PaginaReversoTarjeta').addClass('fondo'+tarjetaActual.fondo); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); // Se cambia a la página del reverso $.mobile.changePage($('#PaginaReversoTarjeta'), {transition: 'flip',changeHash:'false'}); ReproducirSonidoEstatico(); PararEvento(event); } catch (e){ console.log(e.message); } } /** * ActualizarTarjeta. 
Actualiza los datos de la tarjeta actual, con los datos pasados a la función * *@param tarjeta Datos de la tarjeta actualizada */ function ActualizarTarjeta(event, tarjeta){ var listaTemp = []; var datosAntiguos; $.each(listaTarjetas, function(i, item) { if (tarjeta.id == item.id) { datosAntiguos = item; listaTemp.push(tarjeta); } else { listaTemp.push(item); } }); listaTarjetas = listaTemp; // Actualización en la base de datos var sql = "UPDATE Tarjetas SET titulo1='" + $.trim(tarjeta.titulo1) + "', titulo2='" + $.trim(tarjeta.titulo2) + "', fondo='" + $.trim(tarjeta.fondo) + "', foto='" + $.trim(tarjeta.foto) + "', sonido='" + $.trim(tarjeta.sonido) + "', favorita=" + tarjeta.favorita + ", anchoFoto=" + tarjeta.anchoFoto + ", altoFoto=" + tarjeta.altoFoto + ", fuente='" + tarjeta.fuente + "', idiomaA='" +idiomaSecundario.toLowerCase() + "', idiomaDe='" +idiomaPrincipal.toLowerCase() + "' WHERE id=" + tarjeta.id; console.log("Actualizamos una tarjeta--> "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // Eliminación de las clases de los elementos antiguos (fondo, fuente, tamaño de la fuente) for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); $('#lblTituloTarjeta').removeClass(); $('#lblTituloTarjetaReverso').removeClass(); // Actualizar los datos de la tarjeta actual, con los nuevos datos de la tarjeta actualizada CargarTarjeta(event, tarjeta.id, false); } /** * ComprobarEliminarTarjeta. Comprueba si el usuario ha seleccionado la eliminación de la tarjeta actual. */ function ComprobarEliminarTarjeta(event){ if (parseInt(event) == 1){ EliminaTarjetaActual(event) } } /** * EliminaTarjetaActual. Realiza la eliminación de la tarjeta actualmente seleccionada, tanto de la lista de tarjetas como de la base de datos. */ function EliminaTarjetaActual(event){ var listaTemp = []; try{ // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.id != tarjetaActual.id) {! listaTemp.push(item); } }); listaTarjetas = listaTemp; // Eliminación de la tarjeta de la BD var sql = "delete from Tarjetas where id=" + tarjetaActual.id; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // ... actualización de la lista de tarjetas ... RepresentarListaTarjetas(categoriaActual); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual history.back(); } catch (e){ console.log("Error en EliminarTarjetaActual: " + e.message); } } function EliminarTarjetasPorCategoria(categoria){ var listaTemp = []; // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.categoria != categoria) { listaTemp.push(item); } }); listaTarjetas = listaTemp; var sql = "delete from Tarjetas where categoria=" + categoria; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual $.mobile.changePage($('#PaginaCategorias')); } /** * EliminarListaTarjetas. Vacía completamente la lista de las tarjetas. 
*/ function EliminarListaTarjetas(){ listaTarjetas = []; } function LimpiarTraduccion(){ $('#pnlResultadoTraduccion').removeClass("in"); } /* * Obtiene un Token de acceso al servidor de Microsoft Translate a través del servicio web */ function getAccessToken(){ //Editado--> Pedro var urlObtenerAccesToken = 'http://www.bubblewords.info/WSTraducciones/GetAccessToken.asmx/getToken'; $.ajax({ url: urlObtenerAccesToken + '?callback=?', type: "GET", dataType: 'jsonp', success: function(data){ console.log("AccessToken Recibido"); accessToken=data[0]; estadoServidor=true; //navigator.notification.confirm("Hemos obtenido el token de acceso: "+accessToken) if (intervaloSinConexion){ clearInterval(intervaloSinConexion); intervaloSinConexion=undefined; console.warn("El servidor esta disponible, cambiamos o establecemos el intervalo a 9 minutos"); intervaloNormal = setInterval(getAccessToken, 9 * 60 * 1000); } hayConexion=true; }, timeout:5000, error: function(x, t, m) { console.log("AccessToken No recibido"); if (hayConexion ==true){ /* *En caso de que se tenga conexion de red, pero no sea accesible el servicio web que nos devuelve el token de acceso *solicitamos un token de acceso cada 30 segundos, hasta que el servidor responda, en cuyo caso se para el intervalo *sin conexión y comienza el intervalo normal de 9 minutos */ if (intervaloNormal){ clearInterval(intervaloNormal); intervaloNormal=undefined; console.warn("El servidor no esta disponible, cambiamos el intervalo a 30 segundos"); intervaloSinConexion = setInterval(getAccessToken, 30 * 1000); } if(t==="timeout") { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } else { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible+" Error: "+t,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } } } }); } /* * Obtiene la traducción de un texto proporcionandole un idioma de origen y destino */ function TraduccionSugerida(event){ console.log("Hay conexion = "+hayConexion); console.log("Se ha traducido "+numTraducciones+" veces"); if (valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && (!liteVersion || (liteVersion && numTraducciones < 5))){ var texto=$('#inputTituloTarjeta').attr('value'); var destino =$('#lstIdiomaSecundario').attr('value'); var origen = $('#lstIdiomaPrincipal').attr('value'); if(origen != destino){ console.log("traduccion> de: "+origen+" destino "+destino+" palabra "+texto); var p = new Object; p.text = texto; p.from = origen; p.to = destino; p.oncomplete = 'ajaxTranslateCallback'; p.appId = "Bearer " + accessToken; var requestStr = "http://api.microsofttranslator.com/V2/Ajax.svc/Translate"; //navigator.notification.confirm("Solicitamos la traduccion: texto="+p.text+" origen="+p.from+" destino="+p.to+" token="+p.appId); $.ajax({ url: requestStr, type: "GET", data: p, dataType: 'jsonp', cache: true, }); } valorAnteriorTitulo=texto; PararEvento(event); }else if(valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && numTraducciones >=maxTraducciones && alertMostrado==false){ console.log("entramos por el mensaje de limitacion de trad."); $('#inputTitulo2Tarjeta').focus(); mensajeActualizar(res_lite_traducciones); alertMostrado=true; }else{ console.log("entramos por el ultimo else"); /* * En caso de que no haya conexion o que la palabra sea la misma que la anterior, * establecemos el foco en el segundo titulo. 
*/ $('#inputTitulo2Tarjeta').focus(); } } /* * Callback que controla la respuesta del servidor de Microsoft Translator */ function ajaxTranslateCallback(response) { if (response.length > 0) { traduccionSugerida = response; //navigator.notification.confirm("La traduccion se ha recibido con exito: "+traduccionSugerida); $('#lblTraduccionObtenida').html(response.toString()); $('#pnlResultadoTraduccion').addClass("in").css('zIndex', 300); $('.tooltip-inner').textfill({maxFontPixels: 200, minFontPixels:4}); if (liteVersion){ numTraducciones++; } } } /** * AplicarTraduccion. Se ha pulsado sobre la opción de aplicar la traducción sugerida. */ function AplicarTraduccion(event){ $('#inputTitulo2Tarjeta').attr('value', traduccionSugerida); $('#pnlResultadoTraduccion').css('zIndex', -200); LimpiarTraduccion(); PararEvento(event); } /** * SeleccionarFondoTarjeta. Se ha seleccionado un fondo para la tarjeta que se desea crear. Cambia * la imagen del formulario 'Nueva tarjeta' y guarda el fondo actualmente seleccionado. * * @param numero número del fondo elegido * @param tipoDispositivo indica el tipo de dispositivo */ function SeleccionarFondoTarjeta(event, numero, tipoDispositivo, debeVolver){ //console.log("Fondo seleccionado: " + numero.toString() + ", tipo dispositivo: " + tipoDispositivo); try{ fondoActual = numero; $('#imgFondoTarjeta').attr('src', 'img/texturas/muestras/textura' + numero + '.jpg') // Mostrar y ocultar capas $('#pnlMostrarImagenFondo').addClass("in").show(); //console.log("SeleccionarFontoTarjeta. DebeVolver: " + debeVolver); if (debeVolver){ Volver(event); } } catch(e){ console.log(e.message); } } /** * MostrarImagenDeGaleria. Se quiere mostrar la imagen que ha seleccionado el usuario desde la galería. */ function MostrarImagenDeGaleria(imageData, tipoDispositivo){ var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; //console.log("MostrarImagenDeGaleria. Ancho: " + anchoFoto + ", alto: " + altoFoto); // Ocultar y mostrar capas $('#pnlMostrarTextoFotoGaleria').removeClass("in").hide(); $("#imgPrincipalTarjetaGaleria").attr("src", imageData); $('#pnlMostrarImagenGaleria').addClass("in").show(); $('#pnlMostrarImagenCamara').removeClass("in").hide(); $('#pnlMostrarTextoFotoCamara').addClass("in").show(); // Quitar cualquier foto de la cámara anterior $('#imgPrincipalTarjetaCamara').attr('src',''); } /** * MostrarImagenDeCamara. Se quiere mostrar la imagen que se ha tomado desde la cámara. */ function MostrarImagenDeCamara(imageData, tipoDispositivo){ console.log("Establecemos la imagen: "+imageData); var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; // Ocultar y mostrar capas $('#pnlMostrarTextoFotoCamara').removeClass("in").hide(); $("#imgPrincipalTarjetaCamara").attr("src", imageData); $('#pnlMostrarImagenCamara').addClass("in").show(); $('#pnlMostrarImagenGaleria').removeClass("in").hide(); $('#pnlMostrarTextoFotoGaleria').addClass("in").show(); // Quitar cualquier foto anterior de la galería $('#imgPrincipalTarjetaGaleria').attr('src',''); }
lListaTarjetas').html(""); var texto = ""; var letra = ""; var contador = 0; var listaImagenesACargar = []; if (favoritas) $('#h1NombreCategoria').html(res_Favoritos) mostrarFavoritas = favoritas; if (activarPhoneGap){ switch(tipoDispositivo){ case "iPhone3": ancho = anchoiPhone3; break; case "iPhone4": ancho = anchoiPhone4; break; case "tablet": ancho = anchoTablet; break; } } else { ancho = anchoTablet; } var columna =1; $.each(listaTarjetas, function(i, item) { console.log("Comprobamos esta tarjeta para añadirla a la categoría ("+categoria.id+"): "+item.id+" con la categoria: "+item.categoria); if ( ( (favoritas) && (item.favorita == 1) ) || ( (!favoritas) && (item.categoria == categoria.id) ) ) { // Maquetación de la tabla que llevará cada una de las imágenes relacionadas con la tarjeta if(columna ==1){ texto+="<tr>"; } texto += "<td><div class='contenedorImg'><a href=\'javascript:;\' onClick=\'CargarTarjeta(event," + item.id + ", true)\' onTouchStart=\'(event," + item.id + ")\'><div class= 'divImgTarjeta conSombra'><img id=\'img" + item.id + "\' src=\'img/imagen_no_disponible_230.jpg\' />" + "</div></a></div></td>"; columna++; if (columna ==4){ texto+="</tr>"; columna=1; } listaImagenesACargar.push(item); contador += 1; } }); // Actualización del grid de imágenes //console.log("Este es el texto: " + texto); $('#lblListaTarjetas').html(texto); //ObtenerTarjetasPorCategoria(categoria.id); if (tarjetasPorCategoria.length <=2){ var altoPag =parseFloat($('#PaginaDetalleCategoria').height()); var altoImagen = altoPag-(altoPag*0.4); //navigator.notification.alert("El alto de la pagina es de: "+altoPag+"+px el alto de la imagen es: "+altoImagen+"px"); $('.contenedorImg img').css('max-height',altoImagen.toString()+"px"); } // Una vez que se haya cargado la lista de imágenes, hay que cargar sus rutas $.each(listaImagenesACargar, function(i, item){ CargarFoto("img" + item.id, item.foto); }); } /** * CargarFo
identifier_body
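The UPDATE, INSERT and DELETE statements in ActualizarTarjeta, NuevaTarjeta and EliminaTarjetaActual above are built by concatenating user-typed values (titles, file paths) directly into the SQL string, so a quote inside a title breaks the statement. Web SQL's executeSql also takes a second argument with values bound to ? placeholders. A minimal sketch of the ActualizarTarjeta update written that way, assuming the bwBD handle and errorBD callback defined in this file; ActualizarTarjetaSQL is a hypothetical helper name:

/*
 * Sketch only: parameterised version of the UPDATE issued in ActualizarTarjeta.
 * executeSql(statement, values, successCallback, errorCallback) binds each ?
 * in order, so quotes inside titles no longer break the statement.
 */
function ActualizarTarjetaSQL(tarjeta){
    var sql = "UPDATE Tarjetas SET titulo1=?, titulo2=?, fondo=?, foto=?, sonido=?, " +
              "favorita=?, anchoFoto=?, altoFoto=?, fuente=?, idiomaA=?, idiomaDe=? WHERE id=?";
    var valores = [
        $.trim(tarjeta.titulo1), $.trim(tarjeta.titulo2), $.trim(tarjeta.fondo),
        $.trim(tarjeta.foto), $.trim(tarjeta.sonido), tarjeta.favorita,
        tarjeta.anchoFoto, tarjeta.altoFoto, tarjeta.fuente,
        idiomaSecundario.toLowerCase(), idiomaPrincipal.toLowerCase(), tarjeta.id
    ];
    bwBD.transaction(function(tx){
        tx.executeSql(sql, valores);
    }, errorBD);
}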
tarjetas.js
/*--- PROPIEDADES ---*/ var listaTarjetas = []; var tarjetaActual = null; var existeFoto = false; var existeSonido = false; var fondoActual = 1; // Esta variable contiene el id del fondo que está actualmente mostrándose en el formulario de nueva tarjeta var mostrarFavoritas = false; // Indica si se deben mostrar solamente las tarjetas favoritas o no var traduccionSugerida = ""; // Guarda la traducción que se ha encontrado para la palabra actual var tarjetaEnEdicion = false; // Indica si se ha pulsado sobre el botón de edición de la tarjeta var valorAnteriorTitulo=""; var anchoiPhone3 = 80; var anchoiPhone4 = 160; var anchoTablet = 230; var estadoServidor =false; //Guarda el estado del servidor de traduccion, si es false el servidor no esta disponible /*--- MÉTODOS ---*/ /** * RepresentarListaTarjetas. Realiza la maquetación de la lista de tarjetas que corresponden con la categoría actual. * * @param categoria categoría actual * @param favoritas indica si solamente se deben mostrar las favoritas o no */ function RepresentarListaTarjetas(categoria, favoritas){ $('#lblListaTarjetas').html(""); var texto = ""; var letra = ""; var contador = 0; var listaImagenesACargar = []; if (favoritas) $('#h1NombreCategoria').html(res_Favoritos) mostrarFavoritas = favoritas; if (activarPhoneGap){ switch(tipoDispositivo){ case "iPhone3": ancho = anchoiPhone3; break; case "iPhone4": ancho = anchoiPhone4; break;
} } else { ancho = anchoTablet; } var columna =1; $.each(listaTarjetas, function(i, item) { console.log("Comprobamos esta tarjeta para añadirla a la categoría ("+categoria.id+"): "+item.id+" con la categoria: "+item.categoria); if ( ( (favoritas) && (item.favorita == 1) ) || ( (!favoritas) && (item.categoria == categoria.id) ) ) { // Maquetación de la tabla que llevará cada una de las imágenes relacionadas con la tarjeta if(columna ==1){ texto+="<tr>"; } texto += "<td><div class='contenedorImg'><a href=\'javascript:;\' onClick=\'CargarTarjeta(event," + item.id + ", true)\' onTouchStart=\'(event," + item.id + ")\'><div class= 'divImgTarjeta conSombra'><img id=\'img" + item.id + "\' src=\'img/imagen_no_disponible_230.jpg\' />" + "</div></a></div></td>"; columna++; if (columna ==4){ texto+="</tr>"; columna=1; } listaImagenesACargar.push(item); contador += 1; } }); // Actualización del grid de imágenes //console.log("Este es el texto: " + texto); $('#lblListaTarjetas').html(texto); //ObtenerTarjetasPorCategoria(categoria.id); if (tarjetasPorCategoria.length <=2){ var altoPag =parseFloat($('#PaginaDetalleCategoria').height()); var altoImagen = altoPag-(altoPag*0.4); //navigator.notification.alert("El alto de la pagina es de: "+altoPag+"+px el alto de la imagen es: "+altoImagen+"px"); $('.contenedorImg img').css('max-height',altoImagen.toString()+"px"); } // Una vez que se haya cargado la lista de imágenes, hay que cargar sus rutas $.each(listaImagenesACargar, function(i, item){ CargarFoto("img" + item.id, item.foto); }); } /** * CargarFoto. Intenta cargar la foto pasada como parámetro. Si lo consigue, la redimensiona para que se muestre * correctamente en la lista de tarjetas. Si no consigue cargarla, deja la imagen que esta en el identificador correspondiente. * * @param identificador id de la imagen donde cargará la foto * @param rutaFoto ruta en el dispositivo donde se encuentra la foto * @param anchoFoto ancho en pixels de la foto original * @param altoFoto alto en pixels de la foto original */ function CargarFoto(identificador, rutaFoto){ if (activarPhoneGap) { // Solamente se comprueba si exista la fotografía en el caso de que está activado el PhoneGap. 
if ($.trim(rutaFoto).length > 0) { // Se comprueba que no es la imagen por defecto if (rutaFoto.indexOf('img/imagen_no_disponible') >= 0) { //console.log("La foto a cargar es la de por defecto"); } else { window.resolveLocalFileSystemURI(rutaFoto, function(fileEntry){ $("#" + identificador).attr("src", rutaFoto).on('load', function(){ /* if (anchoFoto < altoFoto){ switch(tipoDispositivo){ case "iPhone3": alto = ((altoFoto * anchoiPhone3) / anchoFoto).toFixed(0); ancho = anchoiPhone3; break; case "iPhone4": alto = ((altoFoto * anchoiPhone4) / anchoFoto).toFixed(0); ancho = anchoiPhone4; break; case "tablet": alto = ((altoFoto * anchoTablet) / anchoFoto).toFixed(0); ancho = anchoTablet; break; } // En el caso de que la altura sea mayor que el ancho, hay que desplazar la imagen para que quede centrada // en altura //$("#" + identificador).css("position", "relative").css("top", "-" + ((alto - ancho) / 2).toFixed(0).toString() + "px"); } else { switch(tipoDispositivo){ case "iPhone3": ancho = ((anchoFoto * anchoiPhone3) / altoFoto).toFixed(0); alto = anchoiPhone3; break; case "iPhone4": ancho = ((anchoFoto * anchoiPhone4) / altoFoto).toFixed(0); alto = anchoiPhone4; break; case "tablet": ancho = ((anchoFoto * anchoTablet) / altoFoto).toFixed(0); alto = anchoTablet; break; } // En el caso de que la anchura sea mayor que la altura, hay que desplazar la imagen para que quede // centrada en anchura $('#' + identificador).css("position", "relative").css("left", "-" + ((ancho - alto)/2).toFixed(0).toString() + "px"); } $('#' + identificador).attr("width", ancho); $('#' + identificador).attr("height", alto); */ //console.log("Ancho: " + anchoFoto + ", alto: " + altoFoto); }); }, function(error){ console.log("Ha fallado la carga del archivo " + rutaFoto); }); } } } } /** * NuevaTarjeta. Inserta una nueva tarjeta con los datos pasados como parámetros. * * @param categoria identificador de la categoría a la que pertenece la tarjeta * @param titulo1 título 1 de la tarjeta * @param titulo2 título 2 de la tarjeta * @param fondo nombre de la imagen del fondo de la tarjeta * @param foto nombre de la imagen principal de la tarjeta * @param sonido nombre del sonido de la tarjeta * @param ancho ancho en pixels de la foto * @param alto alto en pixels de la imagen * @param fuente tipografía asociada a la tarjeta * @param tamanioFuente tamaño en pixels de la fuente utilizada */ function NuevaTarjeta(categoria, titulo1, titulo2, fondo, foto, sonido, ancho, alto, fuente){ var maxId = 0; //console.log("llego a NuevaTarjeta"); try{ // obtención del último identificador utilizado $.each(listaTarjetas, function(i, item){ if (item.id > maxId) { maxId = item.id; } }); // Inserción de la tarjeta en la lista de tarjetas actuales (para la categoría actual) ... listaTarjetas.push({ 'id': (maxId+1), 'categoria': categoria, 'titulo1': titulo1, 'titulo2': titulo2, 'fondo': fondo, 'foto': foto, 'sonido': sonido, 'favorita': 0, 'anchoFoto': ancho, 'altoFoto': alto, 'fuente':fuente, 'idiomaA':idiomaSecundario.toLowerCase(), 'idiomaDe':idiomaPrincipal.toLowerCase() }); // ... 
e inserción de la tarjeta en la base de datos var sql = "insert into Tarjetas(id, categoria, titulo1, titulo2, fondo, foto, sonido, favorita, anchoFoto, altoFoto, fuente, idiomaA, idiomaDe ) values(" + (maxId+1) + "," + categoria + ",\'" + titulo1 + "\',\'" + titulo2 + "\',\'" + fondo + "\',\'" + foto + "\',\'" + sonido + "\',0," + ancho + "," + alto + ",'" + fuente + "','"+idiomaSecundario.toLowerCase()+"','"+idiomaPrincipal.toLowerCase()+"')"; console.log("El fondo es el numero: "+fondo); console.log("Creamos una nueva tarjeta, SQL: "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); //Refrescamos el array con las tarjetas de la categoria actual //console.log("la categoria es: "+categoria); //console.log("Llamamos a obtenertarjetasporcategoria"); ObtenerTarjetasPorCategoria(categoria); //console.log("Llamamos a reoresentarlistatarrjetas con el parametro categoriaActual = "+categoria); // ... y actualización de la lista de categorías RepresentarCategorias(); //Metemos el id de la categoría en un objeto debido a que RepresentarlistaTarjetas recibe un objeto y no un entero var obj = new Object(); obj.id=categoria // Actualización de la visualización de la lista de tarjetas ... RepresentarListaTarjetas(obj,false); tarjetaActual = null; } catch(e){ console.log(e); } } /** * SeleccionarTarjetaPorId. Devuelve la tarjeta cuyo identificador es pasado como parámetro. * * @param id identificador de la tarjeta * @result Tarjeta que corresponde con el identificador pasado. */ function SeleccionarTarjetaPorId(id){ var resultado = null; $.each(listaTarjetas, function(i, item){ if (item.id == id) { resultado = item; } }); return resultado; } /** * ContarTarjetasPorCategoria. Devuelve el número de tarjetas que están relacionadas con la categoría * pasada como parámetro. * * @param categoria identificador de la categoría * @result número de tarjetas relacionadas con la categoría */ function ContarTarjetasPorCategoria(categoria){ var resultado = 0; try{ $.each(listaTarjetas, function(i, item){ if (item.categoria == categoria){ resultado += 1; } }); } catch(e){ console.log(e.message); } return resultado; } /** * CargarTarjeta. Prepara todos los componentes de la página donde se muestra la tarjeta. Carga la tarjeta cuyo identificador * es pasado como parámetro. 
* * @param event Evento que se dispara al llamar a esta función * @param id identificador de la tarjeta * @param cambiarPagina booleano que indica si se debe cambiar a la página de 'PaginaDetalleTarjeta' */ function CargarTarjeta(event, id, cambiarPagina){ tarjetaActual = SeleccionarTarjetaPorId(id); //console.log("Foto actual: " + tarjetaActual.foto); /* switch(tipoDispositivo){ case 'iPhone3': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr("width", "250px"); break; case 'iPhone4': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '580px'); break; case 'tablet': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '700px'); break; } */ //Eliminamos las clases antiguas $('#lblTituloTarjeta').removeClass(); $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto); $('#lblTituloTarjeta').html(tarjetaActual.titulo1).addClass('fuente-' + tarjetaActual.fuente); // Fondo try{ for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); $('#PaginaDetalleTarjeta').addClass('fondo'+tarjetaActual.fondo); /* console.log("Fondo actual: " + tarjetaActual.fondo); console.log("EL alto de la imagen es: "+$('#imgGrandeTarjeta').css("height")); console.log("EL ancho de la imagen es: "+$('#imgGrandeTarjeta').css("width")); */ } catch (e){ console.log(e.message); } if (tarjetaActual.favorita == 1) { $('#btnCambiarTarjetaFavorita').addClass("ui-btn-favorito"); /* TODO actualizar la tarjeta en la base de datos */ } else { $('#btnCambiarTarjetaFavorita').removeClass("ui-btn-favorito"); } // Se carga la página con la tarjeta if (cambiarPagina){ $.mobile.changePage($('#PaginaDetalleTarjeta')); } PararEvento(event); } function ReversoTarjeta(event){ //console.log("Entra en ReversoTarjeta"); try{ //Eliminamos las clases antiguas $('#lblTituloTarjetaReverso').removeClass(); for(i=1;i<=15;i++){ $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //Añadimos las clases nuevas $('#imgGrandeTarjetaReverso').attr('src', tarjetaActual.foto); $('#lblTituloTarjetaReverso').html(tarjetaActual.titulo2).addClass('fuente-' + tarjetaActual.fuente); $('#PaginaReversoTarjeta').addClass('fondo'+tarjetaActual.fondo); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); // Se cambia a la página del reverso $.mobile.changePage($('#PaginaReversoTarjeta'), {transition: 'flip',changeHash:'false'}); ReproducirSonidoEstatico(); PararEvento(event); } catch (e){ console.log(e.message); } } /** * ActualizarTarjeta. 
Actualiza los datos de la tarjeta actual, con los datos pasados a la función * *@param tarjeta Datos de la tarjeta actualizada */ function ActualizarTarjeta(event, tarjeta){ var listaTemp = []; var datosAntiguos; $.each(listaTarjetas, function(i, item) { if (tarjeta.id == item.id) { datosAntiguos = item; listaTemp.push(tarjeta); } else { listaTemp.push(item); } }); listaTarjetas = listaTemp; // Actualización en la base de datos var sql = "UPDATE Tarjetas SET titulo1='" + $.trim(tarjeta.titulo1) + "', titulo2='" + $.trim(tarjeta.titulo2) + "', fondo='" + $.trim(tarjeta.fondo) + "', foto='" + $.trim(tarjeta.foto) + "', sonido='" + $.trim(tarjeta.sonido) + "', favorita=" + tarjeta.favorita + ", anchoFoto=" + tarjeta.anchoFoto + ", altoFoto=" + tarjeta.altoFoto + ", fuente='" + tarjeta.fuente + "', idiomaA='" +idiomaSecundario.toLowerCase() + "', idiomaDe='" +idiomaPrincipal.toLowerCase() + "' WHERE id=" + tarjeta.id; console.log("Actualizamos una tarjeta--> "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // Eliminación de las clases de los elementos antiguos (fondo, fuente, tamaño de la fuente) for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); $('#lblTituloTarjeta').removeClass(); $('#lblTituloTarjetaReverso').removeClass(); // Actualizar los datos de la tarjeta actual, con los nuevos datos de la tarjeta actualizada CargarTarjeta(event, tarjeta.id, false); } /** * ComprobarEliminarTarjeta. Comprueba si el usuario ha seleccionado la eliminación de la tarjeta actual. */ function ComprobarEliminarTarjeta(event){ if (parseInt(event) == 1){ EliminaTarjetaActual(event) } } /** * EliminaTarjetaActual. Realiza la eliminación de la tarjeta actualmente seleccionada, tanto de la lista de tarjetas como de la base de datos. */ function EliminaTarjetaActual(event){ var listaTemp = []; try{ // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.id != tarjetaActual.id) {! listaTemp.push(item); } }); listaTarjetas = listaTemp; // Eliminación de la tarjeta de la BD var sql = "delete from Tarjetas where id=" + tarjetaActual.id; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // ... actualización de la lista de tarjetas ... RepresentarListaTarjetas(categoriaActual); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual history.back(); } catch (e){ console.log("Error en EliminarTarjetaActual: " + e.message); } } function EliminarTarjetasPorCategoria(categoria){ var listaTemp = []; // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.categoria != categoria) { listaTemp.push(item); } }); listaTarjetas = listaTemp; var sql = "delete from Tarjetas where categoria=" + categoria; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual $.mobile.changePage($('#PaginaCategorias')); } /** * EliminarListaTarjetas. Vacía completamente la lista de las tarjetas. 
*/ function EliminarListaTarjetas(){ listaTarjetas = []; } function LimpiarTraduccion(){ $('#pnlResultadoTraduccion').removeClass("in"); } /* * Obtiene un Token de acceso al servidor de Microsoft Translate a través del servicio web */ function getAccessToken(){ //Editado--> Pedro var urlObtenerAccesToken = 'http://www.bubblewords.info/WSTraducciones/GetAccessToken.asmx/getToken'; $.ajax({ url: urlObtenerAccesToken + '?callback=?', type: "GET", dataType: 'jsonp', success: function(data){ console.log("AccessToken Recibido"); accessToken=data[0]; estadoServidor=true; //navigator.notification.confirm("Hemos obtenido el token de acceso: "+accessToken) if (intervaloSinConexion){ clearInterval(intervaloSinConexion); intervaloSinConexion=undefined; console.warn("El servidor esta disponible, cambiamos o establecemos el intervalo a 9 minutos"); intervaloNormal = setInterval(getAccessToken, 9 * 60 * 1000); } hayConexion=true; }, timeout:5000, error: function(x, t, m) { console.log("AccessToken No recibido"); if (hayConexion ==true){ /* *En caso de que se tenga conexion de red, pero no sea accesible el servicio web que nos devuelve el token de acceso *solicitamos un token de acceso cada 30 segundos, hasta que el servidor responda, en cuyo caso se para el intervalo *sin conexión y comienza el intervalo normal de 9 minutos */ if (intervaloNormal){ clearInterval(intervaloNormal); intervaloNormal=undefined; console.warn("El servidor no esta disponible, cambiamos el intervalo a 30 segundos"); intervaloSinConexion = setInterval(getAccessToken, 30 * 1000); } if(t==="timeout") { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } else { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible+" Error: "+t,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } } } }); } /* * Obtiene la traducción de un texto proporcionandole un idioma de origen y destino */ function TraduccionSugerida(event){ console.log("Hay conexion = "+hayConexion); console.log("Se ha traducido "+numTraducciones+" veces"); if (valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && (!liteVersion || (liteVersion && numTraducciones < 5))){ var texto=$('#inputTituloTarjeta').attr('value'); var destino =$('#lstIdiomaSecundario').attr('value'); var origen = $('#lstIdiomaPrincipal').attr('value'); if(origen != destino){ console.log("traduccion> de: "+origen+" destino "+destino+" palabra "+texto); var p = new Object; p.text = texto; p.from = origen; p.to = destino; p.oncomplete = 'ajaxTranslateCallback'; p.appId = "Bearer " + accessToken; var requestStr = "http://api.microsofttranslator.com/V2/Ajax.svc/Translate"; //navigator.notification.confirm("Solicitamos la traduccion: texto="+p.text+" origen="+p.from+" destino="+p.to+" token="+p.appId); $.ajax({ url: requestStr, type: "GET", data: p, dataType: 'jsonp', cache: true, }); } valorAnteriorTitulo=texto; PararEvento(event); }else if(valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && numTraducciones >=maxTraducciones && alertMostrado==false){ console.log("entramos por el mensaje de limitacion de trad."); $('#inputTitulo2Tarjeta').focus(); mensajeActualizar(res_lite_traducciones); alertMostrado=true; }else{ console.log("entramos por el ultimo else"); /* * En caso de que no haya conexion o que la palabra sea la misma que la anterior, * establecemos el foco en el segundo titulo. 
*/ $('#inputTitulo2Tarjeta').focus(); } } /* * Callback que controla la respuesta del servidor de Microsoft Translator */ function ajaxTranslateCallback(response) { if (response.length > 0) { traduccionSugerida = response; //navigator.notification.confirm("La traduccion se ha recibido con exito: "+traduccionSugerida); $('#lblTraduccionObtenida').html(response.toString()); $('#pnlResultadoTraduccion').addClass("in").css('zIndex', 300); $('.tooltip-inner').textfill({maxFontPixels: 200, minFontPixels:4}); if (liteVersion){ numTraducciones++; } } } /** * AplicarTraduccion. Se ha pulsado sobre la opción de aplicar la traducción sugerida. */ function AplicarTraduccion(event){ $('#inputTitulo2Tarjeta').attr('value', traduccionSugerida); $('#pnlResultadoTraduccion').css('zIndex', -200); LimpiarTraduccion(); PararEvento(event); } /** * SeleccionarFondoTarjeta. Se ha seleccionado un fondo para la tarjeta que se desea crear. Cambia * la imagen del formulario 'Nueva tarjeta' y guarda el fondo actualmente seleccionado. * * @param numero número del fondo elegido * @param tipoDispositivo indica el tipo de dispositivo */ function SeleccionarFondoTarjeta(event, numero, tipoDispositivo, debeVolver){ //console.log("Fondo seleccionado: " + numero.toString() + ", tipo dispositivo: " + tipoDispositivo); try{ fondoActual = numero; $('#imgFondoTarjeta').attr('src', 'img/texturas/muestras/textura' + numero + '.jpg') // Mostrar y ocultar capas $('#pnlMostrarImagenFondo').addClass("in").show(); //console.log("SeleccionarFontoTarjeta. DebeVolver: " + debeVolver); if (debeVolver){ Volver(event); } } catch(e){ console.log(e.message); } } /** * MostrarImagenDeGaleria. Se quiere mostrar la imagen que ha seleccionado el usuario desde la galería. */ function MostrarImagenDeGaleria(imageData, tipoDispositivo){ var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; //console.log("MostrarImagenDeGaleria. Ancho: " + anchoFoto + ", alto: " + altoFoto); // Ocultar y mostrar capas $('#pnlMostrarTextoFotoGaleria').removeClass("in").hide(); $("#imgPrincipalTarjetaGaleria").attr("src", imageData); $('#pnlMostrarImagenGaleria').addClass("in").show(); $('#pnlMostrarImagenCamara').removeClass("in").hide(); $('#pnlMostrarTextoFotoCamara').addClass("in").show(); // Quitar cualquier foto de la cámara anterior $('#imgPrincipalTarjetaCamara').attr('src',''); } /** * MostrarImagenDeCamara. Se quiere mostrar la imagen que se ha tomado desde la cámara. */ function MostrarImagenDeCamara(imageData, tipoDispositivo){ console.log("Establecemos la imagen: "+imageData); var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; // Ocultar y mostrar capas $('#pnlMostrarTextoFotoCamara').removeClass("in").hide(); $("#imgPrincipalTarjetaCamara").attr("src", imageData); $('#pnlMostrarImagenCamara').addClass("in").show(); $('#pnlMostrarImagenGaleria').removeClass("in").hide(); $('#pnlMostrarTextoFotoGaleria').addClass("in").show(); // Quitar cualquier foto anterior de la galería $('#imgPrincipalTarjetaGaleria').attr('src',''); }
case "tablet": ancho = anchoTablet; break;
random_line_split
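The $.each loop that rebuilds listaTarjetas inside EliminaTarjetaActual carries a stray "!" right after the opening brace of its if block, which is a syntax error in the copies above; the step it intends is simply "keep every card whose id differs from the one being deleted". A minimal sketch of that step using jQuery's $.grep, assuming the same listaTarjetas and tarjetaActual globals declared at the top of tarjetas.js:

/*
 * Sketch only: equivalent of the list-rebuilding loop in EliminaTarjetaActual,
 * without the stray "!". $.grep returns the elements for which the callback is
 * truthy and leaves the original array untouched.
 */
listaTarjetas = $.grep(listaTarjetas, function(item){
    return item.id != tarjetaActual.id;
});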
tarjetas.js
/*--- PROPIEDADES ---*/ var listaTarjetas = []; var tarjetaActual = null; var existeFoto = false; var existeSonido = false; var fondoActual = 1; // Esta variable contiene el id del fondo que está actualmente mostrándose en el formulario de nueva tarjeta var mostrarFavoritas = false; // Indica si se deben mostrar solamente las tarjetas favoritas o no var traduccionSugerida = ""; // Guarda la traducción que se ha encontrado para la palabra actual var tarjetaEnEdicion = false; // Indica si se ha pulsado sobre el botón de edición de la tarjeta var valorAnteriorTitulo=""; var anchoiPhone3 = 80; var anchoiPhone4 = 160; var anchoTablet = 230; var estadoServidor =false; //Guarda el estado del servidor de traduccion, si es false el servidor no esta disponible /*--- MÉTODOS ---*/ /** * RepresentarListaTarjetas. Realiza la maquetación de la lista de tarjetas que corresponden con la categoría actual. * * @param categoria categoría actual * @param favoritas indica si solamente se deben mostrar las favoritas o no */ function Represent
a, favoritas){ $('#lblListaTarjetas').html(""); var texto = ""; var letra = ""; var contador = 0; var listaImagenesACargar = []; if (favoritas) $('#h1NombreCategoria').html(res_Favoritos) mostrarFavoritas = favoritas; if (activarPhoneGap){ switch(tipoDispositivo){ case "iPhone3": ancho = anchoiPhone3; break; case "iPhone4": ancho = anchoiPhone4; break; case "tablet": ancho = anchoTablet; break; } } else { ancho = anchoTablet; } var columna =1; $.each(listaTarjetas, function(i, item) { console.log("Comprobamos esta tarjeta para añadirla a la categoría ("+categoria.id+"): "+item.id+" con la categoria: "+item.categoria); if ( ( (favoritas) && (item.favorita == 1) ) || ( (!favoritas) && (item.categoria == categoria.id) ) ) { // Maquetación de la tabla que llevará cada una de las imágenes relacionadas con la tarjeta if(columna ==1){ texto+="<tr>"; } texto += "<td><div class='contenedorImg'><a href=\'javascript:;\' onClick=\'CargarTarjeta(event," + item.id + ", true)\' onTouchStart=\'(event," + item.id + ")\'><div class= 'divImgTarjeta conSombra'><img id=\'img" + item.id + "\' src=\'img/imagen_no_disponible_230.jpg\' />" + "</div></a></div></td>"; columna++; if (columna ==4){ texto+="</tr>"; columna=1; } listaImagenesACargar.push(item); contador += 1; } }); // Actualización del grid de imágenes //console.log("Este es el texto: " + texto); $('#lblListaTarjetas').html(texto); //ObtenerTarjetasPorCategoria(categoria.id); if (tarjetasPorCategoria.length <=2){ var altoPag =parseFloat($('#PaginaDetalleCategoria').height()); var altoImagen = altoPag-(altoPag*0.4); //navigator.notification.alert("El alto de la pagina es de: "+altoPag+"+px el alto de la imagen es: "+altoImagen+"px"); $('.contenedorImg img').css('max-height',altoImagen.toString()+"px"); } // Una vez que se haya cargado la lista de imágenes, hay que cargar sus rutas $.each(listaImagenesACargar, function(i, item){ CargarFoto("img" + item.id, item.foto); }); } /** * CargarFoto. Intenta cargar la foto pasada como parámetro. Si lo consigue, la redimensiona para que se muestre * correctamente en la lista de tarjetas. Si no consigue cargarla, deja la imagen que esta en el identificador correspondiente. * * @param identificador id de la imagen donde cargará la foto * @param rutaFoto ruta en el dispositivo donde se encuentra la foto * @param anchoFoto ancho en pixels de la foto original * @param altoFoto alto en pixels de la foto original */ function CargarFoto(identificador, rutaFoto){ if (activarPhoneGap) { // Solamente se comprueba si exista la fotografía en el caso de que está activado el PhoneGap. 
if ($.trim(rutaFoto).length > 0) { // Se comprueba que no es la imagen por defecto if (rutaFoto.indexOf('img/imagen_no_disponible') >= 0) { //console.log("La foto a cargar es la de por defecto"); } else { window.resolveLocalFileSystemURI(rutaFoto, function(fileEntry){ $("#" + identificador).attr("src", rutaFoto).on('load', function(){ /* if (anchoFoto < altoFoto){ switch(tipoDispositivo){ case "iPhone3": alto = ((altoFoto * anchoiPhone3) / anchoFoto).toFixed(0); ancho = anchoiPhone3; break; case "iPhone4": alto = ((altoFoto * anchoiPhone4) / anchoFoto).toFixed(0); ancho = anchoiPhone4; break; case "tablet": alto = ((altoFoto * anchoTablet) / anchoFoto).toFixed(0); ancho = anchoTablet; break; } // En el caso de que la altura sea mayor que el ancho, hay que desplazar la imagen para que quede centrada // en altura //$("#" + identificador).css("position", "relative").css("top", "-" + ((alto - ancho) / 2).toFixed(0).toString() + "px"); } else { switch(tipoDispositivo){ case "iPhone3": ancho = ((anchoFoto * anchoiPhone3) / altoFoto).toFixed(0); alto = anchoiPhone3; break; case "iPhone4": ancho = ((anchoFoto * anchoiPhone4) / altoFoto).toFixed(0); alto = anchoiPhone4; break; case "tablet": ancho = ((anchoFoto * anchoTablet) / altoFoto).toFixed(0); alto = anchoTablet; break; } // En el caso de que la anchura sea mayor que la altura, hay que desplazar la imagen para que quede // centrada en anchura $('#' + identificador).css("position", "relative").css("left", "-" + ((ancho - alto)/2).toFixed(0).toString() + "px"); } $('#' + identificador).attr("width", ancho); $('#' + identificador).attr("height", alto); */ //console.log("Ancho: " + anchoFoto + ", alto: " + altoFoto); }); }, function(error){ console.log("Ha fallado la carga del archivo " + rutaFoto); }); } } } } /** * NuevaTarjeta. Inserta una nueva tarjeta con los datos pasados como parámetros. * * @param categoria identificador de la categoría a la que pertenece la tarjeta * @param titulo1 título 1 de la tarjeta * @param titulo2 título 2 de la tarjeta * @param fondo nombre de la imagen del fondo de la tarjeta * @param foto nombre de la imagen principal de la tarjeta * @param sonido nombre del sonido de la tarjeta * @param ancho ancho en pixels de la foto * @param alto alto en pixels de la imagen * @param fuente tipografía asociada a la tarjeta * @param tamanioFuente tamaño en pixels de la fuente utilizada */ function NuevaTarjeta(categoria, titulo1, titulo2, fondo, foto, sonido, ancho, alto, fuente){ var maxId = 0; //console.log("llego a NuevaTarjeta"); try{ // obtención del último identificador utilizado $.each(listaTarjetas, function(i, item){ if (item.id > maxId) { maxId = item.id; } }); // Inserción de la tarjeta en la lista de tarjetas actuales (para la categoría actual) ... listaTarjetas.push({ 'id': (maxId+1), 'categoria': categoria, 'titulo1': titulo1, 'titulo2': titulo2, 'fondo': fondo, 'foto': foto, 'sonido': sonido, 'favorita': 0, 'anchoFoto': ancho, 'altoFoto': alto, 'fuente':fuente, 'idiomaA':idiomaSecundario.toLowerCase(), 'idiomaDe':idiomaPrincipal.toLowerCase() }); // ... 
e inserción de la tarjeta en la base de datos var sql = "insert into Tarjetas(id, categoria, titulo1, titulo2, fondo, foto, sonido, favorita, anchoFoto, altoFoto, fuente, idiomaA, idiomaDe ) values(" + (maxId+1) + "," + categoria + ",\'" + titulo1 + "\',\'" + titulo2 + "\',\'" + fondo + "\',\'" + foto + "\',\'" + sonido + "\',0," + ancho + "," + alto + ",'" + fuente + "','"+idiomaSecundario.toLowerCase()+"','"+idiomaPrincipal.toLowerCase()+"')"; console.log("El fondo es el numero: "+fondo); console.log("Creamos una nueva tarjeta, SQL: "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); //Refrescamos el array con las tarjetas de la categoria actual //console.log("la categoria es: "+categoria); //console.log("Llamamos a obtenertarjetasporcategoria"); ObtenerTarjetasPorCategoria(categoria); //console.log("Llamamos a reoresentarlistatarrjetas con el parametro categoriaActual = "+categoria); // ... y actualización de la lista de categorías RepresentarCategorias(); //Metemos el id de la categoría en un objeto debido a que RepresentarlistaTarjetas recibe un objeto y no un entero var obj = new Object(); obj.id=categoria // Actualización de la visualización de la lista de tarjetas ... RepresentarListaTarjetas(obj,false); tarjetaActual = null; } catch(e){ console.log(e); } } /** * SeleccionarTarjetaPorId. Devuelve la tarjeta cuyo identificador es pasado como parámetro. * * @param id identificador de la tarjeta * @result Tarjeta que corresponde con el identificador pasado. */ function SeleccionarTarjetaPorId(id){ var resultado = null; $.each(listaTarjetas, function(i, item){ if (item.id == id) { resultado = item; } }); return resultado; } /** * ContarTarjetasPorCategoria. Devuelve el número de tarjetas que están relacionadas con la categoría * pasada como parámetro. * * @param categoria identificador de la categoría * @result número de tarjetas relacionadas con la categoría */ function ContarTarjetasPorCategoria(categoria){ var resultado = 0; try{ $.each(listaTarjetas, function(i, item){ if (item.categoria == categoria){ resultado += 1; } }); } catch(e){ console.log(e.message); } return resultado; } /** * CargarTarjeta. Prepara todos los componentes de la página donde se muestra la tarjeta. Carga la tarjeta cuyo identificador * es pasado como parámetro. 
* * @param event Evento que se dispara al llamar a esta función * @param id identificador de la tarjeta * @param cambiarPagina booleano que indica si se debe cambiar a la página de 'PaginaDetalleTarjeta' */ function CargarTarjeta(event, id, cambiarPagina){ tarjetaActual = SeleccionarTarjetaPorId(id); //console.log("Foto actual: " + tarjetaActual.foto); /* switch(tipoDispositivo){ case 'iPhone3': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr("width", "250px"); break; case 'iPhone4': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '580px'); break; case 'tablet': $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto).attr('width', '700px'); break; } */ //Eliminamos las clases antiguas $('#lblTituloTarjeta').removeClass(); $('#imgGrandeTarjeta').attr('src', tarjetaActual.foto); $('#lblTituloTarjeta').html(tarjetaActual.titulo1).addClass('fuente-' + tarjetaActual.fuente); // Fondo try{ for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); $('#PaginaDetalleTarjeta').addClass('fondo'+tarjetaActual.fondo); /* console.log("Fondo actual: " + tarjetaActual.fondo); console.log("EL alto de la imagen es: "+$('#imgGrandeTarjeta').css("height")); console.log("EL ancho de la imagen es: "+$('#imgGrandeTarjeta').css("width")); */ } catch (e){ console.log(e.message); } if (tarjetaActual.favorita == 1) { $('#btnCambiarTarjetaFavorita').addClass("ui-btn-favorito"); /* TODO actualizar la tarjeta en la base de datos */ } else { $('#btnCambiarTarjetaFavorita').removeClass("ui-btn-favorito"); } // Se carga la página con la tarjeta if (cambiarPagina){ $.mobile.changePage($('#PaginaDetalleTarjeta')); } PararEvento(event); } function ReversoTarjeta(event){ //console.log("Entra en ReversoTarjeta"); try{ //Eliminamos las clases antiguas $('#lblTituloTarjetaReverso').removeClass(); for(i=1;i<=15;i++){ $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //Añadimos las clases nuevas $('#imgGrandeTarjetaReverso').attr('src', tarjetaActual.foto); $('#lblTituloTarjetaReverso').html(tarjetaActual.titulo2).addClass('fuente-' + tarjetaActual.fuente); $('#PaginaReversoTarjeta').addClass('fondo'+tarjetaActual.fondo); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); // Se cambia a la página del reverso $.mobile.changePage($('#PaginaReversoTarjeta'), {transition: 'flip',changeHash:'false'}); ReproducirSonidoEstatico(); PararEvento(event); } catch (e){ console.log(e.message); } } /** * ActualizarTarjeta. 
Actualiza los datos de la tarjeta actual, con los datos pasados a la función * *@param tarjeta Datos de la tarjeta actualizada */ function ActualizarTarjeta(event, tarjeta){ var listaTemp = []; var datosAntiguos; $.each(listaTarjetas, function(i, item) { if (tarjeta.id == item.id) { datosAntiguos = item; listaTemp.push(tarjeta); } else { listaTemp.push(item); } }); listaTarjetas = listaTemp; // Actualización en la base de datos var sql = "UPDATE Tarjetas SET titulo1='" + $.trim(tarjeta.titulo1) + "', titulo2='" + $.trim(tarjeta.titulo2) + "', fondo='" + $.trim(tarjeta.fondo) + "', foto='" + $.trim(tarjeta.foto) + "', sonido='" + $.trim(tarjeta.sonido) + "', favorita=" + tarjeta.favorita + ", anchoFoto=" + tarjeta.anchoFoto + ", altoFoto=" + tarjeta.altoFoto + ", fuente='" + tarjeta.fuente + "', idiomaA='" +idiomaSecundario.toLowerCase() + "', idiomaDe='" +idiomaPrincipal.toLowerCase() + "' WHERE id=" + tarjeta.id; console.log("Actualizamos una tarjeta--> "+sql); bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // Eliminación de las clases de los elementos antiguos (fondo, fuente, tamaño de la fuente) for(i=1;i<=15;i++){ $('#PaginaDetalleTarjeta').removeClass('fondo'+i); $('#PaginaReversoTarjeta').removeClass('fondo'+i); } //console.log("Clases de detalle tarjeta "+PaginaDetalleTarjeta.className); //console.log("Clases de reverso tarjeta "+PaginaReversoTarjeta.className); $('#lblTituloTarjeta').removeClass(); $('#lblTituloTarjetaReverso').removeClass(); // Actualizar los datos de la tarjeta actual, con los nuevos datos de la tarjeta actualizada CargarTarjeta(event, tarjeta.id, false); } /** * ComprobarEliminarTarjeta. Comprueba si el usuario ha seleccionado la eliminación de la tarjeta actual. */ function ComprobarEliminarTarjeta(event){ if (parseInt(event) == 1){ EliminaTarjetaActual(event) } } /** * EliminaTarjetaActual. Realiza la eliminación de la tarjeta actualmente seleccionada, tanto de la lista de tarjetas como de la base de datos. */ function EliminaTarjetaActual(event){ var listaTemp = []; try{ // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.id != tarjetaActual.id) {! listaTemp.push(item); } }); listaTarjetas = listaTemp; // Eliminación de la tarjeta de la BD var sql = "delete from Tarjetas where id=" + tarjetaActual.id; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); // ... actualización de la lista de tarjetas ... RepresentarListaTarjetas(categoriaActual); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual history.back(); } catch (e){ console.log("Error en EliminarTarjetaActual: " + e.message); } } function EliminarTarjetasPorCategoria(categoria){ var listaTemp = []; // Eliminación de la tarjeta de la lista actual $.each(listaTarjetas, function(i, item){ if (item.categoria != categoria) { listaTemp.push(item); } }); listaTarjetas = listaTemp; var sql = "delete from Tarjetas where categoria=" + categoria; bwBD.transaction(function(tx){ tx.executeSql(sql); }, errorBD); tarjetaActual = null; // ... y actualización de la lista de categorías RepresentarCategorias(); // Cargar la página de las tarjetas de la categoría actual $.mobile.changePage($('#PaginaCategorias')); } /** * EliminarListaTarjetas. Vacía completamente la lista de las tarjetas. 
*/ function EliminarListaTarjetas(){ listaTarjetas = []; } function LimpiarTraduccion(){ $('#pnlResultadoTraduccion').removeClass("in"); } /* * Obtiene un Token de acceso al servidor de Microsoft Translate a través del servicio web */ function getAccessToken(){ //Editado--> Pedro var urlObtenerAccesToken = 'http://www.bubblewords.info/WSTraducciones/GetAccessToken.asmx/getToken'; $.ajax({ url: urlObtenerAccesToken + '?callback=?', type: "GET", dataType: 'jsonp', success: function(data){ console.log("AccessToken Recibido"); accessToken=data[0]; estadoServidor=true; //navigator.notification.confirm("Hemos obtenido el token de acceso: "+accessToken) if (intervaloSinConexion){ clearInterval(intervaloSinConexion); intervaloSinConexion=undefined; console.warn("El servidor esta disponible, cambiamos o establecemos el intervalo a 9 minutos"); intervaloNormal = setInterval(getAccessToken, 9 * 60 * 1000); } hayConexion=true; }, timeout:5000, error: function(x, t, m) { console.log("AccessToken No recibido"); if (hayConexion ==true){ /* *En caso de que se tenga conexion de red, pero no sea accesible el servicio web que nos devuelve el token de acceso *solicitamos un token de acceso cada 30 segundos, hasta que el servidor responda, en cuyo caso se para el intervalo *sin conexión y comienza el intervalo normal de 9 minutos */ if (intervaloNormal){ clearInterval(intervaloNormal); intervaloNormal=undefined; console.warn("El servidor no esta disponible, cambiamos el intervalo a 30 segundos"); intervaloSinConexion = setInterval(getAccessToken, 30 * 1000); } if(t==="timeout") { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } else { if (estadoServidor==true){ navigator.notification.confirm(res_servidor_no_disponible+" Error: "+t,'',res_titulo_servidor_no_disponible,res_Aceptar); } estadoServidor=false; } } } }); } /* * Obtiene la traducción de un texto proporcionandole un idioma de origen y destino */ function TraduccionSugerida(event){ console.log("Hay conexion = "+hayConexion); console.log("Se ha traducido "+numTraducciones+" veces"); if (valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && (!liteVersion || (liteVersion && numTraducciones < 5))){ var texto=$('#inputTituloTarjeta').attr('value'); var destino =$('#lstIdiomaSecundario').attr('value'); var origen = $('#lstIdiomaPrincipal').attr('value'); if(origen != destino){ console.log("traduccion> de: "+origen+" destino "+destino+" palabra "+texto); var p = new Object; p.text = texto; p.from = origen; p.to = destino; p.oncomplete = 'ajaxTranslateCallback'; p.appId = "Bearer " + accessToken; var requestStr = "http://api.microsofttranslator.com/V2/Ajax.svc/Translate"; //navigator.notification.confirm("Solicitamos la traduccion: texto="+p.text+" origen="+p.from+" destino="+p.to+" token="+p.appId); $.ajax({ url: requestStr, type: "GET", data: p, dataType: 'jsonp', cache: true, }); } valorAnteriorTitulo=texto; PararEvento(event); }else if(valorAnteriorTitulo != $('#inputTituloTarjeta').val() && hayConexion && numTraducciones >=maxTraducciones && alertMostrado==false){ console.log("entramos por el mensaje de limitacion de trad."); $('#inputTitulo2Tarjeta').focus(); mensajeActualizar(res_lite_traducciones); alertMostrado=true; }else{ console.log("entramos por el ultimo else"); /* * En caso de que no haya conexion o que la palabra sea la misma que la anterior, * establecemos el foco en el segundo titulo. 
*/ $('#inputTitulo2Tarjeta').focus(); } } /* * Callback que controla la respuesta del servidor de Microsoft Translator */ function ajaxTranslateCallback(response) { if (response.length > 0) { traduccionSugerida = response; //navigator.notification.confirm("La traduccion se ha recibido con exito: "+traduccionSugerida); $('#lblTraduccionObtenida').html(response.toString()); $('#pnlResultadoTraduccion').addClass("in").css('zIndex', 300); $('.tooltip-inner').textfill({maxFontPixels: 200, minFontPixels:4}); if (liteVersion){ numTraducciones++; } } } /** * AplicarTraduccion. Se ha pulsado sobre la opción de aplicar la traducción sugerida. */ function AplicarTraduccion(event){ $('#inputTitulo2Tarjeta').attr('value', traduccionSugerida); $('#pnlResultadoTraduccion').css('zIndex', -200); LimpiarTraduccion(); PararEvento(event); } /** * SeleccionarFondoTarjeta. Se ha seleccionado un fondo para la tarjeta que se desea crear. Cambia * la imagen del formulario 'Nueva tarjeta' y guarda el fondo actualmente seleccionado. * * @param numero número del fondo elegido * @param tipoDispositivo indica el tipo de dispositivo */ function SeleccionarFondoTarjeta(event, numero, tipoDispositivo, debeVolver){ //console.log("Fondo seleccionado: " + numero.toString() + ", tipo dispositivo: " + tipoDispositivo); try{ fondoActual = numero; $('#imgFondoTarjeta').attr('src', 'img/texturas/muestras/textura' + numero + '.jpg') // Mostrar y ocultar capas $('#pnlMostrarImagenFondo').addClass("in").show(); //console.log("SeleccionarFontoTarjeta. DebeVolver: " + debeVolver); if (debeVolver){ Volver(event); } } catch(e){ console.log(e.message); } } /** * MostrarImagenDeGaleria. Se quiere mostrar la imagen que ha seleccionado el usuario desde la galería. */ function MostrarImagenDeGaleria(imageData, tipoDispositivo){ var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; //console.log("MostrarImagenDeGaleria. Ancho: " + anchoFoto + ", alto: " + altoFoto); // Ocultar y mostrar capas $('#pnlMostrarTextoFotoGaleria').removeClass("in").hide(); $("#imgPrincipalTarjetaGaleria").attr("src", imageData); $('#pnlMostrarImagenGaleria').addClass("in").show(); $('#pnlMostrarImagenCamara').removeClass("in").hide(); $('#pnlMostrarTextoFotoCamara').addClass("in").show(); // Quitar cualquier foto de la cámara anterior $('#imgPrincipalTarjetaCamara').attr('src',''); } /** * MostrarImagenDeCamara. Se quiere mostrar la imagen que se ha tomado desde la cámara. */ function MostrarImagenDeCamara(imageData, tipoDispositivo){ console.log("Establecemos la imagen: "+imageData); var image = new Image(); image.src = imageData; anchoFoto = image.width; altoFoto = image.height; existeFoto = true; // Ocultar y mostrar capas $('#pnlMostrarTextoFotoCamara').removeClass("in").hide(); $("#imgPrincipalTarjetaCamara").attr("src", imageData); $('#pnlMostrarImagenCamara').addClass("in").show(); $('#pnlMostrarImagenGaleria').removeClass("in").hide(); $('#pnlMostrarTextoFotoGaleria').addClass("in").show(); // Quitar cualquier foto anterior de la galería $('#imgPrincipalTarjetaGaleria').attr('src',''); }
arListaTarjetas(categori
identifier_name
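getAccessToken above tries to keep exactly one refresh timer alive: nine minutes between requests while the token service answers, thirty seconds while it does not, with the switching logic spread across the AJAX success and error callbacks. One compact way to express that scheduling, assuming the intervaloNormal and intervaloSinConexion globals used in this file; programarRefresco is a hypothetical helper name:

/*
 * Sketch only: one active timer at a time. Call programarRefresco(true) from
 * the AJAX success callback and programarRefresco(false) from the error
 * callback to get the 9-minute / 30-second behaviour described above.
 */
function programarRefresco(servidorDisponible){
    if (servidorDisponible){
        if (intervaloSinConexion){ clearInterval(intervaloSinConexion); intervaloSinConexion = undefined; }
        if (!intervaloNormal){ intervaloNormal = setInterval(getAccessToken, 9 * 60 * 1000); }
    } else {
        if (intervaloNormal){ clearInterval(intervaloNormal); intervaloNormal = undefined; }
        if (!intervaloSinConexion){ intervaloSinConexion = setInterval(getAccessToken, 30 * 1000); }
    }
}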
annotate.rs
extern crate chrono; extern crate id3; extern crate mp3_duration; extern crate regex; extern crate reqwest; use std::{ fs::{ read_dir, rename, }, io::{ Read, }, iter::{ repeat_with, }, path::{ Path, PathBuf, }, time::{ Duration, } }; use chrono::{ Datelike, format::{ ParseResult, }, NaiveDate, }; use id3::{ frame::{ Picture, PictureType, }, Tag, Timestamp, Version, }; use regex::{ Regex, }; use crate::{ types::{ AlbumFull, ClientWithToken, SimpleError, Track, }, utils::{ get_with_retry, }, whitelist::{ add_whitelist, }, }; #[derive(Debug)] pub struct TrackData { album_name: String, album_artists: String, release_date: Option<Timestamp>, image_url: Option<String>, track_name: String, track_number: i32, track_artists: Option<String>, expected_duration_ms: i32, } impl TrackData { pub fn release_date_from( album_full: &AlbumFull, ) -> ParseResult<Timestamp> { let mut year = -1; let mut month = None; let mut day = None; if album_full.release_date_precision == "year" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y", )?; year = date.year(); } if album_full.release_date_precision == "month" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m", )?; year = date.year(); month = Some(date.month() as u8); } else if album_full.release_date_precision == "day" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m-%d", ).expect("wat"); year = date.year(); month = Some(date.month() as u8); day = Some(date.day() as u8); } Ok(Timestamp { year: year, month: month, day: day, hour: None, minute: None, second: None, }) } pub fn from( track: Track, album_full: &AlbumFull, ) -> Self { let album_artists = album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); let track_artists = track.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); Self { album_name: album_full.name.clone(), album_artists: album_artists.clone(), release_date: Self::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: track.name, track_number: track.track_number, track_artists: Some(track_artists).filter(|artists| { // need clone? artists != &album_artists }), expected_duration_ms: track.duration_ms, } } } fn get_tracks_files( abs_path: &Path, ) -> Result<Vec<PathBuf>, SimpleError> { read_dir(abs_path).map_err(SimpleError::from).and_then(|dir_iter| { dir_iter.map(|entry| { entry.map(|entry_ok| { entry_ok.path() }).map_err(SimpleError::from) }).collect::<Result<Vec<PathBuf>, SimpleError>>() }).map(|mut paths| { paths.sort(); paths.into_iter().filter(|path| { path.is_file() }).collect() }) } pub fn
( album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<Vec<TrackData>, SimpleError> { let mut tracks = Vec::new(); let mut paging = album_full.tracks.clone(); while let Some(next_url) = paging.next { tracks.append(&mut paging.items); paging = get_with_retry( &next_url[..], client_with_token, )?; } tracks.append(&mut paging.items); Ok(tracks.into_iter().map(|track| { TrackData::from(track, album_full) }).collect()) } fn norm_track_number( track_number: i32, ) -> String { if track_number < 10 { return format!("0{}", track_number); } track_number.to_string() } fn expected_time( file: &PathBuf, track_data: &TrackData, ) -> bool { let actual_duration = mp3_duration::from_path(file.as_path()).expect( &format!("error measuring {}", file.display())[..], ); let expected_duration = Duration::from_millis( track_data.expected_duration_ms as u64, ); actual_duration.checked_sub(expected_duration).or( expected_duration.checked_sub(actual_duration) ).and_then(|res| { res.checked_sub(Duration::from_secs(5)) }).is_none() } fn get_image( image_url: &str, ) -> Result<Vec<u8>, SimpleError> { reqwest::get(image_url).map_err(SimpleError::from).and_then(|response| { response.bytes().map(|byte_res| { byte_res.map_err(SimpleError::from) }).collect() }) } fn add_image( tags: &mut Tag, image: &Vec<u8>, ) { tags.add_picture(Picture { mime_type: "image/jpeg".to_string(), picture_type: PictureType::CoverFront, description: format!( "Cover for {} by {}", tags.album().expect("error in writing tags"), tags.artist().expect("error in writing tags"), ), data: image.clone(), }); } fn annotate_tags( tags: &mut Tag, file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, ) -> String { lazy_static! { static ref INVALID_FILE_CHRS: Regex = Regex::new(r"[^\w\s.\(\)]+").unwrap(); } let mut new_name = format!( "{} {}.mp3", norm_track_number(track_data.track_number), track_data.track_name, ); if !expected_time(file, &track_data) { new_name = format!( "{} {} (unexpected duration).mp3", norm_track_number(track_data.track_number), track_data.track_name, ); } tags.set_album(track_data.album_name); let album_artists = track_data.album_artists.clone(); track_data.track_artists.map(|artists| { tags.set_album_artist(album_artists.clone()); tags.set_artist(artists); }).unwrap_or_else(|| { tags.set_artist(album_artists); }); track_data.release_date.map(|date| { tags.set_date_released(date); }); tags.set_title(track_data.track_name); tags.set_track(track_data.track_number as u32); if !album_image.is_empty() { add_image(tags, album_image) } INVALID_FILE_CHRS.replace_all(&new_name[..], "_").to_string() } fn annotate_file( file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, rename_file: bool, ) -> Result<(), SimpleError> { let mut tags = Tag::new(); let new_name = annotate_tags(&mut tags, file, track_data, album_image); tags.write_to_path(file, Version::Id3v24).map_err(SimpleError::from) .and_then(|_| { if rename_file { return file.as_path().file_name().ok_or(SimpleError { msg: format!("{} not file?", file.display()), }).and_then(|file_name| { if new_name != file_name.to_string_lossy() { return rename( file, file.with_file_name(new_name), ).map_err(SimpleError::from); } Ok(()) }); } return Ok(()); }) } pub fn annotate( dir: &PathBuf, album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let mut rename_files = true; let files = get_tracks_files(&abs_path)?; let mut data = get_tracks_data(album_full, 
client_with_token)?; if files.len() != data.len() { println!( "number of files in {} should be {}, not renaming", dir.display(), data.len(), ); rename_files = false; } let album_image = album_full.images.iter().next().map(|image| { image.url.clone() }).map(|url| { get_image(&url[..]).unwrap_or_else(|err| { println!("error getting image for {}: {}", album_full.name, err.msg); vec![] }) }).unwrap_or_else(|| { println!("no image for {}", album_full.name); vec![] }); let mut track_counter = data.len() as i32; data.extend(repeat_with(|| { let track_data = TrackData { album_name: album_full.name.clone(), album_artists: album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "), release_date: TrackData::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: "unknown track name".to_string(), track_number: track_counter, track_artists: None, expected_duration_ms: 0, }; track_counter += 1; track_data }).take(files.len())); files.iter().zip( data.into_iter(), ).map(|(track_file, track_data)| { annotate_file(track_file, track_data, &album_image, rename_files) .and_then(|_| { add_whitelist(dir.to_string_lossy().to_string()) }) }).collect() } pub fn test_run( dir: &PathBuf, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let files = get_tracks_files(&abs_path)?; files.iter().map(|track_file| { mp3_duration::from_path(track_file.as_path()).map(|_| { () }).unwrap_or_else(|err| { println!("error measuring {}: {}", track_file.display(), err); }); Ok(()) }).collect() }
get_tracks_data
identifier_name
annotate.rs
extern crate chrono; extern crate id3; extern crate mp3_duration; extern crate regex; extern crate reqwest; use std::{ fs::{ read_dir, rename, }, io::{ Read, }, iter::{ repeat_with, }, path::{ Path, PathBuf, }, time::{ Duration, } }; use chrono::{ Datelike, format::{ ParseResult, }, NaiveDate, }; use id3::{ frame::{ Picture, PictureType, }, Tag, Timestamp, Version, }; use regex::{ Regex, }; use crate::{ types::{ AlbumFull, ClientWithToken, SimpleError, Track, }, utils::{ get_with_retry, }, whitelist::{ add_whitelist, }, }; #[derive(Debug)] pub struct TrackData { album_name: String, album_artists: String, release_date: Option<Timestamp>, image_url: Option<String>, track_name: String, track_number: i32, track_artists: Option<String>, expected_duration_ms: i32, } impl TrackData { pub fn release_date_from( album_full: &AlbumFull, ) -> ParseResult<Timestamp> { let mut year = -1; let mut month = None; let mut day = None; if album_full.release_date_precision == "year" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y", )?; year = date.year(); } if album_full.release_date_precision == "month" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m", )?; year = date.year(); month = Some(date.month() as u8); } else if album_full.release_date_precision == "day" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m-%d", ).expect("wat"); year = date.year(); month = Some(date.month() as u8); day = Some(date.day() as u8); } Ok(Timestamp { year: year, month: month, day: day, hour: None, minute: None, second: None, }) } pub fn from( track: Track, album_full: &AlbumFull, ) -> Self { let album_artists = album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); let track_artists = track.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); Self { album_name: album_full.name.clone(), album_artists: album_artists.clone(), release_date: Self::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: track.name, track_number: track.track_number, track_artists: Some(track_artists).filter(|artists| { // need clone? 
artists != &album_artists }), expected_duration_ms: track.duration_ms, } } } fn get_tracks_files( abs_path: &Path, ) -> Result<Vec<PathBuf>, SimpleError> { read_dir(abs_path).map_err(SimpleError::from).and_then(|dir_iter| { dir_iter.map(|entry| { entry.map(|entry_ok| { entry_ok.path() }).map_err(SimpleError::from) }).collect::<Result<Vec<PathBuf>, SimpleError>>() }).map(|mut paths| { paths.sort(); paths.into_iter().filter(|path| { path.is_file() }).collect() }) } pub fn get_tracks_data( album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<Vec<TrackData>, SimpleError> { let mut tracks = Vec::new(); let mut paging = album_full.tracks.clone(); while let Some(next_url) = paging.next { tracks.append(&mut paging.items); paging = get_with_retry( &next_url[..], client_with_token, )?; } tracks.append(&mut paging.items); Ok(tracks.into_iter().map(|track| { TrackData::from(track, album_full) }).collect()) } fn norm_track_number( track_number: i32, ) -> String { if track_number < 10 { return format!("0{}", track_number); } track_number.to_string() } fn expected_time( file: &PathBuf, track_data: &TrackData, ) -> bool { let actual_duration = mp3_duration::from_path(file.as_path()).expect( &format!("error measuring {}", file.display())[..], ); let expected_duration = Duration::from_millis( track_data.expected_duration_ms as u64, ); actual_duration.checked_sub(expected_duration).or( expected_duration.checked_sub(actual_duration) ).and_then(|res| { res.checked_sub(Duration::from_secs(5)) }).is_none() } fn get_image( image_url: &str, ) -> Result<Vec<u8>, SimpleError> { reqwest::get(image_url).map_err(SimpleError::from).and_then(|response| { response.bytes().map(|byte_res| { byte_res.map_err(SimpleError::from) }).collect() }) } fn add_image( tags: &mut Tag, image: &Vec<u8>, ) { tags.add_picture(Picture { mime_type: "image/jpeg".to_string(), picture_type: PictureType::CoverFront, description: format!( "Cover for {} by {}",
} fn annotate_tags( tags: &mut Tag, file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, ) -> String { lazy_static! { static ref INVALID_FILE_CHRS: Regex = Regex::new(r"[^\w\s.\(\)]+").unwrap(); } let mut new_name = format!( "{} {}.mp3", norm_track_number(track_data.track_number), track_data.track_name, ); if !expected_time(file, &track_data) { new_name = format!( "{} {} (unexpected duration).mp3", norm_track_number(track_data.track_number), track_data.track_name, ); } tags.set_album(track_data.album_name); let album_artists = track_data.album_artists.clone(); track_data.track_artists.map(|artists| { tags.set_album_artist(album_artists.clone()); tags.set_artist(artists); }).unwrap_or_else(|| { tags.set_artist(album_artists); }); track_data.release_date.map(|date| { tags.set_date_released(date); }); tags.set_title(track_data.track_name); tags.set_track(track_data.track_number as u32); if !album_image.is_empty() { add_image(tags, album_image) } INVALID_FILE_CHRS.replace_all(&new_name[..], "_").to_string() } fn annotate_file( file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, rename_file: bool, ) -> Result<(), SimpleError> { let mut tags = Tag::new(); let new_name = annotate_tags(&mut tags, file, track_data, album_image); tags.write_to_path(file, Version::Id3v24).map_err(SimpleError::from) .and_then(|_| { if rename_file { return file.as_path().file_name().ok_or(SimpleError { msg: format!("{} not file?", file.display()), }).and_then(|file_name| { if new_name != file_name.to_string_lossy() { return rename( file, file.with_file_name(new_name), ).map_err(SimpleError::from); } Ok(()) }); } return Ok(()); }) } pub fn annotate( dir: &PathBuf, album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let mut rename_files = true; let files = get_tracks_files(&abs_path)?; let mut data = get_tracks_data(album_full, client_with_token)?; if files.len() != data.len() { println!( "number of files in {} should be {}, not renaming", dir.display(), data.len(), ); rename_files = false; } let album_image = album_full.images.iter().next().map(|image| { image.url.clone() }).map(|url| { get_image(&url[..]).unwrap_or_else(|err| { println!("error getting image for {}: {}", album_full.name, err.msg); vec![] }) }).unwrap_or_else(|| { println!("no image for {}", album_full.name); vec![] }); let mut track_counter = data.len() as i32; data.extend(repeat_with(|| { let track_data = TrackData { album_name: album_full.name.clone(), album_artists: album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "), release_date: TrackData::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: "unknown track name".to_string(), track_number: track_counter, track_artists: None, expected_duration_ms: 0, }; track_counter += 1; track_data }).take(files.len())); files.iter().zip( data.into_iter(), ).map(|(track_file, track_data)| { annotate_file(track_file, track_data, &album_image, rename_files) .and_then(|_| { add_whitelist(dir.to_string_lossy().to_string()) }) }).collect() } pub fn test_run( dir: &PathBuf, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let files = get_tracks_files(&abs_path)?; files.iter().map(|track_file| { mp3_duration::from_path(track_file.as_path()).map(|_| { () }).unwrap_or_else(|err| { println!("error measuring {}: {}", 
track_file.display(), err); }); Ok(()) }).collect() }
tags.album().expect("error in writing tags"), tags.artist().expect("error in writing tags"), ), data: image.clone(), });
random_line_split
annotate.rs
extern crate chrono; extern crate id3; extern crate mp3_duration; extern crate regex; extern crate reqwest; use std::{ fs::{ read_dir, rename, }, io::{ Read, }, iter::{ repeat_with, }, path::{ Path, PathBuf, }, time::{ Duration, } }; use chrono::{ Datelike, format::{ ParseResult, }, NaiveDate, }; use id3::{ frame::{ Picture, PictureType, }, Tag, Timestamp, Version, }; use regex::{ Regex, }; use crate::{ types::{ AlbumFull, ClientWithToken, SimpleError, Track, }, utils::{ get_with_retry, }, whitelist::{ add_whitelist, }, }; #[derive(Debug)] pub struct TrackData { album_name: String, album_artists: String, release_date: Option<Timestamp>, image_url: Option<String>, track_name: String, track_number: i32, track_artists: Option<String>, expected_duration_ms: i32, } impl TrackData { pub fn release_date_from( album_full: &AlbumFull, ) -> ParseResult<Timestamp> { let mut year = -1; let mut month = None; let mut day = None; if album_full.release_date_precision == "year"
if album_full.release_date_precision == "month" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m", )?; year = date.year(); month = Some(date.month() as u8); } else if album_full.release_date_precision == "day" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m-%d", ).expect("wat"); year = date.year(); month = Some(date.month() as u8); day = Some(date.day() as u8); } Ok(Timestamp { year: year, month: month, day: day, hour: None, minute: None, second: None, }) } pub fn from( track: Track, album_full: &AlbumFull, ) -> Self { let album_artists = album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); let track_artists = track.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); Self { album_name: album_full.name.clone(), album_artists: album_artists.clone(), release_date: Self::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: track.name, track_number: track.track_number, track_artists: Some(track_artists).filter(|artists| { // need clone? artists != &album_artists }), expected_duration_ms: track.duration_ms, } } } fn get_tracks_files( abs_path: &Path, ) -> Result<Vec<PathBuf>, SimpleError> { read_dir(abs_path).map_err(SimpleError::from).and_then(|dir_iter| { dir_iter.map(|entry| { entry.map(|entry_ok| { entry_ok.path() }).map_err(SimpleError::from) }).collect::<Result<Vec<PathBuf>, SimpleError>>() }).map(|mut paths| { paths.sort(); paths.into_iter().filter(|path| { path.is_file() }).collect() }) } pub fn get_tracks_data( album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<Vec<TrackData>, SimpleError> { let mut tracks = Vec::new(); let mut paging = album_full.tracks.clone(); while let Some(next_url) = paging.next { tracks.append(&mut paging.items); paging = get_with_retry( &next_url[..], client_with_token, )?; } tracks.append(&mut paging.items); Ok(tracks.into_iter().map(|track| { TrackData::from(track, album_full) }).collect()) } fn norm_track_number( track_number: i32, ) -> String { if track_number < 10 { return format!("0{}", track_number); } track_number.to_string() } fn expected_time( file: &PathBuf, track_data: &TrackData, ) -> bool { let actual_duration = mp3_duration::from_path(file.as_path()).expect( &format!("error measuring {}", file.display())[..], ); let expected_duration = Duration::from_millis( track_data.expected_duration_ms as u64, ); actual_duration.checked_sub(expected_duration).or( expected_duration.checked_sub(actual_duration) ).and_then(|res| { res.checked_sub(Duration::from_secs(5)) }).is_none() } fn get_image( image_url: &str, ) -> Result<Vec<u8>, SimpleError> { reqwest::get(image_url).map_err(SimpleError::from).and_then(|response| { response.bytes().map(|byte_res| { byte_res.map_err(SimpleError::from) }).collect() }) } fn add_image( tags: &mut Tag, image: &Vec<u8>, ) { tags.add_picture(Picture { mime_type: "image/jpeg".to_string(), picture_type: PictureType::CoverFront, description: format!( "Cover for {} by {}", tags.album().expect("error in writing tags"), tags.artist().expect("error in writing tags"), ), data: image.clone(), }); } fn annotate_tags( tags: &mut Tag, file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, ) -> String { lazy_static! 
{ static ref INVALID_FILE_CHRS: Regex = Regex::new(r"[^\w\s.\(\)]+").unwrap(); } let mut new_name = format!( "{} {}.mp3", norm_track_number(track_data.track_number), track_data.track_name, ); if !expected_time(file, &track_data) { new_name = format!( "{} {} (unexpected duration).mp3", norm_track_number(track_data.track_number), track_data.track_name, ); } tags.set_album(track_data.album_name); let album_artists = track_data.album_artists.clone(); track_data.track_artists.map(|artists| { tags.set_album_artist(album_artists.clone()); tags.set_artist(artists); }).unwrap_or_else(|| { tags.set_artist(album_artists); }); track_data.release_date.map(|date| { tags.set_date_released(date); }); tags.set_title(track_data.track_name); tags.set_track(track_data.track_number as u32); if !album_image.is_empty() { add_image(tags, album_image) } INVALID_FILE_CHRS.replace_all(&new_name[..], "_").to_string() } fn annotate_file( file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, rename_file: bool, ) -> Result<(), SimpleError> { let mut tags = Tag::new(); let new_name = annotate_tags(&mut tags, file, track_data, album_image); tags.write_to_path(file, Version::Id3v24).map_err(SimpleError::from) .and_then(|_| { if rename_file { return file.as_path().file_name().ok_or(SimpleError { msg: format!("{} not file?", file.display()), }).and_then(|file_name| { if new_name != file_name.to_string_lossy() { return rename( file, file.with_file_name(new_name), ).map_err(SimpleError::from); } Ok(()) }); } return Ok(()); }) } pub fn annotate( dir: &PathBuf, album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let mut rename_files = true; let files = get_tracks_files(&abs_path)?; let mut data = get_tracks_data(album_full, client_with_token)?; if files.len() != data.len() { println!( "number of files in {} should be {}, not renaming", dir.display(), data.len(), ); rename_files = false; } let album_image = album_full.images.iter().next().map(|image| { image.url.clone() }).map(|url| { get_image(&url[..]).unwrap_or_else(|err| { println!("error getting image for {}: {}", album_full.name, err.msg); vec![] }) }).unwrap_or_else(|| { println!("no image for {}", album_full.name); vec![] }); let mut track_counter = data.len() as i32; data.extend(repeat_with(|| { let track_data = TrackData { album_name: album_full.name.clone(), album_artists: album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "), release_date: TrackData::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: "unknown track name".to_string(), track_number: track_counter, track_artists: None, expected_duration_ms: 0, }; track_counter += 1; track_data }).take(files.len())); files.iter().zip( data.into_iter(), ).map(|(track_file, track_data)| { annotate_file(track_file, track_data, &album_image, rename_files) .and_then(|_| { add_whitelist(dir.to_string_lossy().to_string()) }) }).collect() } pub fn test_run( dir: &PathBuf, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let files = get_tracks_files(&abs_path)?; files.iter().map(|track_file| { mp3_duration::from_path(track_file.as_path()).map(|_| { () }).unwrap_or_else(|err| { println!("error measuring {}: {}", track_file.display(), err); }); Ok(()) }).collect() }
{ let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y", )?; year = date.year(); }
conditional_block
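The annotate_tags code in the records sanitizes generated file names by replacing runs of characters outside word characters, whitespace, '.', '(' and ')' with an underscore, using the same regex crate the records import. A small standalone illustration of that behaviour is below; the sanitize helper name is an assumption for the example, and the snippet depends on the regex crate.

use regex::Regex;

// Replace runs of disallowed characters with a single underscore, mirroring
// the INVALID_FILE_CHRS pattern used when building new file names.
fn sanitize(name: &str) -> String {
    let invalid = Regex::new(r"[^\w\s.\(\)]+").unwrap();
    invalid.replace_all(name, "_").to_string()
}

fn main() {
    assert_eq!(sanitize("01 Track / Name?.mp3"), "01 Track _ Name_.mp3");
}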
annotate.rs
extern crate chrono; extern crate id3; extern crate mp3_duration; extern crate regex; extern crate reqwest; use std::{ fs::{ read_dir, rename, }, io::{ Read, }, iter::{ repeat_with, }, path::{ Path, PathBuf, }, time::{ Duration, } }; use chrono::{ Datelike, format::{ ParseResult, }, NaiveDate, }; use id3::{ frame::{ Picture, PictureType, }, Tag, Timestamp, Version, }; use regex::{ Regex, }; use crate::{ types::{ AlbumFull, ClientWithToken, SimpleError, Track, }, utils::{ get_with_retry, }, whitelist::{ add_whitelist, }, }; #[derive(Debug)] pub struct TrackData { album_name: String, album_artists: String, release_date: Option<Timestamp>, image_url: Option<String>, track_name: String, track_number: i32, track_artists: Option<String>, expected_duration_ms: i32, } impl TrackData { pub fn release_date_from( album_full: &AlbumFull, ) -> ParseResult<Timestamp> { let mut year = -1; let mut month = None; let mut day = None; if album_full.release_date_precision == "year" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y", )?; year = date.year(); } if album_full.release_date_precision == "month" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m", )?; year = date.year(); month = Some(date.month() as u8); } else if album_full.release_date_precision == "day" { let date = NaiveDate::parse_from_str( &album_full.release_date[..], "%Y-%m-%d", ).expect("wat"); year = date.year(); month = Some(date.month() as u8); day = Some(date.day() as u8); } Ok(Timestamp { year: year, month: month, day: day, hour: None, minute: None, second: None, }) } pub fn from( track: Track, album_full: &AlbumFull, ) -> Self { let album_artists = album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); let track_artists = track.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "); Self { album_name: album_full.name.clone(), album_artists: album_artists.clone(), release_date: Self::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: track.name, track_number: track.track_number, track_artists: Some(track_artists).filter(|artists| { // need clone? artists != &album_artists }), expected_duration_ms: track.duration_ms, } } } fn get_tracks_files( abs_path: &Path, ) -> Result<Vec<PathBuf>, SimpleError> { read_dir(abs_path).map_err(SimpleError::from).and_then(|dir_iter| { dir_iter.map(|entry| { entry.map(|entry_ok| { entry_ok.path() }).map_err(SimpleError::from) }).collect::<Result<Vec<PathBuf>, SimpleError>>() }).map(|mut paths| { paths.sort(); paths.into_iter().filter(|path| { path.is_file() }).collect() }) } pub fn get_tracks_data( album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<Vec<TrackData>, SimpleError> { let mut tracks = Vec::new(); let mut paging = album_full.tracks.clone(); while let Some(next_url) = paging.next { tracks.append(&mut paging.items); paging = get_with_retry( &next_url[..], client_with_token, )?; } tracks.append(&mut paging.items); Ok(tracks.into_iter().map(|track| { TrackData::from(track, album_full) }).collect()) } fn norm_track_number( track_number: i32, ) -> String
fn expected_time( file: &PathBuf, track_data: &TrackData, ) -> bool { let actual_duration = mp3_duration::from_path(file.as_path()).expect( &format!("error measuring {}", file.display())[..], ); let expected_duration = Duration::from_millis( track_data.expected_duration_ms as u64, ); actual_duration.checked_sub(expected_duration).or( expected_duration.checked_sub(actual_duration) ).and_then(|res| { res.checked_sub(Duration::from_secs(5)) }).is_none() } fn get_image( image_url: &str, ) -> Result<Vec<u8>, SimpleError> { reqwest::get(image_url).map_err(SimpleError::from).and_then(|response| { response.bytes().map(|byte_res| { byte_res.map_err(SimpleError::from) }).collect() }) } fn add_image( tags: &mut Tag, image: &Vec<u8>, ) { tags.add_picture(Picture { mime_type: "image/jpeg".to_string(), picture_type: PictureType::CoverFront, description: format!( "Cover for {} by {}", tags.album().expect("error in writing tags"), tags.artist().expect("error in writing tags"), ), data: image.clone(), }); } fn annotate_tags( tags: &mut Tag, file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, ) -> String { lazy_static! { static ref INVALID_FILE_CHRS: Regex = Regex::new(r"[^\w\s.\(\)]+").unwrap(); } let mut new_name = format!( "{} {}.mp3", norm_track_number(track_data.track_number), track_data.track_name, ); if !expected_time(file, &track_data) { new_name = format!( "{} {} (unexpected duration).mp3", norm_track_number(track_data.track_number), track_data.track_name, ); } tags.set_album(track_data.album_name); let album_artists = track_data.album_artists.clone(); track_data.track_artists.map(|artists| { tags.set_album_artist(album_artists.clone()); tags.set_artist(artists); }).unwrap_or_else(|| { tags.set_artist(album_artists); }); track_data.release_date.map(|date| { tags.set_date_released(date); }); tags.set_title(track_data.track_name); tags.set_track(track_data.track_number as u32); if !album_image.is_empty() { add_image(tags, album_image) } INVALID_FILE_CHRS.replace_all(&new_name[..], "_").to_string() } fn annotate_file( file: &PathBuf, track_data: TrackData, album_image: &Vec<u8>, rename_file: bool, ) -> Result<(), SimpleError> { let mut tags = Tag::new(); let new_name = annotate_tags(&mut tags, file, track_data, album_image); tags.write_to_path(file, Version::Id3v24).map_err(SimpleError::from) .and_then(|_| { if rename_file { return file.as_path().file_name().ok_or(SimpleError { msg: format!("{} not file?", file.display()), }).and_then(|file_name| { if new_name != file_name.to_string_lossy() { return rename( file, file.with_file_name(new_name), ).map_err(SimpleError::from); } Ok(()) }); } return Ok(()); }) } pub fn annotate( dir: &PathBuf, album_full: &AlbumFull, client_with_token: &ClientWithToken, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let mut rename_files = true; let files = get_tracks_files(&abs_path)?; let mut data = get_tracks_data(album_full, client_with_token)?; if files.len() != data.len() { println!( "number of files in {} should be {}, not renaming", dir.display(), data.len(), ); rename_files = false; } let album_image = album_full.images.iter().next().map(|image| { image.url.clone() }).map(|url| { get_image(&url[..]).unwrap_or_else(|err| { println!("error getting image for {}: {}", album_full.name, err.msg); vec![] }) }).unwrap_or_else(|| { println!("no image for {}", album_full.name); vec![] }); let mut track_counter = data.len() as i32; data.extend(repeat_with(|| { let track_data = TrackData { album_name: 
album_full.name.clone(), album_artists: album_full.artists.iter().map(|artist| { artist.name.clone() }).collect::<Vec<String>>().join(", "), release_date: TrackData::release_date_from(album_full).ok(), image_url: album_full.images.iter().next().map(|image| { image.url.clone() }), track_name: "unknown track name".to_string(), track_number: track_counter, track_artists: None, expected_duration_ms: 0, }; track_counter += 1; track_data }).take(files.len())); files.iter().zip( data.into_iter(), ).map(|(track_file, track_data)| { annotate_file(track_file, track_data, &album_image, rename_files) .and_then(|_| { add_whitelist(dir.to_string_lossy().to_string()) }) }).collect() } pub fn test_run( dir: &PathBuf, ) -> Result<(), SimpleError> { let abs_path = Path::new("/home/banana/music/").join(&dir.as_path()); let files = get_tracks_files(&abs_path)?; files.iter().map(|track_file| { mp3_duration::from_path(track_file.as_path()).map(|_| { () }).unwrap_or_else(|err| { println!("error measuring {}: {}", track_file.display(), err); }); Ok(()) }).collect() }
{ if track_number < 10 { return format!("0{}", track_number); } track_number.to_string() }
identifier_body
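Two small helpers recur throughout the annotate.rs records: zero-padding of single-digit track numbers and a five-second tolerance when comparing a file's measured duration with the duration reported for the track. The sketch below restates both with only the standard library; it takes a plain Duration argument in place of the records' mp3_duration measurement, and within_tolerance is an assumed name for the check that expected_time performs.

use std::time::Duration;

// Zero-pad single-digit track numbers, as in norm_track_number.
fn norm_track_number(track_number: i32) -> String {
    if track_number < 10 {
        format!("0{}", track_number)
    } else {
        track_number.to_string()
    }
}

// True when the measured duration is within five seconds of the expected one.
fn within_tolerance(actual: Duration, expected_ms: u64) -> bool {
    let expected = Duration::from_millis(expected_ms);
    let diff = if actual > expected { actual - expected } else { expected - actual };
    diff <= Duration::from_secs(5)
}

fn main() {
    assert_eq!(norm_track_number(7), "07");
    assert!(within_tolerance(Duration::from_secs(182), 180_000));
}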
terminal.rs
use std::{ io::{self, Write}, ops::Range, cmp::min, iter::Peekable }; use crate::{ iface::{TerminalPlugin, FormatLike}, config }; use smallvec::{smallvec, SmallVec}; use terminfo::{expand, Database, capability as cap}; // pub const CORNER_SW: char = '╗'; const CORNER_SE: char = '╔'; const CORNER_NSE: char = '╠'; const LINE: char = '═'; const TEXT_START: char = '⟦'; const TEXT_END: char = '⟧'; const CORNER_NS: char = '║'; // pub const ERROR_START: char = '!'; // pub const ERROR_END: char = '!'; // pub const CORNER_NW: char = '╝'; const CORNER_NE: char = '╚'; const ERR_START: &str = "!!"; type Color = u8; mod color { #![allow(unused)] use super::Color; pub const TEXT_WHITE: Color = 251; pub const CYAN: Color = 6; pub const YELLOW: Color = 3; pub const RED: Color = 1; pub const BRIGHT_RED: Color = 9; pub const BRIGHT_GREEN: Color = 10; pub const LIGHT_GRAY: Color = 243; pub const LESS_LIGHT_GRAY: Color = 240; pub const JUNGLE_GREEN: Color = 112; pub const ORANGE: Color = 208; pub const SIGNALING_RED: Color = 196; } fn fmt_to_color(fmt: FormatLike) -> Color { use self::FormatLike::*; match fmt { Text => color::TEXT_WHITE, PrimaryText => color::JUNGLE_GREEN, Lines => color::LIGHT_GRAY, SoftWarning => color::ORANGE, HardWarning => color::SIGNALING_RED, Error => color::RED, ExplicitOk => color::BRIGHT_GREEN, Hidden => color::LESS_LIGHT_GRAY } } #[derive(Debug)] pub struct Terminal { column_count: usize, text_segments: SmallVec<[SmallVec<[TextSegment; 2]>; 2]>, error_segments: Vec<(&'static str, String)>, terminfo: Database, } impl TerminalPlugin for Terminal { fn new(column_count: usize) -> Self { let terminfo = Database::from_env().unwrap(); Terminal { column_count, text_segments: Default::default(), error_segments: Default::default(), terminfo } } fn add_text_segment(&mut self, text: &str, fmt_args: FormatLike) { self.text_segments.push(smallvec![TextSegment::new(text, fmt_args)]); } fn add_error_segment(&mut self, scope: &'static str, msg: &str) { self.error_segments.push((scope, msg.into())); } fn extend_previous_segment(&mut self, text: &str, fmt_args: FormatLike) { { if let Some(last) = self.text_segments.last_mut() { last.push(TextSegment::new(text, fmt_args)); return; } } self.add_text_segment(text, fmt_args); } fn flush_to_stdout(&self, prompt_ending: &str) { //TODO split into multiple functions // - one for outputting text segments // - one for outputting error segments let layout = self.calculate_layout(); let stdout = io::stdout(); let mut term = self.writer(stdout.lock()); self.render_text_segments(&mut term, layout); self.render_error_segments(&mut term); term.fmt(FormatLike::Lines); write!(term, "{}{}", CORNER_NE, prompt_ending).unwrap(); term.reset_fmt(); term.flush().unwrap(); } } impl Terminal { fn render_text_segments<W>(&self, term: &mut TermWriter<W>, layout: Vec<LineLayout>) where W: Write { let mut first = true; for LineLayout { segments, join_padding, rem_padding } in layout { term.fmt(FormatLike::Lines); if first { first = false; write!(term, "{}", CORNER_SE).unwrap(); } else { write!(term, "{}", CORNER_NSE).unwrap(); } for segment_group in &self.text_segments[segments] { for segment in segment_group { term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_START).unwrap(); term.fmt(segment.fmt); write!(term, "{}", &segment.text).unwrap(); term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_END).unwrap(); } for _ in 0..join_padding { write!(term, "{}", LINE).unwrap(); } } for _ in 0..rem_padding { write!(term, "{}", LINE).unwrap(); } write!(term, "\n").unwrap(); } } fn 
render_error_segments<W>(&self, term: &mut TermWriter<W>) where W: Write { for (scope, text) in self.error_segments.iter() { term.fmt(FormatLike::Lines); write!(term, "{}", CORNER_NSE).unwrap(); term.fmt(FormatLike::Error); let mut text = text.trim(); write!(term, "{} {}: ", ERR_START, scope).unwrap(); let bulk_len = 1 + ERR_START.len() + 1 + scope.len() + 2; let mut rem_len = self.column_count.checked_sub(bulk_len).unwrap_or(0); loop { if text.len() <= rem_len { term.fmt(FormatLike::Error); write!(term, "{}", text).unwrap(); break; } else { //find split point and split text let split_idx = find_viable_split_idx(text, rem_len); let (line_text, new_text) = text.split_at(split_idx); text = new_text.trim_start(); rem_len = self.column_count - 3; term.fmt(FormatLike::Error); write!(term, "{text}", text=line_text.trim_end()).unwrap(); term.fmt(FormatLike::Lines); write!(term, "\n{sep}", sep=CORNER_NS).unwrap(); for _ in 0..ERR_START.len()+1 { write!(term, " ").unwrap(); } } } write!(term, "\n").unwrap(); } } } fn find_viable_split_idx(text: &str, max_len: usize) -> usize { let mut last_split_idx = 0; let mut last_char_idx = 0; for (idx, ch) in text.char_indices() { if idx + ch.len_utf8() > max_len { break; } last_char_idx = idx; if !(ch.is_alphanumeric() || ch == '.' || ch=='!' || ch==':' || ch=='?') { last
t_split_idx == 0 { last_char_idx } else { last_split_idx } } impl Terminal { fn writer<W>(&self, out: W) -> TermWriter<W> where W: Write { TermWriter { terminal: self, out } } fn calculate_layout(&self) -> Vec<LineLayout> { // -1 as it starts with a `╠` or similar let init_rem_space = self.column_count - 1; let mut lines = Vec::new(); let mut text_segments = self.text_segments.iter().peekable(); let mut idx_offset = 0; while let Some(line) = calc_next_line_layout(&mut text_segments, init_rem_space, idx_offset) { idx_offset = line.segments.end; lines.push(line) } lines } } fn calc_next_line_layout<'a>( iter: &mut Peekable<impl Iterator<Item=impl IntoIterator<Item=&'a TextSegment>+Copy>>, init_rem_space: usize, idx_offset: usize ) -> Option<LineLayout> { let first_seg = match iter.next() { Some(seg) => seg, None => {return None;} }; let first_item = idx_offset; let mut after_last_item = idx_offset + 1; let first_len = calc_min_segment_group_len(first_seg); if first_len >= init_rem_space { let segments = first_item..after_last_item; return Some(LineLayout { segments, join_padding: 0, rem_padding: 0 }); } let mut rem_space = init_rem_space - first_len; while let Some(segment_group_iter) = iter.peek().map(|i| *i) { let min_len = calc_min_segment_group_len(segment_group_iter); if rem_space > min_len { rem_space -= min_len; after_last_item += 1; iter.next(); } else { let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); return Some(LineLayout { segments, join_padding, rem_padding }) } } let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); Some(LineLayout { segments, join_padding, rem_padding }) } fn calc_padding( first_item: usize, after_last_item: usize, rem_space: usize ) -> (usize, usize) { let nr_items = after_last_item - first_item; let join_padding = rem_space / nr_items; let join_padding = min(join_padding, config::MAX_JOIN_PADDING); let rem_padding = rem_space - (join_padding * nr_items); (join_padding, rem_padding) } fn calc_min_segment_group_len<'a>(group: impl IntoIterator<Item=&'a TextSegment>) -> usize { // +2 as in TEXT_START(char) + TEXT_END(char) group.into_iter().map(|seg| seg.pre_calculated_length + 2).sum() } struct LineLayout { segments: Range<usize>, join_padding: usize, rem_padding: usize } struct TermWriter<'a, W: Write+'a> { terminal: &'a Terminal, out: W } impl<'a, W: 'a> TermWriter<'a, W> where W: Write { fn fmt(&mut self, fmt: FormatLike) { write!(&mut self.out, "\x01").unwrap(); let color = fmt_to_color(fmt); if let Some(cap) = self.terminal.terminfo.get::<cap::SetAForeground>() { expand!(&mut self.out, cap.as_ref(); color).unwrap(); } write!(&mut self.out, "\x02").unwrap(); } fn reset_fmt(&mut self) { write!(&mut self.out, "\x01").unwrap(); let terminfo = &self.terminal.terminfo; if let Some(cap) = terminfo.get::<cap::ExitAttributeMode>() { expand!(&mut self.out, cap.as_ref();).unwrap(); } else if let Some(cap) = terminfo.get::<cap::SetAttributes>() { expand!(&mut self.out, cap.as_ref(); 0).unwrap(); } else if let Some(cap) = terminfo.get::<cap::OrigPair>() { expand!(&mut self.out, cap.as_ref();).unwrap() } write!(&mut self.out, "\x02").unwrap(); } } impl<'a, W: 'a> Write for TermWriter<'a, W> where W: Write { fn flush(&mut self) -> Result<(), io::Error> { self.out.flush() } fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> { self.out.write(buf) } } #[derive(Debug)] struct TextSegment { text: String, 
fmt: FormatLike, pre_calculated_length: usize, } impl TextSegment { pub fn new(text: impl Into<String>, fmt: FormatLike) -> Self { let text = text.into(); let len = text.chars().count(); TextSegment { text, fmt, pre_calculated_length: len, } } }
_split_idx = idx; } } if las
conditional_block
terminal.rs
use std::{ io::{self, Write}, ops::Range, cmp::min, iter::Peekable }; use crate::{ iface::{TerminalPlugin, FormatLike}, config }; use smallvec::{smallvec, SmallVec}; use terminfo::{expand, Database, capability as cap}; // pub const CORNER_SW: char = '╗'; const CORNER_SE: char = '╔'; const CORNER_NSE: char = '╠'; const LINE: char = '═'; const TEXT_START: char = '⟦'; const TEXT_END: char = '⟧'; const CORNER_NS: char = '║'; // pub const ERROR_START: char = '!'; // pub const ERROR_END: char = '!'; // pub const CORNER_NW: char = '╝'; const CORNER_NE: char = '╚'; const ERR_START: &str = "!!"; type Color = u8; mod color { #![allow(unused)] use super::Color; pub const TEXT_WHITE: Color = 251; pub const CYAN: Color = 6; pub const YELLOW: Color = 3; pub const RED: Color = 1; pub const BRIGHT_RED: Color = 9; pub const BRIGHT_GREEN: Color = 10; pub const LIGHT_GRAY: Color = 243; pub const LESS_LIGHT_GRAY: Color = 240; pub const JUNGLE_GREEN: Color = 112; pub const ORANGE: Color = 208; pub const SIGNALING_RED: Color = 196; } fn fmt_to_color(fmt: FormatLike) -> Color { use self::FormatLike::*; match fmt { Text => color::TEXT_WHITE, PrimaryText => color::JUNGLE_GREEN, Lines => color::LIGHT_GRAY, SoftWarning => color::ORANGE, HardWarning => color::SIGNALING_RED, Error => color::RED, ExplicitOk => color::BRIGHT_GREEN, Hidden => color::LESS_LIGHT_GRAY } } #[derive(Debug)] pub struct Terminal { column_count: usize, text_segments: SmallVec<[SmallVec<[TextSegment; 2]>; 2]>, error_segments: Vec<(&'static str, String)>, terminfo: Database, } impl TerminalPlugin for Terminal { fn new(column_count: usize) -> Self { let terminfo = Database::from_env().unwrap(); Terminal { column_count, text_segments: Default::default(), error_segments: Default::default(), terminfo } } fn add_text_segment(&mut self, text: &str, fmt_args: FormatLike) { self.text_segments.push(smallvec![TextSegment::new(text, fmt_args)]); } fn add_error_segment(&mut self, scope: &'static str, msg: &str) { self.error_segments.push((scope, msg.into())); } fn extend_previous_segment(&mut self, text: &str, fmt_args: FormatLike) { { if let Some(last) = self.text_segments.last_mut() { last.push(TextSegment::new(text, fmt_args)); return; } } self.add_text_segment(text, fmt_args); } fn flush_to_stdout(&self, prompt_ending: &str) { //TODO split into multiple functions // - one for outputting text segments // - one for outputting error segments let layout = self.calculate_layout(); let stdout = io::stdout(); let mut term = self.writer(stdout.lock()); self.render_text_segments(&mut term, layout); self.render_error_segments(&mut term); term.fmt(FormatLike::Lines); write!(term, "{}{}", CORNER_NE, prompt_ending).unwrap(); term.reset_fmt(); term.flush().unwrap(); } } impl Terminal { fn render_text_segments<W>(&self, term: &mut TermWriter<W>, layout: Vec<LineLayout>) where W: Write { let mut first = true; for LineLayout { segments, join_padding, rem_padding } in layout { term.fmt(FormatLike::Lines); if first { first = false; write!(term, "{}", CORNER_SE).unwrap(); } else { write!(term, "{}", CORNER_NSE).unwrap(); } for segment_group in &self.text_segments[segments] { for segment in segment_group { term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_START).unwrap(); term.fmt(segment.fmt); write!(term, "{}", &segment.text).unwrap(); term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_END).unwrap(); } for _ in 0..join_padding { write!(term, "{}", LINE).unwrap(); } } for _ in 0..rem_padding { write!(term, "{}", LINE).unwrap(); } write!(term, "\n").unwrap(); } } fn 
render_error_segments<W>(&self, term: &mut TermWriter<W>) where W: Write { for (scope, text) in self.error_segments.iter() { term.fmt(FormatLike::Lines); write!(term, "{}", CORNER_NSE).unwrap(); term.fmt(FormatLike::Error); let mut text = text.trim(); write!(term, "{} {}: ", ERR_START, scope).unwrap(); let bulk_len = 1 + ERR_START.len() + 1 + scope.len() + 2; let mut rem_len = self.column_count.checked_sub(bulk_len).unwrap_or(0); loop { if text.len() <= rem_len { term.fmt(FormatLike::Error); write!(term, "{}", text).unwrap(); break; } else { //find split point and split text let split_idx = find_viable_split_idx(text, rem_len); let (line_text, new_text) = text.split_at(split_idx); text = new_text.trim_start(); rem_len = self.column_count - 3; term.fmt(FormatLike::Error); write!(term, "{text}", text=line_text.trim_end()).unwrap(); term.fmt(FormatLike::Lines); write!(term, "\n{sep}", sep=CORNER_NS).unwrap(); for _ in 0..ERR_START.len()+1 { write!(term, " ").unwrap(); } } } write!(term, "\n").unwrap(); } } } fn find_viable_split_idx(text: &str, max_len: usize) -> usize { let mut last_split_idx = 0; let mut last_char_idx = 0; for (idx, ch) in text.char_indices() { if idx + ch.len_utf8() > max_len { break; } last_char_idx = idx; if !(ch.is_alphanumeric() || ch == '.' || ch=='!' || ch==':' || ch=='?') { last_split_idx = idx; } } if last_split_idx == 0 { last_char_idx } else { last_split_idx } } impl Terminal { fn writer<W>(&self, out: W) -> TermWriter<W> where W: Write { TermWriter { terminal: self, out } } fn calculate_layout(&self) -> Vec<LineLayout> { // -1 as it starts with a `╠` or similar let init_rem_space = self.column_count - 1; let mut lines = Vec::new(); let mut text_segments = self.text_segments.iter().peekable(); let mut idx_offset = 0; while let Some(line) = calc_next_line_layout(&mut text_segments, init_rem_space, idx_offset) { idx_offset = line.segments.end; lines.push(line) } lines } } fn calc_next_line_layout<'a>( iter: &mut Peekable<impl Iterator<Item=impl IntoIterator<Item=&'a TextSegment>+Copy>>, init_rem_space: usize, idx_offset: usize ) -> Option<LineLayout> { let first_seg = match iter.next() { Some(seg) => seg, None => {return None;} }; let first_item = idx_offset; let mut after_last_item = idx_offset + 1; let first_len = calc_min_segment_group_len(first_seg); if first_len >= init_rem_space { let segments = first_item..after_last_item; return Some(LineLayout { segments, join_padding: 0, rem_padding: 0 }); } let mut rem_space = init_rem_space - first_len; while let Some(segment_group_iter) = iter.peek().map(|i| *i) { let min_len = calc_min_segment_group_len(segment_group_iter); if rem_space > min_len { rem_space -= min_len; after_last_item += 1; iter.next(); } else { let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); return Some(LineLayout { segments, join_padding, rem_padding }) } } let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); Some(LineLayout { segments, join_padding, rem_padding }) } fn calc_padding( first_item: usize, after_last_item: usize, rem_space: usize ) -> (usize, usize) { let nr_items = after_last_item - first_item; let join_padding = rem_space / nr_items; let join_padding = min(join_padding, config::MAX_JOIN_PADDING); let rem_padding = rem_space - (join_padding * nr_items); (join_padding, rem_padding) } fn calc_min_segment_group_len<'a>(group: impl IntoIterator<Item=&'a TextSegment>) -> usize { 
// +2 as in TEXT_START(char) + TEXT_END(char) group.into_iter().map(|seg| seg.pre_calculated_length + 2).sum() } struct LineLayout { segments: Range<usize>, join_padding: usize, rem_padding: usize } struct TermWriter<'a, W: Write+'a> { terminal: &'a Terminal, out: W } impl<'a, W: 'a> TermWriter<'a, W> where W: Write { fn fmt(&mut self, fmt: FormatLike) { write!(&mut self.out, "\x01").unwrap(); let color = fmt_to_color(fmt); if let Some(cap) = self.terminal.terminfo.get::<cap::SetAForeground>() { expand!(&mut self.out, cap.as_ref(); color).unwrap(); } write!(&mut self.out, "\x02").unwrap(); } fn reset_fmt(&mut self) { write!(&mut self.out, "\x01").unwrap(); let terminfo = &self.terminal.terminfo; if let Some(cap) = terminfo.get::<cap::ExitAttributeMode>() { expand!(&mut self.out, cap.as_ref();).unwrap(); } else if let Some(cap) = terminfo.get::<cap::SetAttributes>() { expand!(&mut self.out, cap.as_ref(); 0).unwrap(); } else if let Some(cap) = terminfo.get::<cap::OrigPair>() { expand!(&mut self.out, cap.as_ref();).unwrap() } write!(&mut self.out, "\x02").unwrap(); } } impl<'a, W: 'a> Write for TermWriter<'a, W> where W: Write { fn flush(&mut self) ->
t<(), io::Error> { self.out.flush() } fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> { self.out.write(buf) } } #[derive(Debug)] struct TextSegment { text: String, fmt: FormatLike, pre_calculated_length: usize, } impl TextSegment { pub fn new(text: impl Into<String>, fmt: FormatLike) -> Self { let text = text.into(); let len = text.chars().count(); TextSegment { text, fmt, pre_calculated_length: len, } } }
Resul
identifier_name
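The error rendering in the terminal.rs records wraps long messages by looking for the last non-word character that still fits in the available width. A self-contained restatement of that helper with a usage check follows; the example input and expected index are illustrative only.

// Walk the string, remember the byte index of the last character that is not
// part of a word (letters, digits, '.', '!', ':', '?'), and stop before
// exceeding max_len bytes; fall back to the last character boundary when no
// split point was seen.
fn find_viable_split_idx(text: &str, max_len: usize) -> usize {
    let mut last_split_idx = 0;
    let mut last_char_idx = 0;
    for (idx, ch) in text.char_indices() {
        if idx + ch.len_utf8() > max_len {
            break;
        }
        last_char_idx = idx;
        if !(ch.is_alphanumeric() || ch == '.' || ch == '!' || ch == ':' || ch == '?') {
            last_split_idx = idx;
        }
    }
    if last_split_idx == 0 { last_char_idx } else { last_split_idx }
}

fn main() {
    // Splits at the space before "message" rather than in the middle of it.
    assert_eq!(find_viable_split_idx("long error message", 12), 10);
}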
terminal.rs
use std::{ io::{self, Write}, ops::Range, cmp::min, iter::Peekable }; use crate::{ iface::{TerminalPlugin, FormatLike}, config }; use smallvec::{smallvec, SmallVec}; use terminfo::{expand, Database, capability as cap}; // pub const CORNER_SW: char = '╗'; const CORNER_SE: char = '╔'; const CORNER_NSE: char = '╠'; const LINE: char = '═'; const TEXT_START: char = '⟦'; const TEXT_END: char = '⟧'; const CORNER_NS: char = '║'; // pub const ERROR_START: char = '!'; // pub const ERROR_END: char = '!'; // pub const CORNER_NW: char = '╝'; const CORNER_NE: char = '╚'; const ERR_START: &str = "!!"; type Color = u8; mod color { #![allow(unused)] use super::Color; pub const TEXT_WHITE: Color = 251; pub const CYAN: Color = 6; pub const YELLOW: Color = 3; pub const RED: Color = 1; pub const BRIGHT_RED: Color = 9; pub const BRIGHT_GREEN: Color = 10; pub const LIGHT_GRAY: Color = 243; pub const LESS_LIGHT_GRAY: Color = 240; pub const JUNGLE_GREEN: Color = 112; pub const ORANGE: Color = 208; pub const SIGNALING_RED: Color = 196; } fn fmt_to_color(fmt: FormatLike) -> Color { use self::Fo
)] pub struct Terminal { column_count: usize, text_segments: SmallVec<[SmallVec<[TextSegment; 2]>; 2]>, error_segments: Vec<(&'static str, String)>, terminfo: Database, } impl TerminalPlugin for Terminal { fn new(column_count: usize) -> Self { let terminfo = Database::from_env().unwrap(); Terminal { column_count, text_segments: Default::default(), error_segments: Default::default(), terminfo } } fn add_text_segment(&mut self, text: &str, fmt_args: FormatLike) { self.text_segments.push(smallvec![TextSegment::new(text, fmt_args)]); } fn add_error_segment(&mut self, scope: &'static str, msg: &str) { self.error_segments.push((scope, msg.into())); } fn extend_previous_segment(&mut self, text: &str, fmt_args: FormatLike) { { if let Some(last) = self.text_segments.last_mut() { last.push(TextSegment::new(text, fmt_args)); return; } } self.add_text_segment(text, fmt_args); } fn flush_to_stdout(&self, prompt_ending: &str) { //TODO split into multiple functions // - one for outputting text segments // - one for outputting error segments let layout = self.calculate_layout(); let stdout = io::stdout(); let mut term = self.writer(stdout.lock()); self.render_text_segments(&mut term, layout); self.render_error_segments(&mut term); term.fmt(FormatLike::Lines); write!(term, "{}{}", CORNER_NE, prompt_ending).unwrap(); term.reset_fmt(); term.flush().unwrap(); } } impl Terminal { fn render_text_segments<W>(&self, term: &mut TermWriter<W>, layout: Vec<LineLayout>) where W: Write { let mut first = true; for LineLayout { segments, join_padding, rem_padding } in layout { term.fmt(FormatLike::Lines); if first { first = false; write!(term, "{}", CORNER_SE).unwrap(); } else { write!(term, "{}", CORNER_NSE).unwrap(); } for segment_group in &self.text_segments[segments] { for segment in segment_group { term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_START).unwrap(); term.fmt(segment.fmt); write!(term, "{}", &segment.text).unwrap(); term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_END).unwrap(); } for _ in 0..join_padding { write!(term, "{}", LINE).unwrap(); } } for _ in 0..rem_padding { write!(term, "{}", LINE).unwrap(); } write!(term, "\n").unwrap(); } } fn render_error_segments<W>(&self, term: &mut TermWriter<W>) where W: Write { for (scope, text) in self.error_segments.iter() { term.fmt(FormatLike::Lines); write!(term, "{}", CORNER_NSE).unwrap(); term.fmt(FormatLike::Error); let mut text = text.trim(); write!(term, "{} {}: ", ERR_START, scope).unwrap(); let bulk_len = 1 + ERR_START.len() + 1 + scope.len() + 2; let mut rem_len = self.column_count.checked_sub(bulk_len).unwrap_or(0); loop { if text.len() <= rem_len { term.fmt(FormatLike::Error); write!(term, "{}", text).unwrap(); break; } else { //find split point and split text let split_idx = find_viable_split_idx(text, rem_len); let (line_text, new_text) = text.split_at(split_idx); text = new_text.trim_start(); rem_len = self.column_count - 3; term.fmt(FormatLike::Error); write!(term, "{text}", text=line_text.trim_end()).unwrap(); term.fmt(FormatLike::Lines); write!(term, "\n{sep}", sep=CORNER_NS).unwrap(); for _ in 0..ERR_START.len()+1 { write!(term, " ").unwrap(); } } } write!(term, "\n").unwrap(); } } } fn find_viable_split_idx(text: &str, max_len: usize) -> usize { let mut last_split_idx = 0; let mut last_char_idx = 0; for (idx, ch) in text.char_indices() { if idx + ch.len_utf8() > max_len { break; } last_char_idx = idx; if !(ch.is_alphanumeric() || ch == '.' || ch=='!' 
|| ch==':' || ch=='?') { last_split_idx = idx; } } if last_split_idx == 0 { last_char_idx } else { last_split_idx } } impl Terminal { fn writer<W>(&self, out: W) -> TermWriter<W> where W: Write { TermWriter { terminal: self, out } } fn calculate_layout(&self) -> Vec<LineLayout> { // -1 as it starts with a `╠` or similar let init_rem_space = self.column_count - 1; let mut lines = Vec::new(); let mut text_segments = self.text_segments.iter().peekable(); let mut idx_offset = 0; while let Some(line) = calc_next_line_layout(&mut text_segments, init_rem_space, idx_offset) { idx_offset = line.segments.end; lines.push(line) } lines } } fn calc_next_line_layout<'a>( iter: &mut Peekable<impl Iterator<Item=impl IntoIterator<Item=&'a TextSegment>+Copy>>, init_rem_space: usize, idx_offset: usize ) -> Option<LineLayout> { let first_seg = match iter.next() { Some(seg) => seg, None => {return None;} }; let first_item = idx_offset; let mut after_last_item = idx_offset + 1; let first_len = calc_min_segment_group_len(first_seg); if first_len >= init_rem_space { let segments = first_item..after_last_item; return Some(LineLayout { segments, join_padding: 0, rem_padding: 0 }); } let mut rem_space = init_rem_space - first_len; while let Some(segment_group_iter) = iter.peek().map(|i| *i) { let min_len = calc_min_segment_group_len(segment_group_iter); if rem_space > min_len { rem_space -= min_len; after_last_item += 1; iter.next(); } else { let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); return Some(LineLayout { segments, join_padding, rem_padding }) } } let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); Some(LineLayout { segments, join_padding, rem_padding }) } fn calc_padding( first_item: usize, after_last_item: usize, rem_space: usize ) -> (usize, usize) { let nr_items = after_last_item - first_item; let join_padding = rem_space / nr_items; let join_padding = min(join_padding, config::MAX_JOIN_PADDING); let rem_padding = rem_space - (join_padding * nr_items); (join_padding, rem_padding) } fn calc_min_segment_group_len<'a>(group: impl IntoIterator<Item=&'a TextSegment>) -> usize { // +2 as in TEXT_START(char) + TEXT_END(char) group.into_iter().map(|seg| seg.pre_calculated_length + 2).sum() } struct LineLayout { segments: Range<usize>, join_padding: usize, rem_padding: usize } struct TermWriter<'a, W: Write+'a> { terminal: &'a Terminal, out: W } impl<'a, W: 'a> TermWriter<'a, W> where W: Write { fn fmt(&mut self, fmt: FormatLike) { write!(&mut self.out, "\x01").unwrap(); let color = fmt_to_color(fmt); if let Some(cap) = self.terminal.terminfo.get::<cap::SetAForeground>() { expand!(&mut self.out, cap.as_ref(); color).unwrap(); } write!(&mut self.out, "\x02").unwrap(); } fn reset_fmt(&mut self) { write!(&mut self.out, "\x01").unwrap(); let terminfo = &self.terminal.terminfo; if let Some(cap) = terminfo.get::<cap::ExitAttributeMode>() { expand!(&mut self.out, cap.as_ref();).unwrap(); } else if let Some(cap) = terminfo.get::<cap::SetAttributes>() { expand!(&mut self.out, cap.as_ref(); 0).unwrap(); } else if let Some(cap) = terminfo.get::<cap::OrigPair>() { expand!(&mut self.out, cap.as_ref();).unwrap() } write!(&mut self.out, "\x02").unwrap(); } } impl<'a, W: 'a> Write for TermWriter<'a, W> where W: Write { fn flush(&mut self) -> Result<(), io::Error> { self.out.flush() } fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> { self.out.write(buf) } 
} #[derive(Debug)] struct TextSegment { text: String, fmt: FormatLike, pre_calculated_length: usize, } impl TextSegment { pub fn new(text: impl Into<String>, fmt: FormatLike) -> Self { let text = text.into(); let len = text.chars().count(); TextSegment { text, fmt, pre_calculated_length: len, } } }
rmatLike::*; match fmt { Text => color::TEXT_WHITE, PrimaryText => color::JUNGLE_GREEN, Lines => color::LIGHT_GRAY, SoftWarning => color::ORANGE, HardWarning => color::SIGNALING_RED, Error => color::RED, ExplicitOk => color::BRIGHT_GREEN, Hidden => color::LESS_LIGHT_GRAY } } #[derive(Debug
identifier_body
terminal.rs
use std::{ io::{self, Write}, ops::Range, cmp::min, iter::Peekable }; use crate::{ iface::{TerminalPlugin, FormatLike}, config }; use smallvec::{smallvec, SmallVec}; use terminfo::{expand, Database, capability as cap}; // pub const CORNER_SW: char = '╗'; const CORNER_SE: char = '╔'; const CORNER_NSE: char = '╠'; const LINE: char = '═'; const TEXT_START: char = '⟦'; const TEXT_END: char = '⟧'; const CORNER_NS: char = '║'; // pub const ERROR_START: char = '!'; // pub const ERROR_END: char = '!'; // pub const CORNER_NW: char = '╝'; const CORNER_NE: char = '╚'; const ERR_START: &str = "!!"; type Color = u8; mod color { #![allow(unused)] use super::Color; pub const TEXT_WHITE: Color = 251; pub const CYAN: Color = 6; pub const YELLOW: Color = 3; pub const RED: Color = 1; pub const BRIGHT_RED: Color = 9; pub const BRIGHT_GREEN: Color = 10; pub const LIGHT_GRAY: Color = 243; pub const LESS_LIGHT_GRAY: Color = 240; pub const JUNGLE_GREEN: Color = 112; pub const ORANGE: Color = 208; pub const SIGNALING_RED: Color = 196; } fn fmt_to_color(fmt: FormatLike) -> Color { use self::FormatLike::*; match fmt { Text => color::TEXT_WHITE, PrimaryText => color::JUNGLE_GREEN, Lines => color::LIGHT_GRAY, SoftWarning => color::ORANGE, HardWarning => color::SIGNALING_RED, Error => color::RED, ExplicitOk => color::BRIGHT_GREEN, Hidden => color::LESS_LIGHT_GRAY } } #[derive(Debug)] pub struct Terminal { column_count: usize, text_segments: SmallVec<[SmallVec<[TextSegment; 2]>; 2]>, error_segments: Vec<(&'static str, String)>, terminfo: Database, } impl TerminalPlugin for Terminal { fn new(column_count: usize) -> Self { let terminfo = Database::from_env().unwrap(); Terminal { column_count, text_segments: Default::default(), error_segments: Default::default(), terminfo } } fn add_text_segment(&mut self, text: &str, fmt_args: FormatLike) { self.text_segments.push(smallvec![TextSegment::new(text, fmt_args)]); } fn add_error_segment(&mut self, scope: &'static str, msg: &str) { self.error_segments.push((scope, msg.into())); } fn extend_previous_segment(&mut self, text: &str, fmt_args: FormatLike) { { if let Some(last) = self.text_segments.last_mut() { last.push(TextSegment::new(text, fmt_args)); return;
self.add_text_segment(text, fmt_args); } fn flush_to_stdout(&self, prompt_ending: &str) { //TODO split into multiple functions // - one for outputting text segments // - one for outputting error segments let layout = self.calculate_layout(); let stdout = io::stdout(); let mut term = self.writer(stdout.lock()); self.render_text_segments(&mut term, layout); self.render_error_segments(&mut term); term.fmt(FormatLike::Lines); write!(term, "{}{}", CORNER_NE, prompt_ending).unwrap(); term.reset_fmt(); term.flush().unwrap(); } } impl Terminal { fn render_text_segments<W>(&self, term: &mut TermWriter<W>, layout: Vec<LineLayout>) where W: Write { let mut first = true; for LineLayout { segments, join_padding, rem_padding } in layout { term.fmt(FormatLike::Lines); if first { first = false; write!(term, "{}", CORNER_SE).unwrap(); } else { write!(term, "{}", CORNER_NSE).unwrap(); } for segment_group in &self.text_segments[segments] { for segment in segment_group { term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_START).unwrap(); term.fmt(segment.fmt); write!(term, "{}", &segment.text).unwrap(); term.fmt(FormatLike::Lines); write!(term, "{}", TEXT_END).unwrap(); } for _ in 0..join_padding { write!(term, "{}", LINE).unwrap(); } } for _ in 0..rem_padding { write!(term, "{}", LINE).unwrap(); } write!(term, "\n").unwrap(); } } fn render_error_segments<W>(&self, term: &mut TermWriter<W>) where W: Write { for (scope, text) in self.error_segments.iter() { term.fmt(FormatLike::Lines); write!(term, "{}", CORNER_NSE).unwrap(); term.fmt(FormatLike::Error); let mut text = text.trim(); write!(term, "{} {}: ", ERR_START, scope).unwrap(); let bulk_len = 1 + ERR_START.len() + 1 + scope.len() + 2; let mut rem_len = self.column_count.checked_sub(bulk_len).unwrap_or(0); loop { if text.len() <= rem_len { term.fmt(FormatLike::Error); write!(term, "{}", text).unwrap(); break; } else { //find split point and split text let split_idx = find_viable_split_idx(text, rem_len); let (line_text, new_text) = text.split_at(split_idx); text = new_text.trim_start(); rem_len = self.column_count - 3; term.fmt(FormatLike::Error); write!(term, "{text}", text=line_text.trim_end()).unwrap(); term.fmt(FormatLike::Lines); write!(term, "\n{sep}", sep=CORNER_NS).unwrap(); for _ in 0..ERR_START.len()+1 { write!(term, " ").unwrap(); } } } write!(term, "\n").unwrap(); } } } fn find_viable_split_idx(text: &str, max_len: usize) -> usize { let mut last_split_idx = 0; let mut last_char_idx = 0; for (idx, ch) in text.char_indices() { if idx + ch.len_utf8() > max_len { break; } last_char_idx = idx; if !(ch.is_alphanumeric() || ch == '.' || ch=='!' 
|| ch==':' || ch=='?') { last_split_idx = idx; } } if last_split_idx == 0 { last_char_idx } else { last_split_idx } } impl Terminal { fn writer<W>(&self, out: W) -> TermWriter<W> where W: Write { TermWriter { terminal: self, out } } fn calculate_layout(&self) -> Vec<LineLayout> { // -1 as it starts with a `╠` or similar let init_rem_space = self.column_count - 1; let mut lines = Vec::new(); let mut text_segments = self.text_segments.iter().peekable(); let mut idx_offset = 0; while let Some(line) = calc_next_line_layout(&mut text_segments, init_rem_space, idx_offset) { idx_offset = line.segments.end; lines.push(line) } lines } } fn calc_next_line_layout<'a>( iter: &mut Peekable<impl Iterator<Item=impl IntoIterator<Item=&'a TextSegment>+Copy>>, init_rem_space: usize, idx_offset: usize ) -> Option<LineLayout> { let first_seg = match iter.next() { Some(seg) => seg, None => {return None;} }; let first_item = idx_offset; let mut after_last_item = idx_offset + 1; let first_len = calc_min_segment_group_len(first_seg); if first_len >= init_rem_space { let segments = first_item..after_last_item; return Some(LineLayout { segments, join_padding: 0, rem_padding: 0 }); } let mut rem_space = init_rem_space - first_len; while let Some(segment_group_iter) = iter.peek().map(|i| *i) { let min_len = calc_min_segment_group_len(segment_group_iter); if rem_space > min_len { rem_space -= min_len; after_last_item += 1; iter.next(); } else { let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); return Some(LineLayout { segments, join_padding, rem_padding }) } } let segments = first_item..after_last_item; let (join_padding, rem_padding) = calc_padding(first_item, after_last_item, rem_space); Some(LineLayout { segments, join_padding, rem_padding }) } fn calc_padding( first_item: usize, after_last_item: usize, rem_space: usize ) -> (usize, usize) { let nr_items = after_last_item - first_item; let join_padding = rem_space / nr_items; let join_padding = min(join_padding, config::MAX_JOIN_PADDING); let rem_padding = rem_space - (join_padding * nr_items); (join_padding, rem_padding) } fn calc_min_segment_group_len<'a>(group: impl IntoIterator<Item=&'a TextSegment>) -> usize { // +2 as in TEXT_START(char) + TEXT_END(char) group.into_iter().map(|seg| seg.pre_calculated_length + 2).sum() } struct LineLayout { segments: Range<usize>, join_padding: usize, rem_padding: usize } struct TermWriter<'a, W: Write+'a> { terminal: &'a Terminal, out: W } impl<'a, W: 'a> TermWriter<'a, W> where W: Write { fn fmt(&mut self, fmt: FormatLike) { write!(&mut self.out, "\x01").unwrap(); let color = fmt_to_color(fmt); if let Some(cap) = self.terminal.terminfo.get::<cap::SetAForeground>() { expand!(&mut self.out, cap.as_ref(); color).unwrap(); } write!(&mut self.out, "\x02").unwrap(); } fn reset_fmt(&mut self) { write!(&mut self.out, "\x01").unwrap(); let terminfo = &self.terminal.terminfo; if let Some(cap) = terminfo.get::<cap::ExitAttributeMode>() { expand!(&mut self.out, cap.as_ref();).unwrap(); } else if let Some(cap) = terminfo.get::<cap::SetAttributes>() { expand!(&mut self.out, cap.as_ref(); 0).unwrap(); } else if let Some(cap) = terminfo.get::<cap::OrigPair>() { expand!(&mut self.out, cap.as_ref();).unwrap() } write!(&mut self.out, "\x02").unwrap(); } } impl<'a, W: 'a> Write for TermWriter<'a, W> where W: Write { fn flush(&mut self) -> Result<(), io::Error> { self.out.flush() } fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> { self.out.write(buf) } 
} #[derive(Debug)] struct TextSegment { text: String, fmt: FormatLike, pre_calculated_length: usize, } impl TextSegment { pub fn new(text: impl Into<String>, fmt: FormatLike) -> Self { let text = text.into(); let len = text.chars().count(); TextSegment { text, fmt, pre_calculated_length: len, } } }
} }
random_line_split
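The layout code in the terminal.rs records distributes leftover terminal columns between segment groups via calc_padding: spare space is divided evenly per join, capped at a configured maximum, and whatever remains becomes trailing padding. A self-contained restatement is below; the MAX_JOIN_PADDING constant is an assumed stand-in for config::MAX_JOIN_PADDING, whose real value is not shown in the records.

use std::cmp::min;

// Spread leftover columns evenly between segment groups, cap the per-join
// padding, and keep the remainder as trailing padding on the line.
const MAX_JOIN_PADDING: usize = 8;

fn calc_padding(first_item: usize, after_last_item: usize, rem_space: usize) -> (usize, usize) {
    let nr_items = after_last_item - first_item;
    let join_padding = min(rem_space / nr_items, MAX_JOIN_PADDING);
    let rem_padding = rem_space - join_padding * nr_items;
    (join_padding, rem_padding)
}

fn main() {
    // Three segment groups and 20 spare columns: 6 columns per join, 2 left over.
    assert_eq!(calc_padding(0, 3, 20), (6, 2));
}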
mock.rs
// This file is part of the SORA network and Polkaswap app. // Copyright (c) 2020, 2021, Polka Biome Ltd. All rights reserved. // SPDX-License-Identifier: BSD-4-Clause // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // Redistributions of source code must retain the above copyright notice, this list // of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this // list of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // All advertising materials mentioning features or use of this software must display // the following acknowledgement: This product includes software developed by Polka Biome // Ltd., SORA, and Polkaswap. // // Neither the name of the Polka Biome Ltd. nor the names of its contributors may be used // to endorse or promote products derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY Polka Biome Ltd. AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Polka Biome Ltd. BE LIABLE FOR ANY // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; // OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use crate::{self as pswap_distribution, Config}; use common::mock::ExistentialDeposits; use common::prelude::Balance; use common::{ balance, fixed, fixed_from_basis_points, AssetName, AssetSymbol, BalancePrecision, Fixed, FromGenericPair, }; use currencies::BasicCurrencyAdapter; use frame_support::traits::GenesisBuild; use frame_support::weights::Weight; use frame_support::{construct_runtime, parameter_types}; use frame_system; use hex_literal::hex; use permissions::Scope; use sp_core::H256; use sp_runtime::testing::Header; use sp_runtime::traits::{BlakeTwo256, IdentityLookup, Zero}; use sp_runtime::{AccountId32, Perbill}; pub type AccountId = AccountId32; pub type BlockNumber = u64; pub type Amount = i128; pub type AssetId = common::AssetId32<common::PredefinedAssetId>; pub type TechAccountId = common::TechAccountId<AccountId, TechAssetId, DEXId>; type TechAssetId = common::TechAssetId<common::PredefinedAssetId>; type DEXId = common::DEXId; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>; type Block = frame_system::mocking::MockBlock<Runtime>; pub fn alice() -> AccountId { AccountId32::from([1u8; 32]) } pub fn fees_account_a() -> AccountId { AccountId32::from([2u8; 32]) } pub fn fees_account_b() -> AccountId { AccountId32::from([3u8; 32]) } pub fn liquidity_provider_a() -> AccountId { AccountId32::from([4u8; 32]) } pub fn liquidity_provider_b() -> AccountId
pub fn liquidity_provider_c() -> AccountId { AccountId32::from([6u8; 32]) } pub const DEX_A_ID: DEXId = common::DEXId::Polkaswap; parameter_types! { pub GetBaseAssetId: AssetId = common::XOR.into(); pub GetIncentiveAssetId: AssetId = common::PSWAP.into(); pub const PoolTokenAId: AssetId = common::AssetId32::from_bytes(hex!("0211110000000000000000000000000000000000000000000000000000000000")); pub const PoolTokenBId: AssetId = common::AssetId32::from_bytes(hex!("0222220000000000000000000000000000000000000000000000000000000000")); pub const BlockHashCount: u64 = 250; pub const MaximumBlockWeight: Weight = 1024; pub const MaximumBlockLength: u32 = 2 * 1024; pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); pub const GetDefaultFee: u16 = 30; pub const GetDefaultProtocolFee: u16 = 0; pub GetPswapDistributionTechAccountId: TechAccountId = { let tech_account_id = TechAccountId::from_generic_pair( crate::TECH_ACCOUNT_PREFIX.to_vec(), crate::TECH_ACCOUNT_MAIN.to_vec(), ); tech_account_id }; pub GetPswapDistributionAccountId: AccountId = { let tech_account_id = GetPswapDistributionTechAccountId::get(); let account_id = technical::Module::<Runtime>::tech_account_id_to_account_id(&tech_account_id) .expect("Failed to get ordinary account id for technical account id."); account_id }; pub const GetDefaultSubscriptionFrequency: BlockNumber = 10; pub const GetBurnUpdateFrequency: BlockNumber = 3; pub const ExistentialDeposit: u128 = 0; pub const TransferFee: u128 = 0; pub const CreationFee: u128 = 0; pub const TransactionByteFee: u128 = 1; pub GetFee: Fixed = fixed_from_basis_points(30u16); pub GetParliamentAccountId: AccountId = AccountId32::from([7u8; 32]); } construct_runtime! { pub enum Runtime where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, PswapDistribution: pswap_distribution::{Module, Call, Config<T>, Storage, Event<T>}, Tokens: tokens::{Module, Call, Config<T>, Storage, Event<T>}, Permissions: permissions::{Module, Call, Config<T>, Storage, Event<T>}, Currencies: currencies::{Module, Call, Storage, Event<T>}, Assets: assets::{Module, Call, Config<T>, Storage, Event<T>}, Balances: pallet_balances::{Module, Call, Config<T>, Storage, Event<T>}, Technical: technical::{Module, Call, Storage, Event<T>}, DexManager: dex_manager::{Module, Call, Storage}, } } impl frame_system::Config for Runtime { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = AccountId; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type DbWeight = (); type Version = (); type AccountData = pallet_balances::AccountData<Balance>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type PalletInfo = PalletInfo; type SS58Prefix = (); } impl Config for Runtime { type Event = Event; type GetIncentiveAssetId = GetIncentiveAssetId; type LiquidityProxy = (); type CompatBalance = Balance; type GetDefaultSubscriptionFrequency = GetDefaultSubscriptionFrequency; type GetBurnUpdateFrequency = GetBurnUpdateFrequency; type GetTechnicalAccountId = GetPswapDistributionAccountId; type EnsureDEXManager = DexManager; type OnPswapBurnedAggregator = (); type WeightInfo = (); type GetParliamentAccountId = GetParliamentAccountId; } impl tokens::Config for Runtime { type 
Event = Event; type Balance = Balance; type Amount = Amount; type CurrencyId = <Runtime as assets::Config>::AssetId; type WeightInfo = (); type ExistentialDeposits = ExistentialDeposits; type OnDust = (); } impl permissions::Config for Runtime { type Event = Event; } impl currencies::Config for Runtime { type Event = Event; type MultiCurrency = Tokens; type NativeCurrency = BasicCurrencyAdapter<Runtime, pallet_balances::Module<Runtime>, Amount, BlockNumber>; type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId; type WeightInfo = (); } impl assets::Config for Runtime { type Event = Event; type ExtraAccountId = [u8; 32]; type ExtraAssetRecordArg = common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>; type AssetId = AssetId; type GetBaseAssetId = GetBaseAssetId; type Currency = currencies::Module<Runtime>; type WeightInfo = (); } impl common::Config for Runtime { type DEXId = DEXId; type LstId = common::LiquiditySourceType; } impl pallet_balances::Config for Runtime { type Balance = Balance; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); type MaxLocks = (); } impl technical::Config for Runtime { type Event = Event; type TechAssetId = TechAssetId; type TechAccountId = TechAccountId; type Trigger = (); type Condition = (); type SwapAction = (); type WeightInfo = (); } impl dex_manager::Config for Runtime {} pub struct ExtBuilder { endowed_accounts: Vec<(AccountId, AssetId, Balance)>, endowed_assets: Vec<( AssetId, AccountId, AssetSymbol, AssetName, BalancePrecision, Balance, bool, )>, initial_permission_owners: Vec<(u32, Scope, Vec<AccountId>)>, initial_permissions: Vec<(AccountId, Scope, Vec<u32>)>, subscribed_accounts: Vec<(AccountId, (DEXId, AssetId, BlockNumber, BlockNumber))>, burn_info: (Fixed, Fixed, Fixed), } impl ExtBuilder { pub fn uninitialized() -> Self { Self { endowed_accounts: Vec::new(), endowed_assets: vec![( PoolTokenAId::get(), alice(), AssetSymbol(b"POOL".to_vec()), AssetName(b"Pool Token".to_vec()), 18, Balance::from(0u32), true, )], initial_permission_owners: Vec::new(), initial_permissions: Vec::new(), subscribed_accounts: Vec::new(), burn_info: (fixed!(0), fixed!(0.10), fixed!(0.30)), } } } impl ExtBuilder { pub fn with_accounts(accounts: Vec<(AccountId, AssetId, Balance)>) -> Self { let permissioned_account_id = GetPswapDistributionAccountId::get(); Self { endowed_accounts: accounts, endowed_assets: vec![ ( common::XOR.into(), alice(), AssetSymbol(b"XOR".to_vec()), AssetName(b"SORA".to_vec()), 18, Balance::zero(), true, ), ( common::PSWAP.into(), alice(), AssetSymbol(b"PSWAP".to_vec()), AssetName(b"Polkaswap".to_vec()), 10, Balance::zero(), true, ), ( PoolTokenAId::get(), alice(), AssetSymbol(b"POOLA".to_vec()), AssetName(b"Pool A".to_vec()), 18, Balance::zero(), true, ), ( PoolTokenBId::get(), alice(), AssetSymbol(b"POOLB".to_vec()), AssetName(b"Pool B".to_vec()), 18, Balance::zero(), true, ), ], initial_permission_owners: vec![], initial_permissions: vec![( permissioned_account_id, Scope::Unlimited, vec![permissions::MINT, permissions::BURN], )], subscribed_accounts: vec![ (fees_account_a(), (DEX_A_ID, PoolTokenAId::get(), 5, 0)), (fees_account_b(), (DEX_A_ID, PoolTokenBId::get(), 7, 0)), ], burn_info: (fixed!(0.1), fixed!(0.10), fixed!(0.40)), } } } impl Default for ExtBuilder { fn default() -> Self { ExtBuilder::with_accounts(vec![ (fees_account_a(), common::XOR.into(), balance!(1)), (fees_account_a(), 
common::PSWAP.into(), balance!(6)), (liquidity_provider_a(), PoolTokenAId::get(), balance!(3)), (liquidity_provider_b(), PoolTokenAId::get(), balance!(2)), (liquidity_provider_c(), PoolTokenAId::get(), balance!(1)), (liquidity_provider_a(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_b(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_c(), PoolTokenBId::get(), balance!(10)), ]) } } impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { let mut t = SystemConfig::default().build_storage::<Runtime>().unwrap(); let mut vec = self .endowed_accounts .iter() .map(|(acc, ..)| (acc.clone(), 0)) .chain(vec![ (alice(), 0), (fees_account_a(), 0), (fees_account_b(), 0), (GetPswapDistributionAccountId::get(), 0), (GetParliamentAccountId::get(), 0), ]) .collect::<Vec<_>>(); vec.sort_by_key(|x| x.0.clone()); vec.dedup_by(|x, y| x.0 == y.0); BalancesConfig { balances: vec } .assimilate_storage(&mut t) .unwrap(); PermissionsConfig { initial_permissions: self.initial_permissions, initial_permission_owners: self.initial_permission_owners, } .assimilate_storage(&mut t) .unwrap(); TokensConfig { endowed_accounts: self.endowed_accounts, } .assimilate_storage(&mut t) .unwrap(); AssetsConfig { endowed_assets: self.endowed_assets, } .assimilate_storage(&mut t) .unwrap(); PswapDistributionConfig { subscribed_accounts: self.subscribed_accounts, burn_info: self.burn_info, } .assimilate_storage(&mut t) .unwrap(); t.into() } }
{ AccountId32::from([5u8; 32]) }
identifier_body
mock.rs
// This file is part of the SORA network and Polkaswap app. // Copyright (c) 2020, 2021, Polka Biome Ltd. All rights reserved. // SPDX-License-Identifier: BSD-4-Clause // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // Redistributions of source code must retain the above copyright notice, this list // of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this // list of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // All advertising materials mentioning features or use of this software must display // the following acknowledgement: This product includes software developed by Polka Biome // Ltd., SORA, and Polkaswap. // // Neither the name of the Polka Biome Ltd. nor the names of its contributors may be used // to endorse or promote products derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY Polka Biome Ltd. AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Polka Biome Ltd. BE LIABLE FOR ANY // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; // OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use crate::{self as pswap_distribution, Config}; use common::mock::ExistentialDeposits; use common::prelude::Balance; use common::{ balance, fixed, fixed_from_basis_points, AssetName, AssetSymbol, BalancePrecision, Fixed, FromGenericPair, }; use currencies::BasicCurrencyAdapter; use frame_support::traits::GenesisBuild; use frame_support::weights::Weight; use frame_support::{construct_runtime, parameter_types}; use frame_system; use hex_literal::hex; use permissions::Scope; use sp_core::H256; use sp_runtime::testing::Header; use sp_runtime::traits::{BlakeTwo256, IdentityLookup, Zero}; use sp_runtime::{AccountId32, Perbill}; pub type AccountId = AccountId32; pub type BlockNumber = u64; pub type Amount = i128; pub type AssetId = common::AssetId32<common::PredefinedAssetId>; pub type TechAccountId = common::TechAccountId<AccountId, TechAssetId, DEXId>; type TechAssetId = common::TechAssetId<common::PredefinedAssetId>; type DEXId = common::DEXId; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>; type Block = frame_system::mocking::MockBlock<Runtime>; pub fn alice() -> AccountId { AccountId32::from([1u8; 32]) } pub fn fees_account_a() -> AccountId { AccountId32::from([2u8; 32]) } pub fn fees_account_b() -> AccountId { AccountId32::from([3u8; 32]) } pub fn liquidity_provider_a() -> AccountId { AccountId32::from([4u8; 32]) } pub fn liquidity_provider_b() -> AccountId { AccountId32::from([5u8; 32]) } pub fn liquidity_provider_c() -> AccountId { AccountId32::from([6u8; 32]) } pub const DEX_A_ID: DEXId = common::DEXId::Polkaswap; parameter_types! 
{ pub GetBaseAssetId: AssetId = common::XOR.into(); pub GetIncentiveAssetId: AssetId = common::PSWAP.into(); pub const PoolTokenAId: AssetId = common::AssetId32::from_bytes(hex!("0211110000000000000000000000000000000000000000000000000000000000")); pub const PoolTokenBId: AssetId = common::AssetId32::from_bytes(hex!("0222220000000000000000000000000000000000000000000000000000000000")); pub const BlockHashCount: u64 = 250; pub const MaximumBlockWeight: Weight = 1024; pub const MaximumBlockLength: u32 = 2 * 1024; pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); pub const GetDefaultFee: u16 = 30; pub const GetDefaultProtocolFee: u16 = 0; pub GetPswapDistributionTechAccountId: TechAccountId = { let tech_account_id = TechAccountId::from_generic_pair( crate::TECH_ACCOUNT_PREFIX.to_vec(), crate::TECH_ACCOUNT_MAIN.to_vec(), ); tech_account_id }; pub GetPswapDistributionAccountId: AccountId = { let tech_account_id = GetPswapDistributionTechAccountId::get(); let account_id = technical::Module::<Runtime>::tech_account_id_to_account_id(&tech_account_id) .expect("Failed to get ordinary account id for technical account id."); account_id }; pub const GetDefaultSubscriptionFrequency: BlockNumber = 10; pub const GetBurnUpdateFrequency: BlockNumber = 3; pub const ExistentialDeposit: u128 = 0; pub const TransferFee: u128 = 0; pub const CreationFee: u128 = 0; pub const TransactionByteFee: u128 = 1; pub GetFee: Fixed = fixed_from_basis_points(30u16); pub GetParliamentAccountId: AccountId = AccountId32::from([7u8; 32]); } construct_runtime! { pub enum Runtime where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, PswapDistribution: pswap_distribution::{Module, Call, Config<T>, Storage, Event<T>}, Tokens: tokens::{Module, Call, Config<T>, Storage, Event<T>}, Permissions: permissions::{Module, Call, Config<T>, Storage, Event<T>}, Currencies: currencies::{Module, Call, Storage, Event<T>}, Assets: assets::{Module, Call, Config<T>, Storage, Event<T>}, Balances: pallet_balances::{Module, Call, Config<T>, Storage, Event<T>}, Technical: technical::{Module, Call, Storage, Event<T>}, DexManager: dex_manager::{Module, Call, Storage}, } } impl frame_system::Config for Runtime { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = AccountId; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type DbWeight = (); type Version = (); type AccountData = pallet_balances::AccountData<Balance>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type PalletInfo = PalletInfo; type SS58Prefix = (); } impl Config for Runtime { type Event = Event; type GetIncentiveAssetId = GetIncentiveAssetId; type LiquidityProxy = (); type CompatBalance = Balance; type GetDefaultSubscriptionFrequency = GetDefaultSubscriptionFrequency; type GetBurnUpdateFrequency = GetBurnUpdateFrequency; type GetTechnicalAccountId = GetPswapDistributionAccountId; type EnsureDEXManager = DexManager; type OnPswapBurnedAggregator = (); type WeightInfo = (); type GetParliamentAccountId = GetParliamentAccountId; } impl tokens::Config for Runtime { type Event = Event; type Balance = Balance; type Amount = Amount; type CurrencyId = <Runtime as assets::Config>::AssetId; type WeightInfo = (); type 
ExistentialDeposits = ExistentialDeposits; type OnDust = (); } impl permissions::Config for Runtime { type Event = Event; } impl currencies::Config for Runtime { type Event = Event; type MultiCurrency = Tokens; type NativeCurrency = BasicCurrencyAdapter<Runtime, pallet_balances::Module<Runtime>, Amount, BlockNumber>; type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId; type WeightInfo = (); } impl assets::Config for Runtime { type Event = Event; type ExtraAccountId = [u8; 32]; type ExtraAssetRecordArg = common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>; type AssetId = AssetId; type GetBaseAssetId = GetBaseAssetId; type Currency = currencies::Module<Runtime>; type WeightInfo = (); } impl common::Config for Runtime { type DEXId = DEXId; type LstId = common::LiquiditySourceType; } impl pallet_balances::Config for Runtime { type Balance = Balance; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); type MaxLocks = (); } impl technical::Config for Runtime { type Event = Event; type TechAssetId = TechAssetId; type TechAccountId = TechAccountId; type Trigger = (); type Condition = (); type SwapAction = (); type WeightInfo = (); } impl dex_manager::Config for Runtime {} pub struct ExtBuilder { endowed_accounts: Vec<(AccountId, AssetId, Balance)>, endowed_assets: Vec<( AssetId, AccountId, AssetSymbol, AssetName, BalancePrecision, Balance, bool, )>, initial_permission_owners: Vec<(u32, Scope, Vec<AccountId>)>, initial_permissions: Vec<(AccountId, Scope, Vec<u32>)>, subscribed_accounts: Vec<(AccountId, (DEXId, AssetId, BlockNumber, BlockNumber))>, burn_info: (Fixed, Fixed, Fixed), } impl ExtBuilder { pub fn uninitialized() -> Self { Self { endowed_accounts: Vec::new(), endowed_assets: vec![( PoolTokenAId::get(), alice(), AssetSymbol(b"POOL".to_vec()), AssetName(b"Pool Token".to_vec()), 18, Balance::from(0u32), true, )], initial_permission_owners: Vec::new(), initial_permissions: Vec::new(), subscribed_accounts: Vec::new(), burn_info: (fixed!(0), fixed!(0.10), fixed!(0.30)), } } } impl ExtBuilder { pub fn
(accounts: Vec<(AccountId, AssetId, Balance)>) -> Self { let permissioned_account_id = GetPswapDistributionAccountId::get(); Self { endowed_accounts: accounts, endowed_assets: vec![ ( common::XOR.into(), alice(), AssetSymbol(b"XOR".to_vec()), AssetName(b"SORA".to_vec()), 18, Balance::zero(), true, ), ( common::PSWAP.into(), alice(), AssetSymbol(b"PSWAP".to_vec()), AssetName(b"Polkaswap".to_vec()), 10, Balance::zero(), true, ), ( PoolTokenAId::get(), alice(), AssetSymbol(b"POOLA".to_vec()), AssetName(b"Pool A".to_vec()), 18, Balance::zero(), true, ), ( PoolTokenBId::get(), alice(), AssetSymbol(b"POOLB".to_vec()), AssetName(b"Pool B".to_vec()), 18, Balance::zero(), true, ), ], initial_permission_owners: vec![], initial_permissions: vec![( permissioned_account_id, Scope::Unlimited, vec![permissions::MINT, permissions::BURN], )], subscribed_accounts: vec![ (fees_account_a(), (DEX_A_ID, PoolTokenAId::get(), 5, 0)), (fees_account_b(), (DEX_A_ID, PoolTokenBId::get(), 7, 0)), ], burn_info: (fixed!(0.1), fixed!(0.10), fixed!(0.40)), } } } impl Default for ExtBuilder { fn default() -> Self { ExtBuilder::with_accounts(vec![ (fees_account_a(), common::XOR.into(), balance!(1)), (fees_account_a(), common::PSWAP.into(), balance!(6)), (liquidity_provider_a(), PoolTokenAId::get(), balance!(3)), (liquidity_provider_b(), PoolTokenAId::get(), balance!(2)), (liquidity_provider_c(), PoolTokenAId::get(), balance!(1)), (liquidity_provider_a(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_b(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_c(), PoolTokenBId::get(), balance!(10)), ]) } } impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { let mut t = SystemConfig::default().build_storage::<Runtime>().unwrap(); let mut vec = self .endowed_accounts .iter() .map(|(acc, ..)| (acc.clone(), 0)) .chain(vec![ (alice(), 0), (fees_account_a(), 0), (fees_account_b(), 0), (GetPswapDistributionAccountId::get(), 0), (GetParliamentAccountId::get(), 0), ]) .collect::<Vec<_>>(); vec.sort_by_key(|x| x.0.clone()); vec.dedup_by(|x, y| x.0 == y.0); BalancesConfig { balances: vec } .assimilate_storage(&mut t) .unwrap(); PermissionsConfig { initial_permissions: self.initial_permissions, initial_permission_owners: self.initial_permission_owners, } .assimilate_storage(&mut t) .unwrap(); TokensConfig { endowed_accounts: self.endowed_accounts, } .assimilate_storage(&mut t) .unwrap(); AssetsConfig { endowed_assets: self.endowed_assets, } .assimilate_storage(&mut t) .unwrap(); PswapDistributionConfig { subscribed_accounts: self.subscribed_accounts, burn_info: self.burn_info, } .assimilate_storage(&mut t) .unwrap(); t.into() } }
with_accounts
identifier_name
mock.rs
// This file is part of the SORA network and Polkaswap app. // Copyright (c) 2020, 2021, Polka Biome Ltd. All rights reserved. // SPDX-License-Identifier: BSD-4-Clause // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // Redistributions of source code must retain the above copyright notice, this list // of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this // list of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // All advertising materials mentioning features or use of this software must display // the following acknowledgement: This product includes software developed by Polka Biome // Ltd., SORA, and Polkaswap. // // Neither the name of the Polka Biome Ltd. nor the names of its contributors may be used // to endorse or promote products derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY Polka Biome Ltd. AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Polka Biome Ltd. BE LIABLE FOR ANY // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; // OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use crate::{self as pswap_distribution, Config}; use common::mock::ExistentialDeposits; use common::prelude::Balance; use common::{ balance, fixed, fixed_from_basis_points, AssetName, AssetSymbol, BalancePrecision, Fixed, FromGenericPair, }; use currencies::BasicCurrencyAdapter; use frame_support::traits::GenesisBuild; use frame_support::weights::Weight; use frame_support::{construct_runtime, parameter_types}; use frame_system; use hex_literal::hex; use permissions::Scope; use sp_core::H256; use sp_runtime::testing::Header; use sp_runtime::traits::{BlakeTwo256, IdentityLookup, Zero}; use sp_runtime::{AccountId32, Perbill}; pub type AccountId = AccountId32; pub type BlockNumber = u64; pub type Amount = i128; pub type AssetId = common::AssetId32<common::PredefinedAssetId>; pub type TechAccountId = common::TechAccountId<AccountId, TechAssetId, DEXId>; type TechAssetId = common::TechAssetId<common::PredefinedAssetId>; type DEXId = common::DEXId; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>; type Block = frame_system::mocking::MockBlock<Runtime>; pub fn alice() -> AccountId { AccountId32::from([1u8; 32]) } pub fn fees_account_a() -> AccountId { AccountId32::from([2u8; 32]) } pub fn fees_account_b() -> AccountId { AccountId32::from([3u8; 32]) } pub fn liquidity_provider_a() -> AccountId { AccountId32::from([4u8; 32]) } pub fn liquidity_provider_b() -> AccountId { AccountId32::from([5u8; 32]) } pub fn liquidity_provider_c() -> AccountId { AccountId32::from([6u8; 32]) } pub const DEX_A_ID: DEXId = common::DEXId::Polkaswap; parameter_types! 
{ pub GetBaseAssetId: AssetId = common::XOR.into(); pub GetIncentiveAssetId: AssetId = common::PSWAP.into(); pub const PoolTokenAId: AssetId = common::AssetId32::from_bytes(hex!("0211110000000000000000000000000000000000000000000000000000000000")); pub const PoolTokenBId: AssetId = common::AssetId32::from_bytes(hex!("0222220000000000000000000000000000000000000000000000000000000000")); pub const BlockHashCount: u64 = 250; pub const MaximumBlockWeight: Weight = 1024; pub const MaximumBlockLength: u32 = 2 * 1024; pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); pub const GetDefaultFee: u16 = 30; pub const GetDefaultProtocolFee: u16 = 0; pub GetPswapDistributionTechAccountId: TechAccountId = { let tech_account_id = TechAccountId::from_generic_pair( crate::TECH_ACCOUNT_PREFIX.to_vec(), crate::TECH_ACCOUNT_MAIN.to_vec(), ); tech_account_id }; pub GetPswapDistributionAccountId: AccountId = { let tech_account_id = GetPswapDistributionTechAccountId::get(); let account_id = technical::Module::<Runtime>::tech_account_id_to_account_id(&tech_account_id) .expect("Failed to get ordinary account id for technical account id."); account_id }; pub const GetDefaultSubscriptionFrequency: BlockNumber = 10; pub const GetBurnUpdateFrequency: BlockNumber = 3; pub const ExistentialDeposit: u128 = 0; pub const TransferFee: u128 = 0; pub const CreationFee: u128 = 0; pub const TransactionByteFee: u128 = 1; pub GetFee: Fixed = fixed_from_basis_points(30u16); pub GetParliamentAccountId: AccountId = AccountId32::from([7u8; 32]); } construct_runtime! { pub enum Runtime where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Module, Call, Config, Storage, Event<T>}, PswapDistribution: pswap_distribution::{Module, Call, Config<T>, Storage, Event<T>}, Tokens: tokens::{Module, Call, Config<T>, Storage, Event<T>}, Permissions: permissions::{Module, Call, Config<T>, Storage, Event<T>}, Currencies: currencies::{Module, Call, Storage, Event<T>}, Assets: assets::{Module, Call, Config<T>, Storage, Event<T>}, Balances: pallet_balances::{Module, Call, Config<T>, Storage, Event<T>}, Technical: technical::{Module, Call, Storage, Event<T>}, DexManager: dex_manager::{Module, Call, Storage}, } } impl frame_system::Config for Runtime { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); type Origin = Origin; type Call = Call; type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = AccountId; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type DbWeight = (); type Version = (); type AccountData = pallet_balances::AccountData<Balance>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); type PalletInfo = PalletInfo; type SS58Prefix = (); } impl Config for Runtime { type Event = Event; type GetIncentiveAssetId = GetIncentiveAssetId; type LiquidityProxy = (); type CompatBalance = Balance; type GetDefaultSubscriptionFrequency = GetDefaultSubscriptionFrequency; type GetBurnUpdateFrequency = GetBurnUpdateFrequency; type GetTechnicalAccountId = GetPswapDistributionAccountId; type EnsureDEXManager = DexManager; type OnPswapBurnedAggregator = (); type WeightInfo = (); type GetParliamentAccountId = GetParliamentAccountId; } impl tokens::Config for Runtime { type Event = Event; type Balance = Balance; type Amount = Amount; type CurrencyId = <Runtime as assets::Config>::AssetId; type WeightInfo = (); type 
ExistentialDeposits = ExistentialDeposits; type OnDust = (); } impl permissions::Config for Runtime { type Event = Event; } impl currencies::Config for Runtime { type Event = Event; type MultiCurrency = Tokens; type NativeCurrency = BasicCurrencyAdapter<Runtime, pallet_balances::Module<Runtime>, Amount, BlockNumber>; type GetNativeCurrencyId = <Runtime as assets::Config>::GetBaseAssetId; type WeightInfo = (); } impl assets::Config for Runtime { type Event = Event; type ExtraAccountId = [u8; 32]; type ExtraAssetRecordArg = common::AssetIdExtraAssetRecordArg<common::DEXId, common::LiquiditySourceType, [u8; 32]>; type AssetId = AssetId; type GetBaseAssetId = GetBaseAssetId; type Currency = currencies::Module<Runtime>; type WeightInfo = (); } impl common::Config for Runtime { type DEXId = DEXId; type LstId = common::LiquiditySourceType; } impl pallet_balances::Config for Runtime { type Balance = Balance; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); type MaxLocks = (); } impl technical::Config for Runtime { type Event = Event; type TechAssetId = TechAssetId; type TechAccountId = TechAccountId; type Trigger = (); type Condition = (); type SwapAction = (); type WeightInfo = (); } impl dex_manager::Config for Runtime {} pub struct ExtBuilder { endowed_accounts: Vec<(AccountId, AssetId, Balance)>, endowed_assets: Vec<( AssetId, AccountId, AssetSymbol, AssetName, BalancePrecision, Balance, bool, )>, initial_permission_owners: Vec<(u32, Scope, Vec<AccountId>)>, initial_permissions: Vec<(AccountId, Scope, Vec<u32>)>, subscribed_accounts: Vec<(AccountId, (DEXId, AssetId, BlockNumber, BlockNumber))>, burn_info: (Fixed, Fixed, Fixed), } impl ExtBuilder { pub fn uninitialized() -> Self { Self { endowed_accounts: Vec::new(), endowed_assets: vec![( PoolTokenAId::get(), alice(), AssetSymbol(b"POOL".to_vec()), AssetName(b"Pool Token".to_vec()), 18, Balance::from(0u32), true, )], initial_permission_owners: Vec::new(), initial_permissions: Vec::new(), subscribed_accounts: Vec::new(), burn_info: (fixed!(0), fixed!(0.10), fixed!(0.30)), } } } impl ExtBuilder { pub fn with_accounts(accounts: Vec<(AccountId, AssetId, Balance)>) -> Self { let permissioned_account_id = GetPswapDistributionAccountId::get(); Self { endowed_accounts: accounts, endowed_assets: vec![ ( common::XOR.into(), alice(), AssetSymbol(b"XOR".to_vec()), AssetName(b"SORA".to_vec()), 18, Balance::zero(), true, ), ( common::PSWAP.into(), alice(), AssetSymbol(b"PSWAP".to_vec()), AssetName(b"Polkaswap".to_vec()), 10, Balance::zero(), true, ), ( PoolTokenAId::get(), alice(), AssetSymbol(b"POOLA".to_vec()), AssetName(b"Pool A".to_vec()), 18, Balance::zero(), true, ), ( PoolTokenBId::get(), alice(), AssetSymbol(b"POOLB".to_vec()), AssetName(b"Pool B".to_vec()), 18, Balance::zero(), true, ), ], initial_permission_owners: vec![], initial_permissions: vec![( permissioned_account_id, Scope::Unlimited, vec![permissions::MINT, permissions::BURN], )], subscribed_accounts: vec![ (fees_account_a(), (DEX_A_ID, PoolTokenAId::get(), 5, 0)), (fees_account_b(), (DEX_A_ID, PoolTokenBId::get(), 7, 0)), ], burn_info: (fixed!(0.1), fixed!(0.10), fixed!(0.40)), } } } impl Default for ExtBuilder { fn default() -> Self { ExtBuilder::with_accounts(vec![ (fees_account_a(), common::XOR.into(), balance!(1)), (fees_account_a(), common::PSWAP.into(), balance!(6)), (liquidity_provider_a(), PoolTokenAId::get(), balance!(3)), (liquidity_provider_b(), PoolTokenAId::get(), balance!(2)), 
(liquidity_provider_c(), PoolTokenAId::get(), balance!(1)), (liquidity_provider_a(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_b(), PoolTokenBId::get(), balance!(10)), (liquidity_provider_c(), PoolTokenBId::get(), balance!(10)), ]) } } impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { let mut t = SystemConfig::default().build_storage::<Runtime>().unwrap(); let mut vec = self .endowed_accounts .iter() .map(|(acc, ..)| (acc.clone(), 0)) .chain(vec![ (alice(), 0), (fees_account_a(), 0), (fees_account_b(), 0), (GetPswapDistributionAccountId::get(), 0), (GetParliamentAccountId::get(), 0), ]) .collect::<Vec<_>>(); vec.sort_by_key(|x| x.0.clone()); vec.dedup_by(|x, y| x.0 == y.0); BalancesConfig { balances: vec } .assimilate_storage(&mut t) .unwrap(); PermissionsConfig { initial_permissions: self.initial_permissions,
initial_permission_owners: self.initial_permission_owners, } .assimilate_storage(&mut t) .unwrap(); TokensConfig { endowed_accounts: self.endowed_accounts, } .assimilate_storage(&mut t) .unwrap(); AssetsConfig { endowed_assets: self.endowed_assets, } .assimilate_storage(&mut t) .unwrap(); PswapDistributionConfig { subscribed_accounts: self.subscribed_accounts, burn_info: self.burn_info, } .assimilate_storage(&mut t) .unwrap(); t.into() } }
random_line_split
list_view_items.rs
use std::cell::Cell; use std::ptr::NonNull; use crate::aliases::WinResult; use crate::co; use crate::handles::HWND; use crate::msg::lvm; use crate::structs::{LVFINDINFO, LVHITTESTINFO, LVITEM, RECT}; use crate::various::WString; /// Exposes item methods of a [`ListView`](crate::gui::ListView) control. /// /// You cannot directly instantiate this object, it is created internally by the /// control. pub struct ListViewItems { hwnd_ptr: Cell<NonNull<HWND>>, } impl ListViewItems { pub(in crate::gui::native_controls) fn new() -> ListViewItems { Self { hwnd_ptr: Cell::new(NonNull::from(&HWND::NULL)), // initially invalid } } pub(in crate::gui::native_controls) fn set_hwnd_ref(&self, hwnd_ref: &HWND) { self.hwnd_ptr.replace(NonNull::from(hwnd_ref)); } pub(in crate::gui::native_controls) fn hwnd(&self) -> HWND { unsafe { *self.hwnd_ptr.get().as_ref() } } /// Appends a new item by sending an /// [`LVM_INSERTITEM`](crate::msg::lvm::InsertItem) message, and returns its /// index. /// /// The texts are relative to each column. /// /// # Examples /// /// ```rust,ignore /// use winsafe::gui; /// /// let my_list: gui::ListView; // initialized somewhere /// /// my_list.items().add( /// &[ /// "First column text", /// "Second column text", /// ], /// None, // no icon; requires set_image_list() before /// ).unwrap(); /// ``` /// /// # Panics /// /// Panics if `texts` is empty, or if the number of texts is greater than /// the number of columns. pub fn add<S: AsRef<str>>(&self, texts: &[S], icon_index: Option<u32>) -> WinResult<u32> { if texts.is_empty() { panic!("No texts passed when adding a ListView item."); } let mut lvi = LVITEM::default(); lvi.mask = co::LVIF::TEXT | co::LVIF::IMAGE; lvi.iItem = 0x0fff_ffff; // insert as the last one lvi.iImage = match icon_index { Some(idx) => idx as _, None => -1, }; let mut wtext = WString::from_str(texts[0].as_ref()); lvi.set_pszText(Some(&mut wtext)); let new_idx = self.hwnd().SendMessage(lvm::InsertItem { lvitem: &lvi })?; for (idx, text) in texts.iter().skip(1).enumerate() { self.set_text(new_idx, idx as u32 + 1, text.as_ref())?; } Ok(new_idx) } /// Retrieves the total number of items by sending an /// [`LVM_GETITEMCOUNT`](crate::msg::lvm::GetItemCount) message. pub fn count(&self) -> u32 { self.hwnd().SendMessage(lvm::GetItemCount {}) } /// Deletes the items at the given indexes by sending an /// [`LVM_DELETEITEM`](crate::msg::lvm::DeleteItem) message. /// /// The indexes are iterated backwards, so the last item will be deleted /// first. pub fn delete(&self, item_indexes: &[u32]) -> WinResult<()> { for idx in item_indexes.iter().rev() { self.hwnd().SendMessage(lvm::DeleteItem { index: *idx, })?; } Ok(()) } /// Deletes all items by sending an /// [`LVM_DELETEALLITEMS`](crate::msg::lvm::DeleteAllItems) message. pub fn delete_all(&self) -> WinResult<()> { self.hwnd().SendMessage(lvm::DeleteAllItems {}) } /// Deletes the selected items by sending /// [`LVM_DELETEITEM`](crate::msg::lvm::DeleteItem) messages. pub fn delete_selected(&self) -> WinResult<()> { loop { match self.hwnd().SendMessage(lvm::GetNextItem { initial_index: None, relationship: co::LVNI::SELECTED, }) { Some(index) => self.hwnd().SendMessage(lvm::DeleteItem { index })?, None => break, }; } Ok(()) } /// Scrolls the list by sending an /// [`LVM_ENSUREVISIBLE`](crate::msg::lvm::EnsureVisible) message so that an /// item is visible in the list. 
pub fn ensure_visible(&self, item_index: u32) -> WinResult<()> { self.hwnd().SendMessage(lvm::EnsureVisible { index: item_index, entirely_visible: true, }) } /// Searches for an item with the given text, case-insensitive, by sending /// an [`LVM_FINDITEM`](crate::msg::lvm::FindItem) message. pub fn find(&self, text: &str) -> Option<u32> { let mut buf = WString::from_str(text); let mut lvfi = LVFINDINFO::default(); lvfi.flags = co::LVFI::STRING; lvfi.set_psz(Some(&mut buf)); self.hwnd().SendMessage(lvm::FindItem { start_index: None, lvfindinfo: &mut lvfi, }) } /// Retrieves the index of the focused item by sending an /// [`LVM_GETNEXTITEM`](crate::msg::lvm::GetNextItem) message. pub fn focused(&self) -> Option<u32> { self.hwnd().SendMessage(lvm::GetNextItem { initial_index: None, relationship: co::LVNI::FOCUSED, }) } /// Retrieves the item at the specified position by sending an /// [`LVM_HITTEST`](crate::msg::lvm::HitTest) message pub fn hit_test(&self, info: &mut LVHITTESTINFO) -> Option<u32> { self.hwnd().SendMessage(lvm::HitTest { info }) } /// Tells if the item is the focused one by sending an /// [`LVM_GETITEMSTATE`](crate::msg::lvm::GetItemState) message. pub fn is_focused(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::GetItemState { index: item_index, mask: co::LVIS::FOCUSED, }).has(co::LVIS::FOCUSED) } /// Tells if the item is selected by sending an /// [`LVM_GETITEMSTATE`](crate::msg::lvm::GetItemState) message. pub fn is_selected(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::GetItemState { index: item_index, mask: co::LVIS::SELECTED, }).has(co::LVIS::SELECTED) } /// Tells if the item is currently visible by sending an /// [`LVM_ISITEMVISIBLE`](crate::msg::lvm::IsItemVisible) message. pub fn is_visible(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::IsItemVisible { index: item_index }) } /// Retrieves the actual index of the unique ID by sending an /// [`LVM_MAPIDTOINDEX`](crate::msg::lvm::MapIdToIndex) message. pub fn map_id_to_index(&self, item_id: u32) -> Option<u32> { self.hwnd().SendMessage(lvm::MapIdToIndex { id: item_id }) } /// Retrieves an unique ID for the given index by sending an /// [`LVM_MAPINDEXTOID`](crate::msg::lvm::MapIndexToId) message. pub fn map_index_to_id(&self, item_index: u32) -> Option<u32> { self.hwnd().SendMessage(lvm::MapIndexToId { index: item_index }) } /// Retrieves the bound rectangle of item by sending an /// [`LVM_GETITEMRECT`](crate::msg::lvm::GetItemRect) message. pub fn rect(&self, item_index: u32, portion: co::LVIR) -> WinResult<RECT> { let mut rc = RECT::default(); self.hwnd().SendMessage(lvm::GetItemRect { index: item_index, rect: &mut rc, portion, })?; Ok(rc) } /// Retrieves the indexes of the selected items by sending /// [`LVM_GETNEXTITEM`](crate::msg::lvm::GetNextItem) messages. pub fn selected(&self) -> Vec<u32> { let mut items = Vec::with_capacity(self.selected_count() as _); let mut idx = None; loop { idx = match self.hwnd().SendMessage(lvm::GetNextItem { initial_index: idx, relationship: co::LVNI::SELECTED, }) { Some(idx) => { items.push(idx); Some(idx) }, None => break, }; } items } /// Retrieves the number of selected items by sending an /// [`LVM_GETSELECTEDCOUNT`](crate::msg::lvm::GetSelectedCount) message. pub fn selected_count(&self) -> u32 { self.hwnd().SendMessage(lvm::GetSelectedCount {}) } /// Sets the focused item by sending an /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) message. 
pub fn set_focused(&self, item_index: u32) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::FOCUSED; lvi.state = co::LVIS::FOCUSED; self.hwnd().SendMessage(lvm::SetItemState { index: Some(item_index), lvitem: &lvi, }) } /// Sets or removes the selection from the given item indexes by sending /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) messages. pub fn
(&self, set: bool, item_indexes: &[u32]) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::SELECTED; if set { lvi.state = co::LVIS::SELECTED; } for idx in item_indexes.iter() { self.hwnd().SendMessage(lvm::SetItemState { index: Some(*idx), lvitem: &lvi, })?; } Ok(()) } /// Sets or remove the selection for all items by sending an /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) message. pub fn set_selected_all(&self, set: bool) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::SELECTED; if set { lvi.state = co::LVIS::SELECTED; } self.hwnd().SendMessage(lvm::SetItemState { index: None, lvitem: &lvi, }) } /// Sets the text of an item under a column by sending an /// [`LVM_SETITEMTEXT`](crate::msg::lvm::SetItemText) message. pub fn set_text(&self, item_index: u32, column_index: u32, text: &str) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.iSubItem = column_index as _; let mut wtext = WString::from_str(text); lvi.set_pszText(Some(&mut wtext)); self.hwnd().SendMessage(lvm::SetItemText { index: item_index, lvitem: &lvi, }) } /// Retrieves the text of an item under a column by sending an /// [`LVM_GETITEMTEXT`](crate::msg::lvm::GetItemText) message. /// /// The passed buffer will be automatically allocated. /// /// This method can be more performant than /// [`text_str`](crate::gui::ListViewItems::text_str) because the buffer can be /// reused, avoiding multiple allocations. However, it has the inconvenient /// of the manual conversion from [`WString`](crate::WString) to `String`. /// /// # Examples /// /// ```rust,ignore /// use winsafe::{gui, WString}; /// /// let my_list: gui::ListView; // initialized somewhere /// /// let mut buf = WString::default(); /// my_list.items().text(0, 2, &mut buf); // 1st item, 3rd column /// /// println!("Text: {}", buf.to_string()); /// ``` pub fn text(&self, item_index: u32, column_index: u32, buf: &mut WString) { Self::text_retrieve(self.hwnd(), item_index, column_index, buf) } pub(in crate::gui::native_controls) fn text_retrieve( hwnd: HWND, item_index: u32, column_index: u32, buf: &mut WString) { // Static method because it's also used by ListViewColumns. // https://forums.codeguru.com/showthread.php?351972-Getting-listView-item-text-length const BLOCK: usize = 64; // arbitrary let mut buf_sz = BLOCK; let mut buf = buf; loop { let mut lvi = LVITEM::default(); lvi.iSubItem = column_index as _; buf.realloc_buffer(buf_sz); lvi.set_pszText(Some(&mut buf)); let nchars = hwnd.SendMessage(lvm::GetItemText { // char count without terminating null index: item_index, lvitem: &mut lvi, }); if (nchars as usize) + 1 < buf_sz { // to break, must have at least 1 char gap break; } buf_sz += BLOCK; // increase buffer size to try again } } /// A more convenient [`text`](crate::gui::ListViewItems::text), which /// directly returns a `String` instead of requiring an external buffer. /// /// # Examples /// /// ```rust,ignore /// use winsafe::gui; /// /// let my_list: gui::ListView; // initialized somewhere /// /// println!("Text: {}", my_list.items().text(0, 2)); // 1st item, 3rd column /// ``` pub fn text_str(&self, item_index: u32, column_index: u32) -> String { let mut buf = WString::default(); self.text(item_index, column_index, &mut buf); buf.to_string() } }
set_selected
identifier_name
list_view_items.rs
use std::cell::Cell; use std::ptr::NonNull; use crate::aliases::WinResult; use crate::co; use crate::handles::HWND; use crate::msg::lvm; use crate::structs::{LVFINDINFO, LVHITTESTINFO, LVITEM, RECT}; use crate::various::WString; /// Exposes item methods of a [`ListView`](crate::gui::ListView) control. /// /// You cannot directly instantiate this object, it is created internally by the /// control. pub struct ListViewItems { hwnd_ptr: Cell<NonNull<HWND>>, } impl ListViewItems { pub(in crate::gui::native_controls) fn new() -> ListViewItems { Self { hwnd_ptr: Cell::new(NonNull::from(&HWND::NULL)), // initially invalid } } pub(in crate::gui::native_controls) fn set_hwnd_ref(&self, hwnd_ref: &HWND) { self.hwnd_ptr.replace(NonNull::from(hwnd_ref)); } pub(in crate::gui::native_controls) fn hwnd(&self) -> HWND { unsafe { *self.hwnd_ptr.get().as_ref() } } /// Appends a new item by sending an /// [`LVM_INSERTITEM`](crate::msg::lvm::InsertItem) message, and returns its /// index. /// /// The texts are relative to each column. /// /// # Examples /// /// ```rust,ignore /// use winsafe::gui; /// /// let my_list: gui::ListView; // initialized somewhere /// /// my_list.items().add( /// &[ /// "First column text", /// "Second column text", /// ], /// None, // no icon; requires set_image_list() before /// ).unwrap(); /// ``` /// /// # Panics /// /// Panics if `texts` is empty, or if the number of texts is greater than /// the number of columns. pub fn add<S: AsRef<str>>(&self, texts: &[S], icon_index: Option<u32>) -> WinResult<u32> { if texts.is_empty() { panic!("No texts passed when adding a ListView item."); } let mut lvi = LVITEM::default(); lvi.mask = co::LVIF::TEXT | co::LVIF::IMAGE; lvi.iItem = 0x0fff_ffff; // insert as the last one lvi.iImage = match icon_index { Some(idx) => idx as _, None => -1, }; let mut wtext = WString::from_str(texts[0].as_ref()); lvi.set_pszText(Some(&mut wtext)); let new_idx = self.hwnd().SendMessage(lvm::InsertItem { lvitem: &lvi })?; for (idx, text) in texts.iter().skip(1).enumerate() { self.set_text(new_idx, idx as u32 + 1, text.as_ref())?; } Ok(new_idx) } /// Retrieves the total number of items by sending an /// [`LVM_GETITEMCOUNT`](crate::msg::lvm::GetItemCount) message. pub fn count(&self) -> u32 { self.hwnd().SendMessage(lvm::GetItemCount {}) } /// Deletes the items at the given indexes by sending an /// [`LVM_DELETEITEM`](crate::msg::lvm::DeleteItem) message. /// /// The indexes are iterated backwards, so the last item will be deleted /// first. pub fn delete(&self, item_indexes: &[u32]) -> WinResult<()> { for idx in item_indexes.iter().rev() { self.hwnd().SendMessage(lvm::DeleteItem { index: *idx, })?; } Ok(()) } /// Deletes all items by sending an /// [`LVM_DELETEALLITEMS`](crate::msg::lvm::DeleteAllItems) message. pub fn delete_all(&self) -> WinResult<()> { self.hwnd().SendMessage(lvm::DeleteAllItems {}) } /// Deletes the selected items by sending /// [`LVM_DELETEITEM`](crate::msg::lvm::DeleteItem) messages. pub fn delete_selected(&self) -> WinResult<()> { loop { match self.hwnd().SendMessage(lvm::GetNextItem { initial_index: None, relationship: co::LVNI::SELECTED, }) { Some(index) => self.hwnd().SendMessage(lvm::DeleteItem { index })?, None => break, }; } Ok(()) } /// Scrolls the list by sending an /// [`LVM_ENSUREVISIBLE`](crate::msg::lvm::EnsureVisible) message so that an /// item is visible in the list. 
pub fn ensure_visible(&self, item_index: u32) -> WinResult<()> { self.hwnd().SendMessage(lvm::EnsureVisible { index: item_index, entirely_visible: true, }) } /// Searches for an item with the given text, case-insensitive, by sending /// an [`LVM_FINDITEM`](crate::msg::lvm::FindItem) message. pub fn find(&self, text: &str) -> Option<u32> { let mut buf = WString::from_str(text); let mut lvfi = LVFINDINFO::default(); lvfi.flags = co::LVFI::STRING; lvfi.set_psz(Some(&mut buf)); self.hwnd().SendMessage(lvm::FindItem { start_index: None, lvfindinfo: &mut lvfi, }) } /// Retrieves the index of the focused item by sending an /// [`LVM_GETNEXTITEM`](crate::msg::lvm::GetNextItem) message. pub fn focused(&self) -> Option<u32> { self.hwnd().SendMessage(lvm::GetNextItem { initial_index: None, relationship: co::LVNI::FOCUSED, }) } /// Retrieves the item at the specified position by sending an /// [`LVM_HITTEST`](crate::msg::lvm::HitTest) message pub fn hit_test(&self, info: &mut LVHITTESTINFO) -> Option<u32> { self.hwnd().SendMessage(lvm::HitTest { info }) } /// Tells if the item is the focused one by sending an /// [`LVM_GETITEMSTATE`](crate::msg::lvm::GetItemState) message. pub fn is_focused(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::GetItemState { index: item_index, mask: co::LVIS::FOCUSED, }).has(co::LVIS::FOCUSED) } /// Tells if the item is selected by sending an /// [`LVM_GETITEMSTATE`](crate::msg::lvm::GetItemState) message. pub fn is_selected(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::GetItemState { index: item_index, mask: co::LVIS::SELECTED, }).has(co::LVIS::SELECTED) } /// Tells if the item is currently visible by sending an /// [`LVM_ISITEMVISIBLE`](crate::msg::lvm::IsItemVisible) message. pub fn is_visible(&self, item_index: u32) -> bool { self.hwnd().SendMessage(lvm::IsItemVisible { index: item_index }) } /// Retrieves the actual index of the unique ID by sending an /// [`LVM_MAPIDTOINDEX`](crate::msg::lvm::MapIdToIndex) message. pub fn map_id_to_index(&self, item_id: u32) -> Option<u32> { self.hwnd().SendMessage(lvm::MapIdToIndex { id: item_id }) } /// Retrieves an unique ID for the given index by sending an /// [`LVM_MAPINDEXTOID`](crate::msg::lvm::MapIndexToId) message. pub fn map_index_to_id(&self, item_index: u32) -> Option<u32> { self.hwnd().SendMessage(lvm::MapIndexToId { index: item_index }) } /// Retrieves the bound rectangle of item by sending an /// [`LVM_GETITEMRECT`](crate::msg::lvm::GetItemRect) message. pub fn rect(&self, item_index: u32, portion: co::LVIR) -> WinResult<RECT> { let mut rc = RECT::default(); self.hwnd().SendMessage(lvm::GetItemRect { index: item_index, rect: &mut rc, portion, })?; Ok(rc) } /// Retrieves the indexes of the selected items by sending /// [`LVM_GETNEXTITEM`](crate::msg::lvm::GetNextItem) messages. pub fn selected(&self) -> Vec<u32> { let mut items = Vec::with_capacity(self.selected_count() as _); let mut idx = None; loop { idx = match self.hwnd().SendMessage(lvm::GetNextItem { initial_index: idx, relationship: co::LVNI::SELECTED, }) { Some(idx) => { items.push(idx); Some(idx) }, None => break, }; } items } /// Retrieves the number of selected items by sending an /// [`LVM_GETSELECTEDCOUNT`](crate::msg::lvm::GetSelectedCount) message. pub fn selected_count(&self) -> u32 { self.hwnd().SendMessage(lvm::GetSelectedCount {}) } /// Sets the focused item by sending an /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) message. 
pub fn set_focused(&self, item_index: u32) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::FOCUSED; lvi.state = co::LVIS::FOCUSED; self.hwnd().SendMessage(lvm::SetItemState { index: Some(item_index), lvitem: &lvi, }) } /// Sets or remove the selection from the given item indexes by sending /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) messages. pub fn set_selected(&self, set: bool, item_indexes: &[u32]) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::SELECTED; if set { lvi.state = co::LVIS::SELECTED; } for idx in item_indexes.iter() { self.hwnd().SendMessage(lvm::SetItemState { index: Some(*idx), lvitem: &lvi, })?; } Ok(()) } /// Sets or remove the selection for all items by sending an /// [`LVM_SETITEMSTATE`](crate::msg::lvm::SetItemState) message. pub fn set_selected_all(&self, set: bool) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.stateMask = co::LVIS::SELECTED; if set { lvi.state = co::LVIS::SELECTED; } self.hwnd().SendMessage(lvm::SetItemState { index: None, lvitem: &lvi, }) } /// Sets the text of an item under a column by sending an /// [`LVM_SETITEMTEXT`](crate::msg::lvm::SetItemText) message. pub fn set_text(&self, item_index: u32, column_index: u32, text: &str) -> WinResult<()> { let mut lvi = LVITEM::default(); lvi.iSubItem = column_index as _; let mut wtext = WString::from_str(text); lvi.set_pszText(Some(&mut wtext)); self.hwnd().SendMessage(lvm::SetItemText { index: item_index, lvitem: &lvi, }) } /// Retrieves the text of an item under a column by sending an /// [`LVM_GETITEMTEXT`](crate::msg::lvm::GetItemText) message. /// /// The passed buffer will be automatically allocated. /// /// This method can be more performant than /// [`text_str`](crate::gui::ListViewItems::text_str) because the buffer can be /// reused, avoiding multiple allocations. However, it has the inconvenient /// of the manual conversion from [`WString`](crate::WString) to `String`. /// /// # Examples /// /// ```rust,ignore /// use winsafe::{gui, WString}; /// /// let my_list: gui::ListView; // initialized somewhere /// /// let mut buf = WString::default(); /// my_list.items().text(0, 2, &mut buf); // 1st item, 3rd column /// /// println!("Text: {}", buf.to_string()); /// ``` pub fn text(&self, item_index: u32, column_index: u32, buf: &mut WString) { Self::text_retrieve(self.hwnd(), item_index, column_index, buf) } pub(in crate::gui::native_controls) fn text_retrieve( hwnd: HWND, item_index: u32, column_index: u32, buf: &mut WString)
// Static method because it's also used by ListViewColumns. // https://forums.codeguru.com/showthread.php?351972-Getting-listView-item-text-length const BLOCK: usize = 64; // arbitrary let mut buf_sz = BLOCK; let mut buf = buf; loop { let mut lvi = LVITEM::default(); lvi.iSubItem = column_index as _; buf.realloc_buffer(buf_sz); lvi.set_pszText(Some(&mut buf)); let nchars = hwnd.SendMessage(lvm::GetItemText { // char count without terminating null index: item_index, lvitem: &mut lvi, }); if (nchars as usize) + 1 < buf_sz { // to break, must have at least 1 char gap break; } buf_sz += BLOCK; // increase buffer size to try again } } /// A more convenient [`text`](crate::gui::ListViewItems::text), which /// directly returns a `String` instead of requiring an external buffer. /// /// # Examples /// /// ```rust,ignore /// use winsafe::gui; /// /// let my_list: gui::ListView; // initialized somewhere /// /// println!("Text: {}", my_list.items().text(0, 2)); // 1st item, 3rd column /// ``` pub fn text_str(&self, item_index: u32, column_index: u32) -> String { let mut buf = WString::default(); self.text(item_index, column_index, &mut buf); buf.to_string() } }
{
random_line_split
index.js
const landing_page_puzzlepiece_container = 'landing-page-puzzlepiece-container'; const drag_to_start_story_div = 'drag-to-start-story-div'; /** * Enable drag and drop using the Dragula library */ const draggables = dragula([ /** * Adding all the elements to the same dragula might actually allow * any puzzle piece to get dragged and dropped into any puzzlespot, * but since all puzzle piece + spot pairs are on "separate pages", * this will work for our purposes. */ document.getElementById('landing-page-puzzlepiece-container'), document.getElementById('landing-page-puzzlespot'), document.getElementById('step-1-puzzlepiece-container'), document.getElementById('step-1-puzzlespot'), document.getElementById('step-2-puzzlepiece-container'), document.getElementById('step-2-puzzlespot'), document.getElementById('step-3-puzzlepiece-container'), document.getElementById('step-3-puzzlespot'), document.getElementById('step-4-part-1-puzzlepiece-container'), document.getElementById('step-4-part-1-puzzlespot'), document.getElementById('step-4-part-2-puzzlepiece-container'), document.getElementById('step-4-part-2-puzzlespot') ]); draggables.on('drag', function(el, source) { // Hide the "drag to start" when the BEGIN puzzle piece is dragged if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 0; } }); draggables.on('cancel', function (el, container, source) { // Show the "drag to start" when the BEGIN puzzle piece is dragged and dropped outside a dragula container if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 1; } }); draggables.on('drop', function (el, source, target, sibling) { // console.log("element", el); // The draggable puzzle piece // console.log("source", source); // The missing puzzle piece div is the source for some reason // console.log("target", target); // The container holding the puzzle piece is the target for some reason // Transition to the next page const isLandingPagePuzzlePiece = source.classList.contains('landing-page-puzzlepiece'); if (isLandingPagePuzzlePiece || source.classList.contains('puzzle-piece') || source.classList.contains('puzzle-piece-big') && source.classList.contains('missing') ) { // Go to the next page and disable the next button on the following page setTimeout(() => { transitionToNextPage(); target.parentElement.querySelector('.next-button')?.removeAttribute('disabled'); }, 400); // Hide the puzzle piece and show the BEGIN button instead once the user navigates past the landing page setTimeout(() => { if (isLandingPagePuzzlePiece) { document.getElementById('landing-page-nextback-container').style.opacity = 1; document.getElementById('landing-page-puzzle-grid').style.display = 'none'; } }, 1500); } }); /** * Give a visual hint to the user by animating puzzle pieces when the user is * hovering over a missing puzzle piece element. 
*/ const puzzlePuzzleEls = Array.from(document.querySelectorAll('.puzzle-piece, .puzzle-piece-big')); const missingPuzzleEls = Array.from(document.querySelectorAll('.missing')); for (const missingPuzzleEl of missingPuzzleEls) { missingPuzzleEl.addEventListener('mouseenter', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '2s'; puzzleEl.style.animationName = 'pulse'; } } }); missingPuzzleEl.addEventListener('mouseleave', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '6s'; puzzleEl.style.animationName = 'bounce'; } } }); } /** * Control transitions between pages. The current page is encoded in the URL as an id * e.g. https://esse-dev.github.io/a-web-monetization-story#page-0 */ const pageEls = document.getElementsByClassName('page-container'); let currentPageNum = -1; let pageElIdCounter = 0; for (const pageEl of pageEls) { pageEl.id = `page-${pageElIdCounter}`; pageElIdCounter++; } // The 'popstate' event is triggered when the user navigates toa new URL within the current website. // For instance, this happens when the user presses the browser back button. window.addEventListener('popstate', showPageInURL); // Once website is loaded show current page (to prevent images and fonts from showing up late) document.fonts.ready.then(showPageInURL); // Page was getting scrolled halfway between pages when resizing, transitionToPageInURL should // handle scrolling back to the proper position once the resize happens. window.addEventListener('resize', () => showPage(currentPageNum)); function showPageInURL() { // Get the page number encoded in the URL. If there is no page in the URL, default to 0. const pageInUrl = parseInt(window.location.hash.replace('#page-', '')) || 0; if (pageInUrl !== currentPageNum) { const isGoingToPreviousPage = pageInUrl === currentPageNum - 1; showPage(pageInUrl, isGoingToPreviousPage); } } function transitionToPage(nextPageNum, reverseAnimation = false) { const currentPageEl = pageEls[currentPageNum]; let delay = 0; // Get all animated elements in the current page element. const animatedEls = currentPageEl.querySelectorAll('.animate-in, .animate-out'); const animatedOutEls = currentPageEl.querySelectorAll('.animate-out'); const animatedInEls = currentPageEl.querySelectorAll('.animate-in'); // Hide all animated elements in the current page. // setTimeout is used so .animate-in elements are hidden AFTER transitioning to the next page. setTimeout(() => { for (const animatedEl of Array.from(animatedEls).reverse()) { const elIsAnimatingOut = (animatedEl.classList.contains('animate-out') && !reverseAnimation) || (animatedEl.classList.contains('animate-in') && reverseAnimation); if (!elIsAnimatingOut) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; setTimeout(() => { animatedEl.style.opacity = 0; }, 800); } if (elIsAnimatingOut) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; animatedEl.style.opacity = 0; delay += 0.1; } } }, 10); // Once all elements in the current page are hidden, show the next page. const isPageAnimatingOut = (animatedOutEls.length > 0 && !reverseAnimation) || (animatedInEls.length > 0 && reverseAnimation); const totalPageAnimateOutTime = delay*100 + 200; setTimeout(() => { window.location.href = '#page-' + nextPageNum; // Showing the next page is handled by the popstate listener }, isPageAnimatingOut ? 
totalPageAnimateOutTime + 400 : 20); } const navDotEls = Array.from(document.getElementsByClassName('nav-dot')); for (let i = 0; i < navDotEls.length; i++) { const navDotEl = navDotEls[i]; navDotEl.addEventListener('click', () => { transitionToPage(i, true); }); } const MAX_PAGE_NUM = navDotEls.length - 1; function transitionToNextPage() { if (currentPageNum < MAX_PAGE_NUM) { transitionToPage(currentPageNum + 1); } } function
() { if (currentPageNum > 0) { transitionToPage(currentPageNum - 1, true); } } // showPage is used by transitionToPage and transitionToPageInURL // not recommended to be called manually! function showPage(nextPageNum, reverseAnimation = false) { currentPageNum = nextPageNum; const nextPageEl = pageEls[nextPageNum]; nextPageEl.scrollIntoView(); let delay = 0; const animatedEls = nextPageEl.querySelectorAll('.animate-in, .animate-out'); for (const animatedEl of animatedEls) { const elIsAnimatingIn = (animatedEl.classList.contains('animate-in') && !reverseAnimation) || (animatedEl.classList.contains('animate-out') && reverseAnimation); if (!elIsAnimatingIn) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; } if (elIsAnimatingIn) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; } animatedEl.style.opacity = 1; delay += 0.1; } const navEl = document.getElementsByClassName('nav-dot-container')[0]; // Hide the navigation element on the landing page and the thank you page if (currentPageNum === 0 || currentPageNum === MAX_PAGE_NUM) { navEl.style.opacity = 0; } else { navEl.style.opacity = 1; } const navDogEl = document.getElementById('nav-dog'); const navDotWidth = navEl.offsetWidth / navDotEls.length; const navDogElOffset = 19; // higher number = move further left navDogEl.style.left = (navDotWidth/2 + navDotWidth*currentPageNum - navDogElOffset) + 'px'; let navDotCounter = 0; for (const navDotEl of navDotEls) { if (navDotCounter <= currentPageNum) { navDotEl.removeAttribute('disabled'); } navDotCounter++; } if (!nextPageEl.querySelector('.page-light-background')) { document.getElementById('footer').classList.add('dark-footer'); document.getElementById('footer').classList.remove('light-footer'); } else { document.getElementById('footer').classList.add('light-footer'); document.getElementById('footer').classList.remove('dark-footer'); } } async function copyCode() { // Read the basic_web_monetization_code.html file const codeText = await readFile(window.location.origin + '/a-web-monetization-story/' + 'samples/basic_web_monetization_code.html'); // Create hidden text area element to hold text, set the value and add it to the body const tempTextArea = document.createElement("textarea"); tempTextArea.value = codeText; // This element is visible, but is outside the visible view... // Hiding it seems to prevent the text area from being selectable // And thus the text cannot be copied document.body.appendChild(tempTextArea); // Select and copy the text to the clipboard tempTextArea.select(); document.execCommand('copy'); tempTextArea.remove(); // Let the user know that text has been copied to the clipboard document.getElementById('copied-code-image').style.opacity = 1; setTimeout(() => { document.getElementById('copied-code-image').style.opacity = 0; }, 2500); } /** * Read a file and return the text in the file. * * @param {String} fileName The name of the file to read. * @returns The text read from the file. */ async function readFile(fileName) { return await fetch(fileName) .then(response => { if (response.ok) { return response.text(); } else { console.log(response.status); throw Error(response.status); } }) .catch(error => console.log(error)); } /** * The following code adds basic keyboard navigation support. Users can tab-key between links on * pages. Users can use the arrow keys to go to the next and previous page. 
*/ function getAncestorEl(elem, selector) { for ( ; elem && elem !== document; elem = elem.parentNode ) { if ( elem.matches( selector ) ) return elem; } return null; }; document.addEventListener('keydown', (e) => { if (e.key === 'ArrowRight') { transitionToNextPage(); } if (e.key === 'ArrowLeft') { transitionToPreviousPage(); } }); window.addEventListener('focus', () => { const pageContainerAncestor = getAncestorEl(document.activeElement, '.page-container'); if (pageContainerAncestor) { showPage(parseInt(pageContainerAncestor.id.replace('page-', ''))); } }, true);
transitionToPreviousPage
identifier_name
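The index.js rows above all revolve around the same navigation idea: the current page is encoded in the URL hash as `#page-N`, navigation only rewrites the hash, and a `popstate` listener does the actual rendering, so the browser back button works for free. Below is a minimal, self-contained sketch of that pattern; the `.page-container` class matches the source, but `renderPage`, `goToPage`, `nextPage` and `previousPage` are hypothetical stand-ins for the fuller `showPage`/`transitionToPage` logic.

```js
// Minimal hash-driven pager. Each page is a .page-container element.
const pages = Array.from(document.getElementsByClassName('page-container'));
pages.forEach((el, i) => { el.id = `page-${i}`; });

let currentPage = 0;

// Hypothetical renderer: clamp the index and scroll the page into view.
function renderPage(n) {
  currentPage = Math.max(0, Math.min(n, pages.length - 1));
  pages[currentPage].scrollIntoView();
}

// Read the page number back out of the hash; default to 0 when absent.
function renderPageFromHash() {
  const n = parseInt(window.location.hash.replace('#page-', ''), 10) || 0;
  if (n !== currentPage) renderPage(n);
}

// Navigation only touches the hash; the listener below re-renders, so
// browser back/forward and in-app navigation share a single code path.
function goToPage(n) { window.location.hash = `#page-${n}`; }
function nextPage() { goToPage(currentPage + 1); }
function previousPage() { goToPage(currentPage - 1); }

// 'hashchange' would work equally well here; the source listens to 'popstate'.
window.addEventListener('popstate', renderPageFromHash);
document.fonts.ready.then(renderPageFromHash);
```

One consequence of this design, visible in the source as well, is that every page change becomes a history entry, which is what makes the `popstate`-driven rendering and the back button behave consistently.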
index.js
const landing_page_puzzlepiece_container = 'landing-page-puzzlepiece-container'; const drag_to_start_story_div = 'drag-to-start-story-div'; /** * Enable drag and drop using the Dragula library */ const draggables = dragula([ /** * Adding all the elements to the same dragula might actually allow * any puzzle piece to get dragged and dropped into any puzzlespot, * but since all puzzle piece + spot pairs are on "separate pages", * this will work for our purposes. */ document.getElementById('landing-page-puzzlepiece-container'), document.getElementById('landing-page-puzzlespot'), document.getElementById('step-1-puzzlepiece-container'), document.getElementById('step-1-puzzlespot'), document.getElementById('step-2-puzzlepiece-container'), document.getElementById('step-2-puzzlespot'), document.getElementById('step-3-puzzlepiece-container'), document.getElementById('step-3-puzzlespot'), document.getElementById('step-4-part-1-puzzlepiece-container'), document.getElementById('step-4-part-1-puzzlespot'), document.getElementById('step-4-part-2-puzzlepiece-container'), document.getElementById('step-4-part-2-puzzlespot') ]); draggables.on('drag', function(el, source) { // Hide the "drag to start" when the BEGIN puzzle piece is dragged if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 0; } }); draggables.on('cancel', function (el, container, source) { // Show the "drag to start" when the BEGIN puzzle piece is dragged and dropped outside a dragula container if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 1; } }); draggables.on('drop', function (el, source, target, sibling) { // console.log("element", el); // The draggable puzzle piece // console.log("source", source); // The missing puzzle piece div is the source for some reason // console.log("target", target); // The container holding the puzzle piece is the target for some reason // Transition to the next page const isLandingPagePuzzlePiece = source.classList.contains('landing-page-puzzlepiece'); if (isLandingPagePuzzlePiece || source.classList.contains('puzzle-piece') || source.classList.contains('puzzle-piece-big') && source.classList.contains('missing') ) { // Go to the next page and disable the next button on the following page setTimeout(() => { transitionToNextPage(); target.parentElement.querySelector('.next-button')?.removeAttribute('disabled'); }, 400); // Hide the puzzle piece and show the BEGIN button instead once the user navigates past the landing page setTimeout(() => { if (isLandingPagePuzzlePiece) { document.getElementById('landing-page-nextback-container').style.opacity = 1; document.getElementById('landing-page-puzzle-grid').style.display = 'none'; } }, 1500); } }); /** * Give a visual hint to the user by animating puzzle pieces when the user is * hovering over a missing puzzle piece element. 
*/ const puzzlePuzzleEls = Array.from(document.querySelectorAll('.puzzle-piece, .puzzle-piece-big')); const missingPuzzleEls = Array.from(document.querySelectorAll('.missing')); for (const missingPuzzleEl of missingPuzzleEls) { missingPuzzleEl.addEventListener('mouseenter', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '2s'; puzzleEl.style.animationName = 'pulse'; } } }); missingPuzzleEl.addEventListener('mouseleave', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '6s'; puzzleEl.style.animationName = 'bounce'; } } }); } /** * Control transitions between pages. The current page is encoded in the URL as an id * e.g. https://esse-dev.github.io/a-web-monetization-story#page-0 */ const pageEls = document.getElementsByClassName('page-container'); let currentPageNum = -1;
pageEl.id = `page-${pageElIdCounter}`; pageElIdCounter++; } // The 'popstate' event is triggered when the user navigates toa new URL within the current website. // For instance, this happens when the user presses the browser back button. window.addEventListener('popstate', showPageInURL); // Once website is loaded show current page (to prevent images and fonts from showing up late) document.fonts.ready.then(showPageInURL); // Page was getting scrolled halfway between pages when resizing, transitionToPageInURL should // handle scrolling back to the proper position once the resize happens. window.addEventListener('resize', () => showPage(currentPageNum)); function showPageInURL() { // Get the page number encoded in the URL. If there is no page in the URL, default to 0. const pageInUrl = parseInt(window.location.hash.replace('#page-', '')) || 0; if (pageInUrl !== currentPageNum) { const isGoingToPreviousPage = pageInUrl === currentPageNum - 1; showPage(pageInUrl, isGoingToPreviousPage); } } function transitionToPage(nextPageNum, reverseAnimation = false) { const currentPageEl = pageEls[currentPageNum]; let delay = 0; // Get all animated elements in the current page element. const animatedEls = currentPageEl.querySelectorAll('.animate-in, .animate-out'); const animatedOutEls = currentPageEl.querySelectorAll('.animate-out'); const animatedInEls = currentPageEl.querySelectorAll('.animate-in'); // Hide all animated elements in the current page. // setTimeout is used so .animate-in elements are hidden AFTER transitioning to the next page. setTimeout(() => { for (const animatedEl of Array.from(animatedEls).reverse()) { const elIsAnimatingOut = (animatedEl.classList.contains('animate-out') && !reverseAnimation) || (animatedEl.classList.contains('animate-in') && reverseAnimation); if (!elIsAnimatingOut) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; setTimeout(() => { animatedEl.style.opacity = 0; }, 800); } if (elIsAnimatingOut) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; animatedEl.style.opacity = 0; delay += 0.1; } } }, 10); // Once all elements in the current page are hidden, show the next page. const isPageAnimatingOut = (animatedOutEls.length > 0 && !reverseAnimation) || (animatedInEls.length > 0 && reverseAnimation); const totalPageAnimateOutTime = delay*100 + 200; setTimeout(() => { window.location.href = '#page-' + nextPageNum; // Showing the next page is handled by the popstate listener }, isPageAnimatingOut ? totalPageAnimateOutTime + 400 : 20); } const navDotEls = Array.from(document.getElementsByClassName('nav-dot')); for (let i = 0; i < navDotEls.length; i++) { const navDotEl = navDotEls[i]; navDotEl.addEventListener('click', () => { transitionToPage(i, true); }); } const MAX_PAGE_NUM = navDotEls.length - 1; function transitionToNextPage() { if (currentPageNum < MAX_PAGE_NUM) { transitionToPage(currentPageNum + 1); } } function transitionToPreviousPage() { if (currentPageNum > 0) { transitionToPage(currentPageNum - 1, true); } } // showPage is used by transitionToPage and transitionToPageInURL // not recommended to be called manually! 
function showPage(nextPageNum, reverseAnimation = false) { currentPageNum = nextPageNum; const nextPageEl = pageEls[nextPageNum]; nextPageEl.scrollIntoView(); let delay = 0; const animatedEls = nextPageEl.querySelectorAll('.animate-in, .animate-out'); for (const animatedEl of animatedEls) { const elIsAnimatingIn = (animatedEl.classList.contains('animate-in') && !reverseAnimation) || (animatedEl.classList.contains('animate-out') && reverseAnimation); if (!elIsAnimatingIn) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; } if (elIsAnimatingIn) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; } animatedEl.style.opacity = 1; delay += 0.1; } const navEl = document.getElementsByClassName('nav-dot-container')[0]; // Hide the navigation element on the landing page and the thank you page if (currentPageNum === 0 || currentPageNum === MAX_PAGE_NUM) { navEl.style.opacity = 0; } else { navEl.style.opacity = 1; } const navDogEl = document.getElementById('nav-dog'); const navDotWidth = navEl.offsetWidth / navDotEls.length; const navDogElOffset = 19; // higher number = move further left navDogEl.style.left = (navDotWidth/2 + navDotWidth*currentPageNum - navDogElOffset) + 'px'; let navDotCounter = 0; for (const navDotEl of navDotEls) { if (navDotCounter <= currentPageNum) { navDotEl.removeAttribute('disabled'); } navDotCounter++; } if (!nextPageEl.querySelector('.page-light-background')) { document.getElementById('footer').classList.add('dark-footer'); document.getElementById('footer').classList.remove('light-footer'); } else { document.getElementById('footer').classList.add('light-footer'); document.getElementById('footer').classList.remove('dark-footer'); } } async function copyCode() { // Read the basic_web_monetization_code.html file const codeText = await readFile(window.location.origin + '/a-web-monetization-story/' + 'samples/basic_web_monetization_code.html'); // Create hidden text area element to hold text, set the value and add it to the body const tempTextArea = document.createElement("textarea"); tempTextArea.value = codeText; // This element is visible, but is outside the visible view... // Hiding it seems to prevent the text area from being selectable // And thus the text cannot be copied document.body.appendChild(tempTextArea); // Select and copy the text to the clipboard tempTextArea.select(); document.execCommand('copy'); tempTextArea.remove(); // Let the user know that text has been copied to the clipboard document.getElementById('copied-code-image').style.opacity = 1; setTimeout(() => { document.getElementById('copied-code-image').style.opacity = 0; }, 2500); } /** * Read a file and return the text in the file. * * @param {String} fileName The name of the file to read. * @returns The text read from the file. */ async function readFile(fileName) { return await fetch(fileName) .then(response => { if (response.ok) { return response.text(); } else { console.log(response.status); throw Error(response.status); } }) .catch(error => console.log(error)); } /** * The following code adds basic keyboard navigation support. Users can tab-key between links on * pages. Users can use the arrow keys to go to the next and previous page. 
*/ function getAncestorEl(elem, selector) { for ( ; elem && elem !== document; elem = elem.parentNode ) { if ( elem.matches( selector ) ) return elem; } return null; }; document.addEventListener('keydown', (e) => { if (e.key === 'ArrowRight') { transitionToNextPage(); } if (e.key === 'ArrowLeft') { transitionToPreviousPage(); } }); window.addEventListener('focus', () => { const pageContainerAncestor = getAncestorEl(document.activeElement, '.page-container'); if (pageContainerAncestor) { showPage(parseInt(pageContainerAncestor.id.replace('page-', ''))); } }, true);
let pageElIdCounter = 0; for (const pageEl of pageEls) {
random_line_split
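Another pattern these rows repeat is the staggered reveal inside `showPage` and `transitionToPage`: each `.animate-in` element gets the same 0.2s transition but an incrementally larger `transition-delay`, so children fade in one after another. The sketch below isolates just that idea under the assumption that the elements start at `opacity: 0` and have an `opacity` transition defined in CSS; `revealPage` is a hypothetical helper, while the class names and the 0.1s step come from the code above.

```js
// Fade a page's animated children in one after another by staggering
// transition-delay. Non-animating elements snap to full opacity instantly.
function revealPage(pageEl, reverse = false) {
  const els = pageEl.querySelectorAll('.animate-in, .animate-out');
  let delay = 0;
  for (const el of els) {
    const animatesIn =
      (el.classList.contains('animate-in') && !reverse) ||
      (el.classList.contains('animate-out') && reverse);
    if (animatesIn) {
      el.style.transitionDuration = '0.2s';
      el.style.transitionDelay = `${delay}s`; // later elements wait longer
      delay += 0.1;                           // 0.1s step between elements
    } else {
      el.style.transitionDuration = '0s';
      el.style.transitionDelay = '0s';
    }
    el.style.opacity = 1;
  }
  return delay; // total stagger, useful for timing the next navigation step
}
```

Returning the accumulated delay mirrors how the source derives `totalPageAnimateOutTime` from the same counter before it schedules the hash change.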
index.js
const landing_page_puzzlepiece_container = 'landing-page-puzzlepiece-container'; const drag_to_start_story_div = 'drag-to-start-story-div'; /** * Enable drag and drop using the Dragula library */ const draggables = dragula([ /** * Adding all the elements to the same dragula might actually allow * any puzzle piece to get dragged and dropped into any puzzlespot, * but since all puzzle piece + spot pairs are on "separate pages", * this will work for our purposes. */ document.getElementById('landing-page-puzzlepiece-container'), document.getElementById('landing-page-puzzlespot'), document.getElementById('step-1-puzzlepiece-container'), document.getElementById('step-1-puzzlespot'), document.getElementById('step-2-puzzlepiece-container'), document.getElementById('step-2-puzzlespot'), document.getElementById('step-3-puzzlepiece-container'), document.getElementById('step-3-puzzlespot'), document.getElementById('step-4-part-1-puzzlepiece-container'), document.getElementById('step-4-part-1-puzzlespot'), document.getElementById('step-4-part-2-puzzlepiece-container'), document.getElementById('step-4-part-2-puzzlespot') ]); draggables.on('drag', function(el, source) { // Hide the "drag to start" when the BEGIN puzzle piece is dragged if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 0; } }); draggables.on('cancel', function (el, container, source) { // Show the "drag to start" when the BEGIN puzzle piece is dragged and dropped outside a dragula container if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 1; } }); draggables.on('drop', function (el, source, target, sibling) { // console.log("element", el); // The draggable puzzle piece // console.log("source", source); // The missing puzzle piece div is the source for some reason // console.log("target", target); // The container holding the puzzle piece is the target for some reason // Transition to the next page const isLandingPagePuzzlePiece = source.classList.contains('landing-page-puzzlepiece'); if (isLandingPagePuzzlePiece || source.classList.contains('puzzle-piece') || source.classList.contains('puzzle-piece-big') && source.classList.contains('missing') ) { // Go to the next page and disable the next button on the following page setTimeout(() => { transitionToNextPage(); target.parentElement.querySelector('.next-button')?.removeAttribute('disabled'); }, 400); // Hide the puzzle piece and show the BEGIN button instead once the user navigates past the landing page setTimeout(() => { if (isLandingPagePuzzlePiece) { document.getElementById('landing-page-nextback-container').style.opacity = 1; document.getElementById('landing-page-puzzle-grid').style.display = 'none'; } }, 1500); } }); /** * Give a visual hint to the user by animating puzzle pieces when the user is * hovering over a missing puzzle piece element. 
*/ const puzzlePuzzleEls = Array.from(document.querySelectorAll('.puzzle-piece, .puzzle-piece-big')); const missingPuzzleEls = Array.from(document.querySelectorAll('.missing')); for (const missingPuzzleEl of missingPuzzleEls) { missingPuzzleEl.addEventListener('mouseenter', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '2s'; puzzleEl.style.animationName = 'pulse'; } } }); missingPuzzleEl.addEventListener('mouseleave', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '6s'; puzzleEl.style.animationName = 'bounce'; } } }); } /** * Control transitions between pages. The current page is encoded in the URL as an id * e.g. https://esse-dev.github.io/a-web-monetization-story#page-0 */ const pageEls = document.getElementsByClassName('page-container'); let currentPageNum = -1; let pageElIdCounter = 0; for (const pageEl of pageEls) { pageEl.id = `page-${pageElIdCounter}`; pageElIdCounter++; } // The 'popstate' event is triggered when the user navigates toa new URL within the current website. // For instance, this happens when the user presses the browser back button. window.addEventListener('popstate', showPageInURL); // Once website is loaded show current page (to prevent images and fonts from showing up late) document.fonts.ready.then(showPageInURL); // Page was getting scrolled halfway between pages when resizing, transitionToPageInURL should // handle scrolling back to the proper position once the resize happens. window.addEventListener('resize', () => showPage(currentPageNum)); function showPageInURL() { // Get the page number encoded in the URL. If there is no page in the URL, default to 0. const pageInUrl = parseInt(window.location.hash.replace('#page-', '')) || 0; if (pageInUrl !== currentPageNum) { const isGoingToPreviousPage = pageInUrl === currentPageNum - 1; showPage(pageInUrl, isGoingToPreviousPage); } } function transitionToPage(nextPageNum, reverseAnimation = false) { const currentPageEl = pageEls[currentPageNum]; let delay = 0; // Get all animated elements in the current page element. const animatedEls = currentPageEl.querySelectorAll('.animate-in, .animate-out'); const animatedOutEls = currentPageEl.querySelectorAll('.animate-out'); const animatedInEls = currentPageEl.querySelectorAll('.animate-in'); // Hide all animated elements in the current page. // setTimeout is used so .animate-in elements are hidden AFTER transitioning to the next page. setTimeout(() => { for (const animatedEl of Array.from(animatedEls).reverse()) { const elIsAnimatingOut = (animatedEl.classList.contains('animate-out') && !reverseAnimation) || (animatedEl.classList.contains('animate-in') && reverseAnimation); if (!elIsAnimatingOut) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; setTimeout(() => { animatedEl.style.opacity = 0; }, 800); } if (elIsAnimatingOut) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; animatedEl.style.opacity = 0; delay += 0.1; } } }, 10); // Once all elements in the current page are hidden, show the next page. const isPageAnimatingOut = (animatedOutEls.length > 0 && !reverseAnimation) || (animatedInEls.length > 0 && reverseAnimation); const totalPageAnimateOutTime = delay*100 + 200; setTimeout(() => { window.location.href = '#page-' + nextPageNum; // Showing the next page is handled by the popstate listener }, isPageAnimatingOut ? 
totalPageAnimateOutTime + 400 : 20); } const navDotEls = Array.from(document.getElementsByClassName('nav-dot')); for (let i = 0; i < navDotEls.length; i++) { const navDotEl = navDotEls[i]; navDotEl.addEventListener('click', () => { transitionToPage(i, true); }); } const MAX_PAGE_NUM = navDotEls.length - 1; function transitionToNextPage() { if (currentPageNum < MAX_PAGE_NUM) { transitionToPage(currentPageNum + 1); } } function transitionToPreviousPage() { if (currentPageNum > 0) { transitionToPage(currentPageNum - 1, true); } } // showPage is used by transitionToPage and transitionToPageInURL // not recommended to be called manually! function showPage(nextPageNum, reverseAnimation = false)
async function copyCode() { // Read the basic_web_monetization_code.html file const codeText = await readFile(window.location.origin + '/a-web-monetization-story/' + 'samples/basic_web_monetization_code.html'); // Create hidden text area element to hold text, set the value and add it to the body const tempTextArea = document.createElement("textarea"); tempTextArea.value = codeText; // This element is visible, but is outside the visible view... // Hiding it seems to prevent the text area from being selectable // And thus the text cannot be copied document.body.appendChild(tempTextArea); // Select and copy the text to the clipboard tempTextArea.select(); document.execCommand('copy'); tempTextArea.remove(); // Let the user know that text has been copied to the clipboard document.getElementById('copied-code-image').style.opacity = 1; setTimeout(() => { document.getElementById('copied-code-image').style.opacity = 0; }, 2500); } /** * Read a file and return the text in the file. * * @param {String} fileName The name of the file to read. * @returns The text read from the file. */ async function readFile(fileName) { return await fetch(fileName) .then(response => { if (response.ok) { return response.text(); } else { console.log(response.status); throw Error(response.status); } }) .catch(error => console.log(error)); } /** * The following code adds basic keyboard navigation support. Users can tab-key between links on * pages. Users can use the arrow keys to go to the next and previous page. */ function getAncestorEl(elem, selector) { for ( ; elem && elem !== document; elem = elem.parentNode ) { if ( elem.matches( selector ) ) return elem; } return null; }; document.addEventListener('keydown', (e) => { if (e.key === 'ArrowRight') { transitionToNextPage(); } if (e.key === 'ArrowLeft') { transitionToPreviousPage(); } }); window.addEventListener('focus', () => { const pageContainerAncestor = getAncestorEl(document.activeElement, '.page-container'); if (pageContainerAncestor) { showPage(parseInt(pageContainerAncestor.id.replace('page-', ''))); } }, true);
{ currentPageNum = nextPageNum; const nextPageEl = pageEls[nextPageNum]; nextPageEl.scrollIntoView(); let delay = 0; const animatedEls = nextPageEl.querySelectorAll('.animate-in, .animate-out'); for (const animatedEl of animatedEls) { const elIsAnimatingIn = (animatedEl.classList.contains('animate-in') && !reverseAnimation) || (animatedEl.classList.contains('animate-out') && reverseAnimation); if (!elIsAnimatingIn) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; } if (elIsAnimatingIn) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; } animatedEl.style.opacity = 1; delay += 0.1; } const navEl = document.getElementsByClassName('nav-dot-container')[0]; // Hide the navigation element on the landing page and the thank you page if (currentPageNum === 0 || currentPageNum === MAX_PAGE_NUM) { navEl.style.opacity = 0; } else { navEl.style.opacity = 1; } const navDogEl = document.getElementById('nav-dog'); const navDotWidth = navEl.offsetWidth / navDotEls.length; const navDogElOffset = 19; // higher number = move further left navDogEl.style.left = (navDotWidth/2 + navDotWidth*currentPageNum - navDogElOffset) + 'px'; let navDotCounter = 0; for (const navDotEl of navDotEls) { if (navDotCounter <= currentPageNum) { navDotEl.removeAttribute('disabled'); } navDotCounter++; } if (!nextPageEl.querySelector('.page-light-background')) { document.getElementById('footer').classList.add('dark-footer'); document.getElementById('footer').classList.remove('light-footer'); } else { document.getElementById('footer').classList.add('light-footer'); document.getElementById('footer').classList.remove('dark-footer'); } }
identifier_body
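The `copyCode` function in these rows copies a fetched HTML sample to the clipboard by appending a temporary `<textarea>`, selecting it, and calling `document.execCommand('copy')`. Below is a hedged sketch of the same flow: the textarea fallback mirrors the source, the `navigator.clipboard.writeText` branch is the newer asynchronous Clipboard API (it requires a secure context), and `fetchText` plus the relative sample path are placeholders rather than names from the original.

```js
// Fetch a text resource; throw on a non-2xx response.
async function fetchText(url) {
  const response = await fetch(url);
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  return response.text();
}

// Copy arbitrary text to the clipboard, preferring the async Clipboard API
// and falling back to the textarea + execCommand('copy') trick.
async function copyText(text) {
  if (navigator.clipboard && navigator.clipboard.writeText) {
    await navigator.clipboard.writeText(text);
    return;
  }
  const textArea = document.createElement('textarea');
  textArea.value = text;
  document.body.appendChild(textArea); // must be in the DOM to be selectable
  textArea.select();
  document.execCommand('copy');        // deprecated, but a common fallback
  textArea.remove();
}

// Example wiring (placeholder path):
// fetchText('samples/basic_web_monetization_code.html').then(copyText);
```

Keeping the fetch and the copy separate also makes the error handling explicit, whereas the original `readFile` swallows failures with `console.log` and resolves to `undefined`.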
index.js
const landing_page_puzzlepiece_container = 'landing-page-puzzlepiece-container'; const drag_to_start_story_div = 'drag-to-start-story-div'; /** * Enable drag and drop using the Dragula library */ const draggables = dragula([ /** * Adding all the elements to the same dragula might actually allow * any puzzle piece to get dragged and dropped into any puzzlespot, * but since all puzzle piece + spot pairs are on "separate pages", * this will work for our purposes. */ document.getElementById('landing-page-puzzlepiece-container'), document.getElementById('landing-page-puzzlespot'), document.getElementById('step-1-puzzlepiece-container'), document.getElementById('step-1-puzzlespot'), document.getElementById('step-2-puzzlepiece-container'), document.getElementById('step-2-puzzlespot'), document.getElementById('step-3-puzzlepiece-container'), document.getElementById('step-3-puzzlespot'), document.getElementById('step-4-part-1-puzzlepiece-container'), document.getElementById('step-4-part-1-puzzlespot'), document.getElementById('step-4-part-2-puzzlepiece-container'), document.getElementById('step-4-part-2-puzzlespot') ]); draggables.on('drag', function(el, source) { // Hide the "drag to start" when the BEGIN puzzle piece is dragged if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 0; } }); draggables.on('cancel', function (el, container, source) { // Show the "drag to start" when the BEGIN puzzle piece is dragged and dropped outside a dragula container if (source.id === landing_page_puzzlepiece_container) { document.getElementById(drag_to_start_story_div).style.opacity = 1; } }); draggables.on('drop', function (el, source, target, sibling) { // console.log("element", el); // The draggable puzzle piece // console.log("source", source); // The missing puzzle piece div is the source for some reason // console.log("target", target); // The container holding the puzzle piece is the target for some reason // Transition to the next page const isLandingPagePuzzlePiece = source.classList.contains('landing-page-puzzlepiece'); if (isLandingPagePuzzlePiece || source.classList.contains('puzzle-piece') || source.classList.contains('puzzle-piece-big') && source.classList.contains('missing') ) { // Go to the next page and disable the next button on the following page setTimeout(() => { transitionToNextPage(); target.parentElement.querySelector('.next-button')?.removeAttribute('disabled'); }, 400); // Hide the puzzle piece and show the BEGIN button instead once the user navigates past the landing page setTimeout(() => { if (isLandingPagePuzzlePiece) { document.getElementById('landing-page-nextback-container').style.opacity = 1; document.getElementById('landing-page-puzzle-grid').style.display = 'none'; } }, 1500); } }); /** * Give a visual hint to the user by animating puzzle pieces when the user is * hovering over a missing puzzle piece element. 
*/ const puzzlePuzzleEls = Array.from(document.querySelectorAll('.puzzle-piece, .puzzle-piece-big')); const missingPuzzleEls = Array.from(document.querySelectorAll('.missing')); for (const missingPuzzleEl of missingPuzzleEls) { missingPuzzleEl.addEventListener('mouseenter', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '2s'; puzzleEl.style.animationName = 'pulse'; } } }); missingPuzzleEl.addEventListener('mouseleave', () => { for (const puzzleEl of puzzlePuzzleEls) { if (!puzzleEl.classList.contains('missing')) { puzzleEl.style.animationDuration = '6s'; puzzleEl.style.animationName = 'bounce'; } } }); } /** * Control transitions between pages. The current page is encoded in the URL as an id * e.g. https://esse-dev.github.io/a-web-monetization-story#page-0 */ const pageEls = document.getElementsByClassName('page-container'); let currentPageNum = -1; let pageElIdCounter = 0; for (const pageEl of pageEls) { pageEl.id = `page-${pageElIdCounter}`; pageElIdCounter++; } // The 'popstate' event is triggered when the user navigates toa new URL within the current website. // For instance, this happens when the user presses the browser back button. window.addEventListener('popstate', showPageInURL); // Once website is loaded show current page (to prevent images and fonts from showing up late) document.fonts.ready.then(showPageInURL); // Page was getting scrolled halfway between pages when resizing, transitionToPageInURL should // handle scrolling back to the proper position once the resize happens. window.addEventListener('resize', () => showPage(currentPageNum)); function showPageInURL() { // Get the page number encoded in the URL. If there is no page in the URL, default to 0. const pageInUrl = parseInt(window.location.hash.replace('#page-', '')) || 0; if (pageInUrl !== currentPageNum) { const isGoingToPreviousPage = pageInUrl === currentPageNum - 1; showPage(pageInUrl, isGoingToPreviousPage); } } function transitionToPage(nextPageNum, reverseAnimation = false) { const currentPageEl = pageEls[currentPageNum]; let delay = 0; // Get all animated elements in the current page element. const animatedEls = currentPageEl.querySelectorAll('.animate-in, .animate-out'); const animatedOutEls = currentPageEl.querySelectorAll('.animate-out'); const animatedInEls = currentPageEl.querySelectorAll('.animate-in'); // Hide all animated elements in the current page. // setTimeout is used so .animate-in elements are hidden AFTER transitioning to the next page. setTimeout(() => { for (const animatedEl of Array.from(animatedEls).reverse()) { const elIsAnimatingOut = (animatedEl.classList.contains('animate-out') && !reverseAnimation) || (animatedEl.classList.contains('animate-in') && reverseAnimation); if (!elIsAnimatingOut) { animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; setTimeout(() => { animatedEl.style.opacity = 0; }, 800); } if (elIsAnimatingOut) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; animatedEl.style.opacity = 0; delay += 0.1; } } }, 10); // Once all elements in the current page are hidden, show the next page. const isPageAnimatingOut = (animatedOutEls.length > 0 && !reverseAnimation) || (animatedInEls.length > 0 && reverseAnimation); const totalPageAnimateOutTime = delay*100 + 200; setTimeout(() => { window.location.href = '#page-' + nextPageNum; // Showing the next page is handled by the popstate listener }, isPageAnimatingOut ? 
totalPageAnimateOutTime + 400 : 20); } const navDotEls = Array.from(document.getElementsByClassName('nav-dot')); for (let i = 0; i < navDotEls.length; i++) { const navDotEl = navDotEls[i]; navDotEl.addEventListener('click', () => { transitionToPage(i, true); }); } const MAX_PAGE_NUM = navDotEls.length - 1; function transitionToNextPage() { if (currentPageNum < MAX_PAGE_NUM) { transitionToPage(currentPageNum + 1); } } function transitionToPreviousPage() { if (currentPageNum > 0) { transitionToPage(currentPageNum - 1, true); } } // showPage is used by transitionToPage and transitionToPageInURL // not recommended to be called manually! function showPage(nextPageNum, reverseAnimation = false) { currentPageNum = nextPageNum; const nextPageEl = pageEls[nextPageNum]; nextPageEl.scrollIntoView(); let delay = 0; const animatedEls = nextPageEl.querySelectorAll('.animate-in, .animate-out'); for (const animatedEl of animatedEls) { const elIsAnimatingIn = (animatedEl.classList.contains('animate-in') && !reverseAnimation) || (animatedEl.classList.contains('animate-out') && reverseAnimation); if (!elIsAnimatingIn)
if (elIsAnimatingIn) { animatedEl.style.transitionDuration = '0.2s'; animatedEl.style.transitionDelay = `${delay}s`; } animatedEl.style.opacity = 1; delay += 0.1; } const navEl = document.getElementsByClassName('nav-dot-container')[0]; // Hide the navigation element on the landing page and the thank you page if (currentPageNum === 0 || currentPageNum === MAX_PAGE_NUM) { navEl.style.opacity = 0; } else { navEl.style.opacity = 1; } const navDogEl = document.getElementById('nav-dog'); const navDotWidth = navEl.offsetWidth / navDotEls.length; const navDogElOffset = 19; // higher number = move further left navDogEl.style.left = (navDotWidth/2 + navDotWidth*currentPageNum - navDogElOffset) + 'px'; let navDotCounter = 0; for (const navDotEl of navDotEls) { if (navDotCounter <= currentPageNum) { navDotEl.removeAttribute('disabled'); } navDotCounter++; } if (!nextPageEl.querySelector('.page-light-background')) { document.getElementById('footer').classList.add('dark-footer'); document.getElementById('footer').classList.remove('light-footer'); } else { document.getElementById('footer').classList.add('light-footer'); document.getElementById('footer').classList.remove('dark-footer'); } } async function copyCode() { // Read the basic_web_monetization_code.html file const codeText = await readFile(window.location.origin + '/a-web-monetization-story/' + 'samples/basic_web_monetization_code.html'); // Create hidden text area element to hold text, set the value and add it to the body const tempTextArea = document.createElement("textarea"); tempTextArea.value = codeText; // This element is visible, but is outside the visible view... // Hiding it seems to prevent the text area from being selectable // And thus the text cannot be copied document.body.appendChild(tempTextArea); // Select and copy the text to the clipboard tempTextArea.select(); document.execCommand('copy'); tempTextArea.remove(); // Let the user know that text has been copied to the clipboard document.getElementById('copied-code-image').style.opacity = 1; setTimeout(() => { document.getElementById('copied-code-image').style.opacity = 0; }, 2500); } /** * Read a file and return the text in the file. * * @param {String} fileName The name of the file to read. * @returns The text read from the file. */ async function readFile(fileName) { return await fetch(fileName) .then(response => { if (response.ok) { return response.text(); } else { console.log(response.status); throw Error(response.status); } }) .catch(error => console.log(error)); } /** * The following code adds basic keyboard navigation support. Users can tab-key between links on * pages. Users can use the arrow keys to go to the next and previous page. */ function getAncestorEl(elem, selector) { for ( ; elem && elem !== document; elem = elem.parentNode ) { if ( elem.matches( selector ) ) return elem; } return null; }; document.addEventListener('keydown', (e) => { if (e.key === 'ArrowRight') { transitionToNextPage(); } if (e.key === 'ArrowLeft') { transitionToPreviousPage(); } }); window.addEventListener('focus', () => { const pageContainerAncestor = getAncestorEl(document.activeElement, '.page-container'); if (pageContainerAncestor) { showPage(parseInt(pageContainerAncestor.id.replace('page-', ''))); } }, true);
{ animatedEl.style.transitionDuration = '0s'; animatedEl.style.transitionDelay = '0s'; }
conditional_block
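The keyboard-navigation block at the end of each index.js row walks up `parentNode` links with a hand-rolled `getAncestorEl` to find the focused element's enclosing `.page-container`. The sketch below shows the same two behaviours, arrow-key paging and focus-follows-page, using the built-in `Element.closest()` instead of the manual walk; `goToPage`, `nextPage` and `previousPage` are the hypothetical helpers from the first sketch above, not names from the source.

```js
// Arrow keys page forward and back; ignore keystrokes inside form fields.
document.addEventListener('keydown', (e) => {
  if (e.target instanceof Element && e.target.matches('input, textarea, select')) return;
  if (e.key === 'ArrowRight') nextPage();
  if (e.key === 'ArrowLeft') previousPage();
});

// When Tab moves focus into another page, jump to that page so the focused
// link is actually visible. closest() replaces the manual ancestor walk.
window.addEventListener('focus', () => {
  const active = document.activeElement;
  const pageEl = active && active.closest('.page-container');
  if (pageEl) goToPage(parseInt(pageEl.id.replace('page-', ''), 10));
}, true); // capture phase: focus events do not bubble
```

Using the capture phase on `window` is the same trick the source relies on, since plain `focus` listeners on ancestors never fire otherwise.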
lib.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. //! Library to build `Custom_tag` OCaml values. use std::ffi::CStr; use std::ffi::CString; use std::mem::MaybeUninit; use std::ops::Deref; use std::os::raw::c_char; use std::os::raw::c_int; use std::os::raw::c_void; use std::rc::Rc; use ocamlrep::from; use ocamlrep::Allocator; use ocamlrep::FromError; use ocamlrep::FromOcamlRep; use ocamlrep::ToOcamlRep; use ocamlrep::Value; use ocamlrep::CUSTOM_TAG; use ocamlrep_ocamlpool::catch_unwind; extern "C" { fn caml_register_custom_operations(ops: *const CustomOperations); fn caml_serialize_block_1(data: *const u8, len: usize); fn caml_serialize_int_8(x: i64); fn caml_deserialize_sint_8() -> i64; fn caml_deserialize_block_1(data: *mut u8, len: usize); } /// Struct containing the operations for a custom OCaml block. /// /// This is the Rust encoding of OCaml's `struct custom_operations`. /// /// For more information on the fields see /// [the OCaml guide](https://caml.inria.fr/pub/docs/manual-ocaml/intfc.html#ss:c-custom-ops) #[repr(C)] pub struct CustomOperations { identifier: *const c_char, finalize: Option<extern "C" fn(usize) -> ()>, compare: Option<extern "C" fn(usize, usize) -> c_int>, hash: Option<extern "C" fn(usize) -> isize>, serialize: Option<extern "C" fn(usize, *mut usize, *mut usize) -> ()>, deserialize: Option<extern "C" fn(*mut c_void) -> usize>, compare_ext: Option<extern "C" fn(usize, usize) -> c_int>, /// Not implemented yet, always set to NULL. custom_fixed_length: *const c_void, } impl CustomOperations { /// Create a new custom block with the given identifier. /// /// All function pointers will be set to NULL by default. fn new(identifier: &'static CStr) -> Self { Self { identifier: identifier.as_ptr(), finalize: None, compare: None, hash: None, serialize: None, deserialize: None, compare_ext: None, custom_fixed_length: std::ptr::null(), } } } /// A wrapper around a Rust type that allows it /// to be written into/read from OCaml memory and managed by /// the OCaml GC. /// /// The value still lives on the Rust heap in an `Rc`'d pointer, /// and the `Rc`-pointer itself will be written to OCaml memory. /// /// # Examples /// /// Expose Rust type: /// /// ```rust /// use ocamlrep_custom::caml_serialize_default_impls; /// use ocamlrep_custom::{CamlSerialize, Custom}; /// use ocamlrep_ocamlpool::ocaml_ffi; /// use std::cell::Cell; /// /// pub struct Counter(Cell<isize>); /// /// impl CamlSerialize for Counter { /// caml_serialize_default_impls!(); /// } /// /// ocaml_ffi! 
{ /// fn counter_new() -> Custom<Counter> { /// Custom::from(Counter(Cell::new(0))) /// } /// /// fn counter_inc(counter: Custom<Counter>) -> Custom<Counter> { /// counter.0.set(counter.0.get() - 1); /// counter /// } /// /// fn counter_read(counter: Custom<Counter>) -> isize { /// counter.0.get() /// } /// } /// ``` /// /// From OCaml: /// /// ```ocaml /// type counter; (* abstract type *) /// /// external counter_new : unit -> counter = "counter_new" /// external counter_inc: counter -> unit = "counter_inc" /// external counter_read : counter -> isize = "counter_read" /// /// let () = /// let cnt = counter_new () in (* will be dropped on GC finalization *) /// assert (counter_read cnt == 0); /// counter_inc cnt; /// assert (counter_read cnt == 1) /// ``` pub struct Custom<T: CamlSerialize>(Rc<T>); impl<T: CamlSerialize> Custom<T> { /// Create a new `ToCustom` wrapper by taking ownership of the value. pub fn from(x: T) -> Self { Self::new(Rc::new(x)) } /// Create a new `ToCustom` directly from an `Rc`'d value. pub fn new(x: Rc<T>) -> Self { Self(x) } /// Get a reference to the inner `Rc` pub fn inner(&self) -> &Rc<T> { &self.0 } } impl<T: CamlSerialize> Deref for Custom<T> { type Target = T; fn deref(&self) -> &T { self.0.deref() } } /// A custom block has two words: a pointer to the CustomOperations struct, /// and a pointer the the value. Our values are ref-counted, but an Rc pointer /// is just pointer-sized. #[repr(C)] struct CustomBlockOcamlRep<T>(&'static CustomOperations, Rc<T>); const CUSTOM_BLOCK_SIZE_IN_BYTES: usize = std::mem::size_of::<CustomBlockOcamlRep<()>>(); const CUSTOM_BLOCK_SIZE_IN_WORDS: usize = CUSTOM_BLOCK_SIZE_IN_BYTES / std::mem::size_of::<Value<'_>>(); impl<T: CamlSerialize> ToOcamlRep for Custom<T> { fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> { let ops: &'static CustomOperations = <T as CamlSerialize>::operations(); let mut block = alloc.block_with_size_and_tag(CUSTOM_BLOCK_SIZE_IN_WORDS, CUSTOM_TAG); // Safety: we don't call any method on `alloc` after this method. let block_ptr: *mut Value<'_> = unsafe { alloc.block_ptr_mut(&mut block) }; // Safety: `alloc` guarantees that the `block_ptr` returned by // `block_ptr_mut` is aligend to `align_of::<Value>()` and valid // for reads and writes of `CUSTOM_BLOCK_SIZE_IN_WORDS * // size_of::<Value>()` bytes. Since `CustomBlockOcamlRep` has size // `CUSTOM_BLOCK_SIZE_IN_WORDS * size_of::<Value>()`, its // alignment is equal to `align_of::<Value>()`, and no other // reference to our newly-allocated block can exist, it's safe for us to // interpret `block_ptr` as a `&mut CustomBlockOcamlRep`. let block_ptr = block_ptr as *mut MaybeUninit<CustomBlockOcamlRep<T>>; let custom_block = unsafe { block_ptr.as_mut().unwrap() }; // Write the address of the operations struct to the first word, and the // pointer to the value to the second word. *custom_block = MaybeUninit::new(CustomBlockOcamlRep(ops, Rc::clone(&self.0))); block.build() } } impl<T: CamlSerialize> FromOcamlRep for Custom<T> { fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> { let rc = rc_from_value::<T>(value)?; let rc = Rc::clone(rc); Ok(Custom::new(rc)) } } /// Helper function to fetch a reference to the `Rc` from the OCaml representation /// of a custom block. 
fn rc_from_value<'a, T: CamlSerialize>(value: Value<'a>) -> Result<&'a Rc<T>, FromError> { let block = from::expect_block(value)?; from::expect_block_tag(block, CUSTOM_TAG)?; from::expect_block_size(block, CUSTOM_BLOCK_SIZE_IN_WORDS)?; // We still don't know whether this block is in fact a // CustomBlockOcamlRep<T>--it may be a CustomBlockOcamlRep<U>, or some // other custom block which happens to be the same size. We can verify // that the block is actually a CustomBlockOcamlRep<T> by checking that // it points to the correct CustomOperations struct. let ops = <T as CamlSerialize>::operations(); if !std::ptr::eq(ops, block[0].to_bits() as *const CustomOperations) { return Err(FromError::UnexpectedCustomOps { expected: ops as *const _ as usize, actual: block[0].to_bits(), }); } let value_ptr = value.to_bits() as *const CustomBlockOcamlRep<T>; // Safety: `value_ptr` is guaranteed to be aligned to // `align_of::<Value>()`, and our use of `expect_block_size` guarantees // that the pointer is valid for reads of `CUSTOM_BLOCK_SIZE_IN_WORDS * // `size_of::<Value>()` bytes. Since the first field points to the right // operations struct, we either have a valid `CustomBlockOCamlRep<T>` // (i.e., constructed above in our `ToOcamlRep` implementation) or // someone went out of their way to construct an invalid one. Assume // it's valid and read in the `CustomBlockOcamlRep<T>`. let custom_block = unsafe { value_ptr.as_ref().unwrap() }; Ok(&custom_block.1) } /// Trait that allows OCaml serialization and deserialization. /// /// If you want to support serialization/deserialization, you /// **MUST** call `CamlSerialize::register()` when starting up /// the program. /// /// This will register your type in the OCaml runtime, allowing /// deserialization. /// /// Rust does not support different instantiations of the default /// implementation for different implementors of trait types. Therefore, /// you must implement `type_identifier`, `operations` and `register` /// manually when implementing this trait for a type. You can use /// the `caml_serialize_default_impls!()` to do that automatically: /// /// ``` /// impl CamlSerialize for MyType { /// caml_serialize_default_impls!(); /// } /// ``` pub trait CamlSerialize: Sized { /// Get the type name. fn type_identifier() -> &'static CStr; /// Get the type's custom operations struct. /// /// Always has to return the same reference! If not, the /// OCaml-to-Rust conversion will fail. /// /// The returned structure is not intended to be used by /// a programmer. Using it directly by e.g. injecting it /// into OCaml custom blocks is dangerous and can cause /// undefined behavior. Don't do it! fn operations() -> &'static CustomOperations; /// Register the type with the OCaml system. /// /// # Safety /// /// Must not be called from multiple threads. /// /// This function interacts with the OCaml runtime, which is not thread-safe. /// If any other threads are attempting to interact with the OCaml runtime /// or its custom operations table (e.g., by invoking this function, or by /// executing OCaml code using custom blocks) when this function is invoked, /// undefined behavior will result. /// /// # Examples /// /// ``` /// use ocamlrep_custom::CamlSerialize; /// use ocamlrep_ocamlpool::ocaml_ffi; /// /// struct IntBox(isize); /// /// impl CamlSerialize for IntBox { /// caml_serialize_default_impls!(); /// fn serialize(&self) -> Vec<u8> { ... } /// fn deserialize(buffer: &[u8]) -> Self { ... } /// } /// /// ocaml_ffi! 
{ /// fn register_custom_types() { /// // Once `register_custom_types` has been invoked from OCaml, IntBox /// // can be serialized and deserialized from OCaml using the Marshal /// // module. /// // /// // Safety: this will be called from OCaml, as such nothing else will /// // be interacting with the OCaml runtime. /// unsafe { IntBox::register() }; /// } /// } /// ``` unsafe fn register(); /// Convert a value to an array of bytes. /// /// The default implementation panics. fn serialize(&self) -> Vec<u8> { panic!( "serialization not implemented for {:?}", Self::type_identifier() ) } /// Deserialize a value form an array of bytes. /// /// The default implementation panics. fn deserialize(_data: &[u8]) -> Self { panic!( "deserialization not implemented for {:?}", Self::type_identifier() ) } } #[macro_export] macro_rules! caml_serialize_default_impls { () => { fn type_identifier() -> &'static std::ffi::CStr { static ONCE: std::sync::Once = std::sync::Once::new(); static mut TYPE_NAME: Option<std::ffi::CString> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { TYPE_NAME = Some($crate::type_identifier_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { TYPE_NAME.as_ref().unwrap() } } fn operations() -> &'static $crate::CustomOperations { static ONCE: std::sync::Once = std::sync::Once::new(); static mut OPS_STRUCT: Option<$crate::CustomOperations> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { OPS_STRUCT = Some($crate::operations_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { OPS_STRUCT.as_ref().unwrap() } } unsafe fn register() { static mut IS_REGISTERED: bool = false; // Safety: Can only be called in a single-threaded context! if IS_REGISTERED { return; } IS_REGISTERED = true; let ops = Self::operations(); $crate::register_helper::<Self>(ops) } }; } /// Helper used for the `caml_serialize_default_impls` macro pub fn type_identifier_helper<T>() -> CString { let name = format!("ocamlrep.custom.{}", std::any::type_name::<T>()); std::ffi::CString::new(name).unwrap() } /// Helper used for the `caml_serialize_default_impls` macro pub fn operations_helper<T: CamlSerialize>() -> CustomOperations { let type_identifier = <T as CamlSerialize>::type_identifier(); let mut ops = CustomOperations::new(type_identifier); ops.finalize = Some(drop_value::<T>); ops.serialize = Some(serialize_value::<T>); ops.deserialize = Some(deserialize_value::<T>); ops } /// Helper used for the `caml_serialize_default_impls` macro /// /// Should not be used directly. Interacts with the OCaml runtime and is /// thus unsafe to call in a multi-threaded context. pub unsafe fn register_helper<T>(ops: &'static CustomOperations) { // Safety: operations struct has a static lifetime, it will live forever! caml_register_custom_operations(ops as *const CustomOperations); } /// Helper function used by `operations_helper`. Returns a finalizer for custom /// blocks containing an `Rc<T>`. extern "C" fn drop_value<T: CamlSerialize>(value: usize) { let _: usize = catch_unwind(|| {
// a CustomBlockOcamlRep<T> created by T::to_ocamlrep. Such a pointer // would be aligned and valid. let custom_block_ptr = value as *mut CustomBlockOcamlRep<T>; let custom_block = unsafe { custom_block_ptr.as_mut().unwrap() }; // The `Rc` will be dropped here, and its reference count will decrease // by one (possibly freeing the referenced value). // Safety: Since the OCaml runtime will only invoke the finalizer for a // value which will never again be used, it is safe to use // `drop_in_place` (i.e., our finalizer will only be invoked once, so we // won't cause a double-drop). unsafe { std::ptr::drop_in_place(&mut custom_block.1); } 0 }); } /// Helper function for serialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn serialize_value<T: CamlSerialize>( value: usize, bsize_32: *mut usize, bsize_64: *mut usize, ) { let _: usize = catch_unwind(|| { // Safety: Only called by the OCaml runtime (we don't expose a means of // invoking this function from Rust), which provides some OCaml // CUSTOM_TAG block as the value. let value = unsafe { Value::from_bits(value) }; // Only called by the OCaml runtime, when serializing // a Custom-object managed by the OCaml GC. let rc = rc_from_value::<T>(value).unwrap(); let bytes: Vec<u8> = rc.serialize(); let bytes_ptr = bytes.as_ptr(); // Safety: As above, we don't expose a means of invoking this function // from Rust--it can only be invoked by the OCaml runtime while // serializing a value. It is safe to invoke OCaml serialization // functions in this context. unsafe { let len = bytes.len(); caml_serialize_int_8(len.try_into().unwrap()); caml_serialize_block_1(bytes_ptr, len); // The size taken up in the data-part of the custom block. *bsize_32 = std::mem::size_of::<u32>(); *bsize_64 = std::mem::size_of::<u64>(); } 0 }); } /// Helper function for deserialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn deserialize_value<T: CamlSerialize>(data_ptr: *mut c_void) -> usize { catch_unwind(|| { // Get the serialized bytes from the input channel. let bytes = unsafe { // Safety: We don't expose a means of invoking this function from // Rust--`deserialize_value` can only be invoked by the OCaml // runtime while deserializing a custom block value. It is safe to // invoke OCaml deserialization functions in this context. let len: usize = caml_deserialize_sint_8().try_into().unwrap(); let mut buf: Vec<u8> = Vec::with_capacity(len); // Safety: len <= capacity. The elements aren't initialized at this // time, but we trust that caml_deserialize_block_1 will fill `len` // bytes of the buffer. #[allow(clippy::uninit_vec)] buf.set_len(len); // Safety: As above, `deserialize_value` can only be invoked by the // OCaml runtime during custom block deserialization. caml_deserialize_block_1(buf.as_mut_ptr(), len); buf }; // Actually deserialize those bytes into a T. let val: T = CamlSerialize::deserialize(&bytes); // Safety: The OCaml runtime will give us a data buffer which is // usize-aligned and valid for reads and writes of bsize_32 or bsize_64 // (as provided by `serialize_value`, above) bytes (depending on system // architecture). This is sufficient for `Rc<T>` (which has the size and // alignment of usize). 
let data_ptr = data_ptr as *mut MaybeUninit<Rc<T>>; let data = unsafe { data_ptr.as_mut().unwrap() }; *data = MaybeUninit::new(Rc::new(val)); // Return the size of the value we wrote to our output pointer. The // OCaml runtime will verify that it matches the expected // bsize_32/bsize_64 written by the serializer. std::mem::size_of_val(data) }) } #[cfg(test)] mod test { use std::mem::*; use super::*; #[test] fn custom_block_ocamlrep_size() { assert_eq!( size_of::<CustomBlockOcamlRep<u8>>(), 2 * size_of::<Value<'_>>() ); } #[test] fn custom_block_ocamlrep_align() { assert_eq!( align_of::<CustomBlockOcamlRep<u8>>(), align_of::<Value<'_>>() ); } }
// Safety: We trust here that CustomOperations structs containing this // `drop_value` instance will only ever be referenced by custom blocks // matching the layout of `CustomBlockOcamlRep`. If that's so, then this // function should only be invoked by the OCaml runtime on a pointer to
random_line_split
lib.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. //! Library to build `Custom_tag` OCaml values. use std::ffi::CStr; use std::ffi::CString; use std::mem::MaybeUninit; use std::ops::Deref; use std::os::raw::c_char; use std::os::raw::c_int; use std::os::raw::c_void; use std::rc::Rc; use ocamlrep::from; use ocamlrep::Allocator; use ocamlrep::FromError; use ocamlrep::FromOcamlRep; use ocamlrep::ToOcamlRep; use ocamlrep::Value; use ocamlrep::CUSTOM_TAG; use ocamlrep_ocamlpool::catch_unwind; extern "C" { fn caml_register_custom_operations(ops: *const CustomOperations); fn caml_serialize_block_1(data: *const u8, len: usize); fn caml_serialize_int_8(x: i64); fn caml_deserialize_sint_8() -> i64; fn caml_deserialize_block_1(data: *mut u8, len: usize); } /// Struct containing the operations for a custom OCaml block. /// /// This is the Rust encoding of OCaml's `struct custom_operations`. /// /// For more information on the fields see /// [the OCaml guide](https://caml.inria.fr/pub/docs/manual-ocaml/intfc.html#ss:c-custom-ops) #[repr(C)] pub struct CustomOperations { identifier: *const c_char, finalize: Option<extern "C" fn(usize) -> ()>, compare: Option<extern "C" fn(usize, usize) -> c_int>, hash: Option<extern "C" fn(usize) -> isize>, serialize: Option<extern "C" fn(usize, *mut usize, *mut usize) -> ()>, deserialize: Option<extern "C" fn(*mut c_void) -> usize>, compare_ext: Option<extern "C" fn(usize, usize) -> c_int>, /// Not implemented yet, always set to NULL. custom_fixed_length: *const c_void, } impl CustomOperations { /// Create a new custom block with the given identifier. /// /// All function pointers will be set to NULL by default. fn new(identifier: &'static CStr) -> Self { Self { identifier: identifier.as_ptr(), finalize: None, compare: None, hash: None, serialize: None, deserialize: None, compare_ext: None, custom_fixed_length: std::ptr::null(), } } } /// A wrapper around a Rust type that allows it /// to be written into/read from OCaml memory and managed by /// the OCaml GC. /// /// The value still lives on the Rust heap in an `Rc`'d pointer, /// and the `Rc`-pointer itself will be written to OCaml memory. /// /// # Examples /// /// Expose Rust type: /// /// ```rust /// use ocamlrep_custom::caml_serialize_default_impls; /// use ocamlrep_custom::{CamlSerialize, Custom}; /// use ocamlrep_ocamlpool::ocaml_ffi; /// use std::cell::Cell; /// /// pub struct Counter(Cell<isize>); /// /// impl CamlSerialize for Counter { /// caml_serialize_default_impls!(); /// } /// /// ocaml_ffi! 
{ /// fn counter_new() -> Custom<Counter> { /// Custom::from(Counter(Cell::new(0))) /// } /// /// fn counter_inc(counter: Custom<Counter>) -> Custom<Counter> { /// counter.0.set(counter.0.get() - 1); /// counter /// } /// /// fn counter_read(counter: Custom<Counter>) -> isize { /// counter.0.get() /// } /// } /// ``` /// /// From OCaml: /// /// ```ocaml /// type counter; (* abstract type *) /// /// external counter_new : unit -> counter = "counter_new" /// external counter_inc: counter -> unit = "counter_inc" /// external counter_read : counter -> isize = "counter_read" /// /// let () = /// let cnt = counter_new () in (* will be dropped on GC finalization *) /// assert (counter_read cnt == 0); /// counter_inc cnt; /// assert (counter_read cnt == 1) /// ``` pub struct Custom<T: CamlSerialize>(Rc<T>); impl<T: CamlSerialize> Custom<T> { /// Create a new `ToCustom` wrapper by taking ownership of the value. pub fn from(x: T) -> Self { Self::new(Rc::new(x)) } /// Create a new `ToCustom` directly from an `Rc`'d value. pub fn new(x: Rc<T>) -> Self { Self(x) } /// Get a reference to the inner `Rc` pub fn inner(&self) -> &Rc<T> { &self.0 } } impl<T: CamlSerialize> Deref for Custom<T> { type Target = T; fn deref(&self) -> &T { self.0.deref() } } /// A custom block has two words: a pointer to the CustomOperations struct, /// and a pointer the the value. Our values are ref-counted, but an Rc pointer /// is just pointer-sized. #[repr(C)] struct CustomBlockOcamlRep<T>(&'static CustomOperations, Rc<T>); const CUSTOM_BLOCK_SIZE_IN_BYTES: usize = std::mem::size_of::<CustomBlockOcamlRep<()>>(); const CUSTOM_BLOCK_SIZE_IN_WORDS: usize = CUSTOM_BLOCK_SIZE_IN_BYTES / std::mem::size_of::<Value<'_>>(); impl<T: CamlSerialize> ToOcamlRep for Custom<T> { fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> { let ops: &'static CustomOperations = <T as CamlSerialize>::operations(); let mut block = alloc.block_with_size_and_tag(CUSTOM_BLOCK_SIZE_IN_WORDS, CUSTOM_TAG); // Safety: we don't call any method on `alloc` after this method. let block_ptr: *mut Value<'_> = unsafe { alloc.block_ptr_mut(&mut block) }; // Safety: `alloc` guarantees that the `block_ptr` returned by // `block_ptr_mut` is aligend to `align_of::<Value>()` and valid // for reads and writes of `CUSTOM_BLOCK_SIZE_IN_WORDS * // size_of::<Value>()` bytes. Since `CustomBlockOcamlRep` has size // `CUSTOM_BLOCK_SIZE_IN_WORDS * size_of::<Value>()`, its // alignment is equal to `align_of::<Value>()`, and no other // reference to our newly-allocated block can exist, it's safe for us to // interpret `block_ptr` as a `&mut CustomBlockOcamlRep`. let block_ptr = block_ptr as *mut MaybeUninit<CustomBlockOcamlRep<T>>; let custom_block = unsafe { block_ptr.as_mut().unwrap() }; // Write the address of the operations struct to the first word, and the // pointer to the value to the second word. *custom_block = MaybeUninit::new(CustomBlockOcamlRep(ops, Rc::clone(&self.0))); block.build() } } impl<T: CamlSerialize> FromOcamlRep for Custom<T> { fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> { let rc = rc_from_value::<T>(value)?; let rc = Rc::clone(rc); Ok(Custom::new(rc)) } } /// Helper function to fetch a reference to the `Rc` from the OCaml representation /// of a custom block. 
fn rc_from_value<'a, T: CamlSerialize>(value: Value<'a>) -> Result<&'a Rc<T>, FromError> { let block = from::expect_block(value)?; from::expect_block_tag(block, CUSTOM_TAG)?; from::expect_block_size(block, CUSTOM_BLOCK_SIZE_IN_WORDS)?; // We still don't know whether this block is in fact a // CustomBlockOcamlRep<T>--it may be a CustomBlockOcamlRep<U>, or some // other custom block which happens to be the same size. We can verify // that the block is actually a CustomBlockOcamlRep<T> by checking that // it points to the correct CustomOperations struct. let ops = <T as CamlSerialize>::operations(); if !std::ptr::eq(ops, block[0].to_bits() as *const CustomOperations) { return Err(FromError::UnexpectedCustomOps { expected: ops as *const _ as usize, actual: block[0].to_bits(), }); } let value_ptr = value.to_bits() as *const CustomBlockOcamlRep<T>; // Safety: `value_ptr` is guaranteed to be aligned to // `align_of::<Value>()`, and our use of `expect_block_size` guarantees // that the pointer is valid for reads of `CUSTOM_BLOCK_SIZE_IN_WORDS * // `size_of::<Value>()` bytes. Since the first field points to the right // operations struct, we either have a valid `CustomBlockOCamlRep<T>` // (i.e., constructed above in our `ToOcamlRep` implementation) or // someone went out of their way to construct an invalid one. Assume // it's valid and read in the `CustomBlockOcamlRep<T>`. let custom_block = unsafe { value_ptr.as_ref().unwrap() }; Ok(&custom_block.1) } /// Trait that allows OCaml serialization and deserialization. /// /// If you want to support serialization/deserialization, you /// **MUST** call `CamlSerialize::register()` when starting up /// the program. /// /// This will register your type in the OCaml runtime, allowing /// deserialization. /// /// Rust does not support different instantiations of the default /// implementation for different implementors of trait types. Therefore, /// you must implement `type_identifier`, `operations` and `register` /// manually when implementing this trait for a type. You can use /// the `caml_serialize_default_impls!()` to do that automatically: /// /// ``` /// impl CamlSerialize for MyType { /// caml_serialize_default_impls!(); /// } /// ``` pub trait CamlSerialize: Sized { /// Get the type name. fn type_identifier() -> &'static CStr; /// Get the type's custom operations struct. /// /// Always has to return the same reference! If not, the /// OCaml-to-Rust conversion will fail. /// /// The returned structure is not intended to be used by /// a programmer. Using it directly by e.g. injecting it /// into OCaml custom blocks is dangerous and can cause /// undefined behavior. Don't do it! fn operations() -> &'static CustomOperations; /// Register the type with the OCaml system. /// /// # Safety /// /// Must not be called from multiple threads. /// /// This function interacts with the OCaml runtime, which is not thread-safe. /// If any other threads are attempting to interact with the OCaml runtime /// or its custom operations table (e.g., by invoking this function, or by /// executing OCaml code using custom blocks) when this function is invoked, /// undefined behavior will result. /// /// # Examples /// /// ``` /// use ocamlrep_custom::CamlSerialize; /// use ocamlrep_ocamlpool::ocaml_ffi; /// /// struct IntBox(isize); /// /// impl CamlSerialize for IntBox { /// caml_serialize_default_impls!(); /// fn serialize(&self) -> Vec<u8> { ... } /// fn deserialize(buffer: &[u8]) -> Self { ... } /// } /// /// ocaml_ffi! 
{ /// fn register_custom_types() { /// // Once `register_custom_types` has been invoked from OCaml, IntBox /// // can be serialized and deserialized from OCaml using the Marshal /// // module. /// // /// // Safety: this will be called from OCaml, as such nothing else will /// // be interacting with the OCaml runtime. /// unsafe { IntBox::register() }; /// } /// } /// ``` unsafe fn register(); /// Convert a value to an array of bytes. /// /// The default implementation panics. fn serialize(&self) -> Vec<u8> { panic!( "serialization not implemented for {:?}", Self::type_identifier() ) } /// Deserialize a value form an array of bytes. /// /// The default implementation panics. fn deserialize(_data: &[u8]) -> Self { panic!( "deserialization not implemented for {:?}", Self::type_identifier() ) } } #[macro_export] macro_rules! caml_serialize_default_impls { () => { fn type_identifier() -> &'static std::ffi::CStr { static ONCE: std::sync::Once = std::sync::Once::new(); static mut TYPE_NAME: Option<std::ffi::CString> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { TYPE_NAME = Some($crate::type_identifier_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { TYPE_NAME.as_ref().unwrap() } } fn operations() -> &'static $crate::CustomOperations { static ONCE: std::sync::Once = std::sync::Once::new(); static mut OPS_STRUCT: Option<$crate::CustomOperations> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { OPS_STRUCT = Some($crate::operations_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { OPS_STRUCT.as_ref().unwrap() } } unsafe fn register() { static mut IS_REGISTERED: bool = false; // Safety: Can only be called in a single-threaded context! if IS_REGISTERED { return; } IS_REGISTERED = true; let ops = Self::operations(); $crate::register_helper::<Self>(ops) } }; } /// Helper used for the `caml_serialize_default_impls` macro pub fn type_identifier_helper<T>() -> CString { let name = format!("ocamlrep.custom.{}", std::any::type_name::<T>()); std::ffi::CString::new(name).unwrap() } /// Helper used for the `caml_serialize_default_impls` macro pub fn operations_helper<T: CamlSerialize>() -> CustomOperations { let type_identifier = <T as CamlSerialize>::type_identifier(); let mut ops = CustomOperations::new(type_identifier); ops.finalize = Some(drop_value::<T>); ops.serialize = Some(serialize_value::<T>); ops.deserialize = Some(deserialize_value::<T>); ops } /// Helper used for the `caml_serialize_default_impls` macro /// /// Should not be used directly. Interacts with the OCaml runtime and is /// thus unsafe to call in a multi-threaded context. pub unsafe fn register_helper<T>(ops: &'static CustomOperations) { // Safety: operations struct has a static lifetime, it will live forever! caml_register_custom_operations(ops as *const CustomOperations); } /// Helper function used by `operations_helper`. Returns a finalizer for custom /// blocks containing an `Rc<T>`. extern "C" fn
<T: CamlSerialize>(value: usize) { let _: usize = catch_unwind(|| { // Safety: We trust here that CustomOperations structs containing this // `drop_value` instance will only ever be referenced by custom blocks // matching the layout of `CustomBlockOcamlRep`. If that's so, then this // function should only be invoked by the OCaml runtime on a pointer to // a CustomBlockOcamlRep<T> created by T::to_ocamlrep. Such a pointer // would be aligned and valid. let custom_block_ptr = value as *mut CustomBlockOcamlRep<T>; let custom_block = unsafe { custom_block_ptr.as_mut().unwrap() }; // The `Rc` will be dropped here, and its reference count will decrease // by one (possibly freeing the referenced value). // Safety: Since the OCaml runtime will only invoke the finalizer for a // value which will never again be used, it is safe to use // `drop_in_place` (i.e., our finalizer will only be invoked once, so we // won't cause a double-drop). unsafe { std::ptr::drop_in_place(&mut custom_block.1); } 0 }); } /// Helper function for serialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn serialize_value<T: CamlSerialize>( value: usize, bsize_32: *mut usize, bsize_64: *mut usize, ) { let _: usize = catch_unwind(|| { // Safety: Only called by the OCaml runtime (we don't expose a means of // invoking this function from Rust), which provides some OCaml // CUSTOM_TAG block as the value. let value = unsafe { Value::from_bits(value) }; // Only called by the OCaml runtime, when serializing // a Custom-object managed by the OCaml GC. let rc = rc_from_value::<T>(value).unwrap(); let bytes: Vec<u8> = rc.serialize(); let bytes_ptr = bytes.as_ptr(); // Safety: As above, we don't expose a means of invoking this function // from Rust--it can only be invoked by the OCaml runtime while // serializing a value. It is safe to invoke OCaml serialization // functions in this context. unsafe { let len = bytes.len(); caml_serialize_int_8(len.try_into().unwrap()); caml_serialize_block_1(bytes_ptr, len); // The size taken up in the data-part of the custom block. *bsize_32 = std::mem::size_of::<u32>(); *bsize_64 = std::mem::size_of::<u64>(); } 0 }); } /// Helper function for deserialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn deserialize_value<T: CamlSerialize>(data_ptr: *mut c_void) -> usize { catch_unwind(|| { // Get the serialized bytes from the input channel. let bytes = unsafe { // Safety: We don't expose a means of invoking this function from // Rust--`deserialize_value` can only be invoked by the OCaml // runtime while deserializing a custom block value. It is safe to // invoke OCaml deserialization functions in this context. let len: usize = caml_deserialize_sint_8().try_into().unwrap(); let mut buf: Vec<u8> = Vec::with_capacity(len); // Safety: len <= capacity. The elements aren't initialized at this // time, but we trust that caml_deserialize_block_1 will fill `len` // bytes of the buffer. #[allow(clippy::uninit_vec)] buf.set_len(len); // Safety: As above, `deserialize_value` can only be invoked by the // OCaml runtime during custom block deserialization. caml_deserialize_block_1(buf.as_mut_ptr(), len); buf }; // Actually deserialize those bytes into a T. 
let val: T = CamlSerialize::deserialize(&bytes); // Safety: The OCaml runtime will give us a data buffer which is // usize-aligned and valid for reads and writes of bsize_32 or bsize_64 // (as provided by `serialize_value`, above) bytes (depending on system // architecture). This is sufficient for `Rc<T>` (which has the size and // alignment of usize). let data_ptr = data_ptr as *mut MaybeUninit<Rc<T>>; let data = unsafe { data_ptr.as_mut().unwrap() }; *data = MaybeUninit::new(Rc::new(val)); // Return the size of the value we wrote to our output pointer. The // OCaml runtime will verify that it matches the expected // bsize_32/bsize_64 written by the serializer. std::mem::size_of_val(data) }) } #[cfg(test)] mod test { use std::mem::*; use super::*; #[test] fn custom_block_ocamlrep_size() { assert_eq!( size_of::<CustomBlockOcamlRep<u8>>(), 2 * size_of::<Value<'_>>() ); } #[test] fn custom_block_ocamlrep_align() { assert_eq!( align_of::<CustomBlockOcamlRep<u8>>(), align_of::<Value<'_>>() ); } }
drop_value
identifier_name
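The serialize/deserialize hooks in this file implement a small length-prefixed framing on top of the OCaml extern/intern channel: an 8-byte integer carrying the byte count, followed by the raw bytes produced by `CamlSerialize::serialize`. The sketch below reproduces that framing against an in-memory buffer instead of `caml_serialize_int_8`/`caml_deserialize_block_1`; the `IntBox` payload and the little-endian byte order are illustrative assumptions, not part of the crate.

struct IntBox(i64);

// Write an 8-byte length prefix followed by the payload bytes.
fn write_framed(out: &mut Vec<u8>, payload: &[u8]) {
    out.extend_from_slice(&(payload.len() as i64).to_le_bytes());
    out.extend_from_slice(payload);
}

// Read the length prefix, then exactly that many payload bytes; return the
// payload and whatever remains of the input.
fn read_framed(input: &[u8]) -> (Vec<u8>, &[u8]) {
    let mut len_bytes = [0u8; 8];
    len_bytes.copy_from_slice(&input[..8]);
    let len = i64::from_le_bytes(len_bytes) as usize;
    let (payload, rest) = input[8..].split_at(len);
    (payload.to_vec(), rest)
}

fn main() {
    let value = IntBox(42);
    let mut channel = Vec::new();
    write_framed(&mut channel, &value.0.to_le_bytes());

    let (payload, rest) = read_framed(&channel);
    assert!(rest.is_empty());

    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(&payload);
    let decoded = IntBox(i64::from_le_bytes(bytes));
    assert_eq!(decoded.0, 42);
}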
lib.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. //! Library to build `Custom_tag` OCaml values. use std::ffi::CStr; use std::ffi::CString; use std::mem::MaybeUninit; use std::ops::Deref; use std::os::raw::c_char; use std::os::raw::c_int; use std::os::raw::c_void; use std::rc::Rc; use ocamlrep::from; use ocamlrep::Allocator; use ocamlrep::FromError; use ocamlrep::FromOcamlRep; use ocamlrep::ToOcamlRep; use ocamlrep::Value; use ocamlrep::CUSTOM_TAG; use ocamlrep_ocamlpool::catch_unwind; extern "C" { fn caml_register_custom_operations(ops: *const CustomOperations); fn caml_serialize_block_1(data: *const u8, len: usize); fn caml_serialize_int_8(x: i64); fn caml_deserialize_sint_8() -> i64; fn caml_deserialize_block_1(data: *mut u8, len: usize); } /// Struct containing the operations for a custom OCaml block. /// /// This is the Rust encoding of OCaml's `struct custom_operations`. /// /// For more information on the fields see /// [the OCaml guide](https://caml.inria.fr/pub/docs/manual-ocaml/intfc.html#ss:c-custom-ops) #[repr(C)] pub struct CustomOperations { identifier: *const c_char, finalize: Option<extern "C" fn(usize) -> ()>, compare: Option<extern "C" fn(usize, usize) -> c_int>, hash: Option<extern "C" fn(usize) -> isize>, serialize: Option<extern "C" fn(usize, *mut usize, *mut usize) -> ()>, deserialize: Option<extern "C" fn(*mut c_void) -> usize>, compare_ext: Option<extern "C" fn(usize, usize) -> c_int>, /// Not implemented yet, always set to NULL. custom_fixed_length: *const c_void, } impl CustomOperations { /// Create a new custom block with the given identifier. /// /// All function pointers will be set to NULL by default. fn new(identifier: &'static CStr) -> Self { Self { identifier: identifier.as_ptr(), finalize: None, compare: None, hash: None, serialize: None, deserialize: None, compare_ext: None, custom_fixed_length: std::ptr::null(), } } } /// A wrapper around a Rust type that allows it /// to be written into/read from OCaml memory and managed by /// the OCaml GC. /// /// The value still lives on the Rust heap in an `Rc`'d pointer, /// and the `Rc`-pointer itself will be written to OCaml memory. /// /// # Examples /// /// Expose Rust type: /// /// ```rust /// use ocamlrep_custom::caml_serialize_default_impls; /// use ocamlrep_custom::{CamlSerialize, Custom}; /// use ocamlrep_ocamlpool::ocaml_ffi; /// use std::cell::Cell; /// /// pub struct Counter(Cell<isize>); /// /// impl CamlSerialize for Counter { /// caml_serialize_default_impls!(); /// } /// /// ocaml_ffi! 
{ /// fn counter_new() -> Custom<Counter> { /// Custom::from(Counter(Cell::new(0))) /// } /// /// fn counter_inc(counter: Custom<Counter>) -> Custom<Counter> { /// counter.0.set(counter.0.get() - 1); /// counter /// } /// /// fn counter_read(counter: Custom<Counter>) -> isize { /// counter.0.get() /// } /// } /// ``` /// /// From OCaml: /// /// ```ocaml /// type counter; (* abstract type *) /// /// external counter_new : unit -> counter = "counter_new" /// external counter_inc: counter -> unit = "counter_inc" /// external counter_read : counter -> isize = "counter_read" /// /// let () = /// let cnt = counter_new () in (* will be dropped on GC finalization *) /// assert (counter_read cnt == 0); /// counter_inc cnt; /// assert (counter_read cnt == 1) /// ``` pub struct Custom<T: CamlSerialize>(Rc<T>); impl<T: CamlSerialize> Custom<T> { /// Create a new `ToCustom` wrapper by taking ownership of the value. pub fn from(x: T) -> Self { Self::new(Rc::new(x)) } /// Create a new `ToCustom` directly from an `Rc`'d value. pub fn new(x: Rc<T>) -> Self { Self(x) } /// Get a reference to the inner `Rc` pub fn inner(&self) -> &Rc<T> { &self.0 } } impl<T: CamlSerialize> Deref for Custom<T> { type Target = T; fn deref(&self) -> &T { self.0.deref() } } /// A custom block has two words: a pointer to the CustomOperations struct, /// and a pointer the the value. Our values are ref-counted, but an Rc pointer /// is just pointer-sized. #[repr(C)] struct CustomBlockOcamlRep<T>(&'static CustomOperations, Rc<T>); const CUSTOM_BLOCK_SIZE_IN_BYTES: usize = std::mem::size_of::<CustomBlockOcamlRep<()>>(); const CUSTOM_BLOCK_SIZE_IN_WORDS: usize = CUSTOM_BLOCK_SIZE_IN_BYTES / std::mem::size_of::<Value<'_>>(); impl<T: CamlSerialize> ToOcamlRep for Custom<T> { fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> { let ops: &'static CustomOperations = <T as CamlSerialize>::operations(); let mut block = alloc.block_with_size_and_tag(CUSTOM_BLOCK_SIZE_IN_WORDS, CUSTOM_TAG); // Safety: we don't call any method on `alloc` after this method. let block_ptr: *mut Value<'_> = unsafe { alloc.block_ptr_mut(&mut block) }; // Safety: `alloc` guarantees that the `block_ptr` returned by // `block_ptr_mut` is aligend to `align_of::<Value>()` and valid // for reads and writes of `CUSTOM_BLOCK_SIZE_IN_WORDS * // size_of::<Value>()` bytes. Since `CustomBlockOcamlRep` has size // `CUSTOM_BLOCK_SIZE_IN_WORDS * size_of::<Value>()`, its // alignment is equal to `align_of::<Value>()`, and no other // reference to our newly-allocated block can exist, it's safe for us to // interpret `block_ptr` as a `&mut CustomBlockOcamlRep`. let block_ptr = block_ptr as *mut MaybeUninit<CustomBlockOcamlRep<T>>; let custom_block = unsafe { block_ptr.as_mut().unwrap() }; // Write the address of the operations struct to the first word, and the // pointer to the value to the second word. *custom_block = MaybeUninit::new(CustomBlockOcamlRep(ops, Rc::clone(&self.0))); block.build() } } impl<T: CamlSerialize> FromOcamlRep for Custom<T> { fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> { let rc = rc_from_value::<T>(value)?; let rc = Rc::clone(rc); Ok(Custom::new(rc)) } } /// Helper function to fetch a reference to the `Rc` from the OCaml representation /// of a custom block. 
fn rc_from_value<'a, T: CamlSerialize>(value: Value<'a>) -> Result<&'a Rc<T>, FromError> { let block = from::expect_block(value)?; from::expect_block_tag(block, CUSTOM_TAG)?; from::expect_block_size(block, CUSTOM_BLOCK_SIZE_IN_WORDS)?; // We still don't know whether this block is in fact a // CustomBlockOcamlRep<T>--it may be a CustomBlockOcamlRep<U>, or some // other custom block which happens to be the same size. We can verify // that the block is actually a CustomBlockOcamlRep<T> by checking that // it points to the correct CustomOperations struct. let ops = <T as CamlSerialize>::operations(); if !std::ptr::eq(ops, block[0].to_bits() as *const CustomOperations) { return Err(FromError::UnexpectedCustomOps { expected: ops as *const _ as usize, actual: block[0].to_bits(), }); } let value_ptr = value.to_bits() as *const CustomBlockOcamlRep<T>; // Safety: `value_ptr` is guaranteed to be aligned to // `align_of::<Value>()`, and our use of `expect_block_size` guarantees // that the pointer is valid for reads of `CUSTOM_BLOCK_SIZE_IN_WORDS * // `size_of::<Value>()` bytes. Since the first field points to the right // operations struct, we either have a valid `CustomBlockOCamlRep<T>` // (i.e., constructed above in our `ToOcamlRep` implementation) or // someone went out of their way to construct an invalid one. Assume // it's valid and read in the `CustomBlockOcamlRep<T>`. let custom_block = unsafe { value_ptr.as_ref().unwrap() }; Ok(&custom_block.1) } /// Trait that allows OCaml serialization and deserialization. /// /// If you want to support serialization/deserialization, you /// **MUST** call `CamlSerialize::register()` when starting up /// the program. /// /// This will register your type in the OCaml runtime, allowing /// deserialization. /// /// Rust does not support different instantiations of the default /// implementation for different implementors of trait types. Therefore, /// you must implement `type_identifier`, `operations` and `register` /// manually when implementing this trait for a type. You can use /// the `caml_serialize_default_impls!()` to do that automatically: /// /// ``` /// impl CamlSerialize for MyType { /// caml_serialize_default_impls!(); /// } /// ``` pub trait CamlSerialize: Sized { /// Get the type name. fn type_identifier() -> &'static CStr; /// Get the type's custom operations struct. /// /// Always has to return the same reference! If not, the /// OCaml-to-Rust conversion will fail. /// /// The returned structure is not intended to be used by /// a programmer. Using it directly by e.g. injecting it /// into OCaml custom blocks is dangerous and can cause /// undefined behavior. Don't do it! fn operations() -> &'static CustomOperations; /// Register the type with the OCaml system. /// /// # Safety /// /// Must not be called from multiple threads. /// /// This function interacts with the OCaml runtime, which is not thread-safe. /// If any other threads are attempting to interact with the OCaml runtime /// or its custom operations table (e.g., by invoking this function, or by /// executing OCaml code using custom blocks) when this function is invoked, /// undefined behavior will result. /// /// # Examples /// /// ``` /// use ocamlrep_custom::CamlSerialize; /// use ocamlrep_ocamlpool::ocaml_ffi; /// /// struct IntBox(isize); /// /// impl CamlSerialize for IntBox { /// caml_serialize_default_impls!(); /// fn serialize(&self) -> Vec<u8> { ... } /// fn deserialize(buffer: &[u8]) -> Self { ... } /// } /// /// ocaml_ffi! 
{ /// fn register_custom_types() { /// // Once `register_custom_types` has been invoked from OCaml, IntBox /// // can be serialized and deserialized from OCaml using the Marshal /// // module. /// // /// // Safety: this will be called from OCaml, as such nothing else will /// // be interacting with the OCaml runtime. /// unsafe { IntBox::register() }; /// } /// } /// ``` unsafe fn register(); /// Convert a value to an array of bytes. /// /// The default implementation panics. fn serialize(&self) -> Vec<u8> { panic!( "serialization not implemented for {:?}", Self::type_identifier() ) } /// Deserialize a value form an array of bytes. /// /// The default implementation panics. fn deserialize(_data: &[u8]) -> Self { panic!( "deserialization not implemented for {:?}", Self::type_identifier() ) } } #[macro_export] macro_rules! caml_serialize_default_impls { () => { fn type_identifier() -> &'static std::ffi::CStr { static ONCE: std::sync::Once = std::sync::Once::new(); static mut TYPE_NAME: Option<std::ffi::CString> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { TYPE_NAME = Some($crate::type_identifier_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { TYPE_NAME.as_ref().unwrap() } } fn operations() -> &'static $crate::CustomOperations { static ONCE: std::sync::Once = std::sync::Once::new(); static mut OPS_STRUCT: Option<$crate::CustomOperations> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { OPS_STRUCT = Some($crate::operations_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { OPS_STRUCT.as_ref().unwrap() } } unsafe fn register() { static mut IS_REGISTERED: bool = false; // Safety: Can only be called in a single-threaded context! if IS_REGISTERED { return; } IS_REGISTERED = true; let ops = Self::operations(); $crate::register_helper::<Self>(ops) } }; } /// Helper used for the `caml_serialize_default_impls` macro pub fn type_identifier_helper<T>() -> CString { let name = format!("ocamlrep.custom.{}", std::any::type_name::<T>()); std::ffi::CString::new(name).unwrap() } /// Helper used for the `caml_serialize_default_impls` macro pub fn operations_helper<T: CamlSerialize>() -> CustomOperations { let type_identifier = <T as CamlSerialize>::type_identifier(); let mut ops = CustomOperations::new(type_identifier); ops.finalize = Some(drop_value::<T>); ops.serialize = Some(serialize_value::<T>); ops.deserialize = Some(deserialize_value::<T>); ops } /// Helper used for the `caml_serialize_default_impls` macro /// /// Should not be used directly. Interacts with the OCaml runtime and is /// thus unsafe to call in a multi-threaded context. pub unsafe fn register_helper<T>(ops: &'static CustomOperations) { // Safety: operations struct has a static lifetime, it will live forever! caml_register_custom_operations(ops as *const CustomOperations); } /// Helper function used by `operations_helper`. Returns a finalizer for custom /// blocks containing an `Rc<T>`. 
extern "C" fn drop_value<T: CamlSerialize>(value: usize) { let _: usize = catch_unwind(|| { // Safety: We trust here that CustomOperations structs containing this // `drop_value` instance will only ever be referenced by custom blocks // matching the layout of `CustomBlockOcamlRep`. If that's so, then this // function should only be invoked by the OCaml runtime on a pointer to // a CustomBlockOcamlRep<T> created by T::to_ocamlrep. Such a pointer // would be aligned and valid. let custom_block_ptr = value as *mut CustomBlockOcamlRep<T>; let custom_block = unsafe { custom_block_ptr.as_mut().unwrap() }; // The `Rc` will be dropped here, and its reference count will decrease // by one (possibly freeing the referenced value). // Safety: Since the OCaml runtime will only invoke the finalizer for a // value which will never again be used, it is safe to use // `drop_in_place` (i.e., our finalizer will only be invoked once, so we // won't cause a double-drop). unsafe { std::ptr::drop_in_place(&mut custom_block.1); } 0 }); } /// Helper function for serialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn serialize_value<T: CamlSerialize>( value: usize, bsize_32: *mut usize, bsize_64: *mut usize, ) { let _: usize = catch_unwind(|| { // Safety: Only called by the OCaml runtime (we don't expose a means of // invoking this function from Rust), which provides some OCaml // CUSTOM_TAG block as the value. let value = unsafe { Value::from_bits(value) }; // Only called by the OCaml runtime, when serializing // a Custom-object managed by the OCaml GC. let rc = rc_from_value::<T>(value).unwrap(); let bytes: Vec<u8> = rc.serialize(); let bytes_ptr = bytes.as_ptr(); // Safety: As above, we don't expose a means of invoking this function // from Rust--it can only be invoked by the OCaml runtime while // serializing a value. It is safe to invoke OCaml serialization // functions in this context. unsafe { let len = bytes.len(); caml_serialize_int_8(len.try_into().unwrap()); caml_serialize_block_1(bytes_ptr, len); // The size taken up in the data-part of the custom block. *bsize_32 = std::mem::size_of::<u32>(); *bsize_64 = std::mem::size_of::<u64>(); } 0 }); } /// Helper function for deserialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn deserialize_value<T: CamlSerialize>(data_ptr: *mut c_void) -> usize { catch_unwind(|| { // Get the serialized bytes from the input channel. let bytes = unsafe { // Safety: We don't expose a means of invoking this function from // Rust--`deserialize_value` can only be invoked by the OCaml // runtime while deserializing a custom block value. It is safe to // invoke OCaml deserialization functions in this context. let len: usize = caml_deserialize_sint_8().try_into().unwrap(); let mut buf: Vec<u8> = Vec::with_capacity(len); // Safety: len <= capacity. The elements aren't initialized at this // time, but we trust that caml_deserialize_block_1 will fill `len` // bytes of the buffer. #[allow(clippy::uninit_vec)] buf.set_len(len); // Safety: As above, `deserialize_value` can only be invoked by the // OCaml runtime during custom block deserialization. caml_deserialize_block_1(buf.as_mut_ptr(), len); buf }; // Actually deserialize those bytes into a T. 
let val: T = CamlSerialize::deserialize(&bytes); // Safety: The OCaml runtime will give us a data buffer which is // usize-aligned and valid for reads and writes of bsize_32 or bsize_64 // (as provided by `serialize_value`, above) bytes (depending on system // architecture). This is sufficient for `Rc<T>` (which has the size and // alignment of usize). let data_ptr = data_ptr as *mut MaybeUninit<Rc<T>>; let data = unsafe { data_ptr.as_mut().unwrap() }; *data = MaybeUninit::new(Rc::new(val)); // Return the size of the value we wrote to our output pointer. The // OCaml runtime will verify that it matches the expected // bsize_32/bsize_64 written by the serializer. std::mem::size_of_val(data) }) } #[cfg(test)] mod test { use std::mem::*; use super::*; #[test] fn custom_block_ocamlrep_size() { assert_eq!( size_of::<CustomBlockOcamlRep<u8>>(), 2 * size_of::<Value<'_>>() ); } #[test] fn custom_block_ocamlrep_align()
}
{ assert_eq!( align_of::<CustomBlockOcamlRep<u8>>(), align_of::<Value<'_>>() ); }
identifier_body
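`caml_serialize_default_impls!` caches the type identifier and the operations struct behind a `std::sync::Once` guarding a `static mut Option`, so each is computed once and only read afterwards. Below is a sketch of the same caching idea using `std::sync::OnceLock`, the safe standard-library equivalent of that pattern; it illustrates the initialization scheme only and is not a drop-in replacement for the macro.

use std::sync::OnceLock;

// Build the identifier string once, on first use, and hand out the same
// &'static reference on every later call.
fn cached_type_identifier() -> &'static str {
    static NAME: OnceLock<String> = OnceLock::new();
    NAME.get_or_init(|| format!("ocamlrep.custom.{}", std::any::type_name::<u64>()))
}

fn main() {
    let first = cached_type_identifier();
    let second = cached_type_identifier();
    // Same allocation both times: the initializer ran exactly once.
    assert!(std::ptr::eq(first, second));
    println!("{first}");
}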
lib.rs
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. //! Library to build `Custom_tag` OCaml values. use std::ffi::CStr; use std::ffi::CString; use std::mem::MaybeUninit; use std::ops::Deref; use std::os::raw::c_char; use std::os::raw::c_int; use std::os::raw::c_void; use std::rc::Rc; use ocamlrep::from; use ocamlrep::Allocator; use ocamlrep::FromError; use ocamlrep::FromOcamlRep; use ocamlrep::ToOcamlRep; use ocamlrep::Value; use ocamlrep::CUSTOM_TAG; use ocamlrep_ocamlpool::catch_unwind; extern "C" { fn caml_register_custom_operations(ops: *const CustomOperations); fn caml_serialize_block_1(data: *const u8, len: usize); fn caml_serialize_int_8(x: i64); fn caml_deserialize_sint_8() -> i64; fn caml_deserialize_block_1(data: *mut u8, len: usize); } /// Struct containing the operations for a custom OCaml block. /// /// This is the Rust encoding of OCaml's `struct custom_operations`. /// /// For more information on the fields see /// [the OCaml guide](https://caml.inria.fr/pub/docs/manual-ocaml/intfc.html#ss:c-custom-ops) #[repr(C)] pub struct CustomOperations { identifier: *const c_char, finalize: Option<extern "C" fn(usize) -> ()>, compare: Option<extern "C" fn(usize, usize) -> c_int>, hash: Option<extern "C" fn(usize) -> isize>, serialize: Option<extern "C" fn(usize, *mut usize, *mut usize) -> ()>, deserialize: Option<extern "C" fn(*mut c_void) -> usize>, compare_ext: Option<extern "C" fn(usize, usize) -> c_int>, /// Not implemented yet, always set to NULL. custom_fixed_length: *const c_void, } impl CustomOperations { /// Create a new custom block with the given identifier. /// /// All function pointers will be set to NULL by default. fn new(identifier: &'static CStr) -> Self { Self { identifier: identifier.as_ptr(), finalize: None, compare: None, hash: None, serialize: None, deserialize: None, compare_ext: None, custom_fixed_length: std::ptr::null(), } } } /// A wrapper around a Rust type that allows it /// to be written into/read from OCaml memory and managed by /// the OCaml GC. /// /// The value still lives on the Rust heap in an `Rc`'d pointer, /// and the `Rc`-pointer itself will be written to OCaml memory. /// /// # Examples /// /// Expose Rust type: /// /// ```rust /// use ocamlrep_custom::caml_serialize_default_impls; /// use ocamlrep_custom::{CamlSerialize, Custom}; /// use ocamlrep_ocamlpool::ocaml_ffi; /// use std::cell::Cell; /// /// pub struct Counter(Cell<isize>); /// /// impl CamlSerialize for Counter { /// caml_serialize_default_impls!(); /// } /// /// ocaml_ffi! 
{ /// fn counter_new() -> Custom<Counter> { /// Custom::from(Counter(Cell::new(0))) /// } /// /// fn counter_inc(counter: Custom<Counter>) -> Custom<Counter> { /// counter.0.set(counter.0.get() - 1); /// counter /// } /// /// fn counter_read(counter: Custom<Counter>) -> isize { /// counter.0.get() /// } /// } /// ``` /// /// From OCaml: /// /// ```ocaml /// type counter; (* abstract type *) /// /// external counter_new : unit -> counter = "counter_new" /// external counter_inc: counter -> unit = "counter_inc" /// external counter_read : counter -> isize = "counter_read" /// /// let () = /// let cnt = counter_new () in (* will be dropped on GC finalization *) /// assert (counter_read cnt == 0); /// counter_inc cnt; /// assert (counter_read cnt == 1) /// ``` pub struct Custom<T: CamlSerialize>(Rc<T>); impl<T: CamlSerialize> Custom<T> { /// Create a new `ToCustom` wrapper by taking ownership of the value. pub fn from(x: T) -> Self { Self::new(Rc::new(x)) } /// Create a new `ToCustom` directly from an `Rc`'d value. pub fn new(x: Rc<T>) -> Self { Self(x) } /// Get a reference to the inner `Rc` pub fn inner(&self) -> &Rc<T> { &self.0 } } impl<T: CamlSerialize> Deref for Custom<T> { type Target = T; fn deref(&self) -> &T { self.0.deref() } } /// A custom block has two words: a pointer to the CustomOperations struct, /// and a pointer the the value. Our values are ref-counted, but an Rc pointer /// is just pointer-sized. #[repr(C)] struct CustomBlockOcamlRep<T>(&'static CustomOperations, Rc<T>); const CUSTOM_BLOCK_SIZE_IN_BYTES: usize = std::mem::size_of::<CustomBlockOcamlRep<()>>(); const CUSTOM_BLOCK_SIZE_IN_WORDS: usize = CUSTOM_BLOCK_SIZE_IN_BYTES / std::mem::size_of::<Value<'_>>(); impl<T: CamlSerialize> ToOcamlRep for Custom<T> { fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> { let ops: &'static CustomOperations = <T as CamlSerialize>::operations(); let mut block = alloc.block_with_size_and_tag(CUSTOM_BLOCK_SIZE_IN_WORDS, CUSTOM_TAG); // Safety: we don't call any method on `alloc` after this method. let block_ptr: *mut Value<'_> = unsafe { alloc.block_ptr_mut(&mut block) }; // Safety: `alloc` guarantees that the `block_ptr` returned by // `block_ptr_mut` is aligend to `align_of::<Value>()` and valid // for reads and writes of `CUSTOM_BLOCK_SIZE_IN_WORDS * // size_of::<Value>()` bytes. Since `CustomBlockOcamlRep` has size // `CUSTOM_BLOCK_SIZE_IN_WORDS * size_of::<Value>()`, its // alignment is equal to `align_of::<Value>()`, and no other // reference to our newly-allocated block can exist, it's safe for us to // interpret `block_ptr` as a `&mut CustomBlockOcamlRep`. let block_ptr = block_ptr as *mut MaybeUninit<CustomBlockOcamlRep<T>>; let custom_block = unsafe { block_ptr.as_mut().unwrap() }; // Write the address of the operations struct to the first word, and the // pointer to the value to the second word. *custom_block = MaybeUninit::new(CustomBlockOcamlRep(ops, Rc::clone(&self.0))); block.build() } } impl<T: CamlSerialize> FromOcamlRep for Custom<T> { fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> { let rc = rc_from_value::<T>(value)?; let rc = Rc::clone(rc); Ok(Custom::new(rc)) } } /// Helper function to fetch a reference to the `Rc` from the OCaml representation /// of a custom block. 
fn rc_from_value<'a, T: CamlSerialize>(value: Value<'a>) -> Result<&'a Rc<T>, FromError> { let block = from::expect_block(value)?; from::expect_block_tag(block, CUSTOM_TAG)?; from::expect_block_size(block, CUSTOM_BLOCK_SIZE_IN_WORDS)?; // We still don't know whether this block is in fact a // CustomBlockOcamlRep<T>--it may be a CustomBlockOcamlRep<U>, or some // other custom block which happens to be the same size. We can verify // that the block is actually a CustomBlockOcamlRep<T> by checking that // it points to the correct CustomOperations struct. let ops = <T as CamlSerialize>::operations(); if !std::ptr::eq(ops, block[0].to_bits() as *const CustomOperations)
let value_ptr = value.to_bits() as *const CustomBlockOcamlRep<T>; // Safety: `value_ptr` is guaranteed to be aligned to // `align_of::<Value>()`, and our use of `expect_block_size` guarantees // that the pointer is valid for reads of `CUSTOM_BLOCK_SIZE_IN_WORDS * // `size_of::<Value>()` bytes. Since the first field points to the right // operations struct, we either have a valid `CustomBlockOCamlRep<T>` // (i.e., constructed above in our `ToOcamlRep` implementation) or // someone went out of their way to construct an invalid one. Assume // it's valid and read in the `CustomBlockOcamlRep<T>`. let custom_block = unsafe { value_ptr.as_ref().unwrap() }; Ok(&custom_block.1) } /// Trait that allows OCaml serialization and deserialization. /// /// If you want to support serialization/deserialization, you /// **MUST** call `CamlSerialize::register()` when starting up /// the program. /// /// This will register your type in the OCaml runtime, allowing /// deserialization. /// /// Rust does not support different instantiations of the default /// implementation for different implementors of trait types. Therefore, /// you must implement `type_identifier`, `operations` and `register` /// manually when implementing this trait for a type. You can use /// the `caml_serialize_default_impls!()` to do that automatically: /// /// ``` /// impl CamlSerialize for MyType { /// caml_serialize_default_impls!(); /// } /// ``` pub trait CamlSerialize: Sized { /// Get the type name. fn type_identifier() -> &'static CStr; /// Get the type's custom operations struct. /// /// Always has to return the same reference! If not, the /// OCaml-to-Rust conversion will fail. /// /// The returned structure is not intended to be used by /// a programmer. Using it directly by e.g. injecting it /// into OCaml custom blocks is dangerous and can cause /// undefined behavior. Don't do it! fn operations() -> &'static CustomOperations; /// Register the type with the OCaml system. /// /// # Safety /// /// Must not be called from multiple threads. /// /// This function interacts with the OCaml runtime, which is not thread-safe. /// If any other threads are attempting to interact with the OCaml runtime /// or its custom operations table (e.g., by invoking this function, or by /// executing OCaml code using custom blocks) when this function is invoked, /// undefined behavior will result. /// /// # Examples /// /// ``` /// use ocamlrep_custom::CamlSerialize; /// use ocamlrep_ocamlpool::ocaml_ffi; /// /// struct IntBox(isize); /// /// impl CamlSerialize for IntBox { /// caml_serialize_default_impls!(); /// fn serialize(&self) -> Vec<u8> { ... } /// fn deserialize(buffer: &[u8]) -> Self { ... } /// } /// /// ocaml_ffi! { /// fn register_custom_types() { /// // Once `register_custom_types` has been invoked from OCaml, IntBox /// // can be serialized and deserialized from OCaml using the Marshal /// // module. /// // /// // Safety: this will be called from OCaml, as such nothing else will /// // be interacting with the OCaml runtime. /// unsafe { IntBox::register() }; /// } /// } /// ``` unsafe fn register(); /// Convert a value to an array of bytes. /// /// The default implementation panics. fn serialize(&self) -> Vec<u8> { panic!( "serialization not implemented for {:?}", Self::type_identifier() ) } /// Deserialize a value form an array of bytes. /// /// The default implementation panics. fn deserialize(_data: &[u8]) -> Self { panic!( "deserialization not implemented for {:?}", Self::type_identifier() ) } } #[macro_export] macro_rules! 
caml_serialize_default_impls { () => { fn type_identifier() -> &'static std::ffi::CStr { static ONCE: std::sync::Once = std::sync::Once::new(); static mut TYPE_NAME: Option<std::ffi::CString> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { TYPE_NAME = Some($crate::type_identifier_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { TYPE_NAME.as_ref().unwrap() } } fn operations() -> &'static $crate::CustomOperations { static ONCE: std::sync::Once = std::sync::Once::new(); static mut OPS_STRUCT: Option<$crate::CustomOperations> = None; ONCE.call_once(|| { // Safety: // - We've gated initialization, so it's thread safe. // - We only set the constant once. unsafe { OPS_STRUCT = Some($crate::operations_helper::<Self>()); } }); // Safety: // - By now the constant has been initialized, and once initialized // it is never changes. // - Concurrent reads are OK. unsafe { OPS_STRUCT.as_ref().unwrap() } } unsafe fn register() { static mut IS_REGISTERED: bool = false; // Safety: Can only be called in a single-threaded context! if IS_REGISTERED { return; } IS_REGISTERED = true; let ops = Self::operations(); $crate::register_helper::<Self>(ops) } }; } /// Helper used for the `caml_serialize_default_impls` macro pub fn type_identifier_helper<T>() -> CString { let name = format!("ocamlrep.custom.{}", std::any::type_name::<T>()); std::ffi::CString::new(name).unwrap() } /// Helper used for the `caml_serialize_default_impls` macro pub fn operations_helper<T: CamlSerialize>() -> CustomOperations { let type_identifier = <T as CamlSerialize>::type_identifier(); let mut ops = CustomOperations::new(type_identifier); ops.finalize = Some(drop_value::<T>); ops.serialize = Some(serialize_value::<T>); ops.deserialize = Some(deserialize_value::<T>); ops } /// Helper used for the `caml_serialize_default_impls` macro /// /// Should not be used directly. Interacts with the OCaml runtime and is /// thus unsafe to call in a multi-threaded context. pub unsafe fn register_helper<T>(ops: &'static CustomOperations) { // Safety: operations struct has a static lifetime, it will live forever! caml_register_custom_operations(ops as *const CustomOperations); } /// Helper function used by `operations_helper`. Returns a finalizer for custom /// blocks containing an `Rc<T>`. extern "C" fn drop_value<T: CamlSerialize>(value: usize) { let _: usize = catch_unwind(|| { // Safety: We trust here that CustomOperations structs containing this // `drop_value` instance will only ever be referenced by custom blocks // matching the layout of `CustomBlockOcamlRep`. If that's so, then this // function should only be invoked by the OCaml runtime on a pointer to // a CustomBlockOcamlRep<T> created by T::to_ocamlrep. Such a pointer // would be aligned and valid. let custom_block_ptr = value as *mut CustomBlockOcamlRep<T>; let custom_block = unsafe { custom_block_ptr.as_mut().unwrap() }; // The `Rc` will be dropped here, and its reference count will decrease // by one (possibly freeing the referenced value). // Safety: Since the OCaml runtime will only invoke the finalizer for a // value which will never again be used, it is safe to use // `drop_in_place` (i.e., our finalizer will only be invoked once, so we // won't cause a double-drop). unsafe { std::ptr::drop_in_place(&mut custom_block.1); } 0 }); } /// Helper function for serialization. 
Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn serialize_value<T: CamlSerialize>( value: usize, bsize_32: *mut usize, bsize_64: *mut usize, ) { let _: usize = catch_unwind(|| { // Safety: Only called by the OCaml runtime (we don't expose a means of // invoking this function from Rust), which provides some OCaml // CUSTOM_TAG block as the value. let value = unsafe { Value::from_bits(value) }; // Only called by the OCaml runtime, when serializing // a Custom-object managed by the OCaml GC. let rc = rc_from_value::<T>(value).unwrap(); let bytes: Vec<u8> = rc.serialize(); let bytes_ptr = bytes.as_ptr(); // Safety: As above, we don't expose a means of invoking this function // from Rust--it can only be invoked by the OCaml runtime while // serializing a value. It is safe to invoke OCaml serialization // functions in this context. unsafe { let len = bytes.len(); caml_serialize_int_8(len.try_into().unwrap()); caml_serialize_block_1(bytes_ptr, len); // The size taken up in the data-part of the custom block. *bsize_32 = std::mem::size_of::<u32>(); *bsize_64 = std::mem::size_of::<u64>(); } 0 }); } /// Helper function for deserialization. Interacts with the OCaml runtime, so must /// only be invoked by the OCaml runtime when serializing a custom block. extern "C" fn deserialize_value<T: CamlSerialize>(data_ptr: *mut c_void) -> usize { catch_unwind(|| { // Get the serialized bytes from the input channel. let bytes = unsafe { // Safety: We don't expose a means of invoking this function from // Rust--`deserialize_value` can only be invoked by the OCaml // runtime while deserializing a custom block value. It is safe to // invoke OCaml deserialization functions in this context. let len: usize = caml_deserialize_sint_8().try_into().unwrap(); let mut buf: Vec<u8> = Vec::with_capacity(len); // Safety: len <= capacity. The elements aren't initialized at this // time, but we trust that caml_deserialize_block_1 will fill `len` // bytes of the buffer. #[allow(clippy::uninit_vec)] buf.set_len(len); // Safety: As above, `deserialize_value` can only be invoked by the // OCaml runtime during custom block deserialization. caml_deserialize_block_1(buf.as_mut_ptr(), len); buf }; // Actually deserialize those bytes into a T. let val: T = CamlSerialize::deserialize(&bytes); // Safety: The OCaml runtime will give us a data buffer which is // usize-aligned and valid for reads and writes of bsize_32 or bsize_64 // (as provided by `serialize_value`, above) bytes (depending on system // architecture). This is sufficient for `Rc<T>` (which has the size and // alignment of usize). let data_ptr = data_ptr as *mut MaybeUninit<Rc<T>>; let data = unsafe { data_ptr.as_mut().unwrap() }; *data = MaybeUninit::new(Rc::new(val)); // Return the size of the value we wrote to our output pointer. The // OCaml runtime will verify that it matches the expected // bsize_32/bsize_64 written by the serializer. std::mem::size_of_val(data) }) } #[cfg(test)] mod test { use std::mem::*; use super::*; #[test] fn custom_block_ocamlrep_size() { assert_eq!( size_of::<CustomBlockOcamlRep<u8>>(), 2 * size_of::<Value<'_>>() ); } #[test] fn custom_block_ocamlrep_align() { assert_eq!( align_of::<CustomBlockOcamlRep<u8>>(), align_of::<Value<'_>>() ); } }
{ return Err(FromError::UnexpectedCustomOps { expected: ops as *const _ as usize, actual: block[0].to_bits(), }); }
conditional_block
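The conditional above is the type check at the heart of `rc_from_value`: a block of the right size and tag is only accepted as a `CustomBlockOcamlRep<T>` if its first word is the address of `T::operations()`, compared with `std::ptr::eq` (address identity, not structural equality). A minimal standalone sketch of that idea, with two made-up static "ops tables":

struct Ops {
    name: &'static str,
}

static INT_OPS: Ops = Ops { name: "int" };
static STR_OPS: Ops = Ops { name: "str" };

// Accept the block only if its stored ops pointer is INT_OPS itself; a
// different table with identical contents would still be rejected.
fn is_int_block(stored_ops: *const Ops) -> bool {
    std::ptr::eq(stored_ops, &INT_OPS)
}

fn main() {
    assert!(is_int_block(&INT_OPS));
    assert!(!is_int_block(&STR_OPS));
    println!("checked blocks against the {} ops table", INT_OPS.name);
}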
run.py
''' CodeOfWar COSC 370 AI Battlecode Project Jennifer Mince, Carly Good, Matt Manoly, Zachary Taylor Code for Inspiration: https://github.com/AnPelec/Battlecode-2018/blob/master/Project%20Achilles/run.py ''' import battlecode as bc import random import sys import traceback import time from datetime import datetime import os print(os.getcwd()) print("pystarting") # A GameController is the main type that you talk to the game with. # Its constructor will connect to a running game. gc = bc.GameController() directions = list(bc.Direction) #get our team from API my_team = gc.team() #these dictionaries set up the priorities for each unit to interact with priority_rangers = { bc.UnitType.Worker : 3, bc.UnitType.Knight : 2, bc.UnitType.Healer : 1, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 1, bc.UnitType.Factory : 4, bc.UnitType.Rocket : 4, } priority_healers = { bc.UnitType.Worker : 4, bc.UnitType.Knight : 3, bc.UnitType.Healer : 2, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 2 } #a directions dictionary used to approach approach_dir = { (0,1) : bc.Direction.North, (1,1) : bc.Direction.Northeast, (1,0) : bc.Direction.East, (1,-1) : bc.Direction.Southeast, (0,-1) : bc.Direction.South, (-1,-1) : bc.Direction.Southwest, (-1,0) : bc.Direction.West, (-1,1) : bc.Direction.Northwest, } #sets the my_team and enemy_team variables to know who to attack or help enemy_team = bc.Team.Red if my_team == bc.Team.Red: enemy_team = bc.Team.Blue #find the start map and original units at start of game start_map = gc.starting_map(bc.Planet.Earth) init_units = start_map.initial_units for i in range(init_units.__len__()): if init_units.__getitem__(i).team == enemy_team: enemy_spawn = init_units.__getitem__(i).location.map_location() #flag for sending units into battle, flipped when an army has begun amassing release_units = False #flag for sending units to the rockets for escape escape = False fight = False print("pystarted") random.seed(datetime.now()) #Research order gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Mage) #method to move any unit def move(unit): #API returns any possible moves in list form possible_directions = list(bc.Direction) choices = [] #find only the moves that are valid moves for direct in possible_directions: if gc.can_move(unit.id, direct): choices.append(direct) #if not choices: # gc.disintegrate_unit(unit.id) # return if choices: dir = random.choice(choices) #if unit can move and is ready to move, randomly move them to a new position if gc.is_move_ready(unit.id) and gc.can_move(unit.id, dir): gc.move_robot(unit.id, dir) #Try to approach a given target destination. (Note: NOT unit) def approach(unit, location, destination): global approach_dir #Find the difference in unit position and reduce it to a simple coordinate pair #for use with the approach_dir dictionary. 
x_diff = destination.x - location.x y_diff = destination.y - location.y x_move = x_diff y_move = y_diff #if there is an x_diff/y_diff, reduce it to a movement in one direction. if x_diff != 0: x_move = x_diff/abs(x_diff) if y_diff != 0: y_move = y_diff/abs(y_diff) #if there is no moves to make, exit. if (x_move,y_move) == (0,0): return #if we can move in an optimal direction, move that direction. dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if cant move in optimal direction, try moving in a similar direction if x_move == 0: x_move = random.choice([-1,1]) elif y_move == 0: y_move = random.choice([-1,1]) else: if x_diff > y_diff: y_move = 0 else: x_move = 0 dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if nothing else works, move randomly move(unit) #logic for worker units def workerWork(worker): global num_workers, total_number_factories, escape, full_vision, fight #if there is a worker deficit and we have the resources to replicate, #find a valid direction to do so. if num_workers < 7 and gc.karbonite() >= 60: for dir in directions: if gc.can_replicate(worker.id, dir): gc.replicate(worker.id, dir) return #once an action is performed, that worker is done nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: fight = True full_vision.extend(nearby) #build on any existing nearby blueprints, or repair damaged structures nearby = gc.sense_nearby_units(worker.location.map_location(), 2) for other in nearby: if gc.can_build(worker.id, other.id): gc.build(worker.id, other.id) return elif other.health < other.max_health and gc.can_repair(worker.id, other.id): gc.repair(worker.id, other.id) return #build factories until game reaches round 150, then focus on making units if gc.karbonite() > bc.UnitType.Factory.blueprint_cost() and gc.round() < 150: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Factory, dir): gc.blueprint(worker.id, bc.UnitType.Factory, dir) return if gc.karbonite() > bc.UnitType.Rocket.blueprint_cost() and gc.round() > 550: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Rocket, dir): gc.blueprint(worker.id, bc.UnitType.Rocket, dir) return #find a direction to harvest for dir in directions: if gc.can_harvest(worker.id, dir): gc.harvest(worker.id, dir) return #if this part of the code is reached, then the only thing left to do is move move(worker) #factoryProduce takes a factory and first to ungarrison any available units #then attempts to produce a ratio of a 4 rangers to 1 healer def factoryProduce(factory): global num_healers, num_rangers, release_units, fight garrison = unit.structure_garrison() if num_rangers + num_healers > 15 or fight: release_units = True #If a unit is garrisoned, release them in an available spot. if len(garrison) > 0 and release_units: for dir in directions: if gc.can_unload(factory.id, dir): gc.unload(factory.id, dir) if gc.round() > 650: return #If the factory is available to produce another unit. If we have enough #healers, produce rangers. 
if gc.can_produce_robot(factory.id, bc.UnitType.Ranger): if num_rangers < num_healers * 4: gc.produce_robot(factory.id, bc.UnitType.Ranger) else: gc.produce_robot(factory.id, bc.UnitType.Healer) return #Healer_heal finds units near the healer and attempts to heal them def Healer_heal(unit): global enemy_spawn, my_team, full_vision location = unit.location #find nearby units on team nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.attack_range(), my_team) #if can heal, heal heal = False if gc.is_heal_ready(unit.id): lowest_health = unit for other in nearby: if other.health < lowest_health.health and other.health < other.max_health: lowest_health = other heal = True if gc.can_heal(unit.id, lowest_health.id) and heal: gc.heal(unit.id, lowest_health.id) return #if no heal targets, walk towards the action if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) #Healer_overcharge finds a nearby unit and restores their ability charge. def Healer_overcharge(unit): global my_team #if we can't overcharge, exit if not gc.is_overcharge_ready(unit.id): return #cannot overcharge if not at research level 3 if bc.ResearchInfo().get_level(bc.UnitType.Healer) < 3: return #find our location location = unit.location #get all possible targets around, and choose one to heal possible_targets = sense_nearby_units_by_team(location.map_location(), unit.ability_range(), my_team) for other in possible_targets: if gc.can_heal(unit.id, other.id): gc.heal(unit.id, other.id) return #Mars Info Finding and Rocket variables marsMap = gc.starting_map(bc.Planet.Mars) marsHeight = marsMap.height marsWidth = marsMap.width #add to this variable as rockets are built safe_locations = [] #method to find a safe location on Mars to land using known Mars info from the API def
(): global safe_locations component_num = 0 for i in range(marsHeight): for j in range(marsWidth): if (i, j) not in safe_locations: temp_loc = bc.MapLocation(bc.Planet.Mars, i, j) try: if marsMap.is_passable_terrain_at(temp_loc): safe_locations.append((i, j)) #this stores the locations that are safe to use later component_num += 1 except Exception as e: print(i, j) print('Error:', e) #traceback.print_exc() #now choose a safe location to launch to per rocket def findRocketLand(rocket): global safe_locations #not sure what range to use temp_range= 5 for t in range(temp_range): return_value = random.choice(safe_locations) #calls locations from above method if (t < temp_range -1): continue return bc.MapLocation(bc.Planet.Mars, return_value[0], return_value[1]) #returns the map location to land on #method to launch the rocket def launch(unit): garrison = unit.structure_garrison() free_loc = findRocketLand(unit) if gc.can_launch_rocket(unit.id, free_loc): #if can launch, launch gc.launch_rocket(unit.id, free_loc) #method to unload and garrison the rocket once built def unloadRocket(rocket): garrison = unit.structure_garrison() if len(garrison) > 0: for d in directions: if gc.can_unload(unit.id, d): gc.unload(unit.id, d) find_locations_Mars() #method to move the units towards the rockets def moveUnitToRocket(unit,nearby): if not gc.is_move_ready(unit.id): return #if ready to move #get a location of the unit location = unit.location.map_location() #use directions from above best = directions[0] #set a distance closest_distance = 100000 #for each of nearby for x in nearby: if gc.can_load(x.id, unit.id): gc.load(x.id,unit.id) return next_location = x.location.map_location() #now the distance is from that location to the next one found current_distance = location.distance_squared_to(next_location) #if closer than the set closest distance, go there if current_distance < closest_distance: closest_distance = current_distance best = location.direction_to(next_location) #moving the units based off current location and if they can move range_index = 8 for i in range(8): if directions[i] == best: range_index = i break for i in range(4): temp_index = (range_index + i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return temp_index = (range_index - i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return #rangerAttack takes a unit and who is nearby to attempt an attack. def rangerAttack(unit, nearby): global priority_rangers best_target = 0 targets = [] #list of targets from least valuable to most #we find the best unit to attack from the priority_rangers dictionary #and attempt to attack the best unit. for enemy in nearby: #if enemy is too close, back away if gc.is_move_ready(unit.id): x_diff = unit.location.map_location().x - enemy.location.map_location().x y_diff = unit.location.map_location().y - enemy.location.map_location().y #backing away is done by reversing location and destination in approach function if (x_diff * x_diff) + (y_diff * y_diff) < 20: approach(unit,enemy.location.map_location(),unit.location.map_location()) if priority_rangers[enemy.unit_type] > best_target: best_target = priority_rangers[enemy.unit_type] targets.append(enemy) #if we can attack, and something is nearby to attack, do so. 
if gc.is_attack_ready(unit.id): for i in range(len(targets)-1,-1,-1): if gc.can_attack(unit.id, targets[i].id): gc.attack(unit.id, targets[i].id) return if gc.is_move_ready(unit.id): approach(unit,unit.location.map_location(),targets[-1].location.map_location()) #rangerLogic handles movement when no enemies are nearby, and attack orders. def rangerLogic(unit): global enemy_spawn, enemy_team, escape, full_vision #Make sure only rangers get ranger orders. if unit.unit_type != bc.UnitType.Ranger: return location = unit.location #if its time to escape, try to run to a rocket if escape and unit.location.map_location().planet == bc.Planet.Earth: nearby = gc.sense_nearby_units_by_type(location.map_location(), unit.vision_range, bc.UnitType.Rocket) if nearby: moveUnitToRocket(unit,nearby) return #sense enemies that are nearby, and then attack them nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: full_vision.extend(nearby) rangerAttack(unit, nearby) #if no one is nearby then approach the enemy, if no enemies are seen by anyone, approach enemy spawn if not nearby and gc.is_move_ready(unit.id): if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) while True: # We only support Python 3, which means brackets around print() print('pyround:', gc.round(), 'time left:', gc.get_time_left_ms(), 'ms') # count how much of each unit we have at the beginning of each turn num_workers = 0 num_knights = 0 num_healers = 0 num_rangers = 0 num_mages = 0 total_number_factories = 0 total_number_rockets = 0 for unit in gc.my_units(): if unit.unit_type == bc.UnitType.Worker: num_workers += 1 if unit.unit_type == bc.UnitType.Knight: num_knights += 1 if unit.unit_type == bc.UnitType.Healer: num_healers += 1 if unit.unit_type == bc.UnitType.Ranger: num_rangers += 1 if unit.unit_type == bc.UnitType.Mage: num_mages += 1 if unit.unit_type == bc.UnitType.Factory: total_number_factories += 1 if unit.unit_type == bc.UnitType.Rocket: total_number_rockets += 1 # shared unit vision full_vision = [] try: # walk through our units: for unit in gc.my_units(): location = unit.location if unit.unit_type == bc.UnitType.Rocket: escape = True if unit.location.map_location().planet == bc.Planet.Mars: unloadRocket(unit) elif len(unit.structure_garrison()) >= 8 or gc.round() >= 748 or unit.health < unit.max_health: launch(unit) elif unit.unit_type == bc.UnitType.Factory: factoryProduce(unit) elif unit.unit_type == bc.UnitType.Worker: workerWork(unit) elif unit.unit_type == bc.UnitType.Healer: if location.is_on_map(): Healer_heal(unit) elif unit.unit_type == bc.UnitType.Ranger: if location.is_on_map(): rangerLogic(unit) #when we want to move to rockets call is #moveUnitToRocket(unit) #want to make sure it is right time in the game and we have enough units to fill the rockets #launch(unit id of rocket to launch) # if current_unit.unit_type == bc.UnitType.Rocket: #unload_rocket(current_unit) except Exception as e: print('Error:', e) # use this to show where the error was traceback.print_exc() # send the actions we've performed, and wait for our next turn. gc.next_turn() # these lines are not strictly necessary, but it helps make the logs make more sense. # it forces everything we've written this turn to be written to the manager. sys.stdout.flush() sys.stderr.flush()
find_locations_Mars
identifier_name
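The run.py rows above reduce a destination offset to a single-step key for the approach_dir table before moving. Below is a minimal, self-contained sketch of just that reduction, assuming plain (x, y) tuples in place of the battlecode MapLocation type; sign_step and step_toward are illustrative names and not part of the bot.

# Hypothetical standalone sketch of the offset reduction used by approach().
def sign_step(diff):
    # Collapse any non-zero difference to -1 or +1, leave 0 alone.
    if diff > 0:
        return 1
    if diff < 0:
        return -1
    return 0

def step_toward(location, destination):
    # Return the (dx, dy) key the bot would look up in approach_dir,
    # or (0, 0) when there is nothing to do.
    return (sign_step(destination[0] - location[0]),
            sign_step(destination[1] - location[1]))

if __name__ == "__main__":
    assert step_toward((3, 4), (7, 4)) == (1, 0)    # due east
    assert step_toward((3, 4), (1, 9)) == (-1, 1)   # toward the northwest
    assert step_toward((5, 5), (5, 5)) == (0, 0)    # already there

Using integer signs rather than the bot's x_diff/abs(x_diff) division keeps the lookup keys as exact ints under Python 3, which is a small robustness gain over relying on float/int hash equality.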
run.py
''' CodeOfWar COSC 370 AI Battlecode Project Jennifer Mince, Carly Good, Matt Manoly, Zachary Taylor Code for Inspiration: https://github.com/AnPelec/Battlecode-2018/blob/master/Project%20Achilles/run.py ''' import battlecode as bc import random import sys import traceback import time from datetime import datetime import os print(os.getcwd()) print("pystarting") # A GameController is the main type that you talk to the game with. # Its constructor will connect to a running game. gc = bc.GameController() directions = list(bc.Direction) #get our team from API my_team = gc.team() #these dictionaries set up the priorities for each unit to interact with priority_rangers = { bc.UnitType.Worker : 3, bc.UnitType.Knight : 2, bc.UnitType.Healer : 1, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 1, bc.UnitType.Factory : 4, bc.UnitType.Rocket : 4, } priority_healers = { bc.UnitType.Worker : 4, bc.UnitType.Knight : 3, bc.UnitType.Healer : 2, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 2 } #a directions dictionary used to approach approach_dir = { (0,1) : bc.Direction.North, (1,1) : bc.Direction.Northeast, (1,0) : bc.Direction.East, (1,-1) : bc.Direction.Southeast, (0,-1) : bc.Direction.South, (-1,-1) : bc.Direction.Southwest, (-1,0) : bc.Direction.West, (-1,1) : bc.Direction.Northwest, } #sets the my_team and enemy_team variables to know who to attack or help enemy_team = bc.Team.Red if my_team == bc.Team.Red: enemy_team = bc.Team.Blue #find the start map and original units at start of game start_map = gc.starting_map(bc.Planet.Earth) init_units = start_map.initial_units for i in range(init_units.__len__()): if init_units.__getitem__(i).team == enemy_team: enemy_spawn = init_units.__getitem__(i).location.map_location() #flag for sending units into battle, flipped when an army has begun amassing release_units = False #flag for sending units to the rockets for escape escape = False fight = False print("pystarted") random.seed(datetime.now()) #Research order gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Mage) #method to move any unit def move(unit): #API returns any possible moves in list form possible_directions = list(bc.Direction) choices = [] #find only the moves that are valid moves for direct in possible_directions: if gc.can_move(unit.id, direct): choices.append(direct) #if not choices: # gc.disintegrate_unit(unit.id) # return if choices: dir = random.choice(choices) #if unit can move and is ready to move, randomly move them to a new position if gc.is_move_ready(unit.id) and gc.can_move(unit.id, dir): gc.move_robot(unit.id, dir) #Try to approach a given target destination. (Note: NOT unit) def approach(unit, location, destination): global approach_dir #Find the difference in unit position and reduce it to a simple coordinate pair #for use with the approach_dir dictionary. 
x_diff = destination.x - location.x y_diff = destination.y - location.y x_move = x_diff y_move = y_diff #if there is an x_diff/y_diff, reduce it to a movement in one direction. if x_diff != 0: x_move = x_diff/abs(x_diff) if y_diff != 0: y_move = y_diff/abs(y_diff) #if there is no moves to make, exit. if (x_move,y_move) == (0,0): return #if we can move in an optimal direction, move that direction. dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if cant move in optimal direction, try moving in a similar direction if x_move == 0: x_move = random.choice([-1,1]) elif y_move == 0: y_move = random.choice([-1,1]) else: if x_diff > y_diff: y_move = 0 else: x_move = 0 dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if nothing else works, move randomly move(unit) #logic for worker units def workerWork(worker): global num_workers, total_number_factories, escape, full_vision, fight #if there is a worker deficit and we have the resources to replicate, #find a valid direction to do so. if num_workers < 7 and gc.karbonite() >= 60: for dir in directions: if gc.can_replicate(worker.id, dir): gc.replicate(worker.id, dir) return #once an action is performed, that worker is done nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: fight = True full_vision.extend(nearby) #build on any existing nearby blueprints, or repair damaged structures nearby = gc.sense_nearby_units(worker.location.map_location(), 2) for other in nearby: if gc.can_build(worker.id, other.id): gc.build(worker.id, other.id) return elif other.health < other.max_health and gc.can_repair(worker.id, other.id): gc.repair(worker.id, other.id) return #build factories until game reaches round 150, then focus on making units if gc.karbonite() > bc.UnitType.Factory.blueprint_cost() and gc.round() < 150: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Factory, dir): gc.blueprint(worker.id, bc.UnitType.Factory, dir) return if gc.karbonite() > bc.UnitType.Rocket.blueprint_cost() and gc.round() > 550: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Rocket, dir): gc.blueprint(worker.id, bc.UnitType.Rocket, dir) return #find a direction to harvest for dir in directions: if gc.can_harvest(worker.id, dir): gc.harvest(worker.id, dir) return #if this part of the code is reached, then the only thing left to do is move move(worker) #factoryProduce takes a factory and first to ungarrison any available units #then attempts to produce a ratio of a 4 rangers to 1 healer def factoryProduce(factory):
#Healer_heal finds units near the healer and attempts to heal them def Healer_heal(unit): global enemy_spawn, my_team, full_vision location = unit.location #find nearby units on team nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.attack_range(), my_team) #if can heal, heal heal = False if gc.is_heal_ready(unit.id): lowest_health = unit for other in nearby: if other.health < lowest_health.health and other.health < other.max_health: lowest_health = other heal = True if gc.can_heal(unit.id, lowest_health.id) and heal: gc.heal(unit.id, lowest_health.id) return #if no heal targets, walk towards the action if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) #Healer_overcharge finds a nearby unit and restores their ability charge. def Healer_overcharge(unit): global my_team #if we can't overcharge, exit if not gc.is_overcharge_ready(unit.id): return #cannot overcharge if not at research level 3 if bc.ResearchInfo().get_level(bc.UnitType.Healer) < 3: return #find our location location = unit.location #get all possible targets around, and choose one to heal possible_targets = sense_nearby_units_by_team(location.map_location(), unit.ability_range(), my_team) for other in possible_targets: if gc.can_heal(unit.id, other.id): gc.heal(unit.id, other.id) return #Mars Info Finding and Rocket variables marsMap = gc.starting_map(bc.Planet.Mars) marsHeight = marsMap.height marsWidth = marsMap.width #add to this variable as rockets are built safe_locations = [] #method to find a safe location on Mars to land using known Mars info from the API def find_locations_Mars(): global safe_locations component_num = 0 for i in range(marsHeight): for j in range(marsWidth): if (i, j) not in safe_locations: temp_loc = bc.MapLocation(bc.Planet.Mars, i, j) try: if marsMap.is_passable_terrain_at(temp_loc): safe_locations.append((i, j)) #this stores the locations that are safe to use later component_num += 1 except Exception as e: print(i, j) print('Error:', e) #traceback.print_exc() #now choose a safe location to launch to per rocket def findRocketLand(rocket): global safe_locations #not sure what range to use temp_range= 5 for t in range(temp_range): return_value = random.choice(safe_locations) #calls locations from above method if (t < temp_range -1): continue return bc.MapLocation(bc.Planet.Mars, return_value[0], return_value[1]) #returns the map location to land on #method to launch the rocket def launch(unit): garrison = unit.structure_garrison() free_loc = findRocketLand(unit) if gc.can_launch_rocket(unit.id, free_loc): #if can launch, launch gc.launch_rocket(unit.id, free_loc) #method to unload and garrison the rocket once built def unloadRocket(rocket): garrison = unit.structure_garrison() if len(garrison) > 0: for d in directions: if gc.can_unload(unit.id, d): gc.unload(unit.id, d) find_locations_Mars() #method to move the units towards the rockets def moveUnitToRocket(unit,nearby): if not gc.is_move_ready(unit.id): return #if ready to move #get a location of the unit location = unit.location.map_location() #use directions from above best = directions[0] #set a distance closest_distance = 100000 #for each of nearby for x in nearby: if gc.can_load(x.id, unit.id): gc.load(x.id,unit.id) return next_location = x.location.map_location() #now the distance is from that location to the next one found current_distance = location.distance_squared_to(next_location) #if closer than the set closest 
distance, go there if current_distance < closest_distance: closest_distance = current_distance best = location.direction_to(next_location) #moving the units based off current location and if they can move range_index = 8 for i in range(8): if directions[i] == best: range_index = i break for i in range(4): temp_index = (range_index + i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return temp_index = (range_index - i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return #rangerAttack takes a unit and who is nearby to attempt an attack. def rangerAttack(unit, nearby): global priority_rangers best_target = 0 targets = [] #list of targets from least valuable to most #we find the best unit to attack from the priority_rangers dictionary #and attempt to attack the best unit. for enemy in nearby: #if enemy is too close, back away if gc.is_move_ready(unit.id): x_diff = unit.location.map_location().x - enemy.location.map_location().x y_diff = unit.location.map_location().y - enemy.location.map_location().y #backing away is done by reversing location and destination in approach function if (x_diff * x_diff) + (y_diff * y_diff) < 20: approach(unit,enemy.location.map_location(),unit.location.map_location()) if priority_rangers[enemy.unit_type] > best_target: best_target = priority_rangers[enemy.unit_type] targets.append(enemy) #if we can attack, and something is nearby to attack, do so. if gc.is_attack_ready(unit.id): for i in range(len(targets)-1,-1,-1): if gc.can_attack(unit.id, targets[i].id): gc.attack(unit.id, targets[i].id) return if gc.is_move_ready(unit.id): approach(unit,unit.location.map_location(),targets[-1].location.map_location()) #rangerLogic handles movement when no enemies are nearby, and attack orders. def rangerLogic(unit): global enemy_spawn, enemy_team, escape, full_vision #Make sure only rangers get ranger orders. 
if unit.unit_type != bc.UnitType.Ranger: return location = unit.location #if its time to escape, try to run to a rocket if escape and unit.location.map_location().planet == bc.Planet.Earth: nearby = gc.sense_nearby_units_by_type(location.map_location(), unit.vision_range, bc.UnitType.Rocket) if nearby: moveUnitToRocket(unit,nearby) return #sense enemies that are nearby, and then attack them nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: full_vision.extend(nearby) rangerAttack(unit, nearby) #if no one is nearby then approach the enemy, if no enemies are seen by anyone, approach enemy spawn if not nearby and gc.is_move_ready(unit.id): if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) while True: # We only support Python 3, which means brackets around print() print('pyround:', gc.round(), 'time left:', gc.get_time_left_ms(), 'ms') # count how much of each unit we have at the beginning of each turn num_workers = 0 num_knights = 0 num_healers = 0 num_rangers = 0 num_mages = 0 total_number_factories = 0 total_number_rockets = 0 for unit in gc.my_units(): if unit.unit_type == bc.UnitType.Worker: num_workers += 1 if unit.unit_type == bc.UnitType.Knight: num_knights += 1 if unit.unit_type == bc.UnitType.Healer: num_healers += 1 if unit.unit_type == bc.UnitType.Ranger: num_rangers += 1 if unit.unit_type == bc.UnitType.Mage: num_mages += 1 if unit.unit_type == bc.UnitType.Factory: total_number_factories += 1 if unit.unit_type == bc.UnitType.Rocket: total_number_rockets += 1 # shared unit vision full_vision = [] try: # walk through our units: for unit in gc.my_units(): location = unit.location if unit.unit_type == bc.UnitType.Rocket: escape = True if unit.location.map_location().planet == bc.Planet.Mars: unloadRocket(unit) elif len(unit.structure_garrison()) >= 8 or gc.round() >= 748 or unit.health < unit.max_health: launch(unit) elif unit.unit_type == bc.UnitType.Factory: factoryProduce(unit) elif unit.unit_type == bc.UnitType.Worker: workerWork(unit) elif unit.unit_type == bc.UnitType.Healer: if location.is_on_map(): Healer_heal(unit) elif unit.unit_type == bc.UnitType.Ranger: if location.is_on_map(): rangerLogic(unit) #when we want to move to rockets call is #moveUnitToRocket(unit) #want to make sure it is right time in the game and we have enough units to fill the rockets #launch(unit id of rocket to launch) # if current_unit.unit_type == bc.UnitType.Rocket: #unload_rocket(current_unit) except Exception as e: print('Error:', e) # use this to show where the error was traceback.print_exc() # send the actions we've performed, and wait for our next turn. gc.next_turn() # these lines are not strictly necessary, but it helps make the logs make more sense. # it forces everything we've written this turn to be written to the manager. sys.stdout.flush() sys.stderr.flush()
    global num_healers, num_rangers, release_units, fight
    garrison = unit.structure_garrison()
    if num_rangers + num_healers > 15 or fight:
        release_units = True
    #If a unit is garrisoned, release them in an available spot.
    if len(garrison) > 0 and release_units:
        for dir in directions:
            if gc.can_unload(factory.id, dir):
                gc.unload(factory.id, dir)
    if gc.round() > 650:
        return
    #If the factory is available to produce another unit. If we have enough
    #healers, produce rangers.
    if gc.can_produce_robot(factory.id, bc.UnitType.Ranger):
        if num_rangers < num_healers * 4:
            gc.produce_robot(factory.id, bc.UnitType.Ranger)
        else:
            gc.produce_robot(factory.id, bc.UnitType.Healer)
    return
identifier_body
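The identifier_body row above fills in factoryProduce, which unloads garrisoned units and then keeps roughly four rangers per healer (note the body reads the loop-global unit for the garrison rather than its factory parameter). Below is a self-contained sketch of only the production-ratio branch, independent of the game API; choose_unit is a hypothetical name and the string constants stand in for bc.UnitType members.

RANGER, HEALER = "Ranger", "Healer"  # stand-ins for bc.UnitType values

def choose_unit(num_rangers, num_healers, ratio=4):
    # Mirror of the body's branch: build a ranger while rangers are below
    # ratio * healers, otherwise build a healer.
    if num_rangers < num_healers * ratio:
        return RANGER
    return HEALER

if __name__ == "__main__":
    assert choose_unit(0, 0) == HEALER   # with no healers yet, a healer comes first
    assert choose_unit(1, 1) == RANGER
    assert choose_unit(4, 1) == HEALER

One consequence of the strict inequality is that the very first robot a factory produces is a healer, since 0 < 0 * 4 is false.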
run.py
''' CodeOfWar COSC 370 AI Battlecode Project Jennifer Mince, Carly Good, Matt Manoly, Zachary Taylor Code for Inspiration: https://github.com/AnPelec/Battlecode-2018/blob/master/Project%20Achilles/run.py ''' import battlecode as bc import random import sys import traceback import time from datetime import datetime import os print(os.getcwd()) print("pystarting") # A GameController is the main type that you talk to the game with. # Its constructor will connect to a running game. gc = bc.GameController() directions = list(bc.Direction) #get our team from API my_team = gc.team() #these dictionaries set up the priorities for each unit to interact with priority_rangers = { bc.UnitType.Worker : 3, bc.UnitType.Knight : 2, bc.UnitType.Healer : 1, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 1, bc.UnitType.Factory : 4, bc.UnitType.Rocket : 4, } priority_healers = { bc.UnitType.Worker : 4, bc.UnitType.Knight : 3, bc.UnitType.Healer : 2, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 2 } #a directions dictionary used to approach approach_dir = { (0,1) : bc.Direction.North, (1,1) : bc.Direction.Northeast, (1,0) : bc.Direction.East, (1,-1) : bc.Direction.Southeast, (0,-1) : bc.Direction.South, (-1,-1) : bc.Direction.Southwest, (-1,0) : bc.Direction.West, (-1,1) : bc.Direction.Northwest, } #sets the my_team and enemy_team variables to know who to attack or help enemy_team = bc.Team.Red if my_team == bc.Team.Red: enemy_team = bc.Team.Blue #find the start map and original units at start of game start_map = gc.starting_map(bc.Planet.Earth) init_units = start_map.initial_units for i in range(init_units.__len__()): if init_units.__getitem__(i).team == enemy_team: enemy_spawn = init_units.__getitem__(i).location.map_location() #flag for sending units into battle, flipped when an army has begun amassing release_units = False #flag for sending units to the rockets for escape escape = False fight = False print("pystarted") random.seed(datetime.now()) #Research order gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Mage) #method to move any unit def move(unit): #API returns any possible moves in list form possible_directions = list(bc.Direction) choices = [] #find only the moves that are valid moves for direct in possible_directions: if gc.can_move(unit.id, direct): choices.append(direct) #if not choices: # gc.disintegrate_unit(unit.id) # return if choices: dir = random.choice(choices) #if unit can move and is ready to move, randomly move them to a new position if gc.is_move_ready(unit.id) and gc.can_move(unit.id, dir): gc.move_robot(unit.id, dir) #Try to approach a given target destination. (Note: NOT unit) def approach(unit, location, destination): global approach_dir #Find the difference in unit position and reduce it to a simple coordinate pair #for use with the approach_dir dictionary. 
x_diff = destination.x - location.x y_diff = destination.y - location.y x_move = x_diff y_move = y_diff #if there is an x_diff/y_diff, reduce it to a movement in one direction. if x_diff != 0: x_move = x_diff/abs(x_diff) if y_diff != 0: y_move = y_diff/abs(y_diff) #if there is no moves to make, exit. if (x_move,y_move) == (0,0): return #if we can move in an optimal direction, move that direction. dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if cant move in optimal direction, try moving in a similar direction if x_move == 0: x_move = random.choice([-1,1]) elif y_move == 0: y_move = random.choice([-1,1]) else: if x_diff > y_diff: y_move = 0 else: x_move = 0 dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if nothing else works, move randomly move(unit) #logic for worker units def workerWork(worker): global num_workers, total_number_factories, escape, full_vision, fight #if there is a worker deficit and we have the resources to replicate, #find a valid direction to do so. if num_workers < 7 and gc.karbonite() >= 60: for dir in directions: if gc.can_replicate(worker.id, dir): gc.replicate(worker.id, dir) return #once an action is performed, that worker is done nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: fight = True full_vision.extend(nearby) #build on any existing nearby blueprints, or repair damaged structures nearby = gc.sense_nearby_units(worker.location.map_location(), 2) for other in nearby: if gc.can_build(worker.id, other.id): gc.build(worker.id, other.id) return elif other.health < other.max_health and gc.can_repair(worker.id, other.id): gc.repair(worker.id, other.id) return #build factories until game reaches round 150, then focus on making units if gc.karbonite() > bc.UnitType.Factory.blueprint_cost() and gc.round() < 150: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Factory, dir): gc.blueprint(worker.id, bc.UnitType.Factory, dir) return if gc.karbonite() > bc.UnitType.Rocket.blueprint_cost() and gc.round() > 550: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Rocket, dir): gc.blueprint(worker.id, bc.UnitType.Rocket, dir) return #find a direction to harvest for dir in directions: if gc.can_harvest(worker.id, dir): gc.harvest(worker.id, dir) return #if this part of the code is reached, then the only thing left to do is move move(worker) #factoryProduce takes a factory and first to ungarrison any available units #then attempts to produce a ratio of a 4 rangers to 1 healer def factoryProduce(factory): global num_healers, num_rangers, release_units, fight garrison = unit.structure_garrison() if num_rangers + num_healers > 15 or fight: release_units = True #If a unit is garrisoned, release them in an available spot. if len(garrison) > 0 and release_units: for dir in directions: if gc.can_unload(factory.id, dir): gc.unload(factory.id, dir) if gc.round() > 650: return #If the factory is available to produce another unit. If we have enough #healers, produce rangers. 
if gc.can_produce_robot(factory.id, bc.UnitType.Ranger): if num_rangers < num_healers * 4: gc.produce_robot(factory.id, bc.UnitType.Ranger) else: gc.produce_robot(factory.id, bc.UnitType.Healer) return #Healer_heal finds units near the healer and attempts to heal them def Healer_heal(unit): global enemy_spawn, my_team, full_vision location = unit.location #find nearby units on team nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.attack_range(), my_team) #if can heal, heal heal = False if gc.is_heal_ready(unit.id): lowest_health = unit for other in nearby: if other.health < lowest_health.health and other.health < other.max_health: lowest_health = other heal = True if gc.can_heal(unit.id, lowest_health.id) and heal: gc.heal(unit.id, lowest_health.id) return #if no heal targets, walk towards the action if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) #Healer_overcharge finds a nearby unit and restores their ability charge. def Healer_overcharge(unit): global my_team #if we can't overcharge, exit if not gc.is_overcharge_ready(unit.id): return #cannot overcharge if not at research level 3 if bc.ResearchInfo().get_level(bc.UnitType.Healer) < 3: return #find our location location = unit.location #get all possible targets around, and choose one to heal possible_targets = sense_nearby_units_by_team(location.map_location(), unit.ability_range(), my_team) for other in possible_targets: if gc.can_heal(unit.id, other.id): gc.heal(unit.id, other.id) return #Mars Info Finding and Rocket variables marsMap = gc.starting_map(bc.Planet.Mars) marsHeight = marsMap.height marsWidth = marsMap.width #add to this variable as rockets are built safe_locations = [] #method to find a safe location on Mars to land using known Mars info from the API def find_locations_Mars(): global safe_locations component_num = 0 for i in range(marsHeight): for j in range(marsWidth): if (i, j) not in safe_locations: temp_loc = bc.MapLocation(bc.Planet.Mars, i, j) try: if marsMap.is_passable_terrain_at(temp_loc): safe_locations.append((i, j)) #this stores the locations that are safe to use later component_num += 1 except Exception as e: print(i, j) print('Error:', e) #traceback.print_exc() #now choose a safe location to launch to per rocket def findRocketLand(rocket): global safe_locations #not sure what range to use temp_range= 5 for t in range(temp_range): return_value = random.choice(safe_locations) #calls locations from above method if (t < temp_range -1): continue return bc.MapLocation(bc.Planet.Mars, return_value[0], return_value[1]) #returns the map location to land on #method to launch the rocket def launch(unit): garrison = unit.structure_garrison() free_loc = findRocketLand(unit) if gc.can_launch_rocket(unit.id, free_loc): #if can launch, launch gc.launch_rocket(unit.id, free_loc) #method to unload and garrison the rocket once built def unloadRocket(rocket): garrison = unit.structure_garrison() if len(garrison) > 0: for d in directions: if gc.can_unload(unit.id, d): gc.unload(unit.id, d) find_locations_Mars() #method to move the units towards the rockets def moveUnitToRocket(unit,nearby): if not gc.is_move_ready(unit.id): return #if ready to move #get a location of the unit location = unit.location.map_location() #use directions from above best = directions[0] #set a distance closest_distance = 100000 #for each of nearby for x in nearby: if gc.can_load(x.id, unit.id): gc.load(x.id,unit.id) 
return next_location = x.location.map_location() #now the distance is from that location to the next one found current_distance = location.distance_squared_to(next_location) #if closer than the set closest distance, go there if current_distance < closest_distance: closest_distance = current_distance best = location.direction_to(next_location) #moving the units based off current location and if they can move range_index = 8 for i in range(8): if directions[i] == best: range_index = i break for i in range(4): temp_index = (range_index + i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return temp_index = (range_index - i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return #rangerAttack takes a unit and who is nearby to attempt an attack. def rangerAttack(unit, nearby): global priority_rangers best_target = 0 targets = [] #list of targets from least valuable to most #we find the best unit to attack from the priority_rangers dictionary #and attempt to attack the best unit. for enemy in nearby: #if enemy is too close, back away if gc.is_move_ready(unit.id): x_diff = unit.location.map_location().x - enemy.location.map_location().x y_diff = unit.location.map_location().y - enemy.location.map_location().y #backing away is done by reversing location and destination in approach function if (x_diff * x_diff) + (y_diff * y_diff) < 20: approach(unit,enemy.location.map_location(),unit.location.map_location()) if priority_rangers[enemy.unit_type] > best_target: best_target = priority_rangers[enemy.unit_type] targets.append(enemy) #if we can attack, and something is nearby to attack, do so. if gc.is_attack_ready(unit.id): for i in range(len(targets)-1,-1,-1): if gc.can_attack(unit.id, targets[i].id): gc.attack(unit.id, targets[i].id) return if gc.is_move_ready(unit.id): approach(unit,unit.location.map_location(),targets[-1].location.map_location()) #rangerLogic handles movement when no enemies are nearby, and attack orders. def rangerLogic(unit): global enemy_spawn, enemy_team, escape, full_vision #Make sure only rangers get ranger orders. 
if unit.unit_type != bc.UnitType.Ranger: return location = unit.location #if its time to escape, try to run to a rocket if escape and unit.location.map_location().planet == bc.Planet.Earth: nearby = gc.sense_nearby_units_by_type(location.map_location(), unit.vision_range, bc.UnitType.Rocket) if nearby: moveUnitToRocket(unit,nearby) return #sense enemies that are nearby, and then attack them nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: full_vision.extend(nearby) rangerAttack(unit, nearby) #if no one is nearby then approach the enemy, if no enemies are seen by anyone, approach enemy spawn if not nearby and gc.is_move_ready(unit.id): if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) while True: # We only support Python 3, which means brackets around print() print('pyround:', gc.round(), 'time left:', gc.get_time_left_ms(), 'ms') # count how much of each unit we have at the beginning of each turn num_workers = 0 num_knights = 0 num_healers = 0 num_rangers = 0 num_mages = 0 total_number_factories = 0 total_number_rockets = 0 for unit in gc.my_units(): if unit.unit_type == bc.UnitType.Worker: num_workers += 1 if unit.unit_type == bc.UnitType.Knight: num_knights += 1 if unit.unit_type == bc.UnitType.Healer: num_healers += 1 if unit.unit_type == bc.UnitType.Ranger: num_rangers += 1 if unit.unit_type == bc.UnitType.Mage: num_mages += 1 if unit.unit_type == bc.UnitType.Factory: total_number_factories += 1 if unit.unit_type == bc.UnitType.Rocket: total_number_rockets += 1 # shared unit vision full_vision = [] try: # walk through our units: for unit in gc.my_units(): location = unit.location if unit.unit_type == bc.UnitType.Rocket: escape = True if unit.location.map_location().planet == bc.Planet.Mars:
elif len(unit.structure_garrison()) >= 8 or gc.round() >= 748 or unit.health < unit.max_health: launch(unit) elif unit.unit_type == bc.UnitType.Factory: factoryProduce(unit) elif unit.unit_type == bc.UnitType.Worker: workerWork(unit) elif unit.unit_type == bc.UnitType.Healer: if location.is_on_map(): Healer_heal(unit) elif unit.unit_type == bc.UnitType.Ranger: if location.is_on_map(): rangerLogic(unit) #when we want to move to rockets call is #moveUnitToRocket(unit) #want to make sure it is right time in the game and we have enough units to fill the rockets #launch(unit id of rocket to launch) # if current_unit.unit_type == bc.UnitType.Rocket: #unload_rocket(current_unit) except Exception as e: print('Error:', e) # use this to show where the error was traceback.print_exc() # send the actions we've performed, and wait for our next turn. gc.next_turn() # these lines are not strictly necessary, but it helps make the logs make more sense. # it forces everything we've written this turn to be written to the manager. sys.stdout.flush() sys.stderr.flush()
unloadRocket(unit)
conditional_block
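In the rows above, moveUnitToRocket boards a nearby rocket when gc.can_load succeeds and otherwise heads toward the closest one by squared distance. Here is a minimal sketch of the distance scan using plain tuples; squared_distance and closest_point are hypothetical helpers standing in for MapLocation.distance_squared_to and the bot's inline loop.

def squared_distance(a, b):
    # Same metric the bot gets from MapLocation.distance_squared_to.
    return (a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2

def closest_point(origin, candidates):
    # Hypothetical helper: return the candidate closest to origin, or None.
    best, best_dist = None, float("inf")
    for point in candidates:
        dist = squared_distance(origin, point)
        if dist < best_dist:
            best_dist = dist
            best = point
    return best

if __name__ == "__main__":
    rockets = [(10, 10), (2, 3), (6, 1)]
    assert closest_point((0, 0), rockets) == (2, 3)
    assert closest_point((7, 2), rockets) == (6, 1)
    assert closest_point((0, 0), []) is None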
run.py
''' CodeOfWar COSC 370 AI Battlecode Project Jennifer Mince, Carly Good, Matt Manoly, Zachary Taylor Code for Inspiration: https://github.com/AnPelec/Battlecode-2018/blob/master/Project%20Achilles/run.py ''' import battlecode as bc import random import sys import traceback import time from datetime import datetime import os print(os.getcwd()) print("pystarting") # A GameController is the main type that you talk to the game with. # Its constructor will connect to a running game. gc = bc.GameController() directions = list(bc.Direction) #get our team from API my_team = gc.team() #these dictionaries set up the priorities for each unit to interact with priority_rangers = { bc.UnitType.Worker : 3, bc.UnitType.Knight : 2, bc.UnitType.Healer : 1, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 1, bc.UnitType.Factory : 4, bc.UnitType.Rocket : 4, }
priority_healers = { bc.UnitType.Worker : 4, bc.UnitType.Knight : 3, bc.UnitType.Healer : 2, bc.UnitType.Ranger : 1, bc.UnitType.Mage : 2 } #a directions dictionary used to approach approach_dir = { (0,1) : bc.Direction.North, (1,1) : bc.Direction.Northeast, (1,0) : bc.Direction.East, (1,-1) : bc.Direction.Southeast, (0,-1) : bc.Direction.South, (-1,-1) : bc.Direction.Southwest, (-1,0) : bc.Direction.West, (-1,1) : bc.Direction.Northwest, } #sets the my_team and enemy_team variables to know who to attack or help enemy_team = bc.Team.Red if my_team == bc.Team.Red: enemy_team = bc.Team.Blue #find the start map and original units at start of game start_map = gc.starting_map(bc.Planet.Earth) init_units = start_map.initial_units for i in range(init_units.__len__()): if init_units.__getitem__(i).team == enemy_team: enemy_spawn = init_units.__getitem__(i).location.map_location() #flag for sending units into battle, flipped when an army has begun amassing release_units = False #flag for sending units to the rockets for escape escape = False fight = False print("pystarted") random.seed(datetime.now()) #Research order gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Ranger) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Rocket) gc.queue_research(bc.UnitType.Worker) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Mage) gc.queue_research(bc.UnitType.Healer) gc.queue_research(bc.UnitType.Mage) #method to move any unit def move(unit): #API returns any possible moves in list form possible_directions = list(bc.Direction) choices = [] #find only the moves that are valid moves for direct in possible_directions: if gc.can_move(unit.id, direct): choices.append(direct) #if not choices: # gc.disintegrate_unit(unit.id) # return if choices: dir = random.choice(choices) #if unit can move and is ready to move, randomly move them to a new position if gc.is_move_ready(unit.id) and gc.can_move(unit.id, dir): gc.move_robot(unit.id, dir) #Try to approach a given target destination. (Note: NOT unit) def approach(unit, location, destination): global approach_dir #Find the difference in unit position and reduce it to a simple coordinate pair #for use with the approach_dir dictionary. x_diff = destination.x - location.x y_diff = destination.y - location.y x_move = x_diff y_move = y_diff #if there is an x_diff/y_diff, reduce it to a movement in one direction. if x_diff != 0: x_move = x_diff/abs(x_diff) if y_diff != 0: y_move = y_diff/abs(y_diff) #if there is no moves to make, exit. if (x_move,y_move) == (0,0): return #if we can move in an optimal direction, move that direction. 
dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if cant move in optimal direction, try moving in a similar direction if x_move == 0: x_move = random.choice([-1,1]) elif y_move == 0: y_move = random.choice([-1,1]) else: if x_diff > y_diff: y_move = 0 else: x_move = 0 dir = approach_dir[(x_move,y_move)] if gc.is_move_ready(unit.id) and gc.can_move(unit.id,dir): gc.move_robot(unit.id, dir) return #if nothing else works, move randomly move(unit) #logic for worker units def workerWork(worker): global num_workers, total_number_factories, escape, full_vision, fight #if there is a worker deficit and we have the resources to replicate, #find a valid direction to do so. if num_workers < 7 and gc.karbonite() >= 60: for dir in directions: if gc.can_replicate(worker.id, dir): gc.replicate(worker.id, dir) return #once an action is performed, that worker is done nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: fight = True full_vision.extend(nearby) #build on any existing nearby blueprints, or repair damaged structures nearby = gc.sense_nearby_units(worker.location.map_location(), 2) for other in nearby: if gc.can_build(worker.id, other.id): gc.build(worker.id, other.id) return elif other.health < other.max_health and gc.can_repair(worker.id, other.id): gc.repair(worker.id, other.id) return #build factories until game reaches round 150, then focus on making units if gc.karbonite() > bc.UnitType.Factory.blueprint_cost() and gc.round() < 150: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Factory, dir): gc.blueprint(worker.id, bc.UnitType.Factory, dir) return if gc.karbonite() > bc.UnitType.Rocket.blueprint_cost() and gc.round() > 550: for dir in directions: if gc.can_blueprint(worker.id, bc.UnitType.Rocket, dir): gc.blueprint(worker.id, bc.UnitType.Rocket, dir) return #find a direction to harvest for dir in directions: if gc.can_harvest(worker.id, dir): gc.harvest(worker.id, dir) return #if this part of the code is reached, then the only thing left to do is move move(worker) #factoryProduce takes a factory and first to ungarrison any available units #then attempts to produce a ratio of a 4 rangers to 1 healer def factoryProduce(factory): global num_healers, num_rangers, release_units, fight garrison = unit.structure_garrison() if num_rangers + num_healers > 15 or fight: release_units = True #If a unit is garrisoned, release them in an available spot. if len(garrison) > 0 and release_units: for dir in directions: if gc.can_unload(factory.id, dir): gc.unload(factory.id, dir) if gc.round() > 650: return #If the factory is available to produce another unit. If we have enough #healers, produce rangers. 
if gc.can_produce_robot(factory.id, bc.UnitType.Ranger): if num_rangers < num_healers * 4: gc.produce_robot(factory.id, bc.UnitType.Ranger) else: gc.produce_robot(factory.id, bc.UnitType.Healer) return #Healer_heal finds units near the healer and attempts to heal them def Healer_heal(unit): global enemy_spawn, my_team, full_vision location = unit.location #find nearby units on team nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.attack_range(), my_team) #if can heal, heal heal = False if gc.is_heal_ready(unit.id): lowest_health = unit for other in nearby: if other.health < lowest_health.health and other.health < other.max_health: lowest_health = other heal = True if gc.can_heal(unit.id, lowest_health.id) and heal: gc.heal(unit.id, lowest_health.id) return #if no heal targets, walk towards the action if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) #Healer_overcharge finds a nearby unit and restores their ability charge. def Healer_overcharge(unit): global my_team #if we can't overcharge, exit if not gc.is_overcharge_ready(unit.id): return #cannot overcharge if not at research level 3 if bc.ResearchInfo().get_level(bc.UnitType.Healer) < 3: return #find our location location = unit.location #get all possible targets around, and choose one to heal possible_targets = sense_nearby_units_by_team(location.map_location(), unit.ability_range(), my_team) for other in possible_targets: if gc.can_heal(unit.id, other.id): gc.heal(unit.id, other.id) return #Mars Info Finding and Rocket variables marsMap = gc.starting_map(bc.Planet.Mars) marsHeight = marsMap.height marsWidth = marsMap.width #add to this variable as rockets are built safe_locations = [] #method to find a safe location on Mars to land using known Mars info from the API def find_locations_Mars(): global safe_locations component_num = 0 for i in range(marsHeight): for j in range(marsWidth): if (i, j) not in safe_locations: temp_loc = bc.MapLocation(bc.Planet.Mars, i, j) try: if marsMap.is_passable_terrain_at(temp_loc): safe_locations.append((i, j)) #this stores the locations that are safe to use later component_num += 1 except Exception as e: print(i, j) print('Error:', e) #traceback.print_exc() #now choose a safe location to launch to per rocket def findRocketLand(rocket): global safe_locations #not sure what range to use temp_range= 5 for t in range(temp_range): return_value = random.choice(safe_locations) #calls locations from above method if (t < temp_range -1): continue return bc.MapLocation(bc.Planet.Mars, return_value[0], return_value[1]) #returns the map location to land on #method to launch the rocket def launch(unit): garrison = unit.structure_garrison() free_loc = findRocketLand(unit) if gc.can_launch_rocket(unit.id, free_loc): #if can launch, launch gc.launch_rocket(unit.id, free_loc) #method to unload and garrison the rocket once built def unloadRocket(rocket): garrison = unit.structure_garrison() if len(garrison) > 0: for d in directions: if gc.can_unload(unit.id, d): gc.unload(unit.id, d) find_locations_Mars() #method to move the units towards the rockets def moveUnitToRocket(unit,nearby): if not gc.is_move_ready(unit.id): return #if ready to move #get a location of the unit location = unit.location.map_location() #use directions from above best = directions[0] #set a distance closest_distance = 100000 #for each of nearby for x in nearby: if gc.can_load(x.id, unit.id): gc.load(x.id,unit.id) 
return next_location = x.location.map_location() #now the distance is from that location to the next one found current_distance = location.distance_squared_to(next_location) #if closer than the set closest distance, go there if current_distance < closest_distance: closest_distance = current_distance best = location.direction_to(next_location) #moving the units based off current location and if they can move range_index = 8 for i in range(8): if directions[i] == best: range_index = i break for i in range(4): temp_index = (range_index + i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return temp_index = (range_index - i + 9)%9 if gc.can_move(unit.id, directions[temp_index]): gc.move_robot(unit.id, directions[temp_index]) return #rangerAttack takes a unit and who is nearby to attempt an attack. def rangerAttack(unit, nearby): global priority_rangers best_target = 0 targets = [] #list of targets from least valuable to most #we find the best unit to attack from the priority_rangers dictionary #and attempt to attack the best unit. for enemy in nearby: #if enemy is too close, back away if gc.is_move_ready(unit.id): x_diff = unit.location.map_location().x - enemy.location.map_location().x y_diff = unit.location.map_location().y - enemy.location.map_location().y #backing away is done by reversing location and destination in approach function if (x_diff * x_diff) + (y_diff * y_diff) < 20: approach(unit,enemy.location.map_location(),unit.location.map_location()) if priority_rangers[enemy.unit_type] > best_target: best_target = priority_rangers[enemy.unit_type] targets.append(enemy) #if we can attack, and something is nearby to attack, do so. if gc.is_attack_ready(unit.id): for i in range(len(targets)-1,-1,-1): if gc.can_attack(unit.id, targets[i].id): gc.attack(unit.id, targets[i].id) return if gc.is_move_ready(unit.id): approach(unit,unit.location.map_location(),targets[-1].location.map_location()) #rangerLogic handles movement when no enemies are nearby, and attack orders. def rangerLogic(unit): global enemy_spawn, enemy_team, escape, full_vision #Make sure only rangers get ranger orders. 
if unit.unit_type != bc.UnitType.Ranger: return location = unit.location #if its time to escape, try to run to a rocket if escape and unit.location.map_location().planet == bc.Planet.Earth: nearby = gc.sense_nearby_units_by_type(location.map_location(), unit.vision_range, bc.UnitType.Rocket) if nearby: moveUnitToRocket(unit,nearby) return #sense enemies that are nearby, and then attack them nearby = gc.sense_nearby_units_by_team(location.map_location(), unit.vision_range, enemy_team) if nearby: full_vision.extend(nearby) rangerAttack(unit, nearby) #if no one is nearby then approach the enemy, if no enemies are seen by anyone, approach enemy spawn if not nearby and gc.is_move_ready(unit.id): if full_vision: approach(unit, unit.location.map_location(),full_vision[0].location.map_location()) else: approach(unit, unit.location.map_location(),enemy_spawn) while True: # We only support Python 3, which means brackets around print() print('pyround:', gc.round(), 'time left:', gc.get_time_left_ms(), 'ms') # count how much of each unit we have at the beginning of each turn num_workers = 0 num_knights = 0 num_healers = 0 num_rangers = 0 num_mages = 0 total_number_factories = 0 total_number_rockets = 0 for unit in gc.my_units(): if unit.unit_type == bc.UnitType.Worker: num_workers += 1 if unit.unit_type == bc.UnitType.Knight: num_knights += 1 if unit.unit_type == bc.UnitType.Healer: num_healers += 1 if unit.unit_type == bc.UnitType.Ranger: num_rangers += 1 if unit.unit_type == bc.UnitType.Mage: num_mages += 1 if unit.unit_type == bc.UnitType.Factory: total_number_factories += 1 if unit.unit_type == bc.UnitType.Rocket: total_number_rockets += 1 # shared unit vision full_vision = [] try: # walk through our units: for unit in gc.my_units(): location = unit.location if unit.unit_type == bc.UnitType.Rocket: escape = True if unit.location.map_location().planet == bc.Planet.Mars: unloadRocket(unit) elif len(unit.structure_garrison()) >= 8 or gc.round() >= 748 or unit.health < unit.max_health: launch(unit) elif unit.unit_type == bc.UnitType.Factory: factoryProduce(unit) elif unit.unit_type == bc.UnitType.Worker: workerWork(unit) elif unit.unit_type == bc.UnitType.Healer: if location.is_on_map(): Healer_heal(unit) elif unit.unit_type == bc.UnitType.Ranger: if location.is_on_map(): rangerLogic(unit) #when we want to move to rockets call is #moveUnitToRocket(unit) #want to make sure it is right time in the game and we have enough units to fill the rockets #launch(unit id of rocket to launch) # if current_unit.unit_type == bc.UnitType.Rocket: #unload_rocket(current_unit) except Exception as e: print('Error:', e) # use this to show where the error was traceback.print_exc() # send the actions we've performed, and wait for our next turn. gc.next_turn() # these lines are not strictly necessary, but it helps make the logs make more sense. # it forces everything we've written this turn to be written to the manager. sys.stdout.flush() sys.stderr.flush()
random_line_split
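rangerAttack in the run.py rows ranks enemies with the priority_rangers table, appending only enemies that beat the best priority seen so far, and then walks that list backwards so the most valuable reachable target is attacked first. A self-contained sketch of that accumulation, using the same priorities; pick_targets is a hypothetical name and unit types are plain strings here.

# Same priorities the bot assigns in priority_rangers (higher = more valuable).
PRIORITY = {"Worker": 3, "Knight": 2, "Healer": 1, "Ranger": 1, "Mage": 1,
            "Factory": 4, "Rocket": 4}

def pick_targets(enemies):
    # Keep only enemies whose priority strictly improves on the running best,
    # so the resulting list ends with the most valuable target seen.
    best, targets = 0, []
    for unit_type in enemies:
        if PRIORITY[unit_type] > best:
            best = PRIORITY[unit_type]
            targets.append(unit_type)
    return targets

if __name__ == "__main__":
    seen = ["Ranger", "Knight", "Worker", "Healer", "Factory"]
    assert pick_targets(seen) == ["Ranger", "Knight", "Worker", "Factory"]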
test.rs
// Code that generates a test runner to run all the tests in a crate #![allow(dead_code)] #![allow(unused_imports)] use HasTestSignature::*; use std::iter; use std::slice; use std::mem; use std::vec; use log::debug; use smallvec::{smallvec, SmallVec}; use syntax_pos::{DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; use crate::attr::{self, HasAttrs}; use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan}; use crate::config; use crate::entry::{self, EntryPointType}; use crate::ext::base::{ExtCtxt, Resolver}; use crate::ext::build::AstBuilder; use crate::ext::expand::ExpansionConfig; use crate::ext::hygiene::{self, Mark, SyntaxContext}; use crate::mut_visit::{*, ExpectOne}; use crate::feature_gate::Features; use crate::util::map_in_place::MapInPlace; use crate::parse::{token, ParseSess}; use crate::print::pprust; use crate::ast::{self, Ident}; use crate::ptr::P; use crate::symbol::{self, Symbol, keywords}; use crate::ThinVec; struct Test { span: Span, path: Vec<Ident>, } struct TestCtxt<'a> { span_diagnostic: &'a errors::Handler, path: Vec<Ident>, ext_cx: ExtCtxt<'a>, test_cases: Vec<Test>, reexport_test_harness_main: Option<Symbol>, is_libtest: bool, ctxt: SyntaxContext, features: &'a Features, test_runner: Option<ast::Path>, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option<Ident>, } // Traverse the crate, collecting all the test functions, eliding any // existing main functions, and synthesizing a main test harness pub fn modify_for_testing(sess: &ParseSess, resolver: &mut dyn Resolver, should_test: bool, krate: &mut ast::Crate, span_diagnostic: &errors::Handler, features: &Features) { // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use __test::main as some_name;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. 
let reexport_test_harness_main = attr::first_attr_value_str_by_name(&krate.attrs, "reexport_test_harness_main"); // Do this here so that the test_runner crate attribute gets marked as used // even in non-test builds let test_runner = get_test_runner(span_diagnostic, &krate); if should_test { generate_test_harness(sess, resolver, reexport_test_harness_main, krate, span_diagnostic, features, test_runner) } } struct TestHarnessGenerator<'a> { cx: TestCtxt<'a>, tests: Vec<Ident>, // submodule name, gensym'd identifier for re-exports tested_submods: Vec<(Ident, Ident)>, } impl<'a> MutVisitor for TestHarnessGenerator<'a> { fn visit_crate(&mut self, c: &mut ast::Crate) { noop_visit_crate(c, self); // Create a main function to run our tests let test_main = { let unresolved = mk_main(&mut self.cx); self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() }; c.module.items.push(test_main); } fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { let ident = i.ident; if ident.name != keywords::Invalid.name() { self.cx.path.push(ident); } debug!("current path: {}", path_name_i(&self.cx.path)); let mut item = i.into_inner(); if is_test_case(&item) { debug!("this is a test item"); let test = Test { span: item.span, path: self.cx.path.clone(), }; self.cx.test_cases.push(test); self.tests.push(item.ident); } // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things if let ast::ItemKind::Mod(mut module) = item.node { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); noop_visit_mod(&mut module, self); let tests = mem::replace(&mut self.tests, tests); let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); module.items.push(it); if !self.cx.path.is_empty() { self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); } else { debug!("pushing nothing, sym: {:?}", sym); self.cx.toplevel_reexport = Some(sym); } } item.node = ast::ItemKind::Mod(module); } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } smallvec![P(item)] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// A folder used to remove any entry points (like fn main) because the harness /// generator will provide its own struct EntryPointCleaner { // Current depth in the ast depth: usize, } impl MutVisitor for EntryPointCleaner { fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { self.depth += 1; let item = noop_flat_map_item(i, self).expect_one("noop did something"); self.depth -= 1; // Remove any #[main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. 
let item = match entry::entry_point_type(&item, self.depth) { EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { let allow_ident = Ident::from_str("allow"); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(DUMMY_SP, attr::mk_attr_id(), allow_dead_code_item); ast::Item { id, ident, attrs: attrs.into_iter() .filter(|attr| { !attr.check_name("main") && !attr.check_name("start") }) .chain(iter::once(allow_dead_code)) .collect(), node, vis, span, tokens, } }), EntryPointType::None | EntryPointType::OtherMain => item, }; smallvec![item] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// Creates an item (specifically a module) that "pub use"s the tests passed in. /// Each tested submodule will contain a similar reexport module that we will export /// under the name of the original module. That is, `submod::__test_reexports` is /// reexported like so `pub use submod::__test_reexports as submod`. fn mk_reexport_mod(cx: &mut TestCtxt<'_>, parent: ast::NodeId, tests: Vec<Ident>, tested_submods: Vec<(Ident, Ident)>) -> (P<ast::Item>, Ident) { let super_ = Ident::from_str("super"); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), cx.ext_cx.path(DUMMY_SP, vec![super_, r])) }).chain(tested_submods.into_iter().map(|(r, sym)| { let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]); cx.ext_cx.item_use_simple_(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), Some(r), path) })).collect(); let reexport_mod = ast::Mod { inline: true, inner: DUMMY_SP, items, }; let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports")); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Mod(reexport_mod), vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); (it, sym) } /// Crawl over the crate, inserting test reexports and the test main function fn generate_test_harness(sess: &ParseSess, resolver: &mut dyn Resolver, reexport_test_harness_main: Option<Symbol>, krate: &mut ast::Crate, sd: &errors::Handler, features: &Features, test_runner: Option<ast::Path>) { // Remove the entry points let mut cleaner = EntryPointCleaner { depth: 0 }; cleaner.visit_crate(krate); let mark = Mark::fresh(Mark::root()); let mut econfig = ExpansionConfig::default("test".to_string()); econfig.features = Some(features); let cx = TestCtxt { span_diagnostic: sd, ext_cx: ExtCtxt::new(sess, econfig, resolver), path: Vec::new(), test_cases: Vec::new(), reexport_test_harness_main, // N.B., doesn't consider the value of `--crate-name` passed on the command line. 
is_libtest: attr::find_crate_name(&krate.attrs).map(|s| s == "test").unwrap_or(false), toplevel_reexport: None, ctxt: SyntaxContext::empty().apply_mark(mark), features, test_runner }; mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, def_site: None, format: MacroAttribute(Symbol::intern("test_case")), allow_internal_unstable: Some(vec![ Symbol::intern("main"), Symbol::intern("test"), Symbol::intern("rustc_attrs"), ].into()), allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); TestHarnessGenerator { cx, tests: Vec::new(), tested_submods: Vec::new(), }.visit_crate(krate); } /// Craft a span that will be ignored by the stability lint's /// call to source_map's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt<'_>, sp: Span) -> Span { sp.with_ctxt(cx.ctxt) } enum HasTestSignature { Yes, No(BadTestSignature), } #[derive(PartialEq)] enum BadTestSignature { NotEvenAFunction, WrongTypeSignature, NoArgumentsAllowed, ShouldPanicOnlyWithNoArgs, } /// Creates a function item for use as the main function of a test build. /// This function will call the `test_runner` as specified by the crate attribute fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { // Writing this out by hand with 'ignored_span': // pub fn main() { // #![main] // test::test_main_static(::std::os::args().as_slice(), &[..tests]); // } let sp = ignored_span(cx, DUMMY_SP); let ecx = &cx.ext_cx; let test_id = ecx.ident_of("test").gensym(); // test::test_main_static(...) let mut test_runner = cx.test_runner.clone().unwrap_or( ecx.path(sp, vec![ test_id, ecx.ident_of("test_main_static") ])); test_runner.span = sp; let test_main_path_expr = ecx.expr_path(test_runner); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx)]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // extern crate test as test_gensym let test_extern_stmt = ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(Some(Symbol::intern("test"))) )); // pub fn main() { ... 
} let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); // If no test runner is provided we need to import the test crate let main_body = if cx.test_runner.is_none() { ecx.block(sp, vec![test_extern_stmt, call_test_main]) } else { ecx.block(sp, vec![call_test_main]) }; let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)), ast::FnHeader::default(), ast::Generics::default(), main_body); // Honor the reexport_test_harness_main attribute let main_id = Ident::new( cx.reexport_test_harness_main.unwrap_or(Symbol::gensym("main")), sp); P(ast::Item { ident: main_id, attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, }) } fn path_name_i(idents: &[Ident]) -> String { let mut path_name = "".to_string(); let mut idents_iter = idents.iter().peekable(); while let Some(ident) = idents_iter.next() { path_name.push_str(&ident.as_str()); if idents_iter.peek().is_some() { path_name.push_str("::") } } path_name } /// Creates a slice containing every test like so: /// &[path::to::test1, path::to::test2] fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> { debug!("building test vector from {} tests", cx.test_cases.len()); let ref ecx = cx.ext_cx; ecx.expr_vec_slice(DUMMY_SP, cx.test_cases.iter().map(|test| { ecx.expr_addr_of(test.span, ecx.expr_path(ecx.path(test.span, visible_path(cx, &test.path)))) }).collect()) } /// Creates a path from the top-level __test module to the test via __test_reexports fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{ let mut visible_path = vec![]; match cx.toplevel_reexport { Some(id) => visible_path.push(id), None => { cx.span_diagnostic.bug("expected to find top-level re-export name, but found None"); } } visible_path.extend_from_slice(path); visible_path } fn is_test_case(i: &ast::Item) -> bool
fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> { let test_attr = attr::find_by_name(&krate.attrs, "test_runner")?; test_attr.meta_item_list().map(|meta_list| { if meta_list.len() != 1 { sd.span_fatal(test_attr.span, "#![test_runner(..)] accepts exactly 1 argument").raise() } match meta_list[0].meta_item() { Some(meta_item) if meta_item.is_word() => meta_item.path.clone(), _ => sd.span_fatal(test_attr.span, "`test_runner` argument must be a path").raise() } }) }
{ attr::contains_name(&i.attrs, "rustc_test_marker") }
identifier_body
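The record above ends with its `middle` and `fim_type` fields: for an `identifier_body` sample the held-out span is the body of `is_test_case`, and the surrounding `prefix` and `suffix` provide the context it must be recovered from. A minimal sketch of reassembling such a record for verification — the field names follow the column layout at the top of this file, but the tiny `record` dict below is a hypothetical illustration, not an actual row:

def reassemble(record):
    """Rebuild the original source text from a FIM record.

    Expects a mapping with 'prefix', 'middle', and 'suffix' keys,
    matching the column layout of this dataset.
    """
    return record["prefix"] + record["middle"] + record["suffix"]


if __name__ == "__main__":
    # Hypothetical miniature record shaped like the rows in this file.
    record = {
        "file_name": "test.rs",
        "prefix": "fn is_test_case(i: &ast::Item) -> bool ",
        "middle": '{ attr::contains_name(&i.attrs, "rustc_test_marker") }',
        "suffix": "\n\nfn get_test_runner(/* ... */) { /* ... */ }",
        "fim_type": "identifier_body",
    }
    print(reassemble(record))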
test.rs
// Code that generates a test runner to run all the tests in a crate #![allow(dead_code)] #![allow(unused_imports)] use HasTestSignature::*; use std::iter; use std::slice; use std::mem; use std::vec; use log::debug; use smallvec::{smallvec, SmallVec}; use syntax_pos::{DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; use crate::attr::{self, HasAttrs}; use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan}; use crate::config; use crate::entry::{self, EntryPointType}; use crate::ext::base::{ExtCtxt, Resolver}; use crate::ext::build::AstBuilder; use crate::ext::expand::ExpansionConfig; use crate::ext::hygiene::{self, Mark, SyntaxContext}; use crate::mut_visit::{*, ExpectOne}; use crate::feature_gate::Features; use crate::util::map_in_place::MapInPlace; use crate::parse::{token, ParseSess}; use crate::print::pprust; use crate::ast::{self, Ident}; use crate::ptr::P; use crate::symbol::{self, Symbol, keywords}; use crate::ThinVec; struct Test { span: Span, path: Vec<Ident>, } struct TestCtxt<'a> { span_diagnostic: &'a errors::Handler, path: Vec<Ident>, ext_cx: ExtCtxt<'a>, test_cases: Vec<Test>, reexport_test_harness_main: Option<Symbol>, is_libtest: bool, ctxt: SyntaxContext, features: &'a Features, test_runner: Option<ast::Path>, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option<Ident>, } // Traverse the crate, collecting all the test functions, eliding any // existing main functions, and synthesizing a main test harness pub fn modify_for_testing(sess: &ParseSess, resolver: &mut dyn Resolver, should_test: bool, krate: &mut ast::Crate, span_diagnostic: &errors::Handler, features: &Features) { // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use __test::main as some_name;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. 
let reexport_test_harness_main = attr::first_attr_value_str_by_name(&krate.attrs, "reexport_test_harness_main"); // Do this here so that the test_runner crate attribute gets marked as used // even in non-test builds let test_runner = get_test_runner(span_diagnostic, &krate); if should_test { generate_test_harness(sess, resolver, reexport_test_harness_main, krate, span_diagnostic, features, test_runner) } } struct TestHarnessGenerator<'a> { cx: TestCtxt<'a>, tests: Vec<Ident>, // submodule name, gensym'd identifier for re-exports tested_submods: Vec<(Ident, Ident)>, } impl<'a> MutVisitor for TestHarnessGenerator<'a> { fn visit_crate(&mut self, c: &mut ast::Crate) { noop_visit_crate(c, self); // Create a main function to run our tests let test_main = { let unresolved = mk_main(&mut self.cx); self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() }; c.module.items.push(test_main); } fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { let ident = i.ident; if ident.name != keywords::Invalid.name() { self.cx.path.push(ident); } debug!("current path: {}", path_name_i(&self.cx.path)); let mut item = i.into_inner(); if is_test_case(&item) { debug!("this is a test item"); let test = Test { span: item.span, path: self.cx.path.clone(), }; self.cx.test_cases.push(test); self.tests.push(item.ident); } // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things if let ast::ItemKind::Mod(mut module) = item.node { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); noop_visit_mod(&mut module, self); let tests = mem::replace(&mut self.tests, tests); let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); module.items.push(it); if !self.cx.path.is_empty() { self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); } else { debug!("pushing nothing, sym: {:?}", sym); self.cx.toplevel_reexport = Some(sym); } } item.node = ast::ItemKind::Mod(module); } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } smallvec![P(item)] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// A folder used to remove any entry points (like fn main) because the harness /// generator will provide its own struct EntryPointCleaner { // Current depth in the ast depth: usize, }
fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { self.depth += 1; let item = noop_flat_map_item(i, self).expect_one("noop did something"); self.depth -= 1; // Remove any #[main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. let item = match entry::entry_point_type(&item, self.depth) { EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { let allow_ident = Ident::from_str("allow"); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(DUMMY_SP, attr::mk_attr_id(), allow_dead_code_item); ast::Item { id, ident, attrs: attrs.into_iter() .filter(|attr| { !attr.check_name("main") && !attr.check_name("start") }) .chain(iter::once(allow_dead_code)) .collect(), node, vis, span, tokens, } }), EntryPointType::None | EntryPointType::OtherMain => item, }; smallvec![item] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// Creates an item (specifically a module) that "pub use"s the tests passed in. /// Each tested submodule will contain a similar reexport module that we will export /// under the name of the original module. That is, `submod::__test_reexports` is /// reexported like so `pub use submod::__test_reexports as submod`. fn mk_reexport_mod(cx: &mut TestCtxt<'_>, parent: ast::NodeId, tests: Vec<Ident>, tested_submods: Vec<(Ident, Ident)>) -> (P<ast::Item>, Ident) { let super_ = Ident::from_str("super"); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), cx.ext_cx.path(DUMMY_SP, vec![super_, r])) }).chain(tested_submods.into_iter().map(|(r, sym)| { let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]); cx.ext_cx.item_use_simple_(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), Some(r), path) })).collect(); let reexport_mod = ast::Mod { inline: true, inner: DUMMY_SP, items, }; let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports")); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Mod(reexport_mod), vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); (it, sym) } /// Crawl over the crate, inserting test reexports and the test main function fn generate_test_harness(sess: &ParseSess, resolver: &mut dyn Resolver, reexport_test_harness_main: Option<Symbol>, krate: &mut ast::Crate, sd: &errors::Handler, features: &Features, test_runner: Option<ast::Path>) { // Remove the entry points let mut cleaner = EntryPointCleaner { depth: 0 }; cleaner.visit_crate(krate); let mark = Mark::fresh(Mark::root()); let mut econfig = ExpansionConfig::default("test".to_string()); econfig.features = Some(features); let cx = TestCtxt { span_diagnostic: sd, ext_cx: ExtCtxt::new(sess, econfig, resolver), path: Vec::new(), test_cases: Vec::new(), reexport_test_harness_main, // N.B., doesn't consider the value of `--crate-name` passed on the command line. 
is_libtest: attr::find_crate_name(&krate.attrs).map(|s| s == "test").unwrap_or(false), toplevel_reexport: None, ctxt: SyntaxContext::empty().apply_mark(mark), features, test_runner }; mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, def_site: None, format: MacroAttribute(Symbol::intern("test_case")), allow_internal_unstable: Some(vec![ Symbol::intern("main"), Symbol::intern("test"), Symbol::intern("rustc_attrs"), ].into()), allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); TestHarnessGenerator { cx, tests: Vec::new(), tested_submods: Vec::new(), }.visit_crate(krate); } /// Craft a span that will be ignored by the stability lint's /// call to source_map's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt<'_>, sp: Span) -> Span { sp.with_ctxt(cx.ctxt) } enum HasTestSignature { Yes, No(BadTestSignature), } #[derive(PartialEq)] enum BadTestSignature { NotEvenAFunction, WrongTypeSignature, NoArgumentsAllowed, ShouldPanicOnlyWithNoArgs, } /// Creates a function item for use as the main function of a test build. /// This function will call the `test_runner` as specified by the crate attribute fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { // Writing this out by hand with 'ignored_span': // pub fn main() { // #![main] // test::test_main_static(::std::os::args().as_slice(), &[..tests]); // } let sp = ignored_span(cx, DUMMY_SP); let ecx = &cx.ext_cx; let test_id = ecx.ident_of("test").gensym(); // test::test_main_static(...) let mut test_runner = cx.test_runner.clone().unwrap_or( ecx.path(sp, vec![ test_id, ecx.ident_of("test_main_static") ])); test_runner.span = sp; let test_main_path_expr = ecx.expr_path(test_runner); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx)]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // extern crate test as test_gensym let test_extern_stmt = ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(Some(Symbol::intern("test"))) )); // pub fn main() { ... 
} let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); // If no test runner is provided we need to import the test crate let main_body = if cx.test_runner.is_none() { ecx.block(sp, vec![test_extern_stmt, call_test_main]) } else { ecx.block(sp, vec![call_test_main]) }; let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)), ast::FnHeader::default(), ast::Generics::default(), main_body); // Honor the reexport_test_harness_main attribute let main_id = Ident::new( cx.reexport_test_harness_main.unwrap_or(Symbol::gensym("main")), sp); P(ast::Item { ident: main_id, attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, }) } fn path_name_i(idents: &[Ident]) -> String { let mut path_name = "".to_string(); let mut idents_iter = idents.iter().peekable(); while let Some(ident) = idents_iter.next() { path_name.push_str(&ident.as_str()); if idents_iter.peek().is_some() { path_name.push_str("::") } } path_name } /// Creates a slice containing every test like so: /// &[path::to::test1, path::to::test2] fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> { debug!("building test vector from {} tests", cx.test_cases.len()); let ref ecx = cx.ext_cx; ecx.expr_vec_slice(DUMMY_SP, cx.test_cases.iter().map(|test| { ecx.expr_addr_of(test.span, ecx.expr_path(ecx.path(test.span, visible_path(cx, &test.path)))) }).collect()) } /// Creates a path from the top-level __test module to the test via __test_reexports fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{ let mut visible_path = vec![]; match cx.toplevel_reexport { Some(id) => visible_path.push(id), None => { cx.span_diagnostic.bug("expected to find top-level re-export name, but found None"); } } visible_path.extend_from_slice(path); visible_path } fn is_test_case(i: &ast::Item) -> bool { attr::contains_name(&i.attrs, "rustc_test_marker") } fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> { let test_attr = attr::find_by_name(&krate.attrs, "test_runner")?; test_attr.meta_item_list().map(|meta_list| { if meta_list.len() != 1 { sd.span_fatal(test_attr.span, "#![test_runner(..)] accepts exactly 1 argument").raise() } match meta_list[0].meta_item() { Some(meta_item) if meta_item.is_word() => meta_item.path.clone(), _ => sd.span_fatal(test_attr.span, "`test_runner` argument must be a path").raise() } }) }
impl MutVisitor for EntryPointCleaner {
random_line_split
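In this sample the `fim_type` is `random_line_split`: the held-out `middle` is the single line `impl MutVisitor for EntryPointCleaner {`, so the cut falls on a line boundary rather than a syntactic unit. A hedged sketch of how such a split could be produced from a source file — the dataset's actual splitting procedure is not documented here, so treating `random_line_split` as "hold out one randomly chosen interior line" is an assumption modeled on the record above:

import random


def random_line_split(source, rng=None):
    """Split source into (prefix, middle, suffix), holding out one random line.

    Assumption: mirrors the single-line middle seen in the record above;
    the real dataset pipeline may hold out longer spans.
    """
    rng = rng or random.Random(0)
    lines = source.splitlines(keepends=True)
    if len(lines) < 3:
        return source, "", ""
    cut = rng.randrange(1, len(lines) - 1)
    return "".join(lines[:cut]), lines[cut], "".join(lines[cut + 1:])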
test.rs
// Code that generates a test runner to run all the tests in a crate #![allow(dead_code)] #![allow(unused_imports)] use HasTestSignature::*; use std::iter; use std::slice; use std::mem; use std::vec; use log::debug; use smallvec::{smallvec, SmallVec}; use syntax_pos::{DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; use crate::attr::{self, HasAttrs}; use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan}; use crate::config; use crate::entry::{self, EntryPointType}; use crate::ext::base::{ExtCtxt, Resolver}; use crate::ext::build::AstBuilder; use crate::ext::expand::ExpansionConfig; use crate::ext::hygiene::{self, Mark, SyntaxContext}; use crate::mut_visit::{*, ExpectOne}; use crate::feature_gate::Features; use crate::util::map_in_place::MapInPlace; use crate::parse::{token, ParseSess}; use crate::print::pprust; use crate::ast::{self, Ident}; use crate::ptr::P; use crate::symbol::{self, Symbol, keywords}; use crate::ThinVec; struct Test { span: Span, path: Vec<Ident>, } struct TestCtxt<'a> { span_diagnostic: &'a errors::Handler, path: Vec<Ident>, ext_cx: ExtCtxt<'a>, test_cases: Vec<Test>, reexport_test_harness_main: Option<Symbol>, is_libtest: bool, ctxt: SyntaxContext, features: &'a Features, test_runner: Option<ast::Path>, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option<Ident>, } // Traverse the crate, collecting all the test functions, eliding any // existing main functions, and synthesizing a main test harness pub fn modify_for_testing(sess: &ParseSess, resolver: &mut dyn Resolver, should_test: bool, krate: &mut ast::Crate, span_diagnostic: &errors::Handler, features: &Features) { // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use __test::main as some_name;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. let reexport_test_harness_main = attr::first_attr_value_str_by_name(&krate.attrs, "reexport_test_harness_main"); // Do this here so that the test_runner crate attribute gets marked as used // even in non-test builds let test_runner = get_test_runner(span_diagnostic, &krate); if should_test { generate_test_harness(sess, resolver, reexport_test_harness_main, krate, span_diagnostic, features, test_runner) } } struct TestHarnessGenerator<'a> { cx: TestCtxt<'a>, tests: Vec<Ident>, // submodule name, gensym'd identifier for re-exports tested_submods: Vec<(Ident, Ident)>, } impl<'a> MutVisitor for TestHarnessGenerator<'a> { fn visit_crate(&mut self, c: &mut ast::Crate) { noop_visit_crate(c, self); // Create a main function to run our tests let test_main = { let unresolved = mk_main(&mut self.cx); self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() }; c.module.items.push(test_main); } fn
(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { let ident = i.ident; if ident.name != keywords::Invalid.name() { self.cx.path.push(ident); } debug!("current path: {}", path_name_i(&self.cx.path)); let mut item = i.into_inner(); if is_test_case(&item) { debug!("this is a test item"); let test = Test { span: item.span, path: self.cx.path.clone(), }; self.cx.test_cases.push(test); self.tests.push(item.ident); } // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things if let ast::ItemKind::Mod(mut module) = item.node { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); noop_visit_mod(&mut module, self); let tests = mem::replace(&mut self.tests, tests); let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); module.items.push(it); if !self.cx.path.is_empty() { self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); } else { debug!("pushing nothing, sym: {:?}", sym); self.cx.toplevel_reexport = Some(sym); } } item.node = ast::ItemKind::Mod(module); } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } smallvec![P(item)] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// A folder used to remove any entry points (like fn main) because the harness /// generator will provide its own struct EntryPointCleaner { // Current depth in the ast depth: usize, } impl MutVisitor for EntryPointCleaner { fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { self.depth += 1; let item = noop_flat_map_item(i, self).expect_one("noop did something"); self.depth -= 1; // Remove any #[main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. let item = match entry::entry_point_type(&item, self.depth) { EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { let allow_ident = Ident::from_str("allow"); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(DUMMY_SP, attr::mk_attr_id(), allow_dead_code_item); ast::Item { id, ident, attrs: attrs.into_iter() .filter(|attr| { !attr.check_name("main") && !attr.check_name("start") }) .chain(iter::once(allow_dead_code)) .collect(), node, vis, span, tokens, } }), EntryPointType::None | EntryPointType::OtherMain => item, }; smallvec![item] } fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Do nothing. } } /// Creates an item (specifically a module) that "pub use"s the tests passed in. /// Each tested submodule will contain a similar reexport module that we will export /// under the name of the original module. That is, `submod::__test_reexports` is /// reexported like so `pub use submod::__test_reexports as submod`. 
fn mk_reexport_mod(cx: &mut TestCtxt<'_>, parent: ast::NodeId, tests: Vec<Ident>, tested_submods: Vec<(Ident, Ident)>) -> (P<ast::Item>, Ident) { let super_ = Ident::from_str("super"); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), cx.ext_cx.path(DUMMY_SP, vec![super_, r])) }).chain(tested_submods.into_iter().map(|(r, sym)| { let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]); cx.ext_cx.item_use_simple_(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), Some(r), path) })).collect(); let reexport_mod = ast::Mod { inline: true, inner: DUMMY_SP, items, }; let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports")); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Mod(reexport_mod), vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); (it, sym) } /// Crawl over the crate, inserting test reexports and the test main function fn generate_test_harness(sess: &ParseSess, resolver: &mut dyn Resolver, reexport_test_harness_main: Option<Symbol>, krate: &mut ast::Crate, sd: &errors::Handler, features: &Features, test_runner: Option<ast::Path>) { // Remove the entry points let mut cleaner = EntryPointCleaner { depth: 0 }; cleaner.visit_crate(krate); let mark = Mark::fresh(Mark::root()); let mut econfig = ExpansionConfig::default("test".to_string()); econfig.features = Some(features); let cx = TestCtxt { span_diagnostic: sd, ext_cx: ExtCtxt::new(sess, econfig, resolver), path: Vec::new(), test_cases: Vec::new(), reexport_test_harness_main, // N.B., doesn't consider the value of `--crate-name` passed on the command line. is_libtest: attr::find_crate_name(&krate.attrs).map(|s| s == "test").unwrap_or(false), toplevel_reexport: None, ctxt: SyntaxContext::empty().apply_mark(mark), features, test_runner }; mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, def_site: None, format: MacroAttribute(Symbol::intern("test_case")), allow_internal_unstable: Some(vec![ Symbol::intern("main"), Symbol::intern("test"), Symbol::intern("rustc_attrs"), ].into()), allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); TestHarnessGenerator { cx, tests: Vec::new(), tested_submods: Vec::new(), }.visit_crate(krate); } /// Craft a span that will be ignored by the stability lint's /// call to source_map's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt<'_>, sp: Span) -> Span { sp.with_ctxt(cx.ctxt) } enum HasTestSignature { Yes, No(BadTestSignature), } #[derive(PartialEq)] enum BadTestSignature { NotEvenAFunction, WrongTypeSignature, NoArgumentsAllowed, ShouldPanicOnlyWithNoArgs, } /// Creates a function item for use as the main function of a test build. /// This function will call the `test_runner` as specified by the crate attribute fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { // Writing this out by hand with 'ignored_span': // pub fn main() { // #![main] // test::test_main_static(::std::os::args().as_slice(), &[..tests]); // } let sp = ignored_span(cx, DUMMY_SP); let ecx = &cx.ext_cx; let test_id = ecx.ident_of("test").gensym(); // test::test_main_static(...) 
let mut test_runner = cx.test_runner.clone().unwrap_or( ecx.path(sp, vec![ test_id, ecx.ident_of("test_main_static") ])); test_runner.span = sp; let test_main_path_expr = ecx.expr_path(test_runner); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx)]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // extern crate test as test_gensym let test_extern_stmt = ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(Some(Symbol::intern("test"))) )); // pub fn main() { ... } let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); // If no test runner is provided we need to import the test crate let main_body = if cx.test_runner.is_none() { ecx.block(sp, vec![test_extern_stmt, call_test_main]) } else { ecx.block(sp, vec![call_test_main]) }; let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)), ast::FnHeader::default(), ast::Generics::default(), main_body); // Honor the reexport_test_harness_main attribute let main_id = Ident::new( cx.reexport_test_harness_main.unwrap_or(Symbol::gensym("main")), sp); P(ast::Item { ident: main_id, attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, }) } fn path_name_i(idents: &[Ident]) -> String { let mut path_name = "".to_string(); let mut idents_iter = idents.iter().peekable(); while let Some(ident) = idents_iter.next() { path_name.push_str(&ident.as_str()); if idents_iter.peek().is_some() { path_name.push_str("::") } } path_name } /// Creates a slice containing every test like so: /// &[path::to::test1, path::to::test2] fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> { debug!("building test vector from {} tests", cx.test_cases.len()); let ref ecx = cx.ext_cx; ecx.expr_vec_slice(DUMMY_SP, cx.test_cases.iter().map(|test| { ecx.expr_addr_of(test.span, ecx.expr_path(ecx.path(test.span, visible_path(cx, &test.path)))) }).collect()) } /// Creates a path from the top-level __test module to the test via __test_reexports fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{ let mut visible_path = vec![]; match cx.toplevel_reexport { Some(id) => visible_path.push(id), None => { cx.span_diagnostic.bug("expected to find top-level re-export name, but found None"); } } visible_path.extend_from_slice(path); visible_path } fn is_test_case(i: &ast::Item) -> bool { attr::contains_name(&i.attrs, "rustc_test_marker") } fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> { let test_attr = attr::find_by_name(&krate.attrs, "test_runner")?; test_attr.meta_item_list().map(|meta_list| { if meta_list.len() != 1 { sd.span_fatal(test_attr.span, "#![test_runner(..)] accepts exactly 1 argument").raise() } match meta_list[0].meta_item() { Some(meta_item) if meta_item.is_word() => meta_item.path.clone(), _ => sd.span_fatal(test_attr.span, "`test_runner` argument must be a path").raise() } }) }
flat_map_item
identifier_name
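Here the `fim_type` is `identifier_name` and the masked `middle` is only the identifier `flat_map_item`, the name of the `MutVisitor` method being defined: the `prefix` stops at `fn` and the `suffix` resumes at the parameter list. A small hedged check for this kind of record, assuming an `identifier_name` sample masks exactly one identifier token (the helper name is illustrative):

import re

IDENTIFIER_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")


def is_identifier_name_middle(middle):
    """Return True if the held-out span looks like a single identifier.

    Assumption: 'identifier_name' records mask one identifier token,
    as with the 'flat_map_item' middle above.
    """
    return bool(IDENTIFIER_RE.match(middle.strip()))


print(is_identifier_name_middle("flat_map_item"))  # True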
healthCheck.js
const config = require('../config.js') const models = require('../models') const { handleResponse, successResponse, errorResponseServerError } = require('../apiHelpers') const { sequelize } = require('../models') const { getRelayerFunds, fundRelayerIfEmpty } = require('../relay/txRelay') const { getEthRelayerFunds } = require('../relay/ethTxRelay') const { solanaConnection } = require('../solana-client') const solanaWeb3 = require('@solana/web3.js') const Web3 = require('web3') const audiusLibsWrapper = require('../audiusLibsInstance') const { NOTIFICATION_JOB_LAST_SUCCESS_KEY, NOTIFICATION_EMAILS_JOB_LAST_SUCCESS_KEY, NOTIFICATION_ANNOUNCEMENTS_JOB_LAST_SUCCESS_KEY } = require('../notifications/index.js') const axios = require('axios') const moment = require('moment') // Defaults used in relay health check endpoint const RELAY_HEALTH_TEN_MINS_AGO_BLOCKS = 120 // 1 block/5sec = 120 blocks/10 minutes const RELAY_HEALTH_MAX_TRANSACTIONS = 100 // max transactions to look into const RELAY_HEALTH_MAX_ERRORS = 5 // max acceptable errors for a 200 response const RELAY_HEALTH_MAX_BLOCK_RANGE = 360 // max block range allowed from query params const RELAY_HEALTH_MIN_TRANSACTIONS = 5 // min number of tx's that must have happened within block diff const RELAY_HEALTH_ACCOUNTS = new Set(config.get('relayerWallets').map(wallet => wallet.publicKey)) const ETH_RELAY_HEALTH_ACCOUNTS = new Set(config.get('ethRelayerWallets').map(wallet => wallet.publicKey)) // flatten one level of nexted arrays const flatten = (arr) => arr.reduce((acc, val) => acc.concat(val), []) module.exports = function (app) { /** * Relay health check endpoint. Takes the query params startBlock, endBlock, maxTransactions, and maxErrors. * If those query params are not specified, use default values. */ /* There are a few scenarios where a health check should return unhealthy 1. Some number of relays are failing for some number of users To solve this, traverse the blocks for Audius transactions and count failures for users. If it's greater than some threshold, return error 2. Relays are not being sent / sent but not acknowledged by blockchain */ app.get('/health_check/relay', handleResponse(async (req, res) => { const start = Date.now() const audiusLibsInstance = req.app.get('audiusLibs') const redis = req.app.get('redis') const web3 = audiusLibsInstance.web3Manager.getWeb3() let endBlockNumber = parseInt((await web3.eth.getBlockNumber()), 10) let blockDiff = parseInt(req.query.blockDiff, 10) || RELAY_HEALTH_TEN_MINS_AGO_BLOCKS let maxTransactions = parseInt(req.query.maxTransactions, 10) || RELAY_HEALTH_MAX_TRANSACTIONS let maxErrors = parseInt(req.query.maxErrors, 10) || RELAY_HEALTH_MAX_ERRORS let minTransactions = parseInt(req.query.minTransactions) || RELAY_HEALTH_MIN_TRANSACTIONS let isVerbose = req.query.verbose || false // In the case that endBlockNumber - blockDiff goes negative, default startBlockNumber to 0 let startBlockNumber = Math.max(endBlockNumber - blockDiff, 0) // If query params are invalid, throw server error if ( isNaN(startBlockNumber) || isNaN(endBlockNumber) || startBlockNumber < 0 || endBlockNumber < 0 || endBlockNumber < startBlockNumber ) { return errorResponseServerError(`Invalid start and/or end block. startBlock: ${startBlockNumber}, endBlock: ${endBlockNumber}`) } if (endBlockNumber - startBlockNumber > RELAY_HEALTH_MAX_BLOCK_RANGE) { return errorResponseServerError(`Block difference is over ${RELAY_HEALTH_MAX_BLOCK_RANGE}. 
startBlock: ${startBlockNumber}, endBlock: ${endBlockNumber}`) } if ( isNaN(maxTransactions) || isNaN(maxErrors) || maxTransactions < 0 || maxErrors < 0 ) { return errorResponseServerError(`Invalid number of transactions and/or errors. maxTransactions: ${maxTransactions}, maxErrors: ${maxErrors}`) } let failureTxs = {} // senderAddress: [<txHash>] let txCounter = 0 let minBlockTime = null let maxBlockTime = null req.logger.info( `Searching for transactions to/from relay accounts within blocks ${startBlockNumber} and ${endBlockNumber}` ) // Iterate through the range of blocks, looking into the max number of transactions that are from audius for (let i = endBlockNumber; i > startBlockNumber; i--) { // If the max number of transactions have been evaluated, break out if (txCounter > maxTransactions) break let block = await web3.eth.getBlock(i, true) if (!block) { req.logger.error(`Could not find block for health_check/relay ${i}`) continue } if (!minBlockTime || block.timestamp < minBlockTime) minBlockTime = block.timestamp if (!maxBlockTime || block.timestamp > maxBlockTime) maxBlockTime = block.timestamp if (block.transactions.length) { for (const tx of block.transactions) { // If transaction is from audius account, determine success or fail status if (RELAY_HEALTH_ACCOUNTS.has(tx.from)) { const txHash = tx.hash const resp = await web3.eth.getTransactionReceipt(txHash) txCounter++ // tx failed if (!resp.status) { const senderAddress = await redis.hget('txHashToSenderAddress', txHash) if (senderAddress) { if (!failureTxs[senderAddress]) failureTxs[senderAddress] = [txHash] else failureTxs[senderAddress].push(txHash) } else { failureTxs['unknown'] = (failureTxs['unknown'] || []).concat(txHash) } } } } } } let isError = false // delete old entries from set in redis const epochOneHourAgo = Math.floor(Date.now() / 1000) - 3600 await redis.zremrangebyscore('relayTxAttempts', '-inf', epochOneHourAgo) await redis.zremrangebyscore('relayTxFailures', '-inf', epochOneHourAgo) await redis.zremrangebyscore('relayTxSuccesses', '-inf', epochOneHourAgo) // check if there have been any attempts in the time window that we processed the block health check const attemptedTxsInRedis = await redis.zrangebyscore('relayTxAttempts', minBlockTime, maxBlockTime) const successfulTxsInRedis = await redis.zrangebyscore('relayTxSuccesses', minBlockTime, maxBlockTime) const failureTxsInRedis = await redis.zrangebyscore('relayTxFailures', minBlockTime, maxBlockTime) if (txCounter < minTransactions) isError = true const serverResponse = { blockchain: { numberOfTransactions: txCounter, minTransactions, numberOfFailedTransactions: flatten(Object.values(failureTxs)).length, failedTransactionHashes: failureTxs, startBlock: startBlockNumber, endBlock: endBlockNumber }, redis: { attemptedTxsCount: attemptedTxsInRedis.length, successfulTxsCount: successfulTxsInRedis.length, failureTxsCount: failureTxsInRedis.length }, healthCheckComputeTime: Date.now() - start } if (isVerbose) { serverResponse.redis = { ...serverResponse.redis, attemptedTxsInRedis, successfulTxsInRedis, failureTxsInRedis } } if (isError) return errorResponseServerError(serverResponse) else return successResponse(serverResponse) })) app.get('/health_check', handleResponse(async (req, res) => { // for now we just check db connectivity await sequelize.query('SELECT 1', { type: sequelize.QueryTypes.SELECT }) // get connected discprov via libs const audiusLibsInstance = req.app.get('audiusLibs') return successResponse({ 'healthy': true, 'git': process.env.GIT_SHA, 
selectedDiscoveryProvider: audiusLibsInstance.discoveryProvider.discoveryProviderEndpoint }) })) app.get('/balance_check', handleResponse(async (req, res) => { let { minimumBalance, minimumRelayerBalance } = req.query minimumBalance = parseFloat(minimumBalance || config.get('minimumBalance')) minimumRelayerBalance = parseFloat(minimumRelayerBalance || config.get('minimumRelayerBalance')) let belowMinimumBalances = [] let balances = [] // run fundRelayerIfEmpty so it'll auto top off any accounts below the threshold try { await fundRelayerIfEmpty() } catch (err) { req.logger.error(`Failed to fund relayer with error: ${err}`) } balances = await Promise.all( [...RELAY_HEALTH_ACCOUNTS].map(async account => { let balance = parseFloat(Web3.utils.fromWei(await getRelayerFunds(account), 'ether')) if (balance < minimumBalance) { belowMinimumBalances.push({ account, balance }) } return { account, balance } }) ) const relayerPublicKey = config.get('relayerPublicKey') const relayerBalance = parseFloat(Web3.utils.fromWei(await getRelayerFunds(relayerPublicKey), 'ether')) const relayerAboveMinimum = relayerBalance >= minimumRelayerBalance // no accounts below minimum balance if (!belowMinimumBalances.length && relayerAboveMinimum) { return successResponse({ 'above_balance_minimum': true, 'minimum_balance': minimumBalance, 'balances': balances, 'relayer': { 'wallet': relayerPublicKey, 'balance': relayerBalance, 'above_balance_minimum': relayerAboveMinimum } }) } else { return errorResponseServerError({ 'above_balance_minimum': false, 'minimum_balance': minimumBalance, 'balances': balances, 'below_minimum_balance': belowMinimumBalances, 'relayer': { 'wallet': relayerPublicKey, 'balance': relayerBalance, 'above_balance_minimum': relayerAboveMinimum } }) } })) app.get('/eth_balance_check', handleResponse(async (req, res) => { let { minimumBalance, minimumFunderBalance } = req.query minimumBalance = parseFloat(minimumBalance || config.get('ethMinimumBalance')) minimumFunderBalance = parseFloat(minimumFunderBalance || config.get('ethMinimumFunderBalance')) let funderAddress = config.get('ethFunderAddress') let funderBalance = parseFloat(Web3.utils.fromWei(await getEthRelayerFunds(funderAddress), 'ether')) let funderAboveMinimum = funderBalance >= minimumFunderBalance let belowMinimumBalances = [] const balances = await Promise.all( [...ETH_RELAY_HEALTH_ACCOUNTS].map(async account => { let balance = parseFloat(Web3.utils.fromWei(await getEthRelayerFunds(account), 'ether')) if (balance < minimumBalance) { belowMinimumBalances.push({ account, balance }) } return { account, balance } }) ) let balanceResponse = { 'minimum_balance': minimumBalance, 'balances': balances, 'funder': { 'wallet': funderAddress, 'balance': funderBalance, 'above_balance_minimum': funderAboveMinimum } } // no accounts below minimum balance if (!belowMinimumBalances.length && funderAboveMinimum) { return successResponse({ 'above_balance_minimum': true, ...balanceResponse }) } else { return errorResponseServerError({ 'above_balance_minimum': false, 'below_minimum_balance': belowMinimumBalances, ...balanceResponse }) } })) app.get('/sol_balance_check', handleResponse(async (req, res) => { const minimumBalance = parseFloat(req.query.minimumBalance || config.get('solMinimumBalance')) const solanaFeePayerWallet = config.get('solanaFeePayerWallet') let solanaFeePayerPublicKey = null let balance = 0 if (solanaFeePayerWallet) { solanaFeePayerPublicKey = (new solanaWeb3.Account(solanaFeePayerWallet)).publicKey balance = await 
solanaConnection.getBalance(solanaFeePayerPublicKey) } const sol = Math.floor(balance / (10 ** 9)) const lamports = balance % (10 ** 9) if (balance > minimumBalance) { return successResponse({ above_balance_minimum: true, balance: { sol, lamports }, wallet: solanaFeePayerPublicKey ? solanaFeePayerPublicKey.toBase58() : null }) } return errorResponseServerError({ above_balance_minimum: false, balance: { sol, lamports }, wallet: solanaFeePayerPublicKey ? solanaFeePayerPublicKey.toBase58() : null }) })) app.get('/notification_check', handleResponse(async (req, res) => { let { maxBlockDifference, maxDrift } = req.query maxBlockDifference = maxBlockDifference || 100 let highestBlockNumber = await models.NotificationAction.max('blocknumber') if (!highestBlockNumber) { highestBlockNumber = config.get('notificationStartBlock') } let redis = req.app.get('redis') let maxFromRedis = await redis.get('maxBlockNumber') if (maxFromRedis) { highestBlockNumber = parseInt(maxFromRedis) } // Get job success timestamps const notificationJobLastSuccess = await redis.get(NOTIFICATION_JOB_LAST_SUCCESS_KEY) const notificationEmailsJobLastSuccess = await redis.get(NOTIFICATION_EMAILS_JOB_LAST_SUCCESS_KEY) const notificationAnnouncementsJobLastSuccess = await redis.get(NOTIFICATION_ANNOUNCEMENTS_JOB_LAST_SUCCESS_KEY) const { discoveryProvider } = audiusLibsWrapper.getAudiusLibs() let body = (await axios({ method: 'get', url: `${discoveryProvider.discoveryProviderEndpoint}/health_check` })).data let discProvDbHighestBlock = body.data['db']['number']
let notifBlockDiff = discProvDbHighestBlock - highestBlockNumber let resp = { 'discProv': body.data, 'identity': highestBlockNumber, 'notifBlockDiff': notifBlockDiff, notificationJobLastSuccess, notificationEmailsJobLastSuccess, notificationAnnouncementsJobLastSuccess } // Test if last runs were recent enough let withinBounds = true if (maxDrift) { const cutoff = moment().subtract(maxDrift, 'seconds') const isWithinBounds = (key) => key ? moment(key).isAfter(cutoff) : true withinBounds = ( isWithinBounds(notificationJobLastSuccess) && isWithinBounds(notificationEmailsJobLastSuccess) && isWithinBounds(notificationAnnouncementsJobLastSuccess) ) } if (!withinBounds || notifBlockDiff > maxBlockDifference) { return errorResponseServerError(resp) } return successResponse(resp) })) /** * Exposes current and max db connection stats. * Returns error if db connection threshold exceeded, else success. */ app.get('/db_check', handleResponse(async (req, res) => { const verbose = (req.query.verbose === 'true') const maxConnections = config.get('pgConnectionPoolMax') let numConnections = 0 let connectionInfo = null let activeConnections = null let idleConnections = null // Get number of open DB connections const numConnectionsQuery = await sequelize.query("SELECT numbackends from pg_stat_database where datname = 'audius_centralized_service'") if (numConnectionsQuery && numConnectionsQuery[0] && numConnectionsQuery[0][0] && numConnectionsQuery[0][0].numbackends) { numConnections = numConnectionsQuery[0][0].numbackends } // Get detailed connection info const connectionInfoQuery = (await sequelize.query("select wait_event_type, wait_event, state, query from pg_stat_activity where datname = 'audius_centralized_service'")) if (connectionInfoQuery && connectionInfoQuery[0]) { connectionInfo = connectionInfoQuery[0] activeConnections = (connectionInfo.filter(conn => conn.state === 'active')).length idleConnections = (connectionInfo.filter(conn => conn.state === 'idle')).length } const resp = { 'git': process.env.GIT_SHA, connectionStatus: { total: numConnections, active: activeConnections, idle: idleConnections }, maxConnections } if (verbose) { resp.connectionInfo = connectionInfo } return (numConnections >= maxConnections) ? errorResponseServerError(resp) : successResponse(resp) })) }
random_line_split
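The `/health_check/relay` endpoint in the record above walks recent blocks, tallies failed transactions per relay sender address, and returns an error when fewer than `minTransactions` relay transactions are seen in the window (with `RELAY_HEALTH_MAX_ERRORS` documented as the acceptable failure ceiling). A hedged Python sketch of that tally-and-decide step, detached from web3 and Redis — the function name and the shape of the `txs` items are illustrative, not part of the codebase:

def summarize_relay_health(txs, relay_accounts, min_transactions, max_errors):
    """Tally relayed transactions and decide whether the window looks healthy.

    txs: iterable of dicts with 'from', 'hash', and boolean 'status' keys
    (illustrative shape; the real endpoint reads these from web3 receipts).
    """
    failures = {}
    seen = 0
    for tx in txs:
        if tx["from"] not in relay_accounts:
            continue
        seen += 1
        if not tx["status"]:
            failures.setdefault(tx["from"], []).append(tx["hash"])
    failed = sum(len(v) for v in failures.values())
    # The route shown gates only on the transaction count; the max_errors
    # check follows the documented intent of RELAY_HEALTH_MAX_ERRORS.
    healthy = seen >= min_transactions and failed <= max_errors
    return {
        "numberOfTransactions": seen,
        "numberOfFailedTransactions": failed,
        "failedTransactionHashes": failures,
        "healthy": healthy,
    }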
utils.py
#!/usr/bin/env python import os import sys import csv import numpy as np import theano import lasagne from sklearn.metrics import f1_score from multiprocessing import Pool theano.config.exception_verbosity = 'high' floatX = theano.config.floatX epsilon = np.float32(1e-6) one = np.float32(1) pf = np.float32(0.5) # IO ver = sys.version_info if ver >= (3, 0): import pickle as pk opts_write = {'encoding': 'utf-8', 'newline': ''} opts_read = {'encoding': 'utf-8'} else: import cPickle as pk opts_write = {} opts_read = {} # IO def read_lines(file_path): with open(file_path, 'r') as opdrf: data = [term.strip() for term in opdrf.readlines()] return data def write_lines(file_path, data_list): with open(file_path, 'w', **opts_write) as opdwf: opdwf.writelines([str(term)+'\n' for term in data_list]) def read_tsv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf, delimiter='\t') data = [term for term in csv_reader] return data def read_csv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf) data = [term for term in csv_reader] return data def pickle(file_path, obj, protocol=2): """ For python 3 compatibility, use protocol 2 """ if not file_path.endswith('.pkl'): file_path += '.pkl' with open(file_path, 'wb') as opdwf: pk.dump(obj, opdwf, protocol=protocol) def unpickle(file_path): with open(file_path, 'rb') as opdrf: data = pk.load(opdrf) return data # Load data def load_data_multiscale(data_dir, scale_list): X_tr_list = list() y_tr_list = list() X_te_list = list() y_te_list = list() X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_tr_fp = os.path.join(data_dir, scale, 'feat.tr.npy') target_tr_fp = os.path.join(data_dir, scale, 'target.tr.npy') feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_tr = np.load(feat_tr_fp) y_tr = np.load(target_tr_fp) X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_tr_list.append(X_tr) y_tr_list.append(y_tr) X_te_list.append(X_te) y_te_list.append(y_te) X_va_list.append(X_va) y_va_list.append(y_va) y_tr = y_tr_list[0] y_va = y_va_list[0] y_te = y_te_list[0] return X_tr_list, y_tr, X_va_list, y_va, X_te_list, y_te def load_data_multiscale_te(data_dir, scale_list): X_te_list = list() y_te_list = list() for ii, scale in enumerate(scale_list): feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_te_list.append(X_te) y_te_list.append(y_te) y_te = y_te_list[0] return X_te_list, y_te def load_data_multiscale_va(data_dir, scale_list): X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) # append X_va_list.append(X_va) y_va_list.append(y_va) y_va = y_va_list[0] return X_va_list, y_va # Recursively convert string to int in a list def to_int(data_list): return [to_int(term) if type(term) == list else int(term) for term in data_list] # Iterate inputs def iterate_minibatches_multiscale(inputs_list, targets, batchsize, shuffle=False): if type(targets) == np.ndarray: n = len(targets) k 
= targets.shape[-1] for inputs in inputs_list: assert len(inputs) == n if shuffle:
for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list], \ targets[excerpt].reshape((-1, k)) def iterate_minibatches_multiscale_feat(inputs_list, batchsize, shuffle=False): n = len(inputs_list[0]) for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list] # Functions used in train for recording and printing def check_best_loss(best_val_loss, val_loss): if val_loss < best_val_loss: best_val_loss = val_loss best_val_updated = True else: best_val_updated = False return best_val_loss, best_val_updated def print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss): print("Epoch {} of {}.".format(epoch, n_epochs)) print(" training loss: {:.6f}".format(mean_tr_loss)) print(" validation loss: {:.6f}".format(mean_va_loss)) print(" best va (epoch, loss):({}, {:.6f})".format( best_va_epoch, best_va_loss )) print(" ") # Multiple input sources def train_multiscale( X_tr_list, y_tr, X_va_list, y_va, network, train_func, va_func, n_epochs, batch_size, lr_var, param_fp=None): print("Starting training...") best_va_epoch = 0 best_va_loss = np.inf for epoch in range(1, n_epochs+1): train_loss = 0 train_batches = 0 # Training for batch_ in iterate_minibatches_multiscale(X_tr_list, y_tr, batch_size, shuffle=True): inputs_list, targets = batch_ temp = inputs_list+[targets] train_loss_one = train_func(*temp) train_loss += train_loss_one train_batches += 1 mean_tr_loss = train_loss/train_batches # Validation pre_list, mean_va_loss = validate_multiscale(X_va_list, y_va, va_func) # Check best loss best_va_loss, best_va_updated = check_best_loss( best_va_loss, mean_va_loss) if best_va_updated: best_va_epoch = epoch if param_fp is not None: save_model(param_fp, network) # Print the results for this epoch: print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss) def validate_multiscale(X_list, y, val_func): val_loss = 0 val_batches = 0 pre_list = [] for batch in iterate_minibatches_multiscale(X_list, y, 1, shuffle=False): inputs_list, targets = batch temp = inputs_list+[targets] pre, loss = val_func(*temp) val_loss += loss val_batches += 1 pre_list.append(pre) mean_val_loss = val_loss / val_batches return pre_list, mean_val_loss def predict_multiscale(X_list, pr_func): pre_list = [] for inputs_list in iterate_minibatches_multiscale_feat( X_list, 1, shuffle=False): pre = pr_func(*inputs_list) pre_list.append(pre) return pre_list # Save/load def save_model(fp, network): np.savez(fp, *lasagne.layers.get_all_param_values(network)) def load_model(fp, network): with np.load(fp) as f: param_values = [f['arr_%d' % i] for i in range(len(f.files))] lasagne.layers.set_all_param_values(network, param_values) # Get thresholds def f1_one(y_target, y_predicted): ''' y_target, y_predicted: 1D binary array ''' return f1_score(y_target, y_predicted, average='binary') def f1(Y_target, Y_predicted): ''' Y_target, Y_predicted: n x k 2D binary array, where n is the number of data and k is the number of tags ''' scores = [f1_one(y_target, y_predicted) for y_target, y_predicted in zip(Y_target.T, Y_predicted.T)] scores = np.array(scores) 
return scores def get_measure(arg): threshold, prediction, target, step_size, lower_b, measure_func = arg pred_binary = ((prediction-threshold) > 0).astype(int) measures = measure_func(target, pred_binary) return measures def get_thresholds(pred, target, search_range, step_size, measure_func=f1, n_processes=20): ''' pred: np.array prediction from a model n x k 2D array, where n is the number of data and k is the number of tags target: np.array groundtruth n x k 2D binary array, where n is the number of data and k is the number of tags search_range: tuple the range for searching the thresholds (a, b), where a is the lower bound and b is the upper bound step_size: float searching the threholds in (a, a+step_size, a+2step_size, ..., ...) measure_func: function or str function defined in the begining of this fild ''' lower_b, upper_b = search_range assert(upper_b > lower_b) if measure_func == 'f1': measure_func = f1 n_tags = target.shape[1] diff = upper_b-lower_b n_steps = int(np.floor(diff/step_size)) threshold_list = [lower_b+ii*step_size for ii in range(n_steps+1)] arg_list = [] for th in threshold_list: arg_list.append( (th, pred, target, step_size, lower_b, measure_func)) pool = Pool(processes=n_processes) all_measures = np.array(pool.map(get_measure, arg_list)) pool.close() # print(all_measures.shape) best_idx_list = np.argmax(all_measures, axis=0) best_thresholds = lower_b+best_idx_list*step_size best_measures = all_measures[best_idx_list, [ii for ii in range(n_tags)]] # print(n_tags, len(best_idx_list)) return best_thresholds, best_measures # Upscale array def shift(array_list, shift_size, axis): n_axes = len(array_list[0].shape) obj = [slice(None, None, None) for ii in range(n_axes)] obj[axis] = slice(shift_size, None, 1) obj = tuple(obj) pad_width = [(0, 0) for ii in range(n_axes)] pad_width[axis] = (0, shift_size) out_array_list = [np.pad(array[obj], pad_width, 'constant') for array in array_list] return out_array_list def upscale(func, input_list, method='naive', scale_factor=1, in_axis=2, out_axis=2): ''' array: numpy.array method: str 'naive' or 'patching' scale_factor: int ''' assert(method in ['naive', 'patching']) if method == 'naive': array = func(*input_list)[0] new_array = np.repeat(array, scale_factor, axis=out_axis) elif method == 'patching': output_list = [func(*shift(input_list, ii, axis=in_axis))[0] for ii in range(scale_factor)] output = np.stack(output_list, axis=out_axis+1) new_shape = list(output_list[0].shape) new_shape[out_axis] = -1 new_shape = tuple(new_shape) new_array = np.reshape(output, new_shape) return new_array # Process tag list def get_test_tag_indices(tag_tr_fp, tag_te_fp, tag_conv_fp): tag_te_list = read_lines(tag_te_fp) tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list def get_test_tag_50(tag_tr_fp, tag_te_fp): tag_te_list = read_lines(tag_te_fp) #tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag) for tag in tag_te_list] #tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list
indices = np.arange(n) np.random.shuffle(indices)
conditional_block
utils.py
#!/usr/bin/env python import os import sys import csv import numpy as np import theano import lasagne from sklearn.metrics import f1_score from multiprocessing import Pool theano.config.exception_verbosity = 'high' floatX = theano.config.floatX epsilon = np.float32(1e-6) one = np.float32(1) pf = np.float32(0.5) # IO ver = sys.version_info if ver >= (3, 0): import pickle as pk opts_write = {'encoding': 'utf-8', 'newline': ''} opts_read = {'encoding': 'utf-8'} else: import cPickle as pk opts_write = {} opts_read = {} # IO def read_lines(file_path): with open(file_path, 'r') as opdrf: data = [term.strip() for term in opdrf.readlines()] return data def write_lines(file_path, data_list): with open(file_path, 'w', **opts_write) as opdwf: opdwf.writelines([str(term)+'\n' for term in data_list]) def read_tsv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf, delimiter='\t') data = [term for term in csv_reader] return data def read_csv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf) data = [term for term in csv_reader] return data def pickle(file_path, obj, protocol=2): """ For python 3 compatibility, use protocol 2 """ if not file_path.endswith('.pkl'): file_path += '.pkl' with open(file_path, 'wb') as opdwf: pk.dump(obj, opdwf, protocol=protocol) def unpickle(file_path): with open(file_path, 'rb') as opdrf: data = pk.load(opdrf) return data # Load data def load_data_multiscale(data_dir, scale_list): X_tr_list = list() y_tr_list = list() X_te_list = list() y_te_list = list() X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_tr_fp = os.path.join(data_dir, scale, 'feat.tr.npy') target_tr_fp = os.path.join(data_dir, scale, 'target.tr.npy') feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_tr = np.load(feat_tr_fp) y_tr = np.load(target_tr_fp) X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_tr_list.append(X_tr) y_tr_list.append(y_tr) X_te_list.append(X_te) y_te_list.append(y_te) X_va_list.append(X_va) y_va_list.append(y_va) y_tr = y_tr_list[0] y_va = y_va_list[0] y_te = y_te_list[0] return X_tr_list, y_tr, X_va_list, y_va, X_te_list, y_te def load_data_multiscale_te(data_dir, scale_list): X_te_list = list() y_te_list = list() for ii, scale in enumerate(scale_list): feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_te_list.append(X_te) y_te_list.append(y_te) y_te = y_te_list[0] return X_te_list, y_te def load_data_multiscale_va(data_dir, scale_list): X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) # append X_va_list.append(X_va) y_va_list.append(y_va) y_va = y_va_list[0] return X_va_list, y_va # Recursively convert string to int in a list def to_int(data_list): return [to_int(term) if type(term) == list else int(term) for term in data_list] # Iterate inputs def iterate_minibatches_multiscale(inputs_list, targets, batchsize, shuffle=False): if type(targets) == np.ndarray: n = len(targets) k 
= targets.shape[-1] for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list], \ targets[excerpt].reshape((-1, k)) def iterate_minibatches_multiscale_feat(inputs_list, batchsize, shuffle=False): n = len(inputs_list[0]) for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list] # Functions used in train for recording and printing def check_best_loss(best_val_loss, val_loss): if val_loss < best_val_loss: best_val_loss = val_loss best_val_updated = True else: best_val_updated = False return best_val_loss, best_val_updated def print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss): print("Epoch {} of {}.".format(epoch, n_epochs)) print(" training loss: {:.6f}".format(mean_tr_loss)) print(" validation loss: {:.6f}".format(mean_va_loss)) print(" best va (epoch, loss):({}, {:.6f})".format( best_va_epoch, best_va_loss )) print(" ") # Multiple input sources def train_multiscale( X_tr_list, y_tr, X_va_list, y_va, network, train_func, va_func, n_epochs, batch_size, lr_var, param_fp=None): print("Starting training...") best_va_epoch = 0 best_va_loss = np.inf for epoch in range(1, n_epochs+1): train_loss = 0 train_batches = 0 # Training for batch_ in iterate_minibatches_multiscale(X_tr_list, y_tr, batch_size, shuffle=True): inputs_list, targets = batch_ temp = inputs_list+[targets] train_loss_one = train_func(*temp) train_loss += train_loss_one train_batches += 1 mean_tr_loss = train_loss/train_batches # Validation pre_list, mean_va_loss = validate_multiscale(X_va_list, y_va, va_func) # Check best loss best_va_loss, best_va_updated = check_best_loss( best_va_loss, mean_va_loss) if best_va_updated: best_va_epoch = epoch if param_fp is not None: save_model(param_fp, network) # Print the results for this epoch: print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss) def validate_multiscale(X_list, y, val_func): val_loss = 0 val_batches = 0 pre_list = [] for batch in iterate_minibatches_multiscale(X_list, y, 1, shuffle=False): inputs_list, targets = batch temp = inputs_list+[targets] pre, loss = val_func(*temp) val_loss += loss val_batches += 1 pre_list.append(pre) mean_val_loss = val_loss / val_batches return pre_list, mean_val_loss def predict_multiscale(X_list, pr_func): pre_list = [] for inputs_list in iterate_minibatches_multiscale_feat( X_list, 1, shuffle=False): pre = pr_func(*inputs_list) pre_list.append(pre) return pre_list # Save/load def save_model(fp, network): np.savez(fp, *lasagne.layers.get_all_param_values(network)) def load_model(fp, network): with np.load(fp) as f: param_values = [f['arr_%d' % i] for i in range(len(f.files))] lasagne.layers.set_all_param_values(network, param_values) # Get thresholds def f1_one(y_target, y_predicted): ''' y_target, y_predicted: 1D binary array ''' return f1_score(y_target, y_predicted, average='binary') def f1(Y_target, Y_predicted): ''' Y_target, Y_predicted: n x k 2D binary array, where n is the number of data and k is the number of tags 
''' scores = [f1_one(y_target, y_predicted) for y_target, y_predicted in zip(Y_target.T, Y_predicted.T)] scores = np.array(scores) return scores def get_measure(arg): threshold, prediction, target, step_size, lower_b, measure_func = arg
measures = measure_func(target, pred_binary) return measures def get_thresholds(pred, target, search_range, step_size, measure_func=f1, n_processes=20): ''' pred: np.array prediction from a model n x k 2D array, where n is the number of data and k is the number of tags target: np.array groundtruth n x k 2D binary array, where n is the number of data and k is the number of tags search_range: tuple the range for searching the thresholds (a, b), where a is the lower bound and b is the upper bound step_size: float searching the threholds in (a, a+step_size, a+2step_size, ..., ...) measure_func: function or str function defined in the begining of this fild ''' lower_b, upper_b = search_range assert(upper_b > lower_b) if measure_func == 'f1': measure_func = f1 n_tags = target.shape[1] diff = upper_b-lower_b n_steps = int(np.floor(diff/step_size)) threshold_list = [lower_b+ii*step_size for ii in range(n_steps+1)] arg_list = [] for th in threshold_list: arg_list.append( (th, pred, target, step_size, lower_b, measure_func)) pool = Pool(processes=n_processes) all_measures = np.array(pool.map(get_measure, arg_list)) pool.close() # print(all_measures.shape) best_idx_list = np.argmax(all_measures, axis=0) best_thresholds = lower_b+best_idx_list*step_size best_measures = all_measures[best_idx_list, [ii for ii in range(n_tags)]] # print(n_tags, len(best_idx_list)) return best_thresholds, best_measures # Upscale array def shift(array_list, shift_size, axis): n_axes = len(array_list[0].shape) obj = [slice(None, None, None) for ii in range(n_axes)] obj[axis] = slice(shift_size, None, 1) obj = tuple(obj) pad_width = [(0, 0) for ii in range(n_axes)] pad_width[axis] = (0, shift_size) out_array_list = [np.pad(array[obj], pad_width, 'constant') for array in array_list] return out_array_list def upscale(func, input_list, method='naive', scale_factor=1, in_axis=2, out_axis=2): ''' array: numpy.array method: str 'naive' or 'patching' scale_factor: int ''' assert(method in ['naive', 'patching']) if method == 'naive': array = func(*input_list)[0] new_array = np.repeat(array, scale_factor, axis=out_axis) elif method == 'patching': output_list = [func(*shift(input_list, ii, axis=in_axis))[0] for ii in range(scale_factor)] output = np.stack(output_list, axis=out_axis+1) new_shape = list(output_list[0].shape) new_shape[out_axis] = -1 new_shape = tuple(new_shape) new_array = np.reshape(output, new_shape) return new_array # Process tag list def get_test_tag_indices(tag_tr_fp, tag_te_fp, tag_conv_fp): tag_te_list = read_lines(tag_te_fp) tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list def get_test_tag_50(tag_tr_fp, tag_te_fp): tag_te_list = read_lines(tag_te_fp) #tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag) for tag in tag_te_list] #tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list
pred_binary = ((prediction-threshold) > 0).astype(int)
random_line_split
utils.py
#!/usr/bin/env python import os import sys import csv import numpy as np import theano import lasagne from sklearn.metrics import f1_score from multiprocessing import Pool theano.config.exception_verbosity = 'high' floatX = theano.config.floatX epsilon = np.float32(1e-6) one = np.float32(1) pf = np.float32(0.5) # IO ver = sys.version_info if ver >= (3, 0): import pickle as pk opts_write = {'encoding': 'utf-8', 'newline': ''} opts_read = {'encoding': 'utf-8'} else: import cPickle as pk opts_write = {} opts_read = {} # IO def read_lines(file_path): with open(file_path, 'r') as opdrf: data = [term.strip() for term in opdrf.readlines()] return data def write_lines(file_path, data_list): with open(file_path, 'w', **opts_write) as opdwf: opdwf.writelines([str(term)+'\n' for term in data_list]) def read_tsv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf, delimiter='\t') data = [term for term in csv_reader] return data def read_csv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf) data = [term for term in csv_reader] return data def pickle(file_path, obj, protocol=2): """ For python 3 compatibility, use protocol 2 """ if not file_path.endswith('.pkl'): file_path += '.pkl' with open(file_path, 'wb') as opdwf: pk.dump(obj, opdwf, protocol=protocol) def unpickle(file_path): with open(file_path, 'rb') as opdrf: data = pk.load(opdrf) return data # Load data def load_data_multiscale(data_dir, scale_list): X_tr_list = list() y_tr_list = list() X_te_list = list() y_te_list = list() X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_tr_fp = os.path.join(data_dir, scale, 'feat.tr.npy') target_tr_fp = os.path.join(data_dir, scale, 'target.tr.npy') feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_tr = np.load(feat_tr_fp) y_tr = np.load(target_tr_fp) X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_tr_list.append(X_tr) y_tr_list.append(y_tr) X_te_list.append(X_te) y_te_list.append(y_te) X_va_list.append(X_va) y_va_list.append(y_va) y_tr = y_tr_list[0] y_va = y_va_list[0] y_te = y_te_list[0] return X_tr_list, y_tr, X_va_list, y_va, X_te_list, y_te def load_data_multiscale_te(data_dir, scale_list): X_te_list = list() y_te_list = list() for ii, scale in enumerate(scale_list): feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_te_list.append(X_te) y_te_list.append(y_te) y_te = y_te_list[0] return X_te_list, y_te def load_data_multiscale_va(data_dir, scale_list): X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) # append X_va_list.append(X_va) y_va_list.append(y_va) y_va = y_va_list[0] return X_va_list, y_va # Recursively convert string to int in a list def to_int(data_list): return [to_int(term) if type(term) == list else int(term) for term in data_list] # Iterate inputs def iterate_minibatches_multiscale(inputs_list, targets, batchsize, shuffle=False): if type(targets) == np.ndarray: n = len(targets) k 
= targets.shape[-1] for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list], \ targets[excerpt].reshape((-1, k)) def iterate_minibatches_multiscale_feat(inputs_list, batchsize, shuffle=False): n = len(inputs_list[0]) for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list] # Functions used in train for recording and printing def check_best_loss(best_val_loss, val_loss): if val_loss < best_val_loss: best_val_loss = val_loss best_val_updated = True else: best_val_updated = False return best_val_loss, best_val_updated def print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss): print("Epoch {} of {}.".format(epoch, n_epochs)) print(" training loss: {:.6f}".format(mean_tr_loss)) print(" validation loss: {:.6f}".format(mean_va_loss)) print(" best va (epoch, loss):({}, {:.6f})".format( best_va_epoch, best_va_loss )) print(" ") # Multiple input sources def train_multiscale( X_tr_list, y_tr, X_va_list, y_va, network, train_func, va_func, n_epochs, batch_size, lr_var, param_fp=None): print("Starting training...") best_va_epoch = 0 best_va_loss = np.inf for epoch in range(1, n_epochs+1): train_loss = 0 train_batches = 0 # Training for batch_ in iterate_minibatches_multiscale(X_tr_list, y_tr, batch_size, shuffle=True): inputs_list, targets = batch_ temp = inputs_list+[targets] train_loss_one = train_func(*temp) train_loss += train_loss_one train_batches += 1 mean_tr_loss = train_loss/train_batches # Validation pre_list, mean_va_loss = validate_multiscale(X_va_list, y_va, va_func) # Check best loss best_va_loss, best_va_updated = check_best_loss( best_va_loss, mean_va_loss) if best_va_updated: best_va_epoch = epoch if param_fp is not None: save_model(param_fp, network) # Print the results for this epoch: print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss) def validate_multiscale(X_list, y, val_func): val_loss = 0 val_batches = 0 pre_list = [] for batch in iterate_minibatches_multiscale(X_list, y, 1, shuffle=False): inputs_list, targets = batch temp = inputs_list+[targets] pre, loss = val_func(*temp) val_loss += loss val_batches += 1 pre_list.append(pre) mean_val_loss = val_loss / val_batches return pre_list, mean_val_loss def predict_multiscale(X_list, pr_func): pre_list = [] for inputs_list in iterate_minibatches_multiscale_feat( X_list, 1, shuffle=False): pre = pr_func(*inputs_list) pre_list.append(pre) return pre_list # Save/load def save_model(fp, network):
def load_model(fp, network): with np.load(fp) as f: param_values = [f['arr_%d' % i] for i in range(len(f.files))] lasagne.layers.set_all_param_values(network, param_values) # Get thresholds def f1_one(y_target, y_predicted): ''' y_target, y_predicted: 1D binary array ''' return f1_score(y_target, y_predicted, average='binary') def f1(Y_target, Y_predicted): ''' Y_target, Y_predicted: n x k 2D binary array, where n is the number of data and k is the number of tags ''' scores = [f1_one(y_target, y_predicted) for y_target, y_predicted in zip(Y_target.T, Y_predicted.T)] scores = np.array(scores) return scores def get_measure(arg): threshold, prediction, target, step_size, lower_b, measure_func = arg pred_binary = ((prediction-threshold) > 0).astype(int) measures = measure_func(target, pred_binary) return measures def get_thresholds(pred, target, search_range, step_size, measure_func=f1, n_processes=20): ''' pred: np.array prediction from a model n x k 2D array, where n is the number of data and k is the number of tags target: np.array groundtruth n x k 2D binary array, where n is the number of data and k is the number of tags search_range: tuple the range for searching the thresholds (a, b), where a is the lower bound and b is the upper bound step_size: float searching the threholds in (a, a+step_size, a+2step_size, ..., ...) measure_func: function or str function defined in the begining of this fild ''' lower_b, upper_b = search_range assert(upper_b > lower_b) if measure_func == 'f1': measure_func = f1 n_tags = target.shape[1] diff = upper_b-lower_b n_steps = int(np.floor(diff/step_size)) threshold_list = [lower_b+ii*step_size for ii in range(n_steps+1)] arg_list = [] for th in threshold_list: arg_list.append( (th, pred, target, step_size, lower_b, measure_func)) pool = Pool(processes=n_processes) all_measures = np.array(pool.map(get_measure, arg_list)) pool.close() # print(all_measures.shape) best_idx_list = np.argmax(all_measures, axis=0) best_thresholds = lower_b+best_idx_list*step_size best_measures = all_measures[best_idx_list, [ii for ii in range(n_tags)]] # print(n_tags, len(best_idx_list)) return best_thresholds, best_measures # Upscale array def shift(array_list, shift_size, axis): n_axes = len(array_list[0].shape) obj = [slice(None, None, None) for ii in range(n_axes)] obj[axis] = slice(shift_size, None, 1) obj = tuple(obj) pad_width = [(0, 0) for ii in range(n_axes)] pad_width[axis] = (0, shift_size) out_array_list = [np.pad(array[obj], pad_width, 'constant') for array in array_list] return out_array_list def upscale(func, input_list, method='naive', scale_factor=1, in_axis=2, out_axis=2): ''' array: numpy.array method: str 'naive' or 'patching' scale_factor: int ''' assert(method in ['naive', 'patching']) if method == 'naive': array = func(*input_list)[0] new_array = np.repeat(array, scale_factor, axis=out_axis) elif method == 'patching': output_list = [func(*shift(input_list, ii, axis=in_axis))[0] for ii in range(scale_factor)] output = np.stack(output_list, axis=out_axis+1) new_shape = list(output_list[0].shape) new_shape[out_axis] = -1 new_shape = tuple(new_shape) new_array = np.reshape(output, new_shape) return new_array # Process tag list def get_test_tag_indices(tag_tr_fp, tag_te_fp, tag_conv_fp): tag_te_list = read_lines(tag_te_fp) tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list def get_test_tag_50(tag_tr_fp, tag_te_fp): tag_te_list = 
read_lines(tag_te_fp) #tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag) for tag in tag_te_list] #tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list
np.savez(fp, *lasagne.layers.get_all_param_values(network))
identifier_body
utils.py
#!/usr/bin/env python import os import sys import csv import numpy as np import theano import lasagne from sklearn.metrics import f1_score from multiprocessing import Pool theano.config.exception_verbosity = 'high' floatX = theano.config.floatX epsilon = np.float32(1e-6) one = np.float32(1) pf = np.float32(0.5) # IO ver = sys.version_info if ver >= (3, 0): import pickle as pk opts_write = {'encoding': 'utf-8', 'newline': ''} opts_read = {'encoding': 'utf-8'} else: import cPickle as pk opts_write = {} opts_read = {} # IO def read_lines(file_path): with open(file_path, 'r') as opdrf: data = [term.strip() for term in opdrf.readlines()] return data def write_lines(file_path, data_list): with open(file_path, 'w', **opts_write) as opdwf: opdwf.writelines([str(term)+'\n' for term in data_list]) def read_tsv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf, delimiter='\t') data = [term for term in csv_reader] return data def read_csv(file_path): with open(file_path, 'r', **opts_read) as opdrf: csv_reader = csv.reader(opdrf) data = [term for term in csv_reader] return data def pickle(file_path, obj, protocol=2): """ For python 3 compatibility, use protocol 2 """ if not file_path.endswith('.pkl'): file_path += '.pkl' with open(file_path, 'wb') as opdwf: pk.dump(obj, opdwf, protocol=protocol) def unpickle(file_path): with open(file_path, 'rb') as opdrf: data = pk.load(opdrf) return data # Load data def load_data_multiscale(data_dir, scale_list): X_tr_list = list() y_tr_list = list() X_te_list = list() y_te_list = list() X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_tr_fp = os.path.join(data_dir, scale, 'feat.tr.npy') target_tr_fp = os.path.join(data_dir, scale, 'target.tr.npy') feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_tr = np.load(feat_tr_fp) y_tr = np.load(target_tr_fp) X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_tr_list.append(X_tr) y_tr_list.append(y_tr) X_te_list.append(X_te) y_te_list.append(y_te) X_va_list.append(X_va) y_va_list.append(y_va) y_tr = y_tr_list[0] y_va = y_va_list[0] y_te = y_te_list[0] return X_tr_list, y_tr, X_va_list, y_va, X_te_list, y_te def load_data_multiscale_te(data_dir, scale_list): X_te_list = list() y_te_list = list() for ii, scale in enumerate(scale_list): feat_te_fp = os.path.join(data_dir, scale, 'feat.te.npy') target_te_fp = os.path.join(data_dir, scale, 'target.te.npy') X_te = np.load(feat_te_fp) y_te = np.load(target_te_fp) # append X_te_list.append(X_te) y_te_list.append(y_te) y_te = y_te_list[0] return X_te_list, y_te def load_data_multiscale_va(data_dir, scale_list): X_va_list = list() y_va_list = list() for ii, scale in enumerate(scale_list): feat_va_fp = os.path.join(data_dir, scale, 'feat.va.npy') target_va_fp = os.path.join(data_dir, scale, 'target.va.npy') X_va = np.load(feat_va_fp) y_va = np.load(target_va_fp) # append X_va_list.append(X_va) y_va_list.append(y_va) y_va = y_va_list[0] return X_va_list, y_va # Recursively convert string to int in a list def to_int(data_list): return [to_int(term) if type(term) == list else int(term) for term in data_list] # Iterate inputs def iterate_minibatches_multiscale(inputs_list, targets, batchsize, shuffle=False): if type(targets) == np.ndarray: n = len(targets) k 
= targets.shape[-1] for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list], \ targets[excerpt].reshape((-1, k)) def iterate_minibatches_multiscale_feat(inputs_list, batchsize, shuffle=False): n = len(inputs_list[0]) for inputs in inputs_list: assert len(inputs) == n if shuffle: indices = np.arange(n) np.random.shuffle(indices) for start_idx in range(0, n - batchsize + 1, batchsize): if shuffle: excerpt = indices[start_idx:start_idx + batchsize] else: excerpt = slice(start_idx, start_idx + batchsize) yield [inputs[excerpt] for inputs in inputs_list] # Functions used in train for recording and printing def check_best_loss(best_val_loss, val_loss): if val_loss < best_val_loss: best_val_loss = val_loss best_val_updated = True else: best_val_updated = False return best_val_loss, best_val_updated def print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss): print("Epoch {} of {}.".format(epoch, n_epochs)) print(" training loss: {:.6f}".format(mean_tr_loss)) print(" validation loss: {:.6f}".format(mean_va_loss)) print(" best va (epoch, loss):({}, {:.6f})".format( best_va_epoch, best_va_loss )) print(" ") # Multiple input sources def train_multiscale( X_tr_list, y_tr, X_va_list, y_va, network, train_func, va_func, n_epochs, batch_size, lr_var, param_fp=None): print("Starting training...") best_va_epoch = 0 best_va_loss = np.inf for epoch in range(1, n_epochs+1): train_loss = 0 train_batches = 0 # Training for batch_ in iterate_minibatches_multiscale(X_tr_list, y_tr, batch_size, shuffle=True): inputs_list, targets = batch_ temp = inputs_list+[targets] train_loss_one = train_func(*temp) train_loss += train_loss_one train_batches += 1 mean_tr_loss = train_loss/train_batches # Validation pre_list, mean_va_loss = validate_multiscale(X_va_list, y_va, va_func) # Check best loss best_va_loss, best_va_updated = check_best_loss( best_va_loss, mean_va_loss) if best_va_updated: best_va_epoch = epoch if param_fp is not None: save_model(param_fp, network) # Print the results for this epoch: print_in_train(epoch, n_epochs, mean_tr_loss, mean_va_loss, best_va_epoch, best_va_loss) def validate_multiscale(X_list, y, val_func): val_loss = 0 val_batches = 0 pre_list = [] for batch in iterate_minibatches_multiscale(X_list, y, 1, shuffle=False): inputs_list, targets = batch temp = inputs_list+[targets] pre, loss = val_func(*temp) val_loss += loss val_batches += 1 pre_list.append(pre) mean_val_loss = val_loss / val_batches return pre_list, mean_val_loss def predict_multiscale(X_list, pr_func): pre_list = [] for inputs_list in iterate_minibatches_multiscale_feat( X_list, 1, shuffle=False): pre = pr_func(*inputs_list) pre_list.append(pre) return pre_list # Save/load def save_model(fp, network): np.savez(fp, *lasagne.layers.get_all_param_values(network)) def load_model(fp, network): with np.load(fp) as f: param_values = [f['arr_%d' % i] for i in range(len(f.files))] lasagne.layers.set_all_param_values(network, param_values) # Get thresholds def f1_one(y_target, y_predicted): ''' y_target, y_predicted: 1D binary array ''' return f1_score(y_target, y_predicted, average='binary') def f1(Y_target, Y_predicted): ''' Y_target, Y_predicted: n x k 2D binary array, where n is the number of data and k is the number of tags 
''' scores = [f1_one(y_target, y_predicted) for y_target, y_predicted in zip(Y_target.T, Y_predicted.T)] scores = np.array(scores) return scores def get_measure(arg): threshold, prediction, target, step_size, lower_b, measure_func = arg pred_binary = ((prediction-threshold) > 0).astype(int) measures = measure_func(target, pred_binary) return measures def get_thresholds(pred, target, search_range, step_size, measure_func=f1, n_processes=20): ''' pred: np.array prediction from a model n x k 2D array, where n is the number of data and k is the number of tags target: np.array groundtruth n x k 2D binary array, where n is the number of data and k is the number of tags search_range: tuple the range for searching the thresholds (a, b), where a is the lower bound and b is the upper bound step_size: float searching the threholds in (a, a+step_size, a+2step_size, ..., ...) measure_func: function or str function defined in the begining of this fild ''' lower_b, upper_b = search_range assert(upper_b > lower_b) if measure_func == 'f1': measure_func = f1 n_tags = target.shape[1] diff = upper_b-lower_b n_steps = int(np.floor(diff/step_size)) threshold_list = [lower_b+ii*step_size for ii in range(n_steps+1)] arg_list = [] for th in threshold_list: arg_list.append( (th, pred, target, step_size, lower_b, measure_func)) pool = Pool(processes=n_processes) all_measures = np.array(pool.map(get_measure, arg_list)) pool.close() # print(all_measures.shape) best_idx_list = np.argmax(all_measures, axis=0) best_thresholds = lower_b+best_idx_list*step_size best_measures = all_measures[best_idx_list, [ii for ii in range(n_tags)]] # print(n_tags, len(best_idx_list)) return best_thresholds, best_measures # Upscale array def
(array_list, shift_size, axis): n_axes = len(array_list[0].shape) obj = [slice(None, None, None) for ii in range(n_axes)] obj[axis] = slice(shift_size, None, 1) obj = tuple(obj) pad_width = [(0, 0) for ii in range(n_axes)] pad_width[axis] = (0, shift_size) out_array_list = [np.pad(array[obj], pad_width, 'constant') for array in array_list] return out_array_list def upscale(func, input_list, method='naive', scale_factor=1, in_axis=2, out_axis=2): ''' array: numpy.array method: str 'naive' or 'patching' scale_factor: int ''' assert(method in ['naive', 'patching']) if method == 'naive': array = func(*input_list)[0] new_array = np.repeat(array, scale_factor, axis=out_axis) elif method == 'patching': output_list = [func(*shift(input_list, ii, axis=in_axis))[0] for ii in range(scale_factor)] output = np.stack(output_list, axis=out_axis+1) new_shape = list(output_list[0].shape) new_shape[out_axis] = -1 new_shape = tuple(new_shape) new_array = np.reshape(output, new_shape) return new_array # Process tag list def get_test_tag_indices(tag_tr_fp, tag_te_fp, tag_conv_fp): tag_te_list = read_lines(tag_te_fp) tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list def get_test_tag_50(tag_tr_fp, tag_te_fp): tag_te_list = read_lines(tag_te_fp) #tag_conv_dict = dict(read_csv(tag_conv_fp)) tag_tr_list = read_lines(tag_tr_fp) tag_idx_list = [tag_tr_list.index(tag) for tag in tag_te_list] #tag_idx_list = [tag_tr_list.index(tag_conv_dict[tag]) for tag in tag_te_list] return tag_idx_list
shift
identifier_name
pl.locales.ts
import { IHomeLocale } from '@app/abstractions'; import { SupportedLocalesEnum } from '@app/enums'; const language = SupportedLocalesEnum.POLISH; export const HomeLocalePolish: IHomeLocale = { metadata: { title: 'Tourney - Esports Discord Bot Obsługiwany przez AI', description: 'Organizuj i prowadź tysiące mobilnych turniejów e-sportowych w dowolnej społeczności gier dzięki naszemu botowi turniejowemu Tourney.', }, heroSection: { heading: 'Poznaj Tourney', content: 'Asystent Esports obsługiwany przez AI. Prowadzi Twój turniej, abyś mógł skoncentrować się na swojej społeczności', appLaunchButtons: [ { logoUrl: 'https://cdn.game.tv/global.png', logoExtraClasses: 'global-logo', link: '/user', name: 'Włącz apke ', extraClasses: 'alternate-background', }, { logoUrl: 'https://cdn.game.tv/discord_logo_white.png', logoExtraClasses: 'discord-logo', link: 'https://www.game.tv/oauthv2/begin?partner=add-discord-bot&redirect_url=%2Ftournament%2Fapi%2Fadd_guild_owner', name: 'Zaproś Tourney', }, ], discoverButtonTitle: 'Dowiedz się więcej', }, whyTourneySection: { heading: 'Dlaczego Tourney?', content: 'Trudno jest zorganizować turniej dla swojej społeczności. Śledzenie, kto gra przeciwko komu, do którego lobby dołączyć, jakie są najbardziej uczciwe pojedynki, dowodzenie, kto wygrał - to dużo. Tourney poprowadzi Twoje turnieje jako asystent lub całkowicie samodzielnie na podstawie Twojej społeczności. Jego potężna sztuczna inteligencja i funkcje mogą pomóc ci zbudować silniejszą i bardziej zabawną społeczność.', }, uHostSection: { heading: 'uHost, uHost AI-Assistant, and AI-Host', content: 'Trzy główne tryby turniejowe dają Ci pełną swobodę w prowadzeniu gier.', hostTypes: [ { heading: 'uHost', imageAlt: 'uHost', content: 'Wybierz z szablonu turnieju lub dostosuj własny. Opublikuj je, a Tourney utworzy wszystkie kanały i DM role, które chcesz.', imageUrl: 'https://cdn.game.tv/images/meet-tourney/uHost.png', }, { heading: 'uHost AI-Assistant', imageAlt: 'uHost AI-Assistant', content: 'Tourney pomaga moderować turniej i zapewnia wskazówki dla każdego gracza pomiędzy meczami i wiele więcej.', imageUrl: 'https://cdn.game.tv/images/meet-tourney/uHost-assistant.png', }, { heading: 'AI-Host', imageAlt: 'aiHost', content: 'Tourney wybierze grę, czas, format i przeprowadzi turniej całkowicie samodzielnie od początku do końca.', imageUrl: 'https://cdn.game.tv/images/meet-tourney/ai-Host.png', }, ], templateSection: { imageUrl: 'https://cdn.game.tv/images/meet-tourney/templates.png', imageAlt: 'Templates', heading: 'Szablony', content: 'Tourney zawiera dziesiątki wstępnie skonfigurowanych szablonów turniejów, dzięki czemu możesz je opublikować i przejść dalej.', }, messagingSection: { imageAlt: 'DMs and Messaging', imageUrl: 'https://cdn.game.tv/images/meet-tourney/dms.png', heading: 'Wiadomości', content: 'Tourney może wysyłać wiadomości dotyczące poszczególnych ról dla gry lub dowolnej roli, którą powiadomisz o nowym turnieju. Cała organizacja odbywa się na nowym kanale, który tworzy Tourney, a wszystkie zaproszenia do lobby i kojarzenie są wysyłane za pośrednictwem PW.', }, }, perksSection: { heading: 'Zalety',
imageUrl: 'https://cdn.game.tv/images/meet-tourney/perk-tournaments.png', imageAlt: 'Nagradzane Poziomy Turniejowe', }, { content: 'Streamujesz swoje turnieje? Idealnie, mamy dla Ciebie przygotoway plugin OBS.', imageUrl: 'https://cdn.game.tv/images/meet-tourney/perk-obs.png', imageAlt: 'Wewnętrzny plugin OBS dla streamowania', }, { content: 'Chcesz uruchomić ligę turniejową na swoim serwerze? Bingo, my też to mamy!', imageUrl: 'https://cdn.game.tv/images/meet-tourney/perk-league.png', imageAlt: 'Organizuj Ligi!', }, ], }, graphsSection: { heading: 'Role', content: 'Im więcej turniejów prowadzisz z Tourney, tym więcej korzyści odblokujesz. Twoja rola pojawia się w tabeli liderów społeczności Game.tv Discord, a każdy zestaw profitów otrzymasz po przejściu do następnej roli.', graphContent: { previousTitle: 'Poprzedni', nextTitle: 'Następny', perksTitle: 'Profity', forTitle: 'DLA', graphList: [ { type: 'bronze', imageCaption: 'Brąz', imageAlt: 'Bronze', imageUrl: 'https://cdn.game.tv/images/meet-tourney/tier-bronze.png', forDuration: 'Od 1 do 6 Turniejów tygodniowo', perks: [ 'Unikalne Emotikony Tourney', 'Profile i odznaki', 'Odblokuj AI Tourney', ], }, { type: 'silver', imageCaption: 'Srebro', imageAlt: 'Silver', imageUrl: 'https://cdn.game.tv/images/meet-tourney/tier-silver.png', forDuration: '7 Turniejów tygodniowo', perks: [ '2 Nitro boosty dla twojego serwera Discord', 'Odblokuj ligi', ], }, { type: 'gold', imageCaption: 'Złoto', imageAlt: 'Gold', imageUrl: 'https://cdn.game.tv/images/meet-tourney/tier-gold.png', forDuration: 'Poprowadź ligę z 300 lub więcej unikalnych uczestników/sezonów ', perks: ['Gwarantowany sponsoring nagród ligowych'], }, ], }, }, tourneyEmotesSection: { heading: 'Emotikony Tourney', content: 'Odblokowany w brązie otrzymujesz 42 niesamowitych emotikonów w wysokiej rozdzielczości, które możesz wykorzystać w swojej społeczności. ', }, profilesSection: { items: [ { imageUrl: 'https://cdn.game.tv/images/meet-tourney/profiles.png', heading: 'Profile', imageAlt: 'Profiles', content: 'Odblokowane w brązie, gracze w twoich turniejach automatycznie otrzymują profile, które mogą dostosować. Każdy profil pokazuje rozegrane gry, rekord wygranych / przegranych oraz ocenę gracza. Oceny graczy pomagają w dobieraniu graczy i awansowaniu ligi.', }, { imageUrl: 'https://cdn.game.tv/images/meet-tourney/badges.png', heading: 'Odznaki', imageAlt: 'Badges', content: 'Wygrywaj gry i zdobywaj odznaki, aby pochwalić się swoimi umiejętnościami. Wraz ze wzrostem ELO lub wygranymi w turniejach i nagrodach zdobywasz ekskluzywne odznaki w swoim profilu, które czasami zawierają super tajne i ekskluzywne dodatkowe korzyści.', reverse: true, }, ], }, tourneyAiSection: { heading: 'AI Tourney', content1: 'Po trafieniu w Brąz odblokujesz AI-Host. Włączenie AI-Host powoduje, że Tourney może automatycznie uruchamiać turnieje w Twojej społeczności. Pamiętaj, że tylko turnieje uHost liczą się do twojego postępu. Turnieje AI-Host nie liczą się (ale są świetne i może je prowadzić wraz z uHost).', content2: '', }, leaguesSection: { items: [ { imageUrl: 'https://cdn.game.tv/images/meet-tourney/league.png', heading: 'Ligi', imageAlt: 'Leagues', content: 'Odblokowane złotem, Tourney może prowadzić całą ligę na twoim serwerze. 
Gracze na twoim serwerze będą mieli okazję konkurować na równoległym systemie poziomów i zdobyć miesięczny sponsoring, jeśli znajdą się w TOP-8 krajowych rankingów.', }, ], }, getBoostedSection: { heading: 'Zdobądź premię', content: 'Zdobądź srebro, a my damy Twojemu serwerowi Discord nie jeden, ale dwa doładowania, które zapewnią ci te słodkie przywileje poziomu 1. Tak długo, jak co tydzień organizujesz siedem lub więcej turniejów (od poniedziałku do niedzieli), będziemy nadal ulepszać Twój serwer. Jeśli spadniesz poniżej siedmiu turniejów co tydzień, możemy usunąć twoje wzmocnienia, dopóki nie wrócisz do siedmiu lub więcej. Dodatkowo odblokowujesz możliwość tworzenia lig biegowych dla swojej społeczności za pomocą AI-Host', imgDesktopUrl: 'https://cdn.game.tv/images/meet-tourney/get-boosted.png', imgMobile1Url: 'https://cdn.game.tv/images/meet-tourney/get-boosted-mob1.png', imgMobile2Url: 'https://cdn.game.tv/images/meet-tourney/get-boosted-mob2.png', backgroundImgUrl: 'https://cdn.game.tv/images/meet-tourney/get-boosted-bg.png', getBoostedLevels: { perks: { mainIconUrl: 'https://cdn.game.tv/images/meet-tourney/ruby-white.png', heading: 'Atuty poziomu 1', mainlabel: 'Odblokowano', subheading: 'Zwiększone korzyści serwera obejmują -', items: [ { iconUrl: 'https://cdn.game.tv/images/meet-tourney/emoji.png', label: '+50 miejsc na emoji na serwerze (w sumie 100)', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/audio.png', label: '128 Kbps jakość audio', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/gif-icon.png', label: 'Animowana ikona serwerowa', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/server.png', label: 'Własne tło na linku zaproszenia do serwera', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/live.png', label: '720p 60fps Jakość Streamowania na żywo', }, ], }, boostedServer: { mainIconUrl: 'https://cdn.game.tv/images/meet-tourney/ruby-gold.png', heading: 'Kwalifikacja do ulepszonego serwera', subheading: 'Co więcej musisz wiedzieć -', items: [ { iconUrl: 'https://cdn.game.tv/images/meet-tourney/right-green.png', label: 'Turnieje muszą odbywać się tylko w trybie uHost lub uHost AI-Assistant. Tylko te wyżej wymienione wliczają się do twojej tygodniowej pracy.', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/right-green.png', label: 'Turnieje muszą mieć współczynnik wypełnienia minimum 50%, aby się zakwalifikować. (to znaczy, jeśli a turniej ma szesnaście miejsc, przynajmniej osiem osób musi grać dla Ciebie turniej liczony do średniej tygodniowej.', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/right-green.png', label: 'Patrzymy na twoje średnie turnieje tygodniowo. Prowadzenie trzech turniejów jeden dzień i czwarty w drugim są takie same jak codzienne przeprowadzanie jednego turnieju.', }, { iconUrl: 'https://cdn.game.tv/images/meet-tourney/right-green.png', label: 'Wzmocnienia mogą zostać usunięte, jeśli średnia turnieju spadnie poniżej siedmiu w danym momencie Tydzień od poniedziałku do piątku (czasu PST).', }, ], }, }, }, prizeSponsorshipSection: { items: [ { imageUrl: 'https://cdn.game.tv/images/meet-tourney/prize-sponsorships.png', heading: 'Sponsoring nagród', imageAlt: 'Nagrody Ligowe', content: 'Gdy zdobędziesz złoto, prowadząc ligę w swojej społeczności, będziemy sponsorować nagrody ligowe za każdy sezon kalendarzowy-miesiąc. 
Nagrody będą pochodzić od naszych wspaniałych partnerów, takich jak MSI, Intel i setki twórców gier, i mogą obejmować gotówkę, karty podarunkowe, sprzęt, towary lub przedmioty w grze.', }, ], }, availableGamesSection: { heading: 'Dostępne Gry', content: 'Więcej dodawane tygodniowo', viewMoreGames: 'Zobacz więcej', appLaunchButtons: [ { logoUrl: 'https://cdn.game.tv/discord_logo_white.png', link: 'https://www.game.tv/oauthv2/begin?partner=add-discord-bot&redirect_url=%2Ftournament%2Fapi%2Fadd_guild_owner', name: 'Zaproś Tourney', }, ], }, gamelink: `/${language}/find-tournaments`, };
content: 'Tourney nie byłby kompletny bez mnóstwa dodatków.', perksList: [ { content: 'Prowadzisz mnóstwo turniejów? Świetnie, mamy dla Ciebie system poziomów, który Cię wynagrodzi.',
random_line_split
build_cgd_dataset.py
#!/usr/local/bin/python '''Converts Cornell Grasping Dataset data into TFRecords data format using Example protos. The raw data set resides in png and txt files located in the following structure: dataset/03/pcd0302r.png dataset/03/pcd0302cpos.txt ''' import os import errno import traceback import itertools import six import os import glob import numpy as np import numpy as np import tensorflow as tf import re from scipy.ndimage.filters import median_filter # progress bars https://github.com/tqdm/tqdm # import tqdm without enforcing it as a dependency try: from tqdm import tqdm except ImportError: def tqdm(*args, **kwargs): if args: return args[0] return kwargs.get('iterable', None) from tensorflow.python.platform import flags from tensorflow.python.platform import gfile from tensorflow.python.ops import data_flow_ops from tensorflow.python.keras.utils import get_file from tensorflow.python.keras._impl.keras.utils.data_utils import _hash_file import keras from keras import backend as K flags.DEFINE_string('data_dir', os.path.join(os.path.expanduser("~"), '.keras', 'datasets', 'cornell_grasping'), """Path to dataset in TFRecord format (aka Example protobufs) and feature csv files.""") flags.DEFINE_string('grasp_dataset', 'all', 'TODO(ahundt): integrate with brainrobotdata or allow subsets to be specified') flags.DEFINE_boolean('grasp_download', True, """Download the grasp_dataset to data_dir if it is not already present.""") FLAGS = flags.FLAGS def mkdir_p(path): """Create the specified path on the filesystem like the `mkdir -p` command Creates one or more filesystem directory levels as needed, and does not return an error if the directory already exists. """ # http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def is_sequence(arg): """Returns true if arg is a list or another Python Sequence, and false otherwise. source: https://stackoverflow.com/a/17148334/99379 """ return (not hasattr(arg, "strip") and hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")) class GraspDataset(object): """Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. # Arguments data_dir: Path to dataset in TFRecord format (aka Example protobufs) and feature csv files. `~/.keras/datasets/grasping` by default. dataset: 'all' to load all the data. download: True to actually download the dataset, also see FLAGS. """ def __init__(self, data_dir=None, dataset=None, download=None, verbose=0): if data_dir is None: data_dir = FLAGS.data_dir self.data_dir = data_dir if dataset is None: dataset = FLAGS.grasp_dataset self.dataset = dataset if download is None: download = FLAGS.grasp_download if download: self.download(data_dir, dataset) self.verbose = verbose def download(self, data_dir=None, dataset='all'): '''Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. Includes grasp_listing.txt with all files in all datasets; the feature csv files which specify the dataset size, the features (data channels), and the number of grasps; and the tfrecord files which actually contain all the data. If `grasp_listing_hashed.txt` is present, an additional hashing step will will be completed to verify dataset integrity. 
`grasp_listing_hashed.txt` will be generated automatically when downloading with `dataset='all'`. # Arguments dataset: The name of the dataset to download, downloads all by default with the '' parameter, 102 will download the 102 feature dataset found in grasp_listing.txt. # Returns list of paths to the downloaded files ''' dataset = self._update_dataset_param(dataset) if data_dir is None: if self.data_dir is None: data_dir = FLAGS.data_dir else: data_dir = self.data_dir mkdir_p(data_dir) print('Downloading datasets to: ', data_dir) url_prefix = '' # If a hashed version of the listing is available, # download the dataset and verify hashes to prevent data corruption. listing_hash = os.path.join(data_dir, 'grasp_listing_hash.txt') if os.path.isfile(listing_hash): files_and_hashes = np.genfromtxt(listing_hash, dtype='str', delimiter=' ') files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, file_hash=hash_str, extract=True) for fpath, hash_str in tqdm(files_and_hashes) if '_' + str(dataset) in fpath] else: # If a hashed version of the listing is not available, # simply download the dataset normally. listing_url = 'https://raw.githubusercontent.com/ahundt/robot-grasp-detection/master/grasp_listing.txt' grasp_listing_path = get_file('grasp_listing.txt', listing_url, cache_subdir=data_dir) grasp_files = np.genfromtxt(grasp_listing_path, dtype=str) files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, extract=True) for fpath in tqdm(grasp_files) if '_' + dataset in fpath] # If all files are downloaded, generate a hashed listing. if dataset is 'all' or dataset is '': print('Hashing all dataset files to prevent corruption...') hashes = [] for i, f in enumerate(tqdm(files)): hashes.append(_hash_file(f)) file_hash_np = np.column_stack([grasp_files, hashes]) with open(listing_hash, 'wb') as hash_file: np.savetxt(hash_file, file_hash_np, fmt='%s', delimiter=' ', header='file_path sha256') print('Hashing complete, {} contains each url plus hash, and will be used to verify the ' 'dataset during future calls to download().'.format(listing_hash)) return files def _update_dataset_param(self, dataset): """Internal function to configure which subset of the datasets is being used. Helps to choose a reasonable default action based on previous user parameters. """ if dataset is None and self.dataset is None: return [] if dataset is 'all': dataset = '' if dataset is None and self.dataset is not None: dataset = self.dataset return dataset class ImageCoder(object): def __init__(self): self._sess = tf.Session() self._decode_png_data = tf.placeholder(dtype=tf.string) self._decode_png = tf.image.decode_png(self._decode_png_data, channels=3) def decode_png(self, image_data): return self._sess.run(self._decode_png, feed_dict={self._decode_png_data: image_data}) def _process_image(filename, coder): # Decode the image with open(filename) as f: image_data = f.read() image = coder.decode_png(image_data) assert len(image.shape) == 3 height = image.shape[0] width = image.shape[1] assert image.shape[2] == 3 return image_data, height, width def _process_bboxes(name): '''Create a list with the coordinates of the grasping rectangles. 
Every element is either x or y of a vertex.''' with open(name, 'r') as f: bboxes = list(map( lambda coordinate: float(coordinate), f.read().strip().split())) return bboxes def _int64_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(int64_list=tf.train.Int64List(value=v)) def _floats_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(float_list=tf.train.FloatList(value=v)) def _bytes_feature(v):
def _convert_to_example(filename, bboxes, image_buffer, height, width): # Build an Example proto for an example example = tf.train.Example(features=tf.train.Features(feature={ 'image/filename': _bytes_feature(filename), 'image/encoded': _bytes_feature(image_buffer), 'image/height': _int64_feature(height), 'image/width': _int64_feature(width), 'bboxes': _floats_feature(bboxes)})) return example def main(): gd = GraspDataset() if FLAGS.grasp_download: gd.download(dataset=FLAGS.grasp_dataset) train_file = os.path.join(FLAGS.data_dir, 'train-cgd') validation_file = os.path.join(FLAGS.data_dir, 'validation-cgd') print(train_file) print(validation_file) writer_train = tf.python_io.TFRecordWriter(train_file) writer_validation = tf.python_io.TFRecordWriter(validation_file) # Creating a list with all the image paths folders = range(1,11) folders = ['0'+str(i) if i<10 else '10' for i in folders] filenames = [] for i in folders: for name in glob.glob(os.path.join(FLAGS.data_dir, i, 'pcd'+i+'*r.png')): filenames.append(name) # Shuffle the list of image paths np.random.shuffle(filenames) count = 0 valid_img = 0 train_img = 0 coder = ImageCoder() for filename in tqdm(filenames): bbox = filename[:-5]+'cpos.txt' bboxes = _process_bboxes(bbox) image_buffer, height, width = _process_image(filename, coder) example = _convert_to_example(filename, bboxes, image_buffer, height, width) # Split the dataset in 80% for training and 20% for validation if count % 5 == 0: writer_validation.write(example.SerializeToString()) valid_img +=1 else: writer_train.write(example.SerializeToString()) train_img +=1 count +=1 print('Done converting %d images in TFRecords with %d train images and %d validation images' % (count, train_img, valid_img)) writer_train.close() writer_validation.close() if __name__ == '__main__': main()
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[v]))
identifier_body
build_cgd_dataset.py
#!/usr/local/bin/python '''Converts Cornell Grasping Dataset data into TFRecords data format using Example protos. The raw data set resides in png and txt files located in the following structure: dataset/03/pcd0302r.png dataset/03/pcd0302cpos.txt ''' import os import errno import traceback import itertools import six import os import glob import numpy as np import numpy as np import tensorflow as tf import re from scipy.ndimage.filters import median_filter # progress bars https://github.com/tqdm/tqdm # import tqdm without enforcing it as a dependency try: from tqdm import tqdm except ImportError: def tqdm(*args, **kwargs): if args: return args[0] return kwargs.get('iterable', None) from tensorflow.python.platform import flags from tensorflow.python.platform import gfile from tensorflow.python.ops import data_flow_ops from tensorflow.python.keras.utils import get_file from tensorflow.python.keras._impl.keras.utils.data_utils import _hash_file import keras from keras import backend as K flags.DEFINE_string('data_dir', os.path.join(os.path.expanduser("~"), '.keras', 'datasets', 'cornell_grasping'), """Path to dataset in TFRecord format (aka Example protobufs) and feature csv files.""") flags.DEFINE_string('grasp_dataset', 'all', 'TODO(ahundt): integrate with brainrobotdata or allow subsets to be specified') flags.DEFINE_boolean('grasp_download', True, """Download the grasp_dataset to data_dir if it is not already present.""") FLAGS = flags.FLAGS def mkdir_p(path): """Create the specified path on the filesystem like the `mkdir -p` command Creates one or more filesystem directory levels as needed, and does not return an error if the directory already exists. """ # http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def is_sequence(arg): """Returns true if arg is a list or another Python Sequence, and false otherwise. source: https://stackoverflow.com/a/17148334/99379 """ return (not hasattr(arg, "strip") and hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")) class GraspDataset(object): """Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. # Arguments data_dir: Path to dataset in TFRecord format (aka Example protobufs) and feature csv files. `~/.keras/datasets/grasping` by default. dataset: 'all' to load all the data. download: True to actually download the dataset, also see FLAGS. """ def __init__(self, data_dir=None, dataset=None, download=None, verbose=0): if data_dir is None: data_dir = FLAGS.data_dir self.data_dir = data_dir if dataset is None: dataset = FLAGS.grasp_dataset self.dataset = dataset if download is None: download = FLAGS.grasp_download if download: self.download(data_dir, dataset) self.verbose = verbose def
(self, data_dir=None, dataset='all'): '''Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. Includes grasp_listing.txt with all files in all datasets; the feature csv files which specify the dataset size, the features (data channels), and the number of grasps; and the tfrecord files which actually contain all the data. If `grasp_listing_hashed.txt` is present, an additional hashing step will will be completed to verify dataset integrity. `grasp_listing_hashed.txt` will be generated automatically when downloading with `dataset='all'`. # Arguments dataset: The name of the dataset to download, downloads all by default with the '' parameter, 102 will download the 102 feature dataset found in grasp_listing.txt. # Returns list of paths to the downloaded files ''' dataset = self._update_dataset_param(dataset) if data_dir is None: if self.data_dir is None: data_dir = FLAGS.data_dir else: data_dir = self.data_dir mkdir_p(data_dir) print('Downloading datasets to: ', data_dir) url_prefix = '' # If a hashed version of the listing is available, # download the dataset and verify hashes to prevent data corruption. listing_hash = os.path.join(data_dir, 'grasp_listing_hash.txt') if os.path.isfile(listing_hash): files_and_hashes = np.genfromtxt(listing_hash, dtype='str', delimiter=' ') files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, file_hash=hash_str, extract=True) for fpath, hash_str in tqdm(files_and_hashes) if '_' + str(dataset) in fpath] else: # If a hashed version of the listing is not available, # simply download the dataset normally. listing_url = 'https://raw.githubusercontent.com/ahundt/robot-grasp-detection/master/grasp_listing.txt' grasp_listing_path = get_file('grasp_listing.txt', listing_url, cache_subdir=data_dir) grasp_files = np.genfromtxt(grasp_listing_path, dtype=str) files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, extract=True) for fpath in tqdm(grasp_files) if '_' + dataset in fpath] # If all files are downloaded, generate a hashed listing. if dataset is 'all' or dataset is '': print('Hashing all dataset files to prevent corruption...') hashes = [] for i, f in enumerate(tqdm(files)): hashes.append(_hash_file(f)) file_hash_np = np.column_stack([grasp_files, hashes]) with open(listing_hash, 'wb') as hash_file: np.savetxt(hash_file, file_hash_np, fmt='%s', delimiter=' ', header='file_path sha256') print('Hashing complete, {} contains each url plus hash, and will be used to verify the ' 'dataset during future calls to download().'.format(listing_hash)) return files def _update_dataset_param(self, dataset): """Internal function to configure which subset of the datasets is being used. Helps to choose a reasonable default action based on previous user parameters. 
""" if dataset is None and self.dataset is None: return [] if dataset is 'all': dataset = '' if dataset is None and self.dataset is not None: dataset = self.dataset return dataset class ImageCoder(object): def __init__(self): self._sess = tf.Session() self._decode_png_data = tf.placeholder(dtype=tf.string) self._decode_png = tf.image.decode_png(self._decode_png_data, channels=3) def decode_png(self, image_data): return self._sess.run(self._decode_png, feed_dict={self._decode_png_data: image_data}) def _process_image(filename, coder): # Decode the image with open(filename) as f: image_data = f.read() image = coder.decode_png(image_data) assert len(image.shape) == 3 height = image.shape[0] width = image.shape[1] assert image.shape[2] == 3 return image_data, height, width def _process_bboxes(name): '''Create a list with the coordinates of the grasping rectangles. Every element is either x or y of a vertex.''' with open(name, 'r') as f: bboxes = list(map( lambda coordinate: float(coordinate), f.read().strip().split())) return bboxes def _int64_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(int64_list=tf.train.Int64List(value=v)) def _floats_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(float_list=tf.train.FloatList(value=v)) def _bytes_feature(v): return tf.train.Feature(bytes_list=tf.train.BytesList(value=[v])) def _convert_to_example(filename, bboxes, image_buffer, height, width): # Build an Example proto for an example example = tf.train.Example(features=tf.train.Features(feature={ 'image/filename': _bytes_feature(filename), 'image/encoded': _bytes_feature(image_buffer), 'image/height': _int64_feature(height), 'image/width': _int64_feature(width), 'bboxes': _floats_feature(bboxes)})) return example def main(): gd = GraspDataset() if FLAGS.grasp_download: gd.download(dataset=FLAGS.grasp_dataset) train_file = os.path.join(FLAGS.data_dir, 'train-cgd') validation_file = os.path.join(FLAGS.data_dir, 'validation-cgd') print(train_file) print(validation_file) writer_train = tf.python_io.TFRecordWriter(train_file) writer_validation = tf.python_io.TFRecordWriter(validation_file) # Creating a list with all the image paths folders = range(1,11) folders = ['0'+str(i) if i<10 else '10' for i in folders] filenames = [] for i in folders: for name in glob.glob(os.path.join(FLAGS.data_dir, i, 'pcd'+i+'*r.png')): filenames.append(name) # Shuffle the list of image paths np.random.shuffle(filenames) count = 0 valid_img = 0 train_img = 0 coder = ImageCoder() for filename in tqdm(filenames): bbox = filename[:-5]+'cpos.txt' bboxes = _process_bboxes(bbox) image_buffer, height, width = _process_image(filename, coder) example = _convert_to_example(filename, bboxes, image_buffer, height, width) # Split the dataset in 80% for training and 20% for validation if count % 5 == 0: writer_validation.write(example.SerializeToString()) valid_img +=1 else: writer_train.write(example.SerializeToString()) train_img +=1 count +=1 print('Done converting %d images in TFRecords with %d train images and %d validation images' % (count, train_img, valid_img)) writer_train.close() writer_validation.close() if __name__ == '__main__': main()
download
identifier_name
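The download() method completed above verifies each cached file against a 'file_path sha256' listing before trusting it. Below is a minimal sketch of that verification step using only hashlib, assuming the same one-hash-per-line listing layout as the record; sha256_of_file and verify_against_listing are illustrative names, not part of the dataset code.

import hashlib
import os

def sha256_of_file(path, chunk_size=65536):
    """Stream a file through SHA-256 so large tfrecords never load fully into memory."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

def verify_against_listing(listing_path, data_dir):
    """Check every 'url sha256' pair in the listing against the local copy; return the bad ones."""
    bad = []
    with open(listing_path) as listing:
        for line in listing:
            line = line.strip()
            if not line or line.startswith('#'):  # skip the '# file_path sha256' header
                continue
            url, expected = line.split()
            local = os.path.join(data_dir, url.split('/')[-1])
            if not os.path.isfile(local) or sha256_of_file(local) != expected:
                bad.append(local)
    return bad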
build_cgd_dataset.py
#!/usr/local/bin/python '''Converts Cornell Grasping Dataset data into TFRecords data format using Example protos. The raw data set resides in png and txt files located in the following structure: dataset/03/pcd0302r.png dataset/03/pcd0302cpos.txt ''' import os import errno import traceback import itertools import six import os import glob import numpy as np import numpy as np import tensorflow as tf import re from scipy.ndimage.filters import median_filter # progress bars https://github.com/tqdm/tqdm # import tqdm without enforcing it as a dependency try: from tqdm import tqdm except ImportError: def tqdm(*args, **kwargs): if args: return args[0] return kwargs.get('iterable', None) from tensorflow.python.platform import flags from tensorflow.python.platform import gfile from tensorflow.python.ops import data_flow_ops from tensorflow.python.keras.utils import get_file from tensorflow.python.keras._impl.keras.utils.data_utils import _hash_file import keras from keras import backend as K flags.DEFINE_string('data_dir', os.path.join(os.path.expanduser("~"), '.keras', 'datasets', 'cornell_grasping'), """Path to dataset in TFRecord format (aka Example protobufs) and feature csv files.""") flags.DEFINE_string('grasp_dataset', 'all', 'TODO(ahundt): integrate with brainrobotdata or allow subsets to be specified') flags.DEFINE_boolean('grasp_download', True, """Download the grasp_dataset to data_dir if it is not already present.""") FLAGS = flags.FLAGS def mkdir_p(path): """Create the specified path on the filesystem like the `mkdir -p` command Creates one or more filesystem directory levels as needed, and does not return an error if the directory already exists. """ # http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def is_sequence(arg): """Returns true if arg is a list or another Python Sequence, and false otherwise. source: https://stackoverflow.com/a/17148334/99379 """ return (not hasattr(arg, "strip") and hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")) class GraspDataset(object): """Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. # Arguments data_dir: Path to dataset in TFRecord format (aka Example protobufs) and feature csv files. `~/.keras/datasets/grasping` by default. dataset: 'all' to load all the data. download: True to actually download the dataset, also see FLAGS. """ def __init__(self, data_dir=None, dataset=None, download=None, verbose=0): if data_dir is None: data_dir = FLAGS.data_dir self.data_dir = data_dir if dataset is None: dataset = FLAGS.grasp_dataset self.dataset = dataset if download is None: download = FLAGS.grasp_download if download: self.download(data_dir, dataset) self.verbose = verbose def download(self, data_dir=None, dataset='all'): '''Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. Includes grasp_listing.txt with all files in all datasets; the feature csv files which specify the dataset size, the features (data channels), and the number of grasps; and the tfrecord files which actually contain all the data. If `grasp_listing_hashed.txt` is present, an additional hashing step will will be completed to verify dataset integrity. 
`grasp_listing_hashed.txt` will be generated automatically when downloading with `dataset='all'`. # Arguments dataset: The name of the dataset to download, downloads all by default with the '' parameter, 102 will download the 102 feature dataset found in grasp_listing.txt. # Returns list of paths to the downloaded files ''' dataset = self._update_dataset_param(dataset) if data_dir is None: if self.data_dir is None: data_dir = FLAGS.data_dir else: data_dir = self.data_dir mkdir_p(data_dir) print('Downloading datasets to: ', data_dir) url_prefix = '' # If a hashed version of the listing is available, # download the dataset and verify hashes to prevent data corruption. listing_hash = os.path.join(data_dir, 'grasp_listing_hash.txt') if os.path.isfile(listing_hash): files_and_hashes = np.genfromtxt(listing_hash, dtype='str', delimiter=' ') files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, file_hash=hash_str, extract=True) for fpath, hash_str in tqdm(files_and_hashes) if '_' + str(dataset) in fpath] else: # If a hashed version of the listing is not available, # simply download the dataset normally. listing_url = 'https://raw.githubusercontent.com/ahundt/robot-grasp-detection/master/grasp_listing.txt' grasp_listing_path = get_file('grasp_listing.txt', listing_url, cache_subdir=data_dir) grasp_files = np.genfromtxt(grasp_listing_path, dtype=str) files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, extract=True) for fpath in tqdm(grasp_files) if '_' + dataset in fpath] # If all files are downloaded, generate a hashed listing. if dataset is 'all' or dataset is '': print('Hashing all dataset files to prevent corruption...') hashes = [] for i, f in enumerate(tqdm(files)): hashes.append(_hash_file(f)) file_hash_np = np.column_stack([grasp_files, hashes]) with open(listing_hash, 'wb') as hash_file: np.savetxt(hash_file, file_hash_np, fmt='%s', delimiter=' ', header='file_path sha256') print('Hashing complete, {} contains each url plus hash, and will be used to verify the ' 'dataset during future calls to download().'.format(listing_hash)) return files def _update_dataset_param(self, dataset): """Internal function to configure which subset of the datasets is being used. Helps to choose a reasonable default action based on previous user parameters. """ if dataset is None and self.dataset is None: return [] if dataset is 'all': dataset = '' if dataset is None and self.dataset is not None: dataset = self.dataset return dataset class ImageCoder(object): def __init__(self):
return self._sess.run(self._decode_png, feed_dict={self._decode_png_data: image_data}) def _process_image(filename, coder): # Decode the image with open(filename) as f: image_data = f.read() image = coder.decode_png(image_data) assert len(image.shape) == 3 height = image.shape[0] width = image.shape[1] assert image.shape[2] == 3 return image_data, height, width def _process_bboxes(name): '''Create a list with the coordinates of the grasping rectangles. Every element is either x or y of a vertex.''' with open(name, 'r') as f: bboxes = list(map( lambda coordinate: float(coordinate), f.read().strip().split())) return bboxes def _int64_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(int64_list=tf.train.Int64List(value=v)) def _floats_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(float_list=tf.train.FloatList(value=v)) def _bytes_feature(v): return tf.train.Feature(bytes_list=tf.train.BytesList(value=[v])) def _convert_to_example(filename, bboxes, image_buffer, height, width): # Build an Example proto for an example example = tf.train.Example(features=tf.train.Features(feature={ 'image/filename': _bytes_feature(filename), 'image/encoded': _bytes_feature(image_buffer), 'image/height': _int64_feature(height), 'image/width': _int64_feature(width), 'bboxes': _floats_feature(bboxes)})) return example def main(): gd = GraspDataset() if FLAGS.grasp_download: gd.download(dataset=FLAGS.grasp_dataset) train_file = os.path.join(FLAGS.data_dir, 'train-cgd') validation_file = os.path.join(FLAGS.data_dir, 'validation-cgd') print(train_file) print(validation_file) writer_train = tf.python_io.TFRecordWriter(train_file) writer_validation = tf.python_io.TFRecordWriter(validation_file) # Creating a list with all the image paths folders = range(1,11) folders = ['0'+str(i) if i<10 else '10' for i in folders] filenames = [] for i in folders: for name in glob.glob(os.path.join(FLAGS.data_dir, i, 'pcd'+i+'*r.png')): filenames.append(name) # Shuffle the list of image paths np.random.shuffle(filenames) count = 0 valid_img = 0 train_img = 0 coder = ImageCoder() for filename in tqdm(filenames): bbox = filename[:-5]+'cpos.txt' bboxes = _process_bboxes(bbox) image_buffer, height, width = _process_image(filename, coder) example = _convert_to_example(filename, bboxes, image_buffer, height, width) # Split the dataset in 80% for training and 20% for validation if count % 5 == 0: writer_validation.write(example.SerializeToString()) valid_img +=1 else: writer_train.write(example.SerializeToString()) train_img +=1 count +=1 print('Done converting %d images in TFRecords with %d train images and %d validation images' % (count, train_img, valid_img)) writer_train.close() writer_validation.close() if __name__ == '__main__': main()
self._sess = tf.Session() self._decode_png_data = tf.placeholder(dtype=tf.string) self._decode_png = tf.image.decode_png(self._decode_png_data, channels=3) def decode_png(self, image_data):
random_line_split
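The middle of this example is the ImageCoder setup: the decode graph is built once in __init__ and every decode_png() call reuses the same Session instead of rebuilding the graph per image. A small sketch of that pattern follows, assuming TensorFlow 1.x as in the record (PngDecoder is an illustrative name); note that the raw PNG bytes should be read in binary mode ('rb'), which the record's _process_image omits.

import tensorflow as tf  # assumes TensorFlow 1.x, matching the record

class PngDecoder(object):
    """Build the decode graph once and reuse one Session for every image."""
    def __init__(self):
        self._sess = tf.Session()
        self._png_data = tf.placeholder(dtype=tf.string)
        self._decoded = tf.image.decode_png(self._png_data, channels=3)

    def decode(self, png_bytes):
        # Returns an HxWx3 uint8 numpy array.
        return self._sess.run(self._decoded, feed_dict={self._png_data: png_bytes})

# Hypothetical usage: read the raw bytes in binary mode, then decode through the shared session.
# with open('pcd0302r.png', 'rb') as f:
#     image = PngDecoder().decode(f.read())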
build_cgd_dataset.py
#!/usr/local/bin/python '''Converts Cornell Grasping Dataset data into TFRecords data format using Example protos. The raw data set resides in png and txt files located in the following structure: dataset/03/pcd0302r.png dataset/03/pcd0302cpos.txt ''' import os import errno import traceback import itertools import six import os import glob import numpy as np import numpy as np import tensorflow as tf import re from scipy.ndimage.filters import median_filter # progress bars https://github.com/tqdm/tqdm # import tqdm without enforcing it as a dependency try: from tqdm import tqdm except ImportError: def tqdm(*args, **kwargs): if args: return args[0] return kwargs.get('iterable', None) from tensorflow.python.platform import flags from tensorflow.python.platform import gfile from tensorflow.python.ops import data_flow_ops from tensorflow.python.keras.utils import get_file from tensorflow.python.keras._impl.keras.utils.data_utils import _hash_file import keras from keras import backend as K flags.DEFINE_string('data_dir', os.path.join(os.path.expanduser("~"), '.keras', 'datasets', 'cornell_grasping'), """Path to dataset in TFRecord format (aka Example protobufs) and feature csv files.""") flags.DEFINE_string('grasp_dataset', 'all', 'TODO(ahundt): integrate with brainrobotdata or allow subsets to be specified') flags.DEFINE_boolean('grasp_download', True, """Download the grasp_dataset to data_dir if it is not already present.""") FLAGS = flags.FLAGS def mkdir_p(path): """Create the specified path on the filesystem like the `mkdir -p` command Creates one or more filesystem directory levels as needed, and does not return an error if the directory already exists. """ # http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def is_sequence(arg): """Returns true if arg is a list or another Python Sequence, and false otherwise. source: https://stackoverflow.com/a/17148334/99379 """ return (not hasattr(arg, "strip") and hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")) class GraspDataset(object): """Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. # Arguments data_dir: Path to dataset in TFRecord format (aka Example protobufs) and feature csv files. `~/.keras/datasets/grasping` by default. dataset: 'all' to load all the data. download: True to actually download the dataset, also see FLAGS. """ def __init__(self, data_dir=None, dataset=None, download=None, verbose=0): if data_dir is None: data_dir = FLAGS.data_dir self.data_dir = data_dir if dataset is None: dataset = FLAGS.grasp_dataset self.dataset = dataset if download is None: download = FLAGS.grasp_download if download: self.download(data_dir, dataset) self.verbose = verbose def download(self, data_dir=None, dataset='all'): '''Cornell Grasping Dataset - about 5GB total size http:pr.cs.cornell.edu/grasping/rect_data/data.php Downloads to `~/.keras/datasets/cornell_grasping` by default. Includes grasp_listing.txt with all files in all datasets; the feature csv files which specify the dataset size, the features (data channels), and the number of grasps; and the tfrecord files which actually contain all the data. If `grasp_listing_hashed.txt` is present, an additional hashing step will will be completed to verify dataset integrity. 
`grasp_listing_hashed.txt` will be generated automatically when downloading with `dataset='all'`. # Arguments dataset: The name of the dataset to download, downloads all by default with the '' parameter, 102 will download the 102 feature dataset found in grasp_listing.txt. # Returns list of paths to the downloaded files ''' dataset = self._update_dataset_param(dataset) if data_dir is None: if self.data_dir is None: data_dir = FLAGS.data_dir else: data_dir = self.data_dir mkdir_p(data_dir) print('Downloading datasets to: ', data_dir) url_prefix = '' # If a hashed version of the listing is available, # download the dataset and verify hashes to prevent data corruption. listing_hash = os.path.join(data_dir, 'grasp_listing_hash.txt') if os.path.isfile(listing_hash): files_and_hashes = np.genfromtxt(listing_hash, dtype='str', delimiter=' ') files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, file_hash=hash_str, extract=True) for fpath, hash_str in tqdm(files_and_hashes) if '_' + str(dataset) in fpath] else: # If a hashed version of the listing is not available, # simply download the dataset normally. listing_url = 'https://raw.githubusercontent.com/ahundt/robot-grasp-detection/master/grasp_listing.txt' grasp_listing_path = get_file('grasp_listing.txt', listing_url, cache_subdir=data_dir) grasp_files = np.genfromtxt(grasp_listing_path, dtype=str) files = [get_file(fpath.split('/')[-1], url_prefix + fpath, cache_subdir=data_dir, extract=True) for fpath in tqdm(grasp_files) if '_' + dataset in fpath] # If all files are downloaded, generate a hashed listing. if dataset is 'all' or dataset is '': print('Hashing all dataset files to prevent corruption...') hashes = [] for i, f in enumerate(tqdm(files)):
file_hash_np = np.column_stack([grasp_files, hashes]) with open(listing_hash, 'wb') as hash_file: np.savetxt(hash_file, file_hash_np, fmt='%s', delimiter=' ', header='file_path sha256') print('Hashing complete, {} contains each url plus hash, and will be used to verify the ' 'dataset during future calls to download().'.format(listing_hash)) return files def _update_dataset_param(self, dataset): """Internal function to configure which subset of the datasets is being used. Helps to choose a reasonable default action based on previous user parameters. """ if dataset is None and self.dataset is None: return [] if dataset is 'all': dataset = '' if dataset is None and self.dataset is not None: dataset = self.dataset return dataset class ImageCoder(object): def __init__(self): self._sess = tf.Session() self._decode_png_data = tf.placeholder(dtype=tf.string) self._decode_png = tf.image.decode_png(self._decode_png_data, channels=3) def decode_png(self, image_data): return self._sess.run(self._decode_png, feed_dict={self._decode_png_data: image_data}) def _process_image(filename, coder): # Decode the image with open(filename) as f: image_data = f.read() image = coder.decode_png(image_data) assert len(image.shape) == 3 height = image.shape[0] width = image.shape[1] assert image.shape[2] == 3 return image_data, height, width def _process_bboxes(name): '''Create a list with the coordinates of the grasping rectangles. Every element is either x or y of a vertex.''' with open(name, 'r') as f: bboxes = list(map( lambda coordinate: float(coordinate), f.read().strip().split())) return bboxes def _int64_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(int64_list=tf.train.Int64List(value=v)) def _floats_feature(v): if not isinstance(v, list): v = [v] return tf.train.Feature(float_list=tf.train.FloatList(value=v)) def _bytes_feature(v): return tf.train.Feature(bytes_list=tf.train.BytesList(value=[v])) def _convert_to_example(filename, bboxes, image_buffer, height, width): # Build an Example proto for an example example = tf.train.Example(features=tf.train.Features(feature={ 'image/filename': _bytes_feature(filename), 'image/encoded': _bytes_feature(image_buffer), 'image/height': _int64_feature(height), 'image/width': _int64_feature(width), 'bboxes': _floats_feature(bboxes)})) return example def main(): gd = GraspDataset() if FLAGS.grasp_download: gd.download(dataset=FLAGS.grasp_dataset) train_file = os.path.join(FLAGS.data_dir, 'train-cgd') validation_file = os.path.join(FLAGS.data_dir, 'validation-cgd') print(train_file) print(validation_file) writer_train = tf.python_io.TFRecordWriter(train_file) writer_validation = tf.python_io.TFRecordWriter(validation_file) # Creating a list with all the image paths folders = range(1,11) folders = ['0'+str(i) if i<10 else '10' for i in folders] filenames = [] for i in folders: for name in glob.glob(os.path.join(FLAGS.data_dir, i, 'pcd'+i+'*r.png')): filenames.append(name) # Shuffle the list of image paths np.random.shuffle(filenames) count = 0 valid_img = 0 train_img = 0 coder = ImageCoder() for filename in tqdm(filenames): bbox = filename[:-5]+'cpos.txt' bboxes = _process_bboxes(bbox) image_buffer, height, width = _process_image(filename, coder) example = _convert_to_example(filename, bboxes, image_buffer, height, width) # Split the dataset in 80% for training and 20% for validation if count % 5 == 0: writer_validation.write(example.SerializeToString()) valid_img +=1 else: writer_train.write(example.SerializeToString()) train_img +=1 count +=1 
print('Done converting %d images in TFRecords with %d train images and %d validation images' % (count, train_img, valid_img)) writer_train.close() writer_validation.close() if __name__ == '__main__': main()
hashes.append(_hash_file(f))
conditional_block
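The conditional block filled in here appends _hash_file(f) for every downloaded file so a 'file_path sha256' listing can be written with np.savetxt. One detail worth flagging: the surrounding guard compares strings with 'is', which only works by accident of CPython string interning; '==' (or membership with 'in') is the reliable test. A hedged sketch of the same hashing-and-listing step with that fix, assuming grasp_files and the local paths line up one-to-one as in the record; write_hashed_listing is an illustrative name.

import hashlib
import numpy as np

def write_hashed_listing(grasp_files, local_files, listing_path):
    """After a full download, record one 'url sha256' pair per file."""
    hashes = []
    for path in local_files:
        digest = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                digest.update(chunk)
        hashes.append(digest.hexdigest())
    file_hash_np = np.column_stack([grasp_files, hashes])
    with open(listing_path, 'wb') as hash_file:
        np.savetxt(hash_file, file_hash_np, fmt='%s', delimiter=' ',
                   header='file_path sha256')

# The guard should use equality rather than identity:
# if dataset in ('all', ''):
#     write_hashed_listing(grasp_files, files, listing_hash)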
AdobeFontLabUtils.py
# Support module for FontLab scripts. __copyright__ = """ Copyright 2014 Adobe Systems Incorporated (http://www.adobe.com/). All Rights Reserved. This software is licensed as OpenSource, under the Apache License, Version 2.0. This license is available at: http://opensource.org/licenses/Apache-2.0. """ __doc__ = """ AdobeFontLabUtils v1.5 Feb 09 2009. Support module for FontLab scripts. Defines commonly used functions and globals. """ import sys import os import string import re import time import plistlib import StringIO from FL import * kGlyphOrderAndAliasDBName = "GlyphOrderAndAliasDB" kSharedDataName = "SharedData" ######################################################## # Routines for managing a directory tree of font files, # and for exporting/importing ata between the font files # and a master dictionary file ######################################################## def setFDKToolsPath(toolName): """ On Mac, add std FDK path to sys.environ PATH. On all, check if tool is available. """ toolPath = 0 if sys.platform == "darwin": paths = os.environ["PATH"] if "FDK/Tools/osx" not in paths: home = os.environ["HOME"] fdkPath = ":%s/bin/FDK/Tools/osx" % (home) os.environ["PATH"] = paths + fdkPath if os.name == "nt": p = os.popen("for %%i in (%s) do @echo. %%~$PATH:i" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log else: p = os.popen("which %s" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log if not toolPath: print """ The script cannot run the command-line program '%s'. Please make sure the AFDKO is installed, and the system environment variable PATH contains the path the to FDK sub-directory containing '%s'.""" % (toolName, toolName) return toolPath # get reid of new-line def checkControlKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_CONTROL) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.controlKey: notPressed = 0 return notPressed def checkShiftKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.shiftKey: notPressed = 0 return notPressed def checkAltKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. 
This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.optionKey: notPressed = 0 return notPressed def GetSharedDataPath(): sdPath = "" for path in sys.path: if not re.search(r"FDK/Tools", path): continue m = re.search(kSharedDataName, path) if not m: continue sdPath = path[:m.end()] if not sdPath: print "Error. The path to ",kSharedDataName," is not in the sys.path list." elif not os.path.exists(sdPath): print "Error.", sdPath,"does not exist." sdPath = "" return sdPath # fontDirPath is an absolute path to the font dir, supplied by FontLab # fontPSName is used to get the top family directory from the font library DB file. # so as to look back up the family tree for the GOASDB. def GetGOADBPath(fontDirPath, fontPSName): goadbPath = "" dirPath = fontDirPath trys = 3 # look first in the font's dir, then up to two levels up. while trys: goadbPath = os.path.join(dirPath, kGlyphOrderAndAliasDBName) if (goadbPath and os.path.exists(goadbPath)): break dirPath = os.path.dirname(dirPath) trys -= 1 if (goadbPath and os.path.exists(goadbPath)): return goadbPath # default to the global FDK GOADB. goadbPath = "" sharedDataDir = GetSharedDataPath() if sharedDataDir: goadbPath = os.path.join(sharedDataDir, kGlyphOrderAndAliasDBName ) if (goadbPath and os.path.exists(goadbPath)): return goadbPath print "Error. Could not find", kGlyphOrderAndAliasDBName,", even in FDK Shared Data Dir." goadbPath = "" return goadbPath def SplitGOADBEntries(line): global goadbIndex entry = string.split(line) if (len(entry) < 2) or (len(entry) > 3): print "Error in GOADB: bad entry - too many or two few columns <" + line + ">" entry = None if len(entry) == 3: if entry[2][0] != "u": print "Error in GOADB: 3rd column must be a uni or u Unicode name <" + line + ">" entry = None if len(entry) == 2: entry.append("") # Add GOADB index value if entry: entry.append(goadbIndex) goadbIndex = goadbIndex + 1 return entry ######################################################## # Misc utilities ######################################################## def RemoveComment(line): try: index = string.index(line, "#") line = line[:index] except: pass return line #return list of lines with comments and blank lines removed. def CleanLines(lineList): lineList = map(lambda line: RemoveComment(line) , lineList) lineList = filter(lambda line: string.strip(line), lineList) return lineList #split out lines from a stream of file data. def SplitLines(data): lineList = re.findall(r"([^\r\n]+)[\r\n]", data) return lineList def LoadGOADB(filePath): """ Read a glyph alias file for makeOTF into a dict.""" global goadbIndex finalNameDict = {} productionNameDict = {} goadbIndex = 0 gfile = open(filePath,"rb") data = gfile.read() gfile.close() glyphEntryList = SplitLines(data) glyphEntryList = CleanLines(glyphEntryList) glyphEntryList = map(SplitGOADBEntries, glyphEntryList) glyphEntryList = filter(lambda entry: entry, glyphEntryList) # drop out any entry == None for entry in glyphEntryList: finalNameDict[entry[0]] = [ entry[1], entry[2], entry[3] ] if productionNameDict.has_key(entry[1]): print "Error in GOADB: more than one final name for a production name!" print "\tfinal name 1:", productionNameDict[entry[1]], "Final name 2:", entry[0], "Production name:", entry[1] print "\tUsing Final name 2." 
productionNameDict[entry[1]] = [ entry[0], entry[2], entry[3] ] return finalNameDict, productionNameDict kDefaultReportExtension = "log" kDefaultLogSubdirectory = "logs" kDefaultVersionDigits = 3 kWriteBoth = 3 kWriteStdOut = 1 kWriteFile = 2 class Reporter: """ Logging class to let me echo output to both/either screen and a log file. Makes log files with same base name as font file, and special extension. Default extension is supplied, can be overridden. Trys to put log file in subdirectory under font file home directory.""" def __init__(self, fileOrPath, extension = kDefaultReportExtension): self.file = None self.fileName = None self.state = kWriteBoth if type(fileOrPath) == type(" "): # try to find or make log directory for report file. dir,name = os.path.split(fileOrPath) logDir = os.path.join(dir, kDefaultLogSubdirectory) if not os.path.exists(logDir): try: os.mkdir(logDir) except IOError: print "Failed to make log file subdir:", logDir return if os.path.exists(logDir): fileOrPath = os.path.join(logDir, name) basePath, fileExt = os.path.splitext(fileOrPath) self.fileName = self.makeSafeReportName(basePath, extension) try: self.file = open(self.fileName, "wt") except IOError: print "Failed to open file", self.fileName return else: self.fileName = None self.file = fileOrPath return def makeSafeReportName(self, baseFilePath, extension): global kDefaultVersionDigits """ make a report file name with a number 1 greater than any existing report file name with the same extension. We know the baseFilePath exists, as it comes from an open font file. We will not worry about 32 char name limits -> Mac OS X and Windows 2000 only. """ n = 1 dir, file = os.path.split(baseFilePath) numPattern = re.compile(file + "." + extension + r"v0*(\d+)$") fileList = os.listdir(dir) for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num + 1 if n > (10**kDefaultVersionDigits - 1): kDefaultVersionDigits = kDefaultVersionDigits +1 filePath = baseFilePath + "." + extension + "v" + str(n).zfill(kDefaultVersionDigits) return filePath def write(*args): self = args[0] text = [] for arg in args[1:]: try: text.append(str(arg)) except: text.append(repr(arg)) text = " ".join(text) if (self.state == kWriteBoth): print text if (self.file != sys.stdout): self.file.write(text + os.linesep) elif (self.state == kWriteFile): self.file.write(text + os.linesep) elif (self.state == kWriteStdOut): print text def set_state(self, state): self.state = state def close(self): if self.file and (self.file != sys.stdout): self.file.close() if self.fileName: print "Log saved to ", self.fileName def read(*args): # added to make this class look more like a file. pass def
(baseFilePath, extension=kDefaultReportExtension): """ FInd the latest report matching the path and extension. Assume that the highest number is the most recent. """ n = 1 dir, file = os.path.split(baseFilePath) logsDir = os.path.join(dir, kDefaultLogSubdirectory) matchString = r"%s.%sv0*(\d+)$" % (file, extension) print matchString numPattern = re.compile(matchString) fileList = os.listdir(logsDir) print fileList latestFile = "" for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num latestFile = file if latestFile: filePath = os.path.join(logsDir, latestFile) else: filePath = "" return filePath
getLatestReport
identifier_name
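getLatestReport, the identifier filled in above, scans the logs subdirectory for names like '<base>.<ext>vNNN' and returns the highest-numbered one. A compact sketch of the same lookup under that naming convention: int() stands in for the record's eval() when parsing the version number, and re.escape() keeps dots in the base name from acting as regex wildcards. latest_report is an illustrative name.

import os
import re

def latest_report(base_file_path, extension='log', logs_subdir='logs'):
    """Return the newest report, assuming the largest version suffix is the most recent."""
    directory, name = os.path.split(base_file_path)
    logs_dir = os.path.join(directory, logs_subdir)
    pattern = re.compile(r'%s\.%sv0*(\d+)$' % (re.escape(name), re.escape(extension)))
    best_num, best_file = 0, ''
    for candidate in os.listdir(logs_dir):
        match = pattern.match(candidate)
        if match and int(match.group(1)) >= best_num:
            best_num, best_file = int(match.group(1)), candidate
    return os.path.join(logs_dir, best_file) if best_file else ''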
AdobeFontLabUtils.py
# Support module for FontLab scripts. __copyright__ = """ Copyright 2014 Adobe Systems Incorporated (http://www.adobe.com/). All Rights Reserved. This software is licensed as OpenSource, under the Apache License, Version 2.0. This license is available at: http://opensource.org/licenses/Apache-2.0. """ __doc__ = """ AdobeFontLabUtils v1.5 Feb 09 2009. Support module for FontLab scripts. Defines commonly used functions and globals. """ import sys import os import string import re import time import plistlib import StringIO from FL import * kGlyphOrderAndAliasDBName = "GlyphOrderAndAliasDB" kSharedDataName = "SharedData" ######################################################## # Routines for managing a directory tree of font files, # and for exporting/importing ata between the font files # and a master dictionary file ######################################################## def setFDKToolsPath(toolName): """ On Mac, add std FDK path to sys.environ PATH. On all, check if tool is available. """ toolPath = 0 if sys.platform == "darwin": paths = os.environ["PATH"] if "FDK/Tools/osx" not in paths: home = os.environ["HOME"] fdkPath = ":%s/bin/FDK/Tools/osx" % (home) os.environ["PATH"] = paths + fdkPath if os.name == "nt": p = os.popen("for %%i in (%s) do @echo. %%~$PATH:i" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log else: p = os.popen("which %s" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log if not toolPath: print """ The script cannot run the command-line program '%s'. Please make sure the AFDKO is installed, and the system environment variable PATH contains the path the to FDK sub-directory containing '%s'.""" % (toolName, toolName) return toolPath # get reid of new-line def checkControlKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_CONTROL) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.controlKey: notPressed = 0 return notPressed def checkShiftKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.shiftKey: notPressed = 0 return notPressed def checkAltKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. 
This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.optionKey: notPressed = 0 return notPressed def GetSharedDataPath(): sdPath = "" for path in sys.path: if not re.search(r"FDK/Tools", path): continue m = re.search(kSharedDataName, path) if not m: continue sdPath = path[:m.end()] if not sdPath: print "Error. The path to ",kSharedDataName," is not in the sys.path list." elif not os.path.exists(sdPath): print "Error.", sdPath,"does not exist." sdPath = "" return sdPath # fontDirPath is an absolute path to the font dir, supplied by FontLab # fontPSName is used to get the top family directory from the font library DB file. # so as to look back up the family tree for the GOASDB. def GetGOADBPath(fontDirPath, fontPSName): goadbPath = "" dirPath = fontDirPath trys = 3 # look first in the font's dir, then up to two levels up. while trys: goadbPath = os.path.join(dirPath, kGlyphOrderAndAliasDBName) if (goadbPath and os.path.exists(goadbPath)): break dirPath = os.path.dirname(dirPath) trys -= 1 if (goadbPath and os.path.exists(goadbPath)): return goadbPath # default to the global FDK GOADB. goadbPath = "" sharedDataDir = GetSharedDataPath() if sharedDataDir: goadbPath = os.path.join(sharedDataDir, kGlyphOrderAndAliasDBName ) if (goadbPath and os.path.exists(goadbPath)): return goadbPath print "Error. Could not find", kGlyphOrderAndAliasDBName,", even in FDK Shared Data Dir." goadbPath = "" return goadbPath def SplitGOADBEntries(line): global goadbIndex entry = string.split(line) if (len(entry) < 2) or (len(entry) > 3): print "Error in GOADB: bad entry - too many or two few columns <" + line + ">" entry = None if len(entry) == 3: if entry[2][0] != "u": print "Error in GOADB: 3rd column must be a uni or u Unicode name <" + line + ">" entry = None if len(entry) == 2: entry.append("") # Add GOADB index value if entry: entry.append(goadbIndex) goadbIndex = goadbIndex + 1 return entry ######################################################## # Misc utilities ######################################################## def RemoveComment(line): try: index = string.index(line, "#") line = line[:index] except: pass return line #return list of lines with comments and blank lines removed. def CleanLines(lineList): lineList = map(lambda line: RemoveComment(line) , lineList) lineList = filter(lambda line: string.strip(line), lineList) return lineList #split out lines from a stream of file data. def SplitLines(data):
def LoadGOADB(filePath): """ Read a glyph alias file for makeOTF into a dict.""" global goadbIndex finalNameDict = {} productionNameDict = {} goadbIndex = 0 gfile = open(filePath,"rb") data = gfile.read() gfile.close() glyphEntryList = SplitLines(data) glyphEntryList = CleanLines(glyphEntryList) glyphEntryList = map(SplitGOADBEntries, glyphEntryList) glyphEntryList = filter(lambda entry: entry, glyphEntryList) # drop out any entry == None for entry in glyphEntryList: finalNameDict[entry[0]] = [ entry[1], entry[2], entry[3] ] if productionNameDict.has_key(entry[1]): print "Error in GOADB: more than one final name for a production name!" print "\tfinal name 1:", productionNameDict[entry[1]], "Final name 2:", entry[0], "Production name:", entry[1] print "\tUsing Final name 2." productionNameDict[entry[1]] = [ entry[0], entry[2], entry[3] ] return finalNameDict, productionNameDict kDefaultReportExtension = "log" kDefaultLogSubdirectory = "logs" kDefaultVersionDigits = 3 kWriteBoth = 3 kWriteStdOut = 1 kWriteFile = 2 class Reporter: """ Logging class to let me echo output to both/either screen and a log file. Makes log files with same base name as font file, and special extension. Default extension is supplied, can be overridden. Trys to put log file in subdirectory under font file home directory.""" def __init__(self, fileOrPath, extension = kDefaultReportExtension): self.file = None self.fileName = None self.state = kWriteBoth if type(fileOrPath) == type(" "): # try to find or make log directory for report file. dir,name = os.path.split(fileOrPath) logDir = os.path.join(dir, kDefaultLogSubdirectory) if not os.path.exists(logDir): try: os.mkdir(logDir) except IOError: print "Failed to make log file subdir:", logDir return if os.path.exists(logDir): fileOrPath = os.path.join(logDir, name) basePath, fileExt = os.path.splitext(fileOrPath) self.fileName = self.makeSafeReportName(basePath, extension) try: self.file = open(self.fileName, "wt") except IOError: print "Failed to open file", self.fileName return else: self.fileName = None self.file = fileOrPath return def makeSafeReportName(self, baseFilePath, extension): global kDefaultVersionDigits """ make a report file name with a number 1 greater than any existing report file name with the same extension. We know the baseFilePath exists, as it comes from an open font file. We will not worry about 32 char name limits -> Mac OS X and Windows 2000 only. """ n = 1 dir, file = os.path.split(baseFilePath) numPattern = re.compile(file + "." + extension + r"v0*(\d+)$") fileList = os.listdir(dir) for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num + 1 if n > (10**kDefaultVersionDigits - 1): kDefaultVersionDigits = kDefaultVersionDigits +1 filePath = baseFilePath + "." + extension + "v" + str(n).zfill(kDefaultVersionDigits) return filePath def write(*args): self = args[0] text = [] for arg in args[1:]: try: text.append(str(arg)) except: text.append(repr(arg)) text = " ".join(text) if (self.state == kWriteBoth): print text if (self.file != sys.stdout): self.file.write(text + os.linesep) elif (self.state == kWriteFile): self.file.write(text + os.linesep) elif (self.state == kWriteStdOut): print text def set_state(self, state): self.state = state def close(self): if self.file and (self.file != sys.stdout): self.file.close() if self.fileName: print "Log saved to ", self.fileName def read(*args): # added to make this class look more like a file. 
pass def getLatestReport(baseFilePath, extension=kDefaultReportExtension): """ FInd the latest report matching the path and extension. Assume that the highest number is the most recent. """ n = 1 dir, file = os.path.split(baseFilePath) logsDir = os.path.join(dir, kDefaultLogSubdirectory) matchString = r"%s.%sv0*(\d+)$" % (file, extension) print matchString numPattern = re.compile(matchString) fileList = os.listdir(logsDir) print fileList latestFile = "" for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num latestFile = file if latestFile: filePath = os.path.join(logsDir, latestFile) else: filePath = "" return filePath
lineList = re.findall(r"([^\r\n]+)[\r\n]", data) return lineList
identifier_body
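The body completed here is SplitLines(), which together with RemoveComment() and CleanLines() turns raw GOADB file data into non-empty, comment-free lines. A short sketch of that pipeline with str.splitlines() standing in for the regex, plus a tiny hypothetical GOADB-style input to show the effect; strip_comment and clean_lines are illustrative names.

def strip_comment(line):
    """Drop everything after the first '#', mirroring RemoveComment."""
    return line.split('#', 1)[0]

def clean_lines(text):
    """Split raw file data into non-empty, comment-free lines."""
    lines = (strip_comment(line) for line in text.splitlines())
    return [line for line in lines if line.strip()]

# Hypothetical GOADB-style input: 'final production uniXXXX' triples plus comments.
sample = "A A uni0041  # Latin capital A\n\n# a comment-only line\nB B uni0042\n"
assert [l.strip() for l in clean_lines(sample)] == ['A A uni0041', 'B B uni0042']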
AdobeFontLabUtils.py
# Support module for FontLab scripts. __copyright__ = """ Copyright 2014 Adobe Systems Incorporated (http://www.adobe.com/). All Rights Reserved. This software is licensed as OpenSource, under the Apache License, Version 2.0. This license is available at: http://opensource.org/licenses/Apache-2.0. """ __doc__ = """ AdobeFontLabUtils v1.5 Feb 09 2009. Support module for FontLab scripts. Defines commonly used functions and globals. """ import sys import os import string import re import time import plistlib import StringIO from FL import * kGlyphOrderAndAliasDBName = "GlyphOrderAndAliasDB" kSharedDataName = "SharedData" ######################################################## # Routines for managing a directory tree of font files, # and for exporting/importing ata between the font files # and a master dictionary file ######################################################## def setFDKToolsPath(toolName): """ On Mac, add std FDK path to sys.environ PATH. On all, check if tool is available. """ toolPath = 0 if sys.platform == "darwin": paths = os.environ["PATH"] if "FDK/Tools/osx" not in paths: home = os.environ["HOME"] fdkPath = ":%s/bin/FDK/Tools/osx" % (home) os.environ["PATH"] = paths + fdkPath if os.name == "nt": p = os.popen("for %%i in (%s) do @echo. %%~$PATH:i" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log else: p = os.popen("which %s" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log if not toolPath: print """ The script cannot run the command-line program '%s'. Please make sure the AFDKO is installed, and the system environment variable PATH contains the path the to FDK sub-directory containing '%s'.""" % (toolName, toolName) return toolPath # get reid of new-line def checkControlKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_CONTROL) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.controlKey: notPressed = 0 return notPressed def checkShiftKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.shiftKey: notPressed = 0 return notPressed def checkAltKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. 
This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.optionKey: notPressed = 0 return notPressed def GetSharedDataPath(): sdPath = "" for path in sys.path: if not re.search(r"FDK/Tools", path): continue m = re.search(kSharedDataName, path) if not m: continue sdPath = path[:m.end()] if not sdPath: print "Error. The path to ",kSharedDataName," is not in the sys.path list." elif not os.path.exists(sdPath): print "Error.", sdPath,"does not exist." sdPath = "" return sdPath # fontDirPath is an absolute path to the font dir, supplied by FontLab # fontPSName is used to get the top family directory from the font library DB file. # so as to look back up the family tree for the GOASDB. def GetGOADBPath(fontDirPath, fontPSName): goadbPath = "" dirPath = fontDirPath trys = 3 # look first in the font's dir, then up to two levels up. while trys: goadbPath = os.path.join(dirPath, kGlyphOrderAndAliasDBName) if (goadbPath and os.path.exists(goadbPath)): break dirPath = os.path.dirname(dirPath) trys -= 1 if (goadbPath and os.path.exists(goadbPath)): return goadbPath # default to the global FDK GOADB. goadbPath = "" sharedDataDir = GetSharedDataPath() if sharedDataDir: goadbPath = os.path.join(sharedDataDir, kGlyphOrderAndAliasDBName ) if (goadbPath and os.path.exists(goadbPath)): return goadbPath print "Error. Could not find", kGlyphOrderAndAliasDBName,", even in FDK Shared Data Dir." goadbPath = "" return goadbPath def SplitGOADBEntries(line): global goadbIndex entry = string.split(line) if (len(entry) < 2) or (len(entry) > 3): print "Error in GOADB: bad entry - too many or two few columns <" + line + ">" entry = None if len(entry) == 3: if entry[2][0] != "u": print "Error in GOADB: 3rd column must be a uni or u Unicode name <" + line + ">" entry = None if len(entry) == 2: entry.append("") # Add GOADB index value if entry: entry.append(goadbIndex) goadbIndex = goadbIndex + 1 return entry ######################################################## # Misc utilities ######################################################## def RemoveComment(line): try: index = string.index(line, "#") line = line[:index] except: pass return line #return list of lines with comments and blank lines removed. def CleanLines(lineList): lineList = map(lambda line: RemoveComment(line) , lineList) lineList = filter(lambda line: string.strip(line), lineList) return lineList #split out lines from a stream of file data. def SplitLines(data): lineList = re.findall(r"([^\r\n]+)[\r\n]", data) return lineList def LoadGOADB(filePath): """ Read a glyph alias file for makeOTF into a dict.""" global goadbIndex finalNameDict = {} productionNameDict = {} goadbIndex = 0 gfile = open(filePath,"rb") data = gfile.read() gfile.close() glyphEntryList = SplitLines(data) glyphEntryList = CleanLines(glyphEntryList) glyphEntryList = map(SplitGOADBEntries, glyphEntryList) glyphEntryList = filter(lambda entry: entry, glyphEntryList) # drop out any entry == None for entry in glyphEntryList: finalNameDict[entry[0]] = [ entry[1], entry[2], entry[3] ] if productionNameDict.has_key(entry[1]): print "Error in GOADB: more than one final name for a production name!" print "\tfinal name 1:", productionNameDict[entry[1]], "Final name 2:", entry[0], "Production name:", entry[1] print "\tUsing Final name 2." 
productionNameDict[entry[1]] = [ entry[0], entry[2], entry[3] ] return finalNameDict, productionNameDict kDefaultReportExtension = "log" kDefaultLogSubdirectory = "logs" kDefaultVersionDigits = 3 kWriteBoth = 3 kWriteStdOut = 1 kWriteFile = 2 class Reporter: """ Logging class to let me echo output to both/either screen and a log file. Makes log files with same base name as font file, and special extension. Default extension is supplied, can be overridden. Trys to put log file in subdirectory under font file home directory.""" def __init__(self, fileOrPath, extension = kDefaultReportExtension): self.file = None self.fileName = None self.state = kWriteBoth if type(fileOrPath) == type(" "): # try to find or make log directory for report file. dir,name = os.path.split(fileOrPath) logDir = os.path.join(dir, kDefaultLogSubdirectory) if not os.path.exists(logDir): try: os.mkdir(logDir) except IOError: print "Failed to make log file subdir:", logDir return if os.path.exists(logDir): fileOrPath = os.path.join(logDir, name) basePath, fileExt = os.path.splitext(fileOrPath) self.fileName = self.makeSafeReportName(basePath, extension) try: self.file = open(self.fileName, "wt") except IOError: print "Failed to open file", self.fileName return else: self.fileName = None self.file = fileOrPath return def makeSafeReportName(self, baseFilePath, extension): global kDefaultVersionDigits """ make a report file name with a number 1 greater than any existing report file name with the same extension. We know the baseFilePath exists, as it comes from an open font file. We will not worry about 32 char name limits -> Mac OS X and Windows 2000 only. """ n = 1
numPattern = re.compile(file + "." + extension + r"v0*(\d+)$") fileList = os.listdir(dir) for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num + 1 if n > (10**kDefaultVersionDigits - 1): kDefaultVersionDigits = kDefaultVersionDigits +1 filePath = baseFilePath + "." + extension + "v" + str(n).zfill(kDefaultVersionDigits) return filePath def write(*args): self = args[0] text = [] for arg in args[1:]: try: text.append(str(arg)) except: text.append(repr(arg)) text = " ".join(text) if (self.state == kWriteBoth): print text if (self.file != sys.stdout): self.file.write(text + os.linesep) elif (self.state == kWriteFile): self.file.write(text + os.linesep) elif (self.state == kWriteStdOut): print text def set_state(self, state): self.state = state def close(self): if self.file and (self.file != sys.stdout): self.file.close() if self.fileName: print "Log saved to ", self.fileName def read(*args): # added to make this class look more like a file. pass def getLatestReport(baseFilePath, extension=kDefaultReportExtension): """ FInd the latest report matching the path and extension. Assume that the highest number is the most recent. """ n = 1 dir, file = os.path.split(baseFilePath) logsDir = os.path.join(dir, kDefaultLogSubdirectory) matchString = r"%s.%sv0*(\d+)$" % (file, extension) print matchString numPattern = re.compile(matchString) fileList = os.listdir(logsDir) print fileList latestFile = "" for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num latestFile = file if latestFile: filePath = os.path.join(logsDir, latestFile) else: filePath = "" return filePath
dir, file = os.path.split(baseFilePath)
random_line_split
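This split falls inside makeSafeReportName(), which picks a report version one higher than any existing '<base>.<ext>vNNN' file and widens the zero padding if the counter outgrows it. Below is a sketch of that numbering logic under the same filename convention, again with int() in place of the record's eval(); next_report_name is an illustrative name.

import os
import re

def next_report_name(base_file_path, extension='log', digits=3):
    """Pick a version number one higher than any existing report with this extension."""
    directory, name = os.path.split(base_file_path)
    pattern = re.compile(r'%s\.%sv0*(\d+)$' % (re.escape(name), re.escape(extension)))
    next_n = 1
    for candidate in os.listdir(directory or '.'):
        match = pattern.match(candidate)
        if match:
            next_n = max(next_n, int(match.group(1)) + 1)
    if next_n > 10 ** digits - 1:
        digits += 1  # widen the zero padding rather than overflow it
    return '%s.%sv%s' % (base_file_path, extension, str(next_n).zfill(digits))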
AdobeFontLabUtils.py
# Support module for FontLab scripts. __copyright__ = """ Copyright 2014 Adobe Systems Incorporated (http://www.adobe.com/). All Rights Reserved. This software is licensed as OpenSource, under the Apache License, Version 2.0. This license is available at: http://opensource.org/licenses/Apache-2.0. """ __doc__ = """ AdobeFontLabUtils v1.5 Feb 09 2009. Support module for FontLab scripts. Defines commonly used functions and globals. """ import sys import os import string import re import time import plistlib import StringIO from FL import * kGlyphOrderAndAliasDBName = "GlyphOrderAndAliasDB" kSharedDataName = "SharedData" ######################################################## # Routines for managing a directory tree of font files, # and for exporting/importing ata between the font files # and a master dictionary file ######################################################## def setFDKToolsPath(toolName): """ On Mac, add std FDK path to sys.environ PATH. On all, check if tool is available. """ toolPath = 0 if sys.platform == "darwin": paths = os.environ["PATH"] if "FDK/Tools/osx" not in paths: home = os.environ["HOME"] fdkPath = ":%s/bin/FDK/Tools/osx" % (home) os.environ["PATH"] = paths + fdkPath if os.name == "nt": p = os.popen("for %%i in (%s) do @echo. %%~$PATH:i" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log else: p = os.popen("which %s" % (toolName)) log = p.read() p.close() log = log.strip() if log: toolPath = log if not toolPath: print """ The script cannot run the command-line program '%s'. Please make sure the AFDKO is installed, and the system environment variable PATH contains the path the to FDK sub-directory containing '%s'.""" % (toolName, toolName) return toolPath # get reid of new-line def checkControlKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_CONTROL) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.controlKey: notPressed = 0 return notPressed def checkShiftKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.shiftKey: notPressed = 0 return notPressed def checkAltKeyPress(): notPressed = 1 if os.name == "nt": try: import win32api import win32con keyState = win32api.GetAsyncKeyState(win32con.VK_SHIFT) if keyState < 0: notPressed = 0 except ImportError: print "Note: to be able to set options for this script, you must install" print "win32all Python module from Mark Hammond. 
This can be found at:" print " http://www.python.net/crew/mhammond/win32/Downloads.html" print "or http://sourceforge.net/, and search for 'Python Windows Extensions." else: import Carbon.Evt import Carbon.Events modifiers = Carbon.Evt.GetCurrentKeyModifiers() if modifiers & Carbon.Events.optionKey: notPressed = 0 return notPressed def GetSharedDataPath(): sdPath = "" for path in sys.path: if not re.search(r"FDK/Tools", path): continue m = re.search(kSharedDataName, path) if not m: continue sdPath = path[:m.end()] if not sdPath: print "Error. The path to ",kSharedDataName," is not in the sys.path list." elif not os.path.exists(sdPath): print "Error.", sdPath,"does not exist." sdPath = "" return sdPath # fontDirPath is an absolute path to the font dir, supplied by FontLab # fontPSName is used to get the top family directory from the font library DB file. # so as to look back up the family tree for the GOASDB. def GetGOADBPath(fontDirPath, fontPSName): goadbPath = "" dirPath = fontDirPath trys = 3 # look first in the font's dir, then up to two levels up. while trys: goadbPath = os.path.join(dirPath, kGlyphOrderAndAliasDBName) if (goadbPath and os.path.exists(goadbPath)): break dirPath = os.path.dirname(dirPath) trys -= 1 if (goadbPath and os.path.exists(goadbPath)): return goadbPath # default to the global FDK GOADB. goadbPath = "" sharedDataDir = GetSharedDataPath() if sharedDataDir: goadbPath = os.path.join(sharedDataDir, kGlyphOrderAndAliasDBName ) if (goadbPath and os.path.exists(goadbPath)): return goadbPath print "Error. Could not find", kGlyphOrderAndAliasDBName,", even in FDK Shared Data Dir." goadbPath = "" return goadbPath def SplitGOADBEntries(line): global goadbIndex entry = string.split(line) if (len(entry) < 2) or (len(entry) > 3): print "Error in GOADB: bad entry - too many or two few columns <" + line + ">" entry = None if len(entry) == 3: if entry[2][0] != "u": print "Error in GOADB: 3rd column must be a uni or u Unicode name <" + line + ">" entry = None if len(entry) == 2:
# Add GOADB index value if entry: entry.append(goadbIndex) goadbIndex = goadbIndex + 1 return entry ######################################################## # Misc utilities ######################################################## def RemoveComment(line): try: index = string.index(line, "#") line = line[:index] except: pass return line #return list of lines with comments and blank lines removed. def CleanLines(lineList): lineList = map(lambda line: RemoveComment(line) , lineList) lineList = filter(lambda line: string.strip(line), lineList) return lineList #split out lines from a stream of file data. def SplitLines(data): lineList = re.findall(r"([^\r\n]+)[\r\n]", data) return lineList def LoadGOADB(filePath): """ Read a glyph alias file for makeOTF into a dict.""" global goadbIndex finalNameDict = {} productionNameDict = {} goadbIndex = 0 gfile = open(filePath,"rb") data = gfile.read() gfile.close() glyphEntryList = SplitLines(data) glyphEntryList = CleanLines(glyphEntryList) glyphEntryList = map(SplitGOADBEntries, glyphEntryList) glyphEntryList = filter(lambda entry: entry, glyphEntryList) # drop out any entry == None for entry in glyphEntryList: finalNameDict[entry[0]] = [ entry[1], entry[2], entry[3] ] if productionNameDict.has_key(entry[1]): print "Error in GOADB: more than one final name for a production name!" print "\tfinal name 1:", productionNameDict[entry[1]], "Final name 2:", entry[0], "Production name:", entry[1] print "\tUsing Final name 2." productionNameDict[entry[1]] = [ entry[0], entry[2], entry[3] ] return finalNameDict, productionNameDict kDefaultReportExtension = "log" kDefaultLogSubdirectory = "logs" kDefaultVersionDigits = 3 kWriteBoth = 3 kWriteStdOut = 1 kWriteFile = 2 class Reporter: """ Logging class to let me echo output to both/either screen and a log file. Makes log files with same base name as font file, and special extension. Default extension is supplied, can be overridden. Trys to put log file in subdirectory under font file home directory.""" def __init__(self, fileOrPath, extension = kDefaultReportExtension): self.file = None self.fileName = None self.state = kWriteBoth if type(fileOrPath) == type(" "): # try to find or make log directory for report file. dir,name = os.path.split(fileOrPath) logDir = os.path.join(dir, kDefaultLogSubdirectory) if not os.path.exists(logDir): try: os.mkdir(logDir) except IOError: print "Failed to make log file subdir:", logDir return if os.path.exists(logDir): fileOrPath = os.path.join(logDir, name) basePath, fileExt = os.path.splitext(fileOrPath) self.fileName = self.makeSafeReportName(basePath, extension) try: self.file = open(self.fileName, "wt") except IOError: print "Failed to open file", self.fileName return else: self.fileName = None self.file = fileOrPath return def makeSafeReportName(self, baseFilePath, extension): global kDefaultVersionDigits """ make a report file name with a number 1 greater than any existing report file name with the same extension. We know the baseFilePath exists, as it comes from an open font file. We will not worry about 32 char name limits -> Mac OS X and Windows 2000 only. """ n = 1 dir, file = os.path.split(baseFilePath) numPattern = re.compile(file + "." + extension + r"v0*(\d+)$") fileList = os.listdir(dir) for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num + 1 if n > (10**kDefaultVersionDigits - 1): kDefaultVersionDigits = kDefaultVersionDigits +1 filePath = baseFilePath + "." 
+ extension + "v" + str(n).zfill(kDefaultVersionDigits) return filePath def write(*args): self = args[0] text = [] for arg in args[1:]: try: text.append(str(arg)) except: text.append(repr(arg)) text = " ".join(text) if (self.state == kWriteBoth): print text if (self.file != sys.stdout): self.file.write(text + os.linesep) elif (self.state == kWriteFile): self.file.write(text + os.linesep) elif (self.state == kWriteStdOut): print text def set_state(self, state): self.state = state def close(self): if self.file and (self.file != sys.stdout): self.file.close() if self.fileName: print "Log saved to ", self.fileName def read(*args): # added to make this class look more like a file. pass def getLatestReport(baseFilePath, extension=kDefaultReportExtension): """ FInd the latest report matching the path and extension. Assume that the highest number is the most recent. """ n = 1 dir, file = os.path.split(baseFilePath) logsDir = os.path.join(dir, kDefaultLogSubdirectory) matchString = r"%s.%sv0*(\d+)$" % (file, extension) print matchString numPattern = re.compile(matchString) fileList = os.listdir(logsDir) print fileList latestFile = "" for file in fileList: match = numPattern.match(file) if match: num = match.group(1) num = eval(num) if num >= n: n = num latestFile = file if latestFile: filePath = os.path.join(logsDir, latestFile) else: filePath = "" return filePath
entry.append("")
conditional_block
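The Reporter class in the row above echoes messages to the screen, to a versioned log file beside the font, or to both. A minimal usage sketch of how a FontLab script might log with it, assuming the module is importable as AdobeFontLabUtils (a name inferred from its docstring) and using a made-up font path; this is an illustration, not part of the original scripts:

# Hypothetical illustration only: logging from a FontLab script via Reporter.
# The module name and font path are assumptions; Reporter, kWriteFile, write,
# set_state and close are the names defined in the row above.
from AdobeFontLabUtils import Reporter, kWriteFile

report = Reporter("/Fonts/MyFamily/MyFont.vfb")     # log goes to /Fonts/MyFamily/logs/MyFont.logv001
report.write("Checking glyph order for", "MyFont")  # echoed to stdout and written to the log
report.set_state(kWriteFile)                        # switch to file-only output
report.write("Verbose details go to the log only.")
report.close()                                      # closes the log and prints its location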
gap_stats.py
#!/usr/bin/env python from __future__ import division import os import sys import ntpath import collections from dsa_seq_utils.Sequence import GapSequence from dsa_seq_utils.SeqReader import SeqReader from dsa_seq_utils.stats import calculate_median from dsa_seq_utils.utilities import log from dsa_seq_utils.utilities import help_desired if __name__ == "__main__": usage = """ ___________ Description: Command line utility for analyzing gaps in a fasta file. One file can be analyzed, or up to 3 can be compared. Use this tool to compare a genome assembly pre and post gap filling with tools such as PBJelly. _____ Usage: python gap_stats.py [options] <sequence1.fasta> <sequence2.fasta> <sequence3.fasta> OPTIONS: -m Save a matplotlib gap length histogram in current working directory. * Requires matplotlib to be installed * -p Write a plain text file of all gap lengths in current working directory for use as input into other statistical analysis software. -b Make a gap bed file for each input fasta. -h Print help message. """ def parse_args(args_list): """ Given all command line arguments, make a dictionary containing all of the flags, and all of the fasta files. If the command line arguments either request help or raises an error, that will be done here. If this function returns, it can be assumed that the command line statement is ready for further analysis. :param args_list: List of command line arguments (sys.argv) :return: Dictionary specifying all flags and all fasta files. """ # If no arguments specified, print usage statement with no error. if len(args_list) == 1: sys.exit(usage) # Make all flags upper case to avoid case sensitivity. flags = [i.upper() for i in args_list if i.startswith('-')] # See if help is desired. If so, print usage with no error. if help_desired(flags): sys.exit(usage) # Retrieve fasta files. At least one, up to 3 is needed. fastas = [ i for i in args_list if i.endswith('.fasta') or i.endswith('.fa') or i.endswith('.fan') or i.endswith('.fas') ] # Make sure that at least one fasta file was found. if not fastas: print usage raise ValueError('No fasta files found.') # Make sure that no more than 3 fasta files have been selected. if len(fastas) > 3: print usage raise ValueError( 'A maximum of 3 fasta files can be compared at once. You entered %r fasta files.' % len(fastas) ) return { 'flags': flags, 'fastas': fastas } def write_gap_stats(info): """ Use info obtained in get_gap_info to write a summary stats csv file. :param info: Dictionary where key = fasta file value = ordered dictionary containing all gap info from get_gap_info """ with open('gap_stats.txt', 'w') as out_file: # Get each category from each fasta file. One row for each. all_percent_N = [str(100*(info[i]['total_N']/info[i]['total_nucleotides'])) for i in info.keys()] all_total_gaps = [str(info[i]['total_gaps']) for i in info.keys()] all_total_gaps_over_100 = [str(info[i]['total_gaps_over_100']) for i in info.keys()] all_longest_gap = [str(max(info[i]['all_gap_lengths'])) for i in info.keys()] all_medians = [str(calculate_median(info[i]['all_gap_lengths'])) for i in info.keys()] files = [ntpath.basename(f) for f in info.keys()] # Write rows out to csv file. # First, write out the header (gap metrics). out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % ('file_name', '%N', 'Total Gaps', 'Total Gaps Longer Than 100bp', 'Longest Gap', 'Median Gap Length')) # Write results for each file. 
for i in range(len(files)): out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % (files[i], all_percent_N[i], all_total_gaps[i], all_total_gaps_over_100[i], all_longest_gap[i], all_medians[i])) def write_bed_file(bed_dict, out_file_name): """ From a dictionary storing bed file info, write output file in bed format. :param bed_dict: Dict where keys = fasta headers, and values = coordinates of each gap in that sequence. :param out_file_name: Name for output bed file. """ with open(os.getcwd() + '/' + ntpath.basename(out_file_name), 'w') as out_file: for header in bed_dict.keys(): for coordinates in bed_dict[header]: out_file.write( '%s\t%r\t%r\n' %(header[1:], coordinates[0], coordinates[1]) ) def write_hist_img_file(lengths, labels):
def write_hist_text_file(lengths, labels): """ Write a plain text file to current working directory. 1 ordered column of all histogram lengths. This is for input into statistical analysis software such as R. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ for lengths_list, label in zip(lengths, labels): hist_file_name = label[:label.rfind('.')] + '.all_lengths.txt' with open(os.getcwd() + '/' + ntpath.basename(hist_file_name), 'w') as out_file: out_file.write(ntpath.basename(label) + '\n') for length in sorted(lengths_list): out_file.write(str(length) + '\n') def get_gap_info(in_file): """ Given a fasta file, find out some information regarding its global gap content. :param in_file: Fasta or multi-fasta with sequences for gap analysis. :return: dictionary with total_N, total_nucleotides, total_gaps and all_gap_lengths """ # Initialize values to be computed. total_N = 0 total_nucleotides = 0 total_gaps = 0 total_gaps_over_100 = 0 all_gap_lengths = [] # Use a dictionary to store bed coordinates. # key = fasta header # Value = list of tuples corresponding to genomic coordinates. bed_gaps = collections.OrderedDict() # Iterate through each sequence in the fasta, # and get gap info from each. sequences = SeqReader(in_file) for header, sequence in sequences.parse_fasta(): gap_sequence = GapSequence(sequence) # Get total number of 'N' characters for this sequence. total_N += gap_sequence.count_Ns() # Get total number of nucleotides for this sequence. total_nucleotides += len(sequence) for gap in gap_sequence.get_gaps(): # Increment total number of gaps total_gaps += 1 if len(gap) > 100: total_gaps_over_100 += 1 # Save this gap length to master list. all_gap_lengths.append(len(gap)) # Now fill in bed file data structure. all_coordinates = [(m.start(0), m.end(0)) for m in gap_sequence.get_gap_coords()] if all_coordinates: bed_gaps[header] = all_coordinates return { 'total_N': total_N, 'total_nucleotides': total_nucleotides, 'total_gaps': total_gaps, 'total_gaps_over_100': total_gaps_over_100, 'all_gap_lengths': all_gap_lengths, 'bed_gaps': bed_gaps } # Parse the command line arguments. arg_dict = parse_args(sys.argv) # Get gap info for each fasta. all_files_info = collections.OrderedDict() for fasta in arg_dict['fastas']: log(' ---- Analyzing gaps for %s' % fasta) all_files_info[fasta] = get_gap_info(fasta) # Write csv file with basic gap stats. write_gap_stats(all_files_info) # Check if bed file is desired. # Save to current working directory if so. if '-B' in arg_dict['flags']: log(' ---- Writing bed file(s).') for f in all_files_info.keys(): file_name = f[:f.rfind('.')] + '.gaps.bed' write_bed_file(all_files_info[f]['bed_gaps'], file_name) # Check if histogram is desired. # Save to current working directory if so. if '-M' in arg_dict['flags']: log(' ---- Writing histogram image file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_img_file(all_lengths, all_files_info.keys()) # Make a plain text file for plugging into ones # favorite statistical analysis software. if '-P' in arg_dict['flags']: log(' ---- Writing histogram plain text file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_text_file(all_lengths, all_files_info.keys())
""" Save a matplotlib length histogram image to current working directory. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ import matplotlib.pyplot as plt # Find the max and min values for plotting. max_length = max(max(i) for i in lengths) min_length = min(min(i) for i in lengths) bin_size = int(0.025*max_length) # Make histogram colors = ['r', 'g', 'b'] plt.hist( lengths, bins=range(min_length, max_length+bin_size, bin_size), color=colors[:len(lengths)], label=[ntpath.basename(l) for l in labels] ) plt.legend() plt.title('Gap Length Histogram') plt.xlabel('Gap Length (b)') plt.ylabel('Frequency') plt.savefig(os.getcwd() + '/gap_stats_hist.pdf')
identifier_body
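get_gap_info in the row above depends on GapSequence and SeqReader from dsa_seq_utils, which are not included in this dump. A self-contained sketch of the same gap-scanning idea, with a plain regular expression standing in for GapSequence; the helper name gap_summary is hypothetical:

# Simplified stand-in for get_gap_info: scan runs of 'N' with a regex and
# return the same fields that the original function reports per sequence.
import re

def gap_summary(sequence):
    gaps = [m.span() for m in re.finditer(r"N+", sequence.upper())]
    lengths = [end - start for start, end in gaps]
    return {
        "total_N": sum(lengths),
        "total_nucleotides": len(sequence),
        "total_gaps": len(gaps),
        "total_gaps_over_100": sum(1 for n in lengths if n > 100),
        "all_gap_lengths": lengths,
        "bed_gaps": gaps,
    }

print(gap_summary("ACGTNNNNNACGTNNACGT"))  # 2 gaps, 7 Ns out of 19 bases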
gap_stats.py
#!/usr/bin/env python from __future__ import division import os import sys import ntpath import collections from dsa_seq_utils.Sequence import GapSequence from dsa_seq_utils.SeqReader import SeqReader from dsa_seq_utils.stats import calculate_median from dsa_seq_utils.utilities import log from dsa_seq_utils.utilities import help_desired if __name__ == "__main__": usage = """ ___________ Description: Command line utility for analyzing gaps in a fasta file. One file can be analyzed, or up to 3 can be compared. Use this tool to compare a genome assembly pre and post gap filling with tools such as PBJelly. _____ Usage: python gap_stats.py [options] <sequence1.fasta> <sequence2.fasta> <sequence3.fasta> OPTIONS: -m Save a matplotlib gap length histogram in current working directory. * Requires matplotlib to be installed * -p Write a plain text file of all gap lengths in current working directory for use as input into other statistical analysis software. -b Make a gap bed file for each input fasta. -h Print help message. """ def parse_args(args_list): """ Given all command line arguments, make a dictionary containing all of the flags, and all of the fasta files. If the command line arguments either request help or raises an error, that will be done here. If this function returns, it can be assumed that the command line statement is ready for further analysis. :param args_list: List of command line arguments (sys.argv) :return: Dictionary specifying all flags and all fasta files. """ # If no arguments specified, print usage statement with no error. if len(args_list) == 1: sys.exit(usage) # Make all flags upper case to avoid case sensitivity. flags = [i.upper() for i in args_list if i.startswith('-')] # See if help is desired. If so, print usage with no error. if help_desired(flags): sys.exit(usage) # Retrieve fasta files. At least one, up to 3 is needed. fastas = [ i for i in args_list if i.endswith('.fasta') or i.endswith('.fa') or i.endswith('.fan') or i.endswith('.fas') ] # Make sure that at least one fasta file was found. if not fastas: print usage raise ValueError('No fasta files found.') # Make sure that no more than 3 fasta files have been selected. if len(fastas) > 3: print usage raise ValueError( 'A maximum of 3 fasta files can be compared at once. You entered %r fasta files.' % len(fastas) ) return { 'flags': flags, 'fastas': fastas } def write_gap_stats(info): """ Use info obtained in get_gap_info to write a summary stats csv file. :param info: Dictionary where key = fasta file value = ordered dictionary containing all gap info from get_gap_info """ with open('gap_stats.txt', 'w') as out_file: # Get each category from each fasta file. One row for each. all_percent_N = [str(100*(info[i]['total_N']/info[i]['total_nucleotides'])) for i in info.keys()] all_total_gaps = [str(info[i]['total_gaps']) for i in info.keys()] all_total_gaps_over_100 = [str(info[i]['total_gaps_over_100']) for i in info.keys()] all_longest_gap = [str(max(info[i]['all_gap_lengths'])) for i in info.keys()] all_medians = [str(calculate_median(info[i]['all_gap_lengths'])) for i in info.keys()] files = [ntpath.basename(f) for f in info.keys()] # Write rows out to csv file. # First, write out the header (gap metrics). out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % ('file_name', '%N', 'Total Gaps', 'Total Gaps Longer Than 100bp', 'Longest Gap', 'Median Gap Length')) # Write results for each file. 
for i in range(len(files)): out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % (files[i], all_percent_N[i], all_total_gaps[i], all_total_gaps_over_100[i], all_longest_gap[i], all_medians[i])) def write_bed_file(bed_dict, out_file_name): """ From a dictionary storing bed file info, write output file in bed format. :param bed_dict: Dict where keys = fasta headers, and values = coordinates of each gap in that sequence. :param out_file_name: Name for output bed file. """ with open(os.getcwd() + '/' + ntpath.basename(out_file_name), 'w') as out_file: for header in bed_dict.keys():
def write_hist_img_file(lengths, labels): """ Save a matplotlib length histogram image to current working directory. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ import matplotlib.pyplot as plt # Find the max and min values for plotting. max_length = max(max(i) for i in lengths) min_length = min(min(i) for i in lengths) bin_size = int(0.025*max_length) # Make histogram colors = ['r', 'g', 'b'] plt.hist( lengths, bins=range(min_length, max_length+bin_size, bin_size), color=colors[:len(lengths)], label=[ntpath.basename(l) for l in labels] ) plt.legend() plt.title('Gap Length Histogram') plt.xlabel('Gap Length (b)') plt.ylabel('Frequency') plt.savefig(os.getcwd() + '/gap_stats_hist.pdf') def write_hist_text_file(lengths, labels): """ Write a plain text file to current working directory. 1 ordered column of all histogram lengths. This is for input into statistical analysis software such as R. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ for lengths_list, label in zip(lengths, labels): hist_file_name = label[:label.rfind('.')] + '.all_lengths.txt' with open(os.getcwd() + '/' + ntpath.basename(hist_file_name), 'w') as out_file: out_file.write(ntpath.basename(label) + '\n') for length in sorted(lengths_list): out_file.write(str(length) + '\n') def get_gap_info(in_file): """ Given a fasta file, find out some information regarding its global gap content. :param in_file: Fasta or multi-fasta with sequences for gap analysis. :return: dictionary with total_N, total_nucleotides, total_gaps and all_gap_lengths """ # Initialize values to be computed. total_N = 0 total_nucleotides = 0 total_gaps = 0 total_gaps_over_100 = 0 all_gap_lengths = [] # Use a dictionary to store bed coordinates. # key = fasta header # Value = list of tuples corresponding to genomic coordinates. bed_gaps = collections.OrderedDict() # Iterate through each sequence in the fasta, # and get gap info from each. sequences = SeqReader(in_file) for header, sequence in sequences.parse_fasta(): gap_sequence = GapSequence(sequence) # Get total number of 'N' characters for this sequence. total_N += gap_sequence.count_Ns() # Get total number of nucleotides for this sequence. total_nucleotides += len(sequence) for gap in gap_sequence.get_gaps(): # Increment total number of gaps total_gaps += 1 if len(gap) > 100: total_gaps_over_100 += 1 # Save this gap length to master list. all_gap_lengths.append(len(gap)) # Now fill in bed file data structure. all_coordinates = [(m.start(0), m.end(0)) for m in gap_sequence.get_gap_coords()] if all_coordinates: bed_gaps[header] = all_coordinates return { 'total_N': total_N, 'total_nucleotides': total_nucleotides, 'total_gaps': total_gaps, 'total_gaps_over_100': total_gaps_over_100, 'all_gap_lengths': all_gap_lengths, 'bed_gaps': bed_gaps } # Parse the command line arguments. arg_dict = parse_args(sys.argv) # Get gap info for each fasta. all_files_info = collections.OrderedDict() for fasta in arg_dict['fastas']: log(' ---- Analyzing gaps for %s' % fasta) all_files_info[fasta] = get_gap_info(fasta) # Write csv file with basic gap stats. write_gap_stats(all_files_info) # Check if bed file is desired. # Save to current working directory if so. 
if '-B' in arg_dict['flags']: log(' ---- Writing bed file(s).') for f in all_files_info.keys(): file_name = f[:f.rfind('.')] + '.gaps.bed' write_bed_file(all_files_info[f]['bed_gaps'], file_name) # Check if histogram is desired. # Save to current working directory if so. if '-M' in arg_dict['flags']: log(' ---- Writing histogram image file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_img_file(all_lengths, all_files_info.keys()) # Make a plain text file for plugging into ones # favorite statistical analysis software. if '-P' in arg_dict['flags']: log(' ---- Writing histogram plain text file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_text_file(all_lengths, all_files_info.keys())
for coordinates in bed_dict[header]: out_file.write( '%s\t%r\t%r\n' %(header[1:], coordinates[0], coordinates[1]) )
conditional_block
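write_bed_file in the row above emits one BED line per gap and strips the leading '>' from each FASTA header. A tiny illustration of the expected input dictionary and the resulting lines; the header and coordinates are made-up sample data:

# Hypothetical input: FASTA header -> list of (start, end) gap coordinates.
bed_gaps = {">chr1 assembled": [(4, 9), (13, 15)]}
for header in bed_gaps:
    for start, end in bed_gaps[header]:
        print("%s\t%r\t%r" % (header[1:], start, end))
# chr1 assembled    4    9
# chr1 assembled    13   15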
gap_stats.py
#!/usr/bin/env python from __future__ import division import os import sys import ntpath import collections from dsa_seq_utils.Sequence import GapSequence from dsa_seq_utils.SeqReader import SeqReader from dsa_seq_utils.stats import calculate_median from dsa_seq_utils.utilities import log from dsa_seq_utils.utilities import help_desired if __name__ == "__main__": usage = """ ___________ Description: Command line utility for analyzing gaps in a fasta file. One file can be analyzed, or up to 3 can be compared. Use this tool to compare a genome assembly pre and post gap filling with tools such as PBJelly. _____ Usage: python gap_stats.py [options] <sequence1.fasta> <sequence2.fasta> <sequence3.fasta> OPTIONS: -m Save a matplotlib gap length histogram in current working directory. * Requires matplotlib to be installed * -p Write a plain text file of all gap lengths in current working directory for use as input into other statistical analysis software. -b Make a gap bed file for each input fasta. -h Print help message. """ def parse_args(args_list): """ Given all command line arguments, make a dictionary containing all of the flags, and all of the fasta files. If the command line arguments either request help or raises an error, that will be done here. If this function returns, it can be assumed that the command line statement is ready for further analysis. :param args_list: List of command line arguments (sys.argv) :return: Dictionary specifying all flags and all fasta files. """ # If no arguments specified, print usage statement with no error. if len(args_list) == 1: sys.exit(usage) # Make all flags upper case to avoid case sensitivity. flags = [i.upper() for i in args_list if i.startswith('-')] # See if help is desired. If so, print usage with no error. if help_desired(flags): sys.exit(usage) # Retrieve fasta files. At least one, up to 3 is needed. fastas = [ i for i in args_list if i.endswith('.fasta') or i.endswith('.fa') or i.endswith('.fan') or i.endswith('.fas') ] # Make sure that at least one fasta file was found. if not fastas: print usage raise ValueError('No fasta files found.') # Make sure that no more than 3 fasta files have been selected. if len(fastas) > 3: print usage raise ValueError( 'A maximum of 3 fasta files can be compared at once. You entered %r fasta files.' % len(fastas) ) return { 'flags': flags, 'fastas': fastas } def
(info): """ Use info obtained in get_gap_info to write a summary stats csv file. :param info: Dictionary where key = fasta file value = ordered dictionary containing all gap info from get_gap_info """ with open('gap_stats.txt', 'w') as out_file: # Get each category from each fasta file. One row for each. all_percent_N = [str(100*(info[i]['total_N']/info[i]['total_nucleotides'])) for i in info.keys()] all_total_gaps = [str(info[i]['total_gaps']) for i in info.keys()] all_total_gaps_over_100 = [str(info[i]['total_gaps_over_100']) for i in info.keys()] all_longest_gap = [str(max(info[i]['all_gap_lengths'])) for i in info.keys()] all_medians = [str(calculate_median(info[i]['all_gap_lengths'])) for i in info.keys()] files = [ntpath.basename(f) for f in info.keys()] # Write rows out to csv file. # First, write out the header (gap metrics). out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % ('file_name', '%N', 'Total Gaps', 'Total Gaps Longer Than 100bp', 'Longest Gap', 'Median Gap Length')) # Write results for each file. for i in range(len(files)): out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % (files[i], all_percent_N[i], all_total_gaps[i], all_total_gaps_over_100[i], all_longest_gap[i], all_medians[i])) def write_bed_file(bed_dict, out_file_name): """ From a dictionary storing bed file info, write output file in bed format. :param bed_dict: Dict where keys = fasta headers, and values = coordinates of each gap in that sequence. :param out_file_name: Name for output bed file. """ with open(os.getcwd() + '/' + ntpath.basename(out_file_name), 'w') as out_file: for header in bed_dict.keys(): for coordinates in bed_dict[header]: out_file.write( '%s\t%r\t%r\n' %(header[1:], coordinates[0], coordinates[1]) ) def write_hist_img_file(lengths, labels): """ Save a matplotlib length histogram image to current working directory. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ import matplotlib.pyplot as plt # Find the max and min values for plotting. max_length = max(max(i) for i in lengths) min_length = min(min(i) for i in lengths) bin_size = int(0.025*max_length) # Make histogram colors = ['r', 'g', 'b'] plt.hist( lengths, bins=range(min_length, max_length+bin_size, bin_size), color=colors[:len(lengths)], label=[ntpath.basename(l) for l in labels] ) plt.legend() plt.title('Gap Length Histogram') plt.xlabel('Gap Length (b)') plt.ylabel('Frequency') plt.savefig(os.getcwd() + '/gap_stats_hist.pdf') def write_hist_text_file(lengths, labels): """ Write a plain text file to current working directory. 1 ordered column of all histogram lengths. This is for input into statistical analysis software such as R. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ for lengths_list, label in zip(lengths, labels): hist_file_name = label[:label.rfind('.')] + '.all_lengths.txt' with open(os.getcwd() + '/' + ntpath.basename(hist_file_name), 'w') as out_file: out_file.write(ntpath.basename(label) + '\n') for length in sorted(lengths_list): out_file.write(str(length) + '\n') def get_gap_info(in_file): """ Given a fasta file, find out some information regarding its global gap content. :param in_file: Fasta or multi-fasta with sequences for gap analysis. :return: dictionary with total_N, total_nucleotides, total_gaps and all_gap_lengths """ # Initialize values to be computed. 
total_N = 0 total_nucleotides = 0 total_gaps = 0 total_gaps_over_100 = 0 all_gap_lengths = [] # Use a dictionary to store bed coordinates. # key = fasta header # Value = list of tuples corresponding to genomic coordinates. bed_gaps = collections.OrderedDict() # Iterate through each sequence in the fasta, # and get gap info from each. sequences = SeqReader(in_file) for header, sequence in sequences.parse_fasta(): gap_sequence = GapSequence(sequence) # Get total number of 'N' characters for this sequence. total_N += gap_sequence.count_Ns() # Get total number of nucleotides for this sequence. total_nucleotides += len(sequence) for gap in gap_sequence.get_gaps(): # Increment total number of gaps total_gaps += 1 if len(gap) > 100: total_gaps_over_100 += 1 # Save this gap length to master list. all_gap_lengths.append(len(gap)) # Now fill in bed file data structure. all_coordinates = [(m.start(0), m.end(0)) for m in gap_sequence.get_gap_coords()] if all_coordinates: bed_gaps[header] = all_coordinates return { 'total_N': total_N, 'total_nucleotides': total_nucleotides, 'total_gaps': total_gaps, 'total_gaps_over_100': total_gaps_over_100, 'all_gap_lengths': all_gap_lengths, 'bed_gaps': bed_gaps } # Parse the command line arguments. arg_dict = parse_args(sys.argv) # Get gap info for each fasta. all_files_info = collections.OrderedDict() for fasta in arg_dict['fastas']: log(' ---- Analyzing gaps for %s' % fasta) all_files_info[fasta] = get_gap_info(fasta) # Write csv file with basic gap stats. write_gap_stats(all_files_info) # Check if bed file is desired. # Save to current working directory if so. if '-B' in arg_dict['flags']: log(' ---- Writing bed file(s).') for f in all_files_info.keys(): file_name = f[:f.rfind('.')] + '.gaps.bed' write_bed_file(all_files_info[f]['bed_gaps'], file_name) # Check if histogram is desired. # Save to current working directory if so. if '-M' in arg_dict['flags']: log(' ---- Writing histogram image file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_img_file(all_lengths, all_files_info.keys()) # Make a plain text file for plugging into ones # favorite statistical analysis software. if '-P' in arg_dict['flags']: log(' ---- Writing histogram plain text file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_text_file(all_lengths, all_files_info.keys())
write_gap_stats
identifier_name
gap_stats.py
#!/usr/bin/env python from __future__ import division import os import sys import ntpath import collections from dsa_seq_utils.Sequence import GapSequence from dsa_seq_utils.SeqReader import SeqReader from dsa_seq_utils.stats import calculate_median from dsa_seq_utils.utilities import log from dsa_seq_utils.utilities import help_desired if __name__ == "__main__": usage = """ ___________ Description: Command line utility for analyzing gaps in a fasta file. One file can be analyzed, or up to 3 can be compared. Use this tool to compare a genome assembly pre and post gap filling with tools such as PBJelly. _____ Usage: python gap_stats.py [options] <sequence1.fasta> <sequence2.fasta> <sequence3.fasta> OPTIONS: -m Save a matplotlib gap length histogram in current working directory. * Requires matplotlib to be installed * -p Write a plain text file of all gap lengths in current working directory for use as input into other statistical analysis software. -b Make a gap bed file for each input fasta. -h Print help message. """ def parse_args(args_list): """ Given all command line arguments, make a dictionary containing all of the flags, and all of the fasta files. If the command line arguments either request help or raises an error, that will be done here. If this function returns, it can be assumed that the command line statement is ready for further analysis. :param args_list: List of command line arguments (sys.argv) :return: Dictionary specifying all flags and all fasta files. """ # If no arguments specified, print usage statement with no error. if len(args_list) == 1: sys.exit(usage) # Make all flags upper case to avoid case sensitivity. flags = [i.upper() for i in args_list if i.startswith('-')] # See if help is desired. If so, print usage with no error. if help_desired(flags): sys.exit(usage) # Retrieve fasta files. At least one, up to 3 is needed. fastas = [ i for i in args_list if i.endswith('.fasta') or i.endswith('.fa') or i.endswith('.fan') or i.endswith('.fas') ] # Make sure that at least one fasta file was found. if not fastas: print usage raise ValueError('No fasta files found.') # Make sure that no more than 3 fasta files have been selected. if len(fastas) > 3: print usage raise ValueError( 'A maximum of 3 fasta files can be compared at once. You entered %r fasta files.' % len(fastas) ) return { 'flags': flags, 'fastas': fastas } def write_gap_stats(info): """ Use info obtained in get_gap_info to write a summary stats csv file. :param info: Dictionary where key = fasta file value = ordered dictionary containing all gap info from get_gap_info """ with open('gap_stats.txt', 'w') as out_file: # Get each category from each fasta file. One row for each. all_percent_N = [str(100*(info[i]['total_N']/info[i]['total_nucleotides'])) for i in info.keys()] all_total_gaps = [str(info[i]['total_gaps']) for i in info.keys()] all_total_gaps_over_100 = [str(info[i]['total_gaps_over_100']) for i in info.keys()] all_longest_gap = [str(max(info[i]['all_gap_lengths'])) for i in info.keys()] all_medians = [str(calculate_median(info[i]['all_gap_lengths'])) for i in info.keys()] files = [ntpath.basename(f) for f in info.keys()] # Write rows out to csv file. # First, write out the header (gap metrics). out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % ('file_name', '%N', 'Total Gaps', 'Total Gaps Longer Than 100bp', 'Longest Gap', 'Median Gap Length')) # Write results for each file. 
for i in range(len(files)): out_file.write('%s\t%s\t%s\t%s\t%s\t%s\n' % (files[i], all_percent_N[i], all_total_gaps[i], all_total_gaps_over_100[i], all_longest_gap[i], all_medians[i])) def write_bed_file(bed_dict, out_file_name): """ From a dictionary storing bed file info, write output file in bed format. :param bed_dict: Dict where keys = fasta headers, and values = coordinates of each gap in that sequence. :param out_file_name: Name for output bed file. """ with open(os.getcwd() + '/' + ntpath.basename(out_file_name), 'w') as out_file: for header in bed_dict.keys(): for coordinates in bed_dict[header]: out_file.write( '%s\t%r\t%r\n' %(header[1:], coordinates[0], coordinates[1]) ) def write_hist_img_file(lengths, labels): """ Save a matplotlib length histogram image to current working directory. :param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ import matplotlib.pyplot as plt # Find the max and min values for plotting. max_length = max(max(i) for i in lengths) min_length = min(min(i) for i in lengths) bin_size = int(0.025*max_length) # Make histogram colors = ['r', 'g', 'b'] plt.hist( lengths, bins=range(min_length, max_length+bin_size, bin_size), color=colors[:len(lengths)], label=[ntpath.basename(l) for l in labels] ) plt.legend() plt.title('Gap Length Histogram') plt.xlabel('Gap Length (b)') plt.ylabel('Frequency') plt.savefig(os.getcwd() + '/gap_stats_hist.pdf') def write_hist_text_file(lengths, labels): """ Write a plain text file to current working directory. 1 ordered column of all histogram lengths. This is for input into statistical analysis software such as R.
with open(os.getcwd() + '/' + ntpath.basename(hist_file_name), 'w') as out_file: out_file.write(ntpath.basename(label) + '\n') for length in sorted(lengths_list): out_file.write(str(length) + '\n') def get_gap_info(in_file): """ Given a fasta file, find out some information regarding its global gap content. :param in_file: Fasta or multi-fasta with sequences for gap analysis. :return: dictionary with total_N, total_nucleotides, total_gaps and all_gap_lengths """ # Initialize values to be computed. total_N = 0 total_nucleotides = 0 total_gaps = 0 total_gaps_over_100 = 0 all_gap_lengths = [] # Use a dictionary to store bed coordinates. # key = fasta header # Value = list of tuples corresponding to genomic coordinates. bed_gaps = collections.OrderedDict() # Iterate through each sequence in the fasta, # and get gap info from each. sequences = SeqReader(in_file) for header, sequence in sequences.parse_fasta(): gap_sequence = GapSequence(sequence) # Get total number of 'N' characters for this sequence. total_N += gap_sequence.count_Ns() # Get total number of nucleotides for this sequence. total_nucleotides += len(sequence) for gap in gap_sequence.get_gaps(): # Increment total number of gaps total_gaps += 1 if len(gap) > 100: total_gaps_over_100 += 1 # Save this gap length to master list. all_gap_lengths.append(len(gap)) # Now fill in bed file data structure. all_coordinates = [(m.start(0), m.end(0)) for m in gap_sequence.get_gap_coords()] if all_coordinates: bed_gaps[header] = all_coordinates return { 'total_N': total_N, 'total_nucleotides': total_nucleotides, 'total_gaps': total_gaps, 'total_gaps_over_100': total_gaps_over_100, 'all_gap_lengths': all_gap_lengths, 'bed_gaps': bed_gaps } # Parse the command line arguments. arg_dict = parse_args(sys.argv) # Get gap info for each fasta. all_files_info = collections.OrderedDict() for fasta in arg_dict['fastas']: log(' ---- Analyzing gaps for %s' % fasta) all_files_info[fasta] = get_gap_info(fasta) # Write csv file with basic gap stats. write_gap_stats(all_files_info) # Check if bed file is desired. # Save to current working directory if so. if '-B' in arg_dict['flags']: log(' ---- Writing bed file(s).') for f in all_files_info.keys(): file_name = f[:f.rfind('.')] + '.gaps.bed' write_bed_file(all_files_info[f]['bed_gaps'], file_name) # Check if histogram is desired. # Save to current working directory if so. if '-M' in arg_dict['flags']: log(' ---- Writing histogram image file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_img_file(all_lengths, all_files_info.keys()) # Make a plain text file for plugging into ones # favorite statistical analysis software. if '-P' in arg_dict['flags']: log(' ---- Writing histogram plain text file.') all_lengths = [all_files_info[i]['all_gap_lengths'] for i in all_files_info.keys()] write_hist_text_file(all_lengths, all_files_info.keys())
:param lengths: List of Lists of all gap lengths for each fasta. :param labels: Labels to be used in the histogram image file. """ for lengths_list, label in zip(lengths, labels): hist_file_name = label[:label.rfind('.')] + '.all_lengths.txt'
random_line_split
wk11_main.py
import turtle import math wn=turtle.Screen() t1=turtle.Turtle() t1.color("red") t1.shape("turtle") t1.penup() def ring(): ring = turtle.Turtle() ring.penup() ring.setpos(-300,300) ring.pendown() ring.pensize(3) #-300,300 -> 300,300 -> 300,-300 -> -300,-300 for side in range(4): ring.fd(600) ring.right(90) ring.write(ring.pos()) ring.hideturtle() def turnright(): t1.right(45) def turnleft(): t1.left(45) def keyup(): t1.fd(100) def turnback(): t1.right(180) def mousegoto(x,y): t1.setpos(x,y) feedback() def keybye(): wn.bye() def addkeys(): wn.onkey(turnright,"Right") wn.onkey(turnleft,"Left") wn.onkey(keyup,"Up") wn.onkey(turnback,"Down") wn.onkey(keybye,"q") def addmouse(): wn.onclick(mousegoto) def feedback(): if t1.xcor() > 300 or t1.xcor() < -300: t1.right(180) t1.write("On the line") if t1.ycor() > 300 or t1.ycor() < -300:
def schoolLife(): survey = [["highly satisfactoty", "satisfaction", "dissatisfaction" ,"highly unsatisfactory"], [13.1, 37.1, 8.7, 1.5], [10.6, 34.6, 13.4, 1.9], [27.1, 40.0, 2.9, 1.5], [16.2, 37.8, 6.8, 0.8], [11.4, 29.8, 14.8, 4.9], [12.2, 26.5, 14.9, 4.4], [13.5, 29.7, 11.1, 2.4], [13.7, 37.6, 4.1, 1.2]] grade=survey[1:8] sSum=0 dsSum=0 for i in range(len(grade)): sSum = sSum + grade[i][0] + grade[i][1] dsSum = dsSum + grade[i][2] + grade[i][3] sAvg=sSum/len(grade) dsAvg=dsSum/len(grade) print "Average of (highly) Satisfaction:", sAvg print "Average of (highly) unsatisfactory :", dsAvg def speech(): Bush=["Vice President Cheney, Mr. Chief Justice, President Carter, President Bush, President Clinton, reverend clergy, distinguished guests, fellow citizens:", "On this day, prescribed by law and marked by ceremony, we celebrate the durable wisdom of our Constitution, and recall the deep commitments that unite our country.", "I am grateful for the honor of this hour, mindful of the consequential times in which we live, and determined to fulfill the oath that I have sworn and you have witnessed.", "At this second gathering, our duties are defined not by the words I use, but by the history we have seen together. For a half century, America defended our own freedom by standing watch on distant borders.", "After the shipwreck of communism came years of relative quiet, years of repose, years of sabbatical —and then there came a day of fire.", "We have seen our vulnerability —and we have seen its deepest source. For as long as whole regions of the world simmer in resentment and tyranny —prone to ideologies that feed hatred and excuse murder ", "— violence will gather, and multiply in destructive power, and cross the most defended borders, and raise a mortal threat. There is only one force of history that can break the reign of hatred and resentment,", "and expose the pretensions of tyrants, and reward the hopes of the decent and tolerant, and that is the force of human freedom.", "We are led, by events and common sense, to one conclusion: The survival of liberty in our land increasingly depends on the success of liberty in other lands. The best hope for peace in our world is the expansion of", "freedom in all the world.", "America's vital interests and our deepest beliefs are now one. From the day of our Founding, we have proclaimed that every man and woman on this earth has rights, and dignity, and matchless value, because they bear", " the image of the Maker of Heaven and earth. Across the generations we have proclaimed the imperative of self-government, because no one is fit to be a master, and no one deserves to be a slave. Advancing these ideals", "is the mission that created our Nation. It is the honorable achievement of our fathers. Now it is the urgent requirement of our nation's security, and the calling of our time.", "So it is the policy of the United States to seek and support the growth of democratic movements and institutions in every nation and culture, with the ultimate goal of ending tyranny in our world.", "This is not primarily the task of arms, though we will defend ourselves and our friends by force of arms when necessary. Freedom, by its nature, must be chosen, and defended by citizens, and sustained by the rule of ", " law and the protection of minorities. And when the soul of a nation finally speaks, the institutions that arise may reflect customs and traditions very different from our own. America will not impose our own style of", " government on the unwilling. 
Our goal instead is to help others find their own voice, attain their own freedom, and make their own way.", "The great objective of ending tyranny is the concentrated work of generations. The difficulty of the task is no excuse for avoiding it. America's influence is not unlimited, but fortunately for the oppressed, America's", " influence is considerable, and we will use it confidently in freedom's cause.", "My most solemn duty is to protect this nation and its people against further attacks and emerging threats. Some have unwisely chosen to test America's resolve, and have found it firm.", "We will persistently clarify the choice before every ruler and every nation: The moral choice between oppression, which is always wrong, and freedom, which is eternally right. America will not ", " pretend that jailed dissidents prefer their chains, or that women welcome humiliation and servitude, or that any human being aspires to live at the mercy of bullies.", "We will encourage reform in other governments by making clear that success in our relations will require the decent treatment of their own people. America's belief in human dignity will guide our policies, ", " yet rights must be more than the grudging concessions of dictators; they are secured by free dissent and the participation of the governed. In the long run, there is no justice without freedom, ", " and there can be no human rights without human liberty.", "Some, I know, have questioned the global appeal of liberty —though this time in history, four decades defined by the swiftest advance of freedom ever seen, ", " is an odd time for doubt. Americans, of all people, should never be surprised by the power of our ideals. Eventually, the call of freedom comes to every mind and every soul. ", " We do not accept the existence of permanent tyranny because we do not accept the possibility of permanent slavery. Liberty will come to those who love it.", "Today, America speaks anew to the peoples of the world:", "All who live in tyranny and hopelessness can know: the United States will not ignore your oppression, or excuse your oppressors. When you stand for your liberty, we will stand with you.", "Democratic reformers facing repression, prison, or exile can know: America sees you for who you are: the future leaders of your free country.", "The rulers of outlaw regimes can know that we still believe as Abraham Lincoln did: Those who deny freedom to others deserve it not for themselves; and, under the rule of a just God, cannot long retain it.", "The leaders of governments with long habits of control need to know: To serve your people you must learn to trust them. Start on this journey of progress and justice, and America will walk at your side.", "And all the allies of the United States can know: we honor your friendship, we rely on your counsel, and we depend on your help. Division among free nations is a primary goal of freedom's enemies. ", " The concerted effort of free nations to promote democracy is a prelude to our enemies' defeat.", "Today, I also speak anew to my fellow citizens:", "From all of you, I have asked patience in the hard task of securing America, which you have granted in good measure. Our country has accepted obligations that are difficult to fulfill,", " and would be dishonorable to abandon. Yet because we have acted in the great liberating tradition of this nation, tens of millions have achieved their freedom. And as hope kindles hope, ", " millions more will find it. 
By our efforts, we have lit a fire as well —a fire in the minds of men. It warms those who feel its power, it burns those who fight its progress, and one day this untamed ", " fire of freedom will reach the darkest corners of our world.", "A few Americans have accepted the hardest duties in this cause —in the quiet work of intelligence and diplomacy ... the idealistic work of helping raise up free governments ... ", " the dangerous and necessary work of fighting our enemies. Some have shown their devotion to our country in deaths that honored their whole lives —and we will always honor their names and their sacrifice.", "All Americans have witnessed this idealism, and some for the first time. I ask our youngest citizens to believe the evidence of your eyes. You have seen duty and allegiance in the determined faces of our soldiers. ", " You have seen that life is fragile, and evil is real, and courage triumphs. Make the choice to serve in a cause larger than your wants, larger than yourself —and in your days you will add not just to ", " the wealth of our country, but to its character.", "America has need of idealism and courage, because we have essential work at home —the unfinished work of American freedom. In a world moving toward liberty, ", " we are determined to show the meaning and promise of liberty.", "In America's ideal of freedom, citizens find the dignity and security of economic independence, instead of laboring on the edge of subsistence. ", " This is the broader definition of liberty that motivated the Homestead Act, the Social Security Act, and the G.I. Bill of Rights.", " And now we will extend this vision by reforming great institutions to serve the needs of our time. To give every American a stake in the promise and future of our country,", " we will bring the highest standards to our schools, and build an ownership society. We will widen the ownership of homes and businesses, retirement savings and health insurance —preparing ", " our people for the challenges of life in a free society. By making every citizen an agent of his or her own destiny, we will give our fellow Americans greater freedom from want and fear, ", " and make our society more prosperous and just and equal.", "In America's ideal of freedom, the public interest depends on private character —on integrity, and tolerance toward others, and the rule of conscience in our own lives. ", " Self-government relies, in the end, on the governing of the self. That edifice of character is built in families, supported by communities with standards, and sustained in our national ", " life by the truths of Sinai, the Sermon on the Mount, the words of the Koran, and the varied faiths of our people. Americans move forward in every generation by reaffirming all that is ", " good and true that came before —ideals of justice and conduct that are the same yesterday, today, and forever.", "In America's ideal of freedom, the exercise of rights is ennobled by service, and mercy, and a heart for the weak. Liberty for all does not mean independence from one another. Our nation relies", " on men and women who look after a neighbor and surround the lost with love. 
Americans, at our best, value the life we see in one another, and must always remember that even the unwanted have worth.", " And our country must abandon all the habits of racism, because we cannot carry the message of freedom and the baggage of bigotry at the same time.", "From the perspective of a single day, including this day of dedication, the issues and questions before our country are many. From the viewpoint of centuries, the questions that come to us are narrowed and few.", " Did our generation advance the cause of freedom? And did our character bring credit to that cause?", "These questions that judge us also unite us, because Americans of every party and background, Americans by choice and by birth, are bound to one another in the cause of freedom. We have known divisions, ", " which must be healed to move forward in great purposes —and I will strive in good faith to heal them. Yet those divisions do not define America. We felt the unity and fellowship of our nation when freedom ", " came under attack, and our response came like a single hand over a single heart. And we can feel that same unity and pride whenever America acts for good, and the victims of disaster are given hope, ", " and the unjust encounter justice, and the captives are set free.", "We go forward with complete confidence in the eventual triumph of freedom. Not because history runs on the wheels of inevitability; it is human choices that move events. Not because we consider ourselves ", " a chosen nation; God moves and chooses as He wills. We have confidence because freedom is the permanent hope of mankind, the hunger in dark places, the longing of the soul. When our Founders declared ", " a new order of the ages; when soldiers died in wave upon wave for a union based on liberty; when citizens marched in peaceful outrage under the banner Freedom Now —they were acting on an ancient hope that ", " is meant to be fulfilled. History has an ebb and flow of justice, but history also has a visible direction, set by liberty and the Author of Liberty.", "When the Declaration of Independence was first read in public and the Liberty Bell was sounded in celebration, a witness said, It rang as if it meant something. In our time it means something still. ", " America, in this young century, proclaims liberty throughout all the world, and to all the inhabitants thereof. Renewed in our strength — tested, but not weary — we are ready for the greatest achievements ", " in the history of freedom.", "May God bless you, and may He watch over the United States of America."] Bill_Clinton = [ "At this last presidential inauguration of the 20th century, let us lift our eyes toward the challenges that await us in the next century. It is our great good fortune that time and chance have put us not only at the edge of a new century, in a new millennium, but on the edge of a bright new prospect in human affairs - a moment that will define our course, and our character, for decades to come. We must keep our old democracy forever young. Guided by the ancient vision of a promised land, let us set our sights upon a land of new promise.", "The promise of America was born in the 18th century out of the bold conviction that we are all created equal. It was extended and preserved in the 19th century, when our nation spread across the continent, saved the union, and abolished the awful scourge of slavery.", "Then, in turmoil and triumph, that promise exploded onto the world stage to make this the American Century.", "And what a century it has been. 
America became the world's mightiest industrial power; saved the world from tyranny in two world wars and a long cold war; and time and again, reached out across the globe to millions who, like us, longed for the blessings of liberty.", "Along the way, Americans produced a great middle class and security in old age; built unrivaled centers of learning and opened public schools to all; split the atom and explored the heavens; invented the computer and the microchip; and deepened the wellspring of justice by making a revolution in civil rights for African Americans and all minorities, and extending the circle of citizenship, opportunity and dignity to women.", "Now, for the third time, a new century is upon us, and another time to choose. We began the 19th century with a choice, to spread our nation from coast to coast. We began the 20th century with a choice, to harness the Industrial Revolution to our values of free enterprise, conservation, and human decency. Those choices made all the difference. At the dawn of the 21st century a free people must now choose to shape the forces of the Information Age and the global society, to unleash the limitless potential of all our people, and, yes, to form a more perfect union.", "When last we gathered, our march to this new future seemed less certain than it does today. We vowed then to set a clear course to renew our nation.", "In these four years, we have been touched by tragedy, exhilarated by challenge, strengthened by achievement. America stands alone as the world's indispensable nation. Once again, our economy is the strongest on Earth. Once again, we are building stronger families, thriving communities, better educational opportunities, a cleaner environment. Problems that once seemed destined to deepen now bend to our efforts: our streets are safer and record numbers of our fellow citizens have moved from welfare to work.", "And once again, we have resolved for our time a great debate over the role of government. Today we can declare: Government is not the problem, and government is not the solution. We - the American people - we are the solution. Our founders understood that well and gave us a democracy strong enough to endure for centuries, flexible enough to face our common challenges and advance our common dreams in each new day.", "As times change, so government must change. We need a new government for a new century - humble enough not to try to solve all our problems for us, but strong enough to give us the tools to solve our problems for ourselves; a government that is smaller, lives within its means, and does more with less. Yet where it can stand up for our values and interests in the world, and where it can give Americans the power to make a real difference in their everyday lives, government should do more, not less. The preeminent mission of our new government is to give all Americans an opportunity - not a guarantee, but a real opportunity - to build better lives.", "Beyond that, my fellow citizens, the future is up to us. Our founders taught us that the preservation of our liberty and our union depends upon responsible citizenship. And we need a new sense of responsibility for a new century. 
There is work to do, work that government alone cannot do: teaching children to read; hiring people off welfare rolls; coming out from behind locked doors and shuttered windows to help reclaim our streets from drugs and gangs and crime; taking time out of our own lives to serve others.", "Each and every one of us, in our own way, must assume personal responsibility - not only for ourselves and our families, but for our neighbors and our nation. Our greatest responsibility is to embrace a new spirit of community for a new century. For any one of us to succeed, we must succeed as one America.", "The challenge of our past remains the challenge of our future - will we be one nation, one people, with one common destiny, or not? Will we all come together, or come apart?", "The divide of race has been America's constant curse. And each new wave of immigrants gives new targets to old prejudices. Prejudice and contempt, cloaked in the pretense of religious or political conviction are no different. These forces have nearly destroyed our nation in the past. They plague us still. They fuel the fanaticism of terror. And they torment the lives of millions in fractured nations all around the world.", "These obsessions cripple both those who hate and, of course, those who are hated, robbing both of what they might become. We cannot, we will not, succumb to the dark impulses that lurk in the far regions of the soul everywhere. We shall overcome them. And we shall replace them with the generous spirit of a people who feel at home with one another.", "Our rich texture of racial, religious and political diversity will be a Godsend in the 21st century. Great rewards will come to those who can live together, learn together, work together, forge new ties that bind together.", "As this new era approaches we can already see its broad outlines. Ten years ago, the Internet was the mystical province of physicists; today, it is a commonplace encyclopedia for millions of schoolchildren. Scientists now are decoding the blueprint of human life. Cures for our most feared illnesses seem close at hand.", "The world is no longer divided into two hostile camps. Instead, now we are building bonds with nations that once were our adversaries. Growing connections of commerce and culture give us a chance to lift the fortunes and spirits of people the world over. And for the very first time in all of history, more people on this planet live under democracy than dictatorship.", "My fellow Americans, as we look back at this remarkable century, we may ask, can we hope not just to follow, but even to surpass the achievements of the 20th century in America and to avoid the awful bloodshed that stained its legacy? To that question, every American here and every American in our land today must answer a resounding Yes.", "This is the heart of our task. With a new vision of government, a new sense of responsibility, a new spirit of community, we will sustain America's journey. The promise we sought in a new land we will find again in a land of new promise.", "In this new land, education will be every citizen's most prized possession. Our schools will have the highest standards in the world, igniting the spark of possibility in the eyes of every girl and every boy. And the doors of higher education will be open to all. The knowledge and power of the Information Age will be within reach not just of the few, but of every classroom, every library, every child. Parents and children will have time not only to work, but to read and play together. 
And the plans they make at their kitchen table will be those of a better home, a better job, the certain chance to go to college.", "Our streets will echo again with the laughter of our children, because no one will try to shoot them or sell them drugs anymore. Everyone who can work, will work, with today's permanent under class part of tomorrow's growing middle class. New miracles of medicine at last will reach not only those who can claim care now, but the children and hardworking families too long denied.", "We will stand mighty for peace and freedom, and maintain a strong defense against terror and destruction. Our children will sleep free from the threat of nuclear, chemical or biological weapons. Ports and airports, farms and factories will thrive with trade and innovation and ideas. And the world's greatest democracy will lead a whole world of democracies.", "Our land of new promise will be a nation that meets its obligations - a nation that balances its budget, but never loses the balance of its values. A nation where our grandparents have secure retirement and health care, and their grandchildren know we have made the reforms necessary to sustain those benefits for their time. A nation that fortifies the world's most productive economy even as it protects the great natural bounty of our water, air, and majestic land.", "And in this land of new promise, we will have reformed our politics so that the voice of the people will always speak louder than the din of narrow interests - regaining the participation and deserving the trust of all Americans.", "Fellow citizens, let us build that America, a nation ever moving forward toward realizing the full potential of all its citizens. Prosperity and power - yes, they are important, and we must maintain them. But let us never forget: The greatest progress we have made, and the greatest progress we have yet to make, is in the human heart. In the end, all the world's wealth and a thousand armies are no match for the strength and decency of the human spirit.", "Thirty-four years ago, the man whose life we celebrate today spoke to us down there, at the other end of this Mall, in words that moved the conscience of a nation. Like a prophet of old, he told of his dream that one day America would rise up and treat all its citizens as equals before the law and in the heart. Martin Luther King's dream was the American Dream. His quest is our quest: the ceaseless striving to live out our true creed. Our history has been built on such dreams and labors. And by our dreams and labors we will redeem the promise of America in the 21st century.", "To that effort I pledge all my strength and every power of my office. I ask the members of Congress here to join in that pledge. The American people returned to office a President of one party and a Congress of another. Surely, they did not do this to advance the politics of petty bickering and extreme partisanship they plainly deplore. No, they call on us instead to be repairers of the breach, and to move on with America's mission.", "America demands and deserves big things from us - and nothing big ever came from being small. Let us remember the timeless wisdom of Cardinal Bernardin, when facing the end of his own life. He said: “It is wrong to waste the precious gift of time, on acrimony and division.", "Fellow citizens, we must not waste the precious gift of this time. For all of us are on that same journey of our lives, and our journey, too, will come to an end. 
But the journey of our America must go on.", "And so, my fellow Americans, we must be strong, for there is much to dare. The demands of our time are great and they are different. Let us meet them with faith and courage, with patience and a grateful and happy heart. Let us shape the hope of this day into the noblest chapter in our history. Yes, let us build our bridge. A bridge wide enough and strong enough for every American to cross over to a blessed land of new promise.", "May those generations whose faces we cannot yet see, whose names we may never know, say of us here that we led our beloved land into a new century with the American Dream alive for all her children; with the American promise of a more perfect union a reality for all her people; with America's bright flame of freedom spreading throughout all the world.", "From the height of this place and the summit of this century, let us go forth. May God strengthen our hands for the good work ahead - and always, always bless our America." ] bsh=[] clin=[] for i in range(0, len(Bush)): for j in range(0, len(Bush[i].split())): bsh.append(Bush[i].split()[j]) for i in range(0, len(Bill_Clinton)): for j in range(0, len(Bill_Clinton[i].split())): clin.append(Bill_Clinton[i].split()[j]) d=dict() e=dict() for c in bsh: if c not in d: d[c]=1 else: d[c]=d[c]+1 for c in clin: if c not in e: e[c]=1 else: e[c]=e[c]+1 f=set() g=set() for n in d: f.add(d[n]) for n in e: g.add(e[n]) bb=list(f) cc=list(g) print "George. Bush - " for m in d: if(d[m]>bb[len(bb)-10]): print "==", m, " : ", d[m], print"\n" print "Bill. Clinton -" for m in e: if(e[m]>cc[len(cc)-10]): print "==", m, " : ", e[m], def lab11(): ring() addkeys() addmouse() turtle.listen() turtle.mainloop() schoolLife() speech() def main(): lab11() if __name__=="__main__": main()
t1.right(180)
t1.write("On the line")
conditional_block
wk11_main.py
import turtle
import math

wn=turtle.Screen()
t1=turtle.Turtle()
t1.color("red")
t1.shape("turtle")
t1.penup()

def ring():
def turnright(): t1.right(45) def turnleft(): t1.left(45) def keyup(): t1.fd(100) def turnback(): t1.right(180) def mousegoto(x,y): t1.setpos(x,y) feedback() def keybye(): wn.bye() def addkeys(): wn.onkey(turnright,"Right") wn.onkey(turnleft,"Left") wn.onkey(keyup,"Up") wn.onkey(turnback,"Down") wn.onkey(keybye,"q") def addmouse(): wn.onclick(mousegoto) def feedback(): if t1.xcor() > 300 or t1.xcor() < -300: t1.right(180) t1.write("On the line") if t1.ycor() > 300 or t1.ycor() < -300: t1.right(180) t1.write("On the line") def schoolLife(): survey = [["highly satisfactoty", "satisfaction", "dissatisfaction" ,"highly unsatisfactory"], [13.1, 37.1, 8.7, 1.5], [10.6, 34.6, 13.4, 1.9], [27.1, 40.0, 2.9, 1.5], [16.2, 37.8, 6.8, 0.8], [11.4, 29.8, 14.8, 4.9], [12.2, 26.5, 14.9, 4.4], [13.5, 29.7, 11.1, 2.4], [13.7, 37.6, 4.1, 1.2]] grade=survey[1:8] sSum=0 dsSum=0 for i in range(len(grade)): sSum = sSum + grade[i][0] + grade[i][1] dsSum = dsSum + grade[i][2] + grade[i][3] sAvg=sSum/len(grade) dsAvg=dsSum/len(grade) print "Average of (highly) Satisfaction:", sAvg print "Average of (highly) unsatisfactory :", dsAvg def speech(): Bush=["Vice President Cheney, Mr. Chief Justice, President Carter, President Bush, President Clinton, reverend clergy, distinguished guests, fellow citizens:", "On this day, prescribed by law and marked by ceremony, we celebrate the durable wisdom of our Constitution, and recall the deep commitments that unite our country.", "I am grateful for the honor of this hour, mindful of the consequential times in which we live, and determined to fulfill the oath that I have sworn and you have witnessed.", "At this second gathering, our duties are defined not by the words I use, but by the history we have seen together. For a half century, America defended our own freedom by standing watch on distant borders.", "After the shipwreck of communism came years of relative quiet, years of repose, years of sabbatical —and then there came a day of fire.", "We have seen our vulnerability —and we have seen its deepest source. For as long as whole regions of the world simmer in resentment and tyranny —prone to ideologies that feed hatred and excuse murder ", "— violence will gather, and multiply in destructive power, and cross the most defended borders, and raise a mortal threat. There is only one force of history that can break the reign of hatred and resentment,", "and expose the pretensions of tyrants, and reward the hopes of the decent and tolerant, and that is the force of human freedom.", "We are led, by events and common sense, to one conclusion: The survival of liberty in our land increasingly depends on the success of liberty in other lands. The best hope for peace in our world is the expansion of", "freedom in all the world.", "America's vital interests and our deepest beliefs are now one. From the day of our Founding, we have proclaimed that every man and woman on this earth has rights, and dignity, and matchless value, because they bear", " the image of the Maker of Heaven and earth. Across the generations we have proclaimed the imperative of self-government, because no one is fit to be a master, and no one deserves to be a slave. Advancing these ideals", "is the mission that created our Nation. It is the honorable achievement of our fathers. 
Now it is the urgent requirement of our nation's security, and the calling of our time.", "So it is the policy of the United States to seek and support the growth of democratic movements and institutions in every nation and culture, with the ultimate goal of ending tyranny in our world.", "This is not primarily the task of arms, though we will defend ourselves and our friends by force of arms when necessary. Freedom, by its nature, must be chosen, and defended by citizens, and sustained by the rule of ", " law and the protection of minorities. And when the soul of a nation finally speaks, the institutions that arise may reflect customs and traditions very different from our own. America will not impose our own style of", " government on the unwilling. Our goal instead is to help others find their own voice, attain their own freedom, and make their own way.", "The great objective of ending tyranny is the concentrated work of generations. The difficulty of the task is no excuse for avoiding it. America's influence is not unlimited, but fortunately for the oppressed, America's", " influence is considerable, and we will use it confidently in freedom's cause.", "My most solemn duty is to protect this nation and its people against further attacks and emerging threats. Some have unwisely chosen to test America's resolve, and have found it firm.", "We will persistently clarify the choice before every ruler and every nation: The moral choice between oppression, which is always wrong, and freedom, which is eternally right. America will not ", " pretend that jailed dissidents prefer their chains, or that women welcome humiliation and servitude, or that any human being aspires to live at the mercy of bullies.", "We will encourage reform in other governments by making clear that success in our relations will require the decent treatment of their own people. America's belief in human dignity will guide our policies, ", " yet rights must be more than the grudging concessions of dictators; they are secured by free dissent and the participation of the governed. In the long run, there is no justice without freedom, ", " and there can be no human rights without human liberty.", "Some, I know, have questioned the global appeal of liberty —though this time in history, four decades defined by the swiftest advance of freedom ever seen, ", " is an odd time for doubt. Americans, of all people, should never be surprised by the power of our ideals. Eventually, the call of freedom comes to every mind and every soul. ", " We do not accept the existence of permanent tyranny because we do not accept the possibility of permanent slavery. Liberty will come to those who love it.", "Today, America speaks anew to the peoples of the world:", "All who live in tyranny and hopelessness can know: the United States will not ignore your oppression, or excuse your oppressors. When you stand for your liberty, we will stand with you.", "Democratic reformers facing repression, prison, or exile can know: America sees you for who you are: the future leaders of your free country.", "The rulers of outlaw regimes can know that we still believe as Abraham Lincoln did: Those who deny freedom to others deserve it not for themselves; and, under the rule of a just God, cannot long retain it.", "The leaders of governments with long habits of control need to know: To serve your people you must learn to trust them. 
Start on this journey of progress and justice, and America will walk at your side.", "And all the allies of the United States can know: we honor your friendship, we rely on your counsel, and we depend on your help. Division among free nations is a primary goal of freedom's enemies. ", " The concerted effort of free nations to promote democracy is a prelude to our enemies' defeat.", "Today, I also speak anew to my fellow citizens:", "From all of you, I have asked patience in the hard task of securing America, which you have granted in good measure. Our country has accepted obligations that are difficult to fulfill,", " and would be dishonorable to abandon. Yet because we have acted in the great liberating tradition of this nation, tens of millions have achieved their freedom. And as hope kindles hope, ", " millions more will find it. By our efforts, we have lit a fire as well —a fire in the minds of men. It warms those who feel its power, it burns those who fight its progress, and one day this untamed ", " fire of freedom will reach the darkest corners of our world.", "A few Americans have accepted the hardest duties in this cause —in the quiet work of intelligence and diplomacy ... the idealistic work of helping raise up free governments ... ", " the dangerous and necessary work of fighting our enemies. Some have shown their devotion to our country in deaths that honored their whole lives —and we will always honor their names and their sacrifice.", "All Americans have witnessed this idealism, and some for the first time. I ask our youngest citizens to believe the evidence of your eyes. You have seen duty and allegiance in the determined faces of our soldiers. ", " You have seen that life is fragile, and evil is real, and courage triumphs. Make the choice to serve in a cause larger than your wants, larger than yourself —and in your days you will add not just to ", " the wealth of our country, but to its character.", "America has need of idealism and courage, because we have essential work at home —the unfinished work of American freedom. In a world moving toward liberty, ", " we are determined to show the meaning and promise of liberty.", "In America's ideal of freedom, citizens find the dignity and security of economic independence, instead of laboring on the edge of subsistence. ", " This is the broader definition of liberty that motivated the Homestead Act, the Social Security Act, and the G.I. Bill of Rights.", " And now we will extend this vision by reforming great institutions to serve the needs of our time. To give every American a stake in the promise and future of our country,", " we will bring the highest standards to our schools, and build an ownership society. We will widen the ownership of homes and businesses, retirement savings and health insurance —preparing ", " our people for the challenges of life in a free society. By making every citizen an agent of his or her own destiny, we will give our fellow Americans greater freedom from want and fear, ", " and make our society more prosperous and just and equal.", "In America's ideal of freedom, the public interest depends on private character —on integrity, and tolerance toward others, and the rule of conscience in our own lives. ", " Self-government relies, in the end, on the governing of the self. 
That edifice of character is built in families, supported by communities with standards, and sustained in our national ", " life by the truths of Sinai, the Sermon on the Mount, the words of the Koran, and the varied faiths of our people. Americans move forward in every generation by reaffirming all that is ", " good and true that came before —ideals of justice and conduct that are the same yesterday, today, and forever.", "In America's ideal of freedom, the exercise of rights is ennobled by service, and mercy, and a heart for the weak. Liberty for all does not mean independence from one another. Our nation relies", " on men and women who look after a neighbor and surround the lost with love. Americans, at our best, value the life we see in one another, and must always remember that even the unwanted have worth.", " And our country must abandon all the habits of racism, because we cannot carry the message of freedom and the baggage of bigotry at the same time.", "From the perspective of a single day, including this day of dedication, the issues and questions before our country are many. From the viewpoint of centuries, the questions that come to us are narrowed and few.", " Did our generation advance the cause of freedom? And did our character bring credit to that cause?", "These questions that judge us also unite us, because Americans of every party and background, Americans by choice and by birth, are bound to one another in the cause of freedom. We have known divisions, ", " which must be healed to move forward in great purposes —and I will strive in good faith to heal them. Yet those divisions do not define America. We felt the unity and fellowship of our nation when freedom ", " came under attack, and our response came like a single hand over a single heart. And we can feel that same unity and pride whenever America acts for good, and the victims of disaster are given hope, ", " and the unjust encounter justice, and the captives are set free.", "We go forward with complete confidence in the eventual triumph of freedom. Not because history runs on the wheels of inevitability; it is human choices that move events. Not because we consider ourselves ", " a chosen nation; God moves and chooses as He wills. We have confidence because freedom is the permanent hope of mankind, the hunger in dark places, the longing of the soul. When our Founders declared ", " a new order of the ages; when soldiers died in wave upon wave for a union based on liberty; when citizens marched in peaceful outrage under the banner Freedom Now —they were acting on an ancient hope that ", " is meant to be fulfilled. History has an ebb and flow of justice, but history also has a visible direction, set by liberty and the Author of Liberty.", "When the Declaration of Independence was first read in public and the Liberty Bell was sounded in celebration, a witness said, It rang as if it meant something. In our time it means something still. ", " America, in this young century, proclaims liberty throughout all the world, and to all the inhabitants thereof. Renewed in our strength — tested, but not weary — we are ready for the greatest achievements ", " in the history of freedom.", "May God bless you, and may He watch over the United States of America."] Bill_Clinton = [ "At this last presidential inauguration of the 20th century, let us lift our eyes toward the challenges that await us in the next century. 
It is our great good fortune that time and chance have put us not only at the edge of a new century, in a new millennium, but on the edge of a bright new prospect in human affairs - a moment that will define our course, and our character, for decades to come. We must keep our old democracy forever young. Guided by the ancient vision of a promised land, let us set our sights upon a land of new promise.", "The promise of America was born in the 18th century out of the bold conviction that we are all created equal. It was extended and preserved in the 19th century, when our nation spread across the continent, saved the union, and abolished the awful scourge of slavery.", "Then, in turmoil and triumph, that promise exploded onto the world stage to make this the American Century.", "And what a century it has been. America became the world's mightiest industrial power; saved the world from tyranny in two world wars and a long cold war; and time and again, reached out across the globe to millions who, like us, longed for the blessings of liberty.", "Along the way, Americans produced a great middle class and security in old age; built unrivaled centers of learning and opened public schools to all; split the atom and explored the heavens; invented the computer and the microchip; and deepened the wellspring of justice by making a revolution in civil rights for African Americans and all minorities, and extending the circle of citizenship, opportunity and dignity to women.", "Now, for the third time, a new century is upon us, and another time to choose. We began the 19th century with a choice, to spread our nation from coast to coast. We began the 20th century with a choice, to harness the Industrial Revolution to our values of free enterprise, conservation, and human decency. Those choices made all the difference. At the dawn of the 21st century a free people must now choose to shape the forces of the Information Age and the global society, to unleash the limitless potential of all our people, and, yes, to form a more perfect union.", "When last we gathered, our march to this new future seemed less certain than it does today. We vowed then to set a clear course to renew our nation.", "In these four years, we have been touched by tragedy, exhilarated by challenge, strengthened by achievement. America stands alone as the world's indispensable nation. Once again, our economy is the strongest on Earth. Once again, we are building stronger families, thriving communities, better educational opportunities, a cleaner environment. Problems that once seemed destined to deepen now bend to our efforts: our streets are safer and record numbers of our fellow citizens have moved from welfare to work.", "And once again, we have resolved for our time a great debate over the role of government. Today we can declare: Government is not the problem, and government is not the solution. We - the American people - we are the solution. Our founders understood that well and gave us a democracy strong enough to endure for centuries, flexible enough to face our common challenges and advance our common dreams in each new day.", "As times change, so government must change. We need a new government for a new century - humble enough not to try to solve all our problems for us, but strong enough to give us the tools to solve our problems for ourselves; a government that is smaller, lives within its means, and does more with less. 
Yet where it can stand up for our values and interests in the world, and where it can give Americans the power to make a real difference in their everyday lives, government should do more, not less. The preeminent mission of our new government is to give all Americans an opportunity - not a guarantee, but a real opportunity - to build better lives.", "Beyond that, my fellow citizens, the future is up to us. Our founders taught us that the preservation of our liberty and our union depends upon responsible citizenship. And we need a new sense of responsibility for a new century. There is work to do, work that government alone cannot do: teaching children to read; hiring people off welfare rolls; coming out from behind locked doors and shuttered windows to help reclaim our streets from drugs and gangs and crime; taking time out of our own lives to serve others.", "Each and every one of us, in our own way, must assume personal responsibility - not only for ourselves and our families, but for our neighbors and our nation. Our greatest responsibility is to embrace a new spirit of community for a new century. For any one of us to succeed, we must succeed as one America.", "The challenge of our past remains the challenge of our future - will we be one nation, one people, with one common destiny, or not? Will we all come together, or come apart?", "The divide of race has been America's constant curse. And each new wave of immigrants gives new targets to old prejudices. Prejudice and contempt, cloaked in the pretense of religious or political conviction are no different. These forces have nearly destroyed our nation in the past. They plague us still. They fuel the fanaticism of terror. And they torment the lives of millions in fractured nations all around the world.", "These obsessions cripple both those who hate and, of course, those who are hated, robbing both of what they might become. We cannot, we will not, succumb to the dark impulses that lurk in the far regions of the soul everywhere. We shall overcome them. And we shall replace them with the generous spirit of a people who feel at home with one another.", "Our rich texture of racial, religious and political diversity will be a Godsend in the 21st century. Great rewards will come to those who can live together, learn together, work together, forge new ties that bind together.", "As this new era approaches we can already see its broad outlines. Ten years ago, the Internet was the mystical province of physicists; today, it is a commonplace encyclopedia for millions of schoolchildren. Scientists now are decoding the blueprint of human life. Cures for our most feared illnesses seem close at hand.", "The world is no longer divided into two hostile camps. Instead, now we are building bonds with nations that once were our adversaries. Growing connections of commerce and culture give us a chance to lift the fortunes and spirits of people the world over. And for the very first time in all of history, more people on this planet live under democracy than dictatorship.", "My fellow Americans, as we look back at this remarkable century, we may ask, can we hope not just to follow, but even to surpass the achievements of the 20th century in America and to avoid the awful bloodshed that stained its legacy? To that question, every American here and every American in our land today must answer a resounding Yes.", "This is the heart of our task. 
With a new vision of government, a new sense of responsibility, a new spirit of community, we will sustain America's journey. The promise we sought in a new land we will find again in a land of new promise.", "In this new land, education will be every citizen's most prized possession. Our schools will have the highest standards in the world, igniting the spark of possibility in the eyes of every girl and every boy. And the doors of higher education will be open to all. The knowledge and power of the Information Age will be within reach not just of the few, but of every classroom, every library, every child. Parents and children will have time not only to work, but to read and play together. And the plans they make at their kitchen table will be those of a better home, a better job, the certain chance to go to college.", "Our streets will echo again with the laughter of our children, because no one will try to shoot them or sell them drugs anymore. Everyone who can work, will work, with today's permanent under class part of tomorrow's growing middle class. New miracles of medicine at last will reach not only those who can claim care now, but the children and hardworking families too long denied.", "We will stand mighty for peace and freedom, and maintain a strong defense against terror and destruction. Our children will sleep free from the threat of nuclear, chemical or biological weapons. Ports and airports, farms and factories will thrive with trade and innovation and ideas. And the world's greatest democracy will lead a whole world of democracies.", "Our land of new promise will be a nation that meets its obligations - a nation that balances its budget, but never loses the balance of its values. A nation where our grandparents have secure retirement and health care, and their grandchildren know we have made the reforms necessary to sustain those benefits for their time. A nation that fortifies the world's most productive economy even as it protects the great natural bounty of our water, air, and majestic land.", "And in this land of new promise, we will have reformed our politics so that the voice of the people will always speak louder than the din of narrow interests - regaining the participation and deserving the trust of all Americans.", "Fellow citizens, let us build that America, a nation ever moving forward toward realizing the full potential of all its citizens. Prosperity and power - yes, they are important, and we must maintain them. But let us never forget: The greatest progress we have made, and the greatest progress we have yet to make, is in the human heart. In the end, all the world's wealth and a thousand armies are no match for the strength and decency of the human spirit.", "Thirty-four years ago, the man whose life we celebrate today spoke to us down there, at the other end of this Mall, in words that moved the conscience of a nation. Like a prophet of old, he told of his dream that one day America would rise up and treat all its citizens as equals before the law and in the heart. Martin Luther King's dream was the American Dream. His quest is our quest: the ceaseless striving to live out our true creed. Our history has been built on such dreams and labors. And by our dreams and labors we will redeem the promise of America in the 21st century.", "To that effort I pledge all my strength and every power of my office. I ask the members of Congress here to join in that pledge. The American people returned to office a President of one party and a Congress of another. 
Surely, they did not do this to advance the politics of petty bickering and extreme partisanship they plainly deplore. No, they call on us instead to be repairers of the breach, and to move on with America's mission.", "America demands and deserves big things from us - and nothing big ever came from being small. Let us remember the timeless wisdom of Cardinal Bernardin, when facing the end of his own life. He said: “It is wrong to waste the precious gift of time, on acrimony and division.", "Fellow citizens, we must not waste the precious gift of this time. For all of us are on that same journey of our lives, and our journey, too, will come to an end. But the journey of our America must go on.", "And so, my fellow Americans, we must be strong, for there is much to dare. The demands of our time are great and they are different. Let us meet them with faith and courage, with patience and a grateful and happy heart. Let us shape the hope of this day into the noblest chapter in our history. Yes, let us build our bridge. A bridge wide enough and strong enough for every American to cross over to a blessed land of new promise.", "May those generations whose faces we cannot yet see, whose names we may never know, say of us here that we led our beloved land into a new century with the American Dream alive for all her children; with the American promise of a more perfect union a reality for all her people; with America's bright flame of freedom spreading throughout all the world.", "From the height of this place and the summit of this century, let us go forth. May God strengthen our hands for the good work ahead - and always, always bless our America." ] bsh=[] clin=[] for i in range(0, len(Bush)): for j in range(0, len(Bush[i].split())): bsh.append(Bush[i].split()[j]) for i in range(0, len(Bill_Clinton)): for j in range(0, len(Bill_Clinton[i].split())): clin.append(Bill_Clinton[i].split()[j]) d=dict() e=dict() for c in bsh: if c not in d: d[c]=1 else: d[c]=d[c]+1 for c in clin: if c not in e: e[c]=1 else: e[c]=e[c]+1 f=set() g=set() for n in d: f.add(d[n]) for n in e: g.add(e[n]) bb=list(f) cc=list(g) print "George. Bush - " for m in d: if(d[m]>bb[len(bb)-10]): print "==", m, " : ", d[m], print"\n" print "Bill. Clinton -" for m in e: if(e[m]>cc[len(cc)-10]): print "==", m, " : ", e[m], def lab11(): ring() addkeys() addmouse() turtle.listen() turtle.mainloop() schoolLife() speech() def main(): lab11() if __name__=="__main__": main()
ring = turtle.Turtle()
ring.penup()
ring.setpos(-300,300)
ring.pendown()
ring.pensize(3)
#-300,300 -> 300,300 -> 300,-300 -> -300,-300
for side in range(4):
    ring.fd(600)
    ring.right(90)
ring.write(ring.pos())
ring.hideturtle()
identifier_body
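The speech() function in the wk11_main.py content above tallies word counts with plain dicts and then prints words whose count exceeds bb[len(bb)-10]; because bb is built from a set, its ordering is not guaranteed, so that cutoff is not necessarily the tenth-highest count. Below is a minimal Python 3 sketch of a ten-most-frequent-words report, which appears to be what the original aims at; the top_words helper and the commented usage are hypothetical and not part of the original file.

from collections import Counter

def top_words(paragraphs, n=10):
    # Split every paragraph on whitespace and count the resulting tokens.
    words = []
    for paragraph in paragraphs:
        words.extend(paragraph.split())
    # most_common(n) returns the n highest-count (word, count) pairs.
    return Counter(words).most_common(n)

# Hypothetical usage with the Bush / Bill_Clinton lists defined in wk11_main.py:
# for word, count in top_words(Bush):
#     print("==", word, ":", count)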
wk11_main.py
import turtle import math wn=turtle.Screen() t1=turtle.Turtle() t1.color("red") t1.shape("turtle") t1.penup() def ring(): ring = turtle.Turtle() ring.penup() ring.setpos(-300,300) ring.pendown() ring.pensize(3) #-300,300 -> 300,300 -> 300,-300 -> -300,-300 for side in range(4): ring.fd(600) ring.right(90) ring.write(ring.pos()) ring.hideturtle() def turnright(): t1.right(45) def turnleft(): t1.left(45) def keyup(): t1.fd(100) def turnback(): t1.right(180) def mousegoto(x,y): t1.setpos(x,y) feedback() def keybye(): wn.bye() def addkeys(): wn.onkey(turnright,"Right") wn.onkey(turnleft,"Left") wn.onkey(keyup,"Up") wn.onkey(turnback,"Down") wn.onkey(keybye,"q") def addmouse(): wn.onclick(mousegoto) def feedback(): if t1.xcor() > 300 or t1.xcor() < -300: t1.right(180) t1.write("On the line") if t1.ycor() > 300 or t1.ycor() < -300: t1.right(180) t1.write("On the line") def schoolLife(): survey = [["highly satisfactoty", "satisfaction", "dissatisfaction" ,"highly unsatisfactory"], [13.1, 37.1, 8.7, 1.5], [10.6, 34.6, 13.4, 1.9], [27.1, 40.0, 2.9, 1.5], [16.2, 37.8, 6.8, 0.8], [11.4, 29.8, 14.8, 4.9], [12.2, 26.5, 14.9, 4.4], [13.5, 29.7, 11.1, 2.4], [13.7, 37.6, 4.1, 1.2]] grade=survey[1:8] sSum=0 dsSum=0 for i in range(len(grade)): sSum = sSum + grade[i][0] + grade[i][1] dsSum = dsSum + grade[i][2] + grade[i][3] sAvg=sSum/len(grade) dsAvg=dsSum/len(grade) print "Average of (highly) Satisfaction:", sAvg print "Average of (highly) unsatisfactory :", dsAvg def speech(): Bush=["Vice President Cheney, Mr. Chief Justice, President Carter, President Bush, President Clinton, reverend clergy, distinguished guests, fellow citizens:", "On this day, prescribed by law and marked by ceremony, we celebrate the durable wisdom of our Constitution, and recall the deep commitments that unite our country.", "I am grateful for the honor of this hour, mindful of the consequential times in which we live, and determined to fulfill the oath that I have sworn and you have witnessed.", "At this second gathering, our duties are defined not by the words I use, but by the history we have seen together. For a half century, America defended our own freedom by standing watch on distant borders.", "After the shipwreck of communism came years of relative quiet, years of repose, years of sabbatical —and then there came a day of fire.", "We have seen our vulnerability —and we have seen its deepest source. For as long as whole regions of the world simmer in resentment and tyranny —prone to ideologies that feed hatred and excuse murder ", "— violence will gather, and multiply in destructive power, and cross the most defended borders, and raise a mortal threat. There is only one force of history that can break the reign of hatred and resentment,", "and expose the pretensions of tyrants, and reward the hopes of the decent and tolerant, and that is the force of human freedom.", "We are led, by events and common sense, to one conclusion: The survival of liberty in our land increasingly depends on the success of liberty in other lands. The best hope for peace in our world is the expansion of", "freedom in all the world.", "America's vital interests and our deepest beliefs are now one. From the day of our Founding, we have proclaimed that every man and woman on this earth has rights, and dignity, and matchless value, because they bear", " the image of the Maker of Heaven and earth. Across the generations we have proclaimed the imperative of self-government, because no one is fit to be a master, and no one deserves to be a slave. 
Advancing these ideals", "is the mission that created our Nation. It is the honorable achievement of our fathers. Now it is the urgent requirement of our nation's security, and the calling of our time.", "So it is the policy of the United States to seek and support the growth of democratic movements and institutions in every nation and culture, with the ultimate goal of ending tyranny in our world.", "This is not primarily the task of arms, though we will defend ourselves and our friends by force of arms when necessary. Freedom, by its nature, must be chosen, and defended by citizens, and sustained by the rule of ", " law and the protection of minorities. And when the soul of a nation finally speaks, the institutions that arise may reflect customs and traditions very different from our own. America will not impose our own style of", " government on the unwilling. Our goal instead is to help others find their own voice, attain their own freedom, and make their own way.", "The great objective of ending tyranny is the concentrated work of generations. The difficulty of the task is no excuse for avoiding it. America's influence is not unlimited, but fortunately for the oppressed, America's", " influence is considerable, and we will use it confidently in freedom's cause.", "My most solemn duty is to protect this nation and its people against further attacks and emerging threats. Some have unwisely chosen to test America's resolve, and have found it firm.", "We will persistently clarify the choice before every ruler and every nation: The moral choice between oppression, which is always wrong, and freedom, which is eternally right. America will not ", " pretend that jailed dissidents prefer their chains, or that women welcome humiliation and servitude, or that any human being aspires to live at the mercy of bullies.", "We will encourage reform in other governments by making clear that success in our relations will require the decent treatment of their own people. America's belief in human dignity will guide our policies, ", " yet rights must be more than the grudging concessions of dictators; they are secured by free dissent and the participation of the governed. In the long run, there is no justice without freedom, ", " and there can be no human rights without human liberty.", "Some, I know, have questioned the global appeal of liberty —though this time in history, four decades defined by the swiftest advance of freedom ever seen, ", " is an odd time for doubt. Americans, of all people, should never be surprised by the power of our ideals. Eventually, the call of freedom comes to every mind and every soul. ", " We do not accept the existence of permanent tyranny because we do not accept the possibility of permanent slavery. Liberty will come to those who love it.", "Today, America speaks anew to the peoples of the world:", "All who live in tyranny and hopelessness can know: the United States will not ignore your oppression, or excuse your oppressors. When you stand for your liberty, we will stand with you.", "Democratic reformers facing repression, prison, or exile can know: America sees you for who you are: the future leaders of your free country.", "The rulers of outlaw regimes can know that we still believe as Abraham Lincoln did: Those who deny freedom to others deserve it not for themselves; and, under the rule of a just God, cannot long retain it.", "The leaders of governments with long habits of control need to know: To serve your people you must learn to trust them. 
Start on this journey of progress and justice, and America will walk at your side.", "And all the allies of the United States can know: we honor your friendship, we rely on your counsel, and we depend on your help. Division among free nations is a primary goal of freedom's enemies. ", " The concerted effort of free nations to promote democracy is a prelude to our enemies' defeat.", "Today, I also speak anew to my fellow citizens:", "From all of you, I have asked patience in the hard task of securing America, which you have granted in good measure. Our country has accepted obligations that are difficult to fulfill,", " and would be dishonorable to abandon. Yet because we have acted in the great liberating tradition of this nation, tens of millions have achieved their freedom. And as hope kindles hope, ", " millions more will find it. By our efforts, we have lit a fire as well —a fire in the minds of men. It warms those who feel its power, it burns those who fight its progress, and one day this untamed ", " fire of freedom will reach the darkest corners of our world.", "A few Americans have accepted the hardest duties in this cause —in the quiet work of intelligence and diplomacy ... the idealistic work of helping raise up free governments ... ", " the dangerous and necessary work of fighting our enemies. Some have shown their devotion to our country in deaths that honored their whole lives —and we will always honor their names and their sacrifice.", "All Americans have witnessed this idealism, and some for the first time. I ask our youngest citizens to believe the evidence of your eyes. You have seen duty and allegiance in the determined faces of our soldiers. ", " You have seen that life is fragile, and evil is real, and courage triumphs. Make the choice to serve in a cause larger than your wants, larger than yourself —and in your days you will add not just to ", " the wealth of our country, but to its character.", "America has need of idealism and courage, because we have essential work at home —the unfinished work of American freedom. In a world moving toward liberty, ", " we are determined to show the meaning and promise of liberty.", "In America's ideal of freedom, citizens find the dignity and security of economic independence, instead of laboring on the edge of subsistence. ", " This is the broader definition of liberty that motivated the Homestead Act, the Social Security Act, and the G.I. Bill of Rights.", " And now we will extend this vision by reforming great institutions to serve the needs of our time. To give every American a stake in the promise and future of our country,", " we will bring the highest standards to our schools, and build an ownership society. We will widen the ownership of homes and businesses, retirement savings and health insurance —preparing ", " our people for the challenges of life in a free society. By making every citizen an agent of his or her own destiny, we will give our fellow Americans greater freedom from want and fear, ",
"In America's ideal of freedom, the public interest depends on private character —on integrity, and tolerance toward others, and the rule of conscience in our own lives. ", " Self-government relies, in the end, on the governing of the self. That edifice of character is built in families, supported by communities with standards, and sustained in our national ", " life by the truths of Sinai, the Sermon on the Mount, the words of the Koran, and the varied faiths of our people. Americans move forward in every generation by reaffirming all that is ", " good and true that came before —ideals of justice and conduct that are the same yesterday, today, and forever.", "In America's ideal of freedom, the exercise of rights is ennobled by service, and mercy, and a heart for the weak. Liberty for all does not mean independence from one another. Our nation relies", " on men and women who look after a neighbor and surround the lost with love. Americans, at our best, value the life we see in one another, and must always remember that even the unwanted have worth.", " And our country must abandon all the habits of racism, because we cannot carry the message of freedom and the baggage of bigotry at the same time.", "From the perspective of a single day, including this day of dedication, the issues and questions before our country are many. From the viewpoint of centuries, the questions that come to us are narrowed and few.", " Did our generation advance the cause of freedom? And did our character bring credit to that cause?", "These questions that judge us also unite us, because Americans of every party and background, Americans by choice and by birth, are bound to one another in the cause of freedom. We have known divisions, ", " which must be healed to move forward in great purposes —and I will strive in good faith to heal them. Yet those divisions do not define America. We felt the unity and fellowship of our nation when freedom ", " came under attack, and our response came like a single hand over a single heart. And we can feel that same unity and pride whenever America acts for good, and the victims of disaster are given hope, ", " and the unjust encounter justice, and the captives are set free.", "We go forward with complete confidence in the eventual triumph of freedom. Not because history runs on the wheels of inevitability; it is human choices that move events. Not because we consider ourselves ", " a chosen nation; God moves and chooses as He wills. We have confidence because freedom is the permanent hope of mankind, the hunger in dark places, the longing of the soul. When our Founders declared ", " a new order of the ages; when soldiers died in wave upon wave for a union based on liberty; when citizens marched in peaceful outrage under the banner Freedom Now —they were acting on an ancient hope that ", " is meant to be fulfilled. History has an ebb and flow of justice, but history also has a visible direction, set by liberty and the Author of Liberty.", "When the Declaration of Independence was first read in public and the Liberty Bell was sounded in celebration, a witness said, It rang as if it meant something. In our time it means something still. ", " America, in this young century, proclaims liberty throughout all the world, and to all the inhabitants thereof. 
Renewed in our strength — tested, but not weary — we are ready for the greatest achievements ", " in the history of freedom.", "May God bless you, and may He watch over the United States of America."] Bill_Clinton = [ "At this last presidential inauguration of the 20th century, let us lift our eyes toward the challenges that await us in the next century. It is our great good fortune that time and chance have put us not only at the edge of a new century, in a new millennium, but on the edge of a bright new prospect in human affairs - a moment that will define our course, and our character, for decades to come. We must keep our old democracy forever young. Guided by the ancient vision of a promised land, let us set our sights upon a land of new promise.", "The promise of America was born in the 18th century out of the bold conviction that we are all created equal. It was extended and preserved in the 19th century, when our nation spread across the continent, saved the union, and abolished the awful scourge of slavery.", "Then, in turmoil and triumph, that promise exploded onto the world stage to make this the American Century.", "And what a century it has been. America became the world's mightiest industrial power; saved the world from tyranny in two world wars and a long cold war; and time and again, reached out across the globe to millions who, like us, longed for the blessings of liberty.", "Along the way, Americans produced a great middle class and security in old age; built unrivaled centers of learning and opened public schools to all; split the atom and explored the heavens; invented the computer and the microchip; and deepened the wellspring of justice by making a revolution in civil rights for African Americans and all minorities, and extending the circle of citizenship, opportunity and dignity to women.", "Now, for the third time, a new century is upon us, and another time to choose. We began the 19th century with a choice, to spread our nation from coast to coast. We began the 20th century with a choice, to harness the Industrial Revolution to our values of free enterprise, conservation, and human decency. Those choices made all the difference. At the dawn of the 21st century a free people must now choose to shape the forces of the Information Age and the global society, to unleash the limitless potential of all our people, and, yes, to form a more perfect union.", "When last we gathered, our march to this new future seemed less certain than it does today. We vowed then to set a clear course to renew our nation.", "In these four years, we have been touched by tragedy, exhilarated by challenge, strengthened by achievement. America stands alone as the world's indispensable nation. Once again, our economy is the strongest on Earth. Once again, we are building stronger families, thriving communities, better educational opportunities, a cleaner environment. Problems that once seemed destined to deepen now bend to our efforts: our streets are safer and record numbers of our fellow citizens have moved from welfare to work.", "And once again, we have resolved for our time a great debate over the role of government. Today we can declare: Government is not the problem, and government is not the solution. We - the American people - we are the solution. Our founders understood that well and gave us a democracy strong enough to endure for centuries, flexible enough to face our common challenges and advance our common dreams in each new day.", "As times change, so government must change. 
We need a new government for a new century - humble enough not to try to solve all our problems for us, but strong enough to give us the tools to solve our problems for ourselves; a government that is smaller, lives within its means, and does more with less. Yet where it can stand up for our values and interests in the world, and where it can give Americans the power to make a real difference in their everyday lives, government should do more, not less. The preeminent mission of our new government is to give all Americans an opportunity - not a guarantee, but a real opportunity - to build better lives.", "Beyond that, my fellow citizens, the future is up to us. Our founders taught us that the preservation of our liberty and our union depends upon responsible citizenship. And we need a new sense of responsibility for a new century. There is work to do, work that government alone cannot do: teaching children to read; hiring people off welfare rolls; coming out from behind locked doors and shuttered windows to help reclaim our streets from drugs and gangs and crime; taking time out of our own lives to serve others.", "Each and every one of us, in our own way, must assume personal responsibility - not only for ourselves and our families, but for our neighbors and our nation. Our greatest responsibility is to embrace a new spirit of community for a new century. For any one of us to succeed, we must succeed as one America.", "The challenge of our past remains the challenge of our future - will we be one nation, one people, with one common destiny, or not? Will we all come together, or come apart?", "The divide of race has been America's constant curse. And each new wave of immigrants gives new targets to old prejudices. Prejudice and contempt, cloaked in the pretense of religious or political conviction are no different. These forces have nearly destroyed our nation in the past. They plague us still. They fuel the fanaticism of terror. And they torment the lives of millions in fractured nations all around the world.", "These obsessions cripple both those who hate and, of course, those who are hated, robbing both of what they might become. We cannot, we will not, succumb to the dark impulses that lurk in the far regions of the soul everywhere. We shall overcome them. And we shall replace them with the generous spirit of a people who feel at home with one another.", "Our rich texture of racial, religious and political diversity will be a Godsend in the 21st century. Great rewards will come to those who can live together, learn together, work together, forge new ties that bind together.", "As this new era approaches we can already see its broad outlines. Ten years ago, the Internet was the mystical province of physicists; today, it is a commonplace encyclopedia for millions of schoolchildren. Scientists now are decoding the blueprint of human life. Cures for our most feared illnesses seem close at hand.", "The world is no longer divided into two hostile camps. Instead, now we are building bonds with nations that once were our adversaries. Growing connections of commerce and culture give us a chance to lift the fortunes and spirits of people the world over. 
And for the very first time in all of history, more people on this planet live under democracy than dictatorship.", "My fellow Americans, as we look back at this remarkable century, we may ask, can we hope not just to follow, but even to surpass the achievements of the 20th century in America and to avoid the awful bloodshed that stained its legacy? To that question, every American here and every American in our land today must answer a resounding Yes.", "This is the heart of our task. With a new vision of government, a new sense of responsibility, a new spirit of community, we will sustain America's journey. The promise we sought in a new land we will find again in a land of new promise.", "In this new land, education will be every citizen's most prized possession. Our schools will have the highest standards in the world, igniting the spark of possibility in the eyes of every girl and every boy. And the doors of higher education will be open to all. The knowledge and power of the Information Age will be within reach not just of the few, but of every classroom, every library, every child. Parents and children will have time not only to work, but to read and play together. And the plans they make at their kitchen table will be those of a better home, a better job, the certain chance to go to college.", "Our streets will echo again with the laughter of our children, because no one will try to shoot them or sell them drugs anymore. Everyone who can work, will work, with today's permanent under class part of tomorrow's growing middle class. New miracles of medicine at last will reach not only those who can claim care now, but the children and hardworking families too long denied.", "We will stand mighty for peace and freedom, and maintain a strong defense against terror and destruction. Our children will sleep free from the threat of nuclear, chemical or biological weapons. Ports and airports, farms and factories will thrive with trade and innovation and ideas. And the world's greatest democracy will lead a whole world of democracies.", "Our land of new promise will be a nation that meets its obligations - a nation that balances its budget, but never loses the balance of its values. A nation where our grandparents have secure retirement and health care, and their grandchildren know we have made the reforms necessary to sustain those benefits for their time. A nation that fortifies the world's most productive economy even as it protects the great natural bounty of our water, air, and majestic land.", "And in this land of new promise, we will have reformed our politics so that the voice of the people will always speak louder than the din of narrow interests - regaining the participation and deserving the trust of all Americans.", "Fellow citizens, let us build that America, a nation ever moving forward toward realizing the full potential of all its citizens. Prosperity and power - yes, they are important, and we must maintain them. But let us never forget: The greatest progress we have made, and the greatest progress we have yet to make, is in the human heart. In the end, all the world's wealth and a thousand armies are no match for the strength and decency of the human spirit.", "Thirty-four years ago, the man whose life we celebrate today spoke to us down there, at the other end of this Mall, in words that moved the conscience of a nation. Like a prophet of old, he told of his dream that one day America would rise up and treat all its citizens as equals before the law and in the heart. 
Martin Luther King's dream was the American Dream. His quest is our quest: the ceaseless striving to live out our true creed. Our history has been built on such dreams and labors. And by our dreams and labors we will redeem the promise of America in the 21st century.", "To that effort I pledge all my strength and every power of my office. I ask the members of Congress here to join in that pledge. The American people returned to office a President of one party and a Congress of another. Surely, they did not do this to advance the politics of petty bickering and extreme partisanship they plainly deplore. No, they call on us instead to be repairers of the breach, and to move on with America's mission.", "America demands and deserves big things from us - and nothing big ever came from being small. Let us remember the timeless wisdom of Cardinal Bernardin, when facing the end of his own life. He said: “It is wrong to waste the precious gift of time, on acrimony and division.", "Fellow citizens, we must not waste the precious gift of this time. For all of us are on that same journey of our lives, and our journey, too, will come to an end. But the journey of our America must go on.", "And so, my fellow Americans, we must be strong, for there is much to dare. The demands of our time are great and they are different. Let us meet them with faith and courage, with patience and a grateful and happy heart. Let us shape the hope of this day into the noblest chapter in our history. Yes, let us build our bridge. A bridge wide enough and strong enough for every American to cross over to a blessed land of new promise.", "May those generations whose faces we cannot yet see, whose names we may never know, say of us here that we led our beloved land into a new century with the American Dream alive for all her children; with the American promise of a more perfect union a reality for all her people; with America's bright flame of freedom spreading throughout all the world.", "From the height of this place and the summit of this century, let us go forth. May God strengthen our hands for the good work ahead - and always, always bless our America." ] bsh=[] clin=[] for i in range(0, len(Bush)): for j in range(0, len(Bush[i].split())): bsh.append(Bush[i].split()[j]) for i in range(0, len(Bill_Clinton)): for j in range(0, len(Bill_Clinton[i].split())): clin.append(Bill_Clinton[i].split()[j]) d=dict() e=dict() for c in bsh: if c not in d: d[c]=1 else: d[c]=d[c]+1 for c in clin: if c not in e: e[c]=1 else: e[c]=e[c]+1 f=set() g=set() for n in d: f.add(d[n]) for n in e: g.add(e[n]) bb=list(f) cc=list(g) print "George. Bush - " for m in d: if(d[m]>bb[len(bb)-10]): print "==", m, " : ", d[m], print"\n" print "Bill. Clinton -" for m in e: if(e[m]>cc[len(cc)-10]): print "==", m, " : ", e[m], def lab11(): ring() addkeys() addmouse() turtle.listen() turtle.mainloop() schoolLife() speech() def main(): lab11() if __name__=="__main__": main()
" and make our society more prosperous and just and equal.",
random_line_split
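The speech() routine above tallies word frequencies with hand-rolled dictionaries and then derives a top-10 cutoff from a set of counts. A minimal Python 3 sketch of the same idea using collections.Counter is shown below; it assumes the same Bush and Bill_Clinton string lists defined in the script, and most_common(10) stands in for the threshold trick (which keeps ties differently), so it is an illustration rather than a drop-in replacement.

from collections import Counter

def top_words(paragraphs, n=10):
    # flatten the list of paragraphs into one token list and count occurrences
    words = [w for p in paragraphs for w in p.split()]
    return Counter(words).most_common(n)

# hypothetical usage, assuming Bush and Bill_Clinton are the string lists above
for name, speech_text in (("George W. Bush", Bush), ("Bill Clinton", Bill_Clinton)):
    print(name)
    for word, count in top_words(speech_text):
        print("==", word, ":", count)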
wk11_main.py
import turtle import math wn=turtle.Screen() t1=turtle.Turtle() t1.color("red") t1.shape("turtle") t1.penup() def ring(): ring = turtle.Turtle() ring.penup() ring.setpos(-300,300) ring.pendown() ring.pensize(3) #-300,300 -> 300,300 -> 300,-300 -> -300,-300 for side in range(4): ring.fd(600) ring.right(90) ring.write(ring.pos()) ring.hideturtle() def turnright(): t1.right(45) def turnleft(): t1.left(45) def keyup(): t1.fd(100) def turnback(): t1.right(180) def mousegoto(x,y): t1.setpos(x,y) feedback() def keybye(): wn.bye() def addkeys(): wn.onkey(turnright,"Right") wn.onkey(turnleft,"Left") wn.onkey(keyup,"Up") wn.onkey(turnback,"Down") wn.onkey(keybye,"q") def addmouse(): wn.onclick(mousegoto) def feedback(): if t1.xcor() > 300 or t1.xcor() < -300: t1.right(180) t1.write("On the line") if t1.ycor() > 300 or t1.ycor() < -300: t1.right(180) t1.write("On the line") def
(): survey = [["highly satisfactoty", "satisfaction", "dissatisfaction" ,"highly unsatisfactory"], [13.1, 37.1, 8.7, 1.5], [10.6, 34.6, 13.4, 1.9], [27.1, 40.0, 2.9, 1.5], [16.2, 37.8, 6.8, 0.8], [11.4, 29.8, 14.8, 4.9], [12.2, 26.5, 14.9, 4.4], [13.5, 29.7, 11.1, 2.4], [13.7, 37.6, 4.1, 1.2]] grade=survey[1:8] sSum=0 dsSum=0 for i in range(len(grade)): sSum = sSum + grade[i][0] + grade[i][1] dsSum = dsSum + grade[i][2] + grade[i][3] sAvg=sSum/len(grade) dsAvg=dsSum/len(grade) print "Average of (highly) Satisfaction:", sAvg print "Average of (highly) unsatisfactory :", dsAvg def speech(): Bush=["Vice President Cheney, Mr. Chief Justice, President Carter, President Bush, President Clinton, reverend clergy, distinguished guests, fellow citizens:", "On this day, prescribed by law and marked by ceremony, we celebrate the durable wisdom of our Constitution, and recall the deep commitments that unite our country.", "I am grateful for the honor of this hour, mindful of the consequential times in which we live, and determined to fulfill the oath that I have sworn and you have witnessed.", "At this second gathering, our duties are defined not by the words I use, but by the history we have seen together. For a half century, America defended our own freedom by standing watch on distant borders.", "After the shipwreck of communism came years of relative quiet, years of repose, years of sabbatical —and then there came a day of fire.", "We have seen our vulnerability —and we have seen its deepest source. For as long as whole regions of the world simmer in resentment and tyranny —prone to ideologies that feed hatred and excuse murder ", "— violence will gather, and multiply in destructive power, and cross the most defended borders, and raise a mortal threat. There is only one force of history that can break the reign of hatred and resentment,", "and expose the pretensions of tyrants, and reward the hopes of the decent and tolerant, and that is the force of human freedom.", "We are led, by events and common sense, to one conclusion: The survival of liberty in our land increasingly depends on the success of liberty in other lands. The best hope for peace in our world is the expansion of", "freedom in all the world.", "America's vital interests and our deepest beliefs are now one. From the day of our Founding, we have proclaimed that every man and woman on this earth has rights, and dignity, and matchless value, because they bear", " the image of the Maker of Heaven and earth. Across the generations we have proclaimed the imperative of self-government, because no one is fit to be a master, and no one deserves to be a slave. Advancing these ideals", "is the mission that created our Nation. It is the honorable achievement of our fathers. Now it is the urgent requirement of our nation's security, and the calling of our time.", "So it is the policy of the United States to seek and support the growth of democratic movements and institutions in every nation and culture, with the ultimate goal of ending tyranny in our world.", "This is not primarily the task of arms, though we will defend ourselves and our friends by force of arms when necessary. Freedom, by its nature, must be chosen, and defended by citizens, and sustained by the rule of ", " law and the protection of minorities. And when the soul of a nation finally speaks, the institutions that arise may reflect customs and traditions very different from our own. America will not impose our own style of", " government on the unwilling. 
Our goal instead is to help others find their own voice, attain their own freedom, and make their own way.", "The great objective of ending tyranny is the concentrated work of generations. The difficulty of the task is no excuse for avoiding it. America's influence is not unlimited, but fortunately for the oppressed, America's", " influence is considerable, and we will use it confidently in freedom's cause.", "My most solemn duty is to protect this nation and its people against further attacks and emerging threats. Some have unwisely chosen to test America's resolve, and have found it firm.", "We will persistently clarify the choice before every ruler and every nation: The moral choice between oppression, which is always wrong, and freedom, which is eternally right. America will not ", " pretend that jailed dissidents prefer their chains, or that women welcome humiliation and servitude, or that any human being aspires to live at the mercy of bullies.", "We will encourage reform in other governments by making clear that success in our relations will require the decent treatment of their own people. America's belief in human dignity will guide our policies, ", " yet rights must be more than the grudging concessions of dictators; they are secured by free dissent and the participation of the governed. In the long run, there is no justice without freedom, ", " and there can be no human rights without human liberty.", "Some, I know, have questioned the global appeal of liberty —though this time in history, four decades defined by the swiftest advance of freedom ever seen, ", " is an odd time for doubt. Americans, of all people, should never be surprised by the power of our ideals. Eventually, the call of freedom comes to every mind and every soul. ", " We do not accept the existence of permanent tyranny because we do not accept the possibility of permanent slavery. Liberty will come to those who love it.", "Today, America speaks anew to the peoples of the world:", "All who live in tyranny and hopelessness can know: the United States will not ignore your oppression, or excuse your oppressors. When you stand for your liberty, we will stand with you.", "Democratic reformers facing repression, prison, or exile can know: America sees you for who you are: the future leaders of your free country.", "The rulers of outlaw regimes can know that we still believe as Abraham Lincoln did: Those who deny freedom to others deserve it not for themselves; and, under the rule of a just God, cannot long retain it.", "The leaders of governments with long habits of control need to know: To serve your people you must learn to trust them. Start on this journey of progress and justice, and America will walk at your side.", "And all the allies of the United States can know: we honor your friendship, we rely on your counsel, and we depend on your help. Division among free nations is a primary goal of freedom's enemies. ", " The concerted effort of free nations to promote democracy is a prelude to our enemies' defeat.", "Today, I also speak anew to my fellow citizens:", "From all of you, I have asked patience in the hard task of securing America, which you have granted in good measure. Our country has accepted obligations that are difficult to fulfill,", " and would be dishonorable to abandon. Yet because we have acted in the great liberating tradition of this nation, tens of millions have achieved their freedom. And as hope kindles hope, ", " millions more will find it. 
By our efforts, we have lit a fire as well —a fire in the minds of men. It warms those who feel its power, it burns those who fight its progress, and one day this untamed ", " fire of freedom will reach the darkest corners of our world.", "A few Americans have accepted the hardest duties in this cause —in the quiet work of intelligence and diplomacy ... the idealistic work of helping raise up free governments ... ", " the dangerous and necessary work of fighting our enemies. Some have shown their devotion to our country in deaths that honored their whole lives —and we will always honor their names and their sacrifice.", "All Americans have witnessed this idealism, and some for the first time. I ask our youngest citizens to believe the evidence of your eyes. You have seen duty and allegiance in the determined faces of our soldiers. ", " You have seen that life is fragile, and evil is real, and courage triumphs. Make the choice to serve in a cause larger than your wants, larger than yourself —and in your days you will add not just to ", " the wealth of our country, but to its character.", "America has need of idealism and courage, because we have essential work at home —the unfinished work of American freedom. In a world moving toward liberty, ", " we are determined to show the meaning and promise of liberty.", "In America's ideal of freedom, citizens find the dignity and security of economic independence, instead of laboring on the edge of subsistence. ", " This is the broader definition of liberty that motivated the Homestead Act, the Social Security Act, and the G.I. Bill of Rights.", " And now we will extend this vision by reforming great institutions to serve the needs of our time. To give every American a stake in the promise and future of our country,", " we will bring the highest standards to our schools, and build an ownership society. We will widen the ownership of homes and businesses, retirement savings and health insurance —preparing ", " our people for the challenges of life in a free society. By making every citizen an agent of his or her own destiny, we will give our fellow Americans greater freedom from want and fear, ", " and make our society more prosperous and just and equal.", "In America's ideal of freedom, the public interest depends on private character —on integrity, and tolerance toward others, and the rule of conscience in our own lives. ", " Self-government relies, in the end, on the governing of the self. That edifice of character is built in families, supported by communities with standards, and sustained in our national ", " life by the truths of Sinai, the Sermon on the Mount, the words of the Koran, and the varied faiths of our people. Americans move forward in every generation by reaffirming all that is ", " good and true that came before —ideals of justice and conduct that are the same yesterday, today, and forever.", "In America's ideal of freedom, the exercise of rights is ennobled by service, and mercy, and a heart for the weak. Liberty for all does not mean independence from one another. Our nation relies", " on men and women who look after a neighbor and surround the lost with love. 
Americans, at our best, value the life we see in one another, and must always remember that even the unwanted have worth.", " And our country must abandon all the habits of racism, because we cannot carry the message of freedom and the baggage of bigotry at the same time.", "From the perspective of a single day, including this day of dedication, the issues and questions before our country are many. From the viewpoint of centuries, the questions that come to us are narrowed and few.", " Did our generation advance the cause of freedom? And did our character bring credit to that cause?", "These questions that judge us also unite us, because Americans of every party and background, Americans by choice and by birth, are bound to one another in the cause of freedom. We have known divisions, ", " which must be healed to move forward in great purposes —and I will strive in good faith to heal them. Yet those divisions do not define America. We felt the unity and fellowship of our nation when freedom ", " came under attack, and our response came like a single hand over a single heart. And we can feel that same unity and pride whenever America acts for good, and the victims of disaster are given hope, ", " and the unjust encounter justice, and the captives are set free.", "We go forward with complete confidence in the eventual triumph of freedom. Not because history runs on the wheels of inevitability; it is human choices that move events. Not because we consider ourselves ", " a chosen nation; God moves and chooses as He wills. We have confidence because freedom is the permanent hope of mankind, the hunger in dark places, the longing of the soul. When our Founders declared ", " a new order of the ages; when soldiers died in wave upon wave for a union based on liberty; when citizens marched in peaceful outrage under the banner Freedom Now —they were acting on an ancient hope that ", " is meant to be fulfilled. History has an ebb and flow of justice, but history also has a visible direction, set by liberty and the Author of Liberty.", "When the Declaration of Independence was first read in public and the Liberty Bell was sounded in celebration, a witness said, It rang as if it meant something. In our time it means something still. ", " America, in this young century, proclaims liberty throughout all the world, and to all the inhabitants thereof. Renewed in our strength — tested, but not weary — we are ready for the greatest achievements ", " in the history of freedom.", "May God bless you, and may He watch over the United States of America."] Bill_Clinton = [ "At this last presidential inauguration of the 20th century, let us lift our eyes toward the challenges that await us in the next century. It is our great good fortune that time and chance have put us not only at the edge of a new century, in a new millennium, but on the edge of a bright new prospect in human affairs - a moment that will define our course, and our character, for decades to come. We must keep our old democracy forever young. Guided by the ancient vision of a promised land, let us set our sights upon a land of new promise.", "The promise of America was born in the 18th century out of the bold conviction that we are all created equal. It was extended and preserved in the 19th century, when our nation spread across the continent, saved the union, and abolished the awful scourge of slavery.", "Then, in turmoil and triumph, that promise exploded onto the world stage to make this the American Century.", "And what a century it has been. 
America became the world's mightiest industrial power; saved the world from tyranny in two world wars and a long cold war; and time and again, reached out across the globe to millions who, like us, longed for the blessings of liberty.", "Along the way, Americans produced a great middle class and security in old age; built unrivaled centers of learning and opened public schools to all; split the atom and explored the heavens; invented the computer and the microchip; and deepened the wellspring of justice by making a revolution in civil rights for African Americans and all minorities, and extending the circle of citizenship, opportunity and dignity to women.", "Now, for the third time, a new century is upon us, and another time to choose. We began the 19th century with a choice, to spread our nation from coast to coast. We began the 20th century with a choice, to harness the Industrial Revolution to our values of free enterprise, conservation, and human decency. Those choices made all the difference. At the dawn of the 21st century a free people must now choose to shape the forces of the Information Age and the global society, to unleash the limitless potential of all our people, and, yes, to form a more perfect union.", "When last we gathered, our march to this new future seemed less certain than it does today. We vowed then to set a clear course to renew our nation.", "In these four years, we have been touched by tragedy, exhilarated by challenge, strengthened by achievement. America stands alone as the world's indispensable nation. Once again, our economy is the strongest on Earth. Once again, we are building stronger families, thriving communities, better educational opportunities, a cleaner environment. Problems that once seemed destined to deepen now bend to our efforts: our streets are safer and record numbers of our fellow citizens have moved from welfare to work.", "And once again, we have resolved for our time a great debate over the role of government. Today we can declare: Government is not the problem, and government is not the solution. We - the American people - we are the solution. Our founders understood that well and gave us a democracy strong enough to endure for centuries, flexible enough to face our common challenges and advance our common dreams in each new day.", "As times change, so government must change. We need a new government for a new century - humble enough not to try to solve all our problems for us, but strong enough to give us the tools to solve our problems for ourselves; a government that is smaller, lives within its means, and does more with less. Yet where it can stand up for our values and interests in the world, and where it can give Americans the power to make a real difference in their everyday lives, government should do more, not less. The preeminent mission of our new government is to give all Americans an opportunity - not a guarantee, but a real opportunity - to build better lives.", "Beyond that, my fellow citizens, the future is up to us. Our founders taught us that the preservation of our liberty and our union depends upon responsible citizenship. And we need a new sense of responsibility for a new century. 
There is work to do, work that government alone cannot do: teaching children to read; hiring people off welfare rolls; coming out from behind locked doors and shuttered windows to help reclaim our streets from drugs and gangs and crime; taking time out of our own lives to serve others.", "Each and every one of us, in our own way, must assume personal responsibility - not only for ourselves and our families, but for our neighbors and our nation. Our greatest responsibility is to embrace a new spirit of community for a new century. For any one of us to succeed, we must succeed as one America.", "The challenge of our past remains the challenge of our future - will we be one nation, one people, with one common destiny, or not? Will we all come together, or come apart?", "The divide of race has been America's constant curse. And each new wave of immigrants gives new targets to old prejudices. Prejudice and contempt, cloaked in the pretense of religious or political conviction are no different. These forces have nearly destroyed our nation in the past. They plague us still. They fuel the fanaticism of terror. And they torment the lives of millions in fractured nations all around the world.", "These obsessions cripple both those who hate and, of course, those who are hated, robbing both of what they might become. We cannot, we will not, succumb to the dark impulses that lurk in the far regions of the soul everywhere. We shall overcome them. And we shall replace them with the generous spirit of a people who feel at home with one another.", "Our rich texture of racial, religious and political diversity will be a Godsend in the 21st century. Great rewards will come to those who can live together, learn together, work together, forge new ties that bind together.", "As this new era approaches we can already see its broad outlines. Ten years ago, the Internet was the mystical province of physicists; today, it is a commonplace encyclopedia for millions of schoolchildren. Scientists now are decoding the blueprint of human life. Cures for our most feared illnesses seem close at hand.", "The world is no longer divided into two hostile camps. Instead, now we are building bonds with nations that once were our adversaries. Growing connections of commerce and culture give us a chance to lift the fortunes and spirits of people the world over. And for the very first time in all of history, more people on this planet live under democracy than dictatorship.", "My fellow Americans, as we look back at this remarkable century, we may ask, can we hope not just to follow, but even to surpass the achievements of the 20th century in America and to avoid the awful bloodshed that stained its legacy? To that question, every American here and every American in our land today must answer a resounding Yes.", "This is the heart of our task. With a new vision of government, a new sense of responsibility, a new spirit of community, we will sustain America's journey. The promise we sought in a new land we will find again in a land of new promise.", "In this new land, education will be every citizen's most prized possession. Our schools will have the highest standards in the world, igniting the spark of possibility in the eyes of every girl and every boy. And the doors of higher education will be open to all. The knowledge and power of the Information Age will be within reach not just of the few, but of every classroom, every library, every child. Parents and children will have time not only to work, but to read and play together. 
And the plans they make at their kitchen table will be those of a better home, a better job, the certain chance to go to college.", "Our streets will echo again with the laughter of our children, because no one will try to shoot them or sell them drugs anymore. Everyone who can work, will work, with today's permanent under class part of tomorrow's growing middle class. New miracles of medicine at last will reach not only those who can claim care now, but the children and hardworking families too long denied.", "We will stand mighty for peace and freedom, and maintain a strong defense against terror and destruction. Our children will sleep free from the threat of nuclear, chemical or biological weapons. Ports and airports, farms and factories will thrive with trade and innovation and ideas. And the world's greatest democracy will lead a whole world of democracies.", "Our land of new promise will be a nation that meets its obligations - a nation that balances its budget, but never loses the balance of its values. A nation where our grandparents have secure retirement and health care, and their grandchildren know we have made the reforms necessary to sustain those benefits for their time. A nation that fortifies the world's most productive economy even as it protects the great natural bounty of our water, air, and majestic land.", "And in this land of new promise, we will have reformed our politics so that the voice of the people will always speak louder than the din of narrow interests - regaining the participation and deserving the trust of all Americans.", "Fellow citizens, let us build that America, a nation ever moving forward toward realizing the full potential of all its citizens. Prosperity and power - yes, they are important, and we must maintain them. But let us never forget: The greatest progress we have made, and the greatest progress we have yet to make, is in the human heart. In the end, all the world's wealth and a thousand armies are no match for the strength and decency of the human spirit.", "Thirty-four years ago, the man whose life we celebrate today spoke to us down there, at the other end of this Mall, in words that moved the conscience of a nation. Like a prophet of old, he told of his dream that one day America would rise up and treat all its citizens as equals before the law and in the heart. Martin Luther King's dream was the American Dream. His quest is our quest: the ceaseless striving to live out our true creed. Our history has been built on such dreams and labors. And by our dreams and labors we will redeem the promise of America in the 21st century.", "To that effort I pledge all my strength and every power of my office. I ask the members of Congress here to join in that pledge. The American people returned to office a President of one party and a Congress of another. Surely, they did not do this to advance the politics of petty bickering and extreme partisanship they plainly deplore. No, they call on us instead to be repairers of the breach, and to move on with America's mission.", "America demands and deserves big things from us - and nothing big ever came from being small. Let us remember the timeless wisdom of Cardinal Bernardin, when facing the end of his own life. He said: “It is wrong to waste the precious gift of time, on acrimony and division.", "Fellow citizens, we must not waste the precious gift of this time. For all of us are on that same journey of our lives, and our journey, too, will come to an end. 
But the journey of our America must go on.", "And so, my fellow Americans, we must be strong, for there is much to dare. The demands of our time are great and they are different. Let us meet them with faith and courage, with patience and a grateful and happy heart. Let us shape the hope of this day into the noblest chapter in our history. Yes, let us build our bridge. A bridge wide enough and strong enough for every American to cross over to a blessed land of new promise.", "May those generations whose faces we cannot yet see, whose names we may never know, say of us here that we led our beloved land into a new century with the American Dream alive for all her children; with the American promise of a more perfect union a reality for all her people; with America's bright flame of freedom spreading throughout all the world.", "From the height of this place and the summit of this century, let us go forth. May God strengthen our hands for the good work ahead - and always, always bless our America." ] bsh=[] clin=[] for i in range(0, len(Bush)): for j in range(0, len(Bush[i].split())): bsh.append(Bush[i].split()[j]) for i in range(0, len(Bill_Clinton)): for j in range(0, len(Bill_Clinton[i].split())): clin.append(Bill_Clinton[i].split()[j]) d=dict() e=dict() for c in bsh: if c not in d: d[c]=1 else: d[c]=d[c]+1 for c in clin: if c not in e: e[c]=1 else: e[c]=e[c]+1 f=set() g=set() for n in d: f.add(d[n]) for n in e: g.add(e[n]) bb=list(f) cc=list(g) print "George. Bush - " for m in d: if(d[m]>bb[len(bb)-10]): print "==", m, " : ", d[m], print"\n" print "Bill. Clinton -" for m in e: if(e[m]>cc[len(cc)-10]): print "==", m, " : ", e[m], def lab11(): ring() addkeys() addmouse() turtle.listen() turtle.mainloop() schoolLife() speech() def main(): lab11() if __name__=="__main__": main()
schoolLife
identifier_name
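The survey-averaging function above sums columns 0-1 and 2-3 of the numeric rows by index. A minimal sketch of the same column averaging, assuming the same list-of-lists layout (one header row followed by rows of four percentages); note that the original slices survey[1:8], which skips the final data row — whether that is intentional is unclear, so this sketch averages over all data rows.

def satisfaction_averages(survey):
    rows = survey[1:]  # skip the header row
    # columns 0-1 hold the (highly) satisfied percentages, columns 2-3 the unsatisfied ones
    sat = sum(r[0] + r[1] for r in rows) / len(rows)
    unsat = sum(r[2] + r[3] for r in rows) / len(rows)
    return sat, unsat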
GAN_word_embedding.py
from __future__ import print_function import numpy as np import chainer from chainer.functions.evaluation import accuracy from chainer.functions.loss import softmax_cross_entropy from chainer import link from chainer import reporter from chainer import optimizers import chainer.functions as F import chainer.links as L import time from matplotlib import pyplot as plt # load and arrange the dictionary file def get_dictionary(namefile): print('loading '+ namefile + '...') # all " strings are removed and the data is split at each line break with open(namefile) as f: dict_={line.replace('"', '').replace('\n', ''): int(v) for v, line in enumerate(f)} # end of line statement <eol> is added to the dictionary dict_.update({'<eol>': len(dict_)+1}) print('done.') return dict_ # load and arrange the title file def get_titles(namefile,dictionary,shuffle=0): print('loading ' + namefile + '...') with open(namefile) as file: lines = '' next(file) for line in file: # all zeros are removed and the data is split at each line break # <eol> is added at the end of each title
print('done.') lines=np.array(map(int, lines[:-1].split(','))) # if desired the titles are shuffled if shuffle: endoftitles = [x for x in range(len(lines)) if lines[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) idx = np.random.permutation(len(endoftitles)) endoftitles=[endoftitles[x] for x in idx] startoftitles = [startoftitles[x] for x in idx] lines = [lines[range(startoftitles[x], endoftitles[x] + 1)] for x in range(len(endoftitles))] lines = np.hstack(lines) # the function returns a vector containing all dictionary indices of all titles return lines # load and arrange the word embedding file def get_embeddedwords(namefile='word2vec.model'): print('loading ' + namefile + '...') with open(namefile, 'r') as f: ss = f.readline().split() # each line is split and the respective human readable word and the vector embedding vector is extracted n_vocab, n_units = int(ss[0]), int(ss[1]) word2index = {} index2word = {} w = np.empty((n_vocab, n_units), dtype=np.float32) # the embedding matrix is created by sorting all word vectors according to the dictionary index # the resulting matrix is of size NumIndices x 200 # note that due to splitting white space got removed. For that reason it is added again and a vector of # zeros is used within w for i, line in enumerate(f): ss = line.split() if len(ss)<201: ss = [' ']+ss word = ss[0] word2index[word] = i index2word[i] = word w[i] = np.array([float(s) for s in ss[1:]], dtype=np.float32) w[word2index[' ']]=np.zeros((1, 200)) print('done.') # word2index transforms a dictionary index to a human readable word # index2word transforms a human readable word to a dictionary index return word2index,index2word,w # this function is used to obtain the maximum number of words across all titles by finding <eol> statements def get_max_words_over_titles(titles_raw,dictionary): endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch = max(np.abs(np.subtract(startoftitles, endoftitles))) + 1 return max_title_length_in_batch # this function creates batch data used to train the network def createtitlebatch(titles_raw,dictionary,skipbatches=0,numtitles=10,testpart=0.05): # skip_ is used to select parts of the title vector # skip_ = 10 given numtitles = 80 would mean that the first 800 titles in the vector are skipped and # all following operations are performed on data that comes after that skip_=numtitles*skipbatches endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch=max(np.abs(np.subtract(startoftitles,endoftitles)))+1 max_skip=len(endoftitles) if max_skip<(numtitles+skip_): print('maximum requested number of titles is '+ str(numtitles+skip_)+'; dataset only has ' + str(max_skip) + ' titles') print('maximum number of batches at ' + str(numtitles) + ' titles per batch: ' + str(max_skip/numtitles-1)) else: title_matrix=[] # extraction of the data from w given the amount of titles selected for n in range(skip_,numtitles+skip_): title_num=n title_vec=titles_raw[range(startoftitles[title_num],endoftitles[title_num]+1)] title_matrix.append([w[x-1] for x in title_vec]) # shuffling the selected batch randidx=np.random.permutation(len(title_matrix)) idx_train=randidx[:-int(np.floor(len(randidx)*(testpart)))] idx_test=randidx[int(np.floor(len(randidx)*(1-testpart))):] train = [title_matrix[x] for x in 
idx_train] test = [title_matrix[x] for x in idx_test] train = [np.concatenate((x,np.zeros((max_title_length_in_batch-len(x),200))),0) for x in train] test = [np.concatenate((x, np.zeros((max_title_length_in_batch - len(x), 200))), 0) for x in test] # train and test are returned in a shape optimized for the use with convolutional networks # the respective shape is numExamples x channel x numWords x ndimEmbedding return np.asarray(train).astype('float32').reshape((len(train),max_title_length_in_batch/7,max_title_length_in_batch/7,200)),np.asarray(test).astype('float32').reshape((len(test),1,max_title_length_in_batch,200)) # this function can be used to transform a title matrix (words x ndim) back into a human readable title # the argmin() L2 norm between each word vector of the title and all word vectors in w serves as the # index for the respective word in the dictionary def vec2title(vec_,w,index2word): dict_trans=w#(w-w.min())/(w-w.min()).max() title_recon='' for i in range(len(vec_)): word_ = vec_.data[i] word_ = np.tile(word_,(len(w),1)) dist_=np.sqrt(np.sum((dict_trans-word_)**2,1)) title_recon=title_recon+index2word[dist_.argmin()]+' ' return title_recon # classifier to compute loss based on softmax cross entropy and accuracy class Classifier(link.Chain): compute_accuracy = True def __init__(self, predictor, lossfun=softmax_cross_entropy.softmax_cross_entropy, accfun=accuracy.accuracy): super(Classifier, self).__init__() self.lossfun = lossfun self.accfun = accfun self.y = None self.loss = None self.accuracy = None with self.init_scope(): self.predictor = predictor def __call__(self, *args): assert len(args) >= 2 x = args[:-1] t = args[-1] self.y = None self.loss = None self.accuracy = None self.y = self.predictor(*x) self.loss = self.lossfun(self.y, t) reporter.report({'loss': self.loss}, self) if self.compute_accuracy: self.accuracy = self.accfun(self.y, t) reporter.report({'accuracy': self.accuracy}, self) return self.loss # Convolutional neuronal network to do the discrimination # respective architecture choices are explained in the report class MLPConv(chainer.Chain): def __init__(self,words_per_title): super(MLPConv, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.conv = L.Convolution2D(in_channels=1, out_channels=1, ksize=3) self.l2 = L.Linear(None, 2) def __call__(self, x): x2 = F.relu(self.conv(F.reshape(x,(x.data.shape[0], 1,self.words_per_title ,200)))) x3 = F.max_pooling_2d(x2, 3) y = F.sigmoid(self.l2(F.dropout(x3,0.2))) return y # Deconvolutional neuronal network to do the generation # respective architecture choices are explained in the report class generator(chainer.Chain): def __init__(self, words_per_title): super(generator, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.l1 = L.Linear(None, words_per_title*200) # linear input layer self.l2 = L.Deconvolution2D(in_channels=1, out_channels=1, ksize=3) # applying deconvolution self.l3 = L.Linear(None, words_per_title * 200) # linear input layer def __call__(self, x): h = F.relu(self.l1(x)) # rectified activation function h = F.reshape(h, (x.data.shape[0], 1,self.words_per_title,200)) h = F.relu(self.l2(h)) return F.reshape(self.l3(h),(x.data.shape[0], 1, self.words_per_title, 200)) # loading the respective data word2index,index2word,w=get_embeddedwords() dictionary=get_dictionary('dictionary.txt') titles_high_raw=get_titles('titlesDict_high.txt',dictionary,shuffle=1) # get maximum number of words in all titles words_per_title = 
get_max_words_over_titles(titles_high_raw,dictionary) # setup networks dis = MLPConv(words_per_title) gen = generator(words_per_title) # Setup an optimizer opti_gen = optimizers.MomentumSGD(lr=0.01) # Using Stochastic Gradient Decent employing momentum opti_gen.setup(gen) opti_dis = optimizers.MomentumSGD(lr=0.001) # Using Stochastic Gradient Decent employing momentum opti_dis.setup(dis) # due to hardware limitations only one training epoch is used n_epoch = 1 maxiter=14400 # preparing some start values for logging start_timer = time.time() overall_acc = [] overall_loss_dis = [] overall_loss_gen = [] title_creations = [] # initiate epochs for epoch in range(n_epoch): for iteration in range(maxiter): # initiate iterations print('epoch' , epoch, ' - iteration ', iteration) # prompting the word 'epoch ' and the coresponding training epoch to the Python Consol # obtaining original training titles train_batch_orig, _ = createtitlebatch(titles_high_raw, dictionary, skipbatches=iteration,testpart=0.1) # creating random value distribution for title generation train_batch_gen = chainer.Variable(np.random.uniform(w.min(), w.max(),(len(train_batch_orig),words_per_title*200) ).astype('float32')) # obtaining generated training titles train_batch_gen = gen(train_batch_gen) # evaluate created titles judge_fake = dis(train_batch_gen) # compute loss for the respective generated titles Loss_gen = F.softmax_cross_entropy(judge_fake, chainer.Variable( np.zeros(len(train_batch_orig), dtype=np.int32))) # obtain generator loss # evaluate real titles judge_real = dis(chainer.Variable(train_batch_orig)) # compute combined loss for the respective generated titles and real titles Loss_dis = F.softmax_cross_entropy(judge_fake, chainer.Variable(np.ones(len(train_batch_orig), dtype=np.int32))) + \ F.softmax_cross_entropy(judge_real, chainer.Variable(np.zeros(len(train_batch_orig), dtype=np.int32))) # compute discriminator accuracy acc = (sum(list(np.int32(judge_fake[:, 0].data < 0.5)) + list(np.int32(judge_real[:, 0].data > 0.5))) / (len(train_batch_orig) * 2.)) # log values for later plotting overall_acc.append(acc) overall_loss_dis.append(float(Loss_dis.data)) overall_loss_gen.append(float(Loss_gen.data)) # update discriminator dis.cleargrads() Loss_dis.backward() opti_dis.update() # update generator gen.cleargrads() Loss_gen.backward() opti_gen.update() print('Training current loss Discriminator =', (float(Loss_dis.data)), ', Training current loss Generator =', (float(Loss_gen.data)), ',Training current Accuracy =', (acc),'Fake detected = ',np.float32(judge_fake[:, 0].data < 0.5).sum()/ len(train_batch_orig)) # after every 10th training iteration an example title is plotted if iteration%10==0: print(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) title_creations.append(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) # saving data to .txt files with open('Accuracy_trainGAN.txt', 'w') as file_handler: for item in overall_acc: file_handler.write("{}\n".format(item)) with open('Loss_DisGAN.txt', 'w') as file_handler: for item in overall_loss_dis: file_handler.write("{}\n".format(item)) with open('Loss_GenGAN.txt', 'w') as file_handler: for item in overall_loss_gen: file_handler.write("{}\n".format(item)) with open('Titles_GAN.txt', 'w') as file_handler: for item in title_creations: file_handler.write("{}\n".format(item))
lines = lines + line.replace(',0', '').replace('\n', '').replace('"', '') + ',' + str( dictionary.get('<eol>')) + ','
conditional_block
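get_titles() concatenates every title into one comma-separated string of dictionary indices (terminated by the <eol> index, as the line above shows) and then converts it with np.array(map(int, ...)). That relies on Python 2, where map returns a list; on Python 3, map returns an iterator and np.array would wrap it as a 0-d object array. A small Python 3-safe sketch of the conversion, assuming the same comma-joined string named lines:

import numpy as np

# materialize the generator before handing it to NumPy (Python 3 safe)
indices = np.array([int(tok) for tok in lines[:-1].split(',')], dtype=np.int64)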
GAN_word_embedding.py
from __future__ import print_function import numpy as np import chainer from chainer.functions.evaluation import accuracy from chainer.functions.loss import softmax_cross_entropy from chainer import link from chainer import reporter from chainer import optimizers import chainer.functions as F import chainer.links as L import time from matplotlib import pyplot as plt # load and arrange the dictionary file def get_dictionary(namefile): print('loading '+ namefile + '...') # all " strings are removed and the data is split at each line break with open(namefile) as f: dict_={line.replace('"', '').replace('\n', ''): int(v) for v, line in enumerate(f)} # end of line statement <eol> is added to the dictionary dict_.update({'<eol>': len(dict_)+1}) print('done.') return dict_ # load and arrange the title file def get_titles(namefile,dictionary,shuffle=0): print('loading ' + namefile + '...') with open(namefile) as file: lines = '' next(file) for line in file: # all zeros are removed and the data is split at each line break # <eol> is added at the end of each title lines = lines + line.replace(',0', '').replace('\n', '').replace('"', '') + ',' + str( dictionary.get('<eol>')) + ',' print('done.') lines=np.array(map(int, lines[:-1].split(','))) # if desired the titles are shuffled if shuffle: endoftitles = [x for x in range(len(lines)) if lines[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) idx = np.random.permutation(len(endoftitles)) endoftitles=[endoftitles[x] for x in idx] startoftitles = [startoftitles[x] for x in idx] lines = [lines[range(startoftitles[x], endoftitles[x] + 1)] for x in range(len(endoftitles))] lines = np.hstack(lines) # the function returns a vector containing all dictionary indices of all titles return lines # load and arrange the word embedding file def get_embeddedwords(namefile='word2vec.model'): print('loading ' + namefile + '...') with open(namefile, 'r') as f: ss = f.readline().split() # each line is split and the respective human readable word and the vector embedding vector is extracted n_vocab, n_units = int(ss[0]), int(ss[1]) word2index = {} index2word = {} w = np.empty((n_vocab, n_units), dtype=np.float32) # the embedding matrix is created by sorting all word vectors according to the dictionary index # the resulting matrix is of size NumIndices x 200 # note that due to splitting white space got removed. 
For that reason it is added again and a vector of # zeros is used within w for i, line in enumerate(f): ss = line.split() if len(ss)<201: ss = [' ']+ss word = ss[0] word2index[word] = i index2word[i] = word w[i] = np.array([float(s) for s in ss[1:]], dtype=np.float32) w[word2index[' ']]=np.zeros((1, 200)) print('done.') # word2index transforms a dictionary index to a human readable word # index2word transforms a human readable word to a dictionary index return word2index,index2word,w # this function is used to obtain the maximum number of words across all titles by finding <eol> statements def get_max_words_over_titles(titles_raw,dictionary): endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch = max(np.abs(np.subtract(startoftitles, endoftitles))) + 1 return max_title_length_in_batch # this function creates batch data used to train the network def createtitlebatch(titles_raw,dictionary,skipbatches=0,numtitles=10,testpart=0.05): # skip_ is used to select parts of the title vector # skip_ = 10 given numtitles = 80 would mean that the first 800 titles in the vector are skipped and # all following operations are performed on data that comes after that skip_=numtitles*skipbatches endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch=max(np.abs(np.subtract(startoftitles,endoftitles)))+1 max_skip=len(endoftitles) if max_skip<(numtitles+skip_): print('maximum requested number of titles is '+ str(numtitles+skip_)+'; dataset only has ' + str(max_skip) + ' titles') print('maximum number of batches at ' + str(numtitles) + ' titles per batch: ' + str(max_skip/numtitles-1)) else: title_matrix=[] # extraction of the data from w given the amount of titles selected for n in range(skip_,numtitles+skip_): title_num=n title_vec=titles_raw[range(startoftitles[title_num],endoftitles[title_num]+1)] title_matrix.append([w[x-1] for x in title_vec]) # shuffling the selected batch randidx=np.random.permutation(len(title_matrix)) idx_train=randidx[:-int(np.floor(len(randidx)*(testpart)))] idx_test=randidx[int(np.floor(len(randidx)*(1-testpart))):] train = [title_matrix[x] for x in idx_train] test = [title_matrix[x] for x in idx_test] train = [np.concatenate((x,np.zeros((max_title_length_in_batch-len(x),200))),0) for x in train] test = [np.concatenate((x, np.zeros((max_title_length_in_batch - len(x), 200))), 0) for x in test] # train and test are returned in a shape optimized for the use with convolutional networks # the respective shape is numExamples x channel x numWords x ndimEmbedding return np.asarray(train).astype('float32').reshape((len(train),max_title_length_in_batch/7,max_title_length_in_batch/7,200)),np.asarray(test).astype('float32').reshape((len(test),1,max_title_length_in_batch,200)) # this function can be used to transform a title matrix (words x ndim) back into a human readable title # the argmin() L2 norm between each word vector of the title and all word vectors in w serves as the # index for the respective word in the dictionary def vec2title(vec_,w,index2word):
# classifier to compute loss based on softmax cross entropy and accuracy class Classifier(link.Chain): compute_accuracy = True def __init__(self, predictor, lossfun=softmax_cross_entropy.softmax_cross_entropy, accfun=accuracy.accuracy): super(Classifier, self).__init__() self.lossfun = lossfun self.accfun = accfun self.y = None self.loss = None self.accuracy = None with self.init_scope(): self.predictor = predictor def __call__(self, *args): assert len(args) >= 2 x = args[:-1] t = args[-1] self.y = None self.loss = None self.accuracy = None self.y = self.predictor(*x) self.loss = self.lossfun(self.y, t) reporter.report({'loss': self.loss}, self) if self.compute_accuracy: self.accuracy = self.accfun(self.y, t) reporter.report({'accuracy': self.accuracy}, self) return self.loss # Convolutional neuronal network to do the discrimination # respective architecture choices are explained in the report class MLPConv(chainer.Chain): def __init__(self,words_per_title): super(MLPConv, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.conv = L.Convolution2D(in_channels=1, out_channels=1, ksize=3) self.l2 = L.Linear(None, 2) def __call__(self, x): x2 = F.relu(self.conv(F.reshape(x,(x.data.shape[0], 1,self.words_per_title ,200)))) x3 = F.max_pooling_2d(x2, 3) y = F.sigmoid(self.l2(F.dropout(x3,0.2))) return y # Deconvolutional neuronal network to do the generation # respective architecture choices are explained in the report class generator(chainer.Chain): def __init__(self, words_per_title): super(generator, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.l1 = L.Linear(None, words_per_title*200) # linear input layer self.l2 = L.Deconvolution2D(in_channels=1, out_channels=1, ksize=3) # applying deconvolution self.l3 = L.Linear(None, words_per_title * 200) # linear input layer def __call__(self, x): h = F.relu(self.l1(x)) # rectified activation function h = F.reshape(h, (x.data.shape[0], 1,self.words_per_title,200)) h = F.relu(self.l2(h)) return F.reshape(self.l3(h),(x.data.shape[0], 1, self.words_per_title, 200)) # loading the respective data word2index,index2word,w=get_embeddedwords() dictionary=get_dictionary('dictionary.txt') titles_high_raw=get_titles('titlesDict_high.txt',dictionary,shuffle=1) # get maximum number of words in all titles words_per_title = get_max_words_over_titles(titles_high_raw,dictionary) # setup networks dis = MLPConv(words_per_title) gen = generator(words_per_title) # Setup an optimizer opti_gen = optimizers.MomentumSGD(lr=0.01) # Using Stochastic Gradient Decent employing momentum opti_gen.setup(gen) opti_dis = optimizers.MomentumSGD(lr=0.001) # Using Stochastic Gradient Decent employing momentum opti_dis.setup(dis) # due to hardware limitations only one training epoch is used n_epoch = 1 maxiter=14400 # preparing some start values for logging start_timer = time.time() overall_acc = [] overall_loss_dis = [] overall_loss_gen = [] title_creations = [] # initiate epochs for epoch in range(n_epoch): for iteration in range(maxiter): # initiate iterations print('epoch' , epoch, ' - iteration ', iteration) # prompting the word 'epoch ' and the coresponding training epoch to the Python Consol # obtaining original training titles train_batch_orig, _ = createtitlebatch(titles_high_raw, dictionary, skipbatches=iteration,testpart=0.1) # creating random value distribution for title generation train_batch_gen = chainer.Variable(np.random.uniform(w.min(), w.max(),(len(train_batch_orig),words_per_title*200) 
).astype('float32')) # obtaining generated training titles train_batch_gen = gen(train_batch_gen) # evaluate created titles judge_fake = dis(train_batch_gen) # compute loss for the respective generated titles Loss_gen = F.softmax_cross_entropy(judge_fake, chainer.Variable( np.zeros(len(train_batch_orig), dtype=np.int32))) # obtain generator loss # evaluate real titles judge_real = dis(chainer.Variable(train_batch_orig)) # compute combined loss for the respective generated titles and real titles Loss_dis = F.softmax_cross_entropy(judge_fake, chainer.Variable(np.ones(len(train_batch_orig), dtype=np.int32))) + \ F.softmax_cross_entropy(judge_real, chainer.Variable(np.zeros(len(train_batch_orig), dtype=np.int32))) # compute discriminator accuracy acc = (sum(list(np.int32(judge_fake[:, 0].data < 0.5)) + list(np.int32(judge_real[:, 0].data > 0.5))) / (len(train_batch_orig) * 2.)) # log values for later plotting overall_acc.append(acc) overall_loss_dis.append(float(Loss_dis.data)) overall_loss_gen.append(float(Loss_gen.data)) # update discriminator dis.cleargrads() Loss_dis.backward() opti_dis.update() # update generator gen.cleargrads() Loss_gen.backward() opti_gen.update() print('Training current loss Discriminator =', (float(Loss_dis.data)), ', Training current loss Generator =', (float(Loss_gen.data)), ',Training current Accuracy =', (acc),'Fake detected = ',np.float32(judge_fake[:, 0].data < 0.5).sum()/ len(train_batch_orig)) # after every 10th training iteration an example title is plotted if iteration%10==0: print(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) title_creations.append(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) # saving data to .txt files with open('Accuracy_trainGAN.txt', 'w') as file_handler: for item in overall_acc: file_handler.write("{}\n".format(item)) with open('Loss_DisGAN.txt', 'w') as file_handler: for item in overall_loss_dis: file_handler.write("{}\n".format(item)) with open('Loss_GenGAN.txt', 'w') as file_handler: for item in overall_loss_gen: file_handler.write("{}\n".format(item)) with open('Titles_GAN.txt', 'w') as file_handler: for item in title_creations: file_handler.write("{}\n".format(item))
    dict_trans = w  # (w-w.min())/(w-w.min()).max() normalization left commented out
    title_recon = ''
    for i in range(len(vec_)):
        word_ = vec_.data[i]
        word_ = np.tile(word_, (len(w), 1))
        dist_ = np.sqrt(np.sum((dict_trans - word_)**2, 1))
        title_recon = title_recon + index2word[dist_.argmin()] + ' '
    return title_recon
identifier_body
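vec2title() above recovers each word by tiling the query vector over the whole vocabulary and taking the argmin of the L2 distance, one word at a time. A vectorized sketch of the same nearest-neighbour lookup, assuming w is the (n_vocab, 200) embedding matrix and title is an (n_words, 200) array of word vectors; for a large vocabulary the broadcast below can be memory hungry, so the per-word loop of the original may be the safer choice.

import numpy as np

def vec2title_vectorized(title, w, index2word):
    # pairwise distances between every title word and every vocabulary vector,
    # shape (n_words, n_vocab) via broadcasting
    dists = np.linalg.norm(title[:, None, :] - w[None, :, :], axis=2)
    nearest = dists.argmin(axis=1)
    return ' '.join(index2word[i] for i in nearest)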
GAN_word_embedding.py
from __future__ import print_function import numpy as np import chainer from chainer.functions.evaluation import accuracy from chainer.functions.loss import softmax_cross_entropy from chainer import link from chainer import reporter from chainer import optimizers import chainer.functions as F import chainer.links as L import time from matplotlib import pyplot as plt # load and arrange the dictionary file def get_dictionary(namefile): print('loading '+ namefile + '...') # all " strings are removed and the data is split at each line break with open(namefile) as f: dict_={line.replace('"', '').replace('\n', ''): int(v) for v, line in enumerate(f)} # end of line statement <eol> is added to the dictionary dict_.update({'<eol>': len(dict_)+1}) print('done.') return dict_ # load and arrange the title file def get_titles(namefile,dictionary,shuffle=0): print('loading ' + namefile + '...') with open(namefile) as file: lines = '' next(file) for line in file: # all zeros are removed and the data is split at each line break # <eol> is added at the end of each title lines = lines + line.replace(',0', '').replace('\n', '').replace('"', '') + ',' + str( dictionary.get('<eol>')) + ',' print('done.') lines=np.array(map(int, lines[:-1].split(','))) # if desired the titles are shuffled if shuffle: endoftitles = [x for x in range(len(lines)) if lines[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) idx = np.random.permutation(len(endoftitles)) endoftitles=[endoftitles[x] for x in idx] startoftitles = [startoftitles[x] for x in idx] lines = [lines[range(startoftitles[x], endoftitles[x] + 1)] for x in range(len(endoftitles))] lines = np.hstack(lines) # the function returns a vector containing all dictionary indices of all titles return lines # load and arrange the word embedding file def get_embeddedwords(namefile='word2vec.model'): print('loading ' + namefile + '...') with open(namefile, 'r') as f: ss = f.readline().split() # each line is split and the respective human readable word and the vector embedding vector is extracted n_vocab, n_units = int(ss[0]), int(ss[1]) word2index = {} index2word = {} w = np.empty((n_vocab, n_units), dtype=np.float32) # the embedding matrix is created by sorting all word vectors according to the dictionary index # the resulting matrix is of size NumIndices x 200 # note that due to splitting white space got removed. 
For that reason it is added again and a vector of # zeros is used within w for i, line in enumerate(f): ss = line.split() if len(ss)<201: ss = [' ']+ss word = ss[0] word2index[word] = i index2word[i] = word w[i] = np.array([float(s) for s in ss[1:]], dtype=np.float32) w[word2index[' ']]=np.zeros((1, 200)) print('done.') # word2index transforms a dictionary index to a human readable word # index2word transforms a human readable word to a dictionary index return word2index,index2word,w # this function is used to obtain the maximum number of words across all titles by finding <eol> statements def get_max_words_over_titles(titles_raw,dictionary): endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch = max(np.abs(np.subtract(startoftitles, endoftitles))) + 1 return max_title_length_in_batch # this function creates batch data used to train the network def createtitlebatch(titles_raw,dictionary,skipbatches=0,numtitles=10,testpart=0.05): # skip_ is used to select parts of the title vector # skip_ = 10 given numtitles = 80 would mean that the first 800 titles in the vector are skipped and # all following operations are performed on data that comes after that skip_=numtitles*skipbatches endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch=max(np.abs(np.subtract(startoftitles,endoftitles)))+1 max_skip=len(endoftitles) if max_skip<(numtitles+skip_): print('maximum requested number of titles is '+ str(numtitles+skip_)+'; dataset only has ' + str(max_skip) + ' titles') print('maximum number of batches at ' + str(numtitles) + ' titles per batch: ' + str(max_skip/numtitles-1)) else: title_matrix=[] # extraction of the data from w given the amount of titles selected for n in range(skip_,numtitles+skip_): title_num=n title_vec=titles_raw[range(startoftitles[title_num],endoftitles[title_num]+1)] title_matrix.append([w[x-1] for x in title_vec]) # shuffling the selected batch randidx=np.random.permutation(len(title_matrix)) idx_train=randidx[:-int(np.floor(len(randidx)*(testpart)))] idx_test=randidx[int(np.floor(len(randidx)*(1-testpart))):] train = [title_matrix[x] for x in idx_train] test = [title_matrix[x] for x in idx_test] train = [np.concatenate((x,np.zeros((max_title_length_in_batch-len(x),200))),0) for x in train] test = [np.concatenate((x, np.zeros((max_title_length_in_batch - len(x), 200))), 0) for x in test] # train and test are returned in a shape optimized for the use with convolutional networks # the respective shape is numExamples x channel x numWords x ndimEmbedding return np.asarray(train).astype('float32').reshape((len(train),max_title_length_in_batch/7,max_title_length_in_batch/7,200)),np.asarray(test).astype('float32').reshape((len(test),1,max_title_length_in_batch,200)) # this function can be used to transform a title matrix (words x ndim) back into a human readable title # the argmin() L2 norm between each word vector of the title and all word vectors in w serves as the # index for the respective word in the dictionary def vec2title(vec_,w,index2word): dict_trans=w#(w-w.min())/(w-w.min()).max() title_recon='' for i in range(len(vec_)): word_ = vec_.data[i] word_ = np.tile(word_,(len(w),1)) dist_=np.sqrt(np.sum((dict_trans-word_)**2,1)) title_recon=title_recon+index2word[dist_.argmin()]+' ' return title_recon # classifier to compute loss based on 
softmax cross entropy and accuracy class Classifier(link.Chain): compute_accuracy = True def __init__(self, predictor, lossfun=softmax_cross_entropy.softmax_cross_entropy, accfun=accuracy.accuracy): super(Classifier, self).__init__() self.lossfun = lossfun self.accfun = accfun
with self.init_scope(): self.predictor = predictor def __call__(self, *args): assert len(args) >= 2 x = args[:-1] t = args[-1] self.y = None self.loss = None self.accuracy = None self.y = self.predictor(*x) self.loss = self.lossfun(self.y, t) reporter.report({'loss': self.loss}, self) if self.compute_accuracy: self.accuracy = self.accfun(self.y, t) reporter.report({'accuracy': self.accuracy}, self) return self.loss # Convolutional neuronal network to do the discrimination # respective architecture choices are explained in the report class MLPConv(chainer.Chain): def __init__(self,words_per_title): super(MLPConv, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.conv = L.Convolution2D(in_channels=1, out_channels=1, ksize=3) self.l2 = L.Linear(None, 2) def __call__(self, x): x2 = F.relu(self.conv(F.reshape(x,(x.data.shape[0], 1,self.words_per_title ,200)))) x3 = F.max_pooling_2d(x2, 3) y = F.sigmoid(self.l2(F.dropout(x3,0.2))) return y # Deconvolutional neuronal network to do the generation # respective architecture choices are explained in the report class generator(chainer.Chain): def __init__(self, words_per_title): super(generator, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.l1 = L.Linear(None, words_per_title*200) # linear input layer self.l2 = L.Deconvolution2D(in_channels=1, out_channels=1, ksize=3) # applying deconvolution self.l3 = L.Linear(None, words_per_title * 200) # linear input layer def __call__(self, x): h = F.relu(self.l1(x)) # rectified activation function h = F.reshape(h, (x.data.shape[0], 1,self.words_per_title,200)) h = F.relu(self.l2(h)) return F.reshape(self.l3(h),(x.data.shape[0], 1, self.words_per_title, 200)) # loading the respective data word2index,index2word,w=get_embeddedwords() dictionary=get_dictionary('dictionary.txt') titles_high_raw=get_titles('titlesDict_high.txt',dictionary,shuffle=1) # get maximum number of words in all titles words_per_title = get_max_words_over_titles(titles_high_raw,dictionary) # setup networks dis = MLPConv(words_per_title) gen = generator(words_per_title) # Setup an optimizer opti_gen = optimizers.MomentumSGD(lr=0.01) # Using Stochastic Gradient Decent employing momentum opti_gen.setup(gen) opti_dis = optimizers.MomentumSGD(lr=0.001) # Using Stochastic Gradient Decent employing momentum opti_dis.setup(dis) # due to hardware limitations only one training epoch is used n_epoch = 1 maxiter=14400 # preparing some start values for logging start_timer = time.time() overall_acc = [] overall_loss_dis = [] overall_loss_gen = [] title_creations = [] # initiate epochs for epoch in range(n_epoch): for iteration in range(maxiter): # initiate iterations print('epoch' , epoch, ' - iteration ', iteration) # prompting the word 'epoch ' and the coresponding training epoch to the Python Consol # obtaining original training titles train_batch_orig, _ = createtitlebatch(titles_high_raw, dictionary, skipbatches=iteration,testpart=0.1) # creating random value distribution for title generation train_batch_gen = chainer.Variable(np.random.uniform(w.min(), w.max(),(len(train_batch_orig),words_per_title*200) ).astype('float32')) # obtaining generated training titles train_batch_gen = gen(train_batch_gen) # evaluate created titles judge_fake = dis(train_batch_gen) # compute loss for the respective generated titles Loss_gen = F.softmax_cross_entropy(judge_fake, chainer.Variable( np.zeros(len(train_batch_orig), dtype=np.int32))) # obtain generator loss # evaluate real titles judge_real = 
dis(chainer.Variable(train_batch_orig)) # compute combined loss for the respective generated titles and real titles Loss_dis = F.softmax_cross_entropy(judge_fake, chainer.Variable(np.ones(len(train_batch_orig), dtype=np.int32))) + \ F.softmax_cross_entropy(judge_real, chainer.Variable(np.zeros(len(train_batch_orig), dtype=np.int32))) # compute discriminator accuracy acc = (sum(list(np.int32(judge_fake[:, 0].data < 0.5)) + list(np.int32(judge_real[:, 0].data > 0.5))) / (len(train_batch_orig) * 2.)) # log values for later plotting overall_acc.append(acc) overall_loss_dis.append(float(Loss_dis.data)) overall_loss_gen.append(float(Loss_gen.data)) # update discriminator dis.cleargrads() Loss_dis.backward() opti_dis.update() # update generator gen.cleargrads() Loss_gen.backward() opti_gen.update() print('Training current loss Discriminator =', (float(Loss_dis.data)), ', Training current loss Generator =', (float(Loss_gen.data)), ',Training current Accuracy =', (acc),'Fake detected = ',np.float32(judge_fake[:, 0].data < 0.5).sum()/ len(train_batch_orig)) # after every 10th training iteration an example title is plotted if iteration%10==0: print(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) title_creations.append(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) # saving data to .txt files with open('Accuracy_trainGAN.txt', 'w') as file_handler: for item in overall_acc: file_handler.write("{}\n".format(item)) with open('Loss_DisGAN.txt', 'w') as file_handler: for item in overall_loss_dis: file_handler.write("{}\n".format(item)) with open('Loss_GenGAN.txt', 'w') as file_handler: for item in overall_loss_gen: file_handler.write("{}\n".format(item)) with open('Titles_GAN.txt', 'w') as file_handler: for item in title_creations: file_handler.write("{}\n".format(item))
self.y = None self.loss = None self.accuracy = None
random_line_split
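The vec2title routine in the record above maps each generated 200-dimensional word vector back to the nearest dictionary entry by L2 distance. Below is a minimal NumPy sketch of that nearest-neighbour decoding step; the tiny embedding matrix and the words in index2word are made up for illustration and stand in for the word2vec.model data the script loads.

import numpy as np

# Toy embedding matrix: 4 "words", 3 dimensions (the script uses N x 200).
w = np.array([[0.0, 0.0, 1.0],
              [1.0, 0.0, 0.0],
              [0.0, 1.0, 0.0],
              [0.5, 0.5, 0.0]], dtype=np.float32)
index2word = {0: 'cat', 1: 'dog', 2: 'runs', 3: 'fast'}

def decode(title_matrix, w, index2word):
    """Map each row of title_matrix to the word whose embedding is closest in L2 norm."""
    words = []
    for vec in title_matrix:
        dist = np.sqrt(((w - vec) ** 2).sum(axis=1))  # distance to every dictionary vector
        words.append(index2word[int(dist.argmin())])
    return ' '.join(words)

# A "generated title" of two word vectors: noisy versions of rows 1 and 2.
generated = np.array([[0.9, 0.1, 0.0],
                      [0.1, 0.8, 0.1]], dtype=np.float32)
print(decode(generated, w, index2word))  # -> "dog runs"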
GAN_word_embedding.py
from __future__ import print_function import numpy as np import chainer from chainer.functions.evaluation import accuracy from chainer.functions.loss import softmax_cross_entropy from chainer import link from chainer import reporter from chainer import optimizers import chainer.functions as F import chainer.links as L import time from matplotlib import pyplot as plt # load and arrange the dictionary file def get_dictionary(namefile): print('loading '+ namefile + '...') # all " strings are removed and the data is split at each line break with open(namefile) as f: dict_={line.replace('"', '').replace('\n', ''): int(v) for v, line in enumerate(f)} # end of line statement <eol> is added to the dictionary dict_.update({'<eol>': len(dict_)+1}) print('done.') return dict_ # load and arrange the title file def get_titles(namefile,dictionary,shuffle=0): print('loading ' + namefile + '...') with open(namefile) as file: lines = '' next(file) for line in file: # all zeros are removed and the data is split at each line break # <eol> is added at the end of each title lines = lines + line.replace(',0', '').replace('\n', '').replace('"', '') + ',' + str( dictionary.get('<eol>')) + ',' print('done.') lines=np.array(map(int, lines[:-1].split(','))) # if desired the titles are shuffled if shuffle: endoftitles = [x for x in range(len(lines)) if lines[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) idx = np.random.permutation(len(endoftitles)) endoftitles=[endoftitles[x] for x in idx] startoftitles = [startoftitles[x] for x in idx] lines = [lines[range(startoftitles[x], endoftitles[x] + 1)] for x in range(len(endoftitles))] lines = np.hstack(lines) # the function returns a vector containing all dictionary indices of all titles return lines # load and arrange the word embedding file def get_embeddedwords(namefile='word2vec.model'): print('loading ' + namefile + '...') with open(namefile, 'r') as f: ss = f.readline().split() # each line is split and the respective human readable word and the vector embedding vector is extracted n_vocab, n_units = int(ss[0]), int(ss[1]) word2index = {} index2word = {} w = np.empty((n_vocab, n_units), dtype=np.float32) # the embedding matrix is created by sorting all word vectors according to the dictionary index # the resulting matrix is of size NumIndices x 200 # note that due to splitting white space got removed. 
For that reason it is added again and a vector of # zeros is used within w for i, line in enumerate(f): ss = line.split() if len(ss)<201: ss = [' ']+ss word = ss[0] word2index[word] = i index2word[i] = word w[i] = np.array([float(s) for s in ss[1:]], dtype=np.float32) w[word2index[' ']]=np.zeros((1, 200)) print('done.') # word2index transforms a dictionary index to a human readable word # index2word transforms a human readable word to a dictionary index return word2index,index2word,w # this function is used to obtain the maximum number of words across all titles by finding <eol> statements def get_max_words_over_titles(titles_raw,dictionary): endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch = max(np.abs(np.subtract(startoftitles, endoftitles))) + 1 return max_title_length_in_batch # this function creates batch data used to train the network def createtitlebatch(titles_raw,dictionary,skipbatches=0,numtitles=10,testpart=0.05): # skip_ is used to select parts of the title vector # skip_ = 10 given numtitles = 80 would mean that the first 800 titles in the vector are skipped and # all following operations are performed on data that comes after that skip_=numtitles*skipbatches endoftitles = [x for x in range(len(titles_raw)) if titles_raw[x] == dictionary.get('<eol>')] startoftitles = [0] + list(np.add(endoftitles[:-1], 1)) max_title_length_in_batch=max(np.abs(np.subtract(startoftitles,endoftitles)))+1 max_skip=len(endoftitles) if max_skip<(numtitles+skip_): print('maximum requested number of titles is '+ str(numtitles+skip_)+'; dataset only has ' + str(max_skip) + ' titles') print('maximum number of batches at ' + str(numtitles) + ' titles per batch: ' + str(max_skip/numtitles-1)) else: title_matrix=[] # extraction of the data from w given the amount of titles selected for n in range(skip_,numtitles+skip_): title_num=n title_vec=titles_raw[range(startoftitles[title_num],endoftitles[title_num]+1)] title_matrix.append([w[x-1] for x in title_vec]) # shuffling the selected batch randidx=np.random.permutation(len(title_matrix)) idx_train=randidx[:-int(np.floor(len(randidx)*(testpart)))] idx_test=randidx[int(np.floor(len(randidx)*(1-testpart))):] train = [title_matrix[x] for x in idx_train] test = [title_matrix[x] for x in idx_test] train = [np.concatenate((x,np.zeros((max_title_length_in_batch-len(x),200))),0) for x in train] test = [np.concatenate((x, np.zeros((max_title_length_in_batch - len(x), 200))), 0) for x in test] # train and test are returned in a shape optimized for the use with convolutional networks # the respective shape is numExamples x channel x numWords x ndimEmbedding return np.asarray(train).astype('float32').reshape((len(train),max_title_length_in_batch/7,max_title_length_in_batch/7,200)),np.asarray(test).astype('float32').reshape((len(test),1,max_title_length_in_batch,200)) # this function can be used to transform a title matrix (words x ndim) back into a human readable title # the argmin() L2 norm between each word vector of the title and all word vectors in w serves as the # index for the respective word in the dictionary def vec2title(vec_,w,index2word): dict_trans=w#(w-w.min())/(w-w.min()).max() title_recon='' for i in range(len(vec_)): word_ = vec_.data[i] word_ = np.tile(word_,(len(w),1)) dist_=np.sqrt(np.sum((dict_trans-word_)**2,1)) title_recon=title_recon+index2word[dist_.argmin()]+' ' return title_recon # classifier to compute loss based on 
softmax cross entropy and accuracy class
(link.Chain): compute_accuracy = True def __init__(self, predictor, lossfun=softmax_cross_entropy.softmax_cross_entropy, accfun=accuracy.accuracy): super(Classifier, self).__init__() self.lossfun = lossfun self.accfun = accfun self.y = None self.loss = None self.accuracy = None with self.init_scope(): self.predictor = predictor def __call__(self, *args): assert len(args) >= 2 x = args[:-1] t = args[-1] self.y = None self.loss = None self.accuracy = None self.y = self.predictor(*x) self.loss = self.lossfun(self.y, t) reporter.report({'loss': self.loss}, self) if self.compute_accuracy: self.accuracy = self.accfun(self.y, t) reporter.report({'accuracy': self.accuracy}, self) return self.loss # Convolutional neuronal network to do the discrimination # respective architecture choices are explained in the report class MLPConv(chainer.Chain): def __init__(self,words_per_title): super(MLPConv, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.conv = L.Convolution2D(in_channels=1, out_channels=1, ksize=3) self.l2 = L.Linear(None, 2) def __call__(self, x): x2 = F.relu(self.conv(F.reshape(x,(x.data.shape[0], 1,self.words_per_title ,200)))) x3 = F.max_pooling_2d(x2, 3) y = F.sigmoid(self.l2(F.dropout(x3,0.2))) return y # Deconvolutional neuronal network to do the generation # respective architecture choices are explained in the report class generator(chainer.Chain): def __init__(self, words_per_title): super(generator, self).__init__() with self.init_scope(): self.words_per_title = words_per_title self.l1 = L.Linear(None, words_per_title*200) # linear input layer self.l2 = L.Deconvolution2D(in_channels=1, out_channels=1, ksize=3) # applying deconvolution self.l3 = L.Linear(None, words_per_title * 200) # linear input layer def __call__(self, x): h = F.relu(self.l1(x)) # rectified activation function h = F.reshape(h, (x.data.shape[0], 1,self.words_per_title,200)) h = F.relu(self.l2(h)) return F.reshape(self.l3(h),(x.data.shape[0], 1, self.words_per_title, 200)) # loading the respective data word2index,index2word,w=get_embeddedwords() dictionary=get_dictionary('dictionary.txt') titles_high_raw=get_titles('titlesDict_high.txt',dictionary,shuffle=1) # get maximum number of words in all titles words_per_title = get_max_words_over_titles(titles_high_raw,dictionary) # setup networks dis = MLPConv(words_per_title) gen = generator(words_per_title) # Setup an optimizer opti_gen = optimizers.MomentumSGD(lr=0.01) # Using Stochastic Gradient Decent employing momentum opti_gen.setup(gen) opti_dis = optimizers.MomentumSGD(lr=0.001) # Using Stochastic Gradient Decent employing momentum opti_dis.setup(dis) # due to hardware limitations only one training epoch is used n_epoch = 1 maxiter=14400 # preparing some start values for logging start_timer = time.time() overall_acc = [] overall_loss_dis = [] overall_loss_gen = [] title_creations = [] # initiate epochs for epoch in range(n_epoch): for iteration in range(maxiter): # initiate iterations print('epoch' , epoch, ' - iteration ', iteration) # prompting the word 'epoch ' and the coresponding training epoch to the Python Consol # obtaining original training titles train_batch_orig, _ = createtitlebatch(titles_high_raw, dictionary, skipbatches=iteration,testpart=0.1) # creating random value distribution for title generation train_batch_gen = chainer.Variable(np.random.uniform(w.min(), w.max(),(len(train_batch_orig),words_per_title*200) ).astype('float32')) # obtaining generated training titles train_batch_gen = gen(train_batch_gen) # 
evaluate created titles judge_fake = dis(train_batch_gen) # compute loss for the respective generated titles Loss_gen = F.softmax_cross_entropy(judge_fake, chainer.Variable( np.zeros(len(train_batch_orig), dtype=np.int32))) # obtain generator loss # evaluate real titles judge_real = dis(chainer.Variable(train_batch_orig)) # compute combined loss for the respective generated titles and real titles Loss_dis = F.softmax_cross_entropy(judge_fake, chainer.Variable(np.ones(len(train_batch_orig), dtype=np.int32))) + \ F.softmax_cross_entropy(judge_real, chainer.Variable(np.zeros(len(train_batch_orig), dtype=np.int32))) # compute discriminator accuracy acc = (sum(list(np.int32(judge_fake[:, 0].data < 0.5)) + list(np.int32(judge_real[:, 0].data > 0.5))) / (len(train_batch_orig) * 2.)) # log values for later plotting overall_acc.append(acc) overall_loss_dis.append(float(Loss_dis.data)) overall_loss_gen.append(float(Loss_gen.data)) # update discriminator dis.cleargrads() Loss_dis.backward() opti_dis.update() # update generator gen.cleargrads() Loss_gen.backward() opti_gen.update() print('Training current loss Discriminator =', (float(Loss_dis.data)), ', Training current loss Generator =', (float(Loss_gen.data)), ',Training current Accuracy =', (acc),'Fake detected = ',np.float32(judge_fake[:, 0].data < 0.5).sum()/ len(train_batch_orig)) # after every 10th training iteration an example title is plotted if iteration%10==0: print(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) title_creations.append(vec2title(train_batch_gen[np.random.randint(len(train_batch_gen))][0], w, index2word)) # saving data to .txt files with open('Accuracy_trainGAN.txt', 'w') as file_handler: for item in overall_acc: file_handler.write("{}\n".format(item)) with open('Loss_DisGAN.txt', 'w') as file_handler: for item in overall_loss_dis: file_handler.write("{}\n".format(item)) with open('Loss_GenGAN.txt', 'w') as file_handler: for item in overall_loss_gen: file_handler.write("{}\n".format(item)) with open('Titles_GAN.txt', 'w') as file_handler: for item in title_creations: file_handler.write("{}\n".format(item))
Classifier
identifier_name
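The training loop in this record scores the discriminator by counting fake titles whose first output falls below 0.5 and real titles whose first output lies above 0.5. A small NumPy sketch of that accuracy computation follows; the score arrays are made-up values standing in for judge_fake[:, 0].data and judge_real[:, 0].data.

import numpy as np

def discriminator_accuracy(fake_scores, real_scores):
    """Fraction of correct calls: fakes should score below 0.5, reals above 0.5."""
    fake_correct = np.int32(fake_scores < 0.5)
    real_correct = np.int32(real_scores > 0.5)
    return (fake_correct.sum() + real_correct.sum()) / float(len(fake_scores) + len(real_scores))

fake = np.array([0.2, 0.7, 0.4], dtype=np.float32)   # scores given to generated titles
real = np.array([0.8, 0.6, 0.3], dtype=np.float32)   # scores given to real titles
print(discriminator_accuracy(fake, real))  # 4 of 6 correct -> ~0.667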
jobs.py
import asyncio import logging import pickle import warnings from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Any, Callable, Dict, Optional, Tuple from redis.asyncio import Redis from .constants import abort_jobs_ss, default_queue_name, in_progress_key_prefix, job_key_prefix, result_key_prefix from .utils import ms_to_datetime, poll, timestamp_ms logger = logging.getLogger('arq.jobs') Serializer = Callable[[Dict[str, Any]], bytes] Deserializer = Callable[[bytes], Dict[str, Any]] class ResultNotFound(RuntimeError): pass class JobStatus(str, Enum): """ Enum of job statuses. """ #: job is in the queue, time it should be run not yet reached deferred = 'deferred' #: job is in the queue, time it should run has been reached queued = 'queued' #: job is in progress in_progress = 'in_progress' #: job is complete, result is available complete = 'complete' #: job not found in any way not_found = 'not_found' @dataclass class JobDef: function: str args: Tuple[Any, ...] kwargs: Dict[str, Any] job_try: int enqueue_time: datetime score: Optional[int] def __post_init__(self) -> None: if isinstance(self.score, float): self.score = int(self.score) @dataclass class JobResult(JobDef): success: bool result: Any start_time: datetime finish_time: datetime queue_name: str job_id: Optional[str] = None class Job: """ Holds data a reference to a job. """ __slots__ = 'job_id', '_redis', '_queue_name', '_deserializer' def __init__( self, job_id: str, redis: 'Redis[bytes]', _queue_name: str = default_queue_name, _deserializer: Optional[Deserializer] = None, ): self.job_id = job_id self._redis = redis self._queue_name = _queue_name self._deserializer = _deserializer async def result( self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: float = None ) -> Any: """ Get the result of the job or, if the job raised an exception, reraise it. This function waits for the result if it's not yet available and the job is present in the queue. Otherwise ``ResultNotFound`` is raised. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever :param poll_delay: how often to poll redis for the job result :param pole_delay: deprecated, use poll_delay instead """ if pole_delay is not None: warnings.warn( '"pole_delay" is deprecated, use the correct spelling "poll_delay" instead', DeprecationWarning ) poll_delay = pole_delay async for delay in poll(poll_delay): async with self._redis.pipeline(transaction=True) as tr: tr.get(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] v, s = await tr.execute() if v: info = deserialize_result(v, deserializer=self._deserializer) if info.success: return info.result elif isinstance(info.result, (Exception, asyncio.CancelledError)): raise info.result else: raise SerializationError(info.result) elif s is None: raise ResultNotFound( 'Not waiting for job result because the job is not in queue. ' 'Is the worker function configured to keep result?' ) if timeout is not None and delay > timeout: raise asyncio.TimeoutError() async def info(self) -> Optional[JobDef]: """ All information on a job, including its result if it's available, does not wait for the result. 
""" info: Optional[JobDef] = await self.result_info() if not info: v = await self._redis.get(job_key_prefix + self.job_id) if v: info = deserialize_job(v, deserializer=self._deserializer) if info: s = await self._redis.zscore(self._queue_name, self.job_id) info.score = None if s is None else int(s) return info async def result_info(self) -> Optional[JobResult]: """ Information about the job result if available, does not wait for the result. Does not raise an exception even if the job raised one. """ v = await self._redis.get(result_key_prefix + self.job_id) if v: return deserialize_result(v, deserializer=self._deserializer) else: return None async def status(self) -> JobStatus: """ Status of the job. """ async with self._redis.pipeline(transaction=True) as tr: tr.exists(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.exists(in_progress_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] is_complete, is_in_progress, score = await tr.execute() if is_complete: return JobStatus.complete elif is_in_progress: return JobStatus.in_progress elif score: return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued else: return JobStatus.not_found async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.5) -> bool: """ Abort the job. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever on None :param poll_delay: how often to poll redis for the job result :return: True if the job aborted properly, False otherwise """ job_info = await self.info() if job_info and job_info.score and job_info.score > timestamp_ms(): async with self._redis.pipeline(transaction=True) as tr: tr.zrem(self._queue_name, self.job_id) # type: ignore[unused-coroutine] tr.zadd(self._queue_name, {self.job_id: 1}) # type: ignore[unused-coroutine] await tr.execute() await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()}) try: await self.result(timeout=timeout, poll_delay=poll_delay) except asyncio.CancelledError: return True except ResultNotFound: # We do not know if the job was cancelled or not return False else: return False def __repr__(self) -> str: return f'<arq job {self.job_id}>' class SerializationError(RuntimeError): pass class DeserializationError(SerializationError): pass def serialize_job( function_name: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: Optional[int], enqueue_time_ms: int, *, serializer: Optional[Serializer] = None, ) -> bytes: data = {'t': job_try, 'f': function_name, 'a': args, 'k': kwargs, 'et': enqueue_time_ms} if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception as e: raise SerializationError(f'unable to serialize job "{function_name}"') from e def serialize_result( function: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: int, enqueue_time_ms: int, success: bool, result: Any, start_ms: int, finished_ms: int, ref: str, queue_name: str, *, serializer: Optional[Serializer] = None, ) -> Optional[bytes]: data = { 't': job_try, 'f': function, 'a': args, 'k': kwargs, 'et': enqueue_time_ms, 's': success, 'r': result, 'st': start_ms, 'ft': finished_ms, 'q': queue_name, } if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception: logger.warning('error serializing result of %s', ref, exc_info=True) # use string in case serialization fails again data.update(r='unable to serialize result', s=False) try: return 
serializer(data) except Exception: logger.critical('error serializing result of %s even after replacing result', ref, exc_info=True) return None def deserialize_job(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobDef: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobDef( function=d['f'], args=d['a'], kwargs=d['k'], job_try=d['t'], enqueue_time=ms_to_datetime(d['et']), score=None, ) except Exception as e: raise DeserializationError('unable to deserialize job') from e def
( r: bytes, *, deserializer: Optional[Deserializer] = None ) -> Tuple[str, Tuple[Any, ...], Dict[str, Any], int, int]: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return d['f'], d['a'], d['k'], d['t'], d['et'] except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_result(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobResult: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobResult( job_try=d['t'], function=d['f'], args=d['a'], kwargs=d['k'], enqueue_time=ms_to_datetime(d['et']), score=None, success=d['s'], result=d['r'], start_time=ms_to_datetime(d['st']), finish_time=ms_to_datetime(d['ft']), queue_name=d.get('q', '<unknown>'), ) except Exception as e: raise DeserializationError('unable to deserialize job result') from e
deserialize_job_raw
identifier_name
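serialize_job and deserialize_job_raw in the record above pack a job into a flat dict with short keys ('f' for the function name, 'a'/'k' for args and kwargs, 't' for the try counter, 'et' for the enqueue time in milliseconds) and pickle it by default. A standalone round-trip of that layout, outside the arq classes and with a hypothetical job name:

import pickle
import time

def serialize_job(function_name, args, kwargs, job_try, enqueue_time_ms):
    data = {'t': job_try, 'f': function_name, 'a': args, 'k': kwargs, 'et': enqueue_time_ms}
    return pickle.dumps(data)

def deserialize_job_raw(raw):
    d = pickle.loads(raw)
    return d['f'], d['a'], d['k'], d['t'], d['et']

raw = serialize_job('send_email', ('user@example.com',), {'retry': True}, 1, int(time.time() * 1000))
print(deserialize_job_raw(raw))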
jobs.py
import asyncio import logging import pickle import warnings from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Any, Callable, Dict, Optional, Tuple from redis.asyncio import Redis from .constants import abort_jobs_ss, default_queue_name, in_progress_key_prefix, job_key_prefix, result_key_prefix from .utils import ms_to_datetime, poll, timestamp_ms logger = logging.getLogger('arq.jobs') Serializer = Callable[[Dict[str, Any]], bytes] Deserializer = Callable[[bytes], Dict[str, Any]] class ResultNotFound(RuntimeError): pass class JobStatus(str, Enum): """ Enum of job statuses. """ #: job is in the queue, time it should be run not yet reached deferred = 'deferred' #: job is in the queue, time it should run has been reached queued = 'queued' #: job is in progress in_progress = 'in_progress' #: job is complete, result is available complete = 'complete' #: job not found in any way not_found = 'not_found' @dataclass class JobDef: function: str args: Tuple[Any, ...] kwargs: Dict[str, Any] job_try: int enqueue_time: datetime score: Optional[int] def __post_init__(self) -> None: if isinstance(self.score, float): self.score = int(self.score) @dataclass class JobResult(JobDef): success: bool result: Any start_time: datetime finish_time: datetime queue_name: str job_id: Optional[str] = None class Job: """ Holds data a reference to a job. """ __slots__ = 'job_id', '_redis', '_queue_name', '_deserializer' def __init__( self, job_id: str, redis: 'Redis[bytes]', _queue_name: str = default_queue_name, _deserializer: Optional[Deserializer] = None, ): self.job_id = job_id self._redis = redis self._queue_name = _queue_name self._deserializer = _deserializer async def result( self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: float = None ) -> Any: """ Get the result of the job or, if the job raised an exception, reraise it. This function waits for the result if it's not yet available and the job is present in the queue. Otherwise ``ResultNotFound`` is raised. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever :param poll_delay: how often to poll redis for the job result :param pole_delay: deprecated, use poll_delay instead """ if pole_delay is not None: warnings.warn( '"pole_delay" is deprecated, use the correct spelling "poll_delay" instead', DeprecationWarning ) poll_delay = pole_delay async for delay in poll(poll_delay): async with self._redis.pipeline(transaction=True) as tr: tr.get(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] v, s = await tr.execute() if v: info = deserialize_result(v, deserializer=self._deserializer) if info.success: return info.result elif isinstance(info.result, (Exception, asyncio.CancelledError)): raise info.result else: raise SerializationError(info.result) elif s is None: raise ResultNotFound( 'Not waiting for job result because the job is not in queue. ' 'Is the worker function configured to keep result?' ) if timeout is not None and delay > timeout: raise asyncio.TimeoutError() async def info(self) -> Optional[JobDef]: """ All information on a job, including its result if it's available, does not wait for the result. 
""" info: Optional[JobDef] = await self.result_info() if not info: v = await self._redis.get(job_key_prefix + self.job_id) if v: info = deserialize_job(v, deserializer=self._deserializer) if info: s = await self._redis.zscore(self._queue_name, self.job_id) info.score = None if s is None else int(s) return info async def result_info(self) -> Optional[JobResult]: """ Information about the job result if available, does not wait for the result. Does not raise an exception even if the job raised one. """ v = await self._redis.get(result_key_prefix + self.job_id) if v: return deserialize_result(v, deserializer=self._deserializer) else: return None async def status(self) -> JobStatus:
async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.5) -> bool: """ Abort the job. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever on None :param poll_delay: how often to poll redis for the job result :return: True if the job aborted properly, False otherwise """ job_info = await self.info() if job_info and job_info.score and job_info.score > timestamp_ms(): async with self._redis.pipeline(transaction=True) as tr: tr.zrem(self._queue_name, self.job_id) # type: ignore[unused-coroutine] tr.zadd(self._queue_name, {self.job_id: 1}) # type: ignore[unused-coroutine] await tr.execute() await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()}) try: await self.result(timeout=timeout, poll_delay=poll_delay) except asyncio.CancelledError: return True except ResultNotFound: # We do not know if the job was cancelled or not return False else: return False def __repr__(self) -> str: return f'<arq job {self.job_id}>' class SerializationError(RuntimeError): pass class DeserializationError(SerializationError): pass def serialize_job( function_name: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: Optional[int], enqueue_time_ms: int, *, serializer: Optional[Serializer] = None, ) -> bytes: data = {'t': job_try, 'f': function_name, 'a': args, 'k': kwargs, 'et': enqueue_time_ms} if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception as e: raise SerializationError(f'unable to serialize job "{function_name}"') from e def serialize_result( function: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: int, enqueue_time_ms: int, success: bool, result: Any, start_ms: int, finished_ms: int, ref: str, queue_name: str, *, serializer: Optional[Serializer] = None, ) -> Optional[bytes]: data = { 't': job_try, 'f': function, 'a': args, 'k': kwargs, 'et': enqueue_time_ms, 's': success, 'r': result, 'st': start_ms, 'ft': finished_ms, 'q': queue_name, } if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception: logger.warning('error serializing result of %s', ref, exc_info=True) # use string in case serialization fails again data.update(r='unable to serialize result', s=False) try: return serializer(data) except Exception: logger.critical('error serializing result of %s even after replacing result', ref, exc_info=True) return None def deserialize_job(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobDef: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobDef( function=d['f'], args=d['a'], kwargs=d['k'], job_try=d['t'], enqueue_time=ms_to_datetime(d['et']), score=None, ) except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_job_raw( r: bytes, *, deserializer: Optional[Deserializer] = None ) -> Tuple[str, Tuple[Any, ...], Dict[str, Any], int, int]: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return d['f'], d['a'], d['k'], d['t'], d['et'] except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_result(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobResult: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobResult( job_try=d['t'], function=d['f'], args=d['a'], kwargs=d['k'], enqueue_time=ms_to_datetime(d['et']), score=None, success=d['s'], result=d['r'], start_time=ms_to_datetime(d['st']), 
finish_time=ms_to_datetime(d['ft']), queue_name=d.get('q', '<unknown>'), ) except Exception as e: raise DeserializationError('unable to deserialize job result') from e
""" Status of the job. """ async with self._redis.pipeline(transaction=True) as tr: tr.exists(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.exists(in_progress_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] is_complete, is_in_progress, score = await tr.execute() if is_complete: return JobStatus.complete elif is_in_progress: return JobStatus.in_progress elif score: return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued else: return JobStatus.not_found
identifier_body
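The status() body filled in by this record turns three Redis lookups (result key exists, in-progress key exists, queue zscore) into a JobStatus. The decision itself is a pure function of those three values; here is a sketch with the Redis pipeline stubbed out as plain arguments, mirroring the branch order of the original.

from enum import Enum
import time

class JobStatus(str, Enum):
    deferred = 'deferred'
    queued = 'queued'
    in_progress = 'in_progress'
    complete = 'complete'
    not_found = 'not_found'

def timestamp_ms():
    return int(time.time() * 1000)

def resolve_status(is_complete, is_in_progress, score):
    """Mirror of Job.status(): result key wins, then the in-progress key, then the queue score."""
    if is_complete:
        return JobStatus.complete
    if is_in_progress:
        return JobStatus.in_progress
    if score:
        return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued
    return JobStatus.not_found

print(resolve_status(False, False, timestamp_ms() + 60_000))  # scheduled in the future -> deferred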
jobs.py
import asyncio import logging import pickle import warnings from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Any, Callable, Dict, Optional, Tuple from redis.asyncio import Redis from .constants import abort_jobs_ss, default_queue_name, in_progress_key_prefix, job_key_prefix, result_key_prefix from .utils import ms_to_datetime, poll, timestamp_ms logger = logging.getLogger('arq.jobs') Serializer = Callable[[Dict[str, Any]], bytes] Deserializer = Callable[[bytes], Dict[str, Any]] class ResultNotFound(RuntimeError): pass class JobStatus(str, Enum): """ Enum of job statuses. """ #: job is in the queue, time it should be run not yet reached deferred = 'deferred' #: job is in the queue, time it should run has been reached queued = 'queued' #: job is in progress in_progress = 'in_progress' #: job is complete, result is available complete = 'complete' #: job not found in any way not_found = 'not_found' @dataclass class JobDef: function: str args: Tuple[Any, ...] kwargs: Dict[str, Any] job_try: int enqueue_time: datetime score: Optional[int] def __post_init__(self) -> None: if isinstance(self.score, float): self.score = int(self.score) @dataclass class JobResult(JobDef): success: bool result: Any start_time: datetime finish_time: datetime queue_name: str job_id: Optional[str] = None class Job: """ Holds data a reference to a job. """ __slots__ = 'job_id', '_redis', '_queue_name', '_deserializer' def __init__( self, job_id: str,
): self.job_id = job_id self._redis = redis self._queue_name = _queue_name self._deserializer = _deserializer async def result( self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: float = None ) -> Any: """ Get the result of the job or, if the job raised an exception, reraise it. This function waits for the result if it's not yet available and the job is present in the queue. Otherwise ``ResultNotFound`` is raised. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever :param poll_delay: how often to poll redis for the job result :param pole_delay: deprecated, use poll_delay instead """ if pole_delay is not None: warnings.warn( '"pole_delay" is deprecated, use the correct spelling "poll_delay" instead', DeprecationWarning ) poll_delay = pole_delay async for delay in poll(poll_delay): async with self._redis.pipeline(transaction=True) as tr: tr.get(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] v, s = await tr.execute() if v: info = deserialize_result(v, deserializer=self._deserializer) if info.success: return info.result elif isinstance(info.result, (Exception, asyncio.CancelledError)): raise info.result else: raise SerializationError(info.result) elif s is None: raise ResultNotFound( 'Not waiting for job result because the job is not in queue. ' 'Is the worker function configured to keep result?' ) if timeout is not None and delay > timeout: raise asyncio.TimeoutError() async def info(self) -> Optional[JobDef]: """ All information on a job, including its result if it's available, does not wait for the result. """ info: Optional[JobDef] = await self.result_info() if not info: v = await self._redis.get(job_key_prefix + self.job_id) if v: info = deserialize_job(v, deserializer=self._deserializer) if info: s = await self._redis.zscore(self._queue_name, self.job_id) info.score = None if s is None else int(s) return info async def result_info(self) -> Optional[JobResult]: """ Information about the job result if available, does not wait for the result. Does not raise an exception even if the job raised one. """ v = await self._redis.get(result_key_prefix + self.job_id) if v: return deserialize_result(v, deserializer=self._deserializer) else: return None async def status(self) -> JobStatus: """ Status of the job. """ async with self._redis.pipeline(transaction=True) as tr: tr.exists(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.exists(in_progress_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] is_complete, is_in_progress, score = await tr.execute() if is_complete: return JobStatus.complete elif is_in_progress: return JobStatus.in_progress elif score: return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued else: return JobStatus.not_found async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.5) -> bool: """ Abort the job. 
:param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever on None :param poll_delay: how often to poll redis for the job result :return: True if the job aborted properly, False otherwise """ job_info = await self.info() if job_info and job_info.score and job_info.score > timestamp_ms(): async with self._redis.pipeline(transaction=True) as tr: tr.zrem(self._queue_name, self.job_id) # type: ignore[unused-coroutine] tr.zadd(self._queue_name, {self.job_id: 1}) # type: ignore[unused-coroutine] await tr.execute() await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()}) try: await self.result(timeout=timeout, poll_delay=poll_delay) except asyncio.CancelledError: return True except ResultNotFound: # We do not know if the job was cancelled or not return False else: return False def __repr__(self) -> str: return f'<arq job {self.job_id}>' class SerializationError(RuntimeError): pass class DeserializationError(SerializationError): pass def serialize_job( function_name: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: Optional[int], enqueue_time_ms: int, *, serializer: Optional[Serializer] = None, ) -> bytes: data = {'t': job_try, 'f': function_name, 'a': args, 'k': kwargs, 'et': enqueue_time_ms} if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception as e: raise SerializationError(f'unable to serialize job "{function_name}"') from e def serialize_result( function: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: int, enqueue_time_ms: int, success: bool, result: Any, start_ms: int, finished_ms: int, ref: str, queue_name: str, *, serializer: Optional[Serializer] = None, ) -> Optional[bytes]: data = { 't': job_try, 'f': function, 'a': args, 'k': kwargs, 'et': enqueue_time_ms, 's': success, 'r': result, 'st': start_ms, 'ft': finished_ms, 'q': queue_name, } if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception: logger.warning('error serializing result of %s', ref, exc_info=True) # use string in case serialization fails again data.update(r='unable to serialize result', s=False) try: return serializer(data) except Exception: logger.critical('error serializing result of %s even after replacing result', ref, exc_info=True) return None def deserialize_job(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobDef: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobDef( function=d['f'], args=d['a'], kwargs=d['k'], job_try=d['t'], enqueue_time=ms_to_datetime(d['et']), score=None, ) except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_job_raw( r: bytes, *, deserializer: Optional[Deserializer] = None ) -> Tuple[str, Tuple[Any, ...], Dict[str, Any], int, int]: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return d['f'], d['a'], d['k'], d['t'], d['et'] except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_result(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobResult: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobResult( job_try=d['t'], function=d['f'], args=d['a'], kwargs=d['k'], enqueue_time=ms_to_datetime(d['et']), score=None, success=d['s'], result=d['r'], start_time=ms_to_datetime(d['st']), finish_time=ms_to_datetime(d['ft']), queue_name=d.get('q', '<unknown>'), ) except Exception as e: raise DeserializationError('unable 
to deserialize job result') from e
redis: 'Redis[bytes]', _queue_name: str = default_queue_name, _deserializer: Optional[Deserializer] = None,
random_line_split
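Job.result() in this record waits for a value by iterating an async poll() helper imported from .utils, which is not shown in the file. The sketch below assumes that helper simply yields the elapsed time between sleeps; the poll() stand-in and wait_for() wrapper are illustrative names, not arq API.

import asyncio

async def poll(step=0.5):
    # Assumed behaviour of arq's poll(): yield elapsed seconds, then sleep for `step`.
    loop = asyncio.get_running_loop()
    start = loop.time()
    while True:
        yield loop.time() - start
        await asyncio.sleep(step)

async def wait_for(check, timeout=None, poll_delay=0.1):
    """Same shape as Job.result(): poll until check() returns a value or the timeout elapses."""
    async for delay in poll(poll_delay):
        value = check()
        if value is not None:
            return value
        if timeout is not None and delay > timeout:
            raise asyncio.TimeoutError()

async def main():
    answers = iter([None, None, 'done'])  # pretend the result key appears on the third poll
    print(await wait_for(lambda: next(answers), timeout=5))

asyncio.run(main())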
jobs.py
import asyncio import logging import pickle import warnings from dataclasses import dataclass from datetime import datetime from enum import Enum from typing import Any, Callable, Dict, Optional, Tuple from redis.asyncio import Redis from .constants import abort_jobs_ss, default_queue_name, in_progress_key_prefix, job_key_prefix, result_key_prefix from .utils import ms_to_datetime, poll, timestamp_ms logger = logging.getLogger('arq.jobs') Serializer = Callable[[Dict[str, Any]], bytes] Deserializer = Callable[[bytes], Dict[str, Any]] class ResultNotFound(RuntimeError): pass class JobStatus(str, Enum): """ Enum of job statuses. """ #: job is in the queue, time it should be run not yet reached deferred = 'deferred' #: job is in the queue, time it should run has been reached queued = 'queued' #: job is in progress in_progress = 'in_progress' #: job is complete, result is available complete = 'complete' #: job not found in any way not_found = 'not_found' @dataclass class JobDef: function: str args: Tuple[Any, ...] kwargs: Dict[str, Any] job_try: int enqueue_time: datetime score: Optional[int] def __post_init__(self) -> None: if isinstance(self.score, float): self.score = int(self.score) @dataclass class JobResult(JobDef): success: bool result: Any start_time: datetime finish_time: datetime queue_name: str job_id: Optional[str] = None class Job: """ Holds data a reference to a job. """ __slots__ = 'job_id', '_redis', '_queue_name', '_deserializer' def __init__( self, job_id: str, redis: 'Redis[bytes]', _queue_name: str = default_queue_name, _deserializer: Optional[Deserializer] = None, ): self.job_id = job_id self._redis = redis self._queue_name = _queue_name self._deserializer = _deserializer async def result( self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: float = None ) -> Any: """ Get the result of the job or, if the job raised an exception, reraise it. This function waits for the result if it's not yet available and the job is present in the queue. Otherwise ``ResultNotFound`` is raised. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever :param poll_delay: how often to poll redis for the job result :param pole_delay: deprecated, use poll_delay instead """ if pole_delay is not None: warnings.warn( '"pole_delay" is deprecated, use the correct spelling "poll_delay" instead', DeprecationWarning ) poll_delay = pole_delay async for delay in poll(poll_delay): async with self._redis.pipeline(transaction=True) as tr: tr.get(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] v, s = await tr.execute() if v:
elif s is None: raise ResultNotFound( 'Not waiting for job result because the job is not in queue. ' 'Is the worker function configured to keep result?' ) if timeout is not None and delay > timeout: raise asyncio.TimeoutError() async def info(self) -> Optional[JobDef]: """ All information on a job, including its result if it's available, does not wait for the result. """ info: Optional[JobDef] = await self.result_info() if not info: v = await self._redis.get(job_key_prefix + self.job_id) if v: info = deserialize_job(v, deserializer=self._deserializer) if info: s = await self._redis.zscore(self._queue_name, self.job_id) info.score = None if s is None else int(s) return info async def result_info(self) -> Optional[JobResult]: """ Information about the job result if available, does not wait for the result. Does not raise an exception even if the job raised one. """ v = await self._redis.get(result_key_prefix + self.job_id) if v: return deserialize_result(v, deserializer=self._deserializer) else: return None async def status(self) -> JobStatus: """ Status of the job. """ async with self._redis.pipeline(transaction=True) as tr: tr.exists(result_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.exists(in_progress_key_prefix + self.job_id) # type: ignore[unused-coroutine] tr.zscore(self._queue_name, self.job_id) # type: ignore[unused-coroutine] is_complete, is_in_progress, score = await tr.execute() if is_complete: return JobStatus.complete elif is_in_progress: return JobStatus.in_progress elif score: return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued else: return JobStatus.not_found async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.5) -> bool: """ Abort the job. :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever on None :param poll_delay: how often to poll redis for the job result :return: True if the job aborted properly, False otherwise """ job_info = await self.info() if job_info and job_info.score and job_info.score > timestamp_ms(): async with self._redis.pipeline(transaction=True) as tr: tr.zrem(self._queue_name, self.job_id) # type: ignore[unused-coroutine] tr.zadd(self._queue_name, {self.job_id: 1}) # type: ignore[unused-coroutine] await tr.execute() await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()}) try: await self.result(timeout=timeout, poll_delay=poll_delay) except asyncio.CancelledError: return True except ResultNotFound: # We do not know if the job was cancelled or not return False else: return False def __repr__(self) -> str: return f'<arq job {self.job_id}>' class SerializationError(RuntimeError): pass class DeserializationError(SerializationError): pass def serialize_job( function_name: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: Optional[int], enqueue_time_ms: int, *, serializer: Optional[Serializer] = None, ) -> bytes: data = {'t': job_try, 'f': function_name, 'a': args, 'k': kwargs, 'et': enqueue_time_ms} if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception as e: raise SerializationError(f'unable to serialize job "{function_name}"') from e def serialize_result( function: str, args: Tuple[Any, ...], kwargs: Dict[str, Any], job_try: int, enqueue_time_ms: int, success: bool, result: Any, start_ms: int, finished_ms: int, ref: str, queue_name: str, *, serializer: Optional[Serializer] = None, ) -> Optional[bytes]: data = { 't': job_try, 'f': function, 'a': args, 'k': kwargs, 'et': 
enqueue_time_ms, 's': success, 'r': result, 'st': start_ms, 'ft': finished_ms, 'q': queue_name, } if serializer is None: serializer = pickle.dumps try: return serializer(data) except Exception: logger.warning('error serializing result of %s', ref, exc_info=True) # use string in case serialization fails again data.update(r='unable to serialize result', s=False) try: return serializer(data) except Exception: logger.critical('error serializing result of %s even after replacing result', ref, exc_info=True) return None def deserialize_job(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobDef: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobDef( function=d['f'], args=d['a'], kwargs=d['k'], job_try=d['t'], enqueue_time=ms_to_datetime(d['et']), score=None, ) except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_job_raw( r: bytes, *, deserializer: Optional[Deserializer] = None ) -> Tuple[str, Tuple[Any, ...], Dict[str, Any], int, int]: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return d['f'], d['a'], d['k'], d['t'], d['et'] except Exception as e: raise DeserializationError('unable to deserialize job') from e def deserialize_result(r: bytes, *, deserializer: Optional[Deserializer] = None) -> JobResult: if deserializer is None: deserializer = pickle.loads try: d = deserializer(r) return JobResult( job_try=d['t'], function=d['f'], args=d['a'], kwargs=d['k'], enqueue_time=ms_to_datetime(d['et']), score=None, success=d['s'], result=d['r'], start_time=ms_to_datetime(d['st']), finish_time=ms_to_datetime(d['ft']), queue_name=d.get('q', '<unknown>'), ) except Exception as e: raise DeserializationError('unable to deserialize job result') from e
info = deserialize_result(v, deserializer=self._deserializer) if info.success: return info.result elif isinstance(info.result, (Exception, asyncio.CancelledError)): raise info.result else: raise SerializationError(info.result)
conditional_block
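serialize_result in the record above falls back to replacing an unserializable result with a fixed string and retrying, so the job metadata still reaches Redis. A trimmed sketch of that two-attempt pattern, using pickle and a deliberately unpicklable result; the dict keys follow the source, the payload values are made up.

import pickle

def serialize_result(data, serializer=pickle.dumps):
    """First try the real result; on failure, store a placeholder string and mark success=False."""
    try:
        return serializer(data)
    except Exception:
        data.update(r='unable to serialize result', s=False)
        try:
            return serializer(data)
        except Exception:
            return None  # give up, mirroring the logger.critical branch

payload = {'f': 'resize_image', 'r': lambda x: x, 's': True}  # a lambda cannot be pickled
raw = serialize_result(payload)
print(pickle.loads(raw)['r'])  # -> 'unable to serialize result'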
machineconfig.go
package machineconfig import ( "bytes" "encoding/base64" "encoding/json" "fmt" "io/ioutil" "path/filepath" "text/template" "github.com/openshift-kni/performance-addon-operators/build/assets" "github.com/coreos/go-systemd/unit" igntypes "github.com/coreos/ignition/v2/config/v3_2/types" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/pointer" performancev2 "github.com/openshift-kni/performance-addon-operators/api/v2" "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components" profilecomponent "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components/profile" machineconfigv1 "github.com/openshift/machine-config-operator/pkg/apis/machineconfiguration.openshift.io/v1" ) const ( defaultIgnitionVersion = "3.2.0" defaultIgnitionContentSource = "data:text/plain;charset=utf-8;base64" ) const ( // MCKernelRT is the value of the kernel setting in MachineConfig for the RT kernel MCKernelRT = "realtime" // MCKernelDefault is the value of the kernel setting in MachineConfig for the default kernel MCKernelDefault = "default" // HighPerformanceRuntime contains the name of the high-performance runtime HighPerformanceRuntime = "high-performance" bashScriptsDir = "/usr/local/bin" crioConfd = "/etc/crio/crio.conf.d" crioRuntimesConfig = "99-runtimes.conf" // OCIHooksConfigDir is the default directory for the OCI hooks OCIHooksConfigDir = "/etc/containers/oci/hooks.d" // OCIHooksConfig file contains the low latency hooks configuration OCIHooksConfig = "99-low-latency-hooks.json" ociTemplateRPSMask = "RPSMask" udevRulesDir = "/etc/udev/rules.d" udevRpsRules = "99-netdev-rps.rules" // scripts hugepagesAllocation = "hugepages-allocation" ociHooks = "low-latency-hooks" setRPSMask = "set-rps-mask" ) const ( systemdSectionUnit = "Unit" systemdSectionService = "Service" systemdSectionInstall = "Install" systemdDescription = "Description" systemdBefore = "Before" systemdEnvironment = "Environment" systemdType = "Type" systemdRemainAfterExit = "RemainAfterExit" systemdExecStart = "ExecStart" systemdWantedBy = "WantedBy" ) const ( systemdServiceKubelet = "kubelet.service" systemdServiceTypeOneshot = "oneshot" systemdTargetMultiUser = "multi-user.target" systemdTrue = "true" ) const ( environmentHugepagesSize = "HUGEPAGES_SIZE" environmentHugepagesCount = "HUGEPAGES_COUNT" environmentNUMANode = "NUMA_NODE" ) const ( templateReservedCpus = "ReservedCpus" ) // New returns new machine configuration object for performance sensitive workloads func New(profile *performancev2.PerformanceProfile) (*machineconfigv1.MachineConfig, error)
// GetMachineConfigName generates machine config name from the performance profile func GetMachineConfigName(profile *performancev2.PerformanceProfile) string { name := components.GetComponentName(profile.Name, components.ComponentNamePrefix) return fmt.Sprintf("50-%s", name) } func getIgnitionConfig(profile *performancev2.PerformanceProfile) (*igntypes.Config, error) { ignitionConfig := &igntypes.Config{ Ignition: igntypes.Ignition{ Version: defaultIgnitionVersion, }, Storage: igntypes.Storage{ Files: []igntypes.File{}, }, } // add script files under the node /usr/local/bin directory mode := 0700 for _, script := range []string{hugepagesAllocation, ociHooks, setRPSMask} { dst := GetBashScriptPath(script) content, err := assets.Scripts.ReadFile(fmt.Sprintf("scripts/%s.sh", script)) if err != nil { return nil, err } AddContent(ignitionConfig, content, dst, &mode) } // add crio config snippet under the node /etc/crio/crio.conf.d/ directory crioConfdRuntimesMode := 0644 crioConfigSnippetContent, err := renderCrioConfigSnippet(profile, filepath.Join("configs", crioRuntimesConfig)) if err != nil { return nil, err } crioConfSnippetDst := filepath.Join(crioConfd, crioRuntimesConfig) AddContent(ignitionConfig, crioConfigSnippetContent, crioConfSnippetDst, &crioConfdRuntimesMode) // add crio hooks config under the node cri-o hook directory crioHooksConfigsMode := 0644 ociHooksConfigContent, err := GetOCIHooksConfigContent(OCIHooksConfig, profile) if err != nil { return nil, err } ociHookConfigDst := filepath.Join(OCIHooksConfigDir, OCIHooksConfig) AddContent(ignitionConfig, ociHooksConfigContent, ociHookConfigDst, &crioHooksConfigsMode) // add rps udev rule rpsRulesMode := 0644 rpsRulesContent, err := assets.Configs.ReadFile(filepath.Join("configs", udevRpsRules)) if err != nil { return nil, err } rpsRulesDst := filepath.Join(udevRulesDir, udevRpsRules) AddContent(ignitionConfig, rpsRulesContent, rpsRulesDst, &rpsRulesMode) if profile.Spec.HugePages != nil { for _, page := range profile.Spec.HugePages.Pages { // we already allocated non NUMA specific hugepages via kernel arguments if page.Node == nil { continue } hugepagesSize, err := GetHugepagesSizeKilobytes(page.Size) if err != nil { return nil, err } hugepagesService, err := GetSystemdContent(GetHugepagesAllocationUnitOptions( hugepagesSize, page.Count, *page.Node, )) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &hugepagesService, Enabled: pointer.BoolPtr(true), Name: GetSystemdService(fmt.Sprintf("%s-%skB-NUMA%d", hugepagesAllocation, hugepagesSize, *page.Node)), }) } } if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err := components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } rpsService, err := GetSystemdContent(getRPSUnitOptions(rpsMask)) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &rpsService, Name: GetSystemdService("update-rps@"), }) } return ignitionConfig, nil } //GetBashScriptPath returns the script path containing teh directory and the script name func GetBashScriptPath(scriptName string) string { return fmt.Sprintf("%s/%s.sh", bashScriptsDir, scriptName) } func getSystemdEnvironment(key string, value string) string { return fmt.Sprintf("%s=%s", key, value) } //GetSystemdService returns the service name in systemd func GetSystemdService(serviceName string) string { return fmt.Sprintf("%s.service", 
serviceName) } //GetSystemdContent get systemd content from list of unit options func GetSystemdContent(options []*unit.UnitOption) (string, error) { outReader := unit.Serialize(options) outBytes, err := ioutil.ReadAll(outReader) if err != nil { return "", err } return string(outBytes), nil } // GetOCIHooksConfigContent reads and returns the content of the OCI hook file func GetOCIHooksConfigContent(configFile string, profile *performancev2.PerformanceProfile) ([]byte, error) { ociHookConfigTemplate, err := template.ParseFS(assets.Configs, filepath.Join("configs", configFile)) if err != nil { return nil, err } rpsMask := "0" // RPS disabled if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err = components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } } outContent := &bytes.Buffer{} templateArgs := map[string]string{ociTemplateRPSMask: rpsMask} if err := ociHookConfigTemplate.Execute(outContent, templateArgs); err != nil { return nil, err } return outContent.Bytes(), nil } // GetHugepagesSizeKilobytes retruns hugepages size in kilobytes func GetHugepagesSizeKilobytes(hugepagesSize performancev2.HugePageSize) (string, error) { switch hugepagesSize { case "1G": return "1048576", nil case "2M": return "2048", nil default: return "", fmt.Errorf("can not convert size %q to kilobytes", hugepagesSize) } } //GetHugepagesAllocationUnitOptions returns list of unit options based on the settings of the hugepage func GetHugepagesAllocationUnitOptions(hugepagesSize string, hugepagesCount int32, numaNode int32) []*unit.UnitOption { return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, fmt.Sprintf("Hugepages-%skB allocation on the node %d", hugepagesSize, numaNode)), // Before unit.NewUnitOption(systemdSectionUnit, systemdBefore, systemdServiceKubelet), // [Service] // Environment unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesCount, fmt.Sprint(hugepagesCount))), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesSize, hugepagesSize)), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentNUMANode, fmt.Sprint(numaNode))), // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // RemainAfterExit unit.NewUnitOption(systemdSectionService, systemdRemainAfterExit, systemdTrue), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, GetBashScriptPath(hugepagesAllocation)), // [Install] // WantedBy unit.NewUnitOption(systemdSectionInstall, systemdWantedBy, systemdTargetMultiUser), } } func getRPSUnitOptions(rpsMask string) []*unit.UnitOption { cmd := fmt.Sprintf("%s %%i %s", GetBashScriptPath(setRPSMask), rpsMask) return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, "Sets network devices RPS mask"), // [Service] // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, cmd), } } //AddContent appends more content to the ignition configuration func AddContent(ignitionConfig *igntypes.Config, content []byte, dst string, mode *int) { contentBase64 := base64.StdEncoding.EncodeToString(content) ignitionConfig.Storage.Files = append(ignitionConfig.Storage.Files, igntypes.File{ Node: igntypes.Node{ Path: dst, }, FileEmbedded1: 
igntypes.FileEmbedded1{ Contents: igntypes.Resource{ Source: pointer.StringPtr(fmt.Sprintf("%s,%s", defaultIgnitionContentSource, contentBase64)), }, Mode: mode, }, }) } func renderCrioConfigSnippet(profile *performancev2.PerformanceProfile, src string) ([]byte, error) { templateArgs := make(map[string]string) if profile.Spec.CPU.Reserved != nil { templateArgs[templateReservedCpus] = string(*profile.Spec.CPU.Reserved) } profileTemplate, err := template.ParseFS(assets.Configs, src) if err != nil { return nil, err } crioConfig := &bytes.Buffer{} if err := profileTemplate.Execute(crioConfig, templateArgs); err != nil { return nil, err } return crioConfig.Bytes(), nil }
{ name := GetMachineConfigName(profile) mc := &machineconfigv1.MachineConfig{ TypeMeta: metav1.TypeMeta{ APIVersion: machineconfigv1.GroupVersion.String(), Kind: "MachineConfig", }, ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: profilecomponent.GetMachineConfigLabel(profile), }, Spec: machineconfigv1.MachineConfigSpec{}, } ignitionConfig, err := getIgnitionConfig(profile) if err != nil { return nil, err } rawIgnition, err := json.Marshal(ignitionConfig) if err != nil { return nil, err } mc.Spec.Config = runtime.RawExtension{Raw: rawIgnition} enableRTKernel := profile.Spec.RealTimeKernel != nil && profile.Spec.RealTimeKernel.Enabled != nil && *profile.Spec.RealTimeKernel.Enabled if enableRTKernel { mc.Spec.KernelType = MCKernelRT } else { mc.Spec.KernelType = MCKernelDefault } return mc, nil }
identifier_body
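The rows above center on building systemd units from go-systemd unit options and rendering them with unit.Serialize, as GetSystemdContent does. A minimal, self-contained sketch of that pattern follows; the unit name and values are placeholders, not taken from the operator.

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/coreos/go-systemd/unit"
)

func main() {
	// Build a small oneshot service, mirroring the shape used for the
	// hugepages-allocation unit in the rows above.
	options := []*unit.UnitOption{
		unit.NewUnitOption("Unit", "Description", "Example oneshot service"),
		unit.NewUnitOption("Service", "Type", "oneshot"),
		unit.NewUnitOption("Service", "ExecStart", "/usr/local/bin/example.sh"),
		unit.NewUnitOption("Install", "WantedBy", "multi-user.target"),
	}

	// unit.Serialize returns an io.Reader over the rendered unit file text.
	out, err := ioutil.ReadAll(unit.Serialize(options))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}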
machineconfig.go
package machineconfig import ( "bytes" "encoding/base64" "encoding/json" "fmt" "io/ioutil" "path/filepath" "text/template" "github.com/openshift-kni/performance-addon-operators/build/assets" "github.com/coreos/go-systemd/unit" igntypes "github.com/coreos/ignition/v2/config/v3_2/types" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/pointer" performancev2 "github.com/openshift-kni/performance-addon-operators/api/v2" "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components" profilecomponent "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components/profile" machineconfigv1 "github.com/openshift/machine-config-operator/pkg/apis/machineconfiguration.openshift.io/v1" ) const ( defaultIgnitionVersion = "3.2.0" defaultIgnitionContentSource = "data:text/plain;charset=utf-8;base64" ) const ( // MCKernelRT is the value of the kernel setting in MachineConfig for the RT kernel MCKernelRT = "realtime" // MCKernelDefault is the value of the kernel setting in MachineConfig for the default kernel MCKernelDefault = "default" // HighPerformanceRuntime contains the name of the high-performance runtime HighPerformanceRuntime = "high-performance" bashScriptsDir = "/usr/local/bin" crioConfd = "/etc/crio/crio.conf.d" crioRuntimesConfig = "99-runtimes.conf" // OCIHooksConfigDir is the default directory for the OCI hooks OCIHooksConfigDir = "/etc/containers/oci/hooks.d" // OCIHooksConfig file contains the low latency hooks configuration OCIHooksConfig = "99-low-latency-hooks.json" ociTemplateRPSMask = "RPSMask" udevRulesDir = "/etc/udev/rules.d" udevRpsRules = "99-netdev-rps.rules" // scripts hugepagesAllocation = "hugepages-allocation" ociHooks = "low-latency-hooks" setRPSMask = "set-rps-mask" ) const ( systemdSectionUnit = "Unit" systemdSectionService = "Service" systemdSectionInstall = "Install" systemdDescription = "Description" systemdBefore = "Before" systemdEnvironment = "Environment" systemdType = "Type" systemdRemainAfterExit = "RemainAfterExit" systemdExecStart = "ExecStart" systemdWantedBy = "WantedBy" ) const ( systemdServiceKubelet = "kubelet.service" systemdServiceTypeOneshot = "oneshot" systemdTargetMultiUser = "multi-user.target" systemdTrue = "true" ) const ( environmentHugepagesSize = "HUGEPAGES_SIZE" environmentHugepagesCount = "HUGEPAGES_COUNT" environmentNUMANode = "NUMA_NODE" ) const ( templateReservedCpus = "ReservedCpus" ) // New returns new machine configuration object for performance sensitive workloads func New(profile *performancev2.PerformanceProfile) (*machineconfigv1.MachineConfig, error) { name := GetMachineConfigName(profile) mc := &machineconfigv1.MachineConfig{ TypeMeta: metav1.TypeMeta{ APIVersion: machineconfigv1.GroupVersion.String(), Kind: "MachineConfig", }, ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: profilecomponent.GetMachineConfigLabel(profile), }, Spec: machineconfigv1.MachineConfigSpec{}, } ignitionConfig, err := getIgnitionConfig(profile) if err != nil { return nil, err } rawIgnition, err := json.Marshal(ignitionConfig) if err != nil { return nil, err } mc.Spec.Config = runtime.RawExtension{Raw: rawIgnition} enableRTKernel := profile.Spec.RealTimeKernel != nil && profile.Spec.RealTimeKernel.Enabled != nil && *profile.Spec.RealTimeKernel.Enabled if enableRTKernel { mc.Spec.KernelType = MCKernelRT } else { mc.Spec.KernelType = MCKernelDefault } return mc, nil } // GetMachineConfigName generates machine config name from the performance 
profile func GetMachineConfigName(profile *performancev2.PerformanceProfile) string { name := components.GetComponentName(profile.Name, components.ComponentNamePrefix) return fmt.Sprintf("50-%s", name) } func getIgnitionConfig(profile *performancev2.PerformanceProfile) (*igntypes.Config, error) { ignitionConfig := &igntypes.Config{ Ignition: igntypes.Ignition{ Version: defaultIgnitionVersion, }, Storage: igntypes.Storage{ Files: []igntypes.File{}, }, } // add script files under the node /usr/local/bin directory mode := 0700 for _, script := range []string{hugepagesAllocation, ociHooks, setRPSMask} { dst := GetBashScriptPath(script) content, err := assets.Scripts.ReadFile(fmt.Sprintf("scripts/%s.sh", script)) if err != nil { return nil, err } AddContent(ignitionConfig, content, dst, &mode) } // add crio config snippet under the node /etc/crio/crio.conf.d/ directory crioConfdRuntimesMode := 0644 crioConfigSnippetContent, err := renderCrioConfigSnippet(profile, filepath.Join("configs", crioRuntimesConfig)) if err != nil { return nil, err } crioConfSnippetDst := filepath.Join(crioConfd, crioRuntimesConfig) AddContent(ignitionConfig, crioConfigSnippetContent, crioConfSnippetDst, &crioConfdRuntimesMode) // add crio hooks config under the node cri-o hook directory crioHooksConfigsMode := 0644 ociHooksConfigContent, err := GetOCIHooksConfigContent(OCIHooksConfig, profile) if err != nil { return nil, err } ociHookConfigDst := filepath.Join(OCIHooksConfigDir, OCIHooksConfig) AddContent(ignitionConfig, ociHooksConfigContent, ociHookConfigDst, &crioHooksConfigsMode) // add rps udev rule rpsRulesMode := 0644 rpsRulesContent, err := assets.Configs.ReadFile(filepath.Join("configs", udevRpsRules)) if err != nil { return nil, err } rpsRulesDst := filepath.Join(udevRulesDir, udevRpsRules) AddContent(ignitionConfig, rpsRulesContent, rpsRulesDst, &rpsRulesMode) if profile.Spec.HugePages != nil { for _, page := range profile.Spec.HugePages.Pages { // we already allocated non NUMA specific hugepages via kernel arguments if page.Node == nil { continue } hugepagesSize, err := GetHugepagesSizeKilobytes(page.Size) if err != nil { return nil, err } hugepagesService, err := GetSystemdContent(GetHugepagesAllocationUnitOptions( hugepagesSize, page.Count, *page.Node, )) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &hugepagesService, Enabled: pointer.BoolPtr(true), Name: GetSystemdService(fmt.Sprintf("%s-%skB-NUMA%d", hugepagesAllocation, hugepagesSize, *page.Node)), }) } } if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err := components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } rpsService, err := GetSystemdContent(getRPSUnitOptions(rpsMask)) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &rpsService, Name: GetSystemdService("update-rps@"), }) } return ignitionConfig, nil } //GetBashScriptPath returns the script path containing teh directory and the script name func GetBashScriptPath(scriptName string) string { return fmt.Sprintf("%s/%s.sh", bashScriptsDir, scriptName) } func getSystemdEnvironment(key string, value string) string { return fmt.Sprintf("%s=%s", key, value) } //GetSystemdService returns the service name in systemd func GetSystemdService(serviceName string) string { return fmt.Sprintf("%s.service", serviceName) } //GetSystemdContent get systemd content from list of unit options 
func GetSystemdContent(options []*unit.UnitOption) (string, error) { outReader := unit.Serialize(options) outBytes, err := ioutil.ReadAll(outReader) if err != nil { return "", err } return string(outBytes), nil } // GetOCIHooksConfigContent reads and returns the content of the OCI hook file func GetOCIHooksConfigContent(configFile string, profile *performancev2.PerformanceProfile) ([]byte, error) { ociHookConfigTemplate, err := template.ParseFS(assets.Configs, filepath.Join("configs", configFile)) if err != nil { return nil, err } rpsMask := "0" // RPS disabled if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err = components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } } outContent := &bytes.Buffer{} templateArgs := map[string]string{ociTemplateRPSMask: rpsMask} if err := ociHookConfigTemplate.Execute(outContent, templateArgs); err != nil { return nil, err } return outContent.Bytes(), nil } // GetHugepagesSizeKilobytes retruns hugepages size in kilobytes func GetHugepagesSizeKilobytes(hugepagesSize performancev2.HugePageSize) (string, error) { switch hugepagesSize { case "1G": return "1048576", nil case "2M": return "2048", nil default: return "", fmt.Errorf("can not convert size %q to kilobytes", hugepagesSize) } } //GetHugepagesAllocationUnitOptions returns list of unit options based on the settings of the hugepage func
(hugepagesSize string, hugepagesCount int32, numaNode int32) []*unit.UnitOption { return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, fmt.Sprintf("Hugepages-%skB allocation on the node %d", hugepagesSize, numaNode)), // Before unit.NewUnitOption(systemdSectionUnit, systemdBefore, systemdServiceKubelet), // [Service] // Environment unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesCount, fmt.Sprint(hugepagesCount))), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesSize, hugepagesSize)), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentNUMANode, fmt.Sprint(numaNode))), // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // RemainAfterExit unit.NewUnitOption(systemdSectionService, systemdRemainAfterExit, systemdTrue), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, GetBashScriptPath(hugepagesAllocation)), // [Install] // WantedBy unit.NewUnitOption(systemdSectionInstall, systemdWantedBy, systemdTargetMultiUser), } } func getRPSUnitOptions(rpsMask string) []*unit.UnitOption { cmd := fmt.Sprintf("%s %%i %s", GetBashScriptPath(setRPSMask), rpsMask) return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, "Sets network devices RPS mask"), // [Service] // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, cmd), } } //AddContent appends more content to the ignition configuration func AddContent(ignitionConfig *igntypes.Config, content []byte, dst string, mode *int) { contentBase64 := base64.StdEncoding.EncodeToString(content) ignitionConfig.Storage.Files = append(ignitionConfig.Storage.Files, igntypes.File{ Node: igntypes.Node{ Path: dst, }, FileEmbedded1: igntypes.FileEmbedded1{ Contents: igntypes.Resource{ Source: pointer.StringPtr(fmt.Sprintf("%s,%s", defaultIgnitionContentSource, contentBase64)), }, Mode: mode, }, }) } func renderCrioConfigSnippet(profile *performancev2.PerformanceProfile, src string) ([]byte, error) { templateArgs := make(map[string]string) if profile.Spec.CPU.Reserved != nil { templateArgs[templateReservedCpus] = string(*profile.Spec.CPU.Reserved) } profileTemplate, err := template.ParseFS(assets.Configs, src) if err != nil { return nil, err } crioConfig := &bytes.Buffer{} if err := profileTemplate.Execute(crioConfig, templateArgs); err != nil { return nil, err } return crioConfig.Bytes(), nil }
GetHugepagesAllocationUnitOptions
identifier_name
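The identifier masked in this row, GetHugepagesAllocationUnitOptions, is paired with GetHugepagesSizeKilobytes, which maps the profile's "1G"/"2M" page sizes to kilobyte strings. Below is a standalone sketch that mirrors that conversion; the function is illustrative, not the operator's own code.

package main

import "fmt"

// hugepagesSizeKilobytes mirrors the switch in GetHugepagesSizeKilobytes:
// only the two sizes supported by the profile API are converted.
func hugepagesSizeKilobytes(size string) (string, error) {
	switch size {
	case "1G":
		return "1048576", nil // 1 GiB = 1024 * 1024 kB
	case "2M":
		return "2048", nil // 2 MiB = 2 * 1024 kB
	default:
		return "", fmt.Errorf("can not convert size %q to kilobytes", size)
	}
}

func main() {
	for _, s := range []string{"1G", "2M", "4M"} {
		kb, err := hugepagesSizeKilobytes(s)
		if err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Printf("%s -> %skB\n", s, kb)
	}
}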
machineconfig.go
package machineconfig import ( "bytes" "encoding/base64" "encoding/json" "fmt" "io/ioutil" "path/filepath" "text/template" "github.com/openshift-kni/performance-addon-operators/build/assets" "github.com/coreos/go-systemd/unit" igntypes "github.com/coreos/ignition/v2/config/v3_2/types" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/pointer" performancev2 "github.com/openshift-kni/performance-addon-operators/api/v2" "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components" profilecomponent "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components/profile" machineconfigv1 "github.com/openshift/machine-config-operator/pkg/apis/machineconfiguration.openshift.io/v1" ) const ( defaultIgnitionVersion = "3.2.0" defaultIgnitionContentSource = "data:text/plain;charset=utf-8;base64" ) const ( // MCKernelRT is the value of the kernel setting in MachineConfig for the RT kernel MCKernelRT = "realtime" // MCKernelDefault is the value of the kernel setting in MachineConfig for the default kernel MCKernelDefault = "default" // HighPerformanceRuntime contains the name of the high-performance runtime HighPerformanceRuntime = "high-performance" bashScriptsDir = "/usr/local/bin" crioConfd = "/etc/crio/crio.conf.d" crioRuntimesConfig = "99-runtimes.conf" // OCIHooksConfigDir is the default directory for the OCI hooks OCIHooksConfigDir = "/etc/containers/oci/hooks.d" // OCIHooksConfig file contains the low latency hooks configuration OCIHooksConfig = "99-low-latency-hooks.json" ociTemplateRPSMask = "RPSMask" udevRulesDir = "/etc/udev/rules.d" udevRpsRules = "99-netdev-rps.rules" // scripts hugepagesAllocation = "hugepages-allocation" ociHooks = "low-latency-hooks" setRPSMask = "set-rps-mask" ) const ( systemdSectionUnit = "Unit" systemdSectionService = "Service" systemdSectionInstall = "Install" systemdDescription = "Description" systemdBefore = "Before" systemdEnvironment = "Environment" systemdType = "Type" systemdRemainAfterExit = "RemainAfterExit" systemdExecStart = "ExecStart" systemdWantedBy = "WantedBy" ) const ( systemdServiceKubelet = "kubelet.service" systemdServiceTypeOneshot = "oneshot" systemdTargetMultiUser = "multi-user.target" systemdTrue = "true" ) const ( environmentHugepagesSize = "HUGEPAGES_SIZE" environmentHugepagesCount = "HUGEPAGES_COUNT" environmentNUMANode = "NUMA_NODE" ) const ( templateReservedCpus = "ReservedCpus" ) // New returns new machine configuration object for performance sensitive workloads func New(profile *performancev2.PerformanceProfile) (*machineconfigv1.MachineConfig, error) { name := GetMachineConfigName(profile) mc := &machineconfigv1.MachineConfig{ TypeMeta: metav1.TypeMeta{ APIVersion: machineconfigv1.GroupVersion.String(), Kind: "MachineConfig", }, ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: profilecomponent.GetMachineConfigLabel(profile), }, Spec: machineconfigv1.MachineConfigSpec{}, } ignitionConfig, err := getIgnitionConfig(profile) if err != nil { return nil, err } rawIgnition, err := json.Marshal(ignitionConfig) if err != nil { return nil, err } mc.Spec.Config = runtime.RawExtension{Raw: rawIgnition} enableRTKernel := profile.Spec.RealTimeKernel != nil && profile.Spec.RealTimeKernel.Enabled != nil && *profile.Spec.RealTimeKernel.Enabled if enableRTKernel { mc.Spec.KernelType = MCKernelRT } else { mc.Spec.KernelType = MCKernelDefault } return mc, nil } // GetMachineConfigName generates machine config name from the performance 
profile func GetMachineConfigName(profile *performancev2.PerformanceProfile) string { name := components.GetComponentName(profile.Name, components.ComponentNamePrefix) return fmt.Sprintf("50-%s", name) } func getIgnitionConfig(profile *performancev2.PerformanceProfile) (*igntypes.Config, error) { ignitionConfig := &igntypes.Config{ Ignition: igntypes.Ignition{ Version: defaultIgnitionVersion, }, Storage: igntypes.Storage{ Files: []igntypes.File{}, }, } // add script files under the node /usr/local/bin directory mode := 0700 for _, script := range []string{hugepagesAllocation, ociHooks, setRPSMask} { dst := GetBashScriptPath(script) content, err := assets.Scripts.ReadFile(fmt.Sprintf("scripts/%s.sh", script)) if err != nil { return nil, err } AddContent(ignitionConfig, content, dst, &mode) } // add crio config snippet under the node /etc/crio/crio.conf.d/ directory crioConfdRuntimesMode := 0644 crioConfigSnippetContent, err := renderCrioConfigSnippet(profile, filepath.Join("configs", crioRuntimesConfig)) if err != nil { return nil, err } crioConfSnippetDst := filepath.Join(crioConfd, crioRuntimesConfig) AddContent(ignitionConfig, crioConfigSnippetContent, crioConfSnippetDst, &crioConfdRuntimesMode) // add crio hooks config under the node cri-o hook directory crioHooksConfigsMode := 0644 ociHooksConfigContent, err := GetOCIHooksConfigContent(OCIHooksConfig, profile) if err != nil { return nil, err } ociHookConfigDst := filepath.Join(OCIHooksConfigDir, OCIHooksConfig) AddContent(ignitionConfig, ociHooksConfigContent, ociHookConfigDst, &crioHooksConfigsMode) // add rps udev rule rpsRulesMode := 0644 rpsRulesContent, err := assets.Configs.ReadFile(filepath.Join("configs", udevRpsRules)) if err != nil { return nil, err } rpsRulesDst := filepath.Join(udevRulesDir, udevRpsRules) AddContent(ignitionConfig, rpsRulesContent, rpsRulesDst, &rpsRulesMode) if profile.Spec.HugePages != nil { for _, page := range profile.Spec.HugePages.Pages { // we already allocated non NUMA specific hugepages via kernel arguments if page.Node == nil { continue } hugepagesSize, err := GetHugepagesSizeKilobytes(page.Size) if err != nil { return nil, err } hugepagesService, err := GetSystemdContent(GetHugepagesAllocationUnitOptions( hugepagesSize, page.Count, *page.Node, )) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &hugepagesService, Enabled: pointer.BoolPtr(true), Name: GetSystemdService(fmt.Sprintf("%s-%skB-NUMA%d", hugepagesAllocation, hugepagesSize, *page.Node)), }) } } if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err := components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } rpsService, err := GetSystemdContent(getRPSUnitOptions(rpsMask)) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &rpsService, Name: GetSystemdService("update-rps@"), }) } return ignitionConfig, nil } //GetBashScriptPath returns the script path containing teh directory and the script name func GetBashScriptPath(scriptName string) string { return fmt.Sprintf("%s/%s.sh", bashScriptsDir, scriptName) } func getSystemdEnvironment(key string, value string) string { return fmt.Sprintf("%s=%s", key, value) } //GetSystemdService returns the service name in systemd func GetSystemdService(serviceName string) string { return fmt.Sprintf("%s.service", serviceName) } //GetSystemdContent get systemd content from list of unit options 
func GetSystemdContent(options []*unit.UnitOption) (string, error) { outReader := unit.Serialize(options) outBytes, err := ioutil.ReadAll(outReader) if err != nil { return "", err } return string(outBytes), nil } // GetOCIHooksConfigContent reads and returns the content of the OCI hook file func GetOCIHooksConfigContent(configFile string, profile *performancev2.PerformanceProfile) ([]byte, error) { ociHookConfigTemplate, err := template.ParseFS(assets.Configs, filepath.Join("configs", configFile)) if err != nil
rpsMask := "0" // RPS disabled if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err = components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } } outContent := &bytes.Buffer{} templateArgs := map[string]string{ociTemplateRPSMask: rpsMask} if err := ociHookConfigTemplate.Execute(outContent, templateArgs); err != nil { return nil, err } return outContent.Bytes(), nil } // GetHugepagesSizeKilobytes retruns hugepages size in kilobytes func GetHugepagesSizeKilobytes(hugepagesSize performancev2.HugePageSize) (string, error) { switch hugepagesSize { case "1G": return "1048576", nil case "2M": return "2048", nil default: return "", fmt.Errorf("can not convert size %q to kilobytes", hugepagesSize) } } //GetHugepagesAllocationUnitOptions returns list of unit options based on the settings of the hugepage func GetHugepagesAllocationUnitOptions(hugepagesSize string, hugepagesCount int32, numaNode int32) []*unit.UnitOption { return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, fmt.Sprintf("Hugepages-%skB allocation on the node %d", hugepagesSize, numaNode)), // Before unit.NewUnitOption(systemdSectionUnit, systemdBefore, systemdServiceKubelet), // [Service] // Environment unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesCount, fmt.Sprint(hugepagesCount))), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesSize, hugepagesSize)), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentNUMANode, fmt.Sprint(numaNode))), // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // RemainAfterExit unit.NewUnitOption(systemdSectionService, systemdRemainAfterExit, systemdTrue), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, GetBashScriptPath(hugepagesAllocation)), // [Install] // WantedBy unit.NewUnitOption(systemdSectionInstall, systemdWantedBy, systemdTargetMultiUser), } } func getRPSUnitOptions(rpsMask string) []*unit.UnitOption { cmd := fmt.Sprintf("%s %%i %s", GetBashScriptPath(setRPSMask), rpsMask) return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, "Sets network devices RPS mask"), // [Service] // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, cmd), } } //AddContent appends more content to the ignition configuration func AddContent(ignitionConfig *igntypes.Config, content []byte, dst string, mode *int) { contentBase64 := base64.StdEncoding.EncodeToString(content) ignitionConfig.Storage.Files = append(ignitionConfig.Storage.Files, igntypes.File{ Node: igntypes.Node{ Path: dst, }, FileEmbedded1: igntypes.FileEmbedded1{ Contents: igntypes.Resource{ Source: pointer.StringPtr(fmt.Sprintf("%s,%s", defaultIgnitionContentSource, contentBase64)), }, Mode: mode, }, }) } func renderCrioConfigSnippet(profile *performancev2.PerformanceProfile, src string) ([]byte, error) { templateArgs := make(map[string]string) if profile.Spec.CPU.Reserved != nil { templateArgs[templateReservedCpus] = string(*profile.Spec.CPU.Reserved) } profileTemplate, err := template.ParseFS(assets.Configs, src) if err != nil { return nil, err } crioConfig := &bytes.Buffer{} if err := profileTemplate.Execute(crioConfig, templateArgs); err != nil { 
return nil, err } return crioConfig.Bytes(), nil }
{ return nil, err }
conditional_block
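This row masks one of the error-return blocks in GetOCIHooksConfigContent, which renders the OCI hook JSON through text/template with an RPSMask argument. A minimal sketch of that templating step follows, using a hypothetical inline template instead of the embedded configs file parsed via template.ParseFS.

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Hypothetical stand-in for the embedded 99-low-latency-hooks.json.
	const hookTemplate = `{"args": ["set-rps-mask", "{{ .RPSMask }}"]}`

	tmpl, err := template.New("hook").Parse(hookTemplate)
	if err != nil {
		panic(err)
	}

	// "0" means RPS disabled; a real mask would come from CPUListToMaskList.
	args := map[string]string{"RPSMask": "00000003"}

	out := &bytes.Buffer{}
	if err := tmpl.Execute(out, args); err != nil {
		panic(err)
	}
	fmt.Println(out.String())
}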
machineconfig.go
package machineconfig import ( "bytes" "encoding/base64" "encoding/json" "fmt" "io/ioutil" "path/filepath" "text/template" "github.com/openshift-kni/performance-addon-operators/build/assets" "github.com/coreos/go-systemd/unit" igntypes "github.com/coreos/ignition/v2/config/v3_2/types" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/pointer" performancev2 "github.com/openshift-kni/performance-addon-operators/api/v2" "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components" profilecomponent "github.com/openshift-kni/performance-addon-operators/pkg/controller/performanceprofile/components/profile" machineconfigv1 "github.com/openshift/machine-config-operator/pkg/apis/machineconfiguration.openshift.io/v1" ) const ( defaultIgnitionVersion = "3.2.0" defaultIgnitionContentSource = "data:text/plain;charset=utf-8;base64" ) const ( // MCKernelRT is the value of the kernel setting in MachineConfig for the RT kernel MCKernelRT = "realtime" // MCKernelDefault is the value of the kernel setting in MachineConfig for the default kernel MCKernelDefault = "default" // HighPerformanceRuntime contains the name of the high-performance runtime HighPerformanceRuntime = "high-performance" bashScriptsDir = "/usr/local/bin" crioConfd = "/etc/crio/crio.conf.d" crioRuntimesConfig = "99-runtimes.conf" // OCIHooksConfigDir is the default directory for the OCI hooks OCIHooksConfigDir = "/etc/containers/oci/hooks.d" // OCIHooksConfig file contains the low latency hooks configuration OCIHooksConfig = "99-low-latency-hooks.json" ociTemplateRPSMask = "RPSMask" udevRulesDir = "/etc/udev/rules.d" udevRpsRules = "99-netdev-rps.rules" // scripts hugepagesAllocation = "hugepages-allocation" ociHooks = "low-latency-hooks" setRPSMask = "set-rps-mask" ) const ( systemdSectionUnit = "Unit" systemdSectionService = "Service" systemdSectionInstall = "Install" systemdDescription = "Description" systemdBefore = "Before" systemdEnvironment = "Environment" systemdType = "Type" systemdRemainAfterExit = "RemainAfterExit" systemdExecStart = "ExecStart" systemdWantedBy = "WantedBy" ) const ( systemdServiceKubelet = "kubelet.service" systemdServiceTypeOneshot = "oneshot" systemdTargetMultiUser = "multi-user.target" systemdTrue = "true" ) const ( environmentHugepagesSize = "HUGEPAGES_SIZE" environmentHugepagesCount = "HUGEPAGES_COUNT" environmentNUMANode = "NUMA_NODE" ) const ( templateReservedCpus = "ReservedCpus" ) // New returns new machine configuration object for performance sensitive workloads func New(profile *performancev2.PerformanceProfile) (*machineconfigv1.MachineConfig, error) { name := GetMachineConfigName(profile) mc := &machineconfigv1.MachineConfig{ TypeMeta: metav1.TypeMeta{ APIVersion: machineconfigv1.GroupVersion.String(), Kind: "MachineConfig", }, ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: profilecomponent.GetMachineConfigLabel(profile), }, Spec: machineconfigv1.MachineConfigSpec{}, } ignitionConfig, err := getIgnitionConfig(profile) if err != nil { return nil, err } rawIgnition, err := json.Marshal(ignitionConfig) if err != nil { return nil, err } mc.Spec.Config = runtime.RawExtension{Raw: rawIgnition} enableRTKernel := profile.Spec.RealTimeKernel != nil && profile.Spec.RealTimeKernel.Enabled != nil && *profile.Spec.RealTimeKernel.Enabled if enableRTKernel { mc.Spec.KernelType = MCKernelRT } else { mc.Spec.KernelType = MCKernelDefault } return mc, nil } // GetMachineConfigName generates machine config name from the performance 
profile func GetMachineConfigName(profile *performancev2.PerformanceProfile) string { name := components.GetComponentName(profile.Name, components.ComponentNamePrefix) return fmt.Sprintf("50-%s", name) } func getIgnitionConfig(profile *performancev2.PerformanceProfile) (*igntypes.Config, error) { ignitionConfig := &igntypes.Config{ Ignition: igntypes.Ignition{ Version: defaultIgnitionVersion, }, Storage: igntypes.Storage{ Files: []igntypes.File{}, }, } // add script files under the node /usr/local/bin directory mode := 0700 for _, script := range []string{hugepagesAllocation, ociHooks, setRPSMask} { dst := GetBashScriptPath(script) content, err := assets.Scripts.ReadFile(fmt.Sprintf("scripts/%s.sh", script)) if err != nil { return nil, err } AddContent(ignitionConfig, content, dst, &mode) } // add crio config snippet under the node /etc/crio/crio.conf.d/ directory crioConfdRuntimesMode := 0644 crioConfigSnippetContent, err := renderCrioConfigSnippet(profile, filepath.Join("configs", crioRuntimesConfig)) if err != nil { return nil, err } crioConfSnippetDst := filepath.Join(crioConfd, crioRuntimesConfig) AddContent(ignitionConfig, crioConfigSnippetContent, crioConfSnippetDst, &crioConfdRuntimesMode) // add crio hooks config under the node cri-o hook directory crioHooksConfigsMode := 0644 ociHooksConfigContent, err := GetOCIHooksConfigContent(OCIHooksConfig, profile) if err != nil { return nil, err } ociHookConfigDst := filepath.Join(OCIHooksConfigDir, OCIHooksConfig) AddContent(ignitionConfig, ociHooksConfigContent, ociHookConfigDst, &crioHooksConfigsMode) // add rps udev rule rpsRulesMode := 0644 rpsRulesContent, err := assets.Configs.ReadFile(filepath.Join("configs", udevRpsRules)) if err != nil { return nil, err } rpsRulesDst := filepath.Join(udevRulesDir, udevRpsRules) AddContent(ignitionConfig, rpsRulesContent, rpsRulesDst, &rpsRulesMode) if profile.Spec.HugePages != nil { for _, page := range profile.Spec.HugePages.Pages { // we already allocated non NUMA specific hugepages via kernel arguments if page.Node == nil { continue } hugepagesSize, err := GetHugepagesSizeKilobytes(page.Size) if err != nil { return nil, err } hugepagesService, err := GetSystemdContent(GetHugepagesAllocationUnitOptions( hugepagesSize, page.Count, *page.Node, )) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &hugepagesService, Enabled: pointer.BoolPtr(true), Name: GetSystemdService(fmt.Sprintf("%s-%skB-NUMA%d", hugepagesAllocation, hugepagesSize, *page.Node)), }) } } if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err := components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } rpsService, err := GetSystemdContent(getRPSUnitOptions(rpsMask)) if err != nil { return nil, err } ignitionConfig.Systemd.Units = append(ignitionConfig.Systemd.Units, igntypes.Unit{ Contents: &rpsService, Name: GetSystemdService("update-rps@"), }) } return ignitionConfig, nil } //GetBashScriptPath returns the script path containing teh directory and the script name func GetBashScriptPath(scriptName string) string { return fmt.Sprintf("%s/%s.sh", bashScriptsDir, scriptName) } func getSystemdEnvironment(key string, value string) string { return fmt.Sprintf("%s=%s", key, value) } //GetSystemdService returns the service name in systemd func GetSystemdService(serviceName string) string { return fmt.Sprintf("%s.service", serviceName) } //GetSystemdContent get systemd content from list of unit options 
func GetSystemdContent(options []*unit.UnitOption) (string, error) { outReader := unit.Serialize(options) outBytes, err := ioutil.ReadAll(outReader) if err != nil { return "", err } return string(outBytes), nil } // GetOCIHooksConfigContent reads and returns the content of the OCI hook file func GetOCIHooksConfigContent(configFile string, profile *performancev2.PerformanceProfile) ([]byte, error) { ociHookConfigTemplate, err := template.ParseFS(assets.Configs, filepath.Join("configs", configFile)) if err != nil { return nil, err } rpsMask := "0" // RPS disabled if profile.Spec.CPU != nil && profile.Spec.CPU.Reserved != nil { rpsMask, err = components.CPUListToMaskList(string(*profile.Spec.CPU.Reserved)) if err != nil { return nil, err } } outContent := &bytes.Buffer{} templateArgs := map[string]string{ociTemplateRPSMask: rpsMask} if err := ociHookConfigTemplate.Execute(outContent, templateArgs); err != nil { return nil, err } return outContent.Bytes(), nil } // GetHugepagesSizeKilobytes retruns hugepages size in kilobytes func GetHugepagesSizeKilobytes(hugepagesSize performancev2.HugePageSize) (string, error) { switch hugepagesSize { case "1G": return "1048576", nil case "2M": return "2048", nil default: return "", fmt.Errorf("can not convert size %q to kilobytes", hugepagesSize) } } //GetHugepagesAllocationUnitOptions returns list of unit options based on the settings of the hugepage func GetHugepagesAllocationUnitOptions(hugepagesSize string, hugepagesCount int32, numaNode int32) []*unit.UnitOption { return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, fmt.Sprintf("Hugepages-%skB allocation on the node %d", hugepagesSize, numaNode)), // Before unit.NewUnitOption(systemdSectionUnit, systemdBefore, systemdServiceKubelet), // [Service] // Environment unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesCount, fmt.Sprint(hugepagesCount))), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentHugepagesSize, hugepagesSize)), unit.NewUnitOption(systemdSectionService, systemdEnvironment, getSystemdEnvironment(environmentNUMANode, fmt.Sprint(numaNode))), // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // RemainAfterExit unit.NewUnitOption(systemdSectionService, systemdRemainAfterExit, systemdTrue), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, GetBashScriptPath(hugepagesAllocation)), // [Install] // WantedBy unit.NewUnitOption(systemdSectionInstall, systemdWantedBy, systemdTargetMultiUser), } } func getRPSUnitOptions(rpsMask string) []*unit.UnitOption { cmd := fmt.Sprintf("%s %%i %s", GetBashScriptPath(setRPSMask), rpsMask) return []*unit.UnitOption{ // [Unit] // Description unit.NewUnitOption(systemdSectionUnit, systemdDescription, "Sets network devices RPS mask"), // [Service] // Type unit.NewUnitOption(systemdSectionService, systemdType, systemdServiceTypeOneshot), // ExecStart unit.NewUnitOption(systemdSectionService, systemdExecStart, cmd), } } //AddContent appends more content to the ignition configuration func AddContent(ignitionConfig *igntypes.Config, content []byte, dst string, mode *int) { contentBase64 := base64.StdEncoding.EncodeToString(content) ignitionConfig.Storage.Files = append(ignitionConfig.Storage.Files, igntypes.File{ Node: igntypes.Node{ Path: dst, }, FileEmbedded1: igntypes.FileEmbedded1{ Contents: igntypes.Resource{ Source: 
pointer.StringPtr(fmt.Sprintf("%s,%s", defaultIgnitionContentSource, contentBase64)), }, Mode: mode, }, }) } func renderCrioConfigSnippet(profile *performancev2.PerformanceProfile, src string) ([]byte, error) { templateArgs := make(map[string]string) if profile.Spec.CPU.Reserved != nil { templateArgs[templateReservedCpus] = string(*profile.Spec.CPU.Reserved) } profileTemplate, err := template.ParseFS(assets.Configs, src) if err != nil { return nil, err
crioConfig := &bytes.Buffer{} if err := profileTemplate.Execute(crioConfig, templateArgs); err != nil { return nil, err } return crioConfig.Bytes(), nil }
}
random_line_split
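The span masked here is the FileEmbedded1 block of AddContent, which inlines file contents into the Ignition config as a base64 data URL. A small sketch of just that encoding step; the script content and destination mentioned in the comment are arbitrary examples.

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	content := []byte("#!/bin/bash\necho hello\n")

	// Same source format AddContent uses: a plain-text, base64 data URL.
	const contentSource = "data:text/plain;charset=utf-8;base64"
	encoded := base64.StdEncoding.EncodeToString(content)
	source := fmt.Sprintf("%s,%s", contentSource, encoded)

	// In the operator this string becomes igntypes.Resource.Source for a
	// file such as /usr/local/bin/hugepages-allocation.sh.
	fmt.Println(source)
}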
main.go
package fuzz import ( "context" "errors" "fmt" "log" "net/http" "net/url" "os" "path/filepath" "regexp" "strconv" "sync" "time" "github.com/RedTeamPentesting/monsoon/cli" "github.com/RedTeamPentesting/monsoon/producer" "github.com/RedTeamPentesting/monsoon/recorder" "github.com/RedTeamPentesting/monsoon/reporter" "github.com/RedTeamPentesting/monsoon/request" "github.com/RedTeamPentesting/monsoon/response" "github.com/RedTeamPentesting/monsoon/shell" "github.com/fd0/termstatus" "github.com/spf13/cobra" "golang.org/x/sync/errgroup" ) // Options collect options for a run. type Options struct { Range []string RangeFormat string Filename string Logfile string Logdir string Threads int RequestsPerSecond float64 BufferSize int Skip int Limit int Request *request.Request // the template for the HTTP request FollowRedirect int HideStatusCodes []string ShowStatusCodes []string HideHeaderSize []string HideBodySize []string HidePattern []string hidePattern []*regexp.Regexp ShowPattern []string showPattern []*regexp.Regexp Extract []string extract []*regexp.Regexp ExtractPipe []string extractPipe [][]string MaxBodySize int } var opts Options func compileRegexps(pattern []string) (res []*regexp.Regexp, err error) { for _, pat := range pattern { r, err := regexp.Compile(pat) if err != nil { return nil, fmt.Errorf("regexp %q failed to compile: %v", pat, err) } res = append(res, r) } return res, nil } func splitShell(cmds []string) ([][]string, error) { var data [][]string for _, cmd := range cmds { args, err := shell.Split(cmd) if err != nil { return nil, err } if len(args) < 1 { return nil, fmt.Errorf("invalid command: %q", cmd) } data = append(data, args) } return data, nil } // valid validates the options and returns an error if something is invalid. func (opts *Options) valid() (err error) { if opts.Threads <= 0 { return errors.New("invalid number of threads") } if len(opts.Range) > 0 && opts.Filename != "" { return errors.New("only one source allowed but both range and filename specified") } if len(opts.Range) == 0 && opts.Filename == "" { return errors.New("neither file nor range specified, nothing to do") } opts.extract, err = compileRegexps(opts.Extract) if err != nil { return err } opts.extractPipe, err = splitShell(opts.ExtractPipe) if err != nil { return err } opts.hidePattern, err = compileRegexps(opts.HidePattern) if err != nil { return err } opts.showPattern, err = compileRegexps(opts.ShowPattern) if err != nil { return err } return nil } var cmd = &cobra.Command{ Use: "fuzz [options] URL", DisableFlagsInUseLine: true, Short: helpShort, Long: helpLong, Example: helpExamples, RunE: func(cmd *cobra.Command, args []string) error { return cli.WithContext(func(ctx context.Context, g *errgroup.Group) error { return run(ctx, g, &opts, args) }) }, } // AddCommand adds the 'run' command to cmd. 
func AddCommand(c *cobra.Command) { c.AddCommand(cmd) fs := cmd.Flags() fs.SortFlags = false fs.StringSliceVarP(&opts.Range, "range", "r", nil, "set range `from-to`") fs.StringVar(&opts.RangeFormat, "range-format", "%d", "set `format` for range") fs.StringVarP(&opts.Filename, "file", "f", "", "read values from `filename`") fs.StringVar(&opts.Logfile, "logfile", "", "write copy of printed messages to `filename`.log") fs.StringVar(&opts.Logdir, "logdir", os.Getenv("MONSOON_LOG_DIR"), "automatically log all output to files in `dir`") fs.IntVarP(&opts.Threads, "threads", "t", 5, "make as many as `n` parallel requests") fs.IntVar(&opts.BufferSize, "buffer-size", 100000, "set number of buffered items to `n`") fs.IntVar(&opts.Skip, "skip", 0, "skip the first `n` requests") fs.IntVar(&opts.Limit, "limit", 0, "only run `n` requests, then exit") fs.Float64Var(&opts.RequestsPerSecond, "requests-per-second", 0, "do at most `n` requests per second (e.g. 0.5)") // add all options to define a request opts.Request = request.New("") request.AddFlags(opts.Request, fs) fs.IntVar(&opts.FollowRedirect, "follow-redirect", 0, "follow `n` redirects") fs.StringSliceVar(&opts.HideStatusCodes, "hide-status", nil, "hide responses with this status `code,[code-code],[-code],[...]`") fs.StringSliceVar(&opts.ShowStatusCodes, "show-status", nil, "show only responses with this status `code,[code-code],[code-],[...]`") fs.StringSliceVar(&opts.HideHeaderSize, "hide-header-size", nil, "hide responses with this header size (`size,from-to,from-,-to`)") fs.StringSliceVar(&opts.HideBodySize, "hide-body-size", nil, "hide responses with this body size (`size,from-to,from-,-to`)") fs.StringArrayVar(&opts.HidePattern, "hide-pattern", nil, "hide responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.ShowPattern, "show-pattern", nil, "show only responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.Extract, "extract", nil, "extract `regex` from response body (can be specified multiple times)") fs.StringArrayVar(&opts.ExtractPipe, "extract-pipe", nil, "pipe response body to `cmd` to extract data (can be specified multiple times)") fs.IntVar(&opts.MaxBodySize, "max-body-size", 5, "read at most `n` MiB from a returned response body (used for extracting data from the body)") } // logfilePath returns the prefix for the logfiles, if any. 
func logfilePath(opts *Options, inputURL string) (prefix string, err error) { if opts.Logdir != "" && opts.Logfile == "" { url, err := url.Parse(inputURL) if err != nil { return "", err } ts := time.Now().Format("20060102_150405") fn := fmt.Sprintf("monsoon_%s_%s", url.Host, ts) p := filepath.Join(opts.Logdir, fn) return p, nil } return opts.Logfile, nil } func setupProducer(ctx context.Context, g *errgroup.Group, opts *Options, ch chan<- string, count chan<- int) error { switch { case len(opts.Range) > 0: var ranges []producer.Range for _, r := range opts.Range { rng, err := producer.ParseRange(r) if err != nil { return err } ranges = append(ranges, rng) } g.Go(func() error { return producer.Ranges(ctx, ranges, opts.RangeFormat, ch, count) }) return nil case opts.Filename == "-": g.Go(func() error { return producer.Reader(ctx, os.Stdin, ch, count) }) return nil case opts.Filename != "": file, err := os.Open(opts.Filename) if err != nil { return err } g.Go(func() error { return producer.Reader(ctx, file, ch, count) }) return nil default: return errors.New("neither file nor range specified, nothing to do") } } func setupTerminal(ctx context.Context, g *errgroup.Group, maxFrameRate uint, logfilePrefix string) (term cli.Terminal, cleanup func(), err error) { ctx, cancel := context.WithCancel(context.Background()) statusTerm := termstatus.New(os.Stdout, os.Stderr, false) if maxFrameRate != 0 { statusTerm.MaxFrameRate = maxFrameRate } term = statusTerm if logfilePrefix != "" { fmt.Printf(reporter.Bold("Logfile:")+" %s.log\n", logfilePrefix) logfile, err := os.Create(logfilePrefix + ".log") if err != nil { return nil, cancel, err } fmt.Fprintln(logfile, shell.Join(os.Args)) // write copies of messages to logfile term = &cli.LogTerminal{ Terminal: statusTerm, Writer: logfile, } } // make sure error messages logged via the log package are printed nicely w := cli.NewStdioWrapper(term) log.SetOutput(w.Stderr()) g.Go(func() error { term.Run(ctx) return nil }) return term, cancel, nil } func setupResponseFilters(opts *Options) ([]response.Filter, error) { var filters []response.Filter filter, err := response.NewFilterStatusCode(opts.HideStatusCodes, opts.ShowStatusCodes) if err != nil { return nil, err } filters = append(filters, filter) if len(opts.HideHeaderSize) > 0 || len(opts.HideBodySize) > 0 { f, err := response.NewFilterSize(opts.HideHeaderSize, opts.HideBodySize) if err != nil { return nil, err } filters = append(filters, f) } if len(opts.hidePattern) > 0 { filters = append(filters, response.FilterRejectPattern{Pattern: opts.hidePattern}) } if len(opts.showPattern) > 0 { filters = append(filters, response.FilterAcceptPattern{Pattern: opts.showPattern}) } return filters, nil } func setupValueFilters(ctx context.Context, opts *Options, valueCh <-chan string, countCh <-chan int) (<-chan string, <-chan int) { if opts.Skip > 0 { f := &producer.FilterSkip{Skip: opts.Skip} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } if opts.Limit > 0 { f := &producer.FilterLimit{Max: opts.Limit} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } return valueCh, countCh } func startRunners(ctx context.Context, opts *Options, in <-chan string) (<-chan response.Response, error) { out := make(chan response.Response) var wg sync.WaitGroup transport, err := response.NewTransport(opts.Request.Insecure, opts.Request.TLSClientKeyCertFile, opts.Request.DisableHTTP2, opts.Threads) if err != nil { return nil, err } for i := 0; i < opts.Threads; i++ { runner := response.NewRunner(transport, 
opts.Request, in, out) runner.MaxBodySize = opts.MaxBodySize * 1024 * 1024 runner.Extract = opts.extract runner.Client.CheckRedirect = func(req *http.Request, via []*http.Request) error { if len(via) <= opts.FollowRedirect { return nil } return http.ErrUseLastResponse } wg.Add(1) go func() { runner.Run(ctx) wg.Done() }() } go func() { // wait until the runners are done, then close the output channel wg.Wait() close(out) }() return out, nil } func run(ctx context.Context, g *errgroup.Group, opts *Options, args []string) error
{ // make sure the options and arguments are valid if len(args) == 0 { return errors.New("last argument needs to be the URL") } if len(args) > 1 { return errors.New("more than one target URL specified") } err := opts.valid() if err != nil { return err } inputURL := args[0] opts.Request.URL = inputURL // setup logging and the terminal logfilePrefix, err := logfilePath(opts, inputURL) if err != nil { return err } var maxFrameRate uint if s, ok := os.LookupEnv("MONSOON_PROGRESS_FPS"); ok { rate, err := strconv.ParseUint(s, 10, 32) if err != nil { return fmt.Errorf("parse $MONSOON_PROGRESS_FPS: %w", err) } maxFrameRate = uint(rate) } term, cleanup, err := setupTerminal(ctx, g, maxFrameRate, logfilePrefix) defer cleanup() if err != nil { return err } // collect the filters for the responses responseFilters, err := setupResponseFilters(opts) if err != nil { return err } // setup the pipeline for the values vch := make(chan string, opts.BufferSize) var valueCh <-chan string = vch cch := make(chan int, 1) var countCh <-chan int = cch // start a producer from the options err = setupProducer(ctx, g, opts, vch, cch) if err != nil { return err } // filter values (skip, limit) valueCh, countCh = setupValueFilters(ctx, opts, valueCh, countCh) // limit the throughput (if requested) if opts.RequestsPerSecond > 0 { valueCh = producer.Limit(ctx, opts.RequestsPerSecond, valueCh) } // start the runners responseCh, err := startRunners(ctx, opts, valueCh) if err != nil { return err } // filter the responses responseCh = response.Mark(responseCh, responseFilters) // extract data from all interesting (non-hidden) responses extracter := &response.Extracter{ Pattern: opts.extract, Commands: opts.extractPipe, Error: func(err error) { term.Printf("%v", err) }, } responseCh = extracter.Run(responseCh) if logfilePrefix != "" { rec, err := recorder.New(logfilePrefix+".json", opts.Request) if err != nil { return err } // fill in information for generating the request rec.Data.InputFile = opts.Filename rec.Data.Ranges = opts.Range rec.Data.RangeFormat = opts.RangeFormat rec.Data.Extract = opts.Extract rec.Data.ExtractPipe = opts.ExtractPipe out := make(chan response.Response) in := responseCh responseCh = out outCount := make(chan int) inCount := countCh countCh = outCount g.Go(func() error { return rec.Run(ctx, in, out, inCount, outCount) }) } // run the reporter term.Printf(reporter.Bold("Target URL:")+" %v\n\n", inputURL) reporter := reporter.New(term) return reporter.Display(responseCh, countCh) }
identifier_body
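The body masked in this row is run(), which wires a buffered producer channel through skip/limit filters before the runners consume it. A reduced sketch of that channel pipeline with plain strings instead of monsoon's producer types; filterSkipLimit below is illustrative, not part of the tool.

package main

import "fmt"

// filterSkipLimit drops the first skip values and then forwards at most
// limit values, roughly like FilterSkip and FilterLimit chained together.
func filterSkipLimit(in <-chan string, skip, limit int) <-chan string {
	out := make(chan string)
	go func() {
		defer close(out)
		seen, sent := 0, 0
		for v := range in {
			seen++
			if seen <= skip {
				continue
			}
			if limit > 0 && sent >= limit {
				return
			}
			out <- v
			sent++
		}
	}()
	return out
}

func main() {
	values := make(chan string, 10)
	go func() {
		defer close(values)
		for i := 0; i < 10; i++ {
			values <- fmt.Sprintf("value-%d", i)
		}
	}()

	for v := range filterSkipLimit(values, 2, 3) {
		fmt.Println(v) // prints value-2, value-3, value-4
	}
}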
main.go
package fuzz import ( "context" "errors" "fmt" "log" "net/http" "net/url" "os" "path/filepath" "regexp" "strconv" "sync" "time" "github.com/RedTeamPentesting/monsoon/cli" "github.com/RedTeamPentesting/monsoon/producer" "github.com/RedTeamPentesting/monsoon/recorder" "github.com/RedTeamPentesting/monsoon/reporter" "github.com/RedTeamPentesting/monsoon/request" "github.com/RedTeamPentesting/monsoon/response" "github.com/RedTeamPentesting/monsoon/shell" "github.com/fd0/termstatus" "github.com/spf13/cobra" "golang.org/x/sync/errgroup" ) // Options collect options for a run. type Options struct { Range []string RangeFormat string Filename string Logfile string Logdir string Threads int RequestsPerSecond float64 BufferSize int Skip int Limit int Request *request.Request // the template for the HTTP request FollowRedirect int HideStatusCodes []string ShowStatusCodes []string HideHeaderSize []string HideBodySize []string HidePattern []string hidePattern []*regexp.Regexp ShowPattern []string showPattern []*regexp.Regexp Extract []string extract []*regexp.Regexp ExtractPipe []string extractPipe [][]string MaxBodySize int } var opts Options func compileRegexps(pattern []string) (res []*regexp.Regexp, err error) { for _, pat := range pattern { r, err := regexp.Compile(pat) if err != nil { return nil, fmt.Errorf("regexp %q failed to compile: %v", pat, err) } res = append(res, r) } return res, nil } func splitShell(cmds []string) ([][]string, error) { var data [][]string for _, cmd := range cmds { args, err := shell.Split(cmd) if err != nil { return nil, err } if len(args) < 1 { return nil, fmt.Errorf("invalid command: %q", cmd) } data = append(data, args) } return data, nil } // valid validates the options and returns an error if something is invalid. func (opts *Options) valid() (err error) { if opts.Threads <= 0 { return errors.New("invalid number of threads") } if len(opts.Range) > 0 && opts.Filename != "" { return errors.New("only one source allowed but both range and filename specified") } if len(opts.Range) == 0 && opts.Filename == "" { return errors.New("neither file nor range specified, nothing to do") } opts.extract, err = compileRegexps(opts.Extract) if err != nil { return err } opts.extractPipe, err = splitShell(opts.ExtractPipe) if err != nil { return err } opts.hidePattern, err = compileRegexps(opts.HidePattern) if err != nil { return err } opts.showPattern, err = compileRegexps(opts.ShowPattern) if err != nil { return err } return nil } var cmd = &cobra.Command{ Use: "fuzz [options] URL", DisableFlagsInUseLine: true, Short: helpShort, Long: helpLong, Example: helpExamples, RunE: func(cmd *cobra.Command, args []string) error { return cli.WithContext(func(ctx context.Context, g *errgroup.Group) error { return run(ctx, g, &opts, args) }) }, } // AddCommand adds the 'run' command to cmd. 
func AddCommand(c *cobra.Command) { c.AddCommand(cmd) fs := cmd.Flags() fs.SortFlags = false fs.StringSliceVarP(&opts.Range, "range", "r", nil, "set range `from-to`") fs.StringVar(&opts.RangeFormat, "range-format", "%d", "set `format` for range") fs.StringVarP(&opts.Filename, "file", "f", "", "read values from `filename`") fs.StringVar(&opts.Logfile, "logfile", "", "write copy of printed messages to `filename`.log") fs.StringVar(&opts.Logdir, "logdir", os.Getenv("MONSOON_LOG_DIR"), "automatically log all output to files in `dir`") fs.IntVarP(&opts.Threads, "threads", "t", 5, "make as many as `n` parallel requests") fs.IntVar(&opts.BufferSize, "buffer-size", 100000, "set number of buffered items to `n`") fs.IntVar(&opts.Skip, "skip", 0, "skip the first `n` requests") fs.IntVar(&opts.Limit, "limit", 0, "only run `n` requests, then exit") fs.Float64Var(&opts.RequestsPerSecond, "requests-per-second", 0, "do at most `n` requests per second (e.g. 0.5)") // add all options to define a request opts.Request = request.New("") request.AddFlags(opts.Request, fs) fs.IntVar(&opts.FollowRedirect, "follow-redirect", 0, "follow `n` redirects") fs.StringSliceVar(&opts.HideStatusCodes, "hide-status", nil, "hide responses with this status `code,[code-code],[-code],[...]`") fs.StringSliceVar(&opts.ShowStatusCodes, "show-status", nil, "show only responses with this status `code,[code-code],[code-],[...]`") fs.StringSliceVar(&opts.HideHeaderSize, "hide-header-size", nil, "hide responses with this header size (`size,from-to,from-,-to`)") fs.StringSliceVar(&opts.HideBodySize, "hide-body-size", nil, "hide responses with this body size (`size,from-to,from-,-to`)") fs.StringArrayVar(&opts.HidePattern, "hide-pattern", nil, "hide responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.ShowPattern, "show-pattern", nil, "show only responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.Extract, "extract", nil, "extract `regex` from response body (can be specified multiple times)") fs.StringArrayVar(&opts.ExtractPipe, "extract-pipe", nil, "pipe response body to `cmd` to extract data (can be specified multiple times)") fs.IntVar(&opts.MaxBodySize, "max-body-size", 5, "read at most `n` MiB from a returned response body (used for extracting data from the body)") } // logfilePath returns the prefix for the logfiles, if any. 
func logfilePath(opts *Options, inputURL string) (prefix string, err error) { if opts.Logdir != "" && opts.Logfile == "" { url, err := url.Parse(inputURL) if err != nil { return "", err } ts := time.Now().Format("20060102_150405") fn := fmt.Sprintf("monsoon_%s_%s", url.Host, ts) p := filepath.Join(opts.Logdir, fn) return p, nil } return opts.Logfile, nil } func setupProducer(ctx context.Context, g *errgroup.Group, opts *Options, ch chan<- string, count chan<- int) error { switch { case len(opts.Range) > 0: var ranges []producer.Range for _, r := range opts.Range { rng, err := producer.ParseRange(r) if err != nil { return err } ranges = append(ranges, rng) } g.Go(func() error { return producer.Ranges(ctx, ranges, opts.RangeFormat, ch, count) }) return nil case opts.Filename == "-": g.Go(func() error { return producer.Reader(ctx, os.Stdin, ch, count) }) return nil case opts.Filename != "": file, err := os.Open(opts.Filename) if err != nil { return err } g.Go(func() error { return producer.Reader(ctx, file, ch, count) }) return nil default: return errors.New("neither file nor range specified, nothing to do") } } func setupTerminal(ctx context.Context, g *errgroup.Group, maxFrameRate uint, logfilePrefix string) (term cli.Terminal, cleanup func(), err error) { ctx, cancel := context.WithCancel(context.Background()) statusTerm := termstatus.New(os.Stdout, os.Stderr, false) if maxFrameRate != 0 { statusTerm.MaxFrameRate = maxFrameRate } term = statusTerm if logfilePrefix != "" { fmt.Printf(reporter.Bold("Logfile:")+" %s.log\n", logfilePrefix) logfile, err := os.Create(logfilePrefix + ".log") if err != nil { return nil, cancel, err } fmt.Fprintln(logfile, shell.Join(os.Args))
} } // make sure error messages logged via the log package are printed nicely w := cli.NewStdioWrapper(term) log.SetOutput(w.Stderr()) g.Go(func() error { term.Run(ctx) return nil }) return term, cancel, nil } func setupResponseFilters(opts *Options) ([]response.Filter, error) { var filters []response.Filter filter, err := response.NewFilterStatusCode(opts.HideStatusCodes, opts.ShowStatusCodes) if err != nil { return nil, err } filters = append(filters, filter) if len(opts.HideHeaderSize) > 0 || len(opts.HideBodySize) > 0 { f, err := response.NewFilterSize(opts.HideHeaderSize, opts.HideBodySize) if err != nil { return nil, err } filters = append(filters, f) } if len(opts.hidePattern) > 0 { filters = append(filters, response.FilterRejectPattern{Pattern: opts.hidePattern}) } if len(opts.showPattern) > 0 { filters = append(filters, response.FilterAcceptPattern{Pattern: opts.showPattern}) } return filters, nil } func setupValueFilters(ctx context.Context, opts *Options, valueCh <-chan string, countCh <-chan int) (<-chan string, <-chan int) { if opts.Skip > 0 { f := &producer.FilterSkip{Skip: opts.Skip} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } if opts.Limit > 0 { f := &producer.FilterLimit{Max: opts.Limit} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } return valueCh, countCh } func startRunners(ctx context.Context, opts *Options, in <-chan string) (<-chan response.Response, error) { out := make(chan response.Response) var wg sync.WaitGroup transport, err := response.NewTransport(opts.Request.Insecure, opts.Request.TLSClientKeyCertFile, opts.Request.DisableHTTP2, opts.Threads) if err != nil { return nil, err } for i := 0; i < opts.Threads; i++ { runner := response.NewRunner(transport, opts.Request, in, out) runner.MaxBodySize = opts.MaxBodySize * 1024 * 1024 runner.Extract = opts.extract runner.Client.CheckRedirect = func(req *http.Request, via []*http.Request) error { if len(via) <= opts.FollowRedirect { return nil } return http.ErrUseLastResponse } wg.Add(1) go func() { runner.Run(ctx) wg.Done() }() } go func() { // wait until the runners are done, then close the output channel wg.Wait() close(out) }() return out, nil } func run(ctx context.Context, g *errgroup.Group, opts *Options, args []string) error { // make sure the options and arguments are valid if len(args) == 0 { return errors.New("last argument needs to be the URL") } if len(args) > 1 { return errors.New("more than one target URL specified") } err := opts.valid() if err != nil { return err } inputURL := args[0] opts.Request.URL = inputURL // setup logging and the terminal logfilePrefix, err := logfilePath(opts, inputURL) if err != nil { return err } var maxFrameRate uint if s, ok := os.LookupEnv("MONSOON_PROGRESS_FPS"); ok { rate, err := strconv.ParseUint(s, 10, 32) if err != nil { return fmt.Errorf("parse $MONSOON_PROGRESS_FPS: %w", err) } maxFrameRate = uint(rate) } term, cleanup, err := setupTerminal(ctx, g, maxFrameRate, logfilePrefix) defer cleanup() if err != nil { return err } // collect the filters for the responses responseFilters, err := setupResponseFilters(opts) if err != nil { return err } // setup the pipeline for the values vch := make(chan string, opts.BufferSize) var valueCh <-chan string = vch cch := make(chan int, 1) var countCh <-chan int = cch // start a producer from the options err = setupProducer(ctx, g, opts, vch, cch) if err != nil { return err } // filter values (skip, limit) valueCh, countCh = setupValueFilters(ctx, opts, valueCh, countCh) // limit the 
throughput (if requested) if opts.RequestsPerSecond > 0 { valueCh = producer.Limit(ctx, opts.RequestsPerSecond, valueCh) } // start the runners responseCh, err := startRunners(ctx, opts, valueCh) if err != nil { return err } // filter the responses responseCh = response.Mark(responseCh, responseFilters) // extract data from all interesting (non-hidden) responses extracter := &response.Extracter{ Pattern: opts.extract, Commands: opts.extractPipe, Error: func(err error) { term.Printf("%v", err) }, } responseCh = extracter.Run(responseCh) if logfilePrefix != "" { rec, err := recorder.New(logfilePrefix+".json", opts.Request) if err != nil { return err } // fill in information for generating the request rec.Data.InputFile = opts.Filename rec.Data.Ranges = opts.Range rec.Data.RangeFormat = opts.RangeFormat rec.Data.Extract = opts.Extract rec.Data.ExtractPipe = opts.ExtractPipe out := make(chan response.Response) in := responseCh responseCh = out outCount := make(chan int) inCount := countCh countCh = outCount g.Go(func() error { return rec.Run(ctx, in, out, inCount, outCount) }) } // run the reporter term.Printf(reporter.Bold("Target URL:")+" %v\n\n", inputURL) reporter := reporter.New(term) return reporter.Display(responseCh, countCh) }
// write copies of messages to logfile term = &cli.LogTerminal{ Terminal: statusTerm, Writer: logfile,
random_line_split
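The setupTerminal code in the sample above tees every printed message into a logfile via cli.LogTerminal, which wraps the status terminal together with the logfile writer. A minimal standalone sketch of the same tee pattern using only the standard library (the filename and message below are illustrative, not taken from monsoon):

package main

import (
	"fmt"
	"io"
	"log"
	"os"
)

func main() {
	// Open a logfile alongside normal terminal output, as setupTerminal does
	// when --logfile or --logdir is given (the path here is a placeholder).
	logfile, err := os.Create("monsoon_example.log")
	if err != nil {
		log.Fatal(err)
	}
	defer logfile.Close()

	// cli.LogTerminal pairs the status terminal with a writer; the generic
	// equivalent of "write copies of messages to logfile" is an io.MultiWriter.
	out := io.MultiWriter(os.Stdout, logfile)
	fmt.Fprintln(out, "Target URL: http://example.com")
}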
main.go
package fuzz import ( "context" "errors" "fmt" "log" "net/http" "net/url" "os" "path/filepath" "regexp" "strconv" "sync" "time" "github.com/RedTeamPentesting/monsoon/cli" "github.com/RedTeamPentesting/monsoon/producer" "github.com/RedTeamPentesting/monsoon/recorder" "github.com/RedTeamPentesting/monsoon/reporter" "github.com/RedTeamPentesting/monsoon/request" "github.com/RedTeamPentesting/monsoon/response" "github.com/RedTeamPentesting/monsoon/shell" "github.com/fd0/termstatus" "github.com/spf13/cobra" "golang.org/x/sync/errgroup" ) // Options collect options for a run. type Options struct { Range []string RangeFormat string Filename string Logfile string Logdir string Threads int RequestsPerSecond float64 BufferSize int Skip int Limit int Request *request.Request // the template for the HTTP request FollowRedirect int HideStatusCodes []string ShowStatusCodes []string HideHeaderSize []string HideBodySize []string HidePattern []string hidePattern []*regexp.Regexp ShowPattern []string showPattern []*regexp.Regexp Extract []string extract []*regexp.Regexp ExtractPipe []string extractPipe [][]string MaxBodySize int } var opts Options func compileRegexps(pattern []string) (res []*regexp.Regexp, err error) { for _, pat := range pattern { r, err := regexp.Compile(pat) if err != nil { return nil, fmt.Errorf("regexp %q failed to compile: %v", pat, err) } res = append(res, r) } return res, nil } func splitShell(cmds []string) ([][]string, error) { var data [][]string for _, cmd := range cmds { args, err := shell.Split(cmd) if err != nil { return nil, err } if len(args) < 1 { return nil, fmt.Errorf("invalid command: %q", cmd) } data = append(data, args) } return data, nil } // valid validates the options and returns an error if something is invalid. func (opts *Options) valid() (err error) { if opts.Threads <= 0 { return errors.New("invalid number of threads") } if len(opts.Range) > 0 && opts.Filename != "" { return errors.New("only one source allowed but both range and filename specified") } if len(opts.Range) == 0 && opts.Filename == "" { return errors.New("neither file nor range specified, nothing to do") } opts.extract, err = compileRegexps(opts.Extract) if err != nil { return err } opts.extractPipe, err = splitShell(opts.ExtractPipe) if err != nil { return err } opts.hidePattern, err = compileRegexps(opts.HidePattern) if err != nil { return err } opts.showPattern, err = compileRegexps(opts.ShowPattern) if err != nil { return err } return nil } var cmd = &cobra.Command{ Use: "fuzz [options] URL", DisableFlagsInUseLine: true, Short: helpShort, Long: helpLong, Example: helpExamples, RunE: func(cmd *cobra.Command, args []string) error { return cli.WithContext(func(ctx context.Context, g *errgroup.Group) error { return run(ctx, g, &opts, args) }) }, } // AddCommand adds the 'run' command to cmd. 
func AddCommand(c *cobra.Command) { c.AddCommand(cmd) fs := cmd.Flags() fs.SortFlags = false fs.StringSliceVarP(&opts.Range, "range", "r", nil, "set range `from-to`") fs.StringVar(&opts.RangeFormat, "range-format", "%d", "set `format` for range") fs.StringVarP(&opts.Filename, "file", "f", "", "read values from `filename`") fs.StringVar(&opts.Logfile, "logfile", "", "write copy of printed messages to `filename`.log") fs.StringVar(&opts.Logdir, "logdir", os.Getenv("MONSOON_LOG_DIR"), "automatically log all output to files in `dir`") fs.IntVarP(&opts.Threads, "threads", "t", 5, "make as many as `n` parallel requests") fs.IntVar(&opts.BufferSize, "buffer-size", 100000, "set number of buffered items to `n`") fs.IntVar(&opts.Skip, "skip", 0, "skip the first `n` requests") fs.IntVar(&opts.Limit, "limit", 0, "only run `n` requests, then exit") fs.Float64Var(&opts.RequestsPerSecond, "requests-per-second", 0, "do at most `n` requests per second (e.g. 0.5)") // add all options to define a request opts.Request = request.New("") request.AddFlags(opts.Request, fs) fs.IntVar(&opts.FollowRedirect, "follow-redirect", 0, "follow `n` redirects") fs.StringSliceVar(&opts.HideStatusCodes, "hide-status", nil, "hide responses with this status `code,[code-code],[-code],[...]`") fs.StringSliceVar(&opts.ShowStatusCodes, "show-status", nil, "show only responses with this status `code,[code-code],[code-],[...]`") fs.StringSliceVar(&opts.HideHeaderSize, "hide-header-size", nil, "hide responses with this header size (`size,from-to,from-,-to`)") fs.StringSliceVar(&opts.HideBodySize, "hide-body-size", nil, "hide responses with this body size (`size,from-to,from-,-to`)") fs.StringArrayVar(&opts.HidePattern, "hide-pattern", nil, "hide responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.ShowPattern, "show-pattern", nil, "show only responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.Extract, "extract", nil, "extract `regex` from response body (can be specified multiple times)") fs.StringArrayVar(&opts.ExtractPipe, "extract-pipe", nil, "pipe response body to `cmd` to extract data (can be specified multiple times)") fs.IntVar(&opts.MaxBodySize, "max-body-size", 5, "read at most `n` MiB from a returned response body (used for extracting data from the body)") } // logfilePath returns the prefix for the logfiles, if any. 
func logfilePath(opts *Options, inputURL string) (prefix string, err error) { if opts.Logdir != "" && opts.Logfile == "" { url, err := url.Parse(inputURL) if err != nil { return "", err } ts := time.Now().Format("20060102_150405") fn := fmt.Sprintf("monsoon_%s_%s", url.Host, ts) p := filepath.Join(opts.Logdir, fn) return p, nil } return opts.Logfile, nil } func setupProducer(ctx context.Context, g *errgroup.Group, opts *Options, ch chan<- string, count chan<- int) error { switch { case len(opts.Range) > 0: var ranges []producer.Range for _, r := range opts.Range { rng, err := producer.ParseRange(r) if err != nil { return err } ranges = append(ranges, rng) } g.Go(func() error { return producer.Ranges(ctx, ranges, opts.RangeFormat, ch, count) }) return nil case opts.Filename == "-": g.Go(func() error { return producer.Reader(ctx, os.Stdin, ch, count) }) return nil case opts.Filename != "": file, err := os.Open(opts.Filename) if err != nil { return err } g.Go(func() error { return producer.Reader(ctx, file, ch, count) }) return nil default: return errors.New("neither file nor range specified, nothing to do") } } func setupTerminal(ctx context.Context, g *errgroup.Group, maxFrameRate uint, logfilePrefix string) (term cli.Terminal, cleanup func(), err error) { ctx, cancel := context.WithCancel(context.Background()) statusTerm := termstatus.New(os.Stdout, os.Stderr, false) if maxFrameRate != 0 { statusTerm.MaxFrameRate = maxFrameRate } term = statusTerm if logfilePrefix != "" { fmt.Printf(reporter.Bold("Logfile:")+" %s.log\n", logfilePrefix) logfile, err := os.Create(logfilePrefix + ".log") if err != nil { return nil, cancel, err } fmt.Fprintln(logfile, shell.Join(os.Args)) // write copies of messages to logfile term = &cli.LogTerminal{ Terminal: statusTerm, Writer: logfile, } } // make sure error messages logged via the log package are printed nicely w := cli.NewStdioWrapper(term) log.SetOutput(w.Stderr()) g.Go(func() error { term.Run(ctx) return nil }) return term, cancel, nil } func setupResponseFilters(opts *Options) ([]response.Filter, error) { var filters []response.Filter filter, err := response.NewFilterStatusCode(opts.HideStatusCodes, opts.ShowStatusCodes) if err != nil { return nil, err } filters = append(filters, filter) if len(opts.HideHeaderSize) > 0 || len(opts.HideBodySize) > 0 { f, err := response.NewFilterSize(opts.HideHeaderSize, opts.HideBodySize) if err != nil { return nil, err } filters = append(filters, f) } if len(opts.hidePattern) > 0 { filters = append(filters, response.FilterRejectPattern{Pattern: opts.hidePattern}) } if len(opts.showPattern) > 0 { filters = append(filters, response.FilterAcceptPattern{Pattern: opts.showPattern}) } return filters, nil } func setupValueFilters(ctx context.Context, opts *Options, valueCh <-chan string, countCh <-chan int) (<-chan string, <-chan int) { if opts.Skip > 0 { f := &producer.FilterSkip{Skip: opts.Skip} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } if opts.Limit > 0 { f := &producer.FilterLimit{Max: opts.Limit} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } return valueCh, countCh } func startRunners(ctx context.Context, opts *Options, in <-chan string) (<-chan response.Response, error) { out := make(chan response.Response) var wg sync.WaitGroup transport, err := response.NewTransport(opts.Request.Insecure, opts.Request.TLSClientKeyCertFile, opts.Request.DisableHTTP2, opts.Threads) if err != nil { return nil, err } for i := 0; i < opts.Threads; i++ { runner := response.NewRunner(transport, 
opts.Request, in, out) runner.MaxBodySize = opts.MaxBodySize * 1024 * 1024 runner.Extract = opts.extract runner.Client.CheckRedirect = func(req *http.Request, via []*http.Request) error { if len(via) <= opts.FollowRedirect { return nil } return http.ErrUseLastResponse } wg.Add(1) go func() { runner.Run(ctx) wg.Done() }() } go func() { // wait until the runners are done, then close the output channel wg.Wait() close(out) }() return out, nil } func
(ctx context.Context, g *errgroup.Group, opts *Options, args []string) error { // make sure the options and arguments are valid if len(args) == 0 { return errors.New("last argument needs to be the URL") } if len(args) > 1 { return errors.New("more than one target URL specified") } err := opts.valid() if err != nil { return err } inputURL := args[0] opts.Request.URL = inputURL // setup logging and the terminal logfilePrefix, err := logfilePath(opts, inputURL) if err != nil { return err } var maxFrameRate uint if s, ok := os.LookupEnv("MONSOON_PROGRESS_FPS"); ok { rate, err := strconv.ParseUint(s, 10, 32) if err != nil { return fmt.Errorf("parse $MONSOON_PROGRESS_FPS: %w", err) } maxFrameRate = uint(rate) } term, cleanup, err := setupTerminal(ctx, g, maxFrameRate, logfilePrefix) defer cleanup() if err != nil { return err } // collect the filters for the responses responseFilters, err := setupResponseFilters(opts) if err != nil { return err } // setup the pipeline for the values vch := make(chan string, opts.BufferSize) var valueCh <-chan string = vch cch := make(chan int, 1) var countCh <-chan int = cch // start a producer from the options err = setupProducer(ctx, g, opts, vch, cch) if err != nil { return err } // filter values (skip, limit) valueCh, countCh = setupValueFilters(ctx, opts, valueCh, countCh) // limit the throughput (if requested) if opts.RequestsPerSecond > 0 { valueCh = producer.Limit(ctx, opts.RequestsPerSecond, valueCh) } // start the runners responseCh, err := startRunners(ctx, opts, valueCh) if err != nil { return err } // filter the responses responseCh = response.Mark(responseCh, responseFilters) // extract data from all interesting (non-hidden) responses extracter := &response.Extracter{ Pattern: opts.extract, Commands: opts.extractPipe, Error: func(err error) { term.Printf("%v", err) }, } responseCh = extracter.Run(responseCh) if logfilePrefix != "" { rec, err := recorder.New(logfilePrefix+".json", opts.Request) if err != nil { return err } // fill in information for generating the request rec.Data.InputFile = opts.Filename rec.Data.Ranges = opts.Range rec.Data.RangeFormat = opts.RangeFormat rec.Data.Extract = opts.Extract rec.Data.ExtractPipe = opts.ExtractPipe out := make(chan response.Response) in := responseCh responseCh = out outCount := make(chan int) inCount := countCh countCh = outCount g.Go(func() error { return rec.Run(ctx, in, out, inCount, outCount) }) } // run the reporter term.Printf(reporter.Bold("Target URL:")+" %v\n\n", inputURL) reporter := reporter.New(term) return reporter.Display(responseCh, countCh) }
run
identifier_name
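startRunners in the sample above caps redirect following with http.Client's CheckRedirect hook: while the redirect chain is within the --follow-redirect budget it returns nil, and once the budget is exceeded it returns http.ErrUseLastResponse so the client hands back the last redirect response instead of chasing it. A self-contained sketch of that pattern (the URL and redirect budget are placeholders):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	maxRedirects := 2
	client := &http.Client{
		// Mirrors startRunners: allow up to maxRedirects hops, then stop
		// and return the last (redirect) response rather than an error.
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			if len(via) <= maxRedirects {
				return nil
			}
			return http.ErrUseLastResponse
		},
	}
	resp, err := client.Get("http://example.com/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("final status:", resp.Status)
}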
main.go
package fuzz import ( "context" "errors" "fmt" "log" "net/http" "net/url" "os" "path/filepath" "regexp" "strconv" "sync" "time" "github.com/RedTeamPentesting/monsoon/cli" "github.com/RedTeamPentesting/monsoon/producer" "github.com/RedTeamPentesting/monsoon/recorder" "github.com/RedTeamPentesting/monsoon/reporter" "github.com/RedTeamPentesting/monsoon/request" "github.com/RedTeamPentesting/monsoon/response" "github.com/RedTeamPentesting/monsoon/shell" "github.com/fd0/termstatus" "github.com/spf13/cobra" "golang.org/x/sync/errgroup" ) // Options collect options for a run. type Options struct { Range []string RangeFormat string Filename string Logfile string Logdir string Threads int RequestsPerSecond float64 BufferSize int Skip int Limit int Request *request.Request // the template for the HTTP request FollowRedirect int HideStatusCodes []string ShowStatusCodes []string HideHeaderSize []string HideBodySize []string HidePattern []string hidePattern []*regexp.Regexp ShowPattern []string showPattern []*regexp.Regexp Extract []string extract []*regexp.Regexp ExtractPipe []string extractPipe [][]string MaxBodySize int } var opts Options func compileRegexps(pattern []string) (res []*regexp.Regexp, err error) { for _, pat := range pattern { r, err := regexp.Compile(pat) if err != nil { return nil, fmt.Errorf("regexp %q failed to compile: %v", pat, err) } res = append(res, r) } return res, nil } func splitShell(cmds []string) ([][]string, error) { var data [][]string for _, cmd := range cmds { args, err := shell.Split(cmd) if err != nil { return nil, err } if len(args) < 1 { return nil, fmt.Errorf("invalid command: %q", cmd) } data = append(data, args) } return data, nil } // valid validates the options and returns an error if something is invalid. func (opts *Options) valid() (err error) { if opts.Threads <= 0 { return errors.New("invalid number of threads") } if len(opts.Range) > 0 && opts.Filename != "" { return errors.New("only one source allowed but both range and filename specified") } if len(opts.Range) == 0 && opts.Filename == "" { return errors.New("neither file nor range specified, nothing to do") } opts.extract, err = compileRegexps(opts.Extract) if err != nil { return err } opts.extractPipe, err = splitShell(opts.ExtractPipe) if err != nil { return err } opts.hidePattern, err = compileRegexps(opts.HidePattern) if err != nil { return err } opts.showPattern, err = compileRegexps(opts.ShowPattern) if err != nil { return err } return nil } var cmd = &cobra.Command{ Use: "fuzz [options] URL", DisableFlagsInUseLine: true, Short: helpShort, Long: helpLong, Example: helpExamples, RunE: func(cmd *cobra.Command, args []string) error { return cli.WithContext(func(ctx context.Context, g *errgroup.Group) error { return run(ctx, g, &opts, args) }) }, } // AddCommand adds the 'run' command to cmd. 
func AddCommand(c *cobra.Command) { c.AddCommand(cmd) fs := cmd.Flags() fs.SortFlags = false fs.StringSliceVarP(&opts.Range, "range", "r", nil, "set range `from-to`") fs.StringVar(&opts.RangeFormat, "range-format", "%d", "set `format` for range") fs.StringVarP(&opts.Filename, "file", "f", "", "read values from `filename`") fs.StringVar(&opts.Logfile, "logfile", "", "write copy of printed messages to `filename`.log") fs.StringVar(&opts.Logdir, "logdir", os.Getenv("MONSOON_LOG_DIR"), "automatically log all output to files in `dir`") fs.IntVarP(&opts.Threads, "threads", "t", 5, "make as many as `n` parallel requests") fs.IntVar(&opts.BufferSize, "buffer-size", 100000, "set number of buffered items to `n`") fs.IntVar(&opts.Skip, "skip", 0, "skip the first `n` requests") fs.IntVar(&opts.Limit, "limit", 0, "only run `n` requests, then exit") fs.Float64Var(&opts.RequestsPerSecond, "requests-per-second", 0, "do at most `n` requests per second (e.g. 0.5)") // add all options to define a request opts.Request = request.New("") request.AddFlags(opts.Request, fs) fs.IntVar(&opts.FollowRedirect, "follow-redirect", 0, "follow `n` redirects") fs.StringSliceVar(&opts.HideStatusCodes, "hide-status", nil, "hide responses with this status `code,[code-code],[-code],[...]`") fs.StringSliceVar(&opts.ShowStatusCodes, "show-status", nil, "show only responses with this status `code,[code-code],[code-],[...]`") fs.StringSliceVar(&opts.HideHeaderSize, "hide-header-size", nil, "hide responses with this header size (`size,from-to,from-,-to`)") fs.StringSliceVar(&opts.HideBodySize, "hide-body-size", nil, "hide responses with this body size (`size,from-to,from-,-to`)") fs.StringArrayVar(&opts.HidePattern, "hide-pattern", nil, "hide responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.ShowPattern, "show-pattern", nil, "show only responses containing `regex` in response header or body (can be specified multiple times)") fs.StringArrayVar(&opts.Extract, "extract", nil, "extract `regex` from response body (can be specified multiple times)") fs.StringArrayVar(&opts.ExtractPipe, "extract-pipe", nil, "pipe response body to `cmd` to extract data (can be specified multiple times)") fs.IntVar(&opts.MaxBodySize, "max-body-size", 5, "read at most `n` MiB from a returned response body (used for extracting data from the body)") } // logfilePath returns the prefix for the logfiles, if any. 
func logfilePath(opts *Options, inputURL string) (prefix string, err error) { if opts.Logdir != "" && opts.Logfile == "" { url, err := url.Parse(inputURL) if err != nil { return "", err } ts := time.Now().Format("20060102_150405") fn := fmt.Sprintf("monsoon_%s_%s", url.Host, ts) p := filepath.Join(opts.Logdir, fn) return p, nil } return opts.Logfile, nil } func setupProducer(ctx context.Context, g *errgroup.Group, opts *Options, ch chan<- string, count chan<- int) error { switch { case len(opts.Range) > 0: var ranges []producer.Range for _, r := range opts.Range { rng, err := producer.ParseRange(r) if err != nil { return err } ranges = append(ranges, rng) } g.Go(func() error { return producer.Ranges(ctx, ranges, opts.RangeFormat, ch, count) }) return nil case opts.Filename == "-": g.Go(func() error { return producer.Reader(ctx, os.Stdin, ch, count) }) return nil case opts.Filename != "": file, err := os.Open(opts.Filename) if err != nil { return err } g.Go(func() error { return producer.Reader(ctx, file, ch, count) }) return nil default: return errors.New("neither file nor range specified, nothing to do") } } func setupTerminal(ctx context.Context, g *errgroup.Group, maxFrameRate uint, logfilePrefix string) (term cli.Terminal, cleanup func(), err error) { ctx, cancel := context.WithCancel(context.Background()) statusTerm := termstatus.New(os.Stdout, os.Stderr, false) if maxFrameRate != 0 { statusTerm.MaxFrameRate = maxFrameRate } term = statusTerm if logfilePrefix != "" { fmt.Printf(reporter.Bold("Logfile:")+" %s.log\n", logfilePrefix) logfile, err := os.Create(logfilePrefix + ".log") if err != nil { return nil, cancel, err } fmt.Fprintln(logfile, shell.Join(os.Args)) // write copies of messages to logfile term = &cli.LogTerminal{ Terminal: statusTerm, Writer: logfile, } } // make sure error messages logged via the log package are printed nicely w := cli.NewStdioWrapper(term) log.SetOutput(w.Stderr()) g.Go(func() error { term.Run(ctx) return nil }) return term, cancel, nil } func setupResponseFilters(opts *Options) ([]response.Filter, error) { var filters []response.Filter filter, err := response.NewFilterStatusCode(opts.HideStatusCodes, opts.ShowStatusCodes) if err != nil { return nil, err } filters = append(filters, filter) if len(opts.HideHeaderSize) > 0 || len(opts.HideBodySize) > 0 { f, err := response.NewFilterSize(opts.HideHeaderSize, opts.HideBodySize) if err != nil { return nil, err } filters = append(filters, f) } if len(opts.hidePattern) > 0 { filters = append(filters, response.FilterRejectPattern{Pattern: opts.hidePattern}) } if len(opts.showPattern) > 0 { filters = append(filters, response.FilterAcceptPattern{Pattern: opts.showPattern}) } return filters, nil } func setupValueFilters(ctx context.Context, opts *Options, valueCh <-chan string, countCh <-chan int) (<-chan string, <-chan int) { if opts.Skip > 0 { f := &producer.FilterSkip{Skip: opts.Skip} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } if opts.Limit > 0 { f := &producer.FilterLimit{Max: opts.Limit} countCh = f.Count(ctx, countCh) valueCh = f.Select(ctx, valueCh) } return valueCh, countCh } func startRunners(ctx context.Context, opts *Options, in <-chan string) (<-chan response.Response, error) { out := make(chan response.Response) var wg sync.WaitGroup transport, err := response.NewTransport(opts.Request.Insecure, opts.Request.TLSClientKeyCertFile, opts.Request.DisableHTTP2, opts.Threads) if err != nil { return nil, err } for i := 0; i < opts.Threads; i++ { runner := response.NewRunner(transport, 
opts.Request, in, out) runner.MaxBodySize = opts.MaxBodySize * 1024 * 1024 runner.Extract = opts.extract runner.Client.CheckRedirect = func(req *http.Request, via []*http.Request) error { if len(via) <= opts.FollowRedirect { return nil } return http.ErrUseLastResponse } wg.Add(1) go func() { runner.Run(ctx) wg.Done() }() } go func() { // wait until the runners are done, then close the output channel wg.Wait() close(out) }() return out, nil } func run(ctx context.Context, g *errgroup.Group, opts *Options, args []string) error { // make sure the options and arguments are valid if len(args) == 0 { return errors.New("last argument needs to be the URL") } if len(args) > 1 { return errors.New("more than one target URL specified") } err := opts.valid() if err != nil { return err } inputURL := args[0] opts.Request.URL = inputURL // setup logging and the terminal logfilePrefix, err := logfilePath(opts, inputURL) if err != nil
var maxFrameRate uint if s, ok := os.LookupEnv("MONSOON_PROGRESS_FPS"); ok { rate, err := strconv.ParseUint(s, 10, 32) if err != nil { return fmt.Errorf("parse $MONSOON_PROGRESS_FPS: %w", err) } maxFrameRate = uint(rate) } term, cleanup, err := setupTerminal(ctx, g, maxFrameRate, logfilePrefix) defer cleanup() if err != nil { return err } // collect the filters for the responses responseFilters, err := setupResponseFilters(opts) if err != nil { return err } // setup the pipeline for the values vch := make(chan string, opts.BufferSize) var valueCh <-chan string = vch cch := make(chan int, 1) var countCh <-chan int = cch // start a producer from the options err = setupProducer(ctx, g, opts, vch, cch) if err != nil { return err } // filter values (skip, limit) valueCh, countCh = setupValueFilters(ctx, opts, valueCh, countCh) // limit the throughput (if requested) if opts.RequestsPerSecond > 0 { valueCh = producer.Limit(ctx, opts.RequestsPerSecond, valueCh) } // start the runners responseCh, err := startRunners(ctx, opts, valueCh) if err != nil { return err } // filter the responses responseCh = response.Mark(responseCh, responseFilters) // extract data from all interesting (non-hidden) responses extracter := &response.Extracter{ Pattern: opts.extract, Commands: opts.extractPipe, Error: func(err error) { term.Printf("%v", err) }, } responseCh = extracter.Run(responseCh) if logfilePrefix != "" { rec, err := recorder.New(logfilePrefix+".json", opts.Request) if err != nil { return err } // fill in information for generating the request rec.Data.InputFile = opts.Filename rec.Data.Ranges = opts.Range rec.Data.RangeFormat = opts.RangeFormat rec.Data.Extract = opts.Extract rec.Data.ExtractPipe = opts.ExtractPipe out := make(chan response.Response) in := responseCh responseCh = out outCount := make(chan int) inCount := countCh countCh = outCount g.Go(func() error { return rec.Run(ctx, in, out, inCount, outCount) }) } // run the reporter term.Printf(reporter.Bold("Target URL:")+" %v\n\n", inputURL) reporter := reporter.New(term) return reporter.Display(responseCh, countCh) }
{ return err }
conditional_block
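setupValueFilters in the sample above honours --skip and --limit by inserting producer.FilterSkip and producer.FilterLimit stages into the value channel. Those types belong to the monsoon producer package; the sketch below shows the same skip/limit idea with plain channel stages, assuming nothing beyond the standard library:

package main

import "fmt"

// skip drops the first n values from in; a generic stand-in for producer.FilterSkip.
func skip(in <-chan string, n int) <-chan string {
	out := make(chan string)
	go func() {
		defer close(out)
		seen := 0
		for v := range in {
			if seen < n {
				seen++
				continue
			}
			out <- v
		}
	}()
	return out
}

// limit forwards at most n values from in; a generic stand-in for producer.FilterLimit.
func limit(in <-chan string, n int) <-chan string {
	out := make(chan string)
	go func() {
		defer close(out)
		sent := 0
		for v := range in {
			if sent >= n {
				continue // keep draining so the upstream stage is not blocked
			}
			out <- v
			sent++
		}
	}()
	return out
}

func main() {
	in := make(chan string)
	go func() {
		defer close(in)
		for i := 0; i < 10; i++ {
			in <- fmt.Sprintf("value-%d", i)
		}
	}()
	for v := range limit(skip(in, 3), 4) {
		fmt.Println(v) // prints value-3 through value-6
	}
}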
resp.go
/* Copyright 2019 yametech. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package canal import ( "bytes" "errors" "fmt" "io" "strconv" "strings" ) //const bufsz = 4096 // Type represents a Value type type Type byte const ( SimpleString Type = '+' Error Type = '-' Integer Type = ':' BulkString Type = '$' Array Type = '*' Rdb Type = 'R' ) // TypeName returns name of the underlying RESP type. func (t Type) String() string { switch t { default: return "Unknown" case '+': return "SimpleString" case '-': return "Error" case ':': return "Integer" case '$': return "BulkString" case '*': return "Array" case 'R': return "RDB" } } // Value represents the data of a valid RESP type. type Value struct { Typ Type IntegerV int Str []byte ArrayV []Value Null bool RDB bool Size int } func (v Value) ReplInfo() (runID string, offset int64) { if v.Type() != Rdb { return } buf := bytes.Split(v.Str, []byte(" ")) if len(buf) < 3 { return } _offset, err := strconv.ParseInt(string(buf[2]), 10, 64) if err != nil { return } return string(buf[1]), _offset
switch v.Typ { default: n, _ := strconv.ParseInt(v.String(), 10, 64) return int(n) case ':': return v.IntegerV } } // String converts Value to a string. func (v Value) String() string { if v.Typ == '$' { return string(v.Str) } switch v.Typ { case '+', '-': return string(v.Str) case ':': return strconv.FormatInt(int64(v.IntegerV), 10) case '*': buf := bytes.NewBuffer(nil) concatArray(buf, v.ArrayV...) return strings.TrimSuffix(buf.String(), " ") case '\r': return "\r\n" } return "" } func concatArray(wr io.Writer, vs ...Value) { for i := range vs { _, err := wr.Write([]byte(vs[i].String())) if err != nil { panic(err) } _, err = wr.Write([]byte("\r\n")) if err != nil { panic(err) } concatArray(wr, vs[i].Array()...) } } // Bytes converts the Value to a byte array. An empty string is converted to a non-nil empty byte array. // If it's a RESP Null value, nil is returned. func (v Value) Bytes() []byte { switch v.Typ { default: return []byte(v.String()) case '$', '+', '-': return v.Str } } // Float converts Value to a float64. If Value cannot be converted // Zero is returned. func (v Value) Float() float64 { switch v.Typ { default: f, _ := strconv.ParseFloat(v.String(), 64) return f case ':': return float64(v.IntegerV) } } // IsNull indicates whether or not the base value is null. func (v Value) IsNull() bool { return v.Null } // Bool converts Value to an bool. If Value cannot be converted, false is returned. func (v Value) Bool() bool { return v.Integer() != 0 } // Error converts the Value to an error. If Value is not an error, nil is returned. func (v Value) Error() error { switch v.Typ { case '-': return errors.New(string(v.Str)) } return nil } // Array converts the Value to a an array. // If Value is not an array or when it's is a RESP Null value, nil is returned. func (v Value) Array() []Value { if v.Typ == '*' && !v.Null { return v.ArrayV } return nil } // Type returns the underlying RESP type. // The following types are represent valid RESP values. func (v Value) Type() Type { return v.Typ } func marshalSimpleRESP(typ Type, b []byte) ([]byte, error) { bb := make([]byte, 3+len(b)) bb[0] = byte(typ) copy(bb[1:], b) bb[1+len(b)+0] = '\r' bb[1+len(b)+1] = '\n' return bb, nil } func marshalBulkRESP(v Value) ([]byte, error) { if v.Null { return []byte("$-1\r\n"), nil } szb := []byte(strconv.FormatInt(int64(len(v.Str)), 10)) bb := make([]byte, 5+len(szb)+len(v.Str)) bb[0] = '$' copy(bb[1:], szb) bb[1+len(szb)+0] = '\r' bb[1+len(szb)+1] = '\n' copy(bb[1+len(szb)+2:], v.Str) bb[1+len(szb)+2+len(v.Str)+0] = '\r' bb[1+len(szb)+2+len(v.Str)+1] = '\n' return bb, nil } func marshalArrayRESP(v Value) ([]byte, error) { if v.Null { return []byte("*-1\r\n"), nil } szb := []byte(strconv.FormatInt(int64(len(v.ArrayV)), 10)) var buf bytes.Buffer buf.Grow(3 + len(szb) + 16*len(v.ArrayV)) // prime the buffer buf.WriteByte('*') buf.Write(szb) buf.WriteByte('\r') buf.WriteByte('\n') for i := 0; i < len(v.ArrayV); i++ { data, err := v.ArrayV[i].MarshalRESP() if err != nil { return nil, err } buf.Write(data) } return buf.Bytes(), nil } func marshalAnyRESP(v Value) ([]byte, error) { switch v.Typ { default: if v.Typ == 0 && v.Null { return []byte("$-1\r\n"), nil } return nil, errors.New("unknown resp type encountered") case '-', '+': return marshalSimpleRESP(v.Typ, v.Str) case ':': return marshalSimpleRESP(v.Typ, []byte(strconv.FormatInt(int64(v.IntegerV), 10))) case '$': return marshalBulkRESP(v) case '*': return marshalArrayRESP(v) } } // Equals compares one value to another value. 
func (v Value) Equals(value Value) bool { data1, err := v.MarshalRESP() if err != nil { return false } data2, err := value.MarshalRESP() if err != nil { return false } return string(data1) == string(data2) } // MarshalRESP returns the original serialized byte representation of Value. // For more information on this format please see http://redis.io/topics/protocol. func (v Value) MarshalRESP() ([]byte, error) { return marshalAnyRESP(v) } var NilValue = Value{Null: true} type ErrProtocol struct{ Msg string } func (err ErrProtocol) Error() string { return "Protocol error: " + err.Msg } // AnyValue returns a RESP value from an interface. // This function infers the types. Arrays are not allowed. func AnyValue(v interface{}) Value { switch v := v.(type) { default: return StringValue(fmt.Sprintf("%v", v)) case nil: return NullValue() case int: return IntegerValue(int(v)) case uint: return IntegerValue(int(v)) case int8: return IntegerValue(int(v)) case uint8: return IntegerValue(int(v)) case int16: return IntegerValue(int(v)) case uint16: return IntegerValue(int(v)) case int32: return IntegerValue(int(v)) case uint32: return IntegerValue(int(v)) case int64: return IntegerValue(int(v)) case uint64: return IntegerValue(int(v)) case bool: return BoolValue(v) case float32: return FloatValue(float64(v)) case float64: return FloatValue(float64(v)) case []byte: return BytesValue(v) case string: return StringValue(v) } } // SimpleStringValue returns a RESP simple string. A simple string has no new lines. The carriage return and new line characters are replaced with spaces. func SimpleStringValue(s string) Value { return Value{Typ: '+', Str: []byte(formSingleLine(s))} } // BytesValue returns a RESP bulk string. A bulk string can represent any data. func BytesValue(b []byte) Value { return Value{Typ: '$', Str: b} } // StringValue returns a RESP bulk string. A bulk string can represent any data. func StringValue(s string) Value { return Value{Typ: '$', Str: []byte(s)} } // NullValue returns a RESP null bulk string. func NullValue() Value { return Value{Typ: '$', Null: true} } // ErrorValue returns a RESP error. func ErrorValue(err error) Value { if err == nil { return Value{Typ: '-'} } return Value{Typ: '-', Str: []byte(err.Error())} } // IntegerValue returns a RESP integer. func IntegerValue(i int) Value { return Value{Typ: ':', IntegerV: i} } // BoolValue returns a RESP integer representation of a bool. func BoolValue(t bool) Value { if t { return Value{Typ: ':', IntegerV: 1} } return Value{Typ: ':', IntegerV: 0} } // FloatValue returns a RESP bulk string representation of a float. func FloatValue(f float64) Value { return StringValue(strconv.FormatFloat(f, 'f', -1, 64)) } // ArrayValue returns a RESP array. func ArrayValue(vals []Value) Value { return Value{Typ: '*', ArrayV: vals} } func formSingleLine(s string) string { bs1 := []byte(s) for i := 0; i < len(bs1); i++ { switch bs1[i] { case '\r', '\n': bs2 := make([]byte, len(bs1)) copy(bs2, bs1) bs2[i] = ' ' i++ for ; i < len(bs2); i++ { switch bs1[i] { case '\r', '\n': bs2[i] = ' ' } } return string(bs2) } } return s } // MultiBulkValue returns a RESP array which contains one or more bulk strings. // For more information on RESP arrays and strings please see http://redis.io/topics/protocol. 
func MultiBulkValue(commandName string, args ...interface{}) Value { vals := make([]Value, len(args)+1) vals[0] = StringValue(commandName) for i, arg := range args { if rval, ok := arg.(Value); ok && rval.Type() == BulkString { vals[i+1] = rval continue } switch arg := arg.(type) { default: vals[i+1] = StringValue(fmt.Sprintf("%v", arg)) case []byte: vals[i+1] = StringValue(string(arg)) case string: vals[i+1] = StringValue(arg) case nil: vals[i+1] = NullValue() } } return ArrayValue(vals) } func MultiBulkBytes(val Value) ([]byte, int) { buf := bytes.NewBuffer(nil) switch val.Typ { case '+', '-': buf.WriteByte(byte(val.Typ)) buf.WriteString(val.String()) buf.Write([]byte{'\r', '\n'}) return buf.Bytes(), len(buf.Bytes()) case '$', ':': buf.WriteByte(byte(val.Typ)) buf.WriteString(fmt.Sprintf("%d", len(val.String()))) buf.Write([]byte{'\r', '\n'}) buf.WriteString(val.String()) buf.Write([]byte{'\r', '\n'}) return buf.Bytes(), len(buf.Bytes()) case '*': buf.WriteByte(byte(val.Typ)) length := len(val.ArrayV) buf.WriteString(fmt.Sprintf("%d", length)) buf.Write([]byte{'\r', '\n'}) for i := range val.ArrayV { bs, _ := MultiBulkBytes(val.ArrayV[i]) buf.Write(bs) } return buf.Bytes(), buf.Len() } return []byte{}, 0 }
} // Integer converts Value to an int. If Value cannot be converted, Zero is returned. func (v Value) Integer() int {
random_line_split
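marshalBulkRESP in the sample above emits the RESP bulk-string wire format: a '$' header carrying the payload length, then the payload, each terminated by CRLF, with "$-1\r\n" reserved for the null value. A small standalone encoder illustrating that layout (encodeBulkString is a hypothetical helper, not part of the canal package):

package main

import (
	"fmt"
	"strconv"
)

// encodeBulkString mirrors marshalBulkRESP: "$<len>\r\n<payload>\r\n",
// or "$-1\r\n" for the RESP null bulk string.
func encodeBulkString(s []byte, null bool) []byte {
	if null {
		return []byte("$-1\r\n")
	}
	out := []byte{'$'}
	out = append(out, strconv.Itoa(len(s))...)
	out = append(out, '\r', '\n')
	out = append(out, s...)
	out = append(out, '\r', '\n')
	return out
}

func main() {
	fmt.Printf("%q\n", encodeBulkString([]byte("hello"), false)) // "$5\r\nhello\r\n"
	fmt.Printf("%q\n", encodeBulkString(nil, true))              // "$-1\r\n"
}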
resp.go
/* Copyright 2019 yametech. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package canal import ( "bytes" "errors" "fmt" "io" "strconv" "strings" ) //const bufsz = 4096 // Type represents a Value type type Type byte const ( SimpleString Type = '+' Error Type = '-' Integer Type = ':' BulkString Type = '$' Array Type = '*' Rdb Type = 'R' ) // TypeName returns name of the underlying RESP type. func (t Type) String() string { switch t { default: return "Unknown" case '+': return "SimpleString" case '-': return "Error" case ':': return "Integer" case '$': return "BulkString" case '*': return "Array" case 'R': return "RDB" } } // Value represents the data of a valid RESP type. type Value struct { Typ Type IntegerV int Str []byte ArrayV []Value Null bool RDB bool Size int } func (v Value) ReplInfo() (runID string, offset int64) { if v.Type() != Rdb { return } buf := bytes.Split(v.Str, []byte(" ")) if len(buf) < 3 { return } _offset, err := strconv.ParseInt(string(buf[2]), 10, 64) if err != nil { return } return string(buf[1]), _offset } // Integer converts Value to an int. If Value cannot be converted, Zero is returned. func (v Value) Integer() int { switch v.Typ { default: n, _ := strconv.ParseInt(v.String(), 10, 64) return int(n) case ':': return v.IntegerV } } // String converts Value to a string. func (v Value) String() string { if v.Typ == '$' { return string(v.Str) } switch v.Typ { case '+', '-': return string(v.Str) case ':': return strconv.FormatInt(int64(v.IntegerV), 10) case '*': buf := bytes.NewBuffer(nil) concatArray(buf, v.ArrayV...) return strings.TrimSuffix(buf.String(), " ") case '\r': return "\r\n" } return "" } func concatArray(wr io.Writer, vs ...Value) { for i := range vs { _, err := wr.Write([]byte(vs[i].String())) if err != nil { panic(err) } _, err = wr.Write([]byte("\r\n")) if err != nil { panic(err) } concatArray(wr, vs[i].Array()...) } } // Bytes converts the Value to a byte array. An empty string is converted to a non-nil empty byte array. // If it's a RESP Null value, nil is returned. func (v Value) Bytes() []byte { switch v.Typ { default: return []byte(v.String()) case '$', '+', '-': return v.Str } } // Float converts Value to a float64. If Value cannot be converted // Zero is returned. func (v Value) Float() float64 { switch v.Typ { default: f, _ := strconv.ParseFloat(v.String(), 64) return f case ':': return float64(v.IntegerV) } } // IsNull indicates whether or not the base value is null. func (v Value) IsNull() bool { return v.Null } // Bool converts Value to an bool. If Value cannot be converted, false is returned. func (v Value) Bool() bool { return v.Integer() != 0 } // Error converts the Value to an error. If Value is not an error, nil is returned. func (v Value) Error() error { switch v.Typ { case '-': return errors.New(string(v.Str)) } return nil } // Array converts the Value to a an array. // If Value is not an array or when it's is a RESP Null value, nil is returned. 
func (v Value) Array() []Value { if v.Typ == '*' && !v.Null { return v.ArrayV } return nil } // Type returns the underlying RESP type. // The following types are represent valid RESP values. func (v Value) Type() Type { return v.Typ } func marshalSimpleRESP(typ Type, b []byte) ([]byte, error) { bb := make([]byte, 3+len(b)) bb[0] = byte(typ) copy(bb[1:], b) bb[1+len(b)+0] = '\r' bb[1+len(b)+1] = '\n' return bb, nil } func marshalBulkRESP(v Value) ([]byte, error) { if v.Null { return []byte("$-1\r\n"), nil } szb := []byte(strconv.FormatInt(int64(len(v.Str)), 10)) bb := make([]byte, 5+len(szb)+len(v.Str)) bb[0] = '$' copy(bb[1:], szb) bb[1+len(szb)+0] = '\r' bb[1+len(szb)+1] = '\n' copy(bb[1+len(szb)+2:], v.Str) bb[1+len(szb)+2+len(v.Str)+0] = '\r' bb[1+len(szb)+2+len(v.Str)+1] = '\n' return bb, nil } func marshalArrayRESP(v Value) ([]byte, error) { if v.Null { return []byte("*-1\r\n"), nil } szb := []byte(strconv.FormatInt(int64(len(v.ArrayV)), 10)) var buf bytes.Buffer buf.Grow(3 + len(szb) + 16*len(v.ArrayV)) // prime the buffer buf.WriteByte('*') buf.Write(szb) buf.WriteByte('\r') buf.WriteByte('\n') for i := 0; i < len(v.ArrayV); i++ { data, err := v.ArrayV[i].MarshalRESP() if err != nil { return nil, err } buf.Write(data) } return buf.Bytes(), nil } func marshalAnyRESP(v Value) ([]byte, error) { switch v.Typ { default: if v.Typ == 0 && v.Null { return []byte("$-1\r\n"), nil } return nil, errors.New("unknown resp type encountered") case '-', '+': return marshalSimpleRESP(v.Typ, v.Str) case ':': return marshalSimpleRESP(v.Typ, []byte(strconv.FormatInt(int64(v.IntegerV), 10))) case '$': return marshalBulkRESP(v) case '*': return marshalArrayRESP(v) } } // Equals compares one value to another value. func (v Value) Equals(value Value) bool { data1, err := v.MarshalRESP() if err != nil { return false } data2, err := value.MarshalRESP() if err != nil { return false } return string(data1) == string(data2) } // MarshalRESP returns the original serialized byte representation of Value. // For more information on this format please see http://redis.io/topics/protocol. func (v Value) MarshalRESP() ([]byte, error) { return marshalAnyRESP(v) } var NilValue = Value{Null: true} type ErrProtocol struct{ Msg string } func (err ErrProtocol) Error() string { return "Protocol error: " + err.Msg } // AnyValue returns a RESP value from an interface. // This function infers the types. Arrays are not allowed. func AnyValue(v interface{}) Value { switch v := v.(type) { default: return StringValue(fmt.Sprintf("%v", v)) case nil: return NullValue() case int: return IntegerValue(int(v)) case uint: return IntegerValue(int(v)) case int8: return IntegerValue(int(v)) case uint8: return IntegerValue(int(v)) case int16: return IntegerValue(int(v)) case uint16: return IntegerValue(int(v)) case int32: return IntegerValue(int(v)) case uint32: return IntegerValue(int(v)) case int64: return IntegerValue(int(v)) case uint64: return IntegerValue(int(v)) case bool: return BoolValue(v) case float32: return FloatValue(float64(v)) case float64: return FloatValue(float64(v)) case []byte: return BytesValue(v) case string: return StringValue(v) } } // SimpleStringValue returns a RESP simple string. A simple string has no new lines. The carriage return and new line characters are replaced with spaces. func SimpleStringValue(s string) Value { return Value{Typ: '+', Str: []byte(formSingleLine(s))} } // BytesValue returns a RESP bulk string. A bulk string can represent any data. 
func BytesValue(b []byte) Value { return Value{Typ: '$', Str: b} } // StringValue returns a RESP bulk string. A bulk string can represent any data. func StringValue(s string) Value { return Value{Typ: '$', Str: []byte(s)} } // NullValue returns a RESP null bulk string. func NullValue() Value { return Value{Typ: '$', Null: true} } // ErrorValue returns a RESP error. func ErrorValue(err error) Value { if err == nil { return Value{Typ: '-'} } return Value{Typ: '-', Str: []byte(err.Error())} } // IntegerValue returns a RESP integer. func IntegerValue(i int) Value { return Value{Typ: ':', IntegerV: i} } // BoolValue returns a RESP integer representation of a bool. func BoolValue(t bool) Value { if t { return Value{Typ: ':', IntegerV: 1} } return Value{Typ: ':', IntegerV: 0} } // FloatValue returns a RESP bulk string representation of a float. func
(f float64) Value { return StringValue(strconv.FormatFloat(f, 'f', -1, 64)) } // ArrayValue returns a RESP array. func ArrayValue(vals []Value) Value { return Value{Typ: '*', ArrayV: vals} } func formSingleLine(s string) string { bs1 := []byte(s) for i := 0; i < len(bs1); i++ { switch bs1[i] { case '\r', '\n': bs2 := make([]byte, len(bs1)) copy(bs2, bs1) bs2[i] = ' ' i++ for ; i < len(bs2); i++ { switch bs1[i] { case '\r', '\n': bs2[i] = ' ' } } return string(bs2) } } return s } // MultiBulkValue returns a RESP array which contains one or more bulk strings. // For more information on RESP arrays and strings please see http://redis.io/topics/protocol. func MultiBulkValue(commandName string, args ...interface{}) Value { vals := make([]Value, len(args)+1) vals[0] = StringValue(commandName) for i, arg := range args { if rval, ok := arg.(Value); ok && rval.Type() == BulkString { vals[i+1] = rval continue } switch arg := arg.(type) { default: vals[i+1] = StringValue(fmt.Sprintf("%v", arg)) case []byte: vals[i+1] = StringValue(string(arg)) case string: vals[i+1] = StringValue(arg) case nil: vals[i+1] = NullValue() } } return ArrayValue(vals) } func MultiBulkBytes(val Value) ([]byte, int) { buf := bytes.NewBuffer(nil) switch val.Typ { case '+', '-': buf.WriteByte(byte(val.Typ)) buf.WriteString(val.String()) buf.Write([]byte{'\r', '\n'}) return buf.Bytes(), len(buf.Bytes()) case '$', ':': buf.WriteByte(byte(val.Typ)) buf.WriteString(fmt.Sprintf("%d", len(val.String()))) buf.Write([]byte{'\r', '\n'}) buf.WriteString(val.String()) buf.Write([]byte{'\r', '\n'}) return buf.Bytes(), len(buf.Bytes()) case '*': buf.WriteByte(byte(val.Typ)) length := len(val.ArrayV) buf.WriteString(fmt.Sprintf("%d", length)) buf.Write([]byte{'\r', '\n'}) for i := range val.ArrayV { bs, _ := MultiBulkBytes(val.ArrayV[i]) buf.Write(bs) } return buf.Bytes(), buf.Len() } return []byte{}, 0 }
FloatValue
identifier_name
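MultiBulkValue in the sample above builds a Redis command as a RESP array of bulk strings, which MarshalRESP then serializes as an array header followed by one bulk string per argument. The sketch below reproduces the resulting wire format for a SET command with an illustrative key and value (encodeCommand is a hypothetical stand-in, not the canal package's API):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// encodeCommand approximates what MultiBulkValue followed by MarshalRESP
// produce: "*<argc>\r\n" then "$<len>\r\n<arg>\r\n" for each argument.
func encodeCommand(args ...string) string {
	var b strings.Builder
	b.WriteString("*" + strconv.Itoa(len(args)) + "\r\n")
	for _, a := range args {
		b.WriteString("$" + strconv.Itoa(len(a)) + "\r\n" + a + "\r\n")
	}
	return b.String()
}

func main() {
	fmt.Printf("%q\n", encodeCommand("SET", "greeting", "hello"))
	// "*3\r\n$3\r\nSET\r\n$8\r\ngreeting\r\n$5\r\nhello\r\n"
}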