Dataset columns:

| Column    | Type         | Lengths / classes |
| --------- | ------------ | ----------------- |
| file_name | large_string | lengths 4–140     |
| prefix    | large_string | lengths 0–39k     |
| suffix    | large_string | lengths 0–36.1k   |
| middle    | large_string | lengths 0–29.4k   |
| fim_type  | large_string | 4 classes         |
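Each row below splits one source file into three substrings — prefix, middle, and suffix — for fill-in-the-middle (FIM) training, and fim_type names the splitting strategy; the four classes that occur in this dump are identifier_body, identifier_name, random_line_split, and conditional_block. A minimal TypeScript sketch of the row shape and the reassembly invariant the rows appear to satisfy (the record type and the sentinel tokens in toFimPrompt are illustrative assumptions, not part of the dataset):

```ts
// Illustrative row shape for this dataset (field names match the schema above;
// the type itself is an assumption, not shipped with the data).
type FimType = "identifier_body" | "identifier_name" | "random_line_split" | "conditional_block";

interface FimRow {
    file_name: string;
    prefix: string; // text before the masked span
    suffix: string; // text after the masked span
    middle: string; // the masked span a model is trained to fill in
    fim_type: FimType;
}

// Concatenating the three spans should reproduce the original file verbatim.
export function reassemble(row: FimRow): string {
    return row.prefix + row.middle + row.suffix;
}

// One common prompt layout for FIM training; the <PRE>/<SUF>/<MID> sentinels
// are placeholders, since the dataset does not prescribe a tokenization.
export function toFimPrompt(row: FimRow): string {
    return `<PRE>${row.prefix}<SUF>${row.suffix}<MID>${row.middle}`;
}
```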
---

file_name: shootout-ackermann.rs
fim_type: identifier_body

prefix:

```rust
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::os;

fn ack(m: int, n: int) -> int {
    if m == 0 {
        return n + 1
    } else {
        if n == 0 {
            return ack(m - 1, 1);
        } else {
            return ack(m - 1, ack(m, n - 1));
        }
    }
}

fn main()
```

suffix: (empty)

middle:

```rust
{
    let args = os::args();
    let args = if os::getenv("RUST_BENCH").is_some() {
        vec!("".to_owned(), "12".to_owned())
    } else if args.len() <= 1u {
        vec!("".to_owned(), "8".to_owned())
    } else {
        args.move_iter().collect()
    };
    let n = from_str::<int>(*args.get(1)).unwrap();
    println!("Ack(3,{}): {}\n", n, ack(3, n));
}
```
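For this identifier_body row, the masked middle is the body of `fn main()`, and the suffix is empty because that body runs to the end of the file. One way such a split could be carved out is brace matching from the opening brace of the definition; a hypothetical sketch (the dataset does not document its extraction tooling, and this naive version ignores braces inside strings and comments):

```ts
// Hypothetical identifier_body split: given the offset of the '{' that opens a
// definition's body, return [prefix, middle, suffix] by counting braces.
function splitAtBody(source: string, openBrace: number): [string, string, string] {
    let depth = 0;
    for (let i = openBrace; i < source.length; i++) {
        if (source[i] === "{") depth++;
        else if (source[i] === "}") depth--;
        if (depth === 0) {
            // the middle span includes both the opening and the closing brace
            return [source.slice(0, openBrace), source.slice(openBrace, i + 1), source.slice(i + 1)];
        }
    }
    throw new Error("unbalanced braces");
}
```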
---

file_name: authorize-config.ts
fim_type: identifier_name

prefix:

```ts
//============== OpenId Connect授权配置 ==========
//Copyright 2018 何镇汐
//Licensed under the MIT license
//================================================

/**
 * OpenId Connect授权配置
 */
export class AuthorizeConfig {
    /**
     * OpenId Connect认证服务入口地址,该项为必填
     */
    authority: string;
    /**
     * 客户端标识,该项为必填
     */
    clientId: string;
    /**
     * 资源访问范围,默认值: openid profile
     */
    scope: string = "openid profile";
    /**
     * 响应类型,默认值:id_token token
     */
    responseType: string = "id_token token";
    /**
     * 登录回调页面地址,默认值:/callback,注意:该值必须与OpenId Connect认证服务配置的登录回调地址匹配
     */
    redirectUri: string = "/callback";
    /**
     * 注销回调页面地址,默认值:/,注意:该值必须与OpenId Connect认证服务配置的注销回调地址匹配
     */
    postLogoutRedirectUri: string;
    /**
     * 验证
     */
    validate() {
        if (!this.authority)
            throw new Error("OpenId Connect认证服务入口地址未设置,请设置AuthorizeConfig的authority属性");
        if (!this.clientId)
            throw new Error("OpenId Connect客户端标识未设置,请设置AuthorizeConfig的clientId属性");
    }
    /**
     * 获取登录回调页面地址
     */
    getRedirectUri() {
        if (this.redirectUri === "/callback")
            return `${location.origin}${this.redirectUri}`;
        return this.redirectUri;
    }
    /**
     * 获取注销回调页面地址
     */
    getPostLogoutRedirectUri() {
        if (!this.postLogoutRedirectUri)
            return location.origin;
        return this.postLogoutRedir
```

suffix:

```ts
}
```

middle:

```ts
ectUri; }
```
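Note how an identifier_name split can cut inside a token: the prefix above ends with `return this.postLogoutRedir` and the middle begins with `ectUri`, so neither half parses on its own and only the concatenation is valid TypeScript. A quick check reusing the `reassemble` helper from the first sketch (string values abbreviated, with whitespace as it survives in this dump):

```ts
const row: FimRow = {
    file_name: "authorize-config.ts",
    prefix: "/* ... */ return this.postLogoutRedir", // abbreviated
    suffix: "}",
    middle: "ectUri; }",
    fim_type: "identifier_name",
};
// The two halves of the identifier join back together on reassembly.
console.assert(reassemble(row).includes("return this.postLogoutRedirectUri; }"));
```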
---

file_name: authorize-config.ts
fim_type: identifier_body

prefix:

```ts
//============== OpenId Connect授权配置 ==========
//Copyright 2018 何镇汐
//Licensed under the MIT license
//================================================

/**
 * OpenId Connect授权配置
 */
export class AuthorizeConfig {
    /**
     * OpenId Connect认证服务入口地址,该项为必填
     */
    authority: string;
    /**
     * 客户端标识,该项为必填
     */
    clientId: string;
    /**
     * 资源访问范围,默认值: openid profile
     */
    scope: string = "openid profile";
    /**
     * 响应类型,默认值:id_token token
     */
    responseType: string = "id_token token";
    /**
     * 登录回调页面地址,默认值:/callback,注意:该值必须与OpenId Connect认证服务配置的登录回调地址匹配
     */
    redirectUri: string = "/callback";
    /**
     * 注销回调页面地址,默认值:/,注意:该值必须与OpenId Connect认证服务配置的注销回调地址匹配
     */
    postLogoutRedirectUri: string;
    /**
     * 验证
     */
    validate() {
        if (!this.authority)
            throw new Error("OpenId Connect认证服务入口地址未设置,请设置AuthorizeConfig的authority属性");
        if (!this.clientId)
            throw new Error("OpenId Connect客户端标识未设置,请设置AuthorizeConfig的clientId属性");
    }
    /**
     * 获取登
```

suffix:

```ts
            return location.origin;
        return this.postLogoutRedirectUri;
    }
}
```

middle:

```ts
录回调页面地址
     */
    getRedirectUri() {
        if (this.redirectUri === "/callback")
            return `${location.origin}${this.redirectUri}`;
        return this.redirectUri;
    }
    /**
     * 获取注销回调页面地址
     */
    getPostLogoutRedirectUri() {
        if (!this.postLogoutRedirectUri)
```
---

file_name: authorize-config.ts
fim_type: random_line_split

prefix:

```ts
//============== OpenId Connect授权配置 ==========
//Copyright 2018 何镇汐
//Licensed under the MIT license
//================================================

/**
 * OpenId Connect授权配置
 */
export class AuthorizeConfig {
    /**
     * OpenId Connect认证服务入口地址,该项为必填
     */
    authority: string;
    /**
     * 客户端标识,该项为必填
     */
    clientId: string;
    /**
     * 资源访问范围,默认值: openid profile
     */
    scope: string = "openid profile";
    /**
     * 响应类型,默认值:id_token token
     */
    responseType: string = "id_token token";
    /**
     * 登录回调页面地址,默认值:/callback,注意:该值必须与OpenId Connect认证服务配置的登录回调地址匹配
     */
    redirectUri: string = "/callback";
    /**
     * 注销回调页面地址,默认值:/,注意:该值必须与OpenId Connect认证服务配置的注销回调地址匹配
     */
    postLogoutRedirectUri: string;
```

suffix: (empty)

middle:

```ts
    /**
     * 验证
     */
    validate() {
        if (!this.authority)
            throw new Error("OpenId Connect认证服务入口地址未设置,请设置AuthorizeConfig的authority属性");
        if (!this.clientId)
            throw new Error("OpenId Connect客户端标识未设置,请设置AuthorizeConfig的clientId属性");
    }
    /**
     * 获取登录回调页面地址
     */
    getRedirectUri() {
        if (this.redirectUri === "/callback")
            return `${location.origin}${this.redirectUri}`;
        return this.redirectUri;
    }
    /**
     * 获取注销回调页面地址
     */
    getPostLogoutRedirectUri() {
        if (!this.postLogoutRedirectUri)
            return location.origin;
        return this.postLogoutRedirectUri;
    }
}
```
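Unlike the identifier-based classes, random_line_split rows cut at line boundaries with no regard for syntax; in this row the file is simply cut after the field declarations. A sketch of that strategy under the assumption that cut points are chosen uniformly at random (again illustrative, not the dataset's documented tooling):

```ts
// Illustrative random_line_split: cut a file at two random line boundaries.
// Splitting on a lookbehind keeps each '\n' attached to its line, so the
// three parts concatenate back to the original source exactly.
function randomLineSplit(source: string): [string, string, string] {
    const lines = source.split(/(?<=\n)/);
    const a = Math.floor(Math.random() * (lines.length + 1));
    const b = a + Math.floor(Math.random() * (lines.length - a + 1));
    return [
        lines.slice(0, a).join(""),
        lines.slice(a, b).join(""),
        lines.slice(b).join(""),
    ];
}
```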
---

file_name: converter.ts
fim_type: conditional_block

prefix:

```ts
/**
 * Converts modlogs between text and SQLite; also modernizes old-format modlogs
 * @author Annika
 * @author jetou
 */

if (!global.Config) {
    let hasSQLite = true;
    try {
        require.resolve('better-sqlite3');
    } catch (e) {
        hasSQLite = false;
    }
    global.Config = {
        nofswriting: false,
        usesqlitemodlog: hasSQLite,
        usesqlite: hasSQLite,
    };
}

import type * as DatabaseType from 'better-sqlite3';
import {FS} from '../../lib';
import {Modlog, ModlogEntry} from '../../server/modlog';
import {IPTools} from '../../server/ip-tools';

const Database = Config.usesqlite ? require('better-sqlite3') : null;

type ModlogFormat = 'txt' | 'sqlite';

/** The number of modlog entries to write to the database on each transaction */
const ENTRIES_TO_BUFFER = 25000;

const IP_ONLY_ACTIONS = new Set([
    'SHAREDIP', 'UNSHAREDIP', 'UNLOCKIP', 'UNLOCKRANGE', 'RANGEBAN', 'RANGELOCK',
]);

export function parseBrackets(line: string, openingBracket: '(' | '[', greedy?: boolean) {
    const brackets = {
        '(': ')',
        '[': ']',
    };
    const bracketOpenIndex = line.indexOf(openingBracket);
    const bracketCloseIndex = greedy ? line.lastIndexOf(brackets[openingBracket]) : line.indexOf(brackets[openingBracket]);
    if (bracketCloseIndex < 0 || bracketOpenIndex < 0) return '';
    return line.slice(bracketOpenIndex + 1, bracketCloseIndex);
}

function toID(text: any): ID {
    return (text && typeof text === "string" ? text : "").toLowerCase().replace(/[^a-z0-9]+/g, "") as ID;
}

export function modernizeLog(line: string, nextLine?: string): string | undefined {
    // first we save and remove the timestamp and the roomname
    const prefix = line.match(/\[.+?\] \(.+?\) /i)?.[0];
    if (!prefix) return;
    if (/\]'s\s.*\salts: \[/.test(line)) return;
    line = line.replace(prefix, '');
    if (line.startsWith('(') && line.endsWith(')')) {
        line = line.slice(1, -1);
    }
    const getAlts = () => {
        let alts;
        const regex = new RegExp(`\\(\\[.*\\]'s (lock|mut|bann|blacklist)ed alts: (\\[.*\\])\\)`);
        nextLine?.replace(regex, (a, b, rawAlts) => {
            alts = rawAlts;
            return '';
        });
        return alts ? `alts: ${alts} ` : ``;
    };

    // Special cases
    if (line.startsWith('SCAV ')) {
        line = line.replace(/: (\[room: .*?\]) by (.*)/, (match, roominfo, rest) => `: by ${rest} ${roominfo}`);
    }
    line = line.replace(
        /(GIVEAWAY WIN|GTS FINISHED): ([A-Za-z0-9].*?)(won|has finished)/,
        (match, action, user) => `${action}: [${toID(user)}]:`
    );

    if (line.includes(':')) {
        const possibleModernAction = line.slice(0, line.indexOf(':')).trim();
        if (possibleModernAction === possibleModernAction.toUpperCase()) {
            if (possibleModernAction.includes('[')) {
                // for corrupted lines
                const [drop, ...keep] = line.split('[');
                process.stderr.write(`Ignoring malformed line: ${drop}\n`);
                return modernizeLog(keep.join(''));
            }
            if (/\(.+\) by [a-z0-9]{1,19}$/.test(line) && !['OLD MODLOG', 'NOTE'].includes(possibleModernAction)) {
                // weird reason formatting
                const reason = parseBrackets(line, '(', true);
                return `${prefix}${line.replace(` (${reason})`, '')}: ${reason}`;
            }
            // Log is already modernized
            return `${prefix}${line}`;
        }
    }

    if (/\[(the|a)poll\] was (started|ended) by/.test(line)) {
        const actionTaker = toID(line.slice(line.indexOf(' by ') + ' by '.length));
        const isEnding = line.includes('was ended by');
        return `${prefix}POLL${isEnding ? ' END' : ''}: by ${actionTaker}`;
    }
    if (/User (.*?) won the game of (.*?) mode trivia/.test(line)) {
        return `${prefix}TRIVIAGAME: by unknown: ${line}`;
    }

    const modernizerTransformations: {[k: string]: (log: string) => string} = {
        'notes: ': (log) => {
            const [actionTaker, ...rest] = line.split(' notes: ');
            return `NOTE: by ${toID(actionTaker)}: ${rest.join('')}`;
        },
        ' declared': (log) => {
            let newAction = 'DECLARE';
            let oldAction = ' declared';
            if (log.includes(' globally declared')) {
                oldAction = ' globally declared';
                newAction = 'GLOBALDECLARE';
            }
            if (log.includes('(chat level)')) {
                oldAction += ' (chat level)';
                newAction = `CHATDECLARE`;
            }
            const actionTakerName = toID(log.slice(0, log.lastIndexOf(oldAction)));
            log = log.slice(actionTakerName.length);
            log = log.slice(oldAction.length);
            log = log.replace(/^\s?:/, '').trim();
            return `${newAction}: by ${actionTakerName}: ${log}`;
        },
        'changed the roomdesc to: ': (log) => {
            const actionTaker = parseBrackets(log, '[');
            log = log.slice(actionTaker.length + 3);
            log = log.slice('changed the roomdesc to: '.length + 1, -2);
            return `ROOMDESC: by ${actionTaker}: to "${log}"`;
        },
        'roomevent titled "': (log) => {
            let action;
            if (log.includes(' added a roomevent titled "')) {
                action = 'added a';
            } else if (log.includes(' removed a roomevent titled "')) {
                action = 'removed a';
            } else {
                action = 'edited the';
            }
            const actionTakerName = log.slice(0, log.lastIndexOf(` ${action} roomevent titled "`));
            log = log.slice(actionTakerName.length + 1);
            const eventName = log.slice(` ${action} roomevent titled `.length, -2);
            return `ROOMEVENT: by ${toID(actionTakerName)}: ${action.split(' ')[0]} "${eventName}"`;
        },
        'set modchat to ': (log) => {
            const actionTaker = parseBrackets(log, '[');
            log = log.slice(actionTaker.length + 3);
            log = log.slice('set modchat to '.length);
            return `MODCHAT: by ${actionTaker}: to ${log}`;
        },
        'set modjoin to ': (log) => {
            const actionTakerName = log.slice(0, log.lastIndexOf(' set'));
            log = log.slice(actionTakerName.length + 1);
            log = log.slice('set modjoin to '.length);
            const rank = log.startsWith('sync') ? 'sync' : log.replace('.', '');
            return `MODJOIN${rank === 'sync' ? ' SYNC' : ''}: by ${toID(actionTakerName)}${rank !== 'sync' ? `: ${rank}` : ``}`;
        },
        'turned off modjoin': (log) => {
            const actionTakerName = log.slice(0, log.lastIndexOf(' turned off modjoin'));
            return `MODJOIN: by ${toID(actionTakerName)}: OFF`;
        },
        'changed the roomintro': (log) => {
            const isDeletion = /deleted the (staff|room)intro/.test(log);
            const isRoomintro = log.includes('roomintro');
            const actionTaker = toID(log.slice(0, log.indexOf(isDeletion ? 'deleted' : 'changed')));
            return `${isDeletion ? 'DELETE' : ''}${isRoomintro ? 'ROOM' : 'STAFF'}INTRO: by ${actionTaker}`;
        },
        'deleted the roomintro': (log) => modernizerTransformations['changed the roomintro'](log),
        'changed the staffintro': (log) => modernizerTransformations['changed the roomintro'](log),
        'deleted the staffintro': (log) => modernizerTransformations['changed the roomintro'](log),
        'created a tournament in': (log) => {
            const actionTaker = parseBrackets(log, '[');
            log = log.slice(actionTaker.length + 3);
            log = log.slice(24, -8);
            return `TOUR CREATE: by ${actionTaker}: ${log}`;
        },
        'was disqualified from the tournament by': (log) => {
            const disqualified = parseBrackets(log, '[');
            log = log.slice(disqualified.length + 3);
            log = log.slice('was disqualified from the tournament by'.length);
            return `TOUR DQ: [${toID(disqualified)}] by ${toID(log)}`;
        },
        'The tournament auto disqualify timeout was set to': (log) => {
            const byIndex = log.indexOf(' by ');
            const actionTaker = log.slice(byIndex + ' by '.length);
            const length = log.slice('The tournament auto disqualify timeout was set to'.length, byIndex);
            return `TOUR AUTODQ: by ${toID(actionTaker)}: ${length.trim()}`;
        },
        ' was blacklisted from ': (log) => {
            const isName = log.includes(' was nameblacklisted from ');
            const banned = toID(log.slice(0, log.indexOf(` was ${isName ? 'name' : ''}blacklisted from `)));
            log = log.slice(log.indexOf(' by ') + ' by '.length);
            let reason, ip;
            if (/\(.*\)/.test(log)) {
                reason = parseBrackets(log, '(');
                if (/\[.*\]/.test(log)) ip = parseBrackets(log, '[');
                log = log.slice(0, log.indexOf('('));
            }
            const actionTaker = toID(log);
            return `${isName ? 'NAME' : ''}BLACKLIST: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`;
        },
        ' was nameblacklisted from ': (log) => modernizerTransformations[' was blacklisted from '](log),
        ' was banned from room ': (log) => {
            const banned = toID(log.slice(0, log.indexOf(' was banned from room ')));
            log = log.slice(log.indexOf(' by ') + ' by '.length);
            let reason, ip;
            if (/\(.*\)/.test(log)) {
                reason = parseBrackets(log, '(');
                if (/\[.*\]/.test(log)) ip = parseBrackets(log, '[');
                log = log.slice(0, log.indexOf('('));
            }
            const actionTaker = toID(log);
            return `ROOMBAN: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`;
        },
        ' was muted by ': (log) => {
            let muted = '';
            let isHour = false;
            [muted, log] = log.split(' was muted by ');
            muted = toID(muted);
            let reason, ip;
            if (/\(.*\)/.test(log)) {
                reason = parseBrackets(log, '(');
                if (/\[.*\]/.test(log)) ip = parseBrackets(log, '[');
                log = log.slice(0, log.indexOf('('));
            }
            let actionTaker = toID(log);
            if (actionTaker.endsWith('for1hour')) {
                isHour = true;
                actionTaker = actionTaker.replace(/^(.*)(for1hour)$/, (match, staff) => staff) as ID;
            }
            return `${isHour ? 'HOUR' : ''}MUTE: [${muted}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`;
        },
        ' was locked from talking ': (log) => {
            const isWeek = log.includes(' was locked from talking for a week ');
            const locked = toID(log.slice(0, log.indexOf(' was locked from talking ')));
            log = log.slice(log.indexOf(' by ') + ' by '.length);
            let reason, ip;
            if (/\(.*\)/.test(log)) {
                reason = parseBrackets(log, '(');
                if (/\[.*\]/.test(log)) ip = parseBrackets(log, '[');
                log = log.slice(0, log.indexOf('('));
            }
            const actionTaker = toID(log);
            return `${isWeek ? 'WEEK' : ''}LOCK: [${locked}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`;
        },
        ' was banned ': (log) => {
            if (log.includes(' was banned from room ')) return modernizerTransformations[' was banned from room '](log);
            const banned = toID(log.slice(0, log.indexOf(' was banned ')));
            log = log.slice(log.indexOf(' by ') + ' by '.length);
            let reason, ip;
            if (/\(.*\)/.test(log)) {
                reason = parseBrackets(log, '(');
                if (/\[.*\]/.test(log)) ip = parseBrackets(log, '[');
                log = log.slice(0, log.indexOf('('));
            }
            const actionTaker = toID(log);
            return `BAN: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`;
        },
        'was promoted to ': (log) => {
            const isDemotion = log.includes('was demoted to ');
            const userid = toID(log.split(' was ')[0]);
            if (!userid) {
                throw new Error(`Ignoring malformed line: ${prefix}${log}`);
            }
            log = log.slice(userid.length + 3);
            log = log.slice(`was ${isDemotion ? 'demoted' : 'promoted'} to `.length);
            let rank = log.slice(0, log.indexOf(' by')).replace(/ /, '').toUpperCase();
            log = log.slice(`${rank} by `.length);
            if (!rank.startsWith('ROOM')) rank = `GLOBAL ${rank}`;
            const actionTaker = parseBrackets(log, '[');
            return `${rank}: [${userid}] by ${actionTaker}${isDemotion ? ': (demote)' : ''}`;
        },
        'was demoted to ': (log) => modernizerTransformations['was promoted to '](log),
        'was appointed Room Owner by ': (log) => {
            const userid = parseBrackets(log, '[');
            log = log.slice(userid.length + 3);
            log = log.slice('was appointed Room Owner by '.length);
            const actionTaker = parseBrackets(log, '[');
            return `ROOMOWNER: [${userid}] by ${actionTaker}`;
        },
        ' claimed this ticket': (log) => {
            const actions: {[k: string]: string} = {
                ' claimed this ticket': 'TICKETCLAIM',
                ' closed this ticket': 'TICKETCLOSE',
                ' deleted this ticket': 'TICKETDELETE',
            };
            for (const oldAction in actions) {
                if (log.includes(oldAction)) {
                    const actionTaker = toID(log.slice(0, log.indexOf(oldAction)));
                    return `${actions[oldAction]}: by ${actionTaker}`;
                }
            }
            return log;
        },
        'This ticket is now claimed by ': (log) => {
            const claimer = toID(log.slice(log.indexOf(' by ') + ' by '.length));
            return `TICKETCLAIM: by ${claimer}`;
        },
        ' is no longer interested in this ticket': (log) => {
            const abandoner = toID(log.slice(0, log.indexOf(' is no longer interested in this ticket')));
            return `TICKETABANDON: by ${abandoner}`;
        },
        ' opened a new ticket': (log) => {
            const opener = toID(log.slice(0, log.indexOf(' opened a new ticket')));
            const problem = log.slice(log.indexOf(' Issue: ') + ' Issue: '.length).trim();
            return `TICKETOPEN: by ${opener}: ${problem}`;
        },
        ' closed this ticket': (log) => modernizerTransformations[' claimed this ticket'](log),
        ' deleted this ticket': (log) => modernizerTransformations[' claimed this ticket'](log),
        'This ticket is no longer claimed': () => 'TICKETUNCLAIM',
        ' has been caught attempting a hunt with ': (log) => {
            const index = log.indexOf(' has been caught attempting a hunt with ');
            const user = toID(log.slice(0, index));
            log = log.slice(index + ' has been caught attempting a hunt with '.length);
            log = log.replace('. The user has also', '; has also').replace('.', '');
            return `SCAV CHEATER: [${user}]: caught attempting a hunt with ${log}`;
        },
    };

    for (const oldAction in modernizerTransformations) {
        if (line.includes(oldAction)) {
            try {
                return prefix + modernizerTransformations[oldAction](line);
            } catch (err) {
                if (Config.nofswriting) throw err;
                process.stderr.write(`${err.message}\n`);
            }
        }
    }
    return `${prefix}${line}`;
}

export function parseModlog(raw: string, nextLine?: string, isGlobal = false): ModlogEntry | undefined {
    let line = modernizeLog(raw);
    if (!line) return;

    const timestamp = parseBrackets(line, '[');
    line = line.slice(timestamp.length + 3);
    const [roomID, ...bonus] = parseBrackets(line, '(').split(' ');
    const log: ModlogEntry = {
        action: 'NULL',
        roomID,
        visualRoomID: '',
        userid: null,
        autoconfirmedID: null,
        alts: [],
        ip: null,
        isGlobal,
        loggedBy: null,
        note: '',
        time: Math.floor(new Date(timestamp).getTime()) || Date.now(),
    };
    if (bonus.length) log.visualRoomID = `${log.roomID} ${bonus.join(' ')}`;
    line = line.slice((log.visualRoomID || log.roomID).length + 3);
    const actionColonIndex = line.indexOf(':');
    const action = line.slice(0, actionColonIndex);
    if (action !== action.toUpperCase()) {
        // no action (probably an old-format log that slipped past the modernizer)
        log.action = 'OLD MODLOG';
        log.loggedBy = 'unknown' as ID;
        log.note = line.trim();
        return log;
    } else {
        log.action = action;
        if (log.action === 'OLD MODLOG') {
            log.loggedBy = 'unknown' as ID;
            log.note = line.slice(line.indexOf('by unknown: ') + 'by unknown :'.length).trim();
            return log;
        }
        line = line.slice(actionColonIndex + 2);
    }

    if (line[0] === '[') {
        if (!IP_ONLY_ACTIONS.has(log.action))
```

suffix:

```ts
        if (line[0] === '[') {
            log.ip = parseBrackets(line, '[');
            line = line.slice(log.ip.length + 3).trim();
        }
    }
    let regex = /\bby .*:/;
    let actionTakerIndex = regex.exec(line)?.index;
    if (actionTakerIndex === undefined) {
        actionTakerIndex = line.indexOf('by ');
        regex = /\bby .*/;
    }
    if (actionTakerIndex !== -1) {
        const colonIndex = line.indexOf(': ');
        const actionTaker = line.slice(actionTakerIndex + 3, colonIndex > actionTakerIndex ? colonIndex : undefined);
        if (toID(actionTaker).length < 19) {
            log.loggedBy = toID(actionTaker) || null;
            if (colonIndex > actionTakerIndex) line = line.slice(colonIndex);
            line = line.replace(regex, ' ');
        }
    }
    if (line) log.note = line.replace(/^\s?:\s?/, '').trim();
    return log;
}

export function rawifyLog(log: ModlogEntry) {
    let result = `[${new Date(log.time || Date.now()).toJSON()}] (${(log.visualRoomID || log.roomID || 'global').replace(/^global-/, '')}) ${log.action}`;
    if (log.userid) result += `: [${log.userid}]`;
    if (log.autoconfirmedID) result += ` ac: [${log.autoconfirmedID}]`;
    if (log.alts.length) result += ` alts: [${log.alts.join('], [')}]`;
    if (log.ip) {
        if (!log.userid) result += `:`;
        result += ` [${log.ip}]`;
    }
    if (log.loggedBy) result += `${result.endsWith(']') ? '' : ':'} by ${log.loggedBy}`;
    if (log.note) result += `: ${log.note}`;
    return result + `\n`;
}

export class ModlogConverterSQLite {
    readonly databaseFile: string;
    readonly textLogDir: string;
    readonly isTesting: {files: Map<string, string>, db: DatabaseType.Database} | null = null;

    constructor(databaseFile: string, textLogDir: string, isTesting?: DatabaseType.Database) {
        this.databaseFile = databaseFile;
        this.textLogDir = textLogDir;
        if (isTesting || Config.nofswriting) {
            this.isTesting = {files: new Map<string, string>(), db: isTesting || new Database(':memory:')};
        }
    }

    async toTxt() {
        const database = this.isTesting?.db || new Database(this.databaseFile, {fileMustExist: true});
        const roomids = database.prepare('SELECT DISTINCT roomid FROM modlog').all();
        const globalEntries = [];
        for (const {roomid} of roomids) {
            if (!Config.nofswriting) console.log(`Reading ${roomid}...`);
            const results = database.prepare(
                `SELECT *, (SELECT group_concat(userid, ',') FROM alts WHERE alts.modlog_id = modlog.modlog_id) as alts ` +
                `FROM modlog WHERE roomid = ? ORDER BY timestamp ASC`
            ).all(roomid);
            const trueRoomID = roomid.replace(/^global-/, '');

            let entriesLogged = 0;
            let entries: string[] = [];

            const insertEntries = async () => {
                if (roomid === 'global') return;
                entriesLogged += entries.length;
                if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) {
                    process.stdout.clearLine(0);
                    process.stdout.cursorTo(0);
                    process.stdout.write(`Wrote ${entriesLogged} entries from '${trueRoomID}'`);
                }
                await this.writeFile(`${this.textLogDir}/modlog_${trueRoomID}.txt`, entries.join(''));
                entries = [];
            };

            for (const result of results) {
                const entry: ModlogEntry = {
                    action: result.action,
                    roomID: result.roomid?.replace(/^global-/, ''),
                    visualRoomID: result.visual_roomid,
                    userid: result.userid,
                    autoconfirmedID: result.autoconfirmed_userid,
                    alts: result.alts?.split(','),
                    ip: result.ip,
                    isGlobal: result.roomid?.startsWith('global-') || result.roomid === 'global',
                    loggedBy: result.action_taker_userid,
                    note: result.note,
                    time: result.timestamp,
                };
                const rawLog = rawifyLog(entry);
                entries.push(rawLog);
                if (entry.isGlobal) {
                    globalEntries.push(rawLog);
                }
                if (entries.length === ENTRIES_TO_BUFFER) await insertEntries();
            }
            await insertEntries();
            if (entriesLogged) process.stdout.write('\n');
        }
        if (!Config.nofswriting) console.log(`Writing the global modlog...`);
        await this.writeFile(`${this.textLogDir}/modlog_global.txt`, globalEntries.join(''));
    }

    async writeFile(path: string, text: string) {
        if (this.isTesting) {
            const old = this.isTesting.files.get(path);
            return this.isTesting.files.set(path, `${old || ''}${text}`);
        }
        return FS(path).append(text);
    }
}

export class ModlogConverterTxt {
    readonly databaseFile: string;
    readonly modlog: Modlog;
    readonly textLogDir: string;
    readonly isTesting: {files: Map<string, string>, ml?: Modlog} | null = null;

    constructor(databaseFile: string, textLogDir: string, isTesting?: Map<string, string>, useFTSExtension?: boolean) {
        this.databaseFile = databaseFile;
        this.textLogDir = textLogDir;
        if (isTesting || Config.nofswriting) {
            this.isTesting = {
                files: isTesting || new Map<string, string>(),
            };
        }
        this.modlog = new Modlog(this.textLogDir, this.isTesting ? ':memory:' : this.databaseFile);
    }

    async toSQLite() {
        const files = this.isTesting ? [...this.isTesting.files.keys()] : await FS(this.textLogDir).readdir();
        // Read global modlog last to avoid inserting duplicate data to database
        if (files.includes('modlog_global.txt')) {
            files.splice(files.indexOf('modlog_global.txt'), 1);
            files.push('modlog_global.txt');
        }
        const globalEntries = [];
        for (const file of files) {
            if (file === 'README.md') continue;
            const roomid = file.slice(7, -4);
            const lines = this.isTesting ?
                this.isTesting.files.get(file)?.split('\n') || [] :
                FS(`${this.textLogDir}/${file}`).createReadStream().byLine();

            let entriesLogged = 0;
            let lastLine = undefined;
            let entries: ModlogEntry[] = [];

            const insertEntries = (alwaysShowProgress?: boolean) => {
                this.modlog.writeSQL(entries);
                entriesLogged += entries.length;
                if (!Config.nofswriting && (
                    alwaysShowProgress || entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER
                )) {
                    process.stdout.clearLine(0);
                    process.stdout.cursorTo(0);
                    process.stdout.write(`Inserted ${entriesLogged} entries from '${roomid}'`);
                }
                entries = [];
            };

            for await (const line of lines) {
                const entry = parseModlog(line, lastLine, roomid === 'global');
                lastLine = line;
                if (!entry) continue;
                if (roomid !== 'global') entries.push(entry);
                if (entry.isGlobal) {
                    globalEntries.push(entry);
                }
                if (entries.length === ENTRIES_TO_BUFFER) insertEntries();
            }
            insertEntries(true);
            if (entriesLogged) process.stdout.write('\n');
        }
        return this.modlog.database;
    }
}

export class ModlogConverterTest {
    readonly inputDir: string;
    readonly outputDir: string;

    constructor(inputDir: string, outputDir: string) {
        this.inputDir = inputDir;
        this.outputDir = outputDir;
    }

    async toTxt() {
        const files = await FS(this.inputDir).readdir();
        // Read global modlog last to avoid inserting duplicate data to database
        if (files.includes('modlog_global.txt')) {
            files.splice(files.indexOf('modlog_global.txt'), 1);
            files.push('modlog_global.txt');
        }
        const globalEntries = [];
        for (const file of files) {
            if (file === 'README.md') continue;
            const roomid = file.slice(7, -4);

            let entriesLogged = 0;
            let lastLine = undefined;
            let entries: string[] = [];

            const insertEntries = async () => {
                if (roomid === 'global') return;
                entriesLogged += entries.length;
                if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) {
                    process.stdout.clearLine(0);
                    process.stdout.cursorTo(0);
                    process.stdout.write(`Wrote ${entriesLogged} entries from '${roomid}'`);
                }
                await FS(`${this.outputDir}/modlog_${roomid}.txt`).append(entries.join(''));
                entries = [];
            };

            const readStream = FS(`${this.inputDir}/${file}`).createReadStream();
            for await (const line of readStream.byLine()) {
                const entry = parseModlog(line, lastLine, roomid === 'global');
                lastLine = line;
                if (!entry) continue;
                const rawLog = rawifyLog(entry);
                if (roomid !== 'global') entries.push(rawLog);
                if (entry.isGlobal) {
                    globalEntries.push(rawLog);
                }
                if (entries.length === ENTRIES_TO_BUFFER) await insertEntries();
            }
            await insertEntries();
            if (entriesLogged) process.stdout.write('\n');
        }
        if (!Config.nofswriting) console.log(`Writing the global modlog...`);
        await FS(`${this.outputDir}/modlog_global.txt`).append(globalEntries.join(''));
    }
}

export const ModlogConverter = {
    async convert(
        from: ModlogFormat, to: ModlogFormat, databasePath: string,
        textLogDirectoryPath: string, outputLogPath?: string
    ) {
        if (from === 'txt' && to === 'txt' && outputLogPath) {
            const converter = new ModlogConverterTest(textLogDirectoryPath, outputLogPath);
            await converter.toTxt();
            console.log("\nDone!");
            process.exit();
        } else if (from === 'sqlite' && to === 'txt') {
            const converter = new ModlogConverterSQLite(databasePath, textLogDirectoryPath);
            await converter.toTxt();
            console.log("\nDone!");
            process.exit();
        } else if (from === 'txt' && to === 'sqlite') {
            const converter = new ModlogConverterTxt(databasePath, textLogDirectoryPath);
            await converter.toSQLite();
            console.log("\nDone!");
            process.exit();
        }
    },
};
```

middle:

```ts
{
    const userid = toID(parseBrackets(line, '['));
    log.userid = userid;
    line = line.slice(userid.length + 3).trim();
    if (line.startsWith('ac:')) {
        line = line.slice(3).trim();
        const ac = parseBrackets(line, '[');
        log.autoconfirmedID = toID(ac);
        line = line.slice(ac.length + 3).trim();
    }
    if (line.startsWith('alts:')) {
        line = line.slice(5).trim();
        const alts = new Set<ID>(); // we need to weed out duplicate alts
        let alt = parseBrackets(line, '[');
        do {
            if (alt.includes(', ')) {
                // old alt format
                for (const trueAlt of alt.split(', ')) {
                    alts.add(toID(trueAlt));
                }
                line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim();
                if (!line.startsWith('[')) line = `[${line}`;
            } else {
                if (IPTools.ipRegex.test(alt)) break;
                alts.add(toID(alt));
                line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim();
                if (alt.includes('[') && !line.startsWith('[')) line = `[${line}`;
            }
            alt = parseBrackets(line, '[');
        } while (alt);
        log.alts = [...alts];
    }
}
```
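Here the masked span is the conditional block guarded by `if (!IP_ONLY_ACTIONS.has(log.action))` in parseModlog: the prefix ends immediately after the condition and the middle is the entire `{ ... }` block. A brace-balance heuristic for sanity-checking such rows (an assumption about what conditional_block guarantees; it ignores braces inside string literals, so treat it only as a spot check):

```ts
// Heuristic: a conditional_block middle should be a single balanced {...} block.
function looksLikeBlock(middle: string): boolean {
    const s = middle.trim();
    if (!s.startsWith("{") || !s.endsWith("}")) return false;
    let depth = 0;
    for (const ch of s) {
        if (ch === "{") depth++;
        else if (ch === "}") depth--;
        if (depth < 0) return false; // closed more blocks than were opened
    }
    return depth === 0;
}
```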
---

file_name: converter.ts
fim_type: random_line_split

prefix / suffix / middle: (these three cells reassemble to exactly the same converter.ts source shown in the previous row; the random line-boundary cut points are not visible in this dump, so the duplicated file text is omitted here)
The user has also', '; has also').replace('.', ''); return `SCAV CHEATER: [${user}]: caught attempting a hunt with ${log}`; }, }; for (const oldAction in modernizerTransformations) { if (line.includes(oldAction)) { try { return prefix + modernizerTransformations[oldAction](line); } catch (err) { if (Config.nofswriting) throw err; process.stderr.write(`${err.message}\n`); } } } return `${prefix}${line}`; } export function parseModlog(raw: string, nextLine?: string, isGlobal = false): ModlogEntry | undefined { let line = modernizeLog(raw); if (!line) return; const timestamp = parseBrackets(line, '['); line = line.slice(timestamp.length + 3); const [roomID, ...bonus] = parseBrackets(line, '(').split(' '); const log: ModlogEntry = { action: 'NULL', roomID, visualRoomID: '', userid: null, autoconfirmedID: null, alts: [], ip: null, isGlobal, loggedBy: null, note: '', time: Math.floor(new Date(timestamp).getTime()) || Date.now(), }; if (bonus.length) log.visualRoomID = `${log.roomID} ${bonus.join(' ')}`; line = line.slice((log.visualRoomID || log.roomID).length + 3); const actionColonIndex = line.indexOf(':'); const action = line.slice(0, actionColonIndex); if (action !== action.toUpperCase()) { // no action (probably an old-format log that slipped past the modernizer) log.action = 'OLD MODLOG'; log.loggedBy = 'unknown' as ID; log.note = line.trim(); return log; } else { log.action = action; if (log.action === 'OLD MODLOG') { log.loggedBy = 'unknown' as ID; log.note = line.slice(line.indexOf('by unknown: ') + 'by unknown :'.length).trim(); return log; } line = line.slice(actionColonIndex + 2); } if (line[0] === '[') { if (!IP_ONLY_ACTIONS.has(log.action)) { const userid = toID(parseBrackets(line, '[')); log.userid = userid; line = line.slice(userid.length + 3).trim(); if (line.startsWith('ac:')) { line = line.slice(3).trim(); const ac = parseBrackets(line, '['); log.autoconfirmedID = toID(ac); line = line.slice(ac.length + 3).trim(); } if (line.startsWith('alts:')) { line = line.slice(5).trim(); const alts = new Set<ID>(); // we need to weed out duplicate alts let alt = parseBrackets(line, '['); do { if (alt.includes(', ')) { // old alt format for (const trueAlt of alt.split(', ')) { alts.add(toID(trueAlt)); } line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim(); if (!line.startsWith('[')) line = `[${line}`; } else { if (IPTools.ipRegex.test(alt)) break; alts.add(toID(alt)); line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim(); if (alt.includes('[') && !line.startsWith('[')) line = `[${line}`; } alt = parseBrackets(line, '['); } while (alt); log.alts = [...alts]; } } if (line[0] === '[') { log.ip = parseBrackets(line, '['); line = line.slice(log.ip.length + 3).trim(); } } let regex = /\bby .*:/; let actionTakerIndex = regex.exec(line)?.index; if (actionTakerIndex === undefined) { actionTakerIndex = line.indexOf('by '); regex = /\bby .*/; } if (actionTakerIndex !== -1) { const colonIndex = line.indexOf(': '); const actionTaker = line.slice(actionTakerIndex + 3, colonIndex > actionTakerIndex ? 
colonIndex : undefined); if (toID(actionTaker).length < 19) { log.loggedBy = toID(actionTaker) || null; if (colonIndex > actionTakerIndex) line = line.slice(colonIndex); line = line.replace(regex, ' '); } } if (line) log.note = line.replace(/^\s?:\s?/, '').trim(); return log; } export function rawifyLog(log: ModlogEntry) { let result = `[${new Date(log.time || Date.now()).toJSON()}] (${(log.visualRoomID || log.roomID || 'global').replace(/^global-/, '')}) ${log.action}`; if (log.userid) result += `: [${log.userid}]`; if (log.autoconfirmedID) result += ` ac: [${log.autoconfirmedID}]`; if (log.alts.length) result += ` alts: [${log.alts.join('], [')}]`; if (log.ip) { if (!log.userid) result += `:`; result += ` [${log.ip}]`; } if (log.loggedBy) result += `${result.endsWith(']') ? '' : ':'} by ${log.loggedBy}`; if (log.note) result += `: ${log.note}`; return result + `\n`; } export class ModlogConverterSQLite { readonly databaseFile: string; readonly textLogDir: string; readonly isTesting: {files: Map<string, string>, db: DatabaseType.Database} | null = null; constructor(databaseFile: string, textLogDir: string, isTesting?: DatabaseType.Database)
async toTxt() { const database = this.isTesting?.db || new Database(this.databaseFile, {fileMustExist: true}); const roomids = database.prepare('SELECT DISTINCT roomid FROM modlog').all(); const globalEntries = []; for (const {roomid} of roomids) { if (!Config.nofswriting) console.log(`Reading ${roomid}...`); const results = database.prepare( `SELECT *, (SELECT group_concat(userid, ',') FROM alts WHERE alts.modlog_id = modlog.modlog_id) as alts ` + `FROM modlog WHERE roomid = ? ORDER BY timestamp ASC` ).all(roomid); const trueRoomID = roomid.replace(/^global-/, ''); let entriesLogged = 0; let entries: string[] = []; const insertEntries = async () => { if (roomid === 'global') return; entriesLogged += entries.length; if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Wrote ${entriesLogged} entries from '${trueRoomID}'`); } await this.writeFile(`${this.textLogDir}/modlog_${trueRoomID}.txt`, entries.join('')); entries = []; }; for (const result of results) { const entry: ModlogEntry = { action: result.action, roomID: result.roomid?.replace(/^global-/, ''), visualRoomID: result.visual_roomid, userid: result.userid, autoconfirmedID: result.autoconfirmed_userid, alts: result.alts?.split(','), ip: result.ip, isGlobal: result.roomid?.startsWith('global-') || result.roomid === 'global', loggedBy: result.action_taker_userid, note: result.note, time: result.timestamp, }; const rawLog = rawifyLog(entry); entries.push(rawLog); if (entry.isGlobal) { globalEntries.push(rawLog); } if (entries.length === ENTRIES_TO_BUFFER) await insertEntries(); } await insertEntries(); if (entriesLogged) process.stdout.write('\n'); } if (!Config.nofswriting) console.log(`Writing the global modlog...`); await this.writeFile(`${this.textLogDir}/modlog_global.txt`, globalEntries.join('')); } async writeFile(path: string, text: string) { if (this.isTesting) { const old = this.isTesting.files.get(path); return this.isTesting.files.set(path, `${old || ''}${text}`); } return FS(path).append(text); } } export class ModlogConverterTxt { readonly databaseFile: string; readonly modlog: Modlog; readonly textLogDir: string; readonly isTesting: {files: Map<string, string>, ml?: Modlog} | null = null; constructor(databaseFile: string, textLogDir: string, isTesting?: Map<string, string>, useFTSExtension?: boolean) { this.databaseFile = databaseFile; this.textLogDir = textLogDir; if (isTesting || Config.nofswriting) { this.isTesting = { files: isTesting || new Map<string, string>(), }; } this.modlog = new Modlog(this.textLogDir, this.isTesting ? ':memory:' : this.databaseFile); } async toSQLite() { const files = this.isTesting ? [...this.isTesting.files.keys()] : await FS(this.textLogDir).readdir(); // Read global modlog last to avoid inserting duplicate data to database if (files.includes('modlog_global.txt')) { files.splice(files.indexOf('modlog_global.txt'), 1); files.push('modlog_global.txt'); } const globalEntries = []; for (const file of files) { if (file === 'README.md') continue; const roomid = file.slice(7, -4); const lines = this.isTesting ? 
this.isTesting.files.get(file)?.split('\n') || [] : FS(`${this.textLogDir}/${file}`).createReadStream().byLine(); let entriesLogged = 0; let lastLine = undefined; let entries: ModlogEntry[] = []; const insertEntries = (alwaysShowProgress?: boolean) => { this.modlog.writeSQL(entries); entriesLogged += entries.length; if (!Config.nofswriting && ( alwaysShowProgress || entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER )) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Inserted ${entriesLogged} entries from '${roomid}'`); } entries = []; }; for await (const line of lines) { const entry = parseModlog(line, lastLine, roomid === 'global'); lastLine = line; if (!entry) continue; if (roomid !== 'global') entries.push(entry); if (entry.isGlobal) { globalEntries.push(entry); } if (entries.length === ENTRIES_TO_BUFFER) insertEntries(); } insertEntries(true); if (entriesLogged) process.stdout.write('\n'); } return this.modlog.database; } } export class ModlogConverterTest { readonly inputDir: string; readonly outputDir: string; constructor(inputDir: string, outputDir: string) { this.inputDir = inputDir; this.outputDir = outputDir; } async toTxt() { const files = await FS(this.inputDir).readdir(); // Read global modlog last to avoid inserting duplicate data to database if (files.includes('modlog_global.txt')) { files.splice(files.indexOf('modlog_global.txt'), 1); files.push('modlog_global.txt'); } const globalEntries = []; for (const file of files) { if (file === 'README.md') continue; const roomid = file.slice(7, -4); let entriesLogged = 0; let lastLine = undefined; let entries: string[] = []; const insertEntries = async () => { if (roomid === 'global') return; entriesLogged += entries.length; if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Wrote ${entriesLogged} entries from '${roomid}'`); } await FS(`${this.outputDir}/modlog_${roomid}.txt`).append(entries.join('')); entries = []; }; const readStream = FS(`${this.inputDir}/${file}`).createReadStream(); for await (const line of readStream.byLine()) { const entry = parseModlog(line, lastLine, roomid === 'global'); lastLine = line; if (!entry) continue; const rawLog = rawifyLog(entry); if (roomid !== 'global') entries.push(rawLog); if (entry.isGlobal) { globalEntries.push(rawLog); } if (entries.length === ENTRIES_TO_BUFFER) await insertEntries(); } await insertEntries(); if (entriesLogged) process.stdout.write('\n'); } if (!Config.nofswriting) console.log(`Writing the global modlog...`); await FS(`${this.outputDir}/modlog_global.txt`).append(globalEntries.join('')); } } export const ModlogConverter = { async convert( from: ModlogFormat, to: ModlogFormat, databasePath: string, textLogDirectoryPath: string, outputLogPath?: string ) { if (from === 'txt' && to === 'txt' && outputLogPath) { const converter = new ModlogConverterTest(textLogDirectoryPath, outputLogPath); await converter.toTxt(); console.log("\nDone!"); process.exit(); } else if (from === 'sqlite' && to === 'txt') { const converter = new ModlogConverterSQLite(databasePath, textLogDirectoryPath); await converter.toTxt(); console.log("\nDone!"); process.exit(); } else if (from === 'txt' && to === 'sqlite') { const converter = new ModlogConverterTxt(databasePath, textLogDirectoryPath); await converter.toSQLite(); console.log("\nDone!"); process.exit(); } }, };
{ this.databaseFile = databaseFile; this.textLogDir = textLogDir; if (isTesting || Config.nofswriting) { this.isTesting = {files: new Map<string, string>(), db: isTesting || new Database(':memory:')}; } }
identifier_body
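The hole in the record above is the converter's constructor body. As a language-neutral illustration of the bracket-matching rule that the `parseBrackets` helper in the prefix relies on, here is a hedged Python port (behavior copied from the TypeScript above; the port itself is only a sketch):

def parse_brackets(line: str, opening: str, greedy: bool = False) -> str:
    """Mirror of the TypeScript parseBrackets helper: return the text between
    `opening` and its matching close bracket, or '' when either is missing."""
    closers = {'(': ')', '[': ']'}
    open_idx = line.find(opening)
    # greedy takes the last closer (lastIndexOf); non-greedy takes the first (indexOf)
    close_idx = line.rfind(closers[opening]) if greedy else line.find(closers[opening])
    if open_idx < 0 or close_idx < 0:
        return ''
    return line[open_idx + 1:close_idx]

# e.g. parse_brackets('[2020-01-01T00:00:00.000Z] (lobby) NOTE', '[')
#      -> '2020-01-01T00:00:00.000Z'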
converter.ts
/** * Converts modlogs between text and SQLite; also modernizes old-format modlogs * @author Annika * @author jetou */ if (!global.Config) { let hasSQLite = true; try { require.resolve('better-sqlite3'); } catch (e) { hasSQLite = false; } global.Config = { nofswriting: false, usesqlitemodlog: hasSQLite, usesqlite: hasSQLite, }; } import type * as DatabaseType from 'better-sqlite3'; import {FS} from '../../lib'; import {Modlog, ModlogEntry} from '../../server/modlog'; import {IPTools} from '../../server/ip-tools'; const Database = Config.usesqlite ? require('better-sqlite3') : null; type ModlogFormat = 'txt' | 'sqlite'; /** The number of modlog entries to write to the database on each transaction */ const ENTRIES_TO_BUFFER = 25000; const IP_ONLY_ACTIONS = new Set([ 'SHAREDIP', 'UNSHAREDIP', 'UNLOCKIP', 'UNLOCKRANGE', 'RANGEBAN', 'RANGELOCK', ]); export function parseBrackets(line: string, openingBracket: '(' | '[', greedy?: boolean) { const brackets = { '(': ')', '[': ']', }; const bracketOpenIndex = line.indexOf(openingBracket); const bracketCloseIndex = greedy ? line.lastIndexOf(brackets[openingBracket]) : line.indexOf(brackets[openingBracket]); if (bracketCloseIndex < 0 || bracketOpenIndex < 0) return ''; return line.slice(bracketOpenIndex + 1, bracketCloseIndex); } function toID(text: any): ID { return (text && typeof text === "string" ? text : "").toLowerCase().replace(/[^a-z0-9]+/g, "") as ID; } export function modernizeLog(line: string, nextLine?: string): string | undefined { // first we save and remove the timestamp and the roomname const prefix = line.match(/\[.+?\] \(.+?\) /i)?.[0]; if (!prefix) return; if (/\]'s\s.*\salts: \[/.test(line)) return; line = line.replace(prefix, ''); if (line.startsWith('(') && line.endsWith(')')) { line = line.slice(1, -1); } const getAlts = () => { let alts; const regex = new RegExp(`\\(\\[.*\\]'s (lock|mut|bann|blacklist)ed alts: (\\[.*\\])\\)`); nextLine?.replace(regex, (a, b, rawAlts) => { alts = rawAlts; return ''; }); return alts ? `alts: ${alts} ` : ``; }; // Special cases if (line.startsWith('SCAV ')) { line = line.replace(/: (\[room: .*?\]) by (.*)/, (match, roominfo, rest) => `: by ${rest} ${roominfo}`); } line = line.replace( /(GIVEAWAY WIN|GTS FINISHED): ([A-Za-z0-9].*?)(won|has finished)/, (match, action, user) => `${action}: [${toID(user)}]:` ); if (line.includes(':')) { const possibleModernAction = line.slice(0, line.indexOf(':')).trim(); if (possibleModernAction === possibleModernAction.toUpperCase()) { if (possibleModernAction.includes('[')) { // for corrupted lines const [drop, ...keep] = line.split('['); process.stderr.write(`Ignoring malformed line: ${drop}\n`); return modernizeLog(keep.join('')); } if (/\(.+\) by [a-z0-9]{1,19}$/.test(line) && !['OLD MODLOG', 'NOTE'].includes(possibleModernAction)) { // weird reason formatting const reason = parseBrackets(line, '(', true); return `${prefix}${line.replace(` (${reason})`, '')}: ${reason}`; } // Log is already modernized return `${prefix}${line}`; } } if (/\[(the|a)poll\] was (started|ended) by/.test(line)) { const actionTaker = toID(line.slice(line.indexOf(' by ') + ' by '.length)); const isEnding = line.includes('was ended by'); return `${prefix}POLL${isEnding ? ' END' : ''}: by ${actionTaker}`; } if (/User (.*?) won the game of (.*?) 
mode trivia/.test(line)) { return `${prefix}TRIVIAGAME: by unknown: ${line}`; } const modernizerTransformations: {[k: string]: (log: string) => string} = { 'notes: ': (log) => { const [actionTaker, ...rest] = line.split(' notes: '); return `NOTE: by ${toID(actionTaker)}: ${rest.join('')}`; }, ' declared': (log) => { let newAction = 'DECLARE'; let oldAction = ' declared'; if (log.includes(' globally declared')) { oldAction = ' globally declared'; newAction = 'GLOBALDECLARE'; } if (log.includes('(chat level)')) { oldAction += ' (chat level)'; newAction = `CHATDECLARE`; } const actionTakerName = toID(log.slice(0, log.lastIndexOf(oldAction))); log = log.slice(actionTakerName.length); log = log.slice(oldAction.length); log = log.replace(/^\s?:/, '').trim(); return `${newAction}: by ${actionTakerName}: ${log}`; }, 'changed the roomdesc to: ': (log) => { const actionTaker = parseBrackets(log, '['); log = log.slice(actionTaker.length + 3); log = log.slice('changed the roomdesc to: '.length + 1, -2); return `ROOMDESC: by ${actionTaker}: to "${log}"`; }, 'roomevent titled "': (log) => { let action; if (log.includes(' added a roomevent titled "')) { action = 'added a'; } else if (log.includes(' removed a roomevent titled "')) { action = 'removed a'; } else { action = 'edited the'; } const actionTakerName = log.slice(0, log.lastIndexOf(` ${action} roomevent titled "`)); log = log.slice(actionTakerName.length + 1); const eventName = log.slice(` ${action} roomevent titled `.length, -2); return `ROOMEVENT: by ${toID(actionTakerName)}: ${action.split(' ')[0]} "${eventName}"`; }, 'set modchat to ': (log) => { const actionTaker = parseBrackets(log, '['); log = log.slice(actionTaker.length + 3); log = log.slice('set modchat to '.length); return `MODCHAT: by ${actionTaker}: to ${log}`; }, 'set modjoin to ': (log) => { const actionTakerName = log.slice(0, log.lastIndexOf(' set')); log = log.slice(actionTakerName.length + 1); log = log.slice('set modjoin to '.length); const rank = log.startsWith('sync') ? 'sync' : log.replace('.', ''); return `MODJOIN${rank === 'sync' ? ' SYNC' : ''}: by ${toID(actionTakerName)}${rank !== 'sync' ? `: ${rank}` : ``}`; }, 'turned off modjoin': (log) => { const actionTakerName = log.slice(0, log.lastIndexOf(' turned off modjoin')); return `MODJOIN: by ${toID(actionTakerName)}: OFF`; }, 'changed the roomintro': (log) => { const isDeletion = /deleted the (staff|room)intro/.test(log); const isRoomintro = log.includes('roomintro'); const actionTaker = toID(log.slice(0, log.indexOf(isDeletion ? 'deleted' : 'changed'))); return `${isDeletion ? 'DELETE' : ''}${isRoomintro ? 
'ROOM' : 'STAFF'}INTRO: by ${actionTaker}`; }, 'deleted the roomintro': (log) => modernizerTransformations['changed the roomintro'](log), 'changed the staffintro': (log) => modernizerTransformations['changed the roomintro'](log), 'deleted the staffintro': (log) => modernizerTransformations['changed the roomintro'](log), 'created a tournament in': (log) => { const actionTaker = parseBrackets(log, '['); log = log.slice(actionTaker.length + 3); log = log.slice(24, -8); return `TOUR CREATE: by ${actionTaker}: ${log}`; }, 'was disqualified from the tournament by': (log) => { const disqualified = parseBrackets(log, '['); log = log.slice(disqualified.length + 3); log = log.slice('was disqualified from the tournament by'.length); return `TOUR DQ: [${toID(disqualified)}] by ${toID(log)}`; }, 'The tournament auto disqualify timeout was set to': (log) => { const byIndex = log.indexOf(' by '); const actionTaker = log.slice(byIndex + ' by '.length); const length = log.slice('The tournament auto disqualify timeout was set to'.length, byIndex); return `TOUR AUTODQ: by ${toID(actionTaker)}: ${length.trim()}`; }, ' was blacklisted from ': (log) => { const isName = log.includes(' was nameblacklisted from '); const banned = toID(log.slice(0, log.indexOf(` was ${isName ? 'name' : ''}blacklisted from `))); log = log.slice(log.indexOf(' by ') + ' by '.length); let reason, ip; if (/\(.*\)/.test(log)) { reason = parseBrackets(log, '('); if (/\[.*\]/.test(log)) ip = parseBrackets(log, '['); log = log.slice(0, log.indexOf('(')); } const actionTaker = toID(log); return `${isName ? 'NAME' : ''}BLACKLIST: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`; }, ' was nameblacklisted from ': (log) => modernizerTransformations[' was blacklisted from '](log), ' was banned from room ': (log) => { const banned = toID(log.slice(0, log.indexOf(' was banned from room '))); log = log.slice(log.indexOf(' by ') + ' by '.length); let reason, ip; if (/\(.*\)/.test(log)) { reason = parseBrackets(log, '('); if (/\[.*\]/.test(log)) ip = parseBrackets(log, '['); log = log.slice(0, log.indexOf('(')); } const actionTaker = toID(log); return `ROOMBAN: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`; }, ' was muted by ': (log) => { let muted = ''; let isHour = false; [muted, log] = log.split(' was muted by '); muted = toID(muted); let reason, ip; if (/\(.*\)/.test(log)) { reason = parseBrackets(log, '('); if (/\[.*\]/.test(log)) ip = parseBrackets(log, '['); log = log.slice(0, log.indexOf('(')); } let actionTaker = toID(log); if (actionTaker.endsWith('for1hour')) { isHour = true; actionTaker = actionTaker.replace(/^(.*)(for1hour)$/, (match, staff) => staff) as ID; } return `${isHour ? 'HOUR' : ''}MUTE: [${muted}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`; }, ' was locked from talking ': (log) => { const isWeek = log.includes(' was locked from talking for a week '); const locked = toID(log.slice(0, log.indexOf(' was locked from talking '))); log = log.slice(log.indexOf(' by ') + ' by '.length); let reason, ip; if (/\(.*\)/.test(log)) { reason = parseBrackets(log, '('); if (/\[.*\]/.test(log)) ip = parseBrackets(log, '['); log = log.slice(0, log.indexOf('(')); } const actionTaker = toID(log); return `${isWeek ? 'WEEK' : ''}LOCK: [${locked}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? 
`: ${reason}` : ``}`; }, ' was banned ': (log) => { if (log.includes(' was banned from room ')) return modernizerTransformations[' was banned from room '](log); const banned = toID(log.slice(0, log.indexOf(' was banned '))); log = log.slice(log.indexOf(' by ') + ' by '.length); let reason, ip; if (/\(.*\)/.test(log)) { reason = parseBrackets(log, '('); if (/\[.*\]/.test(log)) ip = parseBrackets(log, '['); log = log.slice(0, log.indexOf('(')); } const actionTaker = toID(log); return `BAN: [${banned}] ${getAlts()}${ip ? `[${ip}] ` : ``}by ${actionTaker}${reason ? `: ${reason}` : ``}`; }, 'was promoted to ': (log) => { const isDemotion = log.includes('was demoted to '); const userid = toID(log.split(' was ')[0]); if (!userid) { throw new Error(`Ignoring malformed line: ${prefix}${log}`); } log = log.slice(userid.length + 3); log = log.slice(`was ${isDemotion ? 'demoted' : 'promoted'} to `.length); let rank = log.slice(0, log.indexOf(' by')).replace(/ /, '').toUpperCase(); log = log.slice(`${rank} by `.length); if (!rank.startsWith('ROOM')) rank = `GLOBAL ${rank}`; const actionTaker = parseBrackets(log, '['); return `${rank}: [${userid}] by ${actionTaker}${isDemotion ? ': (demote)' : ''}`; }, 'was demoted to ': (log) => modernizerTransformations['was promoted to '](log), 'was appointed Room Owner by ': (log) => { const userid = parseBrackets(log, '['); log = log.slice(userid.length + 3); log = log.slice('was appointed Room Owner by '.length); const actionTaker = parseBrackets(log, '['); return `ROOMOWNER: [${userid}] by ${actionTaker}`; }, ' claimed this ticket': (log) => { const actions: {[k: string]: string} = { ' claimed this ticket': 'TICKETCLAIM', ' closed this ticket': 'TICKETCLOSE', ' deleted this ticket': 'TICKETDELETE', }; for (const oldAction in actions) { if (log.includes(oldAction)) { const actionTaker = toID(log.slice(0, log.indexOf(oldAction))); return `${actions[oldAction]}: by ${actionTaker}`; } } return log; }, 'This ticket is now claimed by ': (log) => { const claimer = toID(log.slice(log.indexOf(' by ') + ' by '.length)); return `TICKETCLAIM: by ${claimer}`; }, ' is no longer interested in this ticket': (log) => { const abandoner = toID(log.slice(0, log.indexOf(' is no longer interested in this ticket'))); return `TICKETABANDON: by ${abandoner}`; }, ' opened a new ticket': (log) => { const opener = toID(log.slice(0, log.indexOf(' opened a new ticket'))); const problem = log.slice(log.indexOf(' Issue: ') + ' Issue: '.length).trim(); return `TICKETOPEN: by ${opener}: ${problem}`; }, ' closed this ticket': (log) => modernizerTransformations[' claimed this ticket'](log), ' deleted this ticket': (log) => modernizerTransformations[' claimed this ticket'](log), 'This ticket is no longer claimed': () => 'TICKETUNCLAIM', ' has been caught attempting a hunt with ': (log) => { const index = log.indexOf(' has been caught attempting a hunt with '); const user = toID(log.slice(0, index)); log = log.slice(index + ' has been caught attempting a hunt with '.length); log = log.replace('. 
The user has also', '; has also').replace('.', ''); return `SCAV CHEATER: [${user}]: caught attempting a hunt with ${log}`; }, }; for (const oldAction in modernizerTransformations) { if (line.includes(oldAction)) { try { return prefix + modernizerTransformations[oldAction](line); } catch (err) { if (Config.nofswriting) throw err; process.stderr.write(`${err.message}\n`); } } } return `${prefix}${line}`; } export function parseModlog(raw: string, nextLine?: string, isGlobal = false): ModlogEntry | undefined { let line = modernizeLog(raw); if (!line) return; const timestamp = parseBrackets(line, '['); line = line.slice(timestamp.length + 3); const [roomID, ...bonus] = parseBrackets(line, '(').split(' '); const log: ModlogEntry = { action: 'NULL', roomID, visualRoomID: '', userid: null, autoconfirmedID: null, alts: [], ip: null, isGlobal, loggedBy: null, note: '', time: Math.floor(new Date(timestamp).getTime()) || Date.now(), }; if (bonus.length) log.visualRoomID = `${log.roomID} ${bonus.join(' ')}`; line = line.slice((log.visualRoomID || log.roomID).length + 3); const actionColonIndex = line.indexOf(':'); const action = line.slice(0, actionColonIndex); if (action !== action.toUpperCase()) { // no action (probably an old-format log that slipped past the modernizer) log.action = 'OLD MODLOG'; log.loggedBy = 'unknown' as ID; log.note = line.trim(); return log; } else { log.action = action; if (log.action === 'OLD MODLOG') { log.loggedBy = 'unknown' as ID; log.note = line.slice(line.indexOf('by unknown: ') + 'by unknown :'.length).trim(); return log; } line = line.slice(actionColonIndex + 2); } if (line[0] === '[') { if (!IP_ONLY_ACTIONS.has(log.action)) { const userid = toID(parseBrackets(line, '[')); log.userid = userid; line = line.slice(userid.length + 3).trim(); if (line.startsWith('ac:')) { line = line.slice(3).trim(); const ac = parseBrackets(line, '['); log.autoconfirmedID = toID(ac); line = line.slice(ac.length + 3).trim(); } if (line.startsWith('alts:')) { line = line.slice(5).trim(); const alts = new Set<ID>(); // we need to weed out duplicate alts let alt = parseBrackets(line, '['); do { if (alt.includes(', ')) { // old alt format for (const trueAlt of alt.split(', ')) { alts.add(toID(trueAlt)); } line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim(); if (!line.startsWith('[')) line = `[${line}`; } else { if (IPTools.ipRegex.test(alt)) break; alts.add(toID(alt)); line = line.slice(line.indexOf(`[${alt}],`) + `[${alt}],`.length).trim(); if (alt.includes('[') && !line.startsWith('[')) line = `[${line}`; } alt = parseBrackets(line, '['); } while (alt); log.alts = [...alts]; } } if (line[0] === '[') { log.ip = parseBrackets(line, '['); line = line.slice(log.ip.length + 3).trim(); } } let regex = /\bby .*:/; let actionTakerIndex = regex.exec(line)?.index; if (actionTakerIndex === undefined) { actionTakerIndex = line.indexOf('by '); regex = /\bby .*/; } if (actionTakerIndex !== -1) { const colonIndex = line.indexOf(': '); const actionTaker = line.slice(actionTakerIndex + 3, colonIndex > actionTakerIndex ? 
colonIndex : undefined); if (toID(actionTaker).length < 19) { log.loggedBy = toID(actionTaker) || null; if (colonIndex > actionTakerIndex) line = line.slice(colonIndex); line = line.replace(regex, ' '); } } if (line) log.note = line.replace(/^\s?:\s?/, '').trim(); return log; } export function rawifyLog(log: ModlogEntry) { let result = `[${new Date(log.time || Date.now()).toJSON()}] (${(log.visualRoomID || log.roomID || 'global').replace(/^global-/, '')}) ${log.action}`; if (log.userid) result += `: [${log.userid}]`; if (log.autoconfirmedID) result += ` ac: [${log.autoconfirmedID}]`; if (log.alts.length) result += ` alts: [${log.alts.join('], [')}]`; if (log.ip) { if (!log.userid) result += `:`; result += ` [${log.ip}]`; } if (log.loggedBy) result += `${result.endsWith(']') ? '' : ':'} by ${log.loggedBy}`; if (log.note) result += `: ${log.note}`; return result + `\n`; } export class ModlogConverterSQLite { readonly databaseFile: string; readonly textLogDir: string; readonly isTesting: {files: Map<string, string>, db: DatabaseType.Database} | null = null; constructor(databaseFile: string, textLogDir: string, isTesting?: DatabaseType.Database) { this.databaseFile = databaseFile; this.textLogDir = textLogDir; if (isTesting || Config.nofswriting) { this.isTesting = {files: new Map<string, string>(), db: isTesting || new Database(':memory:')}; } } async toTxt() { const database = this.isTesting?.db || new Database(this.databaseFile, {fileMustExist: true}); const roomids = database.prepare('SELECT DISTINCT roomid FROM modlog').all(); const globalEntries = []; for (const {roomid} of roomids) { if (!Config.nofswriting) console.log(`Reading ${roomid}...`); const results = database.prepare( `SELECT *, (SELECT group_concat(userid, ',') FROM alts WHERE alts.modlog_id = modlog.modlog_id) as alts ` + `FROM modlog WHERE roomid = ? 
ORDER BY timestamp ASC` ).all(roomid); const trueRoomID = roomid.replace(/^global-/, ''); let entriesLogged = 0; let entries: string[] = []; const insertEntries = async () => { if (roomid === 'global') return; entriesLogged += entries.length; if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Wrote ${entriesLogged} entries from '${trueRoomID}'`); } await this.writeFile(`${this.textLogDir}/modlog_${trueRoomID}.txt`, entries.join('')); entries = []; }; for (const result of results) { const entry: ModlogEntry = { action: result.action, roomID: result.roomid?.replace(/^global-/, ''), visualRoomID: result.visual_roomid, userid: result.userid, autoconfirmedID: result.autoconfirmed_userid, alts: result.alts?.split(','), ip: result.ip, isGlobal: result.roomid?.startsWith('global-') || result.roomid === 'global', loggedBy: result.action_taker_userid, note: result.note, time: result.timestamp, }; const rawLog = rawifyLog(entry); entries.push(rawLog); if (entry.isGlobal) { globalEntries.push(rawLog); } if (entries.length === ENTRIES_TO_BUFFER) await insertEntries(); } await insertEntries(); if (entriesLogged) process.stdout.write('\n'); } if (!Config.nofswriting) console.log(`Writing the global modlog...`); await this.writeFile(`${this.textLogDir}/modlog_global.txt`, globalEntries.join('')); } async writeFile(path: string, text: string) { if (this.isTesting) { const old = this.isTesting.files.get(path); return this.isTesting.files.set(path, `${old || ''}${text}`); } return FS(path).append(text); } } export class ModlogConverterTxt { readonly databaseFile: string; readonly modlog: Modlog; readonly textLogDir: string; readonly isTesting: {files: Map<string, string>, ml?: Modlog} | null = null; constructor(databaseFile: string, textLogDir: string, isTesting?: Map<string, string>, useFTSExtension?: boolean) { this.databaseFile = databaseFile; this.textLogDir = textLogDir; if (isTesting || Config.nofswriting) { this.isTesting = { files: isTesting || new Map<string, string>(), }; } this.modlog = new Modlog(this.textLogDir, this.isTesting ? ':memory:' : this.databaseFile); } async toSQLite() { const files = this.isTesting ? [...this.isTesting.files.keys()] : await FS(this.textLogDir).readdir(); // Read global modlog last to avoid inserting duplicate data to database if (files.includes('modlog_global.txt')) { files.splice(files.indexOf('modlog_global.txt'), 1); files.push('modlog_global.txt'); } const globalEntries = []; for (const file of files) { if (file === 'README.md') continue; const roomid = file.slice(7, -4); const lines = this.isTesting ? 
this.isTesting.files.get(file)?.split('\n') || [] : FS(`${this.textLogDir}/${file}`).createReadStream().byLine(); let entriesLogged = 0; let lastLine = undefined; let entries: ModlogEntry[] = []; const insertEntries = (alwaysShowProgress?: boolean) => { this.modlog.writeSQL(entries); entriesLogged += entries.length; if (!Config.nofswriting && ( alwaysShowProgress || entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER )) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Inserted ${entriesLogged} entries from '${roomid}'`); } entries = []; }; for await (const line of lines) { const entry = parseModlog(line, lastLine, roomid === 'global'); lastLine = line; if (!entry) continue; if (roomid !== 'global') entries.push(entry); if (entry.isGlobal) { globalEntries.push(entry); } if (entries.length === ENTRIES_TO_BUFFER) insertEntries(); } insertEntries(true); if (entriesLogged) process.stdout.write('\n'); } return this.modlog.database; } } export class ModlogConverterTest { readonly inputDir: string; readonly outputDir: string; constructor(inputDir: string, outputDir: string) { this.inputDir = inputDir; this.outputDir = outputDir; } async
() { const files = await FS(this.inputDir).readdir(); // Read global modlog last to avoid inserting duplicate data to database if (files.includes('modlog_global.txt')) { files.splice(files.indexOf('modlog_global.txt'), 1); files.push('modlog_global.txt'); } const globalEntries = []; for (const file of files) { if (file === 'README.md') continue; const roomid = file.slice(7, -4); let entriesLogged = 0; let lastLine = undefined; let entries: string[] = []; const insertEntries = async () => { if (roomid === 'global') return; entriesLogged += entries.length; if (!Config.nofswriting && (entriesLogged % ENTRIES_TO_BUFFER === 0 || entriesLogged < ENTRIES_TO_BUFFER)) { process.stdout.clearLine(0); process.stdout.cursorTo(0); process.stdout.write(`Wrote ${entriesLogged} entries from '${roomid}'`); } await FS(`${this.outputDir}/modlog_${roomid}.txt`).append(entries.join('')); entries = []; }; const readStream = FS(`${this.inputDir}/${file}`).createReadStream(); for await (const line of readStream.byLine()) { const entry = parseModlog(line, lastLine, roomid === 'global'); lastLine = line; if (!entry) continue; const rawLog = rawifyLog(entry); if (roomid !== 'global') entries.push(rawLog); if (entry.isGlobal) { globalEntries.push(rawLog); } if (entries.length === ENTRIES_TO_BUFFER) await insertEntries(); } await insertEntries(); if (entriesLogged) process.stdout.write('\n'); } if (!Config.nofswriting) console.log(`Writing the global modlog...`); await FS(`${this.outputDir}/modlog_global.txt`).append(globalEntries.join('')); } } export const ModlogConverter = { async convert( from: ModlogFormat, to: ModlogFormat, databasePath: string, textLogDirectoryPath: string, outputLogPath?: string ) { if (from === 'txt' && to === 'txt' && outputLogPath) { const converter = new ModlogConverterTest(textLogDirectoryPath, outputLogPath); await converter.toTxt(); console.log("\nDone!"); process.exit(); } else if (from === 'sqlite' && to === 'txt') { const converter = new ModlogConverterSQLite(databasePath, textLogDirectoryPath); await converter.toTxt(); console.log("\nDone!"); process.exit(); } else if (from === 'txt' && to === 'sqlite') { const converter = new ModlogConverterTxt(databasePath, textLogDirectoryPath); await converter.toSQLite(); console.log("\nDone!"); process.exit(); } }, };
toTxt
identifier_name
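This record masks only the method name `toTxt`, in contrast to the identifier_body record above. Either way, a record reassembles the same way; a minimal Python sketch of the layout used throughout this dump (field names follow the dump's schema; the dataclass itself is illustrative):

from dataclasses import dataclass

@dataclass
class FIMRecord:
    file_name: str  # e.g. 'converter.ts' or 'wide_deep.py'
    prefix: str     # source code before the masked span
    suffix: str     # source code after the masked span
    middle: str     # the masked span itself, e.g. 'toTxt' here
    fim_type: str   # 'identifier_body', 'identifier_name', or 'conditional_block'

def reassemble(rec: FIMRecord) -> str:
    # Regardless of fim_type, prefix + middle + suffix restores the original file.
    return rec.prefix + rec.middle + rec.suffix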
wide_deep.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import shutil import sys import tensorflow as tf _CSV_COLUMNS = [ 'age', 'workclass', 'fnlwgt', 'education', 'education_num', 'marital_status', 'occupation', 'relationship', 'race', 'gender', 'capital_gain', 'capital_loss', 'hours_per_week', 'native_country', 'income_bracket' ] _CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''], [0], [0], [0], [''], ['']] parser = argparse.ArgumentParser() parser.add_argument( '--model_dir', type=str, default='/tmp/census_model', help='Base directory for the model.') parser.add_argument( '--model_type', type=str, default='wide_deep', help="Valid model types: {'wide', 'deep', 'wide_deep'}.") parser.add_argument( '--train_epochs', type=int, default=20, help='Number of training epochs.') parser.add_argument( '--epochs_per_eval', type=int, default=2, help='The number of training epochs to run between evaluations.') parser.add_argument( '--batch_size', type=int, default=40, help='Number of examples per batch.') parser.add_argument( '--train_data', type=str, default='/tmp/census_data/adult.data', help='Path to the training data.') parser.add_argument( '--test_data', type=str, default='/tmp/census_data/adult.test', help='Path to the test data.') def build_model_columns(): """Builds a set of wide and deep feature columns.""" # Continuous columns age = tf.feature_column.numeric_column('age') education_num = tf.feature_column.numeric_column('education_num') capital_gain = tf.feature_column.numeric_column('capital_gain') capital_loss = tf.feature_column.numeric_column('capital_loss') hours_per_week = tf.feature_column.numeric_column('hours_per_week') education = tf.feature_column.categorical_column_with_vocabulary_list( 'education', [ 'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college', 'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school', '5th-6th', '10th', '1st-4th', 'Preschool', '12th']) marital_status = tf.feature_column.categorical_column_with_vocabulary_list( 'marital_status', [ 'Married-civ-spouse', 'Divorced', 'Married-spouse-absent', 'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed']) relationship = tf.feature_column.categorical_column_with_vocabulary_list( 'relationship', [ 'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried', 'Other-relative']) workclass = tf.feature_column.categorical_column_with_vocabulary_list( 'workclass', [ 'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov', 'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked']) # To show an example of hashing: occupation = tf.feature_column.categorical_column_with_hash_bucket( 'occupation', hash_bucket_size=1000) # Transformations. 
age_buckets = tf.feature_column.bucketized_column( age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) # Wide columns and deep columns. base_columns = [ education, marital_status, relationship, workclass, occupation, age_buckets, ] crossed_columns = [ tf.feature_column.crossed_column( ['education', 'occupation'], hash_bucket_size=1000), tf.feature_column.crossed_column( [age_buckets, 'education', 'occupation'], hash_bucket_size=1000), ] wide_columns = base_columns + crossed_columns deep_columns = [ age, education_num, capital_gain, capital_loss, hours_per_week, tf.feature_column.indicator_column(workclass), tf.feature_column.indicator_column(education), tf.feature_column.indicator_column(marital_status), tf.feature_column.indicator_column(relationship), # To show an example of embedding tf.feature_column.embedding_column(occupation, dimension=8), ] return wide_columns, deep_columns def build_estimator(model_dir, model_type): """Build an estimator appropriate for the given model type.""" wide_columns, deep_columns = build_model_columns() hidden_units = [100, 75, 50, 25] # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which # trains faster than GPU for this model. run_config = tf.estimator.RunConfig().replace( session_config=tf.ConfigProto(device_count={'GPU': 0})) if model_type == 'wide': return tf.estimator.LinearClassifier( model_dir=model_dir, feature_columns=wide_columns, config=run_config) elif model_type == 'deep': return tf.estimator.DNNClassifier( model_dir=model_dir, feature_columns=deep_columns, hidden_units=hidden_units, config=run_config) else: return tf.estimator.DNNLinearCombinedClassifier( model_dir=model_dir, linear_feature_columns=wide_columns, dnn_feature_columns=deep_columns, dnn_hidden_units=hidden_units, config=run_config) def input_fn(data_file, num_epochs, shuffle, batch_size): """Generate an input function for the Estimator.""" assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have either run data_download.py or ' 'set both arguments --train_data and --test_data.' % data_file) def parse_csv(value):
# Extract lines from input files using the Dataset API. dataset = tf.contrib.data.TextLineDataset(data_file) dataset = dataset.map(parse_csv, num_threads=5) # Apply transformations to the Dataset dataset = dataset.batch(batch_size) dataset = dataset.repeat(num_epochs) # Input function that is called by the Estimator def _input_fn(): if shuffle: # Apply shuffle transformation to re-shuffle the dataset in each call. shuffled_dataset = dataset.shuffle(buffer_size=100000) iterator = shuffled_dataset.make_one_shot_iterator() else: iterator = dataset.make_one_shot_iterator() features, labels = iterator.get_next() return features, labels return _input_fn def main(unused_argv): # Clean up the model directory if present shutil.rmtree(FLAGS.model_dir, ignore_errors=True) model = build_estimator(FLAGS.model_dir, FLAGS.model_type) # Set up input function generators for the train and test data files. train_input_fn = input_fn( data_file=FLAGS.train_data, num_epochs=FLAGS.epochs_per_eval, shuffle=True, batch_size=FLAGS.batch_size) eval_input_fn = input_fn( data_file=FLAGS.test_data, num_epochs=1, shuffle=False, batch_size=FLAGS.batch_size) # Train and evaluate the model every `FLAGS.epochs_per_eval` epochs. for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval): model.train(input_fn=train_input_fn) results = model.evaluate(input_fn=eval_input_fn) # Display evaluation metrics print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval) print('-' * 30) for key in sorted(results): print('%s: %s' % (key, results[key])) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
print('Parsing', data_file) columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) features = dict(zip(_CSV_COLUMNS, columns)) labels = features.pop('income_bracket') return features, tf.equal(labels, '>50K')
identifier_body
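Here the masked span is the body of `parse_csv`. To sanity-check the CSV schema without pulling in TensorFlow, a pure-Python sketch of the same row-to-(features, label) mapping (column names copied from the record; note the stand-in keeps every value as a string, unlike the typed `record_defaults` passed to `tf.decode_csv`):

import csv
from io import StringIO

_CSV_COLUMNS = [
    'age', 'workclass', 'fnlwgt', 'education', 'education_num',
    'marital_status', 'occupation', 'relationship', 'race', 'gender',
    'capital_gain', 'capital_loss', 'hours_per_week', 'native_country',
    'income_bracket',
]

def parse_row(line: str):
    """Split one census CSV line into a feature dict and a boolean label,
    True when income_bracket is '>50K' (mirrors tf.equal(labels, '>50K'))."""
    values = next(csv.reader(StringIO(line)))
    features = dict(zip(_CSV_COLUMNS, values))
    label = features.pop('income_bracket') == '>50K'
    return features, label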
wide_deep.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import shutil import sys import tensorflow as tf _CSV_COLUMNS = [ 'age', 'workclass', 'fnlwgt', 'education', 'education_num', 'marital_status', 'occupation', 'relationship', 'race', 'gender', 'capital_gain', 'capital_loss', 'hours_per_week', 'native_country', 'income_bracket' ] _CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''], [0], [0], [0], [''], ['']] parser = argparse.ArgumentParser() parser.add_argument( '--model_dir', type=str, default='/tmp/census_model', help='Base directory for the model.') parser.add_argument( '--model_type', type=str, default='wide_deep', help="Valid model types: {'wide', 'deep', 'wide_deep'}.") parser.add_argument( '--train_epochs', type=int, default=20, help='Number of training epochs.') parser.add_argument( '--epochs_per_eval', type=int, default=2, help='The number of training epochs to run between evaluations.') parser.add_argument( '--batch_size', type=int, default=40, help='Number of examples per batch.') parser.add_argument( '--train_data', type=str, default='/tmp/census_data/adult.data', help='Path to the training data.') parser.add_argument( '--test_data', type=str, default='/tmp/census_data/adult.test', help='Path to the test data.') def build_model_columns(): """Builds a set of wide and deep feature columns.""" # Continuous columns age = tf.feature_column.numeric_column('age') education_num = tf.feature_column.numeric_column('education_num') capital_gain = tf.feature_column.numeric_column('capital_gain') capital_loss = tf.feature_column.numeric_column('capital_loss') hours_per_week = tf.feature_column.numeric_column('hours_per_week') education = tf.feature_column.categorical_column_with_vocabulary_list( 'education', [ 'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college', 'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school', '5th-6th', '10th', '1st-4th', 'Preschool', '12th']) marital_status = tf.feature_column.categorical_column_with_vocabulary_list( 'marital_status', [ 'Married-civ-spouse', 'Divorced', 'Married-spouse-absent', 'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed']) relationship = tf.feature_column.categorical_column_with_vocabulary_list( 'relationship', [ 'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried', 'Other-relative']) workclass = tf.feature_column.categorical_column_with_vocabulary_list( 'workclass', [ 'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov', 'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked']) # To show an example of hashing: occupation = tf.feature_column.categorical_column_with_hash_bucket( 'occupation', hash_bucket_size=1000) # Transformations. 
age_buckets = tf.feature_column.bucketized_column( age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) # Wide columns and deep columns. base_columns = [ education, marital_status, relationship, workclass, occupation, age_buckets, ] crossed_columns = [ tf.feature_column.crossed_column( ['education', 'occupation'], hash_bucket_size=1000), tf.feature_column.crossed_column( [age_buckets, 'education', 'occupation'], hash_bucket_size=1000), ] wide_columns = base_columns + crossed_columns deep_columns = [ age, education_num, capital_gain, capital_loss, hours_per_week, tf.feature_column.indicator_column(workclass), tf.feature_column.indicator_column(education), tf.feature_column.indicator_column(marital_status), tf.feature_column.indicator_column(relationship), # To show an example of embedding tf.feature_column.embedding_column(occupation, dimension=8), ] return wide_columns, deep_columns def build_estimator(model_dir, model_type): """Build an estimator appropriate for the given model type.""" wide_columns, deep_columns = build_model_columns() hidden_units = [100, 75, 50, 25] # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which # trains faster than GPU for this model. run_config = tf.estimator.RunConfig().replace( session_config=tf.ConfigProto(device_count={'GPU': 0})) if model_type == 'wide': return tf.estimator.LinearClassifier( model_dir=model_dir, feature_columns=wide_columns, config=run_config) elif model_type == 'deep': return tf.estimator.DNNClassifier( model_dir=model_dir, feature_columns=deep_columns, hidden_units=hidden_units, config=run_config) else: return tf.estimator.DNNLinearCombinedClassifier( model_dir=model_dir, linear_feature_columns=wide_columns, dnn_feature_columns=deep_columns, dnn_hidden_units=hidden_units, config=run_config) def input_fn(data_file, num_epochs, shuffle, batch_size): """Generate an input function for the Estimator.""" assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have either run data_download.py or ' 'set both arguments --train_data and --test_data.' % data_file) def parse_csv(value): print('Parsing', data_file) columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) features = dict(zip(_CSV_COLUMNS, columns)) labels = features.pop('income_bracket') return features, tf.equal(labels, '>50K') # Extract lines from input files using the Dataset API. dataset = tf.contrib.data.TextLineDataset(data_file) dataset = dataset.map(parse_csv, num_threads=5) # Apply transformations to the Dataset dataset = dataset.batch(batch_size) dataset = dataset.repeat(num_epochs) # Input function that is called by the Estimator def _input_fn(): if shuffle: # Apply shuffle transformation to re-shuffle the dataset in each call.
else: iterator = dataset.make_one_shot_iterator() features, labels = iterator.get_next() return features, labels return _input_fn def main(unused_argv): # Clean up the model directory if present shutil.rmtree(FLAGS.model_dir, ignore_errors=True) model = build_estimator(FLAGS.model_dir, FLAGS.model_type) # Set up input function generators for the train and test data files. train_input_fn = input_fn( data_file=FLAGS.train_data, num_epochs=FLAGS.epochs_per_eval, shuffle=True, batch_size=FLAGS.batch_size) eval_input_fn = input_fn( data_file=FLAGS.test_data, num_epochs=1, shuffle=False, batch_size=FLAGS.batch_size) # Train and evaluate the model every `FLAGS.epochs_per_eval` epochs. for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval): model.train(input_fn=train_input_fn) results = model.evaluate(input_fn=eval_input_fn) # Display evaluation metrics print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval) print('-' * 30) for key in sorted(results): print('%s: %s' % (key, results[key])) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
shuffled_dataset = dataset.shuffle(buffer_size=100000) iterator = shuffled_dataset.make_one_shot_iterator()
conditional_block
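The masked conditional here is the shuffle branch: `shuffled_dataset` is rebuilt inside `_input_fn` so that every call to the input function re-shuffles the data. With the non-contrib `tf.data` API the same intent is expressed directly; a hedged sketch (current `tf.data` method names, not the `tf.contrib.data` calls the record uses, and the CSV-parsing map is omitted for brevity):

import tensorflow as tf

def make_input_fn(data_file, batch_size, num_epochs, shuffle):
    def _input_fn():
        dataset = tf.data.TextLineDataset(data_file)
        if shuffle:
            # reshuffle_each_iteration=True yields a fresh order on every epoch
            dataset = dataset.shuffle(buffer_size=100000,
                                      reshuffle_each_iteration=True)
        return dataset.repeat(num_epochs).batch(batch_size)
    return _input_fn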
wide_deep.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import shutil import sys import tensorflow as tf _CSV_COLUMNS = [ 'age', 'workclass', 'fnlwgt', 'education', 'education_num', 'marital_status', 'occupation', 'relationship', 'race', 'gender', 'capital_gain', 'capital_loss', 'hours_per_week', 'native_country', 'income_bracket' ] _CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''], [0], [0], [0], [''], ['']] parser = argparse.ArgumentParser() parser.add_argument( '--model_dir', type=str, default='/tmp/census_model', help='Base directory for the model.') parser.add_argument( '--model_type', type=str, default='wide_deep', help="Valid model types: {'wide', 'deep', 'wide_deep'}.") parser.add_argument( '--train_epochs', type=int, default=20, help='Number of training epochs.') parser.add_argument( '--epochs_per_eval', type=int, default=2, help='The number of training epochs to run between evaluations.') parser.add_argument( '--batch_size', type=int, default=40, help='Number of examples per batch.') parser.add_argument( '--train_data', type=str, default='/tmp/census_data/adult.data', help='Path to the training data.') parser.add_argument( '--test_data', type=str, default='/tmp/census_data/adult.test', help='Path to the test data.') def build_model_columns(): """Builds a set of wide and deep feature columns.""" # Continuous columns age = tf.feature_column.numeric_column('age') education_num = tf.feature_column.numeric_column('education_num') capital_gain = tf.feature_column.numeric_column('capital_gain') capital_loss = tf.feature_column.numeric_column('capital_loss') hours_per_week = tf.feature_column.numeric_column('hours_per_week') education = tf.feature_column.categorical_column_with_vocabulary_list( 'education', [ 'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college', 'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school', '5th-6th', '10th', '1st-4th', 'Preschool', '12th']) marital_status = tf.feature_column.categorical_column_with_vocabulary_list( 'marital_status', [ 'Married-civ-spouse', 'Divorced', 'Married-spouse-absent', 'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed']) relationship = tf.feature_column.categorical_column_with_vocabulary_list( 'relationship', [ 'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried', 'Other-relative']) workclass = tf.feature_column.categorical_column_with_vocabulary_list( 'workclass', [ 'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov', 'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked']) # To show an example of hashing: occupation = tf.feature_column.categorical_column_with_hash_bucket( 'occupation', hash_bucket_size=1000) # Transformations. 
age_buckets = tf.feature_column.bucketized_column( age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) # Wide columns and deep columns. base_columns = [ education, marital_status, relationship, workclass, occupation, age_buckets, ] crossed_columns = [ tf.feature_column.crossed_column( ['education', 'occupation'], hash_bucket_size=1000), tf.feature_column.crossed_column( [age_buckets, 'education', 'occupation'], hash_bucket_size=1000), ] wide_columns = base_columns + crossed_columns deep_columns = [ age, education_num, capital_gain, capital_loss, hours_per_week, tf.feature_column.indicator_column(workclass), tf.feature_column.indicator_column(education), tf.feature_column.indicator_column(marital_status), tf.feature_column.indicator_column(relationship), # To show an example of embedding tf.feature_column.embedding_column(occupation, dimension=8), ] return wide_columns, deep_columns def build_estimator(model_dir, model_type): """Build an estimator appropriate for the given model type.""" wide_columns, deep_columns = build_model_columns() hidden_units = [100, 75, 50, 25] # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which # trains faster than GPU for this model. run_config = tf.estimator.RunConfig().replace( session_config=tf.ConfigProto(device_count={'GPU': 0})) if model_type == 'wide': return tf.estimator.LinearClassifier( model_dir=model_dir, feature_columns=wide_columns, config=run_config) elif model_type == 'deep': return tf.estimator.DNNClassifier( model_dir=model_dir, feature_columns=deep_columns, hidden_units=hidden_units, config=run_config) else: return tf.estimator.DNNLinearCombinedClassifier( model_dir=model_dir, linear_feature_columns=wide_columns, dnn_feature_columns=deep_columns, dnn_hidden_units=hidden_units, config=run_config) def input_fn(data_file, num_epochs, shuffle, batch_size): """Generate an input function for the Estimator.""" assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have either run data_download.py or ' 'set both arguments --train_data and --test_data.' % data_file) def parse_csv(value): print('Parsing', data_file) columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) features = dict(zip(_CSV_COLUMNS, columns)) labels = features.pop('income_bracket') return features, tf.equal(labels, '>50K') # Extract lines from input files using the Dataset API. dataset = tf.contrib.data.TextLineDataset(data_file) dataset = dataset.map(parse_csv, num_threads=5) # Apply transformations to the Dataset dataset = dataset.batch(batch_size) dataset = dataset.repeat(num_epochs) # Input function that is called by the Estimator def _input_fn(): if shuffle: # Apply shuffle transformation to re-shuffle the dataset in each call. shuffled_dataset = dataset.shuffle(buffer_size=100000) iterator = shuffled_dataset.make_one_shot_iterator() else: iterator = dataset.make_one_shot_iterator() features, labels = iterator.get_next() return features, labels return _input_fn def
(unused_argv): # Clean up the model directory if present shutil.rmtree(FLAGS.model_dir, ignore_errors=True) model = build_estimator(FLAGS.model_dir, FLAGS.model_type) # Set up input function generators for the train and test data files. train_input_fn = input_fn( data_file=FLAGS.train_data, num_epochs=FLAGS.epochs_per_eval, shuffle=True, batch_size=FLAGS.batch_size) eval_input_fn = input_fn( data_file=FLAGS.test_data, num_epochs=1, shuffle=False, batch_size=FLAGS.batch_size) # Train and evaluate the model every `FLAGS.epochs_per_eval` epochs. for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval): model.train(input_fn=train_input_fn) results = model.evaluate(input_fn=eval_input_fn) # Display evaluation metrics print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval) print('-' * 30) for key in sorted(results): print('%s: %s' % (key, results[key])) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
main
identifier_name
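The row above is a fill-in-the-middle split of wide_deep.py: for fim_type identifier_name the held-out middle is a single identifier (here the function name main). Note the columns print in prefix, suffix, middle order, so the three spans read out of order on the page; reassembly is always prefix + middle + suffix. A minimal reconstruction check, assuming rows are loaded as plain dictionaries keyed by the column names:

def reconstruct(row):
    # The three spans are contiguous in the original file, in this order.
    return row["prefix"] + row["middle"] + row["suffix"]

# Toy row mirroring the shape of the record above (values shortened).
row = {
    "prefix": "def ",
    "middle": "main",  # fim_type == 'identifier_name'
    "suffix": "(unused_argv):\n    pass\n",
}
assert reconstruct(row) == "def main(unused_argv):\n    pass\n"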
wide_deep.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import shutil import sys import tensorflow as tf _CSV_COLUMNS = [ 'age', 'workclass', 'fnlwgt', 'education', 'education_num', 'marital_status', 'occupation', 'relationship', 'race', 'gender', 'capital_gain', 'capital_loss', 'hours_per_week', 'native_country', 'income_bracket' ] _CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''], [0], [0], [0], [''], ['']] parser = argparse.ArgumentParser() parser.add_argument( '--model_dir', type=str, default='/tmp/census_model', help='Base directory for the model.') parser.add_argument( '--model_type', type=str, default='wide_deep', help="Valid model types: {'wide', 'deep', 'wide_deep'}.") parser.add_argument( '--train_epochs', type=int, default=20, help='Number of training epochs.') parser.add_argument( '--epochs_per_eval', type=int, default=2, help='The number of training epochs to run between evaluations.') parser.add_argument( '--batch_size', type=int, default=40, help='Number of examples per batch.') parser.add_argument( '--train_data', type=str, default='/tmp/census_data/adult.data', help='Path to the training data.') parser.add_argument( '--test_data', type=str, default='/tmp/census_data/adult.test', help='Path to the test data.') def build_model_columns(): """Builds a set of wide and deep feature columns.""" # Continuous columns age = tf.feature_column.numeric_column('age') education_num = tf.feature_column.numeric_column('education_num') capital_gain = tf.feature_column.numeric_column('capital_gain') capital_loss = tf.feature_column.numeric_column('capital_loss') hours_per_week = tf.feature_column.numeric_column('hours_per_week') education = tf.feature_column.categorical_column_with_vocabulary_list( 'education', [ 'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college', 'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school', '5th-6th', '10th', '1st-4th', 'Preschool', '12th']) marital_status = tf.feature_column.categorical_column_with_vocabulary_list( 'marital_status', [ 'Married-civ-spouse', 'Divorced', 'Married-spouse-absent', 'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed']) relationship = tf.feature_column.categorical_column_with_vocabulary_list( 'relationship', [ 'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried', 'Other-relative']) workclass = tf.feature_column.categorical_column_with_vocabulary_list( 'workclass', [ 'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov', 'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked']) # To show an example of hashing: occupation = tf.feature_column.categorical_column_with_hash_bucket( 'occupation', hash_bucket_size=1000) # Transformations. 
age_buckets = tf.feature_column.bucketized_column( age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) # Wide columns and deep columns. base_columns = [ education, marital_status, relationship, workclass, occupation, age_buckets, ] crossed_columns = [ tf.feature_column.crossed_column( ['education', 'occupation'], hash_bucket_size=1000), tf.feature_column.crossed_column( [age_buckets, 'education', 'occupation'], hash_bucket_size=1000), ] wide_columns = base_columns + crossed_columns deep_columns = [ age, education_num, capital_gain, capital_loss, hours_per_week, tf.feature_column.indicator_column(workclass), tf.feature_column.indicator_column(education), tf.feature_column.indicator_column(marital_status), tf.feature_column.indicator_column(relationship), # To show an example of embedding tf.feature_column.embedding_column(occupation, dimension=8), ] return wide_columns, deep_columns def build_estimator(model_dir, model_type): """Build an estimator appropriate for the given model type.""" wide_columns, deep_columns = build_model_columns() hidden_units = [100, 75, 50, 25] # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which # trains faster than GPU for this model. run_config = tf.estimator.RunConfig().replace( session_config=tf.ConfigProto(device_count={'GPU': 0})) if model_type == 'wide': return tf.estimator.LinearClassifier( model_dir=model_dir, feature_columns=wide_columns, config=run_config) elif model_type == 'deep': return tf.estimator.DNNClassifier( model_dir=model_dir, feature_columns=deep_columns, hidden_units=hidden_units, config=run_config) else: return tf.estimator.DNNLinearCombinedClassifier( model_dir=model_dir, linear_feature_columns=wide_columns, dnn_feature_columns=deep_columns, dnn_hidden_units=hidden_units, config=run_config) def input_fn(data_file, num_epochs, shuffle, batch_size): """Generate an input function for the Estimator.""" assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have either run data_download.py or ' 'set both arguments --train_data and --test_data.' % data_file) def parse_csv(value): print('Parsing', data_file) columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) features = dict(zip(_CSV_COLUMNS, columns)) labels = features.pop('income_bracket') return features, tf.equal(labels, '>50K') # Extract lines from input files using the Dataset API. dataset = tf.contrib.data.TextLineDataset(data_file) dataset = dataset.map(parse_csv, num_threads=5) # Apply transformations to the Dataset dataset = dataset.batch(batch_size) dataset = dataset.repeat(num_epochs) # Input function that is called by the Estimator def _input_fn(): if shuffle: # Apply shuffle transformation to re-shuffle the dataset in each call. shuffled_dataset = dataset.shuffle(buffer_size=100000) iterator = shuffled_dataset.make_one_shot_iterator() else: iterator = dataset.make_one_shot_iterator() features, labels = iterator.get_next() return features, labels return _input_fn def main(unused_argv): # Clean up the model directory if present shutil.rmtree(FLAGS.model_dir, ignore_errors=True) model = build_estimator(FLAGS.model_dir, FLAGS.model_type) # Set up input function generators for the train and test data files. train_input_fn = input_fn( data_file=FLAGS.train_data, num_epochs=FLAGS.epochs_per_eval, shuffle=True, batch_size=FLAGS.batch_size) eval_input_fn = input_fn( data_file=FLAGS.test_data, num_epochs=1, shuffle=False,
model.train(input_fn=train_input_fn) results = model.evaluate(input_fn=eval_input_fn) # Display evaluation metrics print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval) print('-' * 30) for key in sorted(results): print('%s: %s' % (key, results[key])) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
batch_size=FLAGS.batch_size) # Train and evaluate the model every `FLAGS.epochs_per_eval` epochs. for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval):
random_line_split
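For fim_type random_line_split the cut points fall on line boundaries instead of identifier boundaries: the middle above resumes mid-call with batch_size=FLAGS.batch_size) and runs up to the training-loop header, with the loop body carried by the suffix. How the split points were actually sampled is not recorded in this dump; a sketch under the assumption of uniformly chosen line boundaries:

import random

def random_line_split(source, rng):
    # Pick two cut points on line boundaries: prefix | middle | suffix.
    lines = source.splitlines(keepends=True)
    i, j = sorted(rng.sample(range(len(lines) + 1), 2))
    return "".join(lines[:i]), "".join(lines[i:j]), "".join(lines[j:])

prefix, middle, suffix = random_line_split("a = 1\nb = 2\nc = 3\n", random.Random(0))
assert prefix + middle + suffix == "a = 1\nb = 2\nc = 3\n"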
xyz.py
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''XYZ file format'''


import numpy as np

from horton.units import angstrom
from horton.periodic import periodic


__all__ = ['load_xyz', 'dump_xyz']


def load_xyz(filename):
    '''Load a molecular geometry from a .xyz file.

       **Argument:**

       filename
            The file to load the geometry from

       **Returns:** dictionary with ``title``, ``coordinates`` and ``numbers``.
    '''
    f = file(filename)
    size = int(f.next())
    title = f.next().strip()
    coordinates = np.empty((size, 3), float)
    numbers = np.empty(size, int)
    for i in xrange(size):
        words = f.next().split()
        numbers[i] = periodic[words[0]].number
        coordinates[i,0] = float(words[1])*angstrom
        coordinates[i,1] = float(words[2])*angstrom
        coordinates[i,2] = float(words[3])*angstrom
    f.close()
    return {
        'title': title,
        'coordinates': coordinates,
        'numbers': numbers
    }


def dump_xyz(filename, data):
    '''Write an ``.xyz`` file.

       **Arguments:**

       filename
            The name of the file to be written. This usually has the extension
            ".xyz".

       data
            An IOData instance. Must contain ``coordinates`` and ``numbers``.
            May contain ``title``.
    '''
    with open(filename, 'w') as f:
        print >> f, data.natom
        print >> f, getattr(data, 'title', 'Created with HORTON')
        for i in xrange(data.natom):
            n = periodic[data.numbers[i]].symbol
            x, y, z = data.coordinates[i]/angstrom
            print >> f, '%2s %15.10f %15.10f %15.10f' % (n, x, y, z)
random_line_split
xyz.py
# -*- coding: utf-8 -*- # HORTON: Helpful Open-source Research TOol for N-fermion systems. # Copyright (C) 2011-2017 The HORTON Development Team # # This file is part of HORTON. # # HORTON is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 3 # of the License, or (at your option) any later version. # # HORTON is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/> # # -- '''XYZ file format''' import numpy as np from horton.units import angstrom from horton.periodic import periodic __all__ = ['load_xyz', 'dump_xyz'] def
(filename):
    '''Load a molecular geometry from a .xyz file.

       **Argument:**

       filename
            The file to load the geometry from

       **Returns:** dictionary with ``title``, ``coordinates`` and ``numbers``.
    '''
    f = file(filename)
    size = int(f.next())
    title = f.next().strip()
    coordinates = np.empty((size, 3), float)
    numbers = np.empty(size, int)
    for i in xrange(size):
        words = f.next().split()
        numbers[i] = periodic[words[0]].number
        coordinates[i,0] = float(words[1])*angstrom
        coordinates[i,1] = float(words[2])*angstrom
        coordinates[i,2] = float(words[3])*angstrom
    f.close()
    return {
        'title': title,
        'coordinates': coordinates,
        'numbers': numbers
    }


def dump_xyz(filename, data):
    '''Write an ``.xyz`` file.

       **Arguments:**

       filename
            The name of the file to be written. This usually has the extension
            ".xyz".

       data
            An IOData instance. Must contain ``coordinates`` and ``numbers``.
            May contain ``title``.
    '''
    with open(filename, 'w') as f:
        print >> f, data.natom
        print >> f, getattr(data, 'title', 'Created with HORTON')
        for i in xrange(data.natom):
            n = periodic[data.numbers[i]].symbol
            x, y, z = data.coordinates[i]/angstrom
            print >> f, '%2s %15.10f %15.10f %15.10f' % (n, x, y, z)
load_xyz
identifier_name
xyz.py
# -*- coding: utf-8 -*- # HORTON: Helpful Open-source Research TOol for N-fermion systems. # Copyright (C) 2011-2017 The HORTON Development Team # # This file is part of HORTON. # # HORTON is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 3 # of the License, or (at your option) any later version. # # HORTON is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/> # # -- '''XYZ file format''' import numpy as np from horton.units import angstrom from horton.periodic import periodic __all__ = ['load_xyz', 'dump_xyz'] def load_xyz(filename):
def dump_xyz(filename, data):
    '''Write an ``.xyz`` file.

       **Arguments:**

       filename
            The name of the file to be written. This usually has the extension
            ".xyz".

       data
            An IOData instance. Must contain ``coordinates`` and ``numbers``.
            May contain ``title``.
    '''
    with open(filename, 'w') as f:
        print >> f, data.natom
        print >> f, getattr(data, 'title', 'Created with HORTON')
        for i in xrange(data.natom):
            n = periodic[data.numbers[i]].symbol
            x, y, z = data.coordinates[i]/angstrom
            print >> f, '%2s %15.10f %15.10f %15.10f' % (n, x, y, z)
'''Load a molecular geometry from a .xyz file.

       **Argument:**

       filename
            The file to load the geometry from

       **Returns:** dictionary with ``title``, ``coordinates`` and ``numbers``.
    '''
    f = file(filename)
    size = int(f.next())
    title = f.next().strip()
    coordinates = np.empty((size, 3), float)
    numbers = np.empty(size, int)
    for i in xrange(size):
        words = f.next().split()
        numbers[i] = periodic[words[0]].number
        coordinates[i,0] = float(words[1])*angstrom
        coordinates[i,1] = float(words[2])*angstrom
        coordinates[i,2] = float(words[3])*angstrom
    f.close()
    return {
        'title': title,
        'coordinates': coordinates,
        'numbers': numbers
    }
identifier_body
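In this row the held-out middle is the entire body of load_xyz, docstring included, with the suffix picking up at def dump_xyz. One way such identifier_body spans can be located is an AST pass over the file; the sketch below uses Python 3.8+'s ast module, so it is illustrative only — the corpus file itself is Python 2 (print >> f, xrange) and would not parse as-is.

import ast

def body_span(source, name):
    # Return (first, last) 1-based line numbers of the body of `name`.
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.FunctionDef) and node.name == name:
            return node.body[0].lineno, node.body[-1].end_lineno
    return None

src = "def load_xyz(filename):\n    size = 3\n    return size\n"
assert body_span(src, "load_xyz") == (2, 3)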
xyz.py
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''XYZ file format'''


import numpy as np

from horton.units import angstrom
from horton.periodic import periodic


__all__ = ['load_xyz', 'dump_xyz']


def load_xyz(filename):
    '''Load a molecular geometry from a .xyz file.

       **Argument:**

       filename
            The file to load the geometry from

       **Returns:** dictionary with ``title``, ``coordinates`` and ``numbers``.
    '''
    f = file(filename)
    size = int(f.next())
    title = f.next().strip()
    coordinates = np.empty((size, 3), float)
    numbers = np.empty(size, int)
    for i in xrange(size):
        words = f.next().split()
        numbers[i] = periodic[words[0]].number
        coordinates[i,0] = float(words[1])*angstrom
        coordinates[i,1] = float(words[2])*angstrom
        coordinates[i,2] = float(words[3])*angstrom
    f.close()
    return {
        'title': title,
        'coordinates': coordinates,
        'numbers': numbers
    }


def dump_xyz(filename, data):
    '''Write an ``.xyz`` file.

       **Arguments:**

       filename
            The name of the file to be written. This usually has the extension
            ".xyz".

       data
            An IOData instance. Must contain ``coordinates`` and ``numbers``.
            May contain ``title``.
    '''
    with open(filename, 'w') as f:
        print >> f, data.natom
        print >> f, getattr(data, 'title', 'Created with HORTON')
        for i in xrange(data.natom):
n = periodic[data.numbers[i]].symbol x, y, z = data.coordinates[i]/angstrom print >> f, '%2s %15.10f %15.10f %15.10f' % (n, x, y, z)
conditional_block
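Despite the label, conditional_block rows also hold out loop bodies, as here: the middle is the body of for i in xrange(data.natom): inside dump_xyz. A cheap sanity check such rows admit is that every non-blank line of the middle is indented past the introducing statement — a heuristic for validation, not necessarily the rule used to build the dataset:

def block_is_indented(header, middle):
    # Every non-blank line of the held-out block must sit deeper than its header.
    base = len(header) - len(header.lstrip())
    lines = [l for l in middle.splitlines() if l.strip()]
    return all(len(l) - len(l.lstrip()) > base for l in lines)

header = "    for i in xrange(data.natom):"
middle = ("        n = periodic[data.numbers[i]].symbol\n"
          "        x, y, z = data.coordinates[i]/angstrom\n")
assert block_is_indented(header, middle)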
print_queue_resolvers.rs
use async_graphql::{ ID, Context, FieldResult, }; // use eyre::{ // eyre, // // Result, // Context as _, // }; use printspool_json_store::{ Record, JsonRow }; use crate::{ PrintQueue, part::Part, // package::Package, }; #[derive(async_graphql::InputObject, Default)] struct PrintQueuePartsInput { /// Include the prints that are currently in the queue or actively printing (default: true) #[graphql(default = true)] include_queued: bool, /// Include the print history of parts that have completed all of their prints (default: false) #[graphql(default = false)] include_finished: bool, /// Include starred prints regardless of whether they are in the queue or finished /// (default: false) #[graphql(default = false)] include_starred: bool, } #[async_graphql::Object] impl PrintQueue { async fn id(&self) -> ID { (&self.id).into() } async fn name<'ctx>(&self) -> &String
async fn parts<'ctx>( &self, ctx: &'ctx Context<'_>, // id: Option<ID>, input: Option<PrintQueuePartsInput>, ) -> FieldResult<Vec<Part>> { let db: &crate::Db = ctx.data()?; let input = input.unwrap_or(PrintQueuePartsInput { include_queued: true, include_finished: false, include_starred: false, }); let parts = sqlx::query_as!( JsonRow, r#" SELECT parts.props FROM parts INNER JOIN packages ON packages.id = parts.package_id AND packages.print_queue_id = $1 LEFT OUTER JOIN tasks ON tasks.part_id = parts.id AND tasks.status = 'finished' WHERE parts.deleted_at IS NULL AND (tasks.id IS NULL OR tasks.status IS NOT NULL) GROUP BY parts.id, parts.quantity, packages.quantity, packages.starred HAVING ( $2 IS TRUE AND parts.quantity * packages.quantity > COUNT(tasks.id) ) OR ( $3 IS TRUE AND parts.quantity * packages.quantity <= COUNT(tasks.id) ) OR ( $4 IS TRUE AND packages.starred ) ORDER BY parts.position "#, self.id, input.include_queued, input.include_finished, input.include_starred, ) .fetch_all(db) .await?; let parts = Part::from_rows(parts)?; Ok(parts) } }
{ &self.name }
identifier_body
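The middle of this row is just { &self.name }, a reminder that identifier_body spans can be nearly trivial. A pipeline consuming the dump may want to filter rows by the length of the held-out span; the thresholds below are arbitrary placeholders, not values taken from this dataset:

def keep(row, min_chars=8, max_chars=4000):
    # Drop rows whose held-out span is degenerate or unwieldy.
    return min_chars <= len(row["middle"]) <= max_chars

assert keep({"middle": "{ &self.name }"})
assert not keep({"middle": "x"})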
print_queue_resolvers.rs
use async_graphql::{ ID, Context, FieldResult, }; // use eyre::{ // eyre, // // Result, // Context as _, // }; use printspool_json_store::{ Record, JsonRow }; use crate::{ PrintQueue, part::Part, // package::Package, }; #[derive(async_graphql::InputObject, Default)] struct PrintQueuePartsInput { /// Include the prints that are currently in the queue or actively printing (default: true) #[graphql(default = true)] include_queued: bool, /// Include the print history of parts that have completed all of their prints (default: false) #[graphql(default = false)] include_finished: bool, /// Include starred prints regardless of whether they are in the queue or finished /// (default: false) #[graphql(default = false)] include_starred: bool, } #[async_graphql::Object] impl PrintQueue { async fn id(&self) -> ID { (&self.id).into() } async fn name<'ctx>(&self) -> &String { &self.name } async fn
<'ctx>( &self, ctx: &'ctx Context<'_>, // id: Option<ID>, input: Option<PrintQueuePartsInput>, ) -> FieldResult<Vec<Part>> { let db: &crate::Db = ctx.data()?; let input = input.unwrap_or(PrintQueuePartsInput { include_queued: true, include_finished: false, include_starred: false, }); let parts = sqlx::query_as!( JsonRow, r#" SELECT parts.props FROM parts INNER JOIN packages ON packages.id = parts.package_id AND packages.print_queue_id = $1 LEFT OUTER JOIN tasks ON tasks.part_id = parts.id AND tasks.status = 'finished' WHERE parts.deleted_at IS NULL AND (tasks.id IS NULL OR tasks.status IS NOT NULL) GROUP BY parts.id, parts.quantity, packages.quantity, packages.starred HAVING ( $2 IS TRUE AND parts.quantity * packages.quantity > COUNT(tasks.id) ) OR ( $3 IS TRUE AND parts.quantity * packages.quantity <= COUNT(tasks.id) ) OR ( $4 IS TRUE AND packages.starred ) ORDER BY parts.position "#, self.id, input.include_queued, input.include_finished, input.include_starred, ) .fetch_all(db) .await?; let parts = Part::from_rows(parts)?; Ok(parts) } }
parts
identifier_name
print_queue_resolvers.rs
use async_graphql::{ ID, Context, FieldResult, }; // use eyre::{ // eyre, // // Result, // Context as _, // }; use printspool_json_store::{ Record, JsonRow }; use crate::{ PrintQueue, part::Part, // package::Package, }; #[derive(async_graphql::InputObject, Default)] struct PrintQueuePartsInput { /// Include the prints that are currently in the queue or actively printing (default: true) #[graphql(default = true)] include_queued: bool, /// Include the print history of parts that have completed all of their prints (default: false) #[graphql(default = false)] include_finished: bool, /// Include starred prints regardless of whether they are in the queue or finished /// (default: false) #[graphql(default = false)] include_starred: bool, }
impl PrintQueue { async fn id(&self) -> ID { (&self.id).into() } async fn name<'ctx>(&self) -> &String { &self.name } async fn parts<'ctx>( &self, ctx: &'ctx Context<'_>, // id: Option<ID>, input: Option<PrintQueuePartsInput>, ) -> FieldResult<Vec<Part>> { let db: &crate::Db = ctx.data()?; let input = input.unwrap_or(PrintQueuePartsInput { include_queued: true, include_finished: false, include_starred: false, }); let parts = sqlx::query_as!( JsonRow, r#" SELECT parts.props FROM parts INNER JOIN packages ON packages.id = parts.package_id AND packages.print_queue_id = $1 LEFT OUTER JOIN tasks ON tasks.part_id = parts.id AND tasks.status = 'finished' WHERE parts.deleted_at IS NULL AND (tasks.id IS NULL OR tasks.status IS NOT NULL) GROUP BY parts.id, parts.quantity, packages.quantity, packages.starred HAVING ( $2 IS TRUE AND parts.quantity * packages.quantity > COUNT(tasks.id) ) OR ( $3 IS TRUE AND parts.quantity * packages.quantity <= COUNT(tasks.id) ) OR ( $4 IS TRUE AND packages.starred ) ORDER BY parts.position "#, self.id, input.include_queued, input.include_finished, input.include_starred, ) .fetch_all(db) .await?; let parts = Part::from_rows(parts)?; Ok(parts) } }
#[async_graphql::Object]
random_line_split
cargo.rs
extern crate rustc_serialize; extern crate docopt; use docopt::Docopt; // Write the Docopt usage string. const USAGE: &'static str = " Rust's package manager Usage: cargo <command> [<args>...] cargo [options] Options: -h, --help Display this message -V, --version Print version info and exit --list List installed commands -v, --verbose Use verbose output Some common cargo commands are: build Compile the current project clean Remove the target directory doc Build this project's and its dependencies' documentation new Create a new cargo project run Build and execute src/main.rs test Run the tests bench Run the benchmarks update Update dependencies listed in Cargo.lock See 'cargo help <command>' for more information on a specific command. "; #[derive(Debug, RustcDecodable)] struct Args { arg_command: Option<Command>, arg_args: Vec<String>, flag_list: bool, flag_verbose: bool, } #[derive(Debug, RustcDecodable)] enum Command { Build, Clean, Doc, New, Run, Test, Bench, Update, } fn main() { let args: Args = Docopt::new(USAGE) .and_then(|d| d.options_first(true).decode())
.unwrap_or_else(|e| e.exit()); println!("{:?}", args); }
random_line_split
cargo.rs
extern crate rustc_serialize; extern crate docopt; use docopt::Docopt; // Write the Docopt usage string. const USAGE: &'static str = " Rust's package manager Usage: cargo <command> [<args>...] cargo [options] Options: -h, --help Display this message -V, --version Print version info and exit --list List installed commands -v, --verbose Use verbose output Some common cargo commands are: build Compile the current project clean Remove the target directory doc Build this project's and its dependencies' documentation new Create a new cargo project run Build and execute src/main.rs test Run the tests bench Run the benchmarks update Update dependencies listed in Cargo.lock See 'cargo help <command>' for more information on a specific command. "; #[derive(Debug, RustcDecodable)] struct Args { arg_command: Option<Command>, arg_args: Vec<String>, flag_list: bool, flag_verbose: bool, } #[derive(Debug, RustcDecodable)] enum Command { Build, Clean, Doc, New, Run, Test, Bench, Update, } fn
() { let args: Args = Docopt::new(USAGE) .and_then(|d| d.options_first(true).decode()) .unwrap_or_else(|e| e.exit()); println!("{:?}", args); }
main
identifier_name
all_c.js
['maxsteps',['maxSteps',['../class_dataset.html#a5350c5b634efd3979339149aca3264f2',1,'Dataset']]], ['model',['Model',['../class_model.html',1,'Model'],['../class_model.html#ae3b375de5f6df4faf74a95d64748e048',1,'Model::Model()']]], ['msg',['msg',['../class_timer.html#a439eced3ba76f02f685358373cd7d124',1,'Timer']]], ['msrc',['MSRC',['../class_m_s_r_c.html',1,'MSRC'],['../class_m_s_r_c.html#a3c6f744f80f2c3f98d55542e50579de9',1,'MSRC::MSRC()']]] ];
var searchData= [ ['mainfolder',['mainFolder',['../class_dataset.html#accd7ce9be4329d6e3b758f71269a8703',1,'Dataset']]],
random_line_split
sha.rs
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use core::cell::Cell; use core::mem; use crate::hil::digest::{DigestEngine, DigestMode, DigestError}; use kernel::common::cells::VolatileCell; use super::keymgr::{KEYMGR0_REGS, Registers}; #[allow(unused)] enum ShaTrigMask { Go = 0x1, Reset = 0x2, Step = 0x4, Stop = 0x8, } #[allow(unused)] enum ShaCfgEnMask { BigEndian = 0x01, Sha1 = 0x02, BusError = 0x08, Livestream = 0x10, Hmac = 0x20, IntEnDone = 0x1_0000, IntMaskDone = 0x2_0000, } pub struct ShaEngine { regs: *mut Registers, current_mode: Cell<Option<DigestMode>>, } enum CertificateMask { CertBits = 0x3f, // Bits 0:5 Enable = 0x40, // 1 << 6 //CheckOnly = 0x80, // 1 << 7 } impl ShaEngine { const unsafe fn new(regs: *mut Registers) -> ShaEngine
pub fn handle_interrupt(&self, _nvic: u32) { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); } } pub static mut KEYMGR0_SHA: ShaEngine = unsafe { ShaEngine::new(KEYMGR0_REGS) }; const HMAC_KEY_SIZE_BYTES: usize = 32; const HMAC_KEY_SIZE_WORDS: usize = HMAC_KEY_SIZE_BYTES / 4; impl DigestEngine for ShaEngine { fn initialize(&self, mode: DigestMode) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status // Compile-time check for DigestMode exhaustiveness match mode { DigestMode::Sha1 | DigestMode::Sha256 | DigestMode::Sha256Hmac => (), }; self.current_mode.set(Some(mode)); regs.trig.set(ShaTrigMask::Stop as u32); let mut flags = ShaCfgEnMask::Livestream as u32 | ShaCfgEnMask::IntEnDone as u32; match mode { DigestMode::Sha1 => flags |= ShaCfgEnMask::Sha1 as u32, DigestMode::Sha256 => (), DigestMode::Sha256Hmac => flags |= ShaCfgEnMask::Hmac as u32, } regs.cfg_en.set(flags); regs.trig.set(ShaTrigMask::Go as u32); Ok(()) } fn initialize_hmac(&self, key: &[u8]) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status self.current_mode.set(Some(DigestMode::Sha256Hmac)); if key.len() < HMAC_KEY_SIZE_BYTES { print!("Key too small: {}\n", key.len()); return Err(DigestError::BufferTooSmall(HMAC_KEY_SIZE_BYTES)); } for i in 0..HMAC_KEY_SIZE_WORDS { let word: u32 = (key[4 * i + 0] as u32) << 0 | (key[4 * i + 1] as u32) << 8 | (key[4 * i + 2] as u32) << 16 | (key[4 * i + 3] as u32) << 24; regs.key_w[i].set(word); } let flags = ShaCfgEnMask::Livestream as u32 | ShaCfgEnMask::IntEnDone as u32 | ShaCfgEnMask::Hmac as u32; regs.cfg_en.set(flags); regs.trig.set(ShaTrigMask::Go as u32); return Ok(()); } fn initialize_certificate(&self, certificate_id: u32) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status regs.use_cert.set(certificate_id & CertificateMask::CertBits as u32 | CertificateMask::Enable as u32); regs.cfg_en.set(ShaCfgEnMask::IntEnDone as u32); regs.trig.set(ShaTrigMask::Go as u32); Ok(()) } fn update(&self, data: &[u8]) -> Result<usize, DigestError> { let ref regs = unsafe { &*self.regs }.sha; if self.current_mode.get().is_none() { print!("ERROR: SHA::update called but engine not initialized!\n"); return Err(DigestError::NotConfigured); } let fifo_u8: &VolatileCell<u8> = unsafe { mem::transmute(&regs.input_fifo) }; // TODO(yuriks): Feed FIFO word at a time when possible for b in data { fifo_u8.set(*b); } Ok(data.len()) } fn finalize(&self, output: &mut [u8]) -> Result<usize, DigestError> { let ref regs = unsafe { &*self.regs }.sha; let expected_output_size = match self.current_mode.get() { None => return Err(DigestError::NotConfigured), Some(mode) => mode.output_size(), }; if output.len() < expected_output_size { return Err(DigestError::BufferTooSmall(expected_output_size)); } // Tell hardware we're done streaming and then wait for the // hash calculation to finish. 
regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}

        for i in 0..(expected_output_size / 4) {
            let word = regs.sts_h[i].get();
            output[i * 4 + 0] = (word >> 0) as u8;
            output[i * 4 + 1] = (word >> 8) as u8;
            output[i * 4 + 2] = (word >> 16) as u8;
            output[i * 4 + 3] = (word >> 24) as u8;
        }

        regs.itop.set(0);

        Ok(expected_output_size)
    }

    // Finalize without seeing the result; this is used for certificates
    // (hidden secret generation)
    fn finalize_hidden(&self) -> Result<usize, DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;
        regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}
        regs.itop.set(0);
        Ok(0)
    }
}
{ ShaEngine { regs: regs, current_mode: Cell::new(None), } }
identifier_body
sha.rs
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use core::cell::Cell; use core::mem; use crate::hil::digest::{DigestEngine, DigestMode, DigestError}; use kernel::common::cells::VolatileCell; use super::keymgr::{KEYMGR0_REGS, Registers}; #[allow(unused)] enum ShaTrigMask { Go = 0x1, Reset = 0x2, Step = 0x4, Stop = 0x8, } #[allow(unused)] enum ShaCfgEnMask { BigEndian = 0x01, Sha1 = 0x02, BusError = 0x08, Livestream = 0x10, Hmac = 0x20, IntEnDone = 0x1_0000, IntMaskDone = 0x2_0000, } pub struct ShaEngine { regs: *mut Registers, current_mode: Cell<Option<DigestMode>>, } enum CertificateMask { CertBits = 0x3f, // Bits 0:5 Enable = 0x40, // 1 << 6 //CheckOnly = 0x80, // 1 << 7 } impl ShaEngine { const unsafe fn
(regs: *mut Registers) -> ShaEngine { ShaEngine { regs: regs, current_mode: Cell::new(None), } } pub fn handle_interrupt(&self, _nvic: u32) { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); } } pub static mut KEYMGR0_SHA: ShaEngine = unsafe { ShaEngine::new(KEYMGR0_REGS) }; const HMAC_KEY_SIZE_BYTES: usize = 32; const HMAC_KEY_SIZE_WORDS: usize = HMAC_KEY_SIZE_BYTES / 4; impl DigestEngine for ShaEngine { fn initialize(&self, mode: DigestMode) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status // Compile-time check for DigestMode exhaustiveness match mode { DigestMode::Sha1 | DigestMode::Sha256 | DigestMode::Sha256Hmac => (), }; self.current_mode.set(Some(mode)); regs.trig.set(ShaTrigMask::Stop as u32); let mut flags = ShaCfgEnMask::Livestream as u32 | ShaCfgEnMask::IntEnDone as u32; match mode { DigestMode::Sha1 => flags |= ShaCfgEnMask::Sha1 as u32, DigestMode::Sha256 => (), DigestMode::Sha256Hmac => flags |= ShaCfgEnMask::Hmac as u32, } regs.cfg_en.set(flags); regs.trig.set(ShaTrigMask::Go as u32); Ok(()) } fn initialize_hmac(&self, key: &[u8]) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status self.current_mode.set(Some(DigestMode::Sha256Hmac)); if key.len() < HMAC_KEY_SIZE_BYTES { print!("Key too small: {}\n", key.len()); return Err(DigestError::BufferTooSmall(HMAC_KEY_SIZE_BYTES)); } for i in 0..HMAC_KEY_SIZE_WORDS { let word: u32 = (key[4 * i + 0] as u32) << 0 | (key[4 * i + 1] as u32) << 8 | (key[4 * i + 2] as u32) << 16 | (key[4 * i + 3] as u32) << 24; regs.key_w[i].set(word); } let flags = ShaCfgEnMask::Livestream as u32 | ShaCfgEnMask::IntEnDone as u32 | ShaCfgEnMask::Hmac as u32; regs.cfg_en.set(flags); regs.trig.set(ShaTrigMask::Go as u32); return Ok(()); } fn initialize_certificate(&self, certificate_id: u32) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status regs.use_cert.set(certificate_id & CertificateMask::CertBits as u32 | CertificateMask::Enable as u32); regs.cfg_en.set(ShaCfgEnMask::IntEnDone as u32); regs.trig.set(ShaTrigMask::Go as u32); Ok(()) } fn update(&self, data: &[u8]) -> Result<usize, DigestError> { let ref regs = unsafe { &*self.regs }.sha; if self.current_mode.get().is_none() { print!("ERROR: SHA::update called but engine not initialized!\n"); return Err(DigestError::NotConfigured); } let fifo_u8: &VolatileCell<u8> = unsafe { mem::transmute(&regs.input_fifo) }; // TODO(yuriks): Feed FIFO word at a time when possible for b in data { fifo_u8.set(*b); } Ok(data.len()) } fn finalize(&self, output: &mut [u8]) -> Result<usize, DigestError> { let ref regs = unsafe { &*self.regs }.sha; let expected_output_size = match self.current_mode.get() { None => return Err(DigestError::NotConfigured), Some(mode) => mode.output_size(), }; if output.len() < expected_output_size { return Err(DigestError::BufferTooSmall(expected_output_size)); } // Tell hardware we're done streaming and then wait for the // hash calculation to finish. 
regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}

        for i in 0..(expected_output_size / 4) {
            let word = regs.sts_h[i].get();
            output[i * 4 + 0] = (word >> 0) as u8;
            output[i * 4 + 1] = (word >> 8) as u8;
            output[i * 4 + 2] = (word >> 16) as u8;
            output[i * 4 + 3] = (word >> 24) as u8;
        }

        regs.itop.set(0);

        Ok(expected_output_size)
    }

    // Finalize without seeing the result; this is used for certificates
    // (hidden secret generation)
    fn finalize_hidden(&self) -> Result<usize, DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;
        regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}
        regs.itop.set(0);
        Ok(0)
    }
}
new
identifier_name
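Rows like this one (prefix ending in const unsafe fn, middle new) are typically flattened into a single training string with sentinel tokens between the spans. The sentinel names below are common placeholders, not something this dump specifies — real tokenizers define their own:

def to_fim_prompt(row, pre="<fim_prefix>", suf="<fim_suffix>", mid="<fim_middle>"):
    # Prefix-suffix-middle ordering, matching the column order of this dump.
    return pre + row["prefix"] + suf + row["suffix"] + mid + row["middle"]

print(to_fim_prompt({"prefix": "const unsafe fn ",
                     "suffix": "(regs: *mut Registers)",
                     "middle": "new"}))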
sha.rs
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use core::cell::Cell; use core::mem; use crate::hil::digest::{DigestEngine, DigestMode, DigestError}; use kernel::common::cells::VolatileCell; use super::keymgr::{KEYMGR0_REGS, Registers}; #[allow(unused)] enum ShaTrigMask { Go = 0x1, Reset = 0x2, Step = 0x4, Stop = 0x8, } #[allow(unused)] enum ShaCfgEnMask { BigEndian = 0x01, Sha1 = 0x02, BusError = 0x08, Livestream = 0x10, Hmac = 0x20, IntEnDone = 0x1_0000, IntMaskDone = 0x2_0000, } pub struct ShaEngine { regs: *mut Registers, current_mode: Cell<Option<DigestMode>>, } enum CertificateMask { CertBits = 0x3f, // Bits 0:5 Enable = 0x40, // 1 << 6 //CheckOnly = 0x80, // 1 << 7 } impl ShaEngine { const unsafe fn new(regs: *mut Registers) -> ShaEngine { ShaEngine { regs: regs, current_mode: Cell::new(None), } } pub fn handle_interrupt(&self, _nvic: u32) { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); } } pub static mut KEYMGR0_SHA: ShaEngine = unsafe { ShaEngine::new(KEYMGR0_REGS) }; const HMAC_KEY_SIZE_BYTES: usize = 32; const HMAC_KEY_SIZE_WORDS: usize = HMAC_KEY_SIZE_BYTES / 4; impl DigestEngine for ShaEngine { fn initialize(&self, mode: DigestMode) -> Result<(), DigestError> { let ref regs = unsafe { &*self.regs }.sha; regs.itop.set(0); // clear status // Compile-time check for DigestMode exhaustiveness match mode { DigestMode::Sha1 | DigestMode::Sha256 | DigestMode::Sha256Hmac => (), }; self.current_mode.set(Some(mode)); regs.trig.set(ShaTrigMask::Stop as u32);
DigestMode::Sha1 => flags |= ShaCfgEnMask::Sha1 as u32,
            DigestMode::Sha256 => (),
            DigestMode::Sha256Hmac => flags |= ShaCfgEnMask::Hmac as u32,
        }
        regs.cfg_en.set(flags);

        regs.trig.set(ShaTrigMask::Go as u32);

        Ok(())
    }

    fn initialize_hmac(&self, key: &[u8]) -> Result<(), DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;
        regs.itop.set(0); // clear status

        self.current_mode.set(Some(DigestMode::Sha256Hmac));

        if key.len() < HMAC_KEY_SIZE_BYTES {
            print!("Key too small: {}\n", key.len());
            return Err(DigestError::BufferTooSmall(HMAC_KEY_SIZE_BYTES));
        }
        for i in 0..HMAC_KEY_SIZE_WORDS {
            let word: u32 = (key[4 * i + 0] as u32) << 0 |
                (key[4 * i + 1] as u32) << 8 |
                (key[4 * i + 2] as u32) << 16 |
                (key[4 * i + 3] as u32) << 24;
            regs.key_w[i].set(word);
        }

        let flags = ShaCfgEnMask::Livestream as u32 |
            ShaCfgEnMask::IntEnDone as u32 |
            ShaCfgEnMask::Hmac as u32;
        regs.cfg_en.set(flags);

        regs.trig.set(ShaTrigMask::Go as u32);

        return Ok(());
    }

    fn initialize_certificate(&self, certificate_id: u32) -> Result<(), DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;
        regs.itop.set(0); // clear status

        regs.use_cert.set(certificate_id & CertificateMask::CertBits as u32 |
                          CertificateMask::Enable as u32);
        regs.cfg_en.set(ShaCfgEnMask::IntEnDone as u32);
        regs.trig.set(ShaTrigMask::Go as u32);

        Ok(())
    }

    fn update(&self, data: &[u8]) -> Result<usize, DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;

        if self.current_mode.get().is_none() {
            print!("ERROR: SHA::update called but engine not initialized!\n");
            return Err(DigestError::NotConfigured);
        }

        let fifo_u8: &VolatileCell<u8> = unsafe { mem::transmute(&regs.input_fifo) };

        // TODO(yuriks): Feed FIFO word at a time when possible
        for b in data {
            fifo_u8.set(*b);
        }

        Ok(data.len())
    }

    fn finalize(&self, output: &mut [u8]) -> Result<usize, DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;

        let expected_output_size = match self.current_mode.get() {
            None => return Err(DigestError::NotConfigured),
            Some(mode) => mode.output_size(),
        };
        if output.len() < expected_output_size {
            return Err(DigestError::BufferTooSmall(expected_output_size));
        }

        // Tell hardware we're done streaming and then wait for the
        // hash calculation to finish.
        regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}

        for i in 0..(expected_output_size / 4) {
            let word = regs.sts_h[i].get();
            output[i * 4 + 0] = (word >> 0) as u8;
            output[i * 4 + 1] = (word >> 8) as u8;
            output[i * 4 + 2] = (word >> 16) as u8;
            output[i * 4 + 3] = (word >> 24) as u8;
        }

        regs.itop.set(0);

        Ok(expected_output_size)
    }

    // Finalize without seeing the result; this is used for certificates
    // (hidden secret generation)
    fn finalize_hidden(&self) -> Result<usize, DigestError> {
        let ref regs = unsafe { &*self.regs }.sha;
        regs.itop.set(0);
        regs.trig.set(ShaTrigMask::Stop as u32);
        while regs.itop.get() == 0 {}
        regs.itop.set(0);
        Ok(0)
    }
}
let mut flags = ShaCfgEnMask::Livestream as u32 | ShaCfgEnMask::IntEnDone as u32; match mode {
random_line_split
renderer.tsx
import * as electron from 'electron'; import * as React from 'react'; import * as ReactDOM from 'react-dom'; type SizeType = '640 x 480' | '800 x 600' | '1280 x 800' | '1440 x 900' | '1680 x 1050'; export type FormatType = 'webm' | 'gif'; export interface CaptureSize { width: number; height: number; } interface ToolbarState { alwaysOnTop: boolean; size: SizeType; format: FormatType; } interface ScreenState { captureSources: Electron.DesktopCapturerSource[]; } interface VideoState { isRecord: boolean; timer: string; } class Toolbar extends React.Component<any, ToolbarState> { private ipc: Electron.IpcRenderer; private window: Electron.BrowserWindow; private menu: Electron.Menu; constructor() { super(); this.state = { alwaysOnTop: false, size: '640 x 480', format: 'webm' }; this.ipc = electron.ipcRenderer; this.window = electron.remote.getCurrentWindow(); this.menu = new electron.remote.Menu(); ['640 x 480', '800 x 600', '1280 x 800', '1440 x 900', '1680 x 1050'].map((label: string) => { this.menu.append(new electron.remote.MenuItem({ label: label, type: 'radio', checked: label === '640 x 480', click: this.toggleSize.bind(this) })); }); } render(): JSX.Element { return ( <div className="toolbar-actions" > <div className="btn-group"> <button className={['btn', 'btn-default', this.state.alwaysOnTop && 'active'].join(' ')} style={{borderRadius: 4}} onClick={this.toggleAlwaysOnTop.bind(this)} > <span className="icon icon-popup"/> </button> </div> <button className="btn btn-default btn-dropdown" onClick={() => { this.menu.popup(this.window); }}> <span className="icon icon-text icon-resize-full"/> {this.state.size} </button> <div className="btn-group"> <button className={['btn', 'btn-default', this.state.format === 'webm' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>webm</button> <button className={['btn', 'btn-default', this.state.format === 'gif' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>gif</button> </div> <button className="btn btn-default pull-right" onClick={this.fetchScreen.bind(this)} > <span className="icon icon-arrows-ccw"/> </button> </div> ); } private toggleAlwaysOnTop(): void { let alwaysOnTop = !this.state.alwaysOnTop; this.window.setAlwaysOnTop(alwaysOnTop); this.setState({ alwaysOnTop: alwaysOnTop, size: this.state.size, format: this.state.format }); } private toggleSize(item: Electron.MenuItem): void { this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: item.label as SizeType, format: this.state.format }); let captureSize = item.label.split(' x '); this.ipc.send('toggleSize', { width: parseInt(captureSize[0], 10), height: parseInt(captureSize[1], 10), } as CaptureSize); } private toggleFormat(): void { let format = (this.state.format === 'webm' ? 'gif' : 'webm') as FormatType; this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: this.state.size, format: format }); this.ipc.send('toggleFormat', format); } private fetchScreen(): void { this.ipc.send('fetchScreen'); } } class Screen extends React.Component<any, ScreenState> { private ipc: Electron.IpcRenderer; private desktopCapturer: Electron.DesktopCapturer; private prevTarget: HTMLElement; constructor() { super(); this.state = { captureSources: [] }; this.ipc = electron.ipcRenderer; this.desktopCapturer = electron.desktopCapturer; } componentDidMount(): void
render(): JSX.Element { return ( <ul className="list-group" >{this.state.captureSources.map((source: Electron.DesktopCapturerSource) => { return ( <li key={source.id} id={source.id} className='list-group-item' onClick={this.selectScreen.bind(this)} > <img src={source.thumbnail.toDataURL()} className='img-rounded media-object pull-left' style={{width: 32, height: 32}}/> <div className='media-body' > <strong>{source.name}</strong> <p>{source.id}</p> </div> </li> ); })}</ul> ); } private fetchScreen(): void { this.desktopCapturer.getSources({ types: ['window', 'screen'] }, (error, sources) => { if (error) { return; } this.setState({ captureSources: sources }); }); } private selectScreen(event: React.MouseEvent<any>): void { if (this.prevTarget) { this.prevTarget.classList.remove('active'); } let target: HTMLElement = event.currentTarget; target.classList.add('active'); this.ipc.send('captureScreen', target.id); this.prevTarget = target; } } class Video extends React.Component<any, VideoState> { private ipc: Electron.IpcRenderer; private player: HTMLVideoElement; private toast: HTMLParagraphElement; private capture: HTMLSpanElement; private canvas: HTMLCanvasElement; private canvasctx: CanvasRenderingContext2D; private downloader: HTMLAnchorElement; private frames: string[]; private startTime: number; private requestId: number; private screenId: string; private captureSize: CaptureSize; private format: FormatType; constructor() { super(); this.state = { isRecord: false, timer: '00:00:00' }; this.canvas = document.createElement('canvas') as HTMLCanvasElement; this.canvas.setAttribute('width', '640px'); this.canvas.setAttribute('height', '480px'); this.canvasctx = this.canvas.getContext('2d'); this.downloader = document.createElement('a') as HTMLAnchorElement; this.captureSize = { width: 640, height: 480 } as CaptureSize; this.format = 'webm' as FormatType; this.ipc = electron.ipcRenderer; this.ipc.on('toggleSize', (event: Electron.IpcRendererEvent, captureSize: CaptureSize) => { this.captureSize = captureSize; this.canvas.setAttribute('width', `${this.captureSize.width}px`); this.canvas.setAttribute('height', `${this.captureSize.height}px`); if (this.screenId) { this.captureScreen(); } }); this.ipc.on('toggleFormat', (event: Electron.IpcRendererEvent, format: FormatType) => { this.format = format; }); this.ipc.on('captureScreen', (event: Electron.IpcRendererEvent, screenId: string) => { this.screenId = screenId; this.captureScreen(); }); } componentDidMount(): void { this.player = ReactDOM.findDOMNode(this.refs['player']) as HTMLVideoElement; this.toast = ReactDOM.findDOMNode(this.refs['toast']) as HTMLParagraphElement; this.capture = ReactDOM.findDOMNode(this.refs['capture']) as HTMLSpanElement; } render(): JSX.Element { return ( <div> <video ref="player" style={{width: 580, height: 545}}/> <p ref="toast" className="toast" >{this.state.timer}</p> <span ref="capture" className="icon icon-record capture" onClick={this.capturing.bind(this)}/> </div> ); } private captureScreen(): void { let nav = navigator as any; nav.webkitGetUserMedia({ audio: false, video: { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: this.screenId, minWidth: this.captureSize.width, maxWidth: this.captureSize.width, minHeight: this.captureSize.height, maxHeight: this.captureSize.height } } }, this.streamScreen.bind(this), (e) => { console.log(e); }); } private streamScreen(stream: MediaStream): void { let playerURL = this.player.getAttribute('src'); if (playerURL) { window.URL.revokeObjectURL(playerURL); } 
this.player.setAttribute('src', window.URL.createObjectURL(stream)); this.player.play(); } private capturing(): void { if (!this.player.getAttribute('src')) { return; } if (this.state.isRecord) { this.capture.className = 'icon icon-record capture'; cancelAnimationFrame(this.requestId); let downloaderURL = this.downloader.getAttribute('href'); if (downloaderURL) { window.URL.revokeObjectURL(downloaderURL); } if (this.format === 'webm') { this.toWebm(); } else if (this.format === 'gif') { this.toGif(); } this.setState({ isRecord: false, timer: '00:00:00' }); return; } this.capture.className = 'icon icon-stop capture'; this.frames = []; this.startTime = Date.now(); this.setState({ isRecord: true, timer: '00:00:00' }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private draw(): void { this.canvasctx.drawImage(this.player, 0, 0); this.frames.push(this.canvas.toDataURL('image/webp', 0.8)); let diff = Date.now() - this.startTime; let hours = String(Math.floor(diff / 3600000) + 100).substring(1); let minutes = String(Math.floor((diff - parseInt(hours, 10) * 3600000) / 60000) + 100).substring(1); let seconds = String(Math.round((diff - parseInt(hours, 10) * 3600000 - parseInt(minutes, 10) * 60000) / 1000) + 100).substring(1); this.setState({ isRecord: this.state.isRecord, timer: `${hours}:${minutes}:${seconds}` }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private toWebm(): void { let webm = window['Whammy'].fromImageArray(this.frames, 1000 / 60) as Blob; this.finalize(webm, 'webm' as FormatType); } private toGif(): void { window['gifshot'].createGIF({ images: this.frames, gifWidth: this.captureSize.width, gifHeight: this.captureSize.height }, (response) => { if (response.error) { return; } let bin = atob(response.image.slice(22)); let buffer = new Uint8Array(bin.length); for (let i = 0; i < bin.length; i++) { buffer[i] = bin.charCodeAt(i); } let gif = new Blob([buffer]); this.finalize(gif, 'gif' as FormatType); }); } private finalize(blob: Blob, format: FormatType): void { let clicker = document.createEvent('MouseEvent') as MouseEvent; clicker.initEvent('click', false, true); this.downloader.setAttribute('href', window.URL.createObjectURL(blob)); this.downloader.setAttribute('download', `${this.startTime}.${format}`); this.downloader.dispatchEvent(clicker); } } export default class Renderer { private toolbar: React.ReactElement<{}>; private screen: React.ReactElement<{}>; private video: React.ReactElement<{}>; constructor() { this.toolbar = React.createElement(Toolbar); ReactDOM.render(this.toolbar, document.querySelector('#toolbar')); this.screen = React.createElement(Screen); ReactDOM.render(this.screen, document.querySelector('#screen')); this.video = React.createElement(Video); ReactDOM.render(this.video, document.querySelector('#video')); } } new Renderer();
{ this.fetchScreen(); this.ipc.on('fetchScreen', () => { this.fetchScreen(); }); }
identifier_body
renderer.tsx
import * as electron from 'electron'; import * as React from 'react'; import * as ReactDOM from 'react-dom'; type SizeType = '640 x 480' | '800 x 600' | '1280 x 800' | '1440 x 900' | '1680 x 1050'; export type FormatType = 'webm' | 'gif'; export interface CaptureSize { width: number; height: number; } interface ToolbarState { alwaysOnTop: boolean; size: SizeType; format: FormatType; } interface ScreenState { captureSources: Electron.DesktopCapturerSource[]; } interface VideoState { isRecord: boolean; timer: string; } class Toolbar extends React.Component<any, ToolbarState> { private ipc: Electron.IpcRenderer; private window: Electron.BrowserWindow; private menu: Electron.Menu; constructor() { super(); this.state = { alwaysOnTop: false, size: '640 x 480', format: 'webm' }; this.ipc = electron.ipcRenderer; this.window = electron.remote.getCurrentWindow(); this.menu = new electron.remote.Menu(); ['640 x 480', '800 x 600', '1280 x 800', '1440 x 900', '1680 x 1050'].map((label: string) => { this.menu.append(new electron.remote.MenuItem({ label: label, type: 'radio', checked: label === '640 x 480', click: this.toggleSize.bind(this) })); }); } render(): JSX.Element { return ( <div className="toolbar-actions" > <div className="btn-group"> <button className={['btn', 'btn-default', this.state.alwaysOnTop && 'active'].join(' ')} style={{borderRadius: 4}} onClick={this.toggleAlwaysOnTop.bind(this)} > <span className="icon icon-popup"/> </button> </div> <button className="btn btn-default btn-dropdown" onClick={() => { this.menu.popup(this.window); }}> <span className="icon icon-text icon-resize-full"/> {this.state.size} </button> <div className="btn-group"> <button className={['btn', 'btn-default', this.state.format === 'webm' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>webm</button> <button className={['btn', 'btn-default', this.state.format === 'gif' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>gif</button> </div> <button className="btn btn-default pull-right" onClick={this.fetchScreen.bind(this)} > <span className="icon icon-arrows-ccw"/> </button> </div> ); } private toggleAlwaysOnTop(): void { let alwaysOnTop = !this.state.alwaysOnTop; this.window.setAlwaysOnTop(alwaysOnTop); this.setState({ alwaysOnTop: alwaysOnTop, size: this.state.size, format: this.state.format }); } private toggleSize(item: Electron.MenuItem): void { this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: item.label as SizeType, format: this.state.format }); let captureSize = item.label.split(' x '); this.ipc.send('toggleSize', { width: parseInt(captureSize[0], 10), height: parseInt(captureSize[1], 10), } as CaptureSize); } private toggleFormat(): void { let format = (this.state.format === 'webm' ? 
'gif' : 'webm') as FormatType; this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: this.state.size, format: format }); this.ipc.send('toggleFormat', format); } private fetchScreen(): void { this.ipc.send('fetchScreen'); } } class Screen extends React.Component<any, ScreenState> { private ipc: Electron.IpcRenderer; private desktopCapturer: Electron.DesktopCapturer; private prevTarget: HTMLElement; constructor() { super(); this.state = { captureSources: [] }; this.ipc = electron.ipcRenderer; this.desktopCapturer = electron.desktopCapturer; } componentDidMount(): void { this.fetchScreen(); this.ipc.on('fetchScreen', () => { this.fetchScreen(); }); } render(): JSX.Element { return ( <ul className="list-group" >{this.state.captureSources.map((source: Electron.DesktopCapturerSource) => { return ( <li key={source.id} id={source.id} className='list-group-item' onClick={this.selectScreen.bind(this)} > <img src={source.thumbnail.toDataURL()} className='img-rounded media-object pull-left' style={{width: 32, height: 32}}/> <div className='media-body' > <strong>{source.name}</strong> <p>{source.id}</p> </div> </li> ); })}</ul> ); } private fetchScreen(): void { this.desktopCapturer.getSources({ types: ['window', 'screen'] }, (error, sources) => { if (error) { return; } this.setState({ captureSources: sources }); }); } private selectScreen(event: React.MouseEvent<any>): void { if (this.prevTarget) { this.prevTarget.classList.remove('active'); } let target: HTMLElement = event.currentTarget; target.classList.add('active'); this.ipc.send('captureScreen', target.id); this.prevTarget = target; } } class Video extends React.Component<any, VideoState> { private ipc: Electron.IpcRenderer; private player: HTMLVideoElement; private toast: HTMLParagraphElement; private capture: HTMLSpanElement; private canvas: HTMLCanvasElement; private canvasctx: CanvasRenderingContext2D; private downloader: HTMLAnchorElement; private frames: string[]; private startTime: number; private requestId: number; private screenId: string; private captureSize: CaptureSize; private format: FormatType; constructor() { super(); this.state = { isRecord: false, timer: '00:00:00' }; this.canvas = document.createElement('canvas') as HTMLCanvasElement; this.canvas.setAttribute('width', '640px'); this.canvas.setAttribute('height', '480px'); this.canvasctx = this.canvas.getContext('2d'); this.downloader = document.createElement('a') as HTMLAnchorElement; this.captureSize = { width: 640, height: 480 } as CaptureSize; this.format = 'webm' as FormatType; this.ipc = electron.ipcRenderer; this.ipc.on('toggleSize', (event: Electron.IpcRendererEvent, captureSize: CaptureSize) => { this.captureSize = captureSize; this.canvas.setAttribute('width', `${this.captureSize.width}px`); this.canvas.setAttribute('height', `${this.captureSize.height}px`); if (this.screenId) { this.captureScreen(); } }); this.ipc.on('toggleFormat', (event: Electron.IpcRendererEvent, format: FormatType) => { this.format = format; }); this.ipc.on('captureScreen', (event: Electron.IpcRendererEvent, screenId: string) => { this.screenId = screenId; this.captureScreen(); }); } componentDidMount(): void { this.player = ReactDOM.findDOMNode(this.refs['player']) as HTMLVideoElement; this.toast = ReactDOM.findDOMNode(this.refs['toast']) as HTMLParagraphElement; this.capture = ReactDOM.findDOMNode(this.refs['capture']) as HTMLSpanElement; } render(): JSX.Element { return ( <div> <video ref="player" style={{width: 580, height: 545}}/> <p ref="toast" className="toast" 
>{this.state.timer}</p> <span ref="capture" className="icon icon-record capture" onClick={this.capturing.bind(this)}/> </div> ); } private captureScreen(): void { let nav = navigator as any; nav.webkitGetUserMedia({ audio: false, video: { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: this.screenId, minWidth: this.captureSize.width, maxWidth: this.captureSize.width, minHeight: this.captureSize.height, maxHeight: this.captureSize.height } } }, this.streamScreen.bind(this), (e) => { console.log(e); }); } private streamScreen(stream: MediaStream): void { let playerURL = this.player.getAttribute('src'); if (playerURL) { window.URL.revokeObjectURL(playerURL); } this.player.setAttribute('src', window.URL.createObjectURL(stream)); this.player.play(); } private capturing(): void { if (!this.player.getAttribute('src')) { return; } if (this.state.isRecord)
this.capture.className = 'icon icon-stop capture'; this.frames = []; this.startTime = Date.now(); this.setState({ isRecord: true, timer: '00:00:00' }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private draw(): void { this.canvasctx.drawImage(this.player, 0, 0); this.frames.push(this.canvas.toDataURL('image/webp', 0.8)); let diff = Date.now() - this.startTime; let hours = String(Math.floor(diff / 3600000) + 100).substring(1); let minutes = String(Math.floor((diff - parseInt(hours, 10) * 3600000) / 60000) + 100).substring(1); let seconds = String(Math.round((diff - parseInt(hours, 10) * 3600000 - parseInt(minutes, 10) * 60000) / 1000) + 100).substring(1); this.setState({ isRecord: this.state.isRecord, timer: `${hours}:${minutes}:${seconds}` }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private toWebm(): void { let webm = window['Whammy'].fromImageArray(this.frames, 1000 / 60) as Blob; this.finalize(webm, 'webm' as FormatType); } private toGif(): void { window['gifshot'].createGIF({ images: this.frames, gifWidth: this.captureSize.width, gifHeight: this.captureSize.height }, (response) => { if (response.error) { return; } let bin = atob(response.image.slice(22)); let buffer = new Uint8Array(bin.length); for (let i = 0; i < bin.length; i++) { buffer[i] = bin.charCodeAt(i); } let gif = new Blob([buffer]); this.finalize(gif, 'gif' as FormatType); }); } private finalize(blob: Blob, format: FormatType): void { let clicker = document.createEvent('MouseEvent') as MouseEvent; clicker.initEvent('click', false, true); this.downloader.setAttribute('href', window.URL.createObjectURL(blob)); this.downloader.setAttribute('download', `${this.startTime}.${format}`); this.downloader.dispatchEvent(clicker); } } export default class Renderer { private toolbar: React.ReactElement<{}>; private screen: React.ReactElement<{}>; private video: React.ReactElement<{}>; constructor() { this.toolbar = React.createElement(Toolbar); ReactDOM.render(this.toolbar, document.querySelector('#toolbar')); this.screen = React.createElement(Screen); ReactDOM.render(this.screen, document.querySelector('#screen')); this.video = React.createElement(Video); ReactDOM.render(this.video, document.querySelector('#video')); } } new Renderer();
{ this.capture.className = 'icon icon-record capture'; cancelAnimationFrame(this.requestId); let downloaderURL = this.downloader.getAttribute('href'); if (downloaderURL) { window.URL.revokeObjectURL(downloaderURL); } if (this.format === 'webm') { this.toWebm(); } else if (this.format === 'gif') { this.toGif(); } this.setState({ isRecord: false, timer: '00:00:00' }); return; }
conditional_block
renderer.tsx
import * as electron from 'electron'; import * as React from 'react'; import * as ReactDOM from 'react-dom'; type SizeType = '640 x 480' | '800 x 600' | '1280 x 800' | '1440 x 900' | '1680 x 1050'; export type FormatType = 'webm' | 'gif'; export interface CaptureSize { width: number; height: number; } interface ToolbarState { alwaysOnTop: boolean; size: SizeType; format: FormatType; } interface ScreenState { captureSources: Electron.DesktopCapturerSource[]; } interface VideoState { isRecord: boolean; timer: string; } class Toolbar extends React.Component<any, ToolbarState> { private ipc: Electron.IpcRenderer; private window: Electron.BrowserWindow; private menu: Electron.Menu; constructor() { super(); this.state = { alwaysOnTop: false, size: '640 x 480', format: 'webm' }; this.ipc = electron.ipcRenderer; this.window = electron.remote.getCurrentWindow(); this.menu = new electron.remote.Menu(); ['640 x 480', '800 x 600', '1280 x 800', '1440 x 900', '1680 x 1050'].map((label: string) => { this.menu.append(new electron.remote.MenuItem({ label: label, type: 'radio', checked: label === '640 x 480', click: this.toggleSize.bind(this) })); }); } render(): JSX.Element { return ( <div className="toolbar-actions" > <div className="btn-group"> <button className={['btn', 'btn-default', this.state.alwaysOnTop && 'active'].join(' ')} style={{borderRadius: 4}} onClick={this.toggleAlwaysOnTop.bind(this)} > <span className="icon icon-popup"/> </button> </div> <button className="btn btn-default btn-dropdown" onClick={() => { this.menu.popup(this.window); }}> <span className="icon icon-text icon-resize-full"/> {this.state.size} </button> <div className="btn-group"> <button className={['btn', 'btn-default', this.state.format === 'webm' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>webm</button> <button className={['btn', 'btn-default', this.state.format === 'gif' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>gif</button> </div> <button className="btn btn-default pull-right" onClick={this.fetchScreen.bind(this)} > <span className="icon icon-arrows-ccw"/> </button> </div> ); } private toggleAlwaysOnTop(): void { let alwaysOnTop = !this.state.alwaysOnTop; this.window.setAlwaysOnTop(alwaysOnTop); this.setState({ alwaysOnTop: alwaysOnTop, size: this.state.size, format: this.state.format }); } private toggleSize(item: Electron.MenuItem): void { this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: item.label as SizeType, format: this.state.format }); let captureSize = item.label.split(' x '); this.ipc.send('toggleSize', { width: parseInt(captureSize[0], 10), height: parseInt(captureSize[1], 10), } as CaptureSize); } private toggleFormat(): void { let format = (this.state.format === 'webm' ? 
'gif' : 'webm') as FormatType; this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: this.state.size, format: format }); this.ipc.send('toggleFormat', format); } private fetchScreen(): void { this.ipc.send('fetchScreen'); } } class Screen extends React.Component<any, ScreenState> { private ipc: Electron.IpcRenderer; private desktopCapturer: Electron.DesktopCapturer; private prevTarget: HTMLElement; constructor() { super(); this.state = { captureSources: [] }; this.ipc = electron.ipcRenderer; this.desktopCapturer = electron.desktopCapturer; } componentDidMount(): void { this.fetchScreen(); this.ipc.on('fetchScreen', () => { this.fetchScreen(); }); } render(): JSX.Element { return ( <ul className="list-group" >{this.state.captureSources.map((source: Electron.DesktopCapturerSource) => { return ( <li key={source.id} id={source.id} className='list-group-item' onClick={this.selectScreen.bind(this)} > <img src={source.thumbnail.toDataURL()} className='img-rounded media-object pull-left' style={{width: 32, height: 32}}/> <div className='media-body' > <strong>{source.name}</strong> <p>{source.id}</p> </div> </li> ); })}</ul> ); } private fetchScreen(): void { this.desktopCapturer.getSources({ types: ['window', 'screen'] }, (error, sources) => { if (error) { return; } this.setState({ captureSources: sources }); }); } private selectScreen(event: React.MouseEvent<any>): void { if (this.prevTarget) { this.prevTarget.classList.remove('active'); } let target: HTMLElement = event.currentTarget; target.classList.add('active'); this.ipc.send('captureScreen', target.id); this.prevTarget = target; } } class Video extends React.Component<any, VideoState> { private ipc: Electron.IpcRenderer; private player: HTMLVideoElement; private toast: HTMLParagraphElement; private capture: HTMLSpanElement; private canvas: HTMLCanvasElement; private canvasctx: CanvasRenderingContext2D; private downloader: HTMLAnchorElement; private frames: string[]; private startTime: number; private requestId: number; private screenId: string; private captureSize: CaptureSize; private format: FormatType; constructor() { super(); this.state = { isRecord: false, timer: '00:00:00' }; this.canvas = document.createElement('canvas') as HTMLCanvasElement; this.canvas.setAttribute('width', '640px'); this.canvas.setAttribute('height', '480px'); this.canvasctx = this.canvas.getContext('2d'); this.downloader = document.createElement('a') as HTMLAnchorElement; this.captureSize = { width: 640, height: 480 } as CaptureSize; this.format = 'webm' as FormatType; this.ipc = electron.ipcRenderer; this.ipc.on('toggleSize', (event: Electron.IpcRendererEvent, captureSize: CaptureSize) => { this.captureSize = captureSize; this.canvas.setAttribute('width', `${this.captureSize.width}px`); this.canvas.setAttribute('height', `${this.captureSize.height}px`); if (this.screenId) { this.captureScreen(); } }); this.ipc.on('toggleFormat', (event: Electron.IpcRendererEvent, format: FormatType) => { this.format = format; }); this.ipc.on('captureScreen', (event: Electron.IpcRendererEvent, screenId: string) => { this.screenId = screenId; this.captureScreen(); }); } componentDidMount(): void { this.player = ReactDOM.findDOMNode(this.refs['player']) as HTMLVideoElement; this.toast = ReactDOM.findDOMNode(this.refs['toast']) as HTMLParagraphElement; this.capture = ReactDOM.findDOMNode(this.refs['capture']) as HTMLSpanElement; } render(): JSX.Element { return ( <div> <video ref="player" style={{width: 580, height: 545}}/> <p ref="toast" className="toast" 
>{this.state.timer}</p> <span ref="capture" className="icon icon-record capture" onClick={this.capturing.bind(this)}/> </div> ); } private captureScreen(): void { let nav = navigator as any; nav.webkitGetUserMedia({ audio: false, video: { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: this.screenId, minWidth: this.captureSize.width, maxWidth: this.captureSize.width, minHeight: this.captureSize.height, maxHeight: this.captureSize.height } } }, this.streamScreen.bind(this), (e) => { console.log(e); }); } private streamScreen(stream: MediaStream): void { let playerURL = this.player.getAttribute('src'); if (playerURL) { window.URL.revokeObjectURL(playerURL); } this.player.setAttribute('src', window.URL.createObjectURL(stream)); this.player.play(); } private capturing(): void { if (!this.player.getAttribute('src')) { return; } if (this.state.isRecord) { this.capture.className = 'icon icon-record capture'; cancelAnimationFrame(this.requestId); let downloaderURL = this.downloader.getAttribute('href'); if (downloaderURL) { window.URL.revokeObjectURL(downloaderURL); } if (this.format === 'webm') { this.toWebm(); } else if (this.format === 'gif') { this.toGif(); } this.setState({ isRecord: false, timer: '00:00:00' }); return; } this.capture.className = 'icon icon-stop capture'; this.frames = []; this.startTime = Date.now(); this.setState({ isRecord: true, timer: '00:00:00' }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private draw(): void { this.canvasctx.drawImage(this.player, 0, 0); this.frames.push(this.canvas.toDataURL('image/webp', 0.8)); let diff = Date.now() - this.startTime; let hours = String(Math.floor(diff / 3600000) + 100).substring(1); let minutes = String(Math.floor((diff - parseInt(hours, 10) * 3600000) / 60000) + 100).substring(1); let seconds = String(Math.round((diff - parseInt(hours, 10) * 3600000 - parseInt(minutes, 10) * 60000) / 1000) + 100).substring(1); this.setState({ isRecord: this.state.isRecord, timer: `${hours}:${minutes}:${seconds}` }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private toWebm(): void { let webm = window['Whammy'].fromImageArray(this.frames, 1000 / 60) as Blob; this.finalize(webm, 'webm' as FormatType); } private toGif(): void { window['gifshot'].createGIF({ images: this.frames, gifWidth: this.captureSize.width, gifHeight: this.captureSize.height }, (response) => { if (response.error) { return; } let bin = atob(response.image.slice(22)); let buffer = new Uint8Array(bin.length); for (let i = 0; i < bin.length; i++) { buffer[i] = bin.charCodeAt(i); } let gif = new Blob([buffer]); this.finalize(gif, 'gif' as FormatType); }); } private
(blob: Blob, format: FormatType): void { let clicker = document.createEvent('MouseEvent') as MouseEvent; clicker.initEvent('click', false, true); this.downloader.setAttribute('href', window.URL.createObjectURL(blob)); this.downloader.setAttribute('download', `${this.startTime}.${format}`); this.downloader.dispatchEvent(clicker); } } export default class Renderer { private toolbar: React.ReactElement<{}>; private screen: React.ReactElement<{}>; private video: React.ReactElement<{}>; constructor() { this.toolbar = React.createElement(Toolbar); ReactDOM.render(this.toolbar, document.querySelector('#toolbar')); this.screen = React.createElement(Screen); ReactDOM.render(this.screen, document.querySelector('#screen')); this.video = React.createElement(Video); ReactDOM.render(this.video, document.querySelector('#video')); } } new Renderer();
finalize
identifier_name
renderer.tsx
import * as electron from 'electron'; import * as React from 'react'; import * as ReactDOM from 'react-dom'; type SizeType = '640 x 480' | '800 x 600' | '1280 x 800' | '1440 x 900' | '1680 x 1050'; export type FormatType = 'webm' | 'gif'; export interface CaptureSize { width: number; height: number; } interface ToolbarState { alwaysOnTop: boolean; size: SizeType; format: FormatType; } interface ScreenState { captureSources: Electron.DesktopCapturerSource[]; } interface VideoState { isRecord: boolean; timer: string; } class Toolbar extends React.Component<any, ToolbarState> { private ipc: Electron.IpcRenderer; private window: Electron.BrowserWindow; private menu: Electron.Menu; constructor() { super(); this.state = { alwaysOnTop: false, size: '640 x 480', format: 'webm' }; this.ipc = electron.ipcRenderer; this.window = electron.remote.getCurrentWindow(); this.menu = new electron.remote.Menu(); ['640 x 480', '800 x 600', '1280 x 800', '1440 x 900', '1680 x 1050'].map((label: string) => { this.menu.append(new electron.remote.MenuItem({ label: label, type: 'radio', checked: label === '640 x 480', click: this.toggleSize.bind(this) })); }); } render(): JSX.Element { return ( <div className="toolbar-actions" > <div className="btn-group"> <button className={['btn', 'btn-default', this.state.alwaysOnTop && 'active'].join(' ')} style={{borderRadius: 4}} onClick={this.toggleAlwaysOnTop.bind(this)} > <span className="icon icon-popup"/> </button> </div> <button className="btn btn-default btn-dropdown" onClick={() => { this.menu.popup(this.window); }}> <span className="icon icon-text icon-resize-full"/> {this.state.size} </button> <div className="btn-group"> <button className={['btn', 'btn-default', this.state.format === 'webm' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>webm</button> <button className={['btn', 'btn-default', this.state.format === 'gif' && 'active'].join(' ')} onClick={this.toggleFormat.bind(this)}>gif</button> </div>
className="btn btn-default pull-right" onClick={this.fetchScreen.bind(this)} > <span className="icon icon-arrows-ccw"/> </button> </div> ); } private toggleAlwaysOnTop(): void { let alwaysOnTop = !this.state.alwaysOnTop; this.window.setAlwaysOnTop(alwaysOnTop); this.setState({ alwaysOnTop: alwaysOnTop, size: this.state.size, format: this.state.format }); } private toggleSize(item: Electron.MenuItem): void { this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: item.label as SizeType, format: this.state.format }); let captureSize = item.label.split(' x '); this.ipc.send('toggleSize', { width: parseInt(captureSize[0], 10), height: parseInt(captureSize[1], 10), } as CaptureSize); } private toggleFormat(): void { let format = (this.state.format === 'webm' ? 'gif' : 'webm') as FormatType; this.setState({ alwaysOnTop: this.state.alwaysOnTop, size: this.state.size, format: format }); this.ipc.send('toggleFormat', format); } private fetchScreen(): void { this.ipc.send('fetchScreen'); } } class Screen extends React.Component<any, ScreenState> { private ipc: Electron.IpcRenderer; private desktopCapturer: Electron.DesktopCapturer; private prevTarget: HTMLElement; constructor() { super(); this.state = { captureSources: [] }; this.ipc = electron.ipcRenderer; this.desktopCapturer = electron.desktopCapturer; } componentDidMount(): void { this.fetchScreen(); this.ipc.on('fetchScreen', () => { this.fetchScreen(); }); } render(): JSX.Element { return ( <ul className="list-group" >{this.state.captureSources.map((source: Electron.DesktopCapturerSource) => { return ( <li key={source.id} id={source.id} className='list-group-item' onClick={this.selectScreen.bind(this)} > <img src={source.thumbnail.toDataURL()} className='img-rounded media-object pull-left' style={{width: 32, height: 32}}/> <div className='media-body' > <strong>{source.name}</strong> <p>{source.id}</p> </div> </li> ); })}</ul> ); } private fetchScreen(): void { this.desktopCapturer.getSources({ types: ['window', 'screen'] }, (error, sources) => { if (error) { return; } this.setState({ captureSources: sources }); }); } private selectScreen(event: React.MouseEvent<any>): void { if (this.prevTarget) { this.prevTarget.classList.remove('active'); } let target: HTMLElement = event.currentTarget; target.classList.add('active'); this.ipc.send('captureScreen', target.id); this.prevTarget = target; } } class Video extends React.Component<any, VideoState> { private ipc: Electron.IpcRenderer; private player: HTMLVideoElement; private toast: HTMLParagraphElement; private capture: HTMLSpanElement; private canvas: HTMLCanvasElement; private canvasctx: CanvasRenderingContext2D; private downloader: HTMLAnchorElement; private frames: string[]; private startTime: number; private requestId: number; private screenId: string; private captureSize: CaptureSize; private format: FormatType; constructor() { super(); this.state = { isRecord: false, timer: '00:00:00' }; this.canvas = document.createElement('canvas') as HTMLCanvasElement; this.canvas.setAttribute('width', '640px'); this.canvas.setAttribute('height', '480px'); this.canvasctx = this.canvas.getContext('2d'); this.downloader = document.createElement('a') as HTMLAnchorElement; this.captureSize = { width: 640, height: 480 } as CaptureSize; this.format = 'webm' as FormatType; this.ipc = electron.ipcRenderer; this.ipc.on('toggleSize', (event: Electron.IpcRendererEvent, captureSize: CaptureSize) => { this.captureSize = captureSize; this.canvas.setAttribute('width', `${this.captureSize.width}px`); 
this.canvas.setAttribute('height', `${this.captureSize.height}px`); if (this.screenId) { this.captureScreen(); } }); this.ipc.on('toggleFormat', (event: Electron.IpcRendererEvent, format: FormatType) => { this.format = format; }); this.ipc.on('captureScreen', (event: Electron.IpcRendererEvent, screenId: string) => { this.screenId = screenId; this.captureScreen(); }); } componentDidMount(): void { this.player = ReactDOM.findDOMNode(this.refs['player']) as HTMLVideoElement; this.toast = ReactDOM.findDOMNode(this.refs['toast']) as HTMLParagraphElement; this.capture = ReactDOM.findDOMNode(this.refs['capture']) as HTMLSpanElement; } render(): JSX.Element { return ( <div> <video ref="player" style={{width: 580, height: 545}}/> <p ref="toast" className="toast" >{this.state.timer}</p> <span ref="capture" className="icon icon-record capture" onClick={this.capturing.bind(this)}/> </div> ); } private captureScreen(): void { let nav = navigator as any; nav.webkitGetUserMedia({ audio: false, video: { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: this.screenId, minWidth: this.captureSize.width, maxWidth: this.captureSize.width, minHeight: this.captureSize.height, maxHeight: this.captureSize.height } } }, this.streamScreen.bind(this), (e) => { console.log(e); }); } private streamScreen(stream: MediaStream): void { let playerURL = this.player.getAttribute('src'); if (playerURL) { window.URL.revokeObjectURL(playerURL); } this.player.setAttribute('src', window.URL.createObjectURL(stream)); this.player.play(); } private capturing(): void { if (!this.player.getAttribute('src')) { return; } if (this.state.isRecord) { this.capture.className = 'icon icon-record capture'; cancelAnimationFrame(this.requestId); let downloaderURL = this.downloader.getAttribute('href'); if (downloaderURL) { window.URL.revokeObjectURL(downloaderURL); } if (this.format === 'webm') { this.toWebm(); } else if (this.format === 'gif') { this.toGif(); } this.setState({ isRecord: false, timer: '00:00:00' }); return; } this.capture.className = 'icon icon-stop capture'; this.frames = []; this.startTime = Date.now(); this.setState({ isRecord: true, timer: '00:00:00' }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private draw(): void { this.canvasctx.drawImage(this.player, 0, 0); this.frames.push(this.canvas.toDataURL('image/webp', 0.8)); let diff = Date.now() - this.startTime; let hours = String(Math.floor(diff / 3600000) + 100).substring(1); let minutes = String(Math.floor((diff - parseInt(hours, 10) * 3600000) / 60000) + 100).substring(1); let seconds = String(Math.round((diff - parseInt(hours, 10) * 3600000 - parseInt(minutes, 10) * 60000) / 1000) + 100).substring(1); this.setState({ isRecord: this.state.isRecord, timer: `${hours}:${minutes}:${seconds}` }); this.requestId = requestAnimationFrame(this.draw.bind(this)); } private toWebm(): void { let webm = window['Whammy'].fromImageArray(this.frames, 1000 / 60) as Blob; this.finalize(webm, 'webm' as FormatType); } private toGif(): void { window['gifshot'].createGIF({ images: this.frames, gifWidth: this.captureSize.width, gifHeight: this.captureSize.height }, (response) => { if (response.error) { return; } let bin = atob(response.image.slice(22)); let buffer = new Uint8Array(bin.length); for (let i = 0; i < bin.length; i++) { buffer[i] = bin.charCodeAt(i); } let gif = new Blob([buffer]); this.finalize(gif, 'gif' as FormatType); }); } private finalize(blob: Blob, format: FormatType): void { let clicker = document.createEvent('MouseEvent') as MouseEvent; 
clicker.initEvent('click', false, true); this.downloader.setAttribute('href', window.URL.createObjectURL(blob)); this.downloader.setAttribute('download', `${this.startTime}.${format}`); this.downloader.dispatchEvent(clicker); } } export default class Renderer { private toolbar: React.ReactElement<{}>; private screen: React.ReactElement<{}>; private video: React.ReactElement<{}>; constructor() { this.toolbar = React.createElement(Toolbar); ReactDOM.render(this.toolbar, document.querySelector('#toolbar')); this.screen = React.createElement(Screen); ReactDOM.render(this.screen, document.querySelector('#screen')); this.video = React.createElement(Video); ReactDOM.render(this.video, document.querySelector('#video')); } } new Renderer();
<button
random_line_split
script.rs
use std::fmt; use std::str::FromStr; use super::lang_mapping; use crate::error::Error; use crate::Lang; #[cfg(feature = "enum-map")] use enum_map::Enum; /// Represents a writing system (Latin, Cyrillic, Arabic, etc). #[cfg_attr(feature = "enum-map", derive(Enum))] #[derive(PartialEq, Eq, Debug, Clone, Copy)] pub enum Script { // Keep this in alphabetic order (for C bindings) Arabic, Bengali, Cyrillic, Devanagari, Ethiopic, Georgian, Greek, Gujarati, Gurmukhi, Hangul, Hebrew, Hiragana, Kannada, Katakana, Khmer, Latin, Malayalam, Mandarin, Myanmar, Oriya, Sinhala, Tamil, Telugu, Thai, } // Array of all existing Script values. const VALUES: [Script; 24] = [ Script::Arabic, Script::Bengali, Script::Cyrillic, Script::Devanagari, Script::Ethiopic, Script::Georgian, Script::Greek, Script::Gujarati, Script::Gurmukhi, Script::Hangul, Script::Hebrew, Script::Hiragana, Script::Kannada, Script::Katakana, Script::Khmer, Script::Latin, Script::Malayalam, Script::Mandarin, Script::Myanmar, Script::Oriya, Script::Sinhala, Script::Tamil, Script::Telugu, Script::Thai, ]; impl Script { /// Get all existing scripts. /// /// # Example /// ``` /// use whatlang::Script; /// for script in Script::all() { /// println!("{}", script); /// } /// ``` pub fn all() -> &'static [Script] { &VALUES } pub fn name(&self) -> &str { match *self { Script::Latin => "Latin", Script::Cyrillic => "Cyrillic", Script::Arabic => "Arabic", Script::Devanagari => "Devanagari", Script::Hiragana => "Hiragana", Script::Katakana => "Katakana", Script::Ethiopic => "Ethiopic", Script::Hebrew => "Hebrew", Script::Bengali => "Bengali", Script::Georgian => "Georgian", Script::Mandarin => "Mandarin", Script::Hangul => "Hangul", Script::Greek => "Greek", Script::Kannada => "Kannada", Script::Tamil => "Tamil", Script::Thai => "Thai", Script::Gujarati => "Gujarati", Script::Gurmukhi => "Gurmukhi", Script::Telugu => "Telugu", Script::Malayalam => "Malayalam", Script::Oriya => "Oriya", Script::Myanmar => "Myanmar", Script::Sinhala => "Sinhala", Script::Khmer => "Khmer", } } pub fn langs(&self) -> &[Lang] { lang_mapping::script_langs(*self) } } impl fmt::Display for Script { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name()) } } impl FromStr for Script { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_lowercase().trim() { "latin" => Ok(Script::Latin), "cyrillic" => Ok(Script::Cyrillic), "arabic" => Ok(Script::Arabic), "devanagari" => Ok(Script::Devanagari), "hiragana" => Ok(Script::Hiragana), "katakana" => Ok(Script::Katakana), "ethiopic" => Ok(Script::Ethiopic), "hebrew" => Ok(Script::Hebrew), "bengali" => Ok(Script::Bengali), "georgian" => Ok(Script::Georgian), "mandarin" => Ok(Script::Mandarin), "hangul" => Ok(Script::Hangul), "greek" => Ok(Script::Greek), "kannada" => Ok(Script::Kannada), "tamil" => Ok(Script::Tamil), "thai" => Ok(Script::Thai), "gujarati" => Ok(Script::Gujarati), "gurmukhi" => Ok(Script::Gurmukhi), "telugu" => Ok(Script::Telugu), "malayalam" => Ok(Script::Malayalam), "oriya" => Ok(Script::Oriya), "myanmar" => Ok(Script::Myanmar), "sinhala" => Ok(Script::Sinhala), "khmer" => Ok(Script::Khmer), _ => Err(Error::ParseScript(s.to_string())), } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_all() { assert_eq!(Script::all().len(), 24); let all = Script::all(); assert!(all.contains(&Script::Cyrillic)); assert!(all.contains(&Script::Arabic)); assert!(all.contains(&Script::Latin)); } #[test] fn test_from_str() { for &script in Script::all() { let s = script.name(); 
assert_eq!(s.parse::<Script>().unwrap(), script); assert_eq!(s.to_lowercase().parse::<Script>().unwrap(), script); assert_eq!(s.to_uppercase().parse::<Script>().unwrap(), script); } let result = "foobar".parse::<Script>(); assert!(matches!(result, Err(Error::ParseScript(_)))); } #[test] fn test_langs()
}
{ // Vec of all langs obtained with script.langs() let script_langs: Vec<Lang> = Script::all() .iter() .map(|script| script.langs()) .flatten() .copied() .collect(); // Ensure all langs belong at least to one script for lang in Lang::all() { assert!(script_langs.contains(&lang)); } }
identifier_body
script.rs
use std::fmt; use std::str::FromStr; use super::lang_mapping; use crate::error::Error; use crate::Lang; #[cfg(feature = "enum-map")] use enum_map::Enum; /// Represents a writing system (Latin, Cyrillic, Arabic, etc). #[cfg_attr(feature = "enum-map", derive(Enum))] #[derive(PartialEq, Eq, Debug, Clone, Copy)] pub enum Script { // Keep this in alphabetic order (for C bindings) Arabic, Bengali, Cyrillic, Devanagari, Ethiopic, Georgian, Greek, Gujarati, Gurmukhi, Hangul, Hebrew, Hiragana, Kannada, Katakana, Khmer, Latin, Malayalam, Mandarin, Myanmar, Oriya, Sinhala, Tamil, Telugu, Thai, } // Array of all existing Script values. const VALUES: [Script; 24] = [ Script::Arabic, Script::Bengali, Script::Cyrillic, Script::Devanagari, Script::Ethiopic, Script::Georgian, Script::Greek, Script::Gujarati, Script::Gurmukhi, Script::Hangul, Script::Hebrew, Script::Hiragana, Script::Kannada, Script::Katakana, Script::Khmer, Script::Latin, Script::Malayalam, Script::Mandarin, Script::Myanmar, Script::Oriya, Script::Sinhala, Script::Tamil, Script::Telugu, Script::Thai, ]; impl Script { /// Get all existing scripts. /// /// # Example /// ``` /// use whatlang::Script; /// for script in Script::all() { /// println!("{}", script); /// } /// ``` pub fn all() -> &'static [Script] { &VALUES } pub fn name(&self) -> &str { match *self { Script::Latin => "Latin", Script::Cyrillic => "Cyrillic", Script::Arabic => "Arabic", Script::Devanagari => "Devanagari", Script::Hiragana => "Hiragana", Script::Katakana => "Katakana", Script::Ethiopic => "Ethiopic", Script::Hebrew => "Hebrew", Script::Bengali => "Bengali", Script::Georgian => "Georgian", Script::Mandarin => "Mandarin", Script::Hangul => "Hangul", Script::Greek => "Greek", Script::Kannada => "Kannada", Script::Tamil => "Tamil", Script::Thai => "Thai", Script::Gujarati => "Gujarati", Script::Gurmukhi => "Gurmukhi", Script::Telugu => "Telugu", Script::Malayalam => "Malayalam", Script::Oriya => "Oriya", Script::Myanmar => "Myanmar", Script::Sinhala => "Sinhala", Script::Khmer => "Khmer", } } pub fn langs(&self) -> &[Lang] { lang_mapping::script_langs(*self) } } impl fmt::Display for Script { fn
(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name()) } } impl FromStr for Script { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_lowercase().trim() { "latin" => Ok(Script::Latin), "cyrillic" => Ok(Script::Cyrillic), "arabic" => Ok(Script::Arabic), "devanagari" => Ok(Script::Devanagari), "hiragana" => Ok(Script::Hiragana), "katakana" => Ok(Script::Katakana), "ethiopic" => Ok(Script::Ethiopic), "hebrew" => Ok(Script::Hebrew), "bengali" => Ok(Script::Bengali), "georgian" => Ok(Script::Georgian), "mandarin" => Ok(Script::Mandarin), "hangul" => Ok(Script::Hangul), "greek" => Ok(Script::Greek), "kannada" => Ok(Script::Kannada), "tamil" => Ok(Script::Tamil), "thai" => Ok(Script::Thai), "gujarati" => Ok(Script::Gujarati), "gurmukhi" => Ok(Script::Gurmukhi), "telugu" => Ok(Script::Telugu), "malayalam" => Ok(Script::Malayalam), "oriya" => Ok(Script::Oriya), "myanmar" => Ok(Script::Myanmar), "sinhala" => Ok(Script::Sinhala), "khmer" => Ok(Script::Khmer), _ => Err(Error::ParseScript(s.to_string())), } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_all() { assert_eq!(Script::all().len(), 24); let all = Script::all(); assert!(all.contains(&Script::Cyrillic)); assert!(all.contains(&Script::Arabic)); assert!(all.contains(&Script::Latin)); } #[test] fn test_from_str() { for &script in Script::all() { let s = script.name(); assert_eq!(s.parse::<Script>().unwrap(), script); assert_eq!(s.to_lowercase().parse::<Script>().unwrap(), script); assert_eq!(s.to_uppercase().parse::<Script>().unwrap(), script); } let result = "foobar".parse::<Script>(); assert!(matches!(result, Err(Error::ParseScript(_)))); } #[test] fn test_langs() { // Vec of all langs obtained with script.langs() let script_langs: Vec<Lang> = Script::all() .iter() .map(|script| script.langs()) .flatten() .copied() .collect(); // Ensure all langs belong at least to one script for lang in Lang::all() { assert!(script_langs.contains(&lang)); } } }
fmt
identifier_name
script.rs
use std::fmt; use std::str::FromStr; use super::lang_mapping; use crate::error::Error; use crate::Lang; #[cfg(feature = "enum-map")] use enum_map::Enum; /// Represents a writing system (Latin, Cyrillic, Arabic, etc). #[cfg_attr(feature = "enum-map", derive(Enum))] #[derive(PartialEq, Eq, Debug, Clone, Copy)] pub enum Script { // Keep this in alphabetic order (for C bindings) Arabic, Bengali, Cyrillic, Devanagari, Ethiopic, Georgian, Greek, Gujarati, Gurmukhi, Hangul, Hebrew, Hiragana, Kannada, Katakana, Khmer, Latin, Malayalam, Mandarin, Myanmar, Oriya, Sinhala, Tamil, Telugu, Thai, } // Array of all existing Script values. const VALUES: [Script; 24] = [ Script::Arabic, Script::Bengali, Script::Cyrillic, Script::Devanagari, Script::Ethiopic, Script::Georgian, Script::Greek, Script::Gujarati, Script::Gurmukhi, Script::Hangul, Script::Hebrew, Script::Hiragana, Script::Kannada, Script::Katakana, Script::Khmer, Script::Latin, Script::Malayalam, Script::Mandarin, Script::Myanmar, Script::Oriya, Script::Sinhala, Script::Tamil, Script::Telugu, Script::Thai, ]; impl Script { /// Get all existing scripts. /// /// # Example /// ``` /// use whatlang::Script; /// for script in Script::all() { /// println!("{}", script); /// } /// ``` pub fn all() -> &'static [Script] { &VALUES } pub fn name(&self) -> &str { match *self { Script::Latin => "Latin", Script::Cyrillic => "Cyrillic", Script::Arabic => "Arabic", Script::Devanagari => "Devanagari", Script::Hiragana => "Hiragana", Script::Katakana => "Katakana", Script::Ethiopic => "Ethiopic", Script::Hebrew => "Hebrew", Script::Bengali => "Bengali", Script::Georgian => "Georgian", Script::Mandarin => "Mandarin", Script::Hangul => "Hangul", Script::Greek => "Greek", Script::Kannada => "Kannada", Script::Tamil => "Tamil", Script::Thai => "Thai", Script::Gujarati => "Gujarati", Script::Gurmukhi => "Gurmukhi", Script::Telugu => "Telugu", Script::Malayalam => "Malayalam", Script::Oriya => "Oriya", Script::Myanmar => "Myanmar", Script::Sinhala => "Sinhala", Script::Khmer => "Khmer", } } pub fn langs(&self) -> &[Lang] { lang_mapping::script_langs(*self) } } impl fmt::Display for Script { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name()) } } impl FromStr for Script { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_lowercase().trim() { "latin" => Ok(Script::Latin), "cyrillic" => Ok(Script::Cyrillic), "arabic" => Ok(Script::Arabic), "devanagari" => Ok(Script::Devanagari), "hiragana" => Ok(Script::Hiragana),
"mandarin" => Ok(Script::Mandarin), "hangul" => Ok(Script::Hangul), "greek" => Ok(Script::Greek), "kannada" => Ok(Script::Kannada), "tamil" => Ok(Script::Tamil), "thai" => Ok(Script::Thai), "gujarati" => Ok(Script::Gujarati), "gurmukhi" => Ok(Script::Gurmukhi), "telugu" => Ok(Script::Telugu), "malayalam" => Ok(Script::Malayalam), "oriya" => Ok(Script::Oriya), "myanmar" => Ok(Script::Myanmar), "sinhala" => Ok(Script::Sinhala), "khmer" => Ok(Script::Khmer), _ => Err(Error::ParseScript(s.to_string())), } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_all() { assert_eq!(Script::all().len(), 24); let all = Script::all(); assert!(all.contains(&Script::Cyrillic)); assert!(all.contains(&Script::Arabic)); assert!(all.contains(&Script::Latin)); } #[test] fn test_from_str() { for &script in Script::all() { let s = script.name(); assert_eq!(s.parse::<Script>().unwrap(), script); assert_eq!(s.to_lowercase().parse::<Script>().unwrap(), script); assert_eq!(s.to_uppercase().parse::<Script>().unwrap(), script); } let result = "foobar".parse::<Script>(); assert!(matches!(result, Err(Error::ParseScript(_)))); } #[test] fn test_langs() { // Vec of all langs obtained with script.langs() let script_langs: Vec<Lang> = Script::all() .iter() .map(|script| script.langs()) .flatten() .copied() .collect(); // Ensure all langs belong at least to one script for lang in Lang::all() { assert!(script_langs.contains(&lang)); } } }
"katakana" => Ok(Script::Katakana), "ethiopic" => Ok(Script::Ethiopic), "hebrew" => Ok(Script::Hebrew), "bengali" => Ok(Script::Bengali), "georgian" => Ok(Script::Georgian),
random_line_split
sync.py
import io try: import requests except ImportError: print("Requests module not installed, sync functions unavailable!") class ResponseError(BaseException): pass class Route: def __init__(self, base_url, path, cdn_url=None, method="GET", headers=None): self.base_url = base_url self.path = path self.method = method self.headers = headers self.cdn_url = cdn_url def sync_query(self, url_params=None): res = getattr(requests, self.method.lower())( self.base_url+self.path, headers=self.headers) if 200 <= res.status_code < 300: retval = res.json() # Some endpoints are not images if self.cdn_url is None: return retval return Result(**retval, cdn_url=self.cdn_url) else: raise ResponseError( "Expected a status code 200-299, got {} \n{}" .format(res.status_code, self.base_url+self.path)) def __call__(self, url_params=None): return self.sync_query(url_params) class Result: def __init__(self, path, id, type, nsfw, cdn_url):
def sync_download(self): res = requests.get(self.cdn_url+self.cdn_path) if 200 <= res.status_code < 300: return io.BytesIO(res.content) else: raise ResponseError( "Expected a status code 200-299, got {}" .format(res.status_code)) def __call__(self): return self.sync_download()
self.path = path self.cdn_path = path[2:] self.img_id = id self.img_type = type self.nsfw = nsfw self.cdn_url = cdn_url
identifier_body
sync.py
import io try: import requests except ImportError: print("Requests module not installed, sync functions unavailable!") class ResponseError(BaseException): pass class Route: def __init__(self, base_url, path, cdn_url=None, method="GET", headers=None): self.base_url = base_url self.path = path self.method = method self.headers = headers self.cdn_url = cdn_url def sync_query(self, url_params=None): res = getattr(requests, self.method.lower())( self.base_url+self.path, headers=self.headers) if 200 <= res.status_code < 300: retval = res.json() # Some endpoints are not images if self.cdn_url is None: return retval return Result(**retval, cdn_url=self.cdn_url) else: raise ResponseError( "Expected a status code 200-299, got {} \n{}" .format(res.status_code, self.base_url+self.path)) def __call__(self, url_params=None): return self.sync_query(url_params) class Result: def __init__(self, path, id, type, nsfw, cdn_url): self.path = path self.cdn_path = path[2:] self.img_id = id self.img_type = type self.nsfw = nsfw self.cdn_url = cdn_url def
(self): res = requests.get(self.cdn_url+self.cdn_path) if 200 <= res.status_code < 300: return io.BytesIO(res.content) else: raise ResponseError( "Expected a status code 200-299, got {}" .format(res.status_code)) def __call__(self): return self.sync_download()
sync_download
identifier_name
sync.py
import io try: import requests except ImportError: print("Requests module not installed, sync functions unavailable!") class ResponseError(BaseException): pass class Route: def __init__(self, base_url, path, cdn_url=None, method="GET", headers=None): self.base_url = base_url self.path = path self.method = method self.headers = headers self.cdn_url = cdn_url def sync_query(self, url_params=None): res = getattr(requests, self.method.lower())( self.base_url+self.path, headers=self.headers) if 200 <= res.status_code < 300: retval = res.json() # Some endpoints are not images if self.cdn_url is None: return retval return Result(**retval, cdn_url=self.cdn_url) else: raise ResponseError( "Expected a status code 200-299, got {} \n{}" .format(res.status_code, self.base_url+self.path)) def __call__(self, url_params=None): return self.sync_query(url_params) class Result: def __init__(self, path, id, type, nsfw, cdn_url): self.path = path self.cdn_path = path[2:] self.img_id = id self.img_type = type self.nsfw = nsfw self.cdn_url = cdn_url def sync_download(self): res = requests.get(self.cdn_url+self.cdn_path) if 200 <= res.status_code < 300: return io.BytesIO(res.content) else:
def __call__(self): return self.sync_download()
raise ResponseError( "Expected a status code 200-299, got {}" .format(res.status_code))
conditional_block
sync.py
import io try: import requests except ImportError: print("Requests module not installed, sync functions unavailable!") class ResponseError(BaseException): pass class Route: def __init__(self, base_url, path, cdn_url=None, method="GET", headers=None): self.base_url = base_url self.path = path self.method = method self.headers = headers self.cdn_url = cdn_url def sync_query(self, url_params=None): res = getattr(requests, self.method.lower())( self.base_url+self.path, headers=self.headers) if 200 <= res.status_code < 300: retval = res.json() # Some endpoints are not images if self.cdn_url is None: return retval return Result(**retval, cdn_url=self.cdn_url) else: raise ResponseError( "Expected a status code 200-299, got {} \n{}" .format(res.status_code, self.base_url+self.path)) def __call__(self, url_params=None): return self.sync_query(url_params) class Result: def __init__(self, path, id, type, nsfw, cdn_url): self.path = path self.cdn_path = path[2:] self.img_id = id self.img_type = type self.nsfw = nsfw self.cdn_url = cdn_url def sync_download(self): res = requests.get(self.cdn_url+self.cdn_path) if 200 <= res.status_code < 300: return io.BytesIO(res.content) else: raise ResponseError( "Expected a status code 200-299, got {}" .format(res.status_code))
def __call__(self): return self.sync_download()
random_line_split
cst.py
# -*- coding: utf-8 -*- """ Created on Wed Apr 20 16:01:21 2016 @author: KB """ #List of the game's images import pygame dossier = "./Img_FS/" extension1 = ".png" extension2 = ".jpg" image_accueil = dossier + "accueil" + extension1 image_fond = dossier + "back1" + extension2 image_mur = dossier + "mur" + extension1 image_persos = dossier + "persos" + extension1 image_monstres = dossier + "monstres" + extension1 image_fin = dossier + "fin" + extension1 image_pause = dossier + "pause" + extension1 image_tache = dossier + "tache" + extension1 image_credit = dossier + "credit" + extension1 #Window parameters nombre_sprite_cote = 25 taille_sprite = 42
cote_fenetre = nombre_sprite_cote * taille_sprite #Window customization titre_fenetre = "Fear Shadows"
random_line_split
contact_ball_ball.rs
use crate::math::{Point, Vector}; use crate::query::Contact; use crate::shape::Ball; use na::{self, RealField, Unit}; /// Contact between balls. #[inline] pub fn contact_ball_ball<N: RealField>( center1: &Point<N>, b1: &Ball<N>, center2: &Point<N>, b2: &Ball<N>, prediction: N, ) -> Option<Contact<N>> { let r1 = b1.radius; let r2 = b2.radius; let delta_pos = *center2 - *center1; let distance_squared = delta_pos.norm_squared(); let sum_radius = r1 + r2; let sum_radius_with_error = sum_radius + prediction; if distance_squared < sum_radius_with_error * sum_radius_with_error { let normal = if !distance_squared.is_zero() { Unit::new_normalize(delta_pos) } else
; Some(Contact::new( *center1 + *normal * r1, *center2 + *normal * (-r2), normal, sum_radius - distance_squared.sqrt(), )) } else { None } }
{ Vector::x_axis() }
conditional_block
contact_ball_ball.rs
use crate::math::{Point, Vector}; use crate::query::Contact; use crate::shape::Ball; use na::{self, RealField, Unit}; /// Contact between balls. #[inline] pub fn contact_ball_ball<N: RealField>( center1: &Point<N>, b1: &Ball<N>, center2: &Point<N>, b2: &Ball<N>, prediction: N, ) -> Option<Contact<N>>
{ let r1 = b1.radius; let r2 = b2.radius; let delta_pos = *center2 - *center1; let distance_squared = delta_pos.norm_squared(); let sum_radius = r1 + r2; let sum_radius_with_error = sum_radius + prediction; if distance_squared < sum_radius_with_error * sum_radius_with_error { let normal = if !distance_squared.is_zero() { Unit::new_normalize(delta_pos) } else { Vector::x_axis() }; Some(Contact::new( *center1 + *normal * r1, *center2 + *normal * (-r2), normal, sum_radius - distance_squared.sqrt(), )) } else { None } }
identifier_body
contact_ball_ball.rs
use crate::math::{Point, Vector}; use crate::query::Contact; use crate::shape::Ball; use na::{self, RealField, Unit}; /// Contact between balls. #[inline] pub fn
<N: RealField>( center1: &Point<N>, b1: &Ball<N>, center2: &Point<N>, b2: &Ball<N>, prediction: N, ) -> Option<Contact<N>> { let r1 = b1.radius; let r2 = b2.radius; let delta_pos = *center2 - *center1; let distance_squared = delta_pos.norm_squared(); let sum_radius = r1 + r2; let sum_radius_with_error = sum_radius + prediction; if distance_squared < sum_radius_with_error * sum_radius_with_error { let normal = if !distance_squared.is_zero() { Unit::new_normalize(delta_pos) } else { Vector::x_axis() }; Some(Contact::new( *center1 + *normal * r1, *center2 + *normal * (-r2), normal, sum_radius - distance_squared.sqrt(), )) } else { None } }
contact_ball_ball
identifier_name
contact_ball_ball.rs
use crate::math::{Point, Vector}; use crate::query::Contact; use crate::shape::Ball; use na::{self, RealField, Unit}; /// Contact between balls. #[inline] pub fn contact_ball_ball<N: RealField>( center1: &Point<N>, b1: &Ball<N>, center2: &Point<N>, b2: &Ball<N>, prediction: N, ) -> Option<Contact<N>> {
let r2 = b2.radius; let delta_pos = *center2 - *center1; let distance_squared = delta_pos.norm_squared(); let sum_radius = r1 + r2; let sum_radius_with_error = sum_radius + prediction; if distance_squared < sum_radius_with_error * sum_radius_with_error { let normal = if !distance_squared.is_zero() { Unit::new_normalize(delta_pos) } else { Vector::x_axis() }; Some(Contact::new( *center1 + *normal * r1, *center2 + *normal * (-r2), normal, sum_radius - distance_squared.sqrt(), )) } else { None } }
let r1 = b1.radius;
random_line_split
longlive.js
var mongo = require('../'); var db = mongo.db('192.168.0.103/test'); // var db = mongo.db('127.0.0.1/test'); var myconsole = require('myconsole'); var foo = db.collection('foo'); setInterval(function() { foo.insert({foo:'foo'}, function(err, result){ if(err) return myconsole.error(err); foo.count(function(err, count){ if(err) return myconsole.error(err); myconsole.log('count: %d', count); foo.find().limit(10).toArray(function(err, arr) { if(err) return myconsole.error(err); myconsole.log('arr: %d', arr.length); }) }) }) }, 500);
process.on('SIGINT', function(){ myconsole.log('SIGINT') foo.drop(function(err){ if(err) myconsole.error(err); process.exit(); }) })
random_line_split
tests.rs
extern crate rustsocks; use rustsocks::{Socks4, Socks4a, Socks5}; use std::io::net::ip::IpAddr; static SOCKS_HOST : &'static str = "127.0.0.1"; static SOCKS_PORT : u16 = 9150; static GET_REQUEST : &'static str = "GET /404 HTTP/1.1\nHost: www.google.com\nConnection: close\n\n"; #[test] fn socks4a() { let mut socks = Socks4a::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks4() { let mut socks = Socks4::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks5_domain() { let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks5_ipv4()
{ let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); }
identifier_body
tests.rs
extern crate rustsocks; use rustsocks::{Socks4, Socks4a, Socks5}; use std::io::net::ip::IpAddr; static SOCKS_HOST : &'static str = "127.0.0.1"; static SOCKS_PORT : u16 = 9150; static GET_REQUEST : &'static str = "GET /404 HTTP/1.1\nHost: www.google.com\nConnection: close\n\n"; #[test] fn socks4a() { let mut socks = Socks4a::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks4() { let mut socks = Socks4::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn
() { let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks5_ipv4() { let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); }
socks5_domain
identifier_name
tests.rs
extern crate rustsocks; use rustsocks::{Socks4, Socks4a, Socks5}; use std::io::net::ip::IpAddr; static SOCKS_HOST : &'static str = "127.0.0.1"; static SOCKS_PORT : u16 = 9150; static GET_REQUEST : &'static str = "GET /404 HTTP/1.1\nHost: www.google.com\nConnection: close\n\n"; #[test] fn socks4a() { let mut socks = Socks4a::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks4() { let mut socks = Socks4::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks5_domain() { let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let mut stream = socks.connect("www.google.com", 80); let _ = stream.write_str(GET_REQUEST); println!("{}", stream.read_to_string().unwrap()); } #[test] fn socks5_ipv4() { let mut socks = Socks5::new(SOCKS_HOST, SOCKS_PORT); let addr = from_str::<IpAddr>("74.125.230.65").unwrap(); let mut stream = socks.connect(addr, 80);
println!("{}", stream.read_to_string().unwrap()); }
let _ = stream.write_str(GET_REQUEST);
random_line_split
main.rs
use { futures::{ future::{BoxFuture, FutureExt}, task::{waker_ref, ArcWake}, }, std::{ future::Future, sync::mpsc::{sync_channel, Receiver, SyncSender}, sync::{Arc, Mutex}, task::{Context, Poll}, time::Duration, }, }; struct Task { future: Mutex<Option<BoxFuture<'static, ()>>>, task_sender: SyncSender<Arc<Task>>, } impl ArcWake for Task { fn wake_by_ref(arc_self: &Arc<Self>) { println!("wake_by_ref.1"); let cloned = arc_self.clone(); println!("wake_by_ref.2"); arc_self .task_sender .send(cloned) .expect("too many tasks queued"); println!("wake_by_ref.3"); } } struct Sender(SyncSender<BoxFuture<'static, ()>>); struct Executor { task_sender: Arc<Sender>, ready_queue: Receiver<BoxFuture<'static, ()>>, } impl ArcWake for Sender { fn wake_by_ref(arc_self: &Arc<Self>) { let _ = arc_self; /* TODO: re-enqueue the pending future so the executor polls it again */ } } impl Executor { fn run(&self) { while let Ok(mut f) = self.ready_queue.recv() { let c = self.task_sender.clone(); let waker = waker_ref(&c); let ctx = &mut Context::from_waker(&*waker); let t = f.as_mut(); t.poll(ctx); // let mut future_slot = task.future.lock().unwrap(); // if let Some(mut future) = future_slot.take() { // let waker = waker_ref(&task); // let context = &mut Context::from_waker(&*waker); // if let Poll::Pending = future.as_mut().poll(context) { // *future_slot = Some(future); // } // } } } fn spawn(&self, future: impl Future<Output = ()> + 'static + Send) { let future = future.boxed(); self.task_sender .0 .send(future) .expect("too many tasks queued"); } } fn new_executor_and_spawner() -> Executor { const MAX_QUEUED_TASKS: usize = 10_000; let (task_sender, ready_queue) = sync_channel(MAX_QUEUED_TASKS); Executor { task_sender: Arc::new(Sender(task_sender)), ready_queue, } } struct FutureReceiver<T>(Arc<Mutex<std::sync::mpsc::Receiver<T>>>, Option<T>); impl<T: 'static + Send + Sync> Future for FutureReceiver<T> { type Output = T; fn poll( self: std::pin::Pin<&mut Self>, ctx: &mut std::task::Context<'_>, ) -> std::task::Poll<T> { println!("FutureReceiver.poll.1"); let ch = self.0.lock().unwrap(); let mut iter = ch.try_iter(); match iter.next() { Some(v) => std::task::Poll::Ready(v), None => { let waker = ctx.waker().clone(); let channel = self.0.clone(); std::thread::spawn(move || { let item = channel.lock().unwrap().recv(); println!("received!"); waker.wake(); }); std::task::Poll::Pending } } } } fn main()
{ let (sender, receiver) = std::sync::mpsc::channel::<i32>(); let s1 = sender.clone(); std::thread::spawn(move || { std::thread::sleep(std::time::Duration::from_millis(1000)); println!("sent!"); s1.send(1).unwrap(); }); let receiver = FutureReceiver(Arc::from(Mutex::from(receiver)), None); //sender.send(1).unwrap(); let f = async move { println!("howdy!"); receiver.await; println!("done!"); }; let exec = new_executor_and_spawner(); exec.spawn(f); exec.run(); }
identifier_body
main.rs
use { futures::{ future::{BoxFuture, FutureExt}, task::{waker_ref, ArcWake}, }, std::{ future::Future, sync::mpsc::{sync_channel, Receiver, SyncSender}, sync::{Arc, Mutex}, task::{Context, Poll}, time::Duration, }, }; struct Task { future: Mutex<Option<BoxFuture<'static, ()>>>, task_sender: SyncSender<Arc<Task>>, } impl ArcWake for Task { fn wake_by_ref(arc_self: &Arc<Self>) { println!("wake_by_ref.1"); let cloned = arc_self.clone(); println!("wake_by_ref.2"); arc_self .task_sender .send(cloned) .expect("too many tasks queued"); println!("wake_by_ref.3"); } } struct Sender(SyncSender<BoxFuture<'static, ()>>); struct Executor { task_sender: Arc<Sender>, ready_queue: Receiver<BoxFuture<'static, ()>>, } impl ArcWake for Sender { fn wake_by_ref(arc_self: &Arc<Self>) { let _ = arc_self; /* TODO: re-enqueue the pending future so the executor polls it again */ } } impl Executor { fn run(&self) { while let Ok(mut f) = self.ready_queue.recv() { let c = self.task_sender.clone(); let waker = waker_ref(&c); let ctx = &mut Context::from_waker(&*waker); let t = f.as_mut(); t.poll(ctx); // let mut future_slot = task.future.lock().unwrap(); // if let Some(mut future) = future_slot.take() { // let waker = waker_ref(&task); // let context = &mut Context::from_waker(&*waker); // if let Poll::Pending = future.as_mut().poll(context) { // *future_slot = Some(future); // } // } } } fn spawn(&self, future: impl Future<Output = ()> + 'static + Send) { let future = future.boxed(); self.task_sender .0 .send(future) .expect("too many tasks queued"); } } fn new_executor_and_spawner() -> Executor { const MAX_QUEUED_TASKS: usize = 10_000; let (task_sender, ready_queue) = sync_channel(MAX_QUEUED_TASKS); Executor { task_sender: Arc::new(Sender(task_sender)), ready_queue, } } struct FutureReceiver<T>(Arc<Mutex<std::sync::mpsc::Receiver<T>>>, Option<T>); impl<T: 'static + Send + Sync> Future for FutureReceiver<T> { type Output = T; fn
( self: std::pin::Pin<&mut Self>, ctx: &mut std::task::Context<'_>, ) -> std::task::Poll<T> { println!("FutureReceiver.poll.1"); let ch = self.0.lock().unwrap(); let mut iter = ch.try_iter(); match iter.next() { Some(v) => std::task::Poll::Ready(v), None => { let waker = ctx.waker().clone(); let channel = self.0.clone(); std::thread::spawn(move || { let item = channel.lock().unwrap().recv(); println!("received!"); waker.wake(); }); std::task::Poll::Pending } } } } fn main() { let (sender, receiver) = std::sync::mpsc::channel::<i32>(); let s1 = sender.clone(); std::thread::spawn(move || { std::thread::sleep(std::time::Duration::from_millis(1000)); println!("sent!"); s1.send(1).unwrap(); }); let receiver = FutureReceiver(Arc::from(Mutex::from(receiver)), None); //sender.send(1).unwrap(); let f = async move { println!("howdy!"); receiver.await; println!("done!"); }; let exec = new_executor_and_spawner(); exec.spawn(f); exec.run(); }
poll
identifier_name
main.rs
use { futures::{ future::{BoxFuture, FutureExt}, task::{waker_ref, ArcWake}, }, std::{ future::Future, sync::mpsc::{sync_channel, Receiver, SyncSender}, sync::{Arc, Mutex}, task::{Context, Poll}, time::Duration, }, }; struct Task { future: Mutex<Option<BoxFuture<'static, ()>>>, task_sender: SyncSender<Arc<Task>>, } impl ArcWake for Task { fn wake_by_ref(arc_self: &Arc<Self>) { println!("wake_by_ref.1"); let cloned = arc_self.clone(); println!("wake_by_ref.2"); arc_self .task_sender .send(cloned) .expect("too many tasks queued"); println!("wake_by_ref.3"); } } struct Sender(SyncSender<BoxFuture<'static, ()>>); struct Executor { task_sender: Arc<Sender>, ready_queue: Receiver<BoxFuture<'static, ()>>, } impl ArcWake for Sender { fn wake_by_ref(arc_self: &Arc<Self>) { let _ = arc_self; /* TODO: re-enqueue the pending future so the executor polls it again */ } } impl Executor { fn run(&self) { while let Ok(mut f) = self.ready_queue.recv() { let c = self.task_sender.clone(); let waker = waker_ref(&c); let ctx = &mut Context::from_waker(&*waker); let t = f.as_mut(); t.poll(ctx); // let mut future_slot = task.future.lock().unwrap(); // if let Some(mut future) = future_slot.take() { // let waker = waker_ref(&task); // let context = &mut Context::from_waker(&*waker); // if let Poll::Pending = future.as_mut().poll(context) { // *future_slot = Some(future); // } // } } } fn spawn(&self, future: impl Future<Output = ()> + 'static + Send) { let future = future.boxed(); self.task_sender .0 .send(future) .expect("too many tasks queued"); } } fn new_executor_and_spawner() -> Executor { const MAX_QUEUED_TASKS: usize = 10_000; let (task_sender, ready_queue) = sync_channel(MAX_QUEUED_TASKS); Executor { task_sender: Arc::new(Sender(task_sender)), ready_queue, } } struct FutureReceiver<T>(Arc<Mutex<std::sync::mpsc::Receiver<T>>>, Option<T>); impl<T: 'static + Send + Sync> Future for FutureReceiver<T> { type Output = T; fn poll( self: std::pin::Pin<&mut Self>, ctx: &mut std::task::Context<'_>, ) -> std::task::Poll<T> { println!("FutureReceiver.poll.1"); let ch = self.0.lock().unwrap(); let mut iter = ch.try_iter(); match iter.next() { Some(v) => std::task::Poll::Ready(v), None => { let waker = ctx.waker().clone(); let channel = self.0.clone(); std::thread::spawn(move || { let item = channel.lock().unwrap().recv(); println!("received!"); waker.wake(); }); std::task::Poll::Pending } } } } fn main() { let (sender, receiver) = std::sync::mpsc::channel::<i32>(); let s1 = sender.clone(); std::thread::spawn(move || { std::thread::sleep(std::time::Duration::from_millis(1000)); println!("sent!"); s1.send(1).unwrap(); }); let receiver = FutureReceiver(Arc::from(Mutex::from(receiver)), None); //sender.send(1).unwrap(); let f = async move { println!("howdy!");
}; let exec = new_executor_and_spawner(); exec.spawn(f); exec.run(); }
receiver.await; println!("done!");
random_line_split
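The executor in this record polls each future exactly once and then drops it, so a `Pending` future is lost (the commented-out slot-based code in `run` shows the working variant). A common minimal alternative is a blocking `block_on` that parks the calling thread until the waker fires. A sketch under that assumption; `Parker` and `block_on` are illustrative names, not from the sample:

// --- begin illustrative sketch (not a dataset record) ---
use std::future::Future;
use std::pin::Pin;
use std::sync::{Arc, Condvar, Mutex};
use std::task::{Context, Poll, Wake, Waker};

struct Parker {
    notified: Mutex<bool>,
    cvar: Condvar,
}

impl Wake for Parker {
    fn wake(self: Arc<Self>) {
        *self.notified.lock().unwrap() = true;
        self.cvar.notify_one();
    }
}

fn block_on<F: Future>(mut fut: F) -> F::Output {
    // SAFETY: `fut` lives on this stack frame and is never moved after pinning.
    let mut fut = unsafe { Pin::new_unchecked(&mut fut) };
    let parker = Arc::new(Parker { notified: Mutex::new(false), cvar: Condvar::new() });
    let waker = Waker::from(parker.clone());
    let mut cx = Context::from_waker(&waker);
    loop {
        match fut.as_mut().poll(&mut cx) {
            Poll::Ready(v) => return v,
            Poll::Pending => {
                // Park until `wake` is called, instead of dropping the future.
                let mut flag = parker.notified.lock().unwrap();
                while !*flag {
                    flag = parker.cvar.wait(flag).unwrap();
                }
                *flag = false;
            }
        }
    }
}

fn main() {
    println!("{}", block_on(async { 40 + 2 }));
}
// --- end sketch ---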
__init__.py
# -*-coding:Utf-8 -* # Copyright (c) 2010-2017 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO Ematelot SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package contenant la commande 'matelot' et ses sous-commandes. Dans ce fichier se trouve la commande même. """ from primaires.interpreteur.commande.commande import Commande from .affecter import PrmAffecter from .creer import PrmCreer from .editer import PrmEditer from .info import PrmInfo from .liste import PrmListe from .poste import PrmPoste from .promouvoir import PrmPromouvoir from .recruter import PrmRecruter from .renommer import PrmRenommer from .retirer import PrmRetirer from .score import PrmScore class CmdMatelot(Commande): """Commande 'matelot'. """ def __init__(self): """Constructeur de la commande""" Commande.__init__(self, "matelot", "seaman")
"votre équipage individuellement. Il existe également " \ "la commande %équipage% qui permet de manipuler l'équipage " \ "d'un coup d'un seul." def ajouter_parametres(self): """Ajout des paramètres""" self.ajouter_parametre(PrmAffecter()) self.ajouter_parametre(PrmCreer()) self.ajouter_parametre(PrmEditer()) self.ajouter_parametre(PrmInfo()) self.ajouter_parametre(PrmListe()) self.ajouter_parametre(PrmPoste()) self.ajouter_parametre(PrmPromouvoir()) self.ajouter_parametre(PrmRecruter()) self.ajouter_parametre(PrmRenommer()) self.ajouter_parametre(PrmRetirer()) self.ajouter_parametre(PrmScore())
self.nom_categorie = "navire" self.aide_courte = "manipulation des matelots" self.aide_longue = \ "Cette commande permet de manipuler les matelots de " \
random_line_split
__init__.py
# -*-coding:Utf-8 -* # Copyright (c) 2010-2017 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO Ematelot SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package contenant la commande 'matelot' et ses sous-commandes. Dans ce fichier se trouve la commande même. """ from primaires.interpreteur.commande.commande import Commande from .affecter import PrmAffecter from .creer import PrmCreer from .editer import PrmEditer from .info import PrmInfo from .liste import PrmListe from .poste import PrmPoste from .promouvoir import PrmPromouvoir from .recruter import PrmRecruter from .renommer import PrmRenommer from .retirer import PrmRetirer from .score import PrmScore class CmdMatelot(Commande): """Commande 'matelot'. """ def __init__(self): """Constructeur de la commande""" Commande.__init__(self, "matelot", "seaman") self.nom_categorie = "navire" self.aide_courte = "manipulation des matelots" self.aide_longue = \ "Cette commande permet de manipuler les matelots de " \ "votre équipage individuellement. Il existe également " \ "la commande %équipage% qui permet de manipuler l'équipage " \ "d'un coup d'un seul." def ajouter_parametres(self): """Aj
out des paramètres""" self.ajouter_parametre(PrmAffecter()) self.ajouter_parametre(PrmCreer()) self.ajouter_parametre(PrmEditer()) self.ajouter_parametre(PrmInfo()) self.ajouter_parametre(PrmListe()) self.ajouter_parametre(PrmPoste()) self.ajouter_parametre(PrmPromouvoir()) self.ajouter_parametre(PrmRecruter()) self.ajouter_parametre(PrmRenommer()) self.ajouter_parametre(PrmRetirer()) self.ajouter_parametre(PrmScore())
identifier_body
__init__.py
# -*-coding:Utf-8 -* # Copyright (c) 2010-2017 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO Ematelot SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package contenant la commande 'matelot' et ses sous-commandes. Dans ce fichier se trouve la commande même. """ from primaires.interpreteur.commande.commande import Commande from .affecter import PrmAffecter from .creer import PrmCreer from .editer import PrmEditer from .info import PrmInfo from .liste import PrmListe from .poste import PrmPoste from .promouvoir import PrmPromouvoir from .recruter import PrmRecruter from .renommer import PrmRenommer from .retirer import PrmRetirer from .score import PrmScore class C
Commande): """Commande 'matelot'. """ def __init__(self): """Constructeur de la commande""" Commande.__init__(self, "matelot", "seaman") self.nom_categorie = "navire" self.aide_courte = "manipulation des matelots" self.aide_longue = \ "Cette commande permet de manipuler les matelots de " \ "votre équipage individuellement. Il existe également " \ "la commande %équipage% qui permet de manipuler l'équipage " \ "d'un coup d'un seul." def ajouter_parametres(self): """Ajout des paramètres""" self.ajouter_parametre(PrmAffecter()) self.ajouter_parametre(PrmCreer()) self.ajouter_parametre(PrmEditer()) self.ajouter_parametre(PrmInfo()) self.ajouter_parametre(PrmListe()) self.ajouter_parametre(PrmPoste()) self.ajouter_parametre(PrmPromouvoir()) self.ajouter_parametre(PrmRecruter()) self.ajouter_parametre(PrmRenommer()) self.ajouter_parametre(PrmRetirer()) self.ajouter_parametre(PrmScore())
mdMatelot(
identifier_name
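The three records above slice the same French source file; the payload is kept verbatim because the FIM split points fall mid-word (for example `Aj`/`out` and `C`/`mdMatelot`). Roughly translated, the module docstring reads "Package containing the 'matelot' (seaman) command and its subcommands", and the long help text says the command manipulates crew members one by one, while the %équipage% (crew) command acts on the whole crew at once. The registration pattern, a parent command adding its subcommand objects at definition time, looks like this in a loose Rust sketch (all names invented here):

// --- begin illustrative sketch (not a dataset record) ---
trait Param {
    fn name(&self) -> &'static str;
}

struct PrmListe;
impl Param for PrmListe {
    fn name(&self) -> &'static str { "liste" }
}

struct PrmInfo;
impl Param for PrmInfo {
    fn name(&self) -> &'static str { "info" }
}

struct Command {
    name: &'static str,
    params: Vec<Box<dyn Param>>,
}

impl Command {
    fn new(name: &'static str) -> Self {
        let mut cmd = Command { name, params: Vec::new() };
        // Mirrors ajouter_parametres(): register each subcommand object.
        cmd.add_param(Box::new(PrmListe));
        cmd.add_param(Box::new(PrmInfo));
        cmd
    }

    fn add_param(&mut self, p: Box<dyn Param>) {
        self.params.push(p);
    }
}

fn main() {
    let cmd = Command::new("matelot");
    let names: Vec<_> = cmd.params.iter().map(|p| p.name()).collect();
    println!("{}: {:?}", cmd.name, names);
}
// --- end sketch ---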
ellipse.js
/* Class: Graphic.Ellipse Shape implementation of an ellipse. Author: Sébastien Gruhier, <http://www.xilinus.com> License: MIT-style license. See Also:
Graphic.Ellipse = Class.create(); Object.extend(Graphic.Ellipse.prototype, Graphic.Shape.prototype); // Keep parent initialize Graphic.Ellipse.prototype._shapeInitialize = Graphic.Shape.prototype.initialize; Object.extend(Graphic.Ellipse.prototype, { initialize: function(renderer) { this._shapeInitialize(renderer, "ellipse"); Object.extend(this.attributes, {cx: 0, cy: 0, rx: 0, ry: 0}) return this; }, getSize: function() { return {w: 2 * this.attributes.rx, h: 2 * this.attributes.ry} }, setSize: function(width, height) { var location = this.getLocation(); this._setAttributes({rx: width/2, ry: height/2}); this.setLocation(location.x, location.y); return this; }, getLocation: function() { return {x: this.attributes.cx - this.attributes.rx, y: this.attributes.cy - this.attributes.ry} }, setLocation: function(x, y) { this._setAttributes({cx: x + this.attributes.rx, cy: y + this.attributes.ry}); return this; } })
<Shape> */
random_line_split
treeUtils.js
//@line 39 "/builds/slave/rel-m-rel-xr-osx64-bld/build/toolkit/content/treeUtils.js" var gTreeUtils = { deleteAll: function (aTree, aView, aItems, aDeletedItems) { for (var i = 0; i < aItems.length; ++i) aDeletedItems.push(aItems[i]); aItems.splice(0, aItems.length); var oldCount = aView.rowCount; aView._rowCount = 0; aTree.treeBoxObject.rowCountChanged(0, -oldCount); }, deleteSelectedItems: function (aTree, aView, aItems, aDeletedItems) { var selection = aTree.view.selection; selection.selectEventsSuppressed = true; var rc = selection.getRangeCount(); for (var i = 0; i < rc; ++i) { var min = { }; var max = { }; selection.getRangeAt(i, min, max); for (var j = min.value; j <= max.value; ++j) { aDeletedItems.push(aItems[j]); aItems[j] = null; } } var nextSelection = 0; for (i = 0; i < aItems.length; ++i) { if (!aItems[i]) { var j = i; while (j < aItems.length && !aItems[j]) ++j; aItems.splice(i, j - i); nextSelection = j < aView.rowCount ? j - 1 : j - 2; aView._rowCount -= j - i; aTree.treeBoxObject.rowCountChanged(i, i - j); } } if (aItems.length) { selection.select(nextSelection); aTree.treeBoxObject.ensureRowIsVisible(nextSelection); aTree.focus(); } selection.selectEventsSuppressed = false; }, sort: function (aTree, aView, aDataSet, aColumn, aComparator, aLastSortColumn, aLastSortAscending) { var ascending = (aColumn == aLastSortColumn) ? !aLastSortAscending : true; if (aDataSet.length == 0) return ascending; var numericSort = !isNaN(aDataSet[0][aColumn]); var sortFunction = null; if (aComparator) { sortFunction = function (a, b) { return aComparator(a[aColumn], b[aColumn]); }; } aDataSet.sort(sortFunction); if (!ascending) aDataSet.reverse(); aTree.view.selection.clearSelection(); aTree.view.selection.select(0); aTree.treeBoxObject.invalidate(); aTree.treeBoxObject.ensureRowIsVisible(0); return ascending; }
};
random_line_split
treeUtils.js
//@line 39 "/builds/slave/rel-m-rel-xr-osx64-bld/build/toolkit/content/treeUtils.js" var gTreeUtils = { deleteAll: function (aTree, aView, aItems, aDeletedItems) { for (var i = 0; i < aItems.length; ++i) aDeletedItems.push(aItems[i]); aItems.splice(0, aItems.length); var oldCount = aView.rowCount; aView._rowCount = 0; aTree.treeBoxObject.rowCountChanged(0, -oldCount); }, deleteSelectedItems: function (aTree, aView, aItems, aDeletedItems) { var selection = aTree.view.selection; selection.selectEventsSuppressed = true; var rc = selection.getRangeCount(); for (var i = 0; i < rc; ++i) { var min = { }; var max = { }; selection.getRangeAt(i, min, max); for (var j = min.value; j <= max.value; ++j)
} var nextSelection = 0; for (i = 0; i < aItems.length; ++i) { if (!aItems[i]) { var j = i; while (j < aItems.length && !aItems[j]) ++j; aItems.splice(i, j - i); nextSelection = j < aView.rowCount ? j - 1 : j - 2; aView._rowCount -= j - i; aTree.treeBoxObject.rowCountChanged(i, i - j); } } if (aItems.length) { selection.select(nextSelection); aTree.treeBoxObject.ensureRowIsVisible(nextSelection); aTree.focus(); } selection.selectEventsSuppressed = false; }, sort: function (aTree, aView, aDataSet, aColumn, aComparator, aLastSortColumn, aLastSortAscending) { var ascending = (aColumn == aLastSortColumn) ? !aLastSortAscending : true; if (aDataSet.length == 0) return ascending; var numericSort = !isNaN(aDataSet[0][aColumn]); var sortFunction = null; if (aComparator) { sortFunction = function (a, b) { return aComparator(a[aColumn], b[aColumn]); }; } aDataSet.sort(sortFunction); if (!ascending) aDataSet.reverse(); aTree.view.selection.clearSelection(); aTree.view.selection.select(0); aTree.treeBoxObject.invalidate(); aTree.treeBoxObject.ensureRowIsVisible(0); return ascending; } };
{ aDeletedItems.push(aItems[j]); aItems[j] = null; }
conditional_block
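Both treeUtils.js records slice the same deletion routine: selected rows are first nulled out in place (the masked conditional block above), then the null runs are spliced away while the view's row count is adjusted. The mark-then-compact idea, separated from the XUL tree API, in a small Rust sketch (invented names, not from the dataset):

// --- begin illustrative sketch (not a dataset record) ---
fn delete_selected<T>(items: &mut Vec<Option<T>>, ranges: &[(usize, usize)]) -> Vec<T> {
    let mut deleted = Vec::new();
    // Mark phase: take each selected item out of its slot, remembering it.
    for &(min, max) in ranges {
        for slot in &mut items[min..=max] {
            if let Some(v) = slot.take() {
                deleted.push(v);
            }
        }
    }
    // Compact phase: drop the emptied slots, like the splice loop in the sample.
    items.retain(|s| s.is_some());
    deleted
}

fn main() {
    let mut items: Vec<Option<&str>> = vec![Some("a"), Some("b"), Some("c"), Some("d")];
    let deleted = delete_selected(&mut items, &[(1, 2)]);
    println!("deleted {deleted:?}, remaining {items:?}");
}
// --- end sketch ---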
cg.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use darling::{FromDeriveInput, FromField, FromVariant}; use proc_macro2::{Span, TokenStream}; use quote::TokenStreamExt; use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field}; use syn::{GenericArgument, GenericParam, Ident, Path}; use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray}; use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple}; use syn::{Variant, WherePredicate}; use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo}; /// Given an input type which has some where clauses already, like: /// /// struct InputType<T> /// where /// T: Zero, /// { /// ... /// } /// /// Add the necessary `where` clauses so that the output type of a trait /// fulfils them. /// /// For example: /// /// <T as ToComputedValue>::ComputedValue: Zero, /// /// This needs to run before adding other bounds to the type parameters. pub fn propagate_clauses_to_output_type( where_clause: &mut Option<syn::WhereClause>, generics: &syn::Generics, trait_path: Path, trait_output: Ident, ) { let where_clause = match *where_clause { Some(ref mut clause) => clause, None => return, }; let mut extra_bounds = vec![]; for pred in &where_clause.predicates { let ty = match *pred { syn::WherePredicate::Type(ref ty) => ty, ref predicate => panic!("Unhanded complex where predicate: {:?}", predicate), }; let path = match ty.bounded_ty { syn::Type::Path(ref p) => &p.path, ref ty => panic!("Unhanded complex where type: {:?}", ty), }; assert!( ty.lifetimes.is_none(), "Unhanded complex lifetime bound: {:?}", ty, ); let ident = match path_to_ident(path) { Some(i) => i, None => panic!("Unhanded complex where type path: {:?}", path), }; if generics.type_params().any(|param| param.ident == *ident) { extra_bounds.push(ty.clone()); } } for bound in extra_bounds { let ty = bound.bounded_ty; let bounds = bound.bounds; where_clause .predicates .push(parse_quote!(<#ty as #trait_path>::#trait_output: #bounds)) } } pub fn add_predicate(where_clause: &mut Option<syn::WhereClause>, pred: WherePredicate) { where_clause .get_or_insert(parse_quote!(where)) .predicates .push(pred); } pub fn fmap_match<F>(input: &DeriveInput, bind_style: BindStyle, mut f: F) -> TokenStream where F: FnMut(BindingInfo) -> TokenStream, { let mut s = synstructure::Structure::new(input); s.variants_mut().iter_mut().for_each(|v| { v.bind_with(|_| bind_style); }); s.each_variant(|variant| { let (mapped, mapped_fields) = value(variant, "mapped"); let fields_pairs = variant.bindings().iter().zip(mapped_fields); let mut computations = quote!(); computations.append_all(fields_pairs.map(|(field, mapped_field)| { let expr = f(field.clone()); quote! 
{ let #mapped_field = #expr; } })); computations.append_all(mapped); Some(computations) }) } pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: Ident) -> Path { let segment = PathSegment { ident: input.ident.clone(), arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: input .generics .params .iter() .map(|arg| match arg { &GenericParam::Lifetime(ref data) => { GenericArgument::Lifetime(data.lifetime.clone()) }, &GenericParam::Type(ref data) => { let ident = &data.ident; GenericArgument::Type(parse_quote!(<#ident as #trait_path>::#trait_output)) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), colon2_token: Default::default(), gt_token: Default::default(), lt_token: Default::default(), }), }; segment.into() } pub fn
<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type where F: FnMut(&Ident) -> Type, { match *ty { Type::Slice(ref inner) => Type::from(TypeSlice { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), Type::Array(ref inner) => { //ref ty, ref expr) => { Type::from(TypeArray { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }) }, ref ty @ Type::Never(_) => ty.clone(), Type::Tuple(ref inner) => Type::from(TypeTuple { elems: inner .elems .iter() .map(|ty| map_type_params(&ty, params, f)) .collect(), ..inner.clone() }), Type::Path(TypePath { qself: None, ref path, }) => { if let Some(ident) = path_to_ident(path) { if params.iter().any(|ref param| &param.ident == ident) { return f(ident); } } Type::from(TypePath { qself: None, path: map_type_params_in_path(path, params, f), }) }, Type::Path(TypePath { ref qself, ref path, }) => Type::from(TypePath { qself: qself.as_ref().map(|qself| QSelf { ty: Box::new(map_type_params(&qself.ty, params, f)), position: qself.position, ..qself.clone() }), path: map_type_params_in_path(path, params, f), }), Type::Paren(ref inner) => Type::from(TypeParen { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), ref ty => panic!("type {:?} cannot be mapped yet", ty), } } fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path where F: FnMut(&Ident) -> Type, { Path { leading_colon: path.leading_colon, segments: path .segments .iter() .map(|segment| PathSegment { ident: segment.ident.clone(), arguments: match segment.arguments { PathArguments::AngleBracketed(ref data) => { PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: data .args .iter() .map(|arg| match arg { ty @ &GenericArgument::Lifetime(_) => ty.clone(), &GenericArgument::Type(ref data) => { GenericArgument::Type(map_type_params(data, params, f)) }, &GenericArgument::Binding(ref data) => { GenericArgument::Binding(Binding { ty: map_type_params(&data.ty, params, f), ..data.clone() }) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), ..data.clone() }) }, ref arg @ PathArguments::None => arg.clone(), ref parameters => panic!("parameters {:?} cannot be mapped yet", parameters), }, }) .collect(), } } fn path_to_ident(path: &Path) -> Option<&Ident> { match *path { Path { leading_colon: None, ref segments, } if segments.len() == 1 => { if segments[0].arguments.is_empty() { Some(&segments[0].ident) } else { None } }, _ => None, } } pub fn parse_field_attrs<A>(field: &Field) -> A where A: FromField, { match A::from_field(field) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse field attributes: {}", e), } } pub fn parse_input_attrs<A>(input: &DeriveInput) -> A where A: FromDeriveInput, { match A::from_derive_input(input) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse input attributes: {}", e), } } pub fn parse_variant_attrs_from_ast<A>(variant: &VariantAst) -> A where A: FromVariant, { let v = Variant { ident: variant.ident.clone(), attrs: variant.attrs.to_vec(), fields: variant.fields.clone(), discriminant: variant.discriminant.clone(), }; parse_variant_attrs(&v) } pub fn parse_variant_attrs<A>(variant: &Variant) -> A where A: FromVariant, { match A::from_variant(variant) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse variant attributes: {}", e), } } pub fn ref_pattern<'a>( variant: &'a VariantInfo, prefix: &str, ) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bind_with(|_| BindStyle::Ref); 
v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); (v.pat(), v.bindings().to_vec()) } pub fn value<'a>(variant: &'a VariantInfo, prefix: &str) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); v.bind_with(|_| BindStyle::Move); (v.pat(), v.bindings().to_vec()) } /// Transforms "FooBar" to "foo-bar". /// /// If the first Camel segment is "Moz", "Webkit", or "Servo", the result string /// is prepended with "-". pub fn to_css_identifier(mut camel_case: &str) -> String { camel_case = camel_case.trim_end_matches('_'); let mut first = true; let mut result = String::with_capacity(camel_case.len()); while let Some(segment) = split_camel_segment(&mut camel_case) { if first { match segment { "Moz" | "Webkit" | "Servo" => first = false, _ => {}, } } if !first { result.push_str("-"); } first = false; result.push_str(&segment.to_lowercase()); } result } /// Given "FooBar", returns "Foo" and sets `camel_case` to "Bar". fn split_camel_segment<'input>(camel_case: &mut &'input str) -> Option<&'input str> { let index = match camel_case.chars().next() { None => return None, Some(ch) => ch.len_utf8(), }; let end_position = camel_case[index..] .find(char::is_uppercase) .map_or(camel_case.len(), |pos| index + pos); let result = &camel_case[..end_position]; *camel_case = &camel_case[end_position..]; Some(result) }
map_type_params
identifier_name
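The masked identifier here is `map_type_params`, a recursive substitution over `syn` type ASTs. Stripped of the `syn` details, the shape of the algorithm is a fold that replaces parameter leaves via a callback. A minimal self-contained model (types and names invented for illustration):

// --- begin illustrative sketch (not a dataset record) ---
#[derive(Debug, Clone)]
enum Ty {
    Param(String),  // a generic parameter such as `T`
    Slice(Box<Ty>), // `[T]`
    Tuple(Vec<Ty>), // `(A, B, ...)`
}

fn map_params(ty: &Ty, f: &mut impl FnMut(&str) -> Ty) -> Ty {
    match ty {
        // Leaf: let the callback decide what the parameter becomes.
        Ty::Param(name) => f(name),
        // Containers: rebuild the node around recursively mapped children.
        Ty::Slice(inner) => Ty::Slice(Box::new(map_params(inner, f))),
        Ty::Tuple(elems) => {
            let mut out = Vec::new();
            for t in elems {
                out.push(map_params(t, f));
            }
            Ty::Tuple(out)
        }
    }
}

fn main() {
    let ty = Ty::Tuple(vec![
        Ty::Param("T".into()),
        Ty::Slice(Box::new(Ty::Param("T".into()))),
    ]);
    // Analogous to rewriting `T` into `<T as Trait>::Output` in the sample.
    let mapped = map_params(&ty, &mut |name| Ty::Param(format!("<{name} as Trait>::Output")));
    println!("{mapped:?}");
}
// --- end sketch ---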
cg.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use darling::{FromDeriveInput, FromField, FromVariant}; use proc_macro2::{Span, TokenStream}; use quote::TokenStreamExt; use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field}; use syn::{GenericArgument, GenericParam, Ident, Path}; use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray}; use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple}; use syn::{Variant, WherePredicate}; use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo}; /// Given an input type which has some where clauses already, like: /// /// struct InputType<T> /// where /// T: Zero, /// { /// ... /// } /// /// Add the necessary `where` clauses so that the output type of a trait /// fulfils them. /// /// For example: /// /// <T as ToComputedValue>::ComputedValue: Zero, /// /// This needs to run before adding other bounds to the type parameters. pub fn propagate_clauses_to_output_type( where_clause: &mut Option<syn::WhereClause>, generics: &syn::Generics, trait_path: Path, trait_output: Ident, ) { let where_clause = match *where_clause { Some(ref mut clause) => clause, None => return, }; let mut extra_bounds = vec![]; for pred in &where_clause.predicates { let ty = match *pred { syn::WherePredicate::Type(ref ty) => ty, ref predicate => panic!("Unhanded complex where predicate: {:?}", predicate), }; let path = match ty.bounded_ty { syn::Type::Path(ref p) => &p.path, ref ty => panic!("Unhanded complex where type: {:?}", ty), }; assert!( ty.lifetimes.is_none(), "Unhanded complex lifetime bound: {:?}", ty, ); let ident = match path_to_ident(path) { Some(i) => i, None => panic!("Unhanded complex where type path: {:?}", path), }; if generics.type_params().any(|param| param.ident == *ident) { extra_bounds.push(ty.clone()); } } for bound in extra_bounds { let ty = bound.bounded_ty; let bounds = bound.bounds; where_clause .predicates .push(parse_quote!(<#ty as #trait_path>::#trait_output: #bounds)) } } pub fn add_predicate(where_clause: &mut Option<syn::WhereClause>, pred: WherePredicate) { where_clause .get_or_insert(parse_quote!(where)) .predicates .push(pred); } pub fn fmap_match<F>(input: &DeriveInput, bind_style: BindStyle, mut f: F) -> TokenStream where F: FnMut(BindingInfo) -> TokenStream, { let mut s = synstructure::Structure::new(input); s.variants_mut().iter_mut().for_each(|v| { v.bind_with(|_| bind_style); }); s.each_variant(|variant| { let (mapped, mapped_fields) = value(variant, "mapped"); let fields_pairs = variant.bindings().iter().zip(mapped_fields); let mut computations = quote!(); computations.append_all(fields_pairs.map(|(field, mapped_field)| { let expr = f(field.clone()); quote! 
{ let #mapped_field = #expr; } })); computations.append_all(mapped); Some(computations) }) } pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: Ident) -> Path { let segment = PathSegment { ident: input.ident.clone(), arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: input .generics .params .iter() .map(|arg| match arg { &GenericParam::Lifetime(ref data) => { GenericArgument::Lifetime(data.lifetime.clone()) }, &GenericParam::Type(ref data) => { let ident = &data.ident; GenericArgument::Type(parse_quote!(<#ident as #trait_path>::#trait_output)) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), colon2_token: Default::default(), gt_token: Default::default(), lt_token: Default::default(), }), }; segment.into() } pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type where F: FnMut(&Ident) -> Type, { match *ty { Type::Slice(ref inner) => Type::from(TypeSlice { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), Type::Array(ref inner) => { //ref ty, ref expr) => { Type::from(TypeArray { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }) }, ref ty @ Type::Never(_) => ty.clone(), Type::Tuple(ref inner) => Type::from(TypeTuple { elems: inner .elems .iter() .map(|ty| map_type_params(&ty, params, f)) .collect(), ..inner.clone() }), Type::Path(TypePath { qself: None, ref path, }) => { if let Some(ident) = path_to_ident(path) { if params.iter().any(|ref param| &param.ident == ident)
} Type::from(TypePath { qself: None, path: map_type_params_in_path(path, params, f), }) }, Type::Path(TypePath { ref qself, ref path, }) => Type::from(TypePath { qself: qself.as_ref().map(|qself| QSelf { ty: Box::new(map_type_params(&qself.ty, params, f)), position: qself.position, ..qself.clone() }), path: map_type_params_in_path(path, params, f), }), Type::Paren(ref inner) => Type::from(TypeParen { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), ref ty => panic!("type {:?} cannot be mapped yet", ty), } } fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path where F: FnMut(&Ident) -> Type, { Path { leading_colon: path.leading_colon, segments: path .segments .iter() .map(|segment| PathSegment { ident: segment.ident.clone(), arguments: match segment.arguments { PathArguments::AngleBracketed(ref data) => { PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: data .args .iter() .map(|arg| match arg { ty @ &GenericArgument::Lifetime(_) => ty.clone(), &GenericArgument::Type(ref data) => { GenericArgument::Type(map_type_params(data, params, f)) }, &GenericArgument::Binding(ref data) => { GenericArgument::Binding(Binding { ty: map_type_params(&data.ty, params, f), ..data.clone() }) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), ..data.clone() }) }, ref arg @ PathArguments::None => arg.clone(), ref parameters => panic!("parameters {:?} cannot be mapped yet", parameters), }, }) .collect(), } } fn path_to_ident(path: &Path) -> Option<&Ident> { match *path { Path { leading_colon: None, ref segments, } if segments.len() == 1 => { if segments[0].arguments.is_empty() { Some(&segments[0].ident) } else { None } }, _ => None, } } pub fn parse_field_attrs<A>(field: &Field) -> A where A: FromField, { match A::from_field(field) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse field attributes: {}", e), } } pub fn parse_input_attrs<A>(input: &DeriveInput) -> A where A: FromDeriveInput, { match A::from_derive_input(input) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse input attributes: {}", e), } } pub fn parse_variant_attrs_from_ast<A>(variant: &VariantAst) -> A where A: FromVariant, { let v = Variant { ident: variant.ident.clone(), attrs: variant.attrs.to_vec(), fields: variant.fields.clone(), discriminant: variant.discriminant.clone(), }; parse_variant_attrs(&v) } pub fn parse_variant_attrs<A>(variant: &Variant) -> A where A: FromVariant, { match A::from_variant(variant) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse variant attributes: {}", e), } } pub fn ref_pattern<'a>( variant: &'a VariantInfo, prefix: &str, ) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bind_with(|_| BindStyle::Ref); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); (v.pat(), v.bindings().to_vec()) } pub fn value<'a>(variant: &'a VariantInfo, prefix: &str) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); v.bind_with(|_| BindStyle::Move); (v.pat(), v.bindings().to_vec()) } /// Transforms "FooBar" to "foo-bar". /// /// If the first Camel segment is "Moz", "Webkit", or "Servo", the result string /// is prepended with "-". 
pub fn to_css_identifier(mut camel_case: &str) -> String { camel_case = camel_case.trim_end_matches('_'); let mut first = true; let mut result = String::with_capacity(camel_case.len()); while let Some(segment) = split_camel_segment(&mut camel_case) { if first { match segment { "Moz" | "Webkit" | "Servo" => first = false, _ => {}, } } if !first { result.push_str("-"); } first = false; result.push_str(&segment.to_lowercase()); } result } /// Given "FooBar", returns "Foo" and sets `camel_case` to "Bar". fn split_camel_segment<'input>(camel_case: &mut &'input str) -> Option<&'input str> { let index = match camel_case.chars().next() { None => return None, Some(ch) => ch.len_utf8(), }; let end_position = camel_case[index..] .find(char::is_uppercase) .map_or(camel_case.len(), |pos| index + pos); let result = &camel_case[..end_position]; *camel_case = &camel_case[end_position..]; Some(result) }
{ return f(ident); }
conditional_block
cg.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use darling::{FromDeriveInput, FromField, FromVariant}; use proc_macro2::{Span, TokenStream}; use quote::TokenStreamExt; use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field}; use syn::{GenericArgument, GenericParam, Ident, Path}; use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray}; use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple}; use syn::{Variant, WherePredicate}; use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo}; /// Given an input type which has some where clauses already, like: /// /// struct InputType<T> /// where /// T: Zero, /// { /// ... /// } /// /// Add the necessary `where` clauses so that the output type of a trait /// fulfils them. /// /// For example: /// /// <T as ToComputedValue>::ComputedValue: Zero, /// /// This needs to run before adding other bounds to the type parameters. pub fn propagate_clauses_to_output_type( where_clause: &mut Option<syn::WhereClause>, generics: &syn::Generics, trait_path: Path, trait_output: Ident, ) { let where_clause = match *where_clause { Some(ref mut clause) => clause, None => return, }; let mut extra_bounds = vec![]; for pred in &where_clause.predicates { let ty = match *pred { syn::WherePredicate::Type(ref ty) => ty, ref predicate => panic!("Unhanded complex where predicate: {:?}", predicate), }; let path = match ty.bounded_ty { syn::Type::Path(ref p) => &p.path, ref ty => panic!("Unhanded complex where type: {:?}", ty), };
ty, ); let ident = match path_to_ident(path) { Some(i) => i, None => panic!("Unhanded complex where type path: {:?}", path), }; if generics.type_params().any(|param| param.ident == *ident) { extra_bounds.push(ty.clone()); } } for bound in extra_bounds { let ty = bound.bounded_ty; let bounds = bound.bounds; where_clause .predicates .push(parse_quote!(<#ty as #trait_path>::#trait_output: #bounds)) } } pub fn add_predicate(where_clause: &mut Option<syn::WhereClause>, pred: WherePredicate) { where_clause .get_or_insert(parse_quote!(where)) .predicates .push(pred); } pub fn fmap_match<F>(input: &DeriveInput, bind_style: BindStyle, mut f: F) -> TokenStream where F: FnMut(BindingInfo) -> TokenStream, { let mut s = synstructure::Structure::new(input); s.variants_mut().iter_mut().for_each(|v| { v.bind_with(|_| bind_style); }); s.each_variant(|variant| { let (mapped, mapped_fields) = value(variant, "mapped"); let fields_pairs = variant.bindings().iter().zip(mapped_fields); let mut computations = quote!(); computations.append_all(fields_pairs.map(|(field, mapped_field)| { let expr = f(field.clone()); quote! { let #mapped_field = #expr; } })); computations.append_all(mapped); Some(computations) }) } pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: Ident) -> Path { let segment = PathSegment { ident: input.ident.clone(), arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: input .generics .params .iter() .map(|arg| match arg { &GenericParam::Lifetime(ref data) => { GenericArgument::Lifetime(data.lifetime.clone()) }, &GenericParam::Type(ref data) => { let ident = &data.ident; GenericArgument::Type(parse_quote!(<#ident as #trait_path>::#trait_output)) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), colon2_token: Default::default(), gt_token: Default::default(), lt_token: Default::default(), }), }; segment.into() } pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type where F: FnMut(&Ident) -> Type, { match *ty { Type::Slice(ref inner) => Type::from(TypeSlice { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), Type::Array(ref inner) => { //ref ty, ref expr) => { Type::from(TypeArray { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }) }, ref ty @ Type::Never(_) => ty.clone(), Type::Tuple(ref inner) => Type::from(TypeTuple { elems: inner .elems .iter() .map(|ty| map_type_params(&ty, params, f)) .collect(), ..inner.clone() }), Type::Path(TypePath { qself: None, ref path, }) => { if let Some(ident) = path_to_ident(path) { if params.iter().any(|ref param| &param.ident == ident) { return f(ident); } } Type::from(TypePath { qself: None, path: map_type_params_in_path(path, params, f), }) }, Type::Path(TypePath { ref qself, ref path, }) => Type::from(TypePath { qself: qself.as_ref().map(|qself| QSelf { ty: Box::new(map_type_params(&qself.ty, params, f)), position: qself.position, ..qself.clone() }), path: map_type_params_in_path(path, params, f), }), Type::Paren(ref inner) => Type::from(TypeParen { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), ref ty => panic!("type {:?} cannot be mapped yet", ty), } } fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path where F: FnMut(&Ident) -> Type, { Path { leading_colon: path.leading_colon, segments: path .segments .iter() .map(|segment| PathSegment { ident: segment.ident.clone(), arguments: match segment.arguments { PathArguments::AngleBracketed(ref 
data) => { PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: data .args .iter() .map(|arg| match arg { ty @ &GenericArgument::Lifetime(_) => ty.clone(), &GenericArgument::Type(ref data) => { GenericArgument::Type(map_type_params(data, params, f)) }, &GenericArgument::Binding(ref data) => { GenericArgument::Binding(Binding { ty: map_type_params(&data.ty, params, f), ..data.clone() }) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), ..data.clone() }) }, ref arg @ PathArguments::None => arg.clone(), ref parameters => panic!("parameters {:?} cannot be mapped yet", parameters), }, }) .collect(), } } fn path_to_ident(path: &Path) -> Option<&Ident> { match *path { Path { leading_colon: None, ref segments, } if segments.len() == 1 => { if segments[0].arguments.is_empty() { Some(&segments[0].ident) } else { None } }, _ => None, } } pub fn parse_field_attrs<A>(field: &Field) -> A where A: FromField, { match A::from_field(field) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse field attributes: {}", e), } } pub fn parse_input_attrs<A>(input: &DeriveInput) -> A where A: FromDeriveInput, { match A::from_derive_input(input) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse input attributes: {}", e), } } pub fn parse_variant_attrs_from_ast<A>(variant: &VariantAst) -> A where A: FromVariant, { let v = Variant { ident: variant.ident.clone(), attrs: variant.attrs.to_vec(), fields: variant.fields.clone(), discriminant: variant.discriminant.clone(), }; parse_variant_attrs(&v) } pub fn parse_variant_attrs<A>(variant: &Variant) -> A where A: FromVariant, { match A::from_variant(variant) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse variant attributes: {}", e), } } pub fn ref_pattern<'a>( variant: &'a VariantInfo, prefix: &str, ) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bind_with(|_| BindStyle::Ref); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); (v.pat(), v.bindings().to_vec()) } pub fn value<'a>(variant: &'a VariantInfo, prefix: &str) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); v.bind_with(|_| BindStyle::Move); (v.pat(), v.bindings().to_vec()) } /// Transforms "FooBar" to "foo-bar". /// /// If the first Camel segment is "Moz", "Webkit", or "Servo", the result string /// is prepended with "-". pub fn to_css_identifier(mut camel_case: &str) -> String { camel_case = camel_case.trim_end_matches('_'); let mut first = true; let mut result = String::with_capacity(camel_case.len()); while let Some(segment) = split_camel_segment(&mut camel_case) { if first { match segment { "Moz" | "Webkit" | "Servo" => first = false, _ => {}, } } if !first { result.push_str("-"); } first = false; result.push_str(&segment.to_lowercase()); } result } /// Given "FooBar", returns "Foo" and sets `camel_case` to "Bar". fn split_camel_segment<'input>(camel_case: &mut &'input str) -> Option<&'input str> { let index = match camel_case.chars().next() { None => return None, Some(ch) => ch.len_utf8(), }; let end_position = camel_case[index..] .find(char::is_uppercase) .map_or(camel_case.len(), |pos| index + pos); let result = &camel_case[..end_position]; *camel_case = &camel_case[end_position..]; Some(result) }
assert!( ty.lifetimes.is_none(), "Unhanded complex lifetime bound: {:?}",
random_line_split
cg.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use darling::{FromDeriveInput, FromField, FromVariant}; use proc_macro2::{Span, TokenStream}; use quote::TokenStreamExt; use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field}; use syn::{GenericArgument, GenericParam, Ident, Path}; use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray}; use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple}; use syn::{Variant, WherePredicate}; use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo}; /// Given an input type which has some where clauses already, like: /// /// struct InputType<T> /// where /// T: Zero, /// { /// ... /// } /// /// Add the necessary `where` clauses so that the output type of a trait /// fulfils them. /// /// For example: /// /// <T as ToComputedValue>::ComputedValue: Zero, /// /// This needs to run before adding other bounds to the type parameters. pub fn propagate_clauses_to_output_type( where_clause: &mut Option<syn::WhereClause>, generics: &syn::Generics, trait_path: Path, trait_output: Ident, ) { let where_clause = match *where_clause { Some(ref mut clause) => clause, None => return, }; let mut extra_bounds = vec![]; for pred in &where_clause.predicates { let ty = match *pred { syn::WherePredicate::Type(ref ty) => ty, ref predicate => panic!("Unhanded complex where predicate: {:?}", predicate), }; let path = match ty.bounded_ty { syn::Type::Path(ref p) => &p.path, ref ty => panic!("Unhanded complex where type: {:?}", ty), }; assert!( ty.lifetimes.is_none(), "Unhanded complex lifetime bound: {:?}", ty, ); let ident = match path_to_ident(path) { Some(i) => i, None => panic!("Unhanded complex where type path: {:?}", path), }; if generics.type_params().any(|param| param.ident == *ident) { extra_bounds.push(ty.clone()); } } for bound in extra_bounds { let ty = bound.bounded_ty; let bounds = bound.bounds; where_clause .predicates .push(parse_quote!(<#ty as #trait_path>::#trait_output: #bounds)) } } pub fn add_predicate(where_clause: &mut Option<syn::WhereClause>, pred: WherePredicate) { where_clause .get_or_insert(parse_quote!(where)) .predicates .push(pred); } pub fn fmap_match<F>(input: &DeriveInput, bind_style: BindStyle, mut f: F) -> TokenStream where F: FnMut(BindingInfo) -> TokenStream, { let mut s = synstructure::Structure::new(input); s.variants_mut().iter_mut().for_each(|v| { v.bind_with(|_| bind_style); }); s.each_variant(|variant| { let (mapped, mapped_fields) = value(variant, "mapped"); let fields_pairs = variant.bindings().iter().zip(mapped_fields); let mut computations = quote!(); computations.append_all(fields_pairs.map(|(field, mapped_field)| { let expr = f(field.clone()); quote! 
{ let #mapped_field = #expr; } })); computations.append_all(mapped); Some(computations) }) } pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: Ident) -> Path { let segment = PathSegment { ident: input.ident.clone(), arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: input .generics .params .iter() .map(|arg| match arg { &GenericParam::Lifetime(ref data) => { GenericArgument::Lifetime(data.lifetime.clone()) }, &GenericParam::Type(ref data) => { let ident = &data.ident; GenericArgument::Type(parse_quote!(<#ident as #trait_path>::#trait_output)) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), colon2_token: Default::default(), gt_token: Default::default(), lt_token: Default::default(), }), }; segment.into() } pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type where F: FnMut(&Ident) -> Type, { match *ty { Type::Slice(ref inner) => Type::from(TypeSlice { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), Type::Array(ref inner) => { //ref ty, ref expr) => { Type::from(TypeArray { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }) }, ref ty @ Type::Never(_) => ty.clone(), Type::Tuple(ref inner) => Type::from(TypeTuple { elems: inner .elems .iter() .map(|ty| map_type_params(&ty, params, f)) .collect(), ..inner.clone() }), Type::Path(TypePath { qself: None, ref path, }) => { if let Some(ident) = path_to_ident(path) { if params.iter().any(|ref param| &param.ident == ident) { return f(ident); } } Type::from(TypePath { qself: None, path: map_type_params_in_path(path, params, f), }) }, Type::Path(TypePath { ref qself, ref path, }) => Type::from(TypePath { qself: qself.as_ref().map(|qself| QSelf { ty: Box::new(map_type_params(&qself.ty, params, f)), position: qself.position, ..qself.clone() }), path: map_type_params_in_path(path, params, f), }), Type::Paren(ref inner) => Type::from(TypeParen { elem: Box::new(map_type_params(&inner.elem, params, f)), ..inner.clone() }), ref ty => panic!("type {:?} cannot be mapped yet", ty), } } fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path where F: FnMut(&Ident) -> Type, { Path { leading_colon: path.leading_colon, segments: path .segments .iter() .map(|segment| PathSegment { ident: segment.ident.clone(), arguments: match segment.arguments { PathArguments::AngleBracketed(ref data) => { PathArguments::AngleBracketed(AngleBracketedGenericArguments { args: data .args .iter() .map(|arg| match arg { ty @ &GenericArgument::Lifetime(_) => ty.clone(), &GenericArgument::Type(ref data) => { GenericArgument::Type(map_type_params(data, params, f)) }, &GenericArgument::Binding(ref data) => { GenericArgument::Binding(Binding { ty: map_type_params(&data.ty, params, f), ..data.clone() }) }, ref arg => panic!("arguments {:?} cannot be mapped yet", arg), }) .collect(), ..data.clone() }) }, ref arg @ PathArguments::None => arg.clone(), ref parameters => panic!("parameters {:?} cannot be mapped yet", parameters), }, }) .collect(), } } fn path_to_ident(path: &Path) -> Option<&Ident> { match *path { Path { leading_colon: None, ref segments, } if segments.len() == 1 => { if segments[0].arguments.is_empty() { Some(&segments[0].ident) } else { None } }, _ => None, } } pub fn parse_field_attrs<A>(field: &Field) -> A where A: FromField, { match A::from_field(field) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse field attributes: {}", e), } } pub fn parse_input_attrs<A>(input: 
&DeriveInput) -> A where A: FromDeriveInput, { match A::from_derive_input(input) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse input attributes: {}", e), } } pub fn parse_variant_attrs_from_ast<A>(variant: &VariantAst) -> A where A: FromVariant,
pub fn parse_variant_attrs<A>(variant: &Variant) -> A where A: FromVariant, { match A::from_variant(variant) { Ok(attrs) => attrs, Err(e) => panic!("failed to parse variant attributes: {}", e), } } pub fn ref_pattern<'a>( variant: &'a VariantInfo, prefix: &str, ) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bind_with(|_| BindStyle::Ref); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); (v.pat(), v.bindings().to_vec()) } pub fn value<'a>(variant: &'a VariantInfo, prefix: &str) -> (TokenStream, Vec<BindingInfo<'a>>) { let mut v = variant.clone(); v.bindings_mut().iter_mut().for_each(|b| { b.binding = Ident::new(&format!("{}_{}", b.binding, prefix), Span::call_site()) }); v.bind_with(|_| BindStyle::Move); (v.pat(), v.bindings().to_vec()) } /// Transforms "FooBar" to "foo-bar". /// /// If the first Camel segment is "Moz", "Webkit", or "Servo", the result string /// is prepended with "-". pub fn to_css_identifier(mut camel_case: &str) -> String { camel_case = camel_case.trim_end_matches('_'); let mut first = true; let mut result = String::with_capacity(camel_case.len()); while let Some(segment) = split_camel_segment(&mut camel_case) { if first { match segment { "Moz" | "Webkit" | "Servo" => first = false, _ => {}, } } if !first { result.push_str("-"); } first = false; result.push_str(&segment.to_lowercase()); } result } /// Given "FooBar", returns "Foo" and sets `camel_case` to "Bar". fn split_camel_segment<'input>(camel_case: &mut &'input str) -> Option<&'input str> { let index = match camel_case.chars().next() { None => return None, Some(ch) => ch.len_utf8(), }; let end_position = camel_case[index..] .find(char::is_uppercase) .map_or(camel_case.len(), |pos| index + pos); let result = &camel_case[..end_position]; *camel_case = &camel_case[end_position..]; Some(result) }
{ let v = Variant { ident: variant.ident.clone(), attrs: variant.attrs.to_vec(), fields: variant.fields.clone(), discriminant: variant.discriminant.clone(), }; parse_variant_attrs(&v) }
identifier_body
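Each cg.rs record ends with the same `to_css_identifier` helper, which turns a Rust variant name like `FooBar` into the CSS keyword `foo-bar` (and, in the sample, prepends `-` when the first segment is `Moz`, `Webkit`, or `Servo`). A simplified standalone re-sketch that deliberately ignores the vendor-prefix special case, just to make the transformation concrete:

// --- begin illustrative sketch (not a dataset record) ---
// Simplified: unlike the sample, this does not prepend "-" for vendor prefixes.
fn to_css_identifier(camel: &str) -> String {
    let mut out = String::new();
    for (i, ch) in camel.trim_end_matches('_').chars().enumerate() {
        if ch.is_uppercase() {
            if i != 0 {
                out.push('-');
            }
            out.extend(ch.to_lowercase());
        } else {
            out.push(ch);
        }
    }
    out
}

fn main() {
    assert_eq!(to_css_identifier("FooBar"), "foo-bar");
    assert_eq!(to_css_identifier("BorderTopColor"), "border-top-color");
    println!("ok");
}
// --- end sketch ---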
webgl-detector.js
'use strict'; /* global THREE,Modernizr */ angular.module('artpopApp') .directive('webglDetector', function (X3) { function Detector(){ X3.apply(this,arguments); } Detector.prototype = Object.create(X3.prototype); var app = new Detector(); window.apwgl = app; app.init(); function configCamera(){ app.camera.position.z = 20; } configCamera(); function addObject(){ //onetime use only. var material = new THREE.MeshLambertMaterial({ color: 0xff0000, wireframe: true, wireframeLinewidth: 2, side: THREE.BackSide, transparent: true, opacity: 0.9, }); var inner = new THREE.Mesh( new THREE.IcosahedronGeometry( 10, 2 ), material ); app.scene.add(inner); app.updateStack.push(function(){ inner.rotation.z += 0.004; inner.rotation.x += 0.004; inner.rotation.y += 0.004; inner.material.color.offsetHSL(0.001,0.0,0); }); } addObject();
function addLight(){ var lightBack = new THREE.DirectionalLight( 0xffffff, 5, 1000 ); lightBack.position.set( 0, 0, 400 ); app.scene.add( lightBack ); app.updateStack.push(function(){ lightBack.rotation.z += 0.004; lightBack.rotation.x += 0.004; lightBack.rotation.y += 0.004; lightBack.color.offsetHSL(0.001,0.0,0); }); } addLight(); return { template: '<div class="gl-canvas-container"></div>', restrict: 'E', transclude: true, //link function is not di. link: function($scope, $element, $transclude){ var container = $element[0].querySelector('.gl-canvas-container'); if (Modernizr.webgl){ app.reconfig($scope, $element, container); }else{ container.appendChild($transclude()); } } }; }); /**/
random_line_split
webgl-detector.js
'use strict'; /* global THREE,Modernizr */ angular.module('artpopApp') .directive('webglDetector', function (X3) { function Detector(){ X3.apply(this,arguments); } Detector.prototype = Object.create(X3.prototype); var app = new Detector(); window.apwgl = app; app.init(); function configCamera(){ app.camera.position.z = 20; } configCamera(); function addObject(){ //onetime use only. var material = new THREE.MeshLambertMaterial({ color: 0xff0000, wireframe: true, wireframeLinewidth: 2, side: THREE.BackSide, transparent: true, opacity: 0.9, }); var inner = new THREE.Mesh( new THREE.IcosahedronGeometry( 10, 2 ), material ); app.scene.add(inner); app.updateStack.push(function(){ inner.rotation.z += 0.004; inner.rotation.x += 0.004; inner.rotation.y += 0.004; inner.material.color.offsetHSL(0.001,0.0,0); }); } addObject(); function addLight()
addLight(); return { template: '<div class="gl-canvas-container"></div>', restrict: 'E', transclude: true, //link function is not di. link: function($scope, $element, $transclude){ var container = $element[0].querySelector('.gl-canvas-container'); if (Modernizr.webgl){ app.reconfig($scope, $element, container); }else{ container.appendChild($transclude()); } } }; }); /**/
{ var lightBack = new THREE.DirectionalLight( 0xffffff, 5, 1000 ); lightBack.position.set( 0, 0, 400 ); app.scene.add( lightBack ); app.updateStack.push(function(){ lightBack.rotation.z += 0.004; lightBack.rotation.x += 0.004; lightBack.rotation.y += 0.004; lightBack.color.offsetHSL(0.001,0.0,0); }); }
identifier_body
webgl-detector.js
'use strict'; /* global THREE,Modernizr */ angular.module('artpopApp') .directive('webglDetector', function (X3) { function Detector(){ X3.apply(this,arguments); } Detector.prototype = Object.create(X3.prototype); var app = new Detector(); window.apwgl = app; app.init(); function configCamera(){ app.camera.position.z = 20; } configCamera(); function addObject(){ //onetime use only. var material = new THREE.MeshLambertMaterial({ color: 0xff0000, wireframe: true, wireframeLinewidth: 2, side: THREE.BackSide, transparent: true, opacity: 0.9, }); var inner = new THREE.Mesh( new THREE.IcosahedronGeometry( 10, 2 ), material ); app.scene.add(inner); app.updateStack.push(function(){ inner.rotation.z += 0.004; inner.rotation.x += 0.004; inner.rotation.y += 0.004; inner.material.color.offsetHSL(0.001,0.0,0); }); } addObject(); function addLight(){ var lightBack = new THREE.DirectionalLight( 0xffffff, 5, 1000 ); lightBack.position.set( 0, 0, 400 ); app.scene.add( lightBack ); app.updateStack.push(function(){ lightBack.rotation.z += 0.004; lightBack.rotation.x += 0.004; lightBack.rotation.y += 0.004; lightBack.color.offsetHSL(0.001,0.0,0); }); } addLight(); return { template: '<div class="gl-canvas-container"></div>', restrict: 'E', transclude: true, //link function is not di. link: function($scope, $element, $transclude){ var container = $element[0].querySelector('.gl-canvas-container'); if (Modernizr.webgl)
else{ container.appendChild($transclude()); } } }; }); /**/
{ app.reconfig($scope, $element, container); }
conditional_block
webgl-detector.js
'use strict'; /* global THREE,Modernizr */ angular.module('artpopApp') .directive('webglDetector', function (X3) { function Detector(){ X3.apply(this,arguments); } Detector.prototype = Object.create(X3.prototype); var app = new Detector(); window.apwgl = app; app.init(); function configCamera(){ app.camera.position.z = 20; } configCamera(); function
(){ //onetime use only. var material = new THREE.MeshLambertMaterial({ color: 0xff0000, wireframe: true, wireframeLinewidth: 2, side: THREE.BackSide, transparent: true, opacity: 0.9, }); var inner = new THREE.Mesh( new THREE.IcosahedronGeometry( 10, 2 ), material ); app.scene.add(inner); app.updateStack.push(function(){ inner.rotation.z += 0.004; inner.rotation.x += 0.004; inner.rotation.y += 0.004; inner.material.color.offsetHSL(0.001,0.0,0); }); } addObject(); function addLight(){ var lightBack = new THREE.DirectionalLight( 0xffffff, 5, 1000 ); lightBack.position.set( 0, 0, 400 ); app.scene.add( lightBack ); app.updateStack.push(function(){ lightBack.rotation.z += 0.004; lightBack.rotation.x += 0.004; lightBack.rotation.y += 0.004; lightBack.color.offsetHSL(0.001,0.0,0); }); } addLight(); return { template: '<div class="gl-canvas-container"></div>', restrict: 'E', transclude: true, //link function is not di. link: function($scope, $element, $transclude){ var container = $element[0].querySelector('.gl-canvas-container'); if (Modernizr.webgl){ app.reconfig($scope, $element, container); }else{ container.appendChild($transclude()); } } }; }); /**/
addObject
identifier_name
db.py
import sqlalchemy from sqlalchemy import Column, Integer, String from sqlalchemy.orm import mapper, sessionmaker import subprocess class PygrationState(object): '''Python object representing the state table''' def __init__(self, migration=None, step_id=None, step_name=None): self.migration = migration self.step_id = step_id self.step_name = step_name self.sequence = None self.add_state = None self.simdrop_state = None self.drop_state = None def __repr__(self): return "<PygrationState(%s, %s)>" % (self.migration, self.step_id) class Table(object): metadata = sqlalchemy.MetaData() engine = None pygration_state = None @classmethod def define(cls, schema=None): cls.pygration_state = sqlalchemy.Table('pygration_state', cls.metadata , Column('migration', String(length=160), primary_key=True) , Column('step_id', String(length=160), primary_key=True) , Column('step_name', String(length=160)) , Column('sequence', Integer) , Column('add_state', String(length=16)) , Column('simdrop_state', String(length=16)) , Column('drop_state', String(length=16)) , schema=schema ) class FileLoader(object): '''Object for running SQL from a file on the file system''' def __init__(self, binary, args = [], formatting_dict = {}): self._binary = binary self._args = [arg.format(filename="{filename}", **formatting_dict) for arg in args] def __call__(self, filename): args = [arg.format(filename=filename) for arg in self._args] print self._binary, args subprocess.check_call([self._binary] + args) def
(url=None, drivername=None, schema=None, username=None, password=None, host=None, port=None, database=None, query=None): """Open the DB through a SQLAlchemy engine. Returns an open session. """ if url is None and drivername is None: raise Exception("Either a url or a driver name is required to open a db connection") if url is None: url = sqlalchemy.engine.url.URL(drivername = drivername, username = username, password = password, host = host, port = port, database = database, query = query) Table.engine = sqlalchemy.create_engine(url) Table.metadata.bind = Table.engine Session = sessionmaker() Session.configure(bind=Table.engine) session = Session() Table.define(schema) mapper(PygrationState, Table.pygration_state) return session
open
identifier_name
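The two-stage str.format in FileLoader above is easy to misread: the constructor expands every key from formatting_dict while deliberately formatting '{filename}' to itself so that placeholder survives, and __call__ performs the final per-file substitution. A standalone sketch of that expansion, with made-up argument templates:

args = ["-u", "{user}", "-f", "{filename}"]
stage1 = [a.format(filename="{filename}", user="admin") for a in args]
# stage1 == ['-u', 'admin', '-f', '{filename}']  -- the filename placeholder survives
stage2 = [a.format(filename="001_add.sql") for a in stage1]
# stage2 == ['-u', 'admin', '-f', '001_add.sql']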
db.py
import sqlalchemy from sqlalchemy import Column, Integer, String from sqlalchemy.orm import mapper, sessionmaker import subprocess class PygrationState(object): '''Python object representing the state table''' def __init__(self, migration=None, step_id=None, step_name=None): self.migration = migration self.step_id = step_id self.step_name = step_name self.sequence = None self.add_state = None self.simdrop_state = None self.drop_state = None def __repr__(self): return "<PygrationState(%s, %s)>" % (self.migration, self.step_id) class Table(object): metadata = sqlalchemy.MetaData() engine = None pygration_state = None @classmethod def define(cls, schema=None):
class FileLoader(object): '''Object for running SQL from a file on the file system''' def __init__(self, binary, args = [], formatting_dict = {}): self._binary = binary self._args = [arg.format(filename="{filename}", **formatting_dict) for arg in args] def __call__(self, filename): args = [arg.format(filename=filename) for arg in self._args] print self._binary, args subprocess.check_call([self._binary] + args) def open(url=None, drivername=None, schema=None, username=None, password=None, host=None, port=None, database=None, query=None): """Open the DB through a SQLAlchemy engine. Returns an open session. """ if url is None and drivername is None: raise Exception("Either a url or a driver name is required to open a db connection") if url is None: url = sqlalchemy.engine.url.URL(drivername = drivername, username = username, password = password, host = host, port = port, database = database, query = query) Table.engine = sqlalchemy.create_engine(url) Table.metadata.bind = Table.engine Session = sessionmaker() Session.configure(bind=Table.engine) session = Session() Table.define(schema) mapper(PygrationState, Table.pygration_state) return session
cls.pygration_state = sqlalchemy.Table('pygration_state', cls.metadata , Column('migration', String(length=160), primary_key=True) , Column('step_id', String(length=160), primary_key=True) , Column('step_name', String(length=160)) , Column('sequence', Integer) , Column('add_state', String(length=16)) , Column('simdrop_state', String(length=16)) , Column('drop_state', String(length=16)) , schema=schema )
identifier_body
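Because pygration_state hangs off a class-level MetaData, the table must be defined before it can be created or mapped. A short sketch of materializing it against a throwaway engine; the module name in the import is an assumption taken from the file_name field:

import sqlalchemy
from db import Table  # assumed module name

engine = sqlalchemy.create_engine("sqlite:///:memory:")
Table.define()                     # builds the pygration_state table object
Table.metadata.create_all(engine)  # emits CREATE TABLE pygration_state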
db.py
import sqlalchemy from sqlalchemy import Column, Integer, String from sqlalchemy.orm import mapper, sessionmaker import subprocess class PygrationState(object): '''Python object representing the state table''' def __init__(self, migration=None, step_id=None, step_name=None): self.migration = migration self.step_id = step_id self.step_name = step_name self.sequence = None self.add_state = None self.simdrop_state = None self.drop_state = None
metadata = sqlalchemy.MetaData() engine = None pygration_state = None @classmethod def define(cls, schema=None): cls.pygration_state = sqlalchemy.Table('pygration_state', cls.metadata , Column('migration', String(length=160), primary_key=True) , Column('step_id', String(length=160), primary_key=True) , Column('step_name', String(length=160)) , Column('sequence', Integer) , Column('add_state', String(length=16)) , Column('simdrop_state', String(length=16)) , Column('drop_state', String(length=16)) , schema=schema ) class FileLoader(object): '''Object for running SQL from a file on the file system''' def __init__(self, binary, args = [], formatting_dict = {}): self._binary = binary self._args = [arg.format(filename="{filename}", **formatting_dict) for arg in args] def __call__(self, filename): args = [arg.format(filename=filename) for arg in self._args] print self._binary, args subprocess.check_call([self._binary] + args) def open(url=None, drivername=None, schema=None, username=None, password=None, host=None, port=None, database=None, query=None): """Open the DB through a SQLAlchemy engine. Returns an open session. """ if url is None and drivername is None: raise Exception("Either a url or a driver name is required to open a db connection") if url is None: url = sqlalchemy.engine.url.URL(drivername = drivername, username = username, password = password, host = host, port = port, database = database, query = query) Table.engine = sqlalchemy.create_engine(url) Table.metadata.bind = Table.engine Session = sessionmaker() Session.configure(bind=Table.engine) session = Session() Table.define(schema) mapper(PygrationState, Table.pygration_state) return session
def __repr__(self): return "<PygrationState(%s, %s)>" % (self.migration, self.step_id) class Table(object):
random_line_split
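After open() runs, PygrationState is mapped onto pygration_state, so rows come back as mapped objects. A hedged usage sketch; the module name db and the database file are assumptions, while open(), PygrationState and its __repr__ come from the code above:

from db import open as open_db, PygrationState  # assumed module name

session = open_db(drivername="sqlite", database="pygration.db")  # hypothetical db file
state = session.query(PygrationState).filter_by(migration="v001").first()
if state is not None:
    print(state)  # formatted by the __repr__ defined above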
db.py
import sqlalchemy from sqlalchemy import Column, Integer, String from sqlalchemy.orm import mapper, sessionmaker import subprocess class PygrationState(object): '''Python object representing the state table''' def __init__(self, migration=None, step_id=None, step_name=None): self.migration = migration self.step_id = step_id self.step_name = step_name self.sequence = None self.add_state = None self.simdrop_state = None self.drop_state = None def __repr__(self): return "<PygrationState(%s, %s)>" % (self.migration, self.step_id) class Table(object): metadata = sqlalchemy.MetaData() engine = None pygration_state = None @classmethod def define(cls, schema=None): cls.pygration_state = sqlalchemy.Table('pygration_state', cls.metadata , Column('migration', String(length=160), primary_key=True) , Column('step_id', String(length=160), primary_key=True) , Column('step_name', String(length=160)) , Column('sequence', Integer) , Column('add_state', String(length=16)) , Column('simdrop_state', String(length=16)) , Column('drop_state', String(length=16)) , schema=schema ) class FileLoader(object): '''Object for running SQL from a file on the file system''' def __init__(self, binary, args = [], formatting_dict = {}): self._binary = binary self._args = [arg.format(filename="{filename}", **formatting_dict) for arg in args] def __call__(self, filename): args = [arg.format(filename=filename) for arg in self._args] print self._binary, args subprocess.check_call([self._binary] + args) def open(url=None, drivername=None, schema=None, username=None, password=None, host=None, port=None, database=None, query=None): """Open the DB through a SQLAlchemy engine. Returns an open session. """ if url is None and drivername is None: raise Exception("Either a url or a driver name is required to open a db connection") if url is None:
Table.engine = sqlalchemy.create_engine(url) Table.metadata.bind = Table.engine Session = sessionmaker() Session.configure(bind=Table.engine) session = Session() Table.define(schema) mapper(PygrationState, Table.pygration_state) return session
url = sqlalchemy.engine.url.URL(drivername = drivername, username = username, password = password, host = host, port = port, database = database, query = query)
conditional_block
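The branch restored above only runs when no prebuilt url is supplied; it assembles one from the keyword parts. A quick sketch of what that constructor yields (placeholder credentials; note that newer SQLAlchemy releases deprecate constructing URL directly in favor of URL.create):

from sqlalchemy.engine.url import URL

url = URL(drivername="postgresql", username="pyg", password="secret",
          host="localhost", port=5432, database="state")
# str(url) renders roughly "postgresql://pyg:secret@localhost:5432/state"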
sendmanyBuilder.ts
import { BaseCoin as CoinConfig, NetworkType, StacksNetwork as BitgoStacksNetwork } from '@bitgo/statics';
import BigNum from 'bn.js';
import {
  AddressHashMode,
  addressToString,
  AddressVersion,
  bufferCVFromString,
  ClarityValue,
  FungibleConditionCode,
  listCV,
  makeStandardSTXPostCondition,
  PostCondition,
  PostConditionMode,
  standardPrincipalCV,
  tupleCV,
  uintCV,
} from '@stacks/transactions';
import { BuildTransactionError } from '../baseCoin/errors';
import { Transaction } from './transaction';
import {
  functionArgsToSendParams,
  getSTXAddressFromPubKeys,
  isValidAddress,
  isValidAmount,
  isValidMemo,
} from './utils';
import { SendParams } from './iface';
import { CONTRACT_NAME_SENDMANY, FUNCTION_NAME_SENDMANY } from './constants';
import { ContractCallPayload } from '@stacks/transactions/dist/payload';
import { AbstractContractBuilder } from './abstractContractBuilder';

export class SendmanyBuilder extends AbstractContractBuilder {
  private _sendParams: SendParams[] = [];

  constructor(_coinConfig: Readonly<CoinConfig>) {
    super(_coinConfig);
  }

  public static isValidContractCall(coinConfig: Readonly<CoinConfig>, payload: ContractCallPayload): boolean {
    return (
      (coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress ===
        addressToString(payload.contractAddress) &&
      CONTRACT_NAME_SENDMANY === payload.contractName.content &&
      FUNCTION_NAME_SENDMANY === payload.functionName.content
    );
  }

  private sendParamsToFunctionArgs = (sendParams: SendParams[]): ClarityValue[] => [
    listCV(
      sendParams.map((recipient) =>
        tupleCV({
          to: standardPrincipalCV(recipient.address),
          ustx: uintCV(recipient.amount),
          memo: bufferCVFromString(recipient.memo || ''),
        }),
      ),
    ),
  ];

  private sendParamsToPostcondition(sendParams: SendParams[]): PostCondition[] {
    const sum: BigNum = sendParams.reduce((current, next) => current.add(new BigNum(next.amount)), new BigNum(0));
    return [
      makeStandardSTXPostCondition(
        getSTXAddressFromPubKeys(
          this._fromPubKeys,
          this._coinConfig.network.type === NetworkType.MAINNET
            ? AddressVersion.MainnetMultiSig
            : AddressVersion.TestnetMultiSig,
          this._fromPubKeys.length > 1 ? AddressHashMode.SerializeP2SH : AddressHashMode.SerializeP2PKH,
          this._numberSignatures,
        ).address,
        FungibleConditionCode.Equal,
        sum,
      ),
    ];
  }

  initBuilder(tx: Transaction): void {
    super.initBuilder(tx);
    this._sendParams = functionArgsToSendParams((tx.stxTransaction.payload as ContractCallPayload).functionArgs);
  }

  /**
   * Set a transfer
   *
   * @param {SendParams} sendParams - the recipient address, amount and optional memo
   * @returns {TransactionBuilder} This transaction builder
   */
  send({ address, amount, memo }: SendParams): this {
    if (!address || !isValidAddress(address)) {
      throw new BuildTransactionError('Invalid or missing address, got: ' + address);
    }
    if (!amount || !isValidAmount(amount))
if (!!memo && !isValidMemo(memo)) { throw new BuildTransactionError('Invalid memo, got: ' + memo); } this._sendParams.push({ address, amount, memo }); return this; } /** @inheritdoc */ protected async buildImplementation(): Promise<Transaction> { this._contractAddress = (this._coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress; this._contractName = CONTRACT_NAME_SENDMANY; this._functionName = FUNCTION_NAME_SENDMANY; this._functionArgs = this.sendParamsToFunctionArgs(this._sendParams); this._postConditionMode = PostConditionMode.Deny; this._postConditions = this.sendParamsToPostcondition(this._sendParams); return await super.buildImplementation(); } }
{ throw new BuildTransactionError('Invalid or missing amount, got: ' + amount); }
conditional_block
sendmanyBuilder.ts
import { BaseCoin as CoinConfig, NetworkType, StacksNetwork as BitgoStacksNetwork } from '@bitgo/statics'; import BigNum from 'bn.js'; import { AddressHashMode, addressToString, AddressVersion, bufferCVFromString, ClarityValue, FungibleConditionCode, listCV, makeStandardSTXPostCondition, PostCondition, PostConditionMode, standardPrincipalCV, tupleCV, uintCV, } from '@stacks/transactions'; import { BuildTransactionError } from '../baseCoin/errors'; import { Transaction } from './transaction'; import { functionArgsToSendParams, getSTXAddressFromPubKeys, isValidAddress, isValidAmount, isValidMemo, } from './utils'; import { SendParams } from './iface'; import { CONTRACT_NAME_SENDMANY, FUNCTION_NAME_SENDMANY } from './constants'; import { ContractCallPayload } from '@stacks/transactions/dist/payload'; import { AbstractContractBuilder } from './abstractContractBuilder'; export class SendmanyBuilder extends AbstractContractBuilder { private _sendParams: SendParams[] = []; constructor(_coinConfig: Readonly<CoinConfig>)
public static isValidContractCall(coinConfig: Readonly<CoinConfig>, payload: ContractCallPayload): boolean {
    return (
      (coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress ===
        addressToString(payload.contractAddress) &&
      CONTRACT_NAME_SENDMANY === payload.contractName.content &&
      FUNCTION_NAME_SENDMANY === payload.functionName.content
    );
  }

  private sendParamsToFunctionArgs = (sendParams: SendParams[]): ClarityValue[] => [
    listCV(
      sendParams.map((recipient) =>
        tupleCV({
          to: standardPrincipalCV(recipient.address),
          ustx: uintCV(recipient.amount),
          memo: bufferCVFromString(recipient.memo || ''),
        }),
      ),
    ),
  ];

  private sendParamsToPostcondition(sendParams: SendParams[]): PostCondition[] {
    const sum: BigNum = sendParams.reduce((current, next) => current.add(new BigNum(next.amount)), new BigNum(0));
    return [
      makeStandardSTXPostCondition(
        getSTXAddressFromPubKeys(
          this._fromPubKeys,
          this._coinConfig.network.type === NetworkType.MAINNET
            ? AddressVersion.MainnetMultiSig
            : AddressVersion.TestnetMultiSig,
          this._fromPubKeys.length > 1 ? AddressHashMode.SerializeP2SH : AddressHashMode.SerializeP2PKH,
          this._numberSignatures,
        ).address,
        FungibleConditionCode.Equal,
        sum,
      ),
    ];
  }

  initBuilder(tx: Transaction): void {
    super.initBuilder(tx);
    this._sendParams = functionArgsToSendParams((tx.stxTransaction.payload as ContractCallPayload).functionArgs);
  }

  /**
   * Set a transfer
   *
   * @param {SendParams} sendParams - the recipient address, amount and optional memo
   * @returns {TransactionBuilder} This transaction builder
   */
  send({ address, amount, memo }: SendParams): this {
    if (!address || !isValidAddress(address)) {
      throw new BuildTransactionError('Invalid or missing address, got: ' + address);
    }
    if (!amount || !isValidAmount(amount)) {
      throw new BuildTransactionError('Invalid or missing amount, got: ' + amount);
    }
    if (!!memo && !isValidMemo(memo)) {
      throw new BuildTransactionError('Invalid memo, got: ' + memo);
    }
    this._sendParams.push({ address, amount, memo });
    return this;
  }

  /** @inheritdoc */
  protected async buildImplementation(): Promise<Transaction> {
    this._contractAddress = (this._coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress;
    this._contractName = CONTRACT_NAME_SENDMANY;
    this._functionName = FUNCTION_NAME_SENDMANY;
    this._functionArgs = this.sendParamsToFunctionArgs(this._sendParams);
    this._postConditionMode = PostConditionMode.Deny;
    this._postConditions = this.sendParamsToPostcondition(this._sendParams);
    return await super.buildImplementation();
  }
}
{ super(_coinConfig); }
identifier_body
sendmanyBuilder.ts
import { BaseCoin as CoinConfig, NetworkType, StacksNetwork as BitgoStacksNetwork } from '@bitgo/statics'; import BigNum from 'bn.js'; import { AddressHashMode, addressToString, AddressVersion, bufferCVFromString, ClarityValue, FungibleConditionCode, listCV, makeStandardSTXPostCondition, PostCondition, PostConditionMode, standardPrincipalCV, tupleCV, uintCV, } from '@stacks/transactions'; import { BuildTransactionError } from '../baseCoin/errors'; import { Transaction } from './transaction'; import { functionArgsToSendParams, getSTXAddressFromPubKeys, isValidAddress, isValidAmount, isValidMemo, } from './utils'; import { SendParams } from './iface'; import { CONTRACT_NAME_SENDMANY, FUNCTION_NAME_SENDMANY } from './constants'; import { ContractCallPayload } from '@stacks/transactions/dist/payload'; import { AbstractContractBuilder } from './abstractContractBuilder'; export class SendmanyBuilder extends AbstractContractBuilder { private _sendParams: SendParams[] = []; constructor(_coinConfig: Readonly<CoinConfig>) { super(_coinConfig); }
(coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress ===
        addressToString(payload.contractAddress) &&
      CONTRACT_NAME_SENDMANY === payload.contractName.content &&
      FUNCTION_NAME_SENDMANY === payload.functionName.content
    );
  }

  private sendParamsToFunctionArgs = (sendParams: SendParams[]): ClarityValue[] => [
    listCV(
      sendParams.map((recipient) =>
        tupleCV({
          to: standardPrincipalCV(recipient.address),
          ustx: uintCV(recipient.amount),
          memo: bufferCVFromString(recipient.memo || ''),
        }),
      ),
    ),
  ];

  private sendParamsToPostcondition(sendParams: SendParams[]): PostCondition[] {
    const sum: BigNum = sendParams.reduce((current, next) => current.add(new BigNum(next.amount)), new BigNum(0));
    return [
      makeStandardSTXPostCondition(
        getSTXAddressFromPubKeys(
          this._fromPubKeys,
          this._coinConfig.network.type === NetworkType.MAINNET
            ? AddressVersion.MainnetMultiSig
            : AddressVersion.TestnetMultiSig,
          this._fromPubKeys.length > 1 ? AddressHashMode.SerializeP2SH : AddressHashMode.SerializeP2PKH,
          this._numberSignatures,
        ).address,
        FungibleConditionCode.Equal,
        sum,
      ),
    ];
  }

  initBuilder(tx: Transaction): void {
    super.initBuilder(tx);
    this._sendParams = functionArgsToSendParams((tx.stxTransaction.payload as ContractCallPayload).functionArgs);
  }

  /**
   * Set a transfer
   *
   * @param {SendParams} sendParams - the recipient address, amount and optional memo
   * @returns {TransactionBuilder} This transaction builder
   */
  send({ address, amount, memo }: SendParams): this {
    if (!address || !isValidAddress(address)) {
      throw new BuildTransactionError('Invalid or missing address, got: ' + address);
    }
    if (!amount || !isValidAmount(amount)) {
      throw new BuildTransactionError('Invalid or missing amount, got: ' + amount);
    }
    if (!!memo && !isValidMemo(memo)) {
      throw new BuildTransactionError('Invalid memo, got: ' + memo);
    }
    this._sendParams.push({ address, amount, memo });
    return this;
  }

  /** @inheritdoc */
  protected async buildImplementation(): Promise<Transaction> {
    this._contractAddress = (this._coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress;
    this._contractName = CONTRACT_NAME_SENDMANY;
    this._functionName = FUNCTION_NAME_SENDMANY;
    this._functionArgs = this.sendParamsToFunctionArgs(this._sendParams);
    this._postConditionMode = PostConditionMode.Deny;
    this._postConditions = this.sendParamsToPostcondition(this._sendParams);
    return await super.buildImplementation();
  }
}
public static isValidContractCall(coinConfig: Readonly<CoinConfig>, payload: ContractCallPayload): boolean { return (
random_line_split
sendmanyBuilder.ts
import { BaseCoin as CoinConfig, NetworkType, StacksNetwork as BitgoStacksNetwork } from '@bitgo/statics'; import BigNum from 'bn.js'; import { AddressHashMode, addressToString, AddressVersion, bufferCVFromString, ClarityValue, FungibleConditionCode, listCV, makeStandardSTXPostCondition, PostCondition, PostConditionMode, standardPrincipalCV, tupleCV, uintCV, } from '@stacks/transactions'; import { BuildTransactionError } from '../baseCoin/errors'; import { Transaction } from './transaction'; import { functionArgsToSendParams, getSTXAddressFromPubKeys, isValidAddress, isValidAmount, isValidMemo, } from './utils'; import { SendParams } from './iface'; import { CONTRACT_NAME_SENDMANY, FUNCTION_NAME_SENDMANY } from './constants'; import { ContractCallPayload } from '@stacks/transactions/dist/payload'; import { AbstractContractBuilder } from './abstractContractBuilder'; export class
extends AbstractContractBuilder {
  private _sendParams: SendParams[] = [];

  constructor(_coinConfig: Readonly<CoinConfig>) {
    super(_coinConfig);
  }

  public static isValidContractCall(coinConfig: Readonly<CoinConfig>, payload: ContractCallPayload): boolean {
    return (
      (coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress ===
        addressToString(payload.contractAddress) &&
      CONTRACT_NAME_SENDMANY === payload.contractName.content &&
      FUNCTION_NAME_SENDMANY === payload.functionName.content
    );
  }

  private sendParamsToFunctionArgs = (sendParams: SendParams[]): ClarityValue[] => [
    listCV(
      sendParams.map((recipient) =>
        tupleCV({
          to: standardPrincipalCV(recipient.address),
          ustx: uintCV(recipient.amount),
          memo: bufferCVFromString(recipient.memo || ''),
        }),
      ),
    ),
  ];

  private sendParamsToPostcondition(sendParams: SendParams[]): PostCondition[] {
    const sum: BigNum = sendParams.reduce((current, next) => current.add(new BigNum(next.amount)), new BigNum(0));
    return [
      makeStandardSTXPostCondition(
        getSTXAddressFromPubKeys(
          this._fromPubKeys,
          this._coinConfig.network.type === NetworkType.MAINNET
            ? AddressVersion.MainnetMultiSig
            : AddressVersion.TestnetMultiSig,
          this._fromPubKeys.length > 1 ? AddressHashMode.SerializeP2SH : AddressHashMode.SerializeP2PKH,
          this._numberSignatures,
        ).address,
        FungibleConditionCode.Equal,
        sum,
      ),
    ];
  }

  initBuilder(tx: Transaction): void {
    super.initBuilder(tx);
    this._sendParams = functionArgsToSendParams((tx.stxTransaction.payload as ContractCallPayload).functionArgs);
  }

  /**
   * Set a transfer
   *
   * @param {SendParams} sendParams - the recipient address, amount and optional memo
   * @returns {TransactionBuilder} This transaction builder
   */
  send({ address, amount, memo }: SendParams): this {
    if (!address || !isValidAddress(address)) {
      throw new BuildTransactionError('Invalid or missing address, got: ' + address);
    }
    if (!amount || !isValidAmount(amount)) {
      throw new BuildTransactionError('Invalid or missing amount, got: ' + amount);
    }
    if (!!memo && !isValidMemo(memo)) {
      throw new BuildTransactionError('Invalid memo, got: ' + memo);
    }
    this._sendParams.push({ address, amount, memo });
    return this;
  }

  /** @inheritdoc */
  protected async buildImplementation(): Promise<Transaction> {
    this._contractAddress = (this._coinConfig.network as BitgoStacksNetwork).sendmanymemoContractAddress;
    this._contractName = CONTRACT_NAME_SENDMANY;
    this._functionName = FUNCTION_NAME_SENDMANY;
    this._functionArgs = this.sendParamsToFunctionArgs(this._sendParams);
    this._postConditionMode = PostConditionMode.Deny;
    this._postConditions = this.sendParamsToPostcondition(this._sendParams);
    return await super.buildImplementation();
  }
}
SendmanyBuilder
identifier_name
index.js
/* Copyright (c) 2013, Yahoo! Inc. All rights reserved. Code licensed under the BSD License: http://yuilibrary.com/license/ */ var UNKNOWN = 'UNKNOWN'; var fs = require('fs'); var path = require('path'); var read = require('read-installed'); var chalk = require('chalk'); var treeify = require('treeify'); var license = require('./license'); var flatten = function(options) { var moduleInfo = { licenses: UNKNOWN }, json = options.deps, data = options.data, key = json.name + '@' + json.version, colorize = options.color, unknown = options.unknown, licenseData, files = [], licenseFile; /*istanbul ignore next*/ if (colorize) { moduleInfo = { licenses: chalk.bold.red(UNKNOWN) }; key = chalk.blue(json.name) + chalk.dim('@') + chalk.green(json.version); } // If we have processed this key already, just return the data object. // This was added so that we don't recurse forever if there was a circular // dependency in the dependency tree. /*istanbul ignore next*/ if (data[key]) { return data; } data[key] = moduleInfo; if (json.repository) { /*istanbul ignore else*/ if (typeof json.repository === 'object' && typeof json.repository.url === 'string') { moduleInfo.repository = json.repository.url.replace('git+ssh://git@', 'git://').replace('.git', ''); moduleInfo.repository = moduleInfo.repository.replace('git://github.com', 'https://github.com').replace('.git', ''); moduleInfo.repository = moduleInfo.repository.replace('[email protected]:', 'https://github.com/').replace('.git', ''); } } if (json.url) { /*istanbul ignore next*/ if (typeof json.url === 'object') { moduleInfo.url = json.url.web; } } /*istanbul ignore next*/ if (unknown) { moduleInfo.dependencyPath = json.path; } /*istanbul ignore next*/ if (options.customFormat) { Object.keys(options.customFormat).forEach(function forEachCallback(item) { if (json[item]) { //For now, we only support strings, not JSON objects if (typeof json[item] === 'string') { moduleInfo[item] = json[item]; } } else { moduleInfo[item] = options.customFormat[item]; } }); } licenseData = json.license || json.licenses || undefined; if (licenseData) { /*istanbul ignore else*/ if (Array.isArray(licenseData) && licenseData.length > 0) { moduleInfo.licenses = licenseData.map(function(license){ /*istanbul ignore else*/ if (typeof license === 'object') { return license.type; } else if (typeof license === 'string') { return license; } }); } else if (typeof licenseData === 'object' && licenseData.type) { moduleInfo.licenses = licenseData.type; } else if (typeof licenseData === 'string') { moduleInfo.licenses = licenseData; } } else if (license(json.readme)) { moduleInfo.licenses = license(json.readme); } /*istanbul ignore else*/ if (json.path && fs.existsSync(json.path)) { files = fs.readdirSync(json.path).filter(function(filename) { filename = filename.toUpperCase(); return filename.indexOf('LICENSE') > -1 || filename.indexOf('LICENCE') > -1 ; }); } files.forEach(function(filename) { licenseFile = path.join(json.path, filename); // Checking that the file is in fact a normal file and not a directory for example. 
/*istanbul ignore else*/ if (fs.lstatSync(licenseFile).isFile()) { if (!moduleInfo.licenses || moduleInfo.licenses.indexOf(UNKNOWN) > -1) { //Only re-check the license if we didn't get it from elsewhere moduleInfo.licenses = license(fs.readFileSync(licenseFile, {encoding: 'utf8'})); } moduleInfo.licenseFile = licenseFile; } }); if (Array.isArray(moduleInfo.licenses)) { /*istanbul ignore else*/ if (moduleInfo.licenses.length === 1) { moduleInfo.licenses = moduleInfo.licenses[0]; } } /*istanbul ignore else*/ if (json.dependencies) { Object.keys(json.dependencies).forEach(function(name) { var childDependency = json.dependencies[name], dependencyId = childDependency.name + '@' + childDependency.version; if (data[dependencyId]) { // already exists return; } data = flatten({ deps: childDependency, data: data, color: colorize, unknown: unknown, customFormat: options.customFormat }); }); } return data; }; exports.init = function(options, callback) { console.error('scanning' , options.start); if (options.customPath) { options.customFormat = this.parseJson(options.customPath); } read(options.start, { dev: true }, function(err, json) { var data = flatten({ deps: json, data: {}, color: options.color, unknown: options.unknown, customFormat: options.customFormat }), colorize = options.color, sorted = {}, filtered = {}, exclude = options.exclude && options.exclude.replace(/^\s+|\s+$/g, '').split(/\s*,\s*/), inputError = null; Object.keys(data).sort().forEach(function(item) { if (!data[item].licenses) { /*istanbul ignore else*/ if (colorize) { data[item].licenses = chalk.bold.red(UNKNOWN); } else { data[item].licenses = UNKNOWN; } } if (options.unknown) { if (data[item].licenses && data[item].licenses !== UNKNOWN) { if (data[item].licenses.indexOf('*') > -1) { /*istanbul ignore if*/ if (colorize) { data[item].licenses = chalk.bold.red(UNKNOWN); } else { data[item].licenses = UNKNOWN; } } } } /*istanbul ignore else*/ if (data[item]) { if (options.onlyunknown) { if (data[item].licenses.indexOf('*') > -1 || data[item].licenses.indexOf('UNKNOWN') > -1) { sorted[item] = data[item]; } } else { sorted[item] = data[item]; } } }); if (exclude) { Object.keys(sorted).forEach(function(item) { if (!(sorted[item].licenses && exclude.indexOf(sorted[item].licenses) !== -1)) { filtered[item] = sorted[item]; } }); } else { filtered = sorted; } /*istanbul ignore next*/ if (err) { inputError = err; } //Initiate an error if needed (e.g. 
something awful happened)
    if (inputError === null && Object.keys(sorted).length === 1) {
        Object.keys(sorted).forEach(function forEachCallback(item) {
            var tempItem = chalk.stripColor(item);
            if (tempItem === 'undefined@undefined') {
                inputError = new Error('No Npm Packages Found');
            }
        });
    }
    //Return the callback and variables nicely
    callback(filtered, inputError);
    });
};

/*istanbul ignore next*/
exports.print = function(sorted) {
    console.log(exports.asTree(sorted));
};

exports.asTree = function(sorted) {
    return treeify.asTree(sorted, true);
};

exports.asCSV = function(sorted, customFormat) {
    var text = [ ], textArr = [ ], lineArr = [ ];
    if (customFormat && Object.keys(customFormat).length > 0) {
        textArr = [ ];
        textArr.push('"module name"');
        Object.keys(customFormat).forEach(function forEachCallback(item) {
            textArr.push('"' + item + '"');
        });
        text.push(textArr.join(','));
    } else {
        text.push(['"module name"','"license"','"repository"'].join(','));
    }
    Object.keys(sorted).forEach(function(key) {
        var module = sorted[key],
            line = '';
        lineArr = [ ];
        //Grab the custom keys from the custom format
        if (customFormat && Object.keys(customFormat).length > 0) {
            lineArr.push('"' + key + '"');
            Object.keys(customFormat).forEach(function forEachCallback(item) {
                lineArr.push('"' + module[item] + '"');
            });
            line = lineArr.join(',');
        } else {
            line = [ '"' + key + '"', '"' + (module.licenses || '') + '"', '"' + (module.repository || '') + '"' ].join(',');
        }
        text.push(line);
    });
    return text.join('\n');
};

/**
 * Exports data as markdown (*.md) file which has its own syntax.
 * @method
 * @param {JSON} sorted The sorted JSON data from all packages.
 * @param {JSON} customFormat The custom format with information about the needed keys.
 * @return {String} The resulting plain text.
 */
exports.asMarkDown = function(sorted, customFormat) {
    var text = [];
    if (customFormat && Object.keys(customFormat).length > 0) {
        Object.keys(sorted).forEach(function sortedCallback(sortedItem) {
            text.push(' - **[' + sortedItem + '](' + sorted[sortedItem].repository + ')**');
            Object.keys(customFormat).forEach(function customCallback(customItem) {
                text.push(' - ' + customItem + ': ' + sorted[sortedItem][customItem]);
            });
        });
        text = text.join('\n');
    } else {
        Object.keys(sorted).forEach(function(key) {
var module = sorted[key]; text.push('[' + key + '](' + module.repository + ') - ' + module.licenses); }); text = text.join('\n'); } return text; }; /*istanbul ignore next*/ exports.parseJson = function(jsonPath) { if (typeof jsonPath !== 'string') { return new Error('did not specify a path'); } var jsonFileContents = '', result = { }; try { jsonFileContents = fs.readFileSync(jsonPath, {encoding: 'utf8'}); result = JSON.parse(jsonFileContents); } catch (err) { result = err; } finally { return result; } };
random_line_split
index.js
/* Copyright (c) 2013, Yahoo! Inc. All rights reserved. Code licensed under the BSD License: http://yuilibrary.com/license/ */ var UNKNOWN = 'UNKNOWN'; var fs = require('fs'); var path = require('path'); var read = require('read-installed'); var chalk = require('chalk'); var treeify = require('treeify'); var license = require('./license'); var flatten = function(options) { var moduleInfo = { licenses: UNKNOWN }, json = options.deps, data = options.data, key = json.name + '@' + json.version, colorize = options.color, unknown = options.unknown, licenseData, files = [], licenseFile; /*istanbul ignore next*/ if (colorize) { moduleInfo = { licenses: chalk.bold.red(UNKNOWN) }; key = chalk.blue(json.name) + chalk.dim('@') + chalk.green(json.version); } // If we have processed this key already, just return the data object. // This was added so that we don't recurse forever if there was a circular // dependency in the dependency tree. /*istanbul ignore next*/ if (data[key]) { return data; } data[key] = moduleInfo; if (json.repository) { /*istanbul ignore else*/ if (typeof json.repository === 'object' && typeof json.repository.url === 'string') { moduleInfo.repository = json.repository.url.replace('git+ssh://git@', 'git://').replace('.git', ''); moduleInfo.repository = moduleInfo.repository.replace('git://github.com', 'https://github.com').replace('.git', ''); moduleInfo.repository = moduleInfo.repository.replace('[email protected]:', 'https://github.com/').replace('.git', ''); } } if (json.url) { /*istanbul ignore next*/ if (typeof json.url === 'object') { moduleInfo.url = json.url.web; } } /*istanbul ignore next*/ if (unknown) { moduleInfo.dependencyPath = json.path; } /*istanbul ignore next*/ if (options.customFormat) { Object.keys(options.customFormat).forEach(function forEachCallback(item) { if (json[item]) { //For now, we only support strings, not JSON objects if (typeof json[item] === 'string') { moduleInfo[item] = json[item]; } } else { moduleInfo[item] = options.customFormat[item]; } }); } licenseData = json.license || json.licenses || undefined; if (licenseData) { /*istanbul ignore else*/ if (Array.isArray(licenseData) && licenseData.length > 0) { moduleInfo.licenses = licenseData.map(function(license){ /*istanbul ignore else*/ if (typeof license === 'object') { return license.type; } else if (typeof license === 'string') { return license; } }); } else if (typeof licenseData === 'object' && licenseData.type)
else if (typeof licenseData === 'string') { moduleInfo.licenses = licenseData; } } else if (license(json.readme)) { moduleInfo.licenses = license(json.readme); } /*istanbul ignore else*/ if (json.path && fs.existsSync(json.path)) { files = fs.readdirSync(json.path).filter(function(filename) { filename = filename.toUpperCase(); return filename.indexOf('LICENSE') > -1 || filename.indexOf('LICENCE') > -1 ; }); } files.forEach(function(filename) { licenseFile = path.join(json.path, filename); // Checking that the file is in fact a normal file and not a directory for example. /*istanbul ignore else*/ if (fs.lstatSync(licenseFile).isFile()) { if (!moduleInfo.licenses || moduleInfo.licenses.indexOf(UNKNOWN) > -1) { //Only re-check the license if we didn't get it from elsewhere moduleInfo.licenses = license(fs.readFileSync(licenseFile, {encoding: 'utf8'})); } moduleInfo.licenseFile = licenseFile; } }); if (Array.isArray(moduleInfo.licenses)) { /*istanbul ignore else*/ if (moduleInfo.licenses.length === 1) { moduleInfo.licenses = moduleInfo.licenses[0]; } } /*istanbul ignore else*/ if (json.dependencies) { Object.keys(json.dependencies).forEach(function(name) { var childDependency = json.dependencies[name], dependencyId = childDependency.name + '@' + childDependency.version; if (data[dependencyId]) { // already exists return; } data = flatten({ deps: childDependency, data: data, color: colorize, unknown: unknown, customFormat: options.customFormat }); }); } return data; }; exports.init = function(options, callback) { console.error('scanning' , options.start); if (options.customPath) { options.customFormat = this.parseJson(options.customPath); } read(options.start, { dev: true }, function(err, json) { var data = flatten({ deps: json, data: {}, color: options.color, unknown: options.unknown, customFormat: options.customFormat }), colorize = options.color, sorted = {}, filtered = {}, exclude = options.exclude && options.exclude.replace(/^\s+|\s+$/g, '').split(/\s*,\s*/), inputError = null; Object.keys(data).sort().forEach(function(item) { if (!data[item].licenses) { /*istanbul ignore else*/ if (colorize) { data[item].licenses = chalk.bold.red(UNKNOWN); } else { data[item].licenses = UNKNOWN; } } if (options.unknown) { if (data[item].licenses && data[item].licenses !== UNKNOWN) { if (data[item].licenses.indexOf('*') > -1) { /*istanbul ignore if*/ if (colorize) { data[item].licenses = chalk.bold.red(UNKNOWN); } else { data[item].licenses = UNKNOWN; } } } } /*istanbul ignore else*/ if (data[item]) { if (options.onlyunknown) { if (data[item].licenses.indexOf('*') > -1 || data[item].licenses.indexOf('UNKNOWN') > -1) { sorted[item] = data[item]; } } else { sorted[item] = data[item]; } } }); if (exclude) { Object.keys(sorted).forEach(function(item) { if (!(sorted[item].licenses && exclude.indexOf(sorted[item].licenses) !== -1)) { filtered[item] = sorted[item]; } }); } else { filtered = sorted; } /*istanbul ignore next*/ if (err) { inputError = err; } //Initiate an error if needed (e.g. 
something awful happened)
    if (inputError === null && Object.keys(sorted).length === 1) {
        Object.keys(sorted).forEach(function forEachCallback(item) {
            var tempItem = chalk.stripColor(item);
            if (tempItem === 'undefined@undefined') {
                inputError = new Error('No Npm Packages Found');
            }
        });
    }
    //Return the callback and variables nicely
    callback(filtered, inputError);
    });
};

/*istanbul ignore next*/
exports.print = function(sorted) {
    console.log(exports.asTree(sorted));
};

exports.asTree = function(sorted) {
    return treeify.asTree(sorted, true);
};

exports.asCSV = function(sorted, customFormat) {
    var text = [ ], textArr = [ ], lineArr = [ ];
    if (customFormat && Object.keys(customFormat).length > 0) {
        textArr = [ ];
        textArr.push('"module name"');
        Object.keys(customFormat).forEach(function forEachCallback(item) {
            textArr.push('"' + item + '"');
        });
        text.push(textArr.join(','));
    } else {
        text.push(['"module name"','"license"','"repository"'].join(','));
    }
    Object.keys(sorted).forEach(function(key) {
        var module = sorted[key],
            line = '';
        lineArr = [ ];
        //Grab the custom keys from the custom format
        if (customFormat && Object.keys(customFormat).length > 0) {
            lineArr.push('"' + key + '"');
            Object.keys(customFormat).forEach(function forEachCallback(item) {
                lineArr.push('"' + module[item] + '"');
            });
            line = lineArr.join(',');
        } else {
            line = [ '"' + key + '"', '"' + (module.licenses || '') + '"', '"' + (module.repository || '') + '"' ].join(',');
        }
        text.push(line);
    });
    return text.join('\n');
};

/**
 * Exports data as markdown (*.md) file which has its own syntax.
 * @method
 * @param {JSON} sorted The sorted JSON data from all packages.
 * @param {JSON} customFormat The custom format with information about the needed keys.
 * @return {String} The resulting plain text.
 */
exports.asMarkDown = function(sorted, customFormat) {
    var text = [];
    if (customFormat && Object.keys(customFormat).length > 0) {
        Object.keys(sorted).forEach(function sortedCallback(sortedItem) {
            text.push(' - **[' + sortedItem + '](' + sorted[sortedItem].repository + ')**');
            Object.keys(customFormat).forEach(function customCallback(customItem) {
                text.push(' - ' + customItem + ': ' + sorted[sortedItem][customItem]);
            });
        });
        text = text.join('\n');
    } else {
        Object.keys(sorted).forEach(function(key) {
            var module = sorted[key];
            text.push('[' + key + '](' + module.repository + ') - ' + module.licenses);
        });
        text = text.join('\n');
    }
    return text;
};

/*istanbul ignore next*/
exports.parseJson = function(jsonPath) {
    if (typeof jsonPath !== 'string') {
        return new Error('did not specify a path');
    }
    var jsonFileContents = '', result = { };
    try {
        jsonFileContents = fs.readFileSync(jsonPath, {encoding: 'utf8'});
        result = JSON.parse(jsonFileContents);
    } catch (err) {
        result = err;
    } finally {
        return result;
    }
};
{ moduleInfo.licenses = licenseData.type; }
conditional_block
subp_main.py
# Copyright 2009 Noam Yorav-Raphael # # This file is part of DreamPie. # # DreamPie is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DreamPie is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with DreamPie. If not, see <http://www.gnu.org/licenses/>. # This file is a script (not a module) run by the DreamPie GUI. # It expects one argument: the port to connect to. # It creates a package called dreampielib from subp-py2.zip or subp-py3.zip # (which are expected to be in the directory of __file__), # and runs dreampielib.subprocess.main(port). import sys from os.path import abspath, join, dirname def
(): port = int(sys.argv[1]) py_ver = sys.version_info[0] lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver)) sys.path.insert(0, lib_name) from dreampielib.subprocess import main as subprocess_main del sys.path[0] if sys.version_info[:2] == (3, 0): sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n" "Please upgrade to Python 3.1.\n") subprocess_main(port) if __name__ == '__main__': main()
main
identifier_name
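The import dance in main() above -- prepend the versioned lib directory, import, then drop it from sys.path -- is the standard trick for loading a package shipped next to the script rather than installed. The same pattern in isolation, with a made-up directory and package name:

import sys
from os.path import abspath, join, dirname

lib_dir = abspath(join(dirname(__file__), "vendored"))  # hypothetical sibling directory
sys.path.insert(0, lib_dir)
try:
    import somepackage  # hypothetical package living under vendored/
finally:
    sys.path.remove(lib_dir)  # keep later imports from seeing the vendored tree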
subp_main.py
# Copyright 2009 Noam Yorav-Raphael # # This file is part of DreamPie. # # DreamPie is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DreamPie is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with DreamPie. If not, see <http://www.gnu.org/licenses/>. # This file is a script (not a module) run by the DreamPie GUI. # It expects one argument: the port to connect to. # It creates a package called dreampielib from subp-py2.zip or subp-py3.zip # (which are expected to be in the directory of __file__), # and runs dreampielib.subprocess.main(port). import sys from os.path import abspath, join, dirname def main(): port = int(sys.argv[1]) py_ver = sys.version_info[0] lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver)) sys.path.insert(0, lib_name) from dreampielib.subprocess import main as subprocess_main del sys.path[0] if sys.version_info[:2] == (3, 0):
subprocess_main(port) if __name__ == '__main__': main()
sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n" "Please upgrade to Python 3.1.\n")
conditional_block
subp_main.py
# Copyright 2009 Noam Yorav-Raphael # # This file is part of DreamPie. # # DreamPie is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DreamPie is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with DreamPie. If not, see <http://www.gnu.org/licenses/>. # This file is a script (not a module) run by the DreamPie GUI. # It expects one argument: the port to connect to. # It creates a package called dreampielib from subp-py2.zip or subp-py3.zip
import sys from os.path import abspath, join, dirname def main(): port = int(sys.argv[1]) py_ver = sys.version_info[0] lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver)) sys.path.insert(0, lib_name) from dreampielib.subprocess import main as subprocess_main del sys.path[0] if sys.version_info[:2] == (3, 0): sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n" "Please upgrade to Python 3.1.\n") subprocess_main(port) if __name__ == '__main__': main()
# (which are expected to be in the directory of __file__), # and runs dreampielib.subprocess.main(port).
random_line_split
subp_main.py
# Copyright 2009 Noam Yorav-Raphael # # This file is part of DreamPie. # # DreamPie is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DreamPie is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with DreamPie. If not, see <http://www.gnu.org/licenses/>. # This file is a script (not a module) run by the DreamPie GUI. # It expects one argument: the port to connect to. # It creates a package called dreampielib from subp-py2.zip or subp-py3.zip # (which are expected to be in the directory of __file__), # and runs dreampielib.subprocess.main(port). import sys from os.path import abspath, join, dirname def main():
if __name__ == '__main__': main()
port = int(sys.argv[1]) py_ver = sys.version_info[0] lib_name = abspath(join(dirname(__file__), 'subp-py%d' % py_ver)) sys.path.insert(0, lib_name) from dreampielib.subprocess import main as subprocess_main del sys.path[0] if sys.version_info[:2] == (3, 0): sys.stderr.write("Warning: DreamPie doesn't support Python 3.0. \n" "Please upgrade to Python 3.1.\n") subprocess_main(port)
identifier_body
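Since the script expects the port as its single argument, the parent GUI presumably launches it as a child process along these lines (a sketch only; the actual DreamPie launcher is not part of this file, and the port value is a placeholder):

import subprocess, sys

port = 9000  # placeholder; the GUI would pick a free port it is listening on
child = subprocess.Popen([sys.executable, "subp_main.py", str(port)])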
test_dot-jump25Oct2016_10-53.py
from __future__ import print_function __author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor import time, sys, platform, os from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees import numpy as np import psychopy, psychopy.info import copy from psychopy import visual, sound, monitors, logging, gui, event, core, data try: from helpersAOH import accelerateComputer, openMyStimWindow except Exception as e: print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file') print('Current directory is ',os.getcwd()) eyeTracking = False if eyeTracking: try: import eyelinkEyetrackerForPsychopySUPA3 except Exception as e: print(e) print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file') print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples') #Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site eyeTracking = False expname= "dot-jump" demo = False; exportImages = False autopilot = False subject='test' ############################### ### Setup the screen parameters ############################################################################################## ## allowGUI = False units='deg' #'cm' fullscrn=False waitBlank=False if True: #just so I can indent all the below refreshRate= 85 *1.0; #160 #set to the framerate of the monitor fullscrn=True; #show in small window (0) or full screen (1) scrn=True #which screen to display the stimuli. 
0 is home screen, 1 is second screen # create a dialog from dictionary infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate } OK = gui.DlgFromDict(dictionary=infoFirst, title='MOT', order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'], tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating', 'Use second Screen': ''}, ) if not OK.OK: print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit() autopilot = infoFirst['Autopilot'] checkRefreshEtc = infoFirst['Check refresh etc'] scrn = infoFirst['Use second screen'] print('scrn = ',scrn, ' from dialog box') fullscrn = infoFirst['Fullscreen (timing errors if not)'] refreshRate = infoFirst['Screen refresh rate'] #monitor parameters widthPix = 1280 #1440 #monitor width in pixels heightPix =1024 #900 #monitor height in pixels monitorwidth = 40.5 #28.5 #monitor width in centimeters viewdist = 55.; #cm pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180) bgColor = [-1,-1,-1] #black background monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor mon.setSizePix( (widthPix,heightPix) ) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) myWin.setRecordFrameIntervals(False) trialsPerCondition = 2 #default value refreshMsg2 = '' if not checkRefreshEtc: refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED' refreshRateWrong = False else: #checkRefreshEtc runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) print('Finished runInfo- which assesses the refresh and processes of this computer') refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) ) refreshRateTolerancePct = 3 pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate) refreshRateWrong = pctOff > (refreshRateTolerancePct/100.) if refreshRateWrong: refreshMsg1 += ' BUT' refreshMsg1 += ' program assumes ' + str(refreshRate) refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!' 
else: refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) ) myWinRes = myWin.size myWin.allowGUI =True myWin.close() #have to close window to show dialog box ## ### END Setup of the screen parameters ############################################################################################## #################################### askUserAndConfirmExpParams = True if autopilot: subject = 'autoTest' ############################### ### Ask user exp params ############################################################################################## ## askUserAndConfirmExpParams if askUserAndConfirmExpParams: dlgLabelsOrdered = list() #new dialog box myDlg = gui.Dlg(title=expname, pos=(200,400)) if not autopilot: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') else: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue') myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?') myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue') myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?') dlgLabelsOrdered.append('autoPilotTime') dlgLabelsOrdered.append('randomTime') dlgLabelsOrdered.append('autoPilotSpace') dlgLabelsOrdered.append('randomSpace') myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition)) dlgLabelsOrdered.append('trialsPerCondition') pctCompletedBreak = 50 myDlg.addText(refreshMsg1, color='Black') if refreshRateWrong: myDlg.addText(refreshMsg2, color='Red') msgWrongResolution = '' if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any(): msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1]) myDlg.addText(msgWrongResolution, color='Red') print(msgWrongResolution); logging.info(msgWrongResolution) myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84 #myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions myDlg.show() if myDlg.OK: #unpack information from dialogue box thisInfo = myDlg.data #this will be a list of data returned from each field added in order if autopilot: name=thisInfo[dlgLabelsOrdered.index('subject')] if len(name) > 0: #if entered something subject = name #change subject default name to what user entered trialsPerCondition = int( thisInfo[ dlgLabelsOrdered.index('trialsPerCondition') ] ) #convert string to integer autoSpace = thisInfo[dlgLabelsOrdered.index('autoPilotSpace')] autoTime = thisInfo[dlgLabelsOrdered.index('autoPilotTime')] randomTime = thisInfo[dlgLabelsOrdered.index('randomTime')] randomSpace = thisInfo[dlgLabelsOrdered.index('randomSpace')] print('trialsPerCondition=',trialsPerCondition) logging.info('trialsPerCondition ='+str(trialsPerCondition)) else: print('User cancelled from dialog box.'); logging.info('User cancelled from dialog box') logging.flush() core.quit() ### Ask user exp params ## END askUserAndConfirmExpParams ############################### ############################################################################################## if os.path.isdir('.'+os.sep+'dataRaw'): dataDir='dataRaw' else: msg= 'dataRaw directory does not exist, so 
saving data in present working directory'
    print(msg); logging.info(msg)
    dataDir='.'
timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime())
fileNameWithPath = dataDir+os.sep+subject+ '_' + expname+timeAndDateStr
if not demo and not exportImages:
    saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileNameWithPath + '.py'
    os.system(saveCodeCmd)  #save a copy of the code as it was when that subject was run
    logF = logging.LogFile(fileNameWithPath+'.log',
        filemode='w',  #if you set this to 'a' it will append instead of overwriting
        level=logging.INFO)  #info, data, warnings, and errors will be sent to this logfile
if demo or exportImages:
    logging.console.setLevel(logging.ERROR)  #only show this level's and higher messages
logging.console.setLevel(logging.WARNING)  #DEBUG means set the console to receive nearly all messages, INFO is for everything else, INFO, EXP, DATA, WARNING and ERROR
if refreshRateWrong:
    logging.error(refreshMsg1+refreshMsg2)
else: logging.info(refreshMsg1+refreshMsg2)
longerThanRefreshTolerance = 0.27
longFrameLimit = round(1000./refreshRate*(1.0+longerThanRefreshTolerance),3) # round(1000/refreshRate*1.5,2)
msg = 'longFrameLimit='+ str(longFrameLimit) +' Recording trials where one or more interframe interval exceeded this figure '
logging.info(msg); print(msg)
if msgWrongResolution != '':
    logging.error(msgWrongResolution)

myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank)
runInfo = psychopy.info.RunTimeInfo(
        win=myWin,    ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
        refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
        verbose=True, ## True means report on everything
        userProcsDetailed=True  ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
        )
msg = 'second window opening runInfo mean ms='+ str( runInfo["windowRefreshTimeAvg_ms"] )
logging.info(msg); print(msg)
logging.info(runInfo)
logging.info('gammaGrid='+str(mon.getGammaGrid()))
logging.info('linearizeMethod='+str(mon.getLinearizeMethod()))

####Functions. Save time by automating processes like stimulus creation and ordering
############################################################################
def oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, trialObjects):
    cueFrame = cuePos * SOAFrames
    cueMax = cueFrame + cueFrames
    showIdx = int(np.floor(n/SOAFrames))
    #objectIdxs = [i for i in range(len(trialObjects))]
    #objectIdxs.append(len(trialObjects)-1) #AWFUL hack
    #print(objectIdxs[showIdx])
    #floored quotient
    obj = trialObjects[showIdx]
    drawObject = n%SOAFrames < itemFrames
    if drawObject:
        myWin.color = bgColor
        if n >= cueFrame and n < cueMax:
            #print('cueFrames! n is', n,'. cueFrame is ,', cueFrame, 'cueFrame + cueFrames is ', (cueFrame + cueFrames))
            #if n%2 == 0: #This should make it flash, but it might be too fast
                #print('cue flash')
                #myWin.color = (0,0,0)
            obj.draw()
            cue.draw()
        else:
            obj.draw()
    return True

#objects: Stimuli to display or
#cue: cue stimulus or stimuli
#timing parameters: Could be item duration, soa and isi. i.e. if SOA+Duration % n == 0: stimulus.setColor(stimulusColor)
#bgColor and stimulusColor: if displaying and hiding stimuli, i.e. 
for RSVP #movementVector: direction and distance of movement if moving stimuli def oneTrial(stimuli): dotOrder = np.arange(len(stimuli)) np.random.shuffle(dotOrder) print(dotOrder) shuffledStimuli = [stimuli[i] for i in dotOrder] ts = [] myWin.flip(); myWin.flip() #Make sure raster at top of screen (unless not in blocking mode), and give CPU a chance to finish other tasks t0 = trialClock.getTime() for n in range(trialFrames): fixation.draw() #print(n//SOAFrames) oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, shuffledStimuli) myWin.flip() ts.append(trialClock.getTime() - t0) return True, shuffledStimuli, dotOrder, ts def getResponse(trialStimuli): if autopilot: spacing = 360./nDots autoResponseIdx = cuePos + autoTime #The serial position of the response in the stream if randomTime: autoResponseIdx += int(round( np.random.normal(0,2) )) itemAtTemporalSelection = trialStimuli[autoResponseIdx] unshuffledPositions = [dot.pos.tolist() for dot in stimuli] itemSpatial = unshuffledPositions.index(itemAtTemporalSelection.pos.tolist()) itemSpatial = itemSpatial + autoSpace if randomSpace: itemSpatial += int(round( np.random.normal(0,2) )) while itemSpatial>23: itemSpatial = itemSpatial - 23 #Once we have temporal pos of selected item relative to start of the trial #Need to get the serial spatial pos of this item, so that we can select items around it based on the autoSpace offset #print('itemSpatial is: ', itemSpatial) selectionTemporal = trialStimuli.index(stimuli[itemSpatial]) #This seems redundant, but it tests that the item we've selected in space is the cued item in time. if the temporal and spatial offsets are 0, it should be the same as cuePos. accuracy = cuePos == selectionTemporal mousePos = (stimuli[itemSpatial].pos[0],stimuli[itemSpatial].pos[1]) expStop = False item = stimuli[itemSpatial] return accuracy, item, expStop, mousePos elif not autopilot: myMouse = event.Mouse(visible = False,win=myWin) responded = False expStop = False event.clearEvents() mousePos = (1e6,1e6) escape = event.getKeys() myMouse.setPos((0,0)) myMouse.setVisible(True) while not responded: for item in trialStimuli: item.draw() myWin.flip() button = myMouse.getPressed() mousePos = myMouse.getPos() escapeKey = event.getKeys() if button[0]: print('click detected') responded = True print('getResponse mousePos:',mousePos) elif len(escapeKey)>0: if escapeKey[0] == 'space' or escapeKey[0] == 'ESCAPE': expStop = True responded = True return False, np.random.choice(trialStimuli), expStop, (0,0) clickDistances = [] for item in trialStimuli: x = mousePos[0] - item.pos[0] y = mousePos[1] - item.pos[1] distance = sqrt(x**2 + y**2) clickDistances.append(distance) if not expStop: minDistanceIdx = clickDistances.index(min(clickDistances)) accuracy = minDistanceIdx == cuePos item = trialStimuli[minDistanceIdx] myMouse.setVisible(False) return accuracy, item, expStop, mousePos def drawStimuli(nDots, radius, center, stimulusObject, sameEachTime = True): if len(center) > 2 or len(center) < 2: print('Center coords must be list of length 2') return None if not sameEachTime and not isinstance(stimulusObject, (list, tuple)): print('You want different objects in each position, but your stimuli is not a list or tuple') return None if not sameEachTime and isinstance(stimulusObject, (list, tuple)) and len(stimulusObject)!=nDots: print('You want different objects in each position, but the number of positions does not equal the number of items') return None spacing = 360./nDots stimuli = [] for dot in range(nDots): #have to specify 
positions for multiples of 90deg because python (computers in general?) can't store exact value of pi and thus cos(pi/2) = 6.123e-17, not 0 angle = dot*spacing if angle == 0: xpos = radius ypos = 0 elif angle == 90: xpos = 0 ypos = radius elif angle == 180: xpos = -radius ypos = 0 elif angle == 270: xpos = 0 ypos = -radius elif angle%90!=0: xpos = radius*cos(radians(angle)) ypos = radius*sin(radians(angle)) if sameEachTime:
elif not sameEachTime: stim = stimulusObject[dot] stim.pos = (xpos,ypos) stimuli.append(stim) return stimuli def checkTiming(ts): interframeIntervals = np.diff(ts) * 1000 #print(interframeIntervals) frameTimeTolerance=.3 #proportion longer than refreshRate that will not count as a miss longFrameLimit = np.round(1000/refreshRate*(1.0+frameTimeTolerance),2) idxsInterframeLong = np.where( interframeIntervals > longFrameLimit ) [0] #frames that exceeded 150% of expected duration numCasesInterframeLong = len( idxsInterframeLong ) if numCasesInterframeLong > 0: print(numCasesInterframeLong,'frames of', trialFrames,'were longer than',str(1000/refreshRate*(1.0+frameTimeTolerance))) return numCasesInterframeLong ##Set up stimuli stimulus = visual.Circle(myWin, radius = .2, fillColor = (1,1,1) ) nDots = 24 radius = 4 center = (0,0) sameEachTime = True #(nDots, radius, center, stimulusObject, sameEachTime = True) stimuli = drawStimuli(nDots, radius, center, stimulus, sameEachTime) #print(stimuli) #print('length of stimuli object', len(stimuli)) ######Create visual objects, noise masks, response prompts etc. ########### ######Draw your stimuli here if they don't change across trials, but other parameters do (like timing or distance) ######If you want to automate your stimuli. Do it in a function below and save clutter. ######For instance, maybe you want random pairs of letters. Write a function! ########################################################################### fixSize = .1 fixation= visual.Circle(myWin, radius = fixSize , fillColor = (1,1,1), units=units) cue = visual.Circle(myWin, radius = radius + 2, fillColor = None, lineColor = (1,1,1), units = units) ###Trial timing parameters SOAMS = 333.333 itemMS = 111.111 ISIMS = SOAMS - itemMS trialMS = SOAMS * nDots cueMS = itemMS SOAFrames = int(np.floor(SOAMS/(1000./refreshRate))) itemFrames = int(np.floor(itemMS/(1000./refreshRate))) ISIFrames = int(np.floor(ISIMS/(1000./refreshRate))) trialFrames = int(nDots*SOAFrames) cueFrames = int(np.floor(cueMS/(1000./refreshRate))) print('cueFrames=',cueFrames) print('itemFrames=',itemFrames) print('refreshRate =', refreshRate) print('cueMS from frames =', cueFrames*(1000./refreshRate)) print('num of SOAs in the trial:', trialFrames/SOAFrames) ##Factorial design numResponsesPerTrial = 1 #default. Used to create headers for dataFile stimList = [] #cuePositions = [dot for dot in range(nDots) if dot not in [0,nDots-1]] cuePositions = [10] print('cuePositions: ',cuePositions) #cuePositions = cuePositions[2:(nDots-3)] #drop the first and final two dots #Set up the factorial design (list of all conditions) for cuePos in cuePositions: stimList.append({'cuePos':cuePos}) trials = data.TrialHandler(stimList, nReps = trialsPerCondition) #print(trials) ####Create output file### ######################################################################### dataFile = open(fileNameWithPath + '.txt', 'w') numResponsesPerTrial = 1 #headers for initial datafile rows, they don't get repeated. These appear in the file in the order they appear here. oneOffHeaders = [ 'subject', 'task', 'staircase', 'trialNum' ] for header in oneOffHeaders: print(header, '\t', end='', file=dataFile) #Headers for duplicated datafile rows. These are repeated using numResponsesPerTrial. For instance, we might have two responses in a trial. 
duplicatedHeaders = [ 'responseSpatialPos', 'responseX', 'responseY', 'correctX', 'correctY', 'clickX', 'clickY', 'accuracy', 'responsePosInStream', 'correctPosInStream' ] if numResponsesPerTrial == 1: for header in duplicatedHeaders: print(header, '\t', end='', file=dataFile) elif numResponsesPerTrial > 1: for response in range(numResponsesPerTrial): for header in duplicatedHeaders: print(header+str(response), '\t', end='', file=dataFile) for pos in range(nDots): print('position'+str(pos),'\t',end='',file=dataFile) #Headers done. Do a new line print('longFrames',file=dataFile) expStop = False trialNum=0; numTrialsCorrect=0; expStop=False; framesSaved=0; print('Starting experiment of',trials.nTotal,'trials. Current trial is trial ',trialNum) #NextRemindCountText.setText( str(trialNum) + ' of ' + str(trials.nTotal) ) #NextRemindCountText.draw() myWin.flip() #end of header trialClock = core.Clock() stimClock = core.Clock() if eyeTracking: if getEyeTrackingFileFromEyetrackingMachineAtEndOfExperiment: eyeMoveFile=('EyeTrack_'+subject+'_'+timeAndDateStr+'.EDF') tracker=Tracker_EyeLink(myWin,trialClock,subject,1, 'HV5',(255,255,255),(0,0,0),False,(widthPix,heightPix)) while trialNum < trials.nTotal and expStop==False: fixation.draw() myWin.flip() if not autopilot: core.wait(1) trial = trials.next() # print('trial idx is',trials.thisIndex) cuePos = trial.cuePos # print(cuePos) print("Doing trialNum",trialNum) trialDone, trialStimuli, trialStimuliOrder, ts = oneTrial(stimuli) #Shift positions so that the list starts at 1, which is positioned at (0,radius), and increases clockwise. This is what the MM code expects MMPositions = list() #Mixture modelling positions for dotPos in trialStimuliOrder: if dotPos < (nDots/4 - 1): #Because python indexes start at 0, 5 is the 6th pos. MMPositions.append(dotPos + 20) elif dotPos >= (nDots/4 -1): MMPositions.append(dotPos -4) nBlips = checkTiming(ts) # print(trialStimuliOrder) if trialDone: accuracy, response, expStop, clickPos = getResponse(trialStimuli) responseCoord = response.pos.tolist() spatialRelativeToXAxis = [item.pos.tolist() for item in stimuli] try: responseSpatialRelativeToXAxis = spatialRelativeToXAxis.index(responseCoord) except ValueError: print('coord not in list') if responseSpatialRelativeToXAxis < (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis + 20 elif responseSpatialRelativeToXAxis >= (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis - 4 trialPositions = [item.pos.tolist() for item in trialStimuli] responseTemporal = trialPositions.index(responseCoord) # print('trial positions in sequence:',trialPositions) # print('position of item nearest to click:',responseSpatial) # print('Position in sequence of item nearest to click:',responseTemporal) correctSpatial = trialStimuli[cuePos].pos correctTemporal = cuePos print(subject,'\t', 'dot-jump','\t', 'False','\t', trialNum,'\t', responseSpatial,'\t', responseCoord[0],'\t', responseCoord[1],'\t', correctSpatial[0],'\t', correctSpatial[1],'\t', clickPos[0],'\t', clickPos[1],'\t', accuracy,'\t', responseTemporal,'\t', correctTemporal,'\t', end='', file = dataFile ) for dot in range(nDots): print(MMPositions[dot], '\t',end='', file=dataFile) print(nBlips, file=dataFile) trialNum += 1 dataFile.flush() if expStop: dataFile.flush()
stim = copy.copy(stimulusObject)
conditional_block
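The row above is a conditional_block example: the middle (stim = copy.copy(stimulusObject)) is the body of the if sameEachTime: branch inside drawStimuli, and the suffix resumes at the elif branch. A minimal sketch of how such a row might be serialized for fill-in-the-middle training follows; the sentinel strings and helper name are placeholders chosen for illustration, not fields or tokens taken from this dataset.

def to_psm(prefix: str, suffix: str, middle: str) -> str:
    # Prefix-Suffix-Middle ordering: the model reads both context halves,
    # then learns to generate the missing middle.
    return f"<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>{middle}"

example = to_psm(
    prefix="        if sameEachTime:\n",
    suffix="        elif not sameEachTime:\n",
    middle="            stim = copy.copy(stimulusObject)\n",
)
print(example)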
test_dot-jump25Oct2016_10-53.py
from __future__ import print_function __author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor import time, sys, platform, os from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees import numpy as np import psychopy, psychopy.info import copy from psychopy import visual, sound, monitors, logging, gui, event, core, data try: from helpersAOH import accelerateComputer, openMyStimWindow except Exception as e: print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file') print('Current directory is ',os.getcwd()) eyeTracking = False if eyeTracking: try: import eyelinkEyetrackerForPsychopySUPA3 except Exception as e: print(e) print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file') print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples') #Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site eyeTracking = False expname= "dot-jump" demo = False; exportImages = False autopilot = False subject='test' ############################### ### Setup the screen parameters ############################################################################################## ## allowGUI = False units='deg' #'cm' fullscrn=False waitBlank=False if True: #just so I can indent all the below refreshRate= 85 *1.0; #160 #set to the framerate of the monitor fullscrn=True; #show in small window (0) or full screen (1) scrn=True #which screen to display the stimuli. 
0 is home screen, 1 is second screen # create a dialog from dictionary infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate } OK = gui.DlgFromDict(dictionary=infoFirst, title='MOT', order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'], tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating', 'Use second Screen': ''}, ) if not OK.OK: print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit() autopilot = infoFirst['Autopilot'] checkRefreshEtc = infoFirst['Check refresh etc'] scrn = infoFirst['Use second screen'] print('scrn = ',scrn, ' from dialog box') fullscrn = infoFirst['Fullscreen (timing errors if not)'] refreshRate = infoFirst['Screen refresh rate'] #monitor parameters widthPix = 1280 #1440 #monitor width in pixels heightPix =1024 #900 #monitor height in pixels monitorwidth = 40.5 #28.5 #monitor width in centimeters viewdist = 55.; #cm pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180) bgColor = [-1,-1,-1] #black background monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor mon.setSizePix( (widthPix,heightPix) ) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) myWin.setRecordFrameIntervals(False) trialsPerCondition = 2 #default value refreshMsg2 = '' if not checkRefreshEtc: refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED' refreshRateWrong = False else: #checkRefreshEtc runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) print('Finished runInfo- which assesses the refresh and processes of this computer') refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) ) refreshRateTolerancePct = 3 pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate) refreshRateWrong = pctOff > (refreshRateTolerancePct/100.) if refreshRateWrong: refreshMsg1 += ' BUT' refreshMsg1 += ' program assumes ' + str(refreshRate) refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!' 
else: refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) ) myWinRes = myWin.size myWin.allowGUI =True myWin.close() #have to close window to show dialog box ## ### END Setup of the screen parameters ############################################################################################## #################################### askUserAndConfirmExpParams = True if autopilot: subject = 'autoTest' ############################### ### Ask user exp params ############################################################################################## ## askUserAndConfirmExpParams if askUserAndConfirmExpParams: dlgLabelsOrdered = list() #new dialog box myDlg = gui.Dlg(title=expname, pos=(200,400)) if not autopilot: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') else: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue') myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?') myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue') myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?') dlgLabelsOrdered.append('autoPilotTime') dlgLabelsOrdered.append('randomTime') dlgLabelsOrdered.append('autoPilotSpace') dlgLabelsOrdered.append('randomSpace') myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition)) dlgLabelsOrdered.append('trialsPerCondition') pctCompletedBreak = 50 myDlg.addText(refreshMsg1, color='Black') if refreshRateWrong: myDlg.addText(refreshMsg2, color='Red') msgWrongResolution = '' if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any(): msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1]) myDlg.addText(msgWrongResolution, color='Red') print(msgWrongResolution); logging.info(msgWrongResolution) myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84 #myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions myDlg.show() if myDlg.OK: #unpack information from dialogue box thisInfo = myDlg.data #this will be a list of data returned from each field added in order if autopilot: name=thisInfo[dlgLabelsOrdered.index('subject')] if len(name) > 0: #if entered something subject = name #change subject default name to what user entered trialsPerCondition = int( thisInfo[ dlgLabelsOrdered.index('trialsPerCondition') ] ) #convert string to integer autoSpace = thisInfo[dlgLabelsOrdered.index('autoPilotSpace')] autoTime = thisInfo[dlgLabelsOrdered.index('autoPilotTime')] randomTime = thisInfo[dlgLabelsOrdered.index('randomTime')] randomSpace = thisInfo[dlgLabelsOrdered.index('randomSpace')] print('trialsPerCondition=',trialsPerCondition) logging.info('trialsPerCondition ='+str(trialsPerCondition)) else: print('User cancelled from dialog box.'); logging.info('User cancelled from dialog box') logging.flush() core.quit() ### Ask user exp params ## END askUserAndConfirmExpParams ############################### ############################################################################################## if os.path.isdir('.'+os.sep+'dataRaw'): dataDir='dataRaw' else: msg= 'dataRaw directory does not exist, so 
saving data in present working directory' print(msg); logging.info(msg) dataDir='.' timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime()) fileNameWithPath = dataDir+os.sep+subject+ '_' + expname+timeAndDateStr if not demo and not exportImages: saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileNameWithPath + '.py' os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run logF = logging.LogFile(fileNameWithPath+'.log', filemode='w',#if you set this to 'a' it will append instead of overwriting level=logging.INFO)#info, data, warnings, and errors will be sent to this logfile if demo or exportImages: logging.console.setLevel(logging.ERROR) #only show this level's and higher messages logging.console.setLevel(logging.WARNING) #DEBUG means set the console to receive nearly all messges, INFO is for everything else, INFO, EXP, DATA, WARNING and ERROR if refreshRateWrong: logging.error(refreshMsg1+refreshMsg2) else: logging.info(refreshMsg1+refreshMsg2) longerThanRefreshTolerance = 0.27 longFrameLimit = round(1000./refreshRate*(1.0+longerThanRefreshTolerance),3) # round(1000/refreshRate*1.5,2) msg = 'longFrameLimit='+ str(longFrameLimit) +' Recording trials where one or more interframe interval exceeded this figure ' logging.info(msg); print(msg) if msgWrongResolution != '': logging.error(msgWrongResolution) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) msg = 'second window opening runInfo mean ms='+ str( runInfo["windowRefreshTimeAvg_ms"] ) logging.info(msg); print(msg) logging.info(runInfo) logging.info('gammaGrid='+str(mon.getGammaGrid())) logging.info('linearizeMethod='+str(mon.getLinearizeMethod())) ####Functions. Save time by automating processes like stimulus creation and ordering ############################################################################ def oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, trialObjects):
def oneTrial(stimuli): dotOrder = np.arange(len(stimuli)) np.random.shuffle(dotOrder) print(dotOrder) shuffledStimuli = [stimuli[i] for i in dotOrder] ts = [] myWin.flip(); myWin.flip() #Make sure raster at top of screen (unless not in blocking mode), and give CPU a chance to finish other tasks t0 = trialClock.getTime() for n in range(trialFrames): fixation.draw() #print(n//SOAFrames) oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, shuffledStimuli) myWin.flip() ts.append(trialClock.getTime() - t0) return True, shuffledStimuli, dotOrder, ts def getResponse(trialStimuli): if autopilot: spacing = 360./nDots autoResponseIdx = cuePos + autoTime #The serial position of the response in the stream if randomTime: autoResponseIdx += int(round( np.random.normal(0,2) )) itemAtTemporalSelection = trialStimuli[autoResponseIdx] unshuffledPositions = [dot.pos.tolist() for dot in stimuli] itemSpatial = unshuffledPositions.index(itemAtTemporalSelection.pos.tolist()) itemSpatial = itemSpatial + autoSpace if randomSpace: itemSpatial += int(round( np.random.normal(0,2) )) while itemSpatial>23: itemSpatial = itemSpatial - 23 #Once we have temporal pos of selected item relative to start of the trial #Need to get the serial spatial pos of this item, so that we can select items around it based on the autoSpace offset #print('itemSpatial is: ', itemSpatial) selectionTemporal = trialStimuli.index(stimuli[itemSpatial]) #This seems redundant, but it tests that the item we've selected in space is the cued item in time. if the temporal and spatial offsets are 0, it should be the same as cuePos. accuracy = cuePos == selectionTemporal mousePos = (stimuli[itemSpatial].pos[0],stimuli[itemSpatial].pos[1]) expStop = False item = stimuli[itemSpatial] return accuracy, item, expStop, mousePos elif not autopilot: myMouse = event.Mouse(visible = False,win=myWin) responded = False expStop = False event.clearEvents() mousePos = (1e6,1e6) escape = event.getKeys() myMouse.setPos((0,0)) myMouse.setVisible(True) while not responded: for item in trialStimuli: item.draw() myWin.flip() button = myMouse.getPressed() mousePos = myMouse.getPos() escapeKey = event.getKeys() if button[0]: print('click detected') responded = True print('getResponse mousePos:',mousePos) elif len(escapeKey)>0: if escapeKey[0] == 'space' or escapeKey[0] == 'ESCAPE': expStop = True responded = True return False, np.random.choice(trialStimuli), expStop, (0,0) clickDistances = [] for item in trialStimuli: x = mousePos[0] - item.pos[0] y = mousePos[1] - item.pos[1] distance = sqrt(x**2 + y**2) clickDistances.append(distance) if not expStop: minDistanceIdx = clickDistances.index(min(clickDistances)) accuracy = minDistanceIdx == cuePos item = trialStimuli[minDistanceIdx] myMouse.setVisible(False) return accuracy, item, expStop, mousePos def drawStimuli(nDots, radius, center, stimulusObject, sameEachTime = True): if len(center) > 2 or len(center) < 2: print('Center coords must be list of length 2') return None if not sameEachTime and not isinstance(stimulusObject, (list, tuple)): print('You want different objects in each position, but your stimuli is not a list or tuple') return None if not sameEachTime and isinstance(stimulusObject, (list, tuple)) and len(stimulusObject)!=nDots: print('You want different objects in each position, but the number of positions does not equal the number of items') return None spacing = 360./nDots stimuli = [] for dot in range(nDots): #have to specify positions for multiples of 90deg because python (computers in general?) 
can't store exact value of pi and thus cos(pi/2) = 6.123e-17, not 0 angle = dot*spacing if angle == 0: xpos = radius ypos = 0 elif angle == 90: xpos = 0 ypos = radius elif angle == 180: xpos = -radius ypos = 0 elif angle == 270: xpos = 0 ypos = -radius elif angle%90!=0: xpos = radius*cos(radians(angle)) ypos = radius*sin(radians(angle)) if sameEachTime: stim = copy.copy(stimulusObject) elif not sameEachTime: stim = stimulusObject[dot] stim.pos = (xpos,ypos) stimuli.append(stim) return stimuli def checkTiming(ts): interframeIntervals = np.diff(ts) * 1000 #print(interframeIntervals) frameTimeTolerance=.3 #proportion longer than refreshRate that will not count as a miss longFrameLimit = np.round(1000/refreshRate*(1.0+frameTimeTolerance),2) idxsInterframeLong = np.where( interframeIntervals > longFrameLimit ) [0] #frames that exceeded 150% of expected duration numCasesInterframeLong = len( idxsInterframeLong ) if numCasesInterframeLong > 0: print(numCasesInterframeLong,'frames of', trialFrames,'were longer than',str(1000/refreshRate*(1.0+frameTimeTolerance))) return numCasesInterframeLong ##Set up stimuli stimulus = visual.Circle(myWin, radius = .2, fillColor = (1,1,1) ) nDots = 24 radius = 4 center = (0,0) sameEachTime = True #(nDots, radius, center, stimulusObject, sameEachTime = True) stimuli = drawStimuli(nDots, radius, center, stimulus, sameEachTime) #print(stimuli) #print('length of stimuli object', len(stimuli)) ######Create visual objects, noise masks, response prompts etc. ########### ######Draw your stimuli here if they don't change across trials, but other parameters do (like timing or distance) ######If you want to automate your stimuli. Do it in a function below and save clutter. ######For instance, maybe you want random pairs of letters. Write a function! ########################################################################### fixSize = .1 fixation= visual.Circle(myWin, radius = fixSize , fillColor = (1,1,1), units=units) cue = visual.Circle(myWin, radius = radius + 2, fillColor = None, lineColor = (1,1,1), units = units) ###Trial timing parameters SOAMS = 333.333 itemMS = 111.111 ISIMS = SOAMS - itemMS trialMS = SOAMS * nDots cueMS = itemMS SOAFrames = int(np.floor(SOAMS/(1000./refreshRate))) itemFrames = int(np.floor(itemMS/(1000./refreshRate))) ISIFrames = int(np.floor(ISIMS/(1000./refreshRate))) trialFrames = int(nDots*SOAFrames) cueFrames = int(np.floor(cueMS/(1000./refreshRate))) print('cueFrames=',cueFrames) print('itemFrames=',itemFrames) print('refreshRate =', refreshRate) print('cueMS from frames =', cueFrames*(1000./refreshRate)) print('num of SOAs in the trial:', trialFrames/SOAFrames) ##Factorial design numResponsesPerTrial = 1 #default. Used to create headers for dataFile stimList = [] #cuePositions = [dot for dot in range(nDots) if dot not in [0,nDots-1]] cuePositions = [10] print('cuePositions: ',cuePositions) #cuePositions = cuePositions[2:(nDots-3)] #drop the first and final two dots #Set up the factorial design (list of all conditions) for cuePos in cuePositions: stimList.append({'cuePos':cuePos}) trials = data.TrialHandler(stimList, nReps = trialsPerCondition) #print(trials) ####Create output file### ######################################################################### dataFile = open(fileNameWithPath + '.txt', 'w') numResponsesPerTrial = 1 #headers for initial datafile rows, they don't get repeated. These appear in the file in the order they appear here. 
oneOffHeaders = [ 'subject', 'task', 'staircase', 'trialNum' ] for header in oneOffHeaders: print(header, '\t', end='', file=dataFile) #Headers for duplicated datafile rows. These are repeated using numResponsesPerTrial. For instance, we might have two responses in a trial. duplicatedHeaders = [ 'responseSpatialPos', 'responseX', 'responseY', 'correctX', 'correctY', 'clickX', 'clickY', 'accuracy', 'responsePosInStream', 'correctPosInStream' ] if numResponsesPerTrial == 1: for header in duplicatedHeaders: print(header, '\t', end='', file=dataFile) elif numResponsesPerTrial > 1: for response in range(numResponsesPerTrial): for header in duplicatedHeaders: print(header+str(response), '\t', end='', file=dataFile) for pos in range(nDots): print('position'+str(pos),'\t',end='',file=dataFile) #Headers done. Do a new line print('longFrames',file=dataFile) expStop = False trialNum=0; numTrialsCorrect=0; expStop=False; framesSaved=0; print('Starting experiment of',trials.nTotal,'trials. Current trial is trial ',trialNum) #NextRemindCountText.setText( str(trialNum) + ' of ' + str(trials.nTotal) ) #NextRemindCountText.draw() myWin.flip() #end of header trialClock = core.Clock() stimClock = core.Clock() if eyeTracking: if getEyeTrackingFileFromEyetrackingMachineAtEndOfExperiment: eyeMoveFile=('EyeTrack_'+subject+'_'+timeAndDateStr+'.EDF') tracker=Tracker_EyeLink(myWin,trialClock,subject,1, 'HV5',(255,255,255),(0,0,0),False,(widthPix,heightPix)) while trialNum < trials.nTotal and expStop==False: fixation.draw() myWin.flip() if not autopilot: core.wait(1) trial = trials.next() # print('trial idx is',trials.thisIndex) cuePos = trial.cuePos # print(cuePos) print("Doing trialNum",trialNum) trialDone, trialStimuli, trialStimuliOrder, ts = oneTrial(stimuli) #Shift positions so that the list starts at 1, which is positioned at (0,radius), and increases clockwise. This is what the MM code expects MMPositions = list() #Mixture modelling positions for dotPos in trialStimuliOrder: if dotPos < (nDots/4 - 1): #Because python indexes start at 0, 5 is the 6th pos. 
MMPositions.append(dotPos + 20) elif dotPos >= (nDots/4 -1): MMPositions.append(dotPos -4) nBlips = checkTiming(ts) # print(trialStimuliOrder) if trialDone: accuracy, response, expStop, clickPos = getResponse(trialStimuli) responseCoord = response.pos.tolist() spatialRelativeToXAxis = [item.pos.tolist() for item in stimuli] try: responseSpatialRelativeToXAxis = spatialRelativeToXAxis.index(responseCoord) except ValueError: print('coord not in list') if responseSpatialRelativeToXAxis < (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis + 20 elif responseSpatialRelativeToXAxis >= (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis - 4 trialPositions = [item.pos.tolist() for item in trialStimuli] responseTemporal = trialPositions.index(responseCoord) # print('trial positions in sequence:',trialPositions) # print('position of item nearest to click:',responseSpatial) # print('Position in sequence of item nearest to click:',responseTemporal) correctSpatial = trialStimuli[cuePos].pos correctTemporal = cuePos print(subject,'\t', 'dot-jump','\t', 'False','\t', trialNum,'\t', responseSpatial,'\t', responseCoord[0],'\t', responseCoord[1],'\t', correctSpatial[0],'\t', correctSpatial[1],'\t', clickPos[0],'\t', clickPos[1],'\t', accuracy,'\t', responseTemporal,'\t', correctTemporal,'\t', end='', file = dataFile ) for dot in range(nDots): print(MMPositions[dot], '\t',end='', file=dataFile) print(nBlips, file=dataFile) trialNum += 1 dataFile.flush() if expStop: dataFile.flush()
cueFrame = cuePos * SOAFrames cueMax = cueFrame + cueFrames showIdx = int(np.floor(n/SOAFrames)) #objectIdxs = [i for i in range(len(trialObjects))] #objectIdxs.append(len(trialObjects)-1) #AWFUL hack #print(objectIdxs[showIdx]) #floored quotient obj = trialObjects[showIdx] drawObject = n%SOAFrames < itemFrames if drawObject: myWin.color = bgColor if n >= cueFrame and n < cueMax: #print('cueFrames! n is', n,'. cueFrame is ,', cueFrame, 'cueFrame + cueFrames is ', (cueFrame + cueFrames)) #if n%2 == 0: #This should make it flash, but it might be too fast #print('cue flash') #myWin.color = (0,0,0) obj.draw() cue.draw() else: obj.draw() return True #objects: Stimuli to display or #cue: cue stimulus or stimuli #timing parameters: Could be item duration, soa and isi. i.e. if SOA+Duration % n == 0: stimulus.setColor(stimulusColor) #bgColor and stimulusColor: if displaying and hiding stimuli, i.e. for RSVP #movementVector: direction and distance of movement if moving stimuli
identifier_body
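Here the fim_type is identifier_body: the middle is the entire body of oneFrameOfStim, with the prefix ending at the def header and the suffix resuming at the next definition. A hypothetical sketch (Python 3.8+) of carving out such a span with the stdlib ast module is below; it assumes the body starts on its own line, and it is not necessarily how this dataset was produced.

import ast

source = "def oneTrial(stimuli):\n    ts = []\n    return ts\n"
func = ast.parse(source).body[0]          # the FunctionDef node
lines = source.splitlines(keepends=True)
body_start = func.body[0].lineno - 1      # first statement of the body
prefix = "".join(lines[:body_start])      # everything through the def header
middle = "".join(lines[body_start:func.end_lineno])   # the function body
suffix = "".join(lines[func.end_lineno:]) # rest of the file (empty here)
assert prefix + middle + suffix == source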
test_dot-jump25Oct2016_10-53.py
from __future__ import print_function __author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor import time, sys, platform, os from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees import numpy as np import psychopy, psychopy.info import copy from psychopy import visual, sound, monitors, logging, gui, event, core, data try: from helpersAOH import accelerateComputer, openMyStimWindow except Exception as e: print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file') print('Current directory is ',os.getcwd()) eyeTracking = False if eyeTracking: try: import eyelinkEyetrackerForPsychopySUPA3 except Exception as e: print(e) print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file') print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples') #Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site eyeTracking = False expname= "dot-jump" demo = False; exportImages = False autopilot = False subject='test' ############################### ### Setup the screen parameters ############################################################################################## ## allowGUI = False units='deg' #'cm' fullscrn=False waitBlank=False if True: #just so I can indent all the below refreshRate= 85 *1.0; #160 #set to the framerate of the monitor fullscrn=True; #show in small window (0) or full screen (1) scrn=True #which screen to display the stimuli. 
0 is home screen, 1 is second screen # create a dialog from dictionary infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate } OK = gui.DlgFromDict(dictionary=infoFirst, title='MOT', order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'], tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating', 'Use second Screen': ''}, ) if not OK.OK: print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit() autopilot = infoFirst['Autopilot'] checkRefreshEtc = infoFirst['Check refresh etc'] scrn = infoFirst['Use second screen'] print('scrn = ',scrn, ' from dialog box') fullscrn = infoFirst['Fullscreen (timing errors if not)'] refreshRate = infoFirst['Screen refresh rate'] #monitor parameters widthPix = 1280 #1440 #monitor width in pixels heightPix =1024 #900 #monitor height in pixels monitorwidth = 40.5 #28.5 #monitor width in centimeters viewdist = 55.; #cm pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180) bgColor = [-1,-1,-1] #black background monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor mon.setSizePix( (widthPix,heightPix) ) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) myWin.setRecordFrameIntervals(False) trialsPerCondition = 2 #default value refreshMsg2 = '' if not checkRefreshEtc: refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED' refreshRateWrong = False else: #checkRefreshEtc runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) print('Finished runInfo- which assesses the refresh and processes of this computer') refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) ) refreshRateTolerancePct = 3 pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate) refreshRateWrong = pctOff > (refreshRateTolerancePct/100.) if refreshRateWrong: refreshMsg1 += ' BUT' refreshMsg1 += ' program assumes ' + str(refreshRate) refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!' 
else: refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) ) myWinRes = myWin.size myWin.allowGUI =True myWin.close() #have to close window to show dialog box ## ### END Setup of the screen parameters ############################################################################################## #################################### askUserAndConfirmExpParams = True if autopilot: subject = 'autoTest' ############################### ### Ask user exp params ############################################################################################## ## askUserAndConfirmExpParams if askUserAndConfirmExpParams: dlgLabelsOrdered = list() #new dialog box myDlg = gui.Dlg(title=expname, pos=(200,400)) if not autopilot: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') else: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue') myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?') myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue') myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?') dlgLabelsOrdered.append('autoPilotTime') dlgLabelsOrdered.append('randomTime') dlgLabelsOrdered.append('autoPilotSpace') dlgLabelsOrdered.append('randomSpace') myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition)) dlgLabelsOrdered.append('trialsPerCondition') pctCompletedBreak = 50 myDlg.addText(refreshMsg1, color='Black') if refreshRateWrong: myDlg.addText(refreshMsg2, color='Red') msgWrongResolution = '' if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any(): msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1]) myDlg.addText(msgWrongResolution, color='Red') print(msgWrongResolution); logging.info(msgWrongResolution) myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84 #myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions myDlg.show() if myDlg.OK: #unpack information from dialogue box thisInfo = myDlg.data #this will be a list of data returned from each field added in order if autopilot: name=thisInfo[dlgLabelsOrdered.index('subject')] if len(name) > 0: #if entered something subject = name #change subject default name to what user entered trialsPerCondition = int( thisInfo[ dlgLabelsOrdered.index('trialsPerCondition') ] ) #convert string to integer autoSpace = thisInfo[dlgLabelsOrdered.index('autoPilotSpace')] autoTime = thisInfo[dlgLabelsOrdered.index('autoPilotTime')] randomTime = thisInfo[dlgLabelsOrdered.index('randomTime')] randomSpace = thisInfo[dlgLabelsOrdered.index('randomSpace')] print('trialsPerCondition=',trialsPerCondition) logging.info('trialsPerCondition ='+str(trialsPerCondition)) else: print('User cancelled from dialog box.'); logging.info('User cancelled from dialog box') logging.flush() core.quit() ### Ask user exp params ## END askUserAndConfirmExpParams ############################### ############################################################################################## if os.path.isdir('.'+os.sep+'dataRaw'): dataDir='dataRaw' else: msg= 'dataRaw directory does not exist, so 
saving data in present working directory' print(msg); logging.info(msg) dataDir='.' timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime()) fileNameWithPath = dataDir+os.sep+subject+ '_' + expname+timeAndDateStr if not demo and not exportImages: saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileNameWithPath + '.py' os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run logF = logging.LogFile(fileNameWithPath+'.log', filemode='w',#if you set this to 'a' it will append instead of overwriting level=logging.INFO)#info, data, warnings, and errors will be sent to this logfile if demo or exportImages: logging.console.setLevel(logging.ERROR) #only show this level's and higher messages logging.console.setLevel(logging.WARNING) #DEBUG means set the console to receive nearly all messges, INFO is for everything else, INFO, EXP, DATA, WARNING and ERROR if refreshRateWrong: logging.error(refreshMsg1+refreshMsg2) else: logging.info(refreshMsg1+refreshMsg2) longerThanRefreshTolerance = 0.27 longFrameLimit = round(1000./refreshRate*(1.0+longerThanRefreshTolerance),3) # round(1000/refreshRate*1.5,2) msg = 'longFrameLimit='+ str(longFrameLimit) +' Recording trials where one or more interframe interval exceeded this figure ' logging.info(msg); print(msg) if msgWrongResolution != '': logging.error(msgWrongResolution) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) msg = 'second window opening runInfo mean ms='+ str( runInfo["windowRefreshTimeAvg_ms"] ) logging.info(msg); print(msg) logging.info(runInfo) logging.info('gammaGrid='+str(mon.getGammaGrid())) logging.info('linearizeMethod='+str(mon.getLinearizeMethod())) ####Functions. Save time by automating processes like stimulus creation and ordering ############################################################################ def oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, trialObjects): cueFrame = cuePos * SOAFrames cueMax = cueFrame + cueFrames showIdx = int(np.floor(n/SOAFrames)) #objectIdxs = [i for i in range(len(trialObjects))] #objectIdxs.append(len(trialObjects)-1) #AWFUL hack #print(objectIdxs[showIdx]) #floored quotient obj = trialObjects[showIdx] drawObject = n%SOAFrames < itemFrames if drawObject: myWin.color = bgColor if n >= cueFrame and n < cueMax: #print('cueFrames! n is', n,'. cueFrame is ,', cueFrame, 'cueFrame + cueFrames is ', (cueFrame + cueFrames)) #if n%2 == 0: #This should make it flash, but it might be too fast #print('cue flash') #myWin.color = (0,0,0) obj.draw() cue.draw() else: obj.draw() return True #objects: Stimuli to display or #cue: cue stimulus or stimuli #timing parameters: Could be item duration, soa and isi. i.e. if SOA+Duration % n == 0: stimulus.setColor(stimulusColor) #bgColor and stimulusColor: if displaying and hiding stimuli, i.e. 
for RSVP #movementVector: direction and distance of movement if moving stimuli def oneTrial(stimuli): dotOrder = np.arange(len(stimuli)) np.random.shuffle(dotOrder) print(dotOrder) shuffledStimuli = [stimuli[i] for i in dotOrder] ts = [] myWin.flip(); myWin.flip() #Make sure raster at top of screen (unless not in blocking mode), and give CPU a chance to finish other tasks t0 = trialClock.getTime() for n in range(trialFrames): fixation.draw() #print(n//SOAFrames) oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, shuffledStimuli) myWin.flip() ts.append(trialClock.getTime() - t0) return True, shuffledStimuli, dotOrder, ts def getResponse(trialStimuli): if autopilot: spacing = 360./nDots autoResponseIdx = cuePos + autoTime #The serial position of the response in the stream if randomTime: autoResponseIdx += int(round( np.random.normal(0,2) )) itemAtTemporalSelection = trialStimuli[autoResponseIdx] unshuffledPositions = [dot.pos.tolist() for dot in stimuli] itemSpatial = unshuffledPositions.index(itemAtTemporalSelection.pos.tolist()) itemSpatial = itemSpatial + autoSpace if randomSpace: itemSpatial += int(round( np.random.normal(0,2) )) while itemSpatial>23: itemSpatial = itemSpatial - 23 #Once we have temporal pos of selected item relative to start of the trial #Need to get the serial spatial pos of this item, so that we can select items around it based on the autoSpace offset #print('itemSpatial is: ', itemSpatial) selectionTemporal = trialStimuli.index(stimuli[itemSpatial]) #This seems redundant, but it tests that the item we've selected in space is the cued item in time. if the temporal and spatial offsets are 0, it should be the same as cuePos. accuracy = cuePos == selectionTemporal mousePos = (stimuli[itemSpatial].pos[0],stimuli[itemSpatial].pos[1]) expStop = False item = stimuli[itemSpatial] return accuracy, item, expStop, mousePos elif not autopilot: myMouse = event.Mouse(visible = False,win=myWin) responded = False expStop = False event.clearEvents() mousePos = (1e6,1e6) escape = event.getKeys() myMouse.setPos((0,0)) myMouse.setVisible(True) while not responded: for item in trialStimuli: item.draw() myWin.flip() button = myMouse.getPressed() mousePos = myMouse.getPos() escapeKey = event.getKeys() if button[0]: print('click detected') responded = True print('getResponse mousePos:',mousePos) elif len(escapeKey)>0: if escapeKey[0] == 'space' or escapeKey[0] == 'ESCAPE': expStop = True responded = True return False, np.random.choice(trialStimuli), expStop, (0,0) clickDistances = [] for item in trialStimuli: x = mousePos[0] - item.pos[0] y = mousePos[1] - item.pos[1] distance = sqrt(x**2 + y**2) clickDistances.append(distance) if not expStop: minDistanceIdx = clickDistances.index(min(clickDistances)) accuracy = minDistanceIdx == cuePos item = trialStimuli[minDistanceIdx] myMouse.setVisible(False) return accuracy, item, expStop, mousePos def drawStimuli(nDots, radius, center, stimulusObject, sameEachTime = True): if len(center) > 2 or len(center) < 2: print('Center coords must be list of length 2') return None if not sameEachTime and not isinstance(stimulusObject, (list, tuple)): print('You want different objects in each position, but your stimuli is not a list or tuple') return None if not sameEachTime and isinstance(stimulusObject, (list, tuple)) and len(stimulusObject)!=nDots: print('You want different objects in each position, but the number of positions does not equal the number of items') return None spacing = 360./nDots stimuli = [] for dot in range(nDots): #have to specify 
positions for multiples of 90deg because python (computers in general?) can't store exact value of pi and thus cos(pi/2) = 6.123e-17, not 0 angle = dot*spacing if angle == 0: xpos = radius ypos = 0 elif angle == 90: xpos = 0 ypos = radius elif angle == 180: xpos = -radius ypos = 0 elif angle == 270: xpos = 0 ypos = -radius elif angle%90!=0: xpos = radius*cos(radians(angle)) ypos = radius*sin(radians(angle)) if sameEachTime: stim = copy.copy(stimulusObject) elif not sameEachTime: stim = stimulusObject[dot] stim.pos = (xpos,ypos) stimuli.append(stim) return stimuli def checkTiming(ts): interframeIntervals = np.diff(ts) * 1000 #print(interframeIntervals) frameTimeTolerance=.3 #proportion longer than refreshRate that will not count as a miss longFrameLimit = np.round(1000/refreshRate*(1.0+frameTimeTolerance),2) idxsInterframeLong = np.where( interframeIntervals > longFrameLimit ) [0] #frames that exceeded 150% of expected duration numCasesInterframeLong = len( idxsInterframeLong ) if numCasesInterframeLong > 0: print(numCasesInterframeLong,'frames of', trialFrames,'were longer than',str(1000/refreshRate*(1.0+frameTimeTolerance))) return numCasesInterframeLong ##Set up stimuli stimulus = visual.Circle(myWin, radius = .2, fillColor = (1,1,1) ) nDots = 24 radius = 4 center = (0,0) sameEachTime = True #(nDots, radius, center, stimulusObject, sameEachTime = True) stimuli = drawStimuli(nDots, radius, center, stimulus, sameEachTime) #print(stimuli) #print('length of stimuli object', len(stimuli)) ######Create visual objects, noise masks, response prompts etc. ########### ######Draw your stimuli here if they don't change across trials, but other parameters do (like timing or distance) ######If you want to automate your stimuli. Do it in a function below and save clutter. ######For instance, maybe you want random pairs of letters. Write a function! ########################################################################### fixSize = .1 fixation= visual.Circle(myWin, radius = fixSize , fillColor = (1,1,1), units=units) cue = visual.Circle(myWin, radius = radius + 2, fillColor = None, lineColor = (1,1,1), units = units) ###Trial timing parameters SOAMS = 333.333 itemMS = 111.111 ISIMS = SOAMS - itemMS trialMS = SOAMS * nDots cueMS = itemMS SOAFrames = int(np.floor(SOAMS/(1000./refreshRate))) itemFrames = int(np.floor(itemMS/(1000./refreshRate))) ISIFrames = int(np.floor(ISIMS/(1000./refreshRate))) trialFrames = int(nDots*SOAFrames) cueFrames = int(np.floor(cueMS/(1000./refreshRate))) print('cueFrames=',cueFrames) print('itemFrames=',itemFrames) print('refreshRate =', refreshRate) print('cueMS from frames =', cueFrames*(1000./refreshRate)) print('num of SOAs in the trial:', trialFrames/SOAFrames) ##Factorial design numResponsesPerTrial = 1 #default. Used to create headers for dataFile stimList = [] #cuePositions = [dot for dot in range(nDots) if dot not in [0,nDots-1]] cuePositions = [10] print('cuePositions: ',cuePositions) #cuePositions = cuePositions[2:(nDots-3)] #drop the first and final two dots #Set up the factorial design (list of all conditions) for cuePos in cuePositions: stimList.append({'cuePos':cuePos}) trials = data.TrialHandler(stimList, nReps = trialsPerCondition) #print(trials) ####Create output file### ######################################################################### dataFile = open(fileNameWithPath + '.txt', 'w') numResponsesPerTrial = 1 #headers for initial datafile rows, they don't get repeated. These appear in the file in the order they appear here. 
oneOffHeaders = [ 'subject', 'task', 'staircase', 'trialNum' ] for header in oneOffHeaders: print(header, '\t', end='', file=dataFile) #Headers for duplicated datafile rows. These are repeated using numResponsesPerTrial. For instance, we might have two responses in a trial. duplicatedHeaders = [ 'responseSpatialPos', 'responseX', 'responseY', 'correctX', 'correctY', 'clickX', 'clickY', 'accuracy', 'responsePosInStream', 'correctPosInStream' ] if numResponsesPerTrial == 1: for header in duplicatedHeaders: print(header, '\t', end='', file=dataFile) elif numResponsesPerTrial > 1: for response in range(numResponsesPerTrial): for header in duplicatedHeaders: print(header+str(response), '\t', end='', file=dataFile) for pos in range(nDots): print('position'+str(pos),'\t',end='',file=dataFile) #Headers done. Do a new line print('longFrames',file=dataFile) expStop = False trialNum=0; numTrialsCorrect=0; expStop=False; framesSaved=0; print('Starting experiment of',trials.nTotal,'trials. Current trial is trial ',trialNum) #NextRemindCountText.setText( str(trialNum) + ' of ' + str(trials.nTotal) ) #NextRemindCountText.draw()
#end of header trialClock = core.Clock() stimClock = core.Clock() if eyeTracking: if getEyeTrackingFileFromEyetrackingMachineAtEndOfExperiment: eyeMoveFile=('EyeTrack_'+subject+'_'+timeAndDateStr+'.EDF') tracker=Tracker_EyeLink(myWin,trialClock,subject,1, 'HV5',(255,255,255),(0,0,0),False,(widthPix,heightPix)) while trialNum < trials.nTotal and expStop==False: fixation.draw() myWin.flip() if not autopilot: core.wait(1) trial = trials.next() # print('trial idx is',trials.thisIndex) cuePos = trial.cuePos # print(cuePos) print("Doing trialNum",trialNum) trialDone, trialStimuli, trialStimuliOrder, ts = oneTrial(stimuli) #Shift positions so that the list starts at 1, which is positioned at (0,radius), and increases clockwise. This is what the MM code expects MMPositions = list() #Mixture modelling positions for dotPos in trialStimuliOrder: if dotPos < (nDots/4 - 1): #Because python indexes start at 0, 5 is the 6th pos. MMPositions.append(dotPos + 20) elif dotPos >= (nDots/4 -1): MMPositions.append(dotPos -4) nBlips = checkTiming(ts) # print(trialStimuliOrder) if trialDone: accuracy, response, expStop, clickPos = getResponse(trialStimuli) responseCoord = response.pos.tolist() spatialRelativeToXAxis = [item.pos.tolist() for item in stimuli] try: responseSpatialRelativeToXAxis = spatialRelativeToXAxis.index(responseCoord) except ValueError: print('coord not in list') if responseSpatialRelativeToXAxis < (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis + 20 elif responseSpatialRelativeToXAxis >= (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis - 4 trialPositions = [item.pos.tolist() for item in trialStimuli] responseTemporal = trialPositions.index(responseCoord) # print('trial positions in sequence:',trialPositions) # print('position of item nearest to click:',responseSpatial) # print('Position in sequence of item nearest to click:',responseTemporal) correctSpatial = trialStimuli[cuePos].pos correctTemporal = cuePos print(subject,'\t', 'dot-jump','\t', 'False','\t', trialNum,'\t', responseSpatial,'\t', responseCoord[0],'\t', responseCoord[1],'\t', correctSpatial[0],'\t', correctSpatial[1],'\t', clickPos[0],'\t', clickPos[1],'\t', accuracy,'\t', responseTemporal,'\t', correctTemporal,'\t', end='', file = dataFile ) for dot in range(nDots): print(MMPositions[dot], '\t',end='', file=dataFile) print(nBlips, file=dataFile) trialNum += 1 dataFile.flush() if expStop: dataFile.flush()
myWin.flip()
random_line_split
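This row's fim_type is random_line_split: the cut falls on an arbitrary line (myWin.flip()), with the prefix and suffix taking everything before and after it. One plausible sketch of drawing such a cut, assuming the middle is exactly one line; the dataset's actual splitting code is not shown here.

import random

def random_line_split(source: str, rng: random.Random):
    lines = source.splitlines(keepends=True)
    i = rng.randrange(len(lines))         # cut at one randomly chosen line
    return "".join(lines[:i]), lines[i], "".join(lines[i + 1:])

prefix, middle, suffix = random_line_split(
    "fixation.draw()\nmyWin.flip()\ncore.wait(1)\n", random.Random(0)
)
assert prefix + middle + suffix == "fixation.draw()\nmyWin.flip()\ncore.wait(1)\n"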
test_dot-jump25Oct2016_10-53.py
from __future__ import print_function __author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor import time, sys, platform, os from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees import numpy as np import psychopy, psychopy.info import copy from psychopy import visual, sound, monitors, logging, gui, event, core, data try: from helpersAOH import accelerateComputer, openMyStimWindow except Exception as e: print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file') print('Current directory is ',os.getcwd()) eyeTracking = False if eyeTracking: try: import eyelinkEyetrackerForPsychopySUPA3 except Exception as e: print(e) print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file') print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples') #Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site eyeTracking = False expname= "dot-jump" demo = False; exportImages = False autopilot = False subject='test' ############################### ### Setup the screen parameters ############################################################################################## ## allowGUI = False units='deg' #'cm' fullscrn=False waitBlank=False if True: #just so I can indent all the below refreshRate= 85 *1.0; #160 #set to the framerate of the monitor fullscrn=True; #show in small window (0) or full screen (1) scrn=True #which screen to display the stimuli. 
0 is home screen, 1 is second screen # create a dialog from dictionary infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate } OK = gui.DlgFromDict(dictionary=infoFirst, title='MOT', order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'], tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating', 'Use second Screen': ''}, ) if not OK.OK: print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit() autopilot = infoFirst['Autopilot'] checkRefreshEtc = infoFirst['Check refresh etc'] scrn = infoFirst['Use second screen'] print('scrn = ',scrn, ' from dialog box') fullscrn = infoFirst['Fullscreen (timing errors if not)'] refreshRate = infoFirst['Screen refresh rate'] #monitor parameters widthPix = 1280 #1440 #monitor width in pixels heightPix =1024 #900 #monitor height in pixels monitorwidth = 40.5 #28.5 #monitor width in centimeters viewdist = 55.; #cm pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180) bgColor = [-1,-1,-1] #black background monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor mon.setSizePix( (widthPix,heightPix) ) myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank) myWin.setRecordFrameIntervals(False) trialsPerCondition = 2 #default value refreshMsg2 = '' if not checkRefreshEtc: refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED' refreshRateWrong = False else: #checkRefreshEtc runInfo = psychopy.info.RunTimeInfo( win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips() refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen) verbose=True, ## True means report on everything userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes ) print('Finished runInfo- which assesses the refresh and processes of this computer') refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) ) refreshRateTolerancePct = 3 pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate) refreshRateWrong = pctOff > (refreshRateTolerancePct/100.) if refreshRateWrong: refreshMsg1 += ' BUT' refreshMsg1 += ' program assumes ' + str(refreshRate) refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!' 
else: refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) ) myWinRes = myWin.size myWin.allowGUI =True myWin.close() #have to close window to show dialog box ## ### END Setup of the screen parameters ############################################################################################## #################################### askUserAndConfirmExpParams = True if autopilot: subject = 'autoTest' ############################### ### Ask user exp params ############################################################################################## ## askUserAndConfirmExpParams if askUserAndConfirmExpParams: dlgLabelsOrdered = list() #new dialog box myDlg = gui.Dlg(title=expname, pos=(200,400)) if not autopilot: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') else: myDlg.addField('Subject code :', subject) dlgLabelsOrdered.append('subject') myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue') myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?') myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue') myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?') dlgLabelsOrdered.append('autoPilotTime') dlgLabelsOrdered.append('randomTime') dlgLabelsOrdered.append('autoPilotSpace') dlgLabelsOrdered.append('randomSpace') myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition)) dlgLabelsOrdered.append('trialsPerCondition') pctCompletedBreak = 50 myDlg.addText(refreshMsg1, color='Black') if refreshRateWrong: myDlg.addText(refreshMsg2, color='Red') msgWrongResolution = '' if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any(): msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1]) myDlg.addText(msgWrongResolution, color='Red') print(msgWrongResolution); logging.info(msgWrongResolution) myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84 #myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions myDlg.show() if myDlg.OK: #unpack information from dialogue box thisInfo = myDlg.data #this will be a list of data returned from each field added in order if autopilot: name=thisInfo[dlgLabelsOrdered.index('subject')] if len(name) > 0: #if entered something subject = name #change subject default name to what user entered trialsPerCondition = int( thisInfo[ dlgLabelsOrdered.index('trialsPerCondition') ] ) #convert string to integer autoSpace = thisInfo[dlgLabelsOrdered.index('autoPilotSpace')] autoTime = thisInfo[dlgLabelsOrdered.index('autoPilotTime')] randomTime = thisInfo[dlgLabelsOrdered.index('randomTime')] randomSpace = thisInfo[dlgLabelsOrdered.index('randomSpace')] print('trialsPerCondition=',trialsPerCondition) logging.info('trialsPerCondition ='+str(trialsPerCondition)) else: print('User cancelled from dialog box.'); logging.info('User cancelled from dialog box') logging.flush() core.quit() ### Ask user exp params ## END askUserAndConfirmExpParams ############################### ############################################################################################## if os.path.isdir('.'+os.sep+'dataRaw'): dataDir='dataRaw' else: msg= 'dataRaw directory does not exist, so 
saving data in present working directory'
    print(msg); logging.info(msg)
    dataDir='.'
timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime())
fileNameWithPath = dataDir+os.sep+subject+ '_' + expname+timeAndDateStr
if not demo and not exportImages:
    saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileNameWithPath + '.py'
    os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run
    logF = logging.LogFile(fileNameWithPath+'.log',
        filemode='w', #if you set this to 'a' it will append instead of overwriting
        level=logging.INFO) #info, data, warnings, and errors will be sent to this logfile
if demo or exportImages:
    logging.console.setLevel(logging.ERROR) #only show this level's messages and higher
logging.console.setLevel(logging.WARNING) #DEBUG means set the console to receive nearly all messages; INFO is for everything else (INFO, EXP, DATA, WARNING and ERROR)
if refreshRateWrong:
    logging.error(refreshMsg1+refreshMsg2)
else:
    logging.info(refreshMsg1+refreshMsg2)
longerThanRefreshTolerance = 0.27
longFrameLimit = round(1000./refreshRate*(1.0+longerThanRefreshTolerance),3) # round(1000/refreshRate*1.5,2)
msg = 'longFrameLimit='+ str(longFrameLimit) +' Recording trials where one or more interframe interval exceeded this figure'
logging.info(msg); print(msg)
if msgWrongResolution != '':
    logging.error(msgWrongResolution)
myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank)
runInfo = psychopy.info.RunTimeInfo(
    win=myWin,    ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
    refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
    verbose=True, ## True means report on everything
    userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
    )
msg = 'second window opening runInfo mean ms='+ str( runInfo["windowRefreshTimeAvg_ms"] )
logging.info(msg); print(msg)
logging.info(runInfo)
logging.info('gammaGrid='+str(mon.getGammaGrid()))
logging.info('linearizeMethod='+str(mon.getLinearizeMethod()))
####Functions. Save time by automating processes like stimulus creation and ordering
############################################################################
def oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, trialObjects):
    cueFrame = cuePos * SOAFrames
    cueMax = cueFrame + cueFrames
    showIdx = int(np.floor(n/SOAFrames)) #floored quotient: index of the item currently on screen
    #objectIdxs = [i for i in range(len(trialObjects))]
    #objectIdxs.append(len(trialObjects)-1) #AWFUL hack
    #print(objectIdxs[showIdx])
    obj = trialObjects[showIdx]
    drawObject = n%SOAFrames < itemFrames
    if drawObject:
        myWin.color = bgColor
        if n >= cueFrame and n < cueMax:
            #print('cueFrames! n is', n,'. cueFrame is ,', cueFrame, 'cueFrame + cueFrames is ', (cueFrame + cueFrames))
            #if n%2 == 0: #This should make it flash, but it might be too fast
                #print('cue flash')
                #myWin.color = (0,0,0)
            obj.draw()
            cue.draw()
        else:
            obj.draw()
    return True
#objects: Stimuli to display or
#cue: cue stimulus or stimuli
#timing parameters: Could be item duration, soa and isi. i.e. if SOA+Duration % n == 0: stimulus.setColor(stimulusColor)
#bgColor and stimulusColor: if displaying and hiding stimuli, i.e. for RSVP
#movementVector: direction and distance of movement if moving stimuli
def
(stimuli):
    dotOrder = np.arange(len(stimuli))
    np.random.shuffle(dotOrder)
    print(dotOrder)
    shuffledStimuli = [stimuli[i] for i in dotOrder]
    ts = []
    myWin.flip(); myWin.flip() #Make sure raster at top of screen (unless not in blocking mode), and give CPU a chance to finish other tasks
    t0 = trialClock.getTime()
    for n in range(trialFrames):
        fixation.draw()
        #print(n//SOAFrames)
        oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, shuffledStimuli)
        myWin.flip()
        ts.append(trialClock.getTime() - t0)
    return True, shuffledStimuli, dotOrder, ts
def getResponse(trialStimuli):
    if autopilot:
        spacing = 360./nDots
        autoResponseIdx = cuePos + autoTime #The serial position of the response in the stream
        if randomTime:
            autoResponseIdx += int(round( np.random.normal(0,2) ))
        itemAtTemporalSelection = trialStimuli[autoResponseIdx]
        unshuffledPositions = [dot.pos.tolist() for dot in stimuli]
        itemSpatial = unshuffledPositions.index(itemAtTemporalSelection.pos.tolist())
        itemSpatial = itemSpatial + autoSpace
        if randomSpace:
            itemSpatial += int(round( np.random.normal(0,2) ))
        while itemSpatial >= nDots:
            itemSpatial = itemSpatial - nDots #wrap around the ring of nDots positions
        #Once we have temporal pos of selected item relative to start of the trial
        #Need to get the serial spatial pos of this item, so that we can select items around it based on the autoSpace offset
        #print('itemSpatial is: ', itemSpatial)
        selectionTemporal = trialStimuli.index(stimuli[itemSpatial])
        #This seems redundant, but it tests that the item we've selected in space is the cued item in time. if the temporal and spatial offsets are 0, it should be the same as cuePos.
        accuracy = cuePos == selectionTemporal
        mousePos = (stimuli[itemSpatial].pos[0],stimuli[itemSpatial].pos[1])
        expStop = False
        item = stimuli[itemSpatial]
        return accuracy, item, expStop, mousePos
    elif not autopilot:
        myMouse = event.Mouse(visible = False,win=myWin)
        responded = False
        expStop = False
        event.clearEvents()
        mousePos = (1e6,1e6)
        escape = event.getKeys()
        myMouse.setPos((0,0))
        myMouse.setVisible(True)
        while not responded:
            for item in trialStimuli:
                item.draw()
            myWin.flip()
            button = myMouse.getPressed()
            mousePos = myMouse.getPos()
            escapeKey = event.getKeys()
            if button[0]:
                print('click detected')
                responded = True
                print('getResponse mousePos:',mousePos)
            elif len(escapeKey)>0:
                if escapeKey[0] == 'space' or escapeKey[0] == 'escape': #PsychoPy reports key names in lower case
                    expStop = True
                    responded = True
                    return False, np.random.choice(trialStimuli), expStop, (0,0)
        clickDistances = []
        for item in trialStimuli:
            x = mousePos[0] - item.pos[0]
            y = mousePos[1] - item.pos[1]
            distance = sqrt(x**2 + y**2)
            clickDistances.append(distance)
        if not expStop:
            minDistanceIdx = clickDistances.index(min(clickDistances))
            accuracy = minDistanceIdx == cuePos
            item = trialStimuli[minDistanceIdx]
        myMouse.setVisible(False)
        return accuracy, item, expStop, mousePos
def drawStimuli(nDots, radius, center, stimulusObject, sameEachTime = True):
    if len(center) != 2:
        print('Center coords must be list of length 2')
        return None
    if not sameEachTime and not isinstance(stimulusObject, (list, tuple)):
        print('You want different objects in each position, but your stimuli is not a list or tuple')
        return None
    if not sameEachTime and isinstance(stimulusObject, (list, tuple)) and len(stimulusObject)!=nDots:
        print('You want different objects in each position, but the number of positions does not equal the number of items')
        return None
    spacing = 360./nDots
    stimuli = []
    for dot in range(nDots):
        #have to specify positions for multiples of 90deg because python (computers in general?) 
can't store exact value of pi and thus cos(pi/2) = 6.123e-17, not 0 angle = dot*spacing if angle == 0: xpos = radius ypos = 0 elif angle == 90: xpos = 0 ypos = radius elif angle == 180: xpos = -radius ypos = 0 elif angle == 270: xpos = 0 ypos = -radius elif angle%90!=0: xpos = radius*cos(radians(angle)) ypos = radius*sin(radians(angle)) if sameEachTime: stim = copy.copy(stimulusObject) elif not sameEachTime: stim = stimulusObject[dot] stim.pos = (xpos,ypos) stimuli.append(stim) return stimuli def checkTiming(ts): interframeIntervals = np.diff(ts) * 1000 #print(interframeIntervals) frameTimeTolerance=.3 #proportion longer than refreshRate that will not count as a miss longFrameLimit = np.round(1000/refreshRate*(1.0+frameTimeTolerance),2) idxsInterframeLong = np.where( interframeIntervals > longFrameLimit ) [0] #frames that exceeded 150% of expected duration numCasesInterframeLong = len( idxsInterframeLong ) if numCasesInterframeLong > 0: print(numCasesInterframeLong,'frames of', trialFrames,'were longer than',str(1000/refreshRate*(1.0+frameTimeTolerance))) return numCasesInterframeLong ##Set up stimuli stimulus = visual.Circle(myWin, radius = .2, fillColor = (1,1,1) ) nDots = 24 radius = 4 center = (0,0) sameEachTime = True #(nDots, radius, center, stimulusObject, sameEachTime = True) stimuli = drawStimuli(nDots, radius, center, stimulus, sameEachTime) #print(stimuli) #print('length of stimuli object', len(stimuli)) ######Create visual objects, noise masks, response prompts etc. ########### ######Draw your stimuli here if they don't change across trials, but other parameters do (like timing or distance) ######If you want to automate your stimuli. Do it in a function below and save clutter. ######For instance, maybe you want random pairs of letters. Write a function! ########################################################################### fixSize = .1 fixation= visual.Circle(myWin, radius = fixSize , fillColor = (1,1,1), units=units) cue = visual.Circle(myWin, radius = radius + 2, fillColor = None, lineColor = (1,1,1), units = units) ###Trial timing parameters SOAMS = 333.333 itemMS = 111.111 ISIMS = SOAMS - itemMS trialMS = SOAMS * nDots cueMS = itemMS SOAFrames = int(np.floor(SOAMS/(1000./refreshRate))) itemFrames = int(np.floor(itemMS/(1000./refreshRate))) ISIFrames = int(np.floor(ISIMS/(1000./refreshRate))) trialFrames = int(nDots*SOAFrames) cueFrames = int(np.floor(cueMS/(1000./refreshRate))) print('cueFrames=',cueFrames) print('itemFrames=',itemFrames) print('refreshRate =', refreshRate) print('cueMS from frames =', cueFrames*(1000./refreshRate)) print('num of SOAs in the trial:', trialFrames/SOAFrames) ##Factorial design numResponsesPerTrial = 1 #default. Used to create headers for dataFile stimList = [] #cuePositions = [dot for dot in range(nDots) if dot not in [0,nDots-1]] cuePositions = [10] print('cuePositions: ',cuePositions) #cuePositions = cuePositions[2:(nDots-3)] #drop the first and final two dots #Set up the factorial design (list of all conditions) for cuePos in cuePositions: stimList.append({'cuePos':cuePos}) trials = data.TrialHandler(stimList, nReps = trialsPerCondition) #print(trials) ####Create output file### ######################################################################### dataFile = open(fileNameWithPath + '.txt', 'w') numResponsesPerTrial = 1 #headers for initial datafile rows, they don't get repeated. These appear in the file in the order they appear here. 
oneOffHeaders = [ 'subject', 'task', 'staircase', 'trialNum' ] for header in oneOffHeaders: print(header, '\t', end='', file=dataFile) #Headers for duplicated datafile rows. These are repeated using numResponsesPerTrial. For instance, we might have two responses in a trial. duplicatedHeaders = [ 'responseSpatialPos', 'responseX', 'responseY', 'correctX', 'correctY', 'clickX', 'clickY', 'accuracy', 'responsePosInStream', 'correctPosInStream' ] if numResponsesPerTrial == 1: for header in duplicatedHeaders: print(header, '\t', end='', file=dataFile) elif numResponsesPerTrial > 1: for response in range(numResponsesPerTrial): for header in duplicatedHeaders: print(header+str(response), '\t', end='', file=dataFile) for pos in range(nDots): print('position'+str(pos),'\t',end='',file=dataFile) #Headers done. Do a new line print('longFrames',file=dataFile) expStop = False trialNum=0; numTrialsCorrect=0; expStop=False; framesSaved=0; print('Starting experiment of',trials.nTotal,'trials. Current trial is trial ',trialNum) #NextRemindCountText.setText( str(trialNum) + ' of ' + str(trials.nTotal) ) #NextRemindCountText.draw() myWin.flip() #end of header trialClock = core.Clock() stimClock = core.Clock() if eyeTracking: if getEyeTrackingFileFromEyetrackingMachineAtEndOfExperiment: eyeMoveFile=('EyeTrack_'+subject+'_'+timeAndDateStr+'.EDF') tracker=Tracker_EyeLink(myWin,trialClock,subject,1, 'HV5',(255,255,255),(0,0,0),False,(widthPix,heightPix)) while trialNum < trials.nTotal and expStop==False: fixation.draw() myWin.flip() if not autopilot: core.wait(1) trial = trials.next() # print('trial idx is',trials.thisIndex) cuePos = trial.cuePos # print(cuePos) print("Doing trialNum",trialNum) trialDone, trialStimuli, trialStimuliOrder, ts = oneTrial(stimuli) #Shift positions so that the list starts at 1, which is positioned at (0,radius), and increases clockwise. This is what the MM code expects MMPositions = list() #Mixture modelling positions for dotPos in trialStimuliOrder: if dotPos < (nDots/4 - 1): #Because python indexes start at 0, 5 is the 6th pos. 
MMPositions.append(dotPos + 20) elif dotPos >= (nDots/4 -1): MMPositions.append(dotPos -4) nBlips = checkTiming(ts) # print(trialStimuliOrder) if trialDone: accuracy, response, expStop, clickPos = getResponse(trialStimuli) responseCoord = response.pos.tolist() spatialRelativeToXAxis = [item.pos.tolist() for item in stimuli] try: responseSpatialRelativeToXAxis = spatialRelativeToXAxis.index(responseCoord) except ValueError: print('coord not in list') if responseSpatialRelativeToXAxis < (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis + 20 elif responseSpatialRelativeToXAxis >= (nDots/4-1): responseSpatial = responseSpatialRelativeToXAxis - 4 trialPositions = [item.pos.tolist() for item in trialStimuli] responseTemporal = trialPositions.index(responseCoord) # print('trial positions in sequence:',trialPositions) # print('position of item nearest to click:',responseSpatial) # print('Position in sequence of item nearest to click:',responseTemporal) correctSpatial = trialStimuli[cuePos].pos correctTemporal = cuePos print(subject,'\t', 'dot-jump','\t', 'False','\t', trialNum,'\t', responseSpatial,'\t', responseCoord[0],'\t', responseCoord[1],'\t', correctSpatial[0],'\t', correctSpatial[1],'\t', clickPos[0],'\t', clickPos[1],'\t', accuracy,'\t', responseTemporal,'\t', correctTemporal,'\t', end='', file = dataFile ) for dot in range(nDots): print(MMPositions[dot], '\t',end='', file=dataFile) print(nBlips, file=dataFile) trialNum += 1 dataFile.flush() if expStop: dataFile.flush()
oneTrial
identifier_name
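The trial-timing block in the row above converts millisecond durations to whole frame counts via int(np.floor(ms/(1000./refreshRate))). A minimal standalone sketch of that conversion (assuming a 60 Hz display for the worked values; the script measures refreshRate at runtime, and all names here are illustrative):

import numpy as np

refresh_rate = 60.0                # assumed for the example; the script measures this
frame_ms = 1000.0 / refresh_rate   # one frame lasts ~16.67 ms at 60 Hz

def to_frames(duration_ms):
    # Same flooring rule as the script: durations round DOWN to whole frames,
    # so 333.333 ms becomes 19 frames (~316.7 ms) at 60 Hz, not 20.
    return int(np.floor(duration_ms / frame_ms))

print(to_frames(333.333), to_frames(111.111))   # -> 19 6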
StatusResize.tsx
import * as React from "react"
import styled from "styled-components"

import { withProps } from "./common"

interface IChildDimensions {
    direction: string
    width: number
    id: string
    priority: number
    hide: boolean
}

type PassWidth = (data: IChildDimensions) => void

interface Props {
    children?: React.ReactNode
    className?: string
    passWidth?: PassWidth
    direction: string
}

interface State {
    containerWidth: number
    children: {
        [id: string]: {
            id: string
            width: number
            hide?: boolean
            priority: number
        }
    }
}

interface Section {
    direction: string
    count: boolean
}

const StatusbarSection = withProps<Section>(styled.div)`
    flex: 1 1 auto;
    display: ${({ count }) => (count ? `none` : `flex`)};
    flex-direction: row;
    height: 100%;
    max-width: 48%;
    justify-content: ${props => props.direction};
`

class StatusBarResizer extends React.Component<Props, State> {
    private observer: any
    private elem: Element

    constructor(props: Props) {
        super(props)
        this.state = {
            containerWidth: null,
            children: {},
        }
    }

    public componentDidMount() {
        this.setState({
            containerWidth: this.elem.getBoundingClientRect().width,
        })
        // tslint:disable-next-line
        this.observer = new window["ResizeObserver"](([entry]: any) => {
            this.setState({ containerWidth: entry.contentRect.width }, this.resize)
        })
        this.observer.observe(this.elem)
        this.resize()
    }

    public componentWillUnmount() {
        this.observer.disconnect()
    }

    public render() {
        const { containerWidth } = this.state
        const { children, direction } = this.props
        const count = React.Children.count(children)
        return (
            <StatusbarSection
                direction={direction}
                count={count < 1}
                innerRef={(elem: Element) => (this.elem = elem)}
            >
                {containerWidth !== undefined &&
                    React.Children.map(children, (child: React.ReactElement<any>) => {
                        const current = this.state.children[child.props.id]
                        return React.cloneElement(child, {
                            ...child.props,
                            passWidth: this.passWidth,
                            hide: !!current && current.hide,
                            containerWidth,
                        })
                    })}
            </StatusbarSection>
        )
    }

    private passWidth = (childDimensions: IChildDimensions) => {
        const { width, id, priority, hide } = childDimensions
        this.setState(
            state => ({
                ...state,
                children: {
                    ...state.children,
                    [id]: { id, width, priority, hide },
                },
            }),
            this.resize,
        )
    }

    private resize = () => {
        const { children, containerWidth } = this.state
        const childArray = Object.values(children)
        const sorted = childArray.sort((prev, next) => prev.priority - next.priority)
        // Loop through components sorted by priority; check if each component can be added
        // without overshooting the container width. If so, show the component, otherwise hide it
        const { statusItems } = sorted.reduce(
            (components, item) => {
                let hide
                // add 20 to the truncation width so components are not too snug
                if (components.widths + item.width + 20 < containerWidth) {
                    components.widths += item.width
                    hide = false
                } else
components.statusItems[item.id] = { ...this.state.children[item.id], hide, } return components }, { widths: 0, statusItems: {} }, ) this.setState({ children: statusItems }) } } export default StatusBarResizer
{ hide = true }
conditional_block
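The resize method in this row sorts the registered child widths by priority and greedily accumulates them, hiding every item that would overflow the container. A language-agnostic sketch of that pass (written in Python; the 20 px allowance mirrors the source comment, and all names are illustrative):

def greedy_hide(children, container_width, padding=20):
    # children: iterable of dicts with 'id', 'width' and 'priority' keys.
    # Returns {id: True if the item should be hidden}, mirroring the reduce() above:
    # only an accepted item's width is added to the running total, not the padding.
    used = 0
    hidden = {}
    for item in sorted(children, key=lambda c: c["priority"]):
        if used + item["width"] + padding < container_width:
            used += item["width"]
            hidden[item["id"]] = False
        else:
            hidden[item["id"]] = True
    return hidden

print(greedy_hide(
    [{"id": "mode", "width": 120, "priority": 0},
     {"id": "git", "width": 200, "priority": 1},
     {"id": "lint", "width": 150, "priority": 2}],
    container_width=400))   # -> {'mode': False, 'git': False, 'lint': True}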
StatusResize.tsx
import * as React from "react" import styled from "styled-components" import { withProps } from "./common" interface IChildDimensions { direction: string width: number id: string priority: number hide: boolean } type PassWidth = (data: IChildDimensions) => void interface Props { children?: React.ReactNode className?: string passWidth?: PassWidth direction: string } interface State { containerWidth: number children: { [id: string]: { id: string width: number hide?: boolean priority: number } } } interface Section { direction: string count: boolean } const StatusbarSection = withProps<Section>(styled.div)` flex: 1 1 auto; display: ${({ count }) => (count ? `none` : `flex`)}; flex-direction: row; height: 100%; max-width: 48%; justify-content: ${props => props.direction}; ` class StatusBarResizer extends React.Component<Props, State> { private observer: any private elem: Element constructor(props: Props) { super(props) this.state = { containerWidth: null, children: {}, } } public componentDidMount() { this.setState({ containerWidth: this.elem.getBoundingClientRect().width, }) // tslint:disable-next-line this.observer = new window["ResizeObserver"](([entry]: any) => { this.setState({ containerWidth: entry.contentRect.width }, this.resize) }) this.observer.observe(this.elem) this.resize() } public componentWillUnmount()
public render() {
        const { containerWidth } = this.state
        const { children, direction } = this.props
        const count = React.Children.count(children)
        return (
            <StatusbarSection
                direction={direction}
                count={count < 1}
                innerRef={(elem: Element) => (this.elem = elem)}
            >
                {containerWidth !== undefined &&
                    React.Children.map(children, (child: React.ReactElement<any>) => {
                        const current = this.state.children[child.props.id]
                        return React.cloneElement(child, {
                            ...child.props,
                            passWidth: this.passWidth,
                            hide: !!current && current.hide,
                            containerWidth,
                        })
                    })}
            </StatusbarSection>
        )
    }

    private passWidth = (childDimensions: IChildDimensions) => {
        const { width, id, priority, hide } = childDimensions
        this.setState(
            state => ({
                ...state,
                children: {
                    ...state.children,
                    [id]: { id, width, priority, hide },
                },
            }),
            this.resize,
        )
    }

    private resize = () => {
        const { children, containerWidth } = this.state
        const childArray = Object.values(children)
        const sorted = childArray.sort((prev, next) => prev.priority - next.priority)
        // Loop through components sorted by priority; check if each component can be added
        // without overshooting the container width. If so, show the component, otherwise hide it
        const { statusItems } = sorted.reduce(
            (components, item) => {
                let hide
                // add 20 to the truncation width so components are not too snug
                if (components.widths + item.width + 20 < containerWidth) {
                    components.widths += item.width
                    hide = false
                } else {
                    hide = true
                }
                components.statusItems[item.id] = {
                    ...this.state.children[item.id],
                    hide,
                }
                return components
            },
            { widths: 0, statusItems: {} },
        )
        this.setState({ children: statusItems })
    }
}

export default StatusBarResizer
{ this.observer.disconnect() }
identifier_body
StatusResize.tsx
import * as React from "react" import styled from "styled-components" import { withProps } from "./common" interface IChildDimensions { direction: string width: number id: string priority: number hide: boolean } type PassWidth = (data: IChildDimensions) => void interface Props { children?: React.ReactNode className?: string passWidth?: PassWidth direction: string } interface State { containerWidth: number children: { [id: string]: { id: string width: number hide?: boolean priority: number } } } interface Section { direction: string count: boolean } const StatusbarSection = withProps<Section>(styled.div)` flex: 1 1 auto; display: ${({ count }) => (count ? `none` : `flex`)}; flex-direction: row; height: 100%;
class StatusBarResizer extends React.Component<Props, State> {
    private observer: any
    private elem: Element

    constructor(props: Props) {
        super(props)
        this.state = {
            containerWidth: null,
            children: {},
        }
    }

    public componentDidMount() {
        this.setState({
            containerWidth: this.elem.getBoundingClientRect().width,
        })
        // tslint:disable-next-line
        this.observer = new window["ResizeObserver"](([entry]: any) => {
            this.setState({ containerWidth: entry.contentRect.width }, this.resize)
        })
        this.observer.observe(this.elem)
        this.resize()
    }

    public componentWillUnmount() {
        this.observer.disconnect()
    }

    public render() {
        const { containerWidth } = this.state
        const { children, direction } = this.props
        const count = React.Children.count(children)
        return (
            <StatusbarSection
                direction={direction}
                count={count < 1}
                innerRef={(elem: Element) => (this.elem = elem)}
            >
                {containerWidth !== undefined &&
                    React.Children.map(children, (child: React.ReactElement<any>) => {
                        const current = this.state.children[child.props.id]
                        return React.cloneElement(child, {
                            ...child.props,
                            passWidth: this.passWidth,
                            hide: !!current && current.hide,
                            containerWidth,
                        })
                    })}
            </StatusbarSection>
        )
    }

    private passWidth = (childDimensions: IChildDimensions) => {
        const { width, id, priority, hide } = childDimensions
        this.setState(
            state => ({
                ...state,
                children: {
                    ...state.children,
                    [id]: { id, width, priority, hide },
                },
            }),
            this.resize,
        )
    }

    private resize = () => {
        const { children, containerWidth } = this.state
        const childArray = Object.values(children)
        const sorted = childArray.sort((prev, next) => prev.priority - next.priority)
        // Loop through components sorted by priority; check if each component can be added
        // without overshooting the container width. If so, show the component, otherwise hide it
        const { statusItems } = sorted.reduce(
            (components, item) => {
                let hide
                // add 20 to the truncation width so components are not too snug
                if (components.widths + item.width + 20 < containerWidth) {
                    components.widths += item.width
                    hide = false
                } else {
                    hide = true
                }
                components.statusItems[item.id] = {
                    ...this.state.children[item.id],
                    hide,
                }
                return components
            },
            { widths: 0, statusItems: {} },
        )
        this.setState({ children: statusItems })
    }
}

export default StatusBarResizer
max-width: 48%; justify-content: ${props => props.direction}; `
random_line_split
StatusResize.tsx
import * as React from "react" import styled from "styled-components" import { withProps } from "./common" interface IChildDimensions { direction: string width: number id: string priority: number hide: boolean } type PassWidth = (data: IChildDimensions) => void interface Props { children?: React.ReactNode className?: string passWidth?: PassWidth direction: string } interface State { containerWidth: number children: { [id: string]: { id: string width: number hide?: boolean priority: number } } } interface Section { direction: string count: boolean } const StatusbarSection = withProps<Section>(styled.div)` flex: 1 1 auto; display: ${({ count }) => (count ? `none` : `flex`)}; flex-direction: row; height: 100%; max-width: 48%; justify-content: ${props => props.direction}; ` class StatusBarResizer extends React.Component<Props, State> { private observer: any private elem: Element
(props: Props) {
        super(props)
        this.state = {
            containerWidth: null,
            children: {},
        }
    }

    public componentDidMount() {
        this.setState({
            containerWidth: this.elem.getBoundingClientRect().width,
        })
        // tslint:disable-next-line
        this.observer = new window["ResizeObserver"](([entry]: any) => {
            this.setState({ containerWidth: entry.contentRect.width }, this.resize)
        })
        this.observer.observe(this.elem)
        this.resize()
    }

    public componentWillUnmount() {
        this.observer.disconnect()
    }

    public render() {
        const { containerWidth } = this.state
        const { children, direction } = this.props
        const count = React.Children.count(children)
        return (
            <StatusbarSection
                direction={direction}
                count={count < 1}
                innerRef={(elem: Element) => (this.elem = elem)}
            >
                {containerWidth !== undefined &&
                    React.Children.map(children, (child: React.ReactElement<any>) => {
                        const current = this.state.children[child.props.id]
                        return React.cloneElement(child, {
                            ...child.props,
                            passWidth: this.passWidth,
                            hide: !!current && current.hide,
                            containerWidth,
                        })
                    })}
            </StatusbarSection>
        )
    }

    private passWidth = (childDimensions: IChildDimensions) => {
        const { width, id, priority, hide } = childDimensions
        this.setState(
            state => ({
                ...state,
                children: {
                    ...state.children,
                    [id]: { id, width, priority, hide },
                },
            }),
            this.resize,
        )
    }

    private resize = () => {
        const { children, containerWidth } = this.state
        const childArray = Object.values(children)
        const sorted = childArray.sort((prev, next) => prev.priority - next.priority)
        // Loop through components sorted by priority; check if each component can be added
        // without overshooting the container width. If so, show the component, otherwise hide it
        const { statusItems } = sorted.reduce(
            (components, item) => {
                let hide
                // add 20 to the truncation width so components are not too snug
                if (components.widths + item.width + 20 < containerWidth) {
                    components.widths += item.width
                    hide = false
                } else {
                    hide = true
                }
                components.statusItems[item.id] = {
                    ...this.state.children[item.id],
                    hide,
                }
                return components
            },
            { widths: 0, statusItems: {} },
        )
        this.setState({ children: statusItems })
    }
}

export default StatusBarResizer
constructor
identifier_name
bollingTrader.py
from utils.rwlogging import log
from utils.rwlogging import strategyLogger as logs
from trader import Trader
from indicator import ma, macd, bolling, rsi, kdj
from strategy.pool import StrategyPool

highest = 0

def runStrategy(prices):
    logs.info('STRATEGY,BUY TIMES, SELL TIMES, FINAL EQUITY')
    #prices = SqliteDB().getAllPrices(table)
    ps = [p['close'] for p in prices]
    pool = StrategyPool(100)
    #doBollingTrade(pool, prices, ps, 12, 2.4)
    #pool.showStrategies()
    #return
    for i in range(2, 40):
        j = 0
        log.debug(i)
        while j <= 5:
            doBollingTrade(pool, prices, ps, i, j)
            j += 0.1
    pool.showStrategies()

def doBollingTrade(pool, prices, ps, period, deviate):
    global highest
    sname = 'BOLLING_' + str(period) + '_' + str(deviate)
    bollings = bolling.calc_bolling(prices, period, deviate)
    t = Trader(sname)
    for i in range(period, len(prices)):
        if ps[i-1] > bollings['lower'][i-1] and ps[i] < bollings['lower'][i] and t.bsflag < 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll lower: ' + str(bollings['lower'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll lower: ' + str(bollings['lower'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['mean'][i-1] and ps[i] >= bollings['mean'][i] and t.bsflag == 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['upper'][i-1] and ps[i] > bollings['upper'][i] and t.bsflag > -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll upper: ' + str(bollings['upper'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll upper: ' + str(bollings['upper'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] > bollings['mean'][i-1] and ps[i] <= bollings['mean'][i] and t.bsflag == -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        t.show(prices[i]['date'], prices[i]['time'], prices[i]['rmb'])
    pool.estimate(t)
# -*- coding: utf-8 -*- import datetime, time, csv, os from utils.db import SqliteDB
random_line_split
bollingTrader.py
# -*- coding: utf-8 -*- import datetime, time, csv, os from utils.db import SqliteDB from utils.rwlogging import log from utils.rwlogging import strategyLogger as logs from trader import Trader from indicator import ma, macd, bolling, rsi, kdj from strategy.pool import StrategyPool highest = 0 def runStrategy(prices): logs.info('STRATEGY,BUY TIMES, SELL TIMES, FINAL EQUITY') #prices = SqliteDB().getAllPrices(table) ps = [p['close'] for p in prices] pool = StrategyPool(100) #doBollingTrade(pool, prices, ps, 12, 2.4) #pool.showStrategies() #return for i in range(2, 40): j = 0 log.debug(i) while j <= 5: doBollingTrade(pool, prices, ps, i, j) j += 0.1 pool.showStrategies() def doBollingTrade(pool, prices, ps, period, deviate): global highest sname = 'BOLLING_' + str(period) + '_' + str(deviate) bollings = bolling.calc_bolling(prices, period, deviate) t = Trader(sname) for i in range(period, len(prices)):
pool.estimate(t)
if ps[i-1] > bollings['lower'][i-1] and ps[i] < bollings['lower'][i] and t.bsflag < 1:
    notes = 'LAST p: ' + str(ps[i - 1]) + ';boll lower: ' + str(bollings['lower'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll lower: ' + str(bollings['lower'][i])
    t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
if ps[i-1] < bollings['mean'][i-1] and ps[i] >= bollings['mean'][i] and t.bsflag == 1:
    notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
    t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
if ps[i-1] < bollings['upper'][i-1] and ps[i] > bollings['upper'][i] and t.bsflag > -1:
    notes = 'LAST p: ' + str(ps[i - 1]) + ';boll upper: ' + str(bollings['upper'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll upper: ' + str(bollings['upper'][i])
    t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
if ps[i-1] > bollings['mean'][i-1] and ps[i] <= bollings['mean'][i] and t.bsflag == -1:
    notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
    t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
t.show(prices[i]['date'], prices[i]['time'], prices[i]['rmb'])
conditional_block
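The rows above call bolling.calc_bolling(prices, period, deviate) but never show its implementation; the trading rules are consistent with the usual definition of Bollinger bands, a rolling mean plus/minus deviate standard deviations. A sketch under that assumption (the dict keys match those the strategy reads; the window and std conventions are guesses, not the library's confirmed behavior):

import numpy as np

def calc_bolling_sketch(closes, period, deviate):
    # Rolling mean +/- deviate * population std over the trailing `period` closes.
    # Entries before the first full window stay NaN, which is safe here because
    # doBollingTrade only starts trading at index `period`.
    closes = np.asarray(closes, dtype=float)
    mean = np.full(len(closes), np.nan)
    upper = np.full(len(closes), np.nan)
    lower = np.full(len(closes), np.nan)
    for i in range(period - 1, len(closes)):
        window = closes[i - period + 1 : i + 1]
        m, s = window.mean(), window.std()
        mean[i], upper[i], lower[i] = m, m + deviate * s, m - deviate * s
    return {'mean': mean, 'upper': upper, 'lower': lower}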
bollingTrader.py
# -*- coding: utf-8 -*- import datetime, time, csv, os from utils.db import SqliteDB from utils.rwlogging import log from utils.rwlogging import strategyLogger as logs from trader import Trader from indicator import ma, macd, bolling, rsi, kdj from strategy.pool import StrategyPool highest = 0 def runStrategy(prices):
def doBollingTrade(pool, prices, ps, period, deviate):
    global highest
    sname = 'BOLLING_' + str(period) + '_' + str(deviate)
    bollings = bolling.calc_bolling(prices, period, deviate)
    t = Trader(sname)
    for i in range(period, len(prices)):
        if ps[i-1] > bollings['lower'][i-1] and ps[i] < bollings['lower'][i] and t.bsflag < 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll lower: ' + str(bollings['lower'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll lower: ' + str(bollings['lower'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['mean'][i-1] and ps[i] >= bollings['mean'][i] and t.bsflag == 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['upper'][i-1] and ps[i] > bollings['upper'][i] and t.bsflag > -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll upper: ' + str(bollings['upper'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll upper: ' + str(bollings['upper'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] > bollings['mean'][i-1] and ps[i] <= bollings['mean'][i] and t.bsflag == -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        t.show(prices[i]['date'], prices[i]['time'], prices[i]['rmb'])
    pool.estimate(t)
logs.info('STRATEGY,BUY TIMES, SELL TIMES, FINAL EQUITY') #prices = SqliteDB().getAllPrices(table) ps = [p['close'] for p in prices] pool = StrategyPool(100) #doBollingTrade(pool, prices, ps, 12, 2.4) #pool.showStrategies() #return for i in range(2, 40): j = 0 log.debug(i) while j <= 5: doBollingTrade(pool, prices, ps, i, j) j += 0.1 pool.showStrategies()
identifier_body
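runStrategy above sweeps deviate with repeated j += 0.1, and binary floating point cannot represent 0.1 exactly, so the error accumulates and the loop bound j <= 5 may include or drop the final 5.0 depending on rounding. A drift-free way to build the same parameter grid (illustrative only; names are not from the source):

import numpy as np

periods = range(2, 40)                                    # 38 period values, as in the source
deviates = np.round(np.arange(0.0, 5.0 + 1e-9, 0.1), 1)   # exactly 0.0, 0.1, ..., 5.0
grid = [(p, d) for p in periods for d in deviates]
print(len(grid))   # 38 * 51 = 1938 (period, deviate) pairs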
bollingTrader.py
# -*- coding: utf-8 -*- import datetime, time, csv, os from utils.db import SqliteDB from utils.rwlogging import log from utils.rwlogging import strategyLogger as logs from trader import Trader from indicator import ma, macd, bolling, rsi, kdj from strategy.pool import StrategyPool highest = 0 def
(prices):
    logs.info('STRATEGY,BUY TIMES, SELL TIMES, FINAL EQUITY')
    #prices = SqliteDB().getAllPrices(table)
    ps = [p['close'] for p in prices]
    pool = StrategyPool(100)
    #doBollingTrade(pool, prices, ps, 12, 2.4)
    #pool.showStrategies()
    #return
    for i in range(2, 40):
        j = 0
        log.debug(i)
        while j <= 5:
            doBollingTrade(pool, prices, ps, i, j)
            j += 0.1
    pool.showStrategies()

def doBollingTrade(pool, prices, ps, period, deviate):
    global highest
    sname = 'BOLLING_' + str(period) + '_' + str(deviate)
    bollings = bolling.calc_bolling(prices, period, deviate)
    t = Trader(sname)
    for i in range(period, len(prices)):
        if ps[i-1] > bollings['lower'][i-1] and ps[i] < bollings['lower'][i] and t.bsflag < 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll lower: ' + str(bollings['lower'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll lower: ' + str(bollings['lower'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['mean'][i-1] and ps[i] >= bollings['mean'][i] and t.bsflag == 1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.buy(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] < bollings['upper'][i-1] and ps[i] > bollings['upper'][i] and t.bsflag > -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll upper: ' + str(bollings['upper'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll upper: ' + str(bollings['upper'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        if ps[i-1] > bollings['mean'][i-1] and ps[i] <= bollings['mean'][i] and t.bsflag == -1:
            notes = 'LAST p: ' + str(ps[i - 1]) + ';boll mean: ' + str(bollings['mean'][i-1]) + '; CURRENT p: ' + str(ps[i]) + ';boll mean: ' + str(bollings['mean'][i])
            t.sell(prices[i]['date'], prices[i]['time'], prices[i]['rmb'], notes)
        t.show(prices[i]['date'], prices[i]['time'], prices[i]['rmb'])
    pool.estimate(t)
runStrategy
identifier_name
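Every entry and exit test in doBollingTrade follows one pattern: the previous close on one side of a band, the current close on the other. A vectorized numpy formulation of that crossing test (illustrative; the position-state checks on t.bsflag are deliberately omitted here):

import numpy as np

def crossed_below(series, band):
    # True at step i when the series moves from above the band to below it.
    s, b = np.asarray(series), np.asarray(band)
    return (s[:-1] > b[:-1]) & (s[1:] < b[1:])

closes = np.array([10.0, 9.5, 9.0, 9.6, 10.2])
lower = np.array([9.4, 9.4, 9.3, 9.3, 9.2])
print(np.where(crossed_below(closes, lower))[0] + 1)   # -> [2], the bar where a buy would fire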
yash_bls.py
from __future__ import division, print_function import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import clean_and_search from ktransit import FitTransit from multiprocessing import Pool from scipy import ndimage import glob, timeit, sys import time as pythonTime # OPTIONS doPlot = True plotOption = 'save' secondary = True resultsFilename = '/Users/Yash/Desktop/results.txt' figureSaveLocation = '/Users/Yash/Desktop/' # -------- PLOTTING OPTIONS -------- # import matplotlib def plateau(array, threshold): """Find plateaus in an array, i.e continuous regions that exceed threshold Given an array of numbers, return a 2d array such that out[:,0] marks the indices where the array crosses threshold from below, and out[:,1] marks the next time the array crosses that same threshold from below. Inputs: array (1d numpy array) threshold (float or array) If threshold is a single number, any point above that value is above threshold. If it's an array, it must have the same length as the first argument, and an array[i] > threshold[i] to be included as a plateau Returns: Numpy 2d array with 2 columns. Notes: To find the length of the plateaus, use out[:,1] - out[:,0] To find the length of the largest plateau, use np.max(out[:,1] - out[:,0]) The algorithm fails if a value is exactly equal to the threshold. To guard against this, we add a very small amount to threshold to ensure floating point arithmetic prevents two numbers being exactly equal.""" arr = array.astype(np.float32) arr = arr - threshold + 1e-12 arrPlus = np.roll(arr, 1) #Location of changes from -ve to +ve (or vice versa) #Last point is bogus , so we calculate it by hand sgnChange = arr*arrPlus #Roll around can't compute sign change for zeroth elt. sgnChange[0] = +1 if arr[0] > 0: sgnChange[0] = -1 loc = np.where(sgnChange < 0)[0] if np.fmod( len(loc), 2) != 0: loc.resize( (len(loc)+1)) loc[-1] = len(arr) return loc def outlierRemoval(time, flux): fluxDetrended = medianDetrend(flux, 3) out1 = plateau(fluxDetrended, 5 * np.std(fluxDetrended)) out2 = plateau(-fluxDetrended, 5 * np.std(fluxDetrended)) if out1 == [] and out2 == []: singleOutlierIndices = [] else: outliers = np.append(out1, out2).reshape(-1,2) # Only want groups of one outlier, since > 1 may be transit points singleOutlierIndices = np.sort(outliers[(outliers[:,1] - outliers[:,0] == 1)][:,0]) # Check periodicity of outliers, with PRECISION of 0.0205 days # 0.0205 days = 29.52 minutes = ~length of long cadence precision = 0.0205 outlierTimes = time[singleOutlierIndices] diffs = [outlierTimes[i+1] - outlierTimes[i] for i in range(0, len(outlierTimes)-1)] diffs = [round(d, 5) for d in diffs] if len(singleOutlierIndices) >= 4: if len(set(diffs)) == len(diffs): possibleTimes = np.array([]) else: period = max(set(diffs), key = diffs.count) # period = most common difference epoch = outlierTimes[diffs.index(period)] possibleTimes = np.arange(epoch, outlierTimes[-1] + 0.5*period, period) notOutliers = [] for i in range(len(outlierTimes)): if np.any((abs(possibleTimes - outlierTimes[i]) < precision)): notOutliers.append(i) singleOutlierIndices = np.delete(singleOutlierIndices, notOutliers) elif len(singleOutlierIndices) == 3: if abs(diffs[0] - diffs[1]) < precision: singleOutlierIndices = [] # Uncomment to see how the plotting algorithm worked for a lightcurve # ----------------------------- PLOTTING ----------------------------- # # plt.subplot(311) # plt.scatter(time, flux, marker = '.', s = 1, color = 'k', alpha = 1) # 
plt.scatter(time[singleOutlierIndices], flux[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # plt.title('Original') # plt.subplot(312) # plt.scatter(time, fluxDetrended, marker = '.', s = 1, color = 'k', alpha = 1) # plt.scatter(time[singleOutlierIndices], fluxDetrended[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # x1, x2, y1, y2 = plt.axis() # plt.hlines([-5*np.std(fluxDetrended), 5*np.std(fluxDetrended)], x1, x2, # color = 'b', linestyles = 'dashed') # plt.axis([x1, x2, y1, y2]) # plt.title('Detrended') # plt.subplot(313) # plt.scatter(np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices), # marker = '.', s = 1, color = 'k', alpha = 1) # plt.title('Outliers removed: ' + str(len(singleOutlierIndices))) # plt.show() # -------------------------------------------------------------------- # return np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices) def medianDetrend(flux, binWidth): halfNumPoints = binWidth // 2 medians = [] for i in range(len(flux)): if i < halfNumPoints: medians.append(np.median(flux[:i+halfNumPoints+1])) elif i > len(flux) - halfNumPoints - 1: medians.append(np.median(flux[i-halfNumPoints:])) else:
return flux - medians def getPhase(time, flux, period, epoch, centerPhase = 0): """Get the phase of a lightcurve. How it works using an example where epoch = 2, period = 3: 1. Subtract the epoch from all times [1, 2, 3, 4, 5, 6, 7] to get [-1, 0, 1, 2, 3, 4, 5] then divide by the period [3] to get all time values in phase values which gets you [-0.3, 0, 0.3, 0.6, 1, 1.3, 1.6] 2. Subtract the PHASE NUMBER (floor function) from each PHASE (date1) which gets you [0.7, 0, 0.3, 0.6, 0, 0.3, 0.6] 3. Sort all the adjusted phases to get [0, 0, 0.3, 0.3, 0.6, 0.6, 0.7] THERE WILL BE negative values in the beginning here, just not in this example since no ex. time value divided by the period left a decimal less than 0.25 4. Sort the flux values in the same way the phases were sorted Inputs: time Time values of data. (IN DAYS) flux Flux values of data. period Period of transit. epoch Epoch of transit. centerPhase Which phase should be at the center. Returns: q1 Phase values. (IN HOURS) f1 Flux values for each phase. """ epoch += centerPhase * period date1 = (time - epoch) / period + 0.5 phi1 = ((date1) - np.floor(date1)) - 0.5 q1 = np.sort(phi1) * period * 24. f1 = flux[np.argsort(phi1)] return q1, f1 def fitModel(time, flux, guessDict, freeParPlanet, ferr = 0): if not np.all(ferr): ferr = np.ones_like(flux)*1.E-5 freeParStar = ['rho'] # Make the fitting object according to guess dictionary fitT = FitTransit() fitT.add_guess_star(ld1 = 0, ld2 = 0) fitT.add_guess_planet(period = guessDict['period'], T0 = guessDict['T0']) fitT.add_data(time = time, flux = flux, ferr = ferr) fitT.free_parameters(freeParStar, freeParPlanet) fitT.do_fit() return fitT def do_bls_and_fit(time, flux, min_period, max_period): S = clean_and_search.Search(time, flux + 1, np.ones_like(flux)*1.E-5) S.do_bls2(min_period = min_period, max_period = max_period, min_duration_hours = 1.5, max_duration_hours = 6., freq_step = 1.E-4, doplot = False, norm = False) guessDict = {'period': S.periods[0], 'T0': S.epoch} freeParPlanet = ['period', 'T0', 'rprs'] fitT = fitModel(time, flux, guessDict, freeParPlanet) # Readability of output data period = fitT.fitresultplanets['pnum0']['period'] epoch = fitT.fitresultplanets['pnum0']['T0'] k = fitT.fitresultplanets['pnum0']['rprs'] rho = fitT.fitresultstellar['rho'] duration = computeTransitDuration(period, rho, k) if not duration: duration = S.duration * 24 # Calculating transit depth significance ## fitT.transitmodel sometimes has a NaN value sigma = computePointSigma(time, flux, fitT.transitmodel, period, epoch, duration) depth = k ** 2 significance = depth / sigma phase = getPhase(time, flux, period, epoch)[0] nTransitPoints = np.sum((-duration * 0.5 < phase) & (phase < duration * 0.5)) SNR = significance * nTransitPoints**0.5 return SNR, period, epoch, duration, depth, fitT.transitmodel, S.f_1, S.convolved_bls def computePointSigma(time, flux, transitModel, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) mt2, mf2 = removeTransits(time, transitModel, period, epoch, duration) return np.nanstd(f2 - mf2) def removeTransits(time, flux, period, epoch, duration): halfDur = 0.5 * duration / 24. 
bad = np.where(time < epoch - period + halfDur)[0] for p in np.arange(epoch, time[-1] + period, period): bad = np.append(bad, np.where((p - halfDur < time) & (time < p + halfDur))[0]) good = np.setxor1d(range(len(time)), bad) return time[good], flux[good] def computeTransitDuration(period, rho, k): b = 0.1 # Impact parameter (default value in ktransit) G = 6.67384e-11 # Gravitational constant P = period * 86400 # Period in seconds stellarDensity = rho * 1000 rStarOverA = ((4 * np.pi**2) / (G * stellarDensity * P**2))**(1./3.) cosI = b * rStarOverA sinI = np.sqrt(1 - cosI**2) coeff = rStarOverA * np.sqrt((1+k)**2 - b**2) / sinI if coeff > 1: return 0 else: duration = (P / np.pi) * np.arcsin(coeff) return duration / 3600 # Duration in hours def findSecondary(time, flux, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) minp, maxp = period - 0.1, period + 0.1 if t2[-1] - t2[0] == 0 or 1./maxp < 1./(t2[-1] - t2[0]): return (np.nan,)*5 if minp < 0.5: minp = 0.5 planetInfo = do_bls_and_fit(t2, f2, minp, maxp) return (t2,) + planetInfo[0:4] + (planetInfo[5],) def computeOddEvenModels(time, flux, per, epo): gdOdd = {'period': per * 2, 'T0': epo} gdEven = {'period': per * 2, 'T0': epo + per} freeParPlanet = ['rprs'] fitT_odd = fitModel(time, flux, gdOdd, freeParPlanet) fitT_even = fitModel(time, flux, gdEven, freeParPlanet) return fitT_odd, fitT_even def main(filename): """Fit a transit model to a lightcurve. 1. Remove outliers. 2. Detrend the data with a binwidth of 26 cadences. Since MAX_DURATION_HOURS = 6, and 6 hours = ~13 cadences (ceiling of 12.245), detrending with a binwidth of double this value will preserve all events with a duration of 13 cadences or less. 3. Create an "S" object. (???) [1 is added to the flux to avoid a division by zero error] 4. Run the BLS algorithm and Tom's transit fitting algorithm. Since the BLS can lock on to an incorrect, shorter period event, I run it on four different minimum periods, chosen somewhat arbitrarily. These results go into a dictionary sorted by the calculated SNR of each fit, and the parameters which give the maximum SNR are used. 5. Plot the original lightcurve, the BLS statistics from its minimum to maximum period, and a phased lightcurve. 6. Save the plot, and return a string containing the parameters of the fit. """ name = filename[-13:-4] time, flux = np.genfromtxt(filename, unpack = True) if np.all(np.isnan(flux)): return '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' %((name,)+(np.nan,)*6) time, flux = outlierRemoval(time, flux) flux = medianDetrend(flux, 26) # Main transit search minPeriod = 0.5 # Limitations of BLS Fortran code maxPeriod = (time[-1] - time[0]) / 2. 
SNR, period, epoch, duration, depth, transitModel, period_guesses, \ convolved_bls = do_bls_and_fit(time, flux, minPeriod, maxPeriod) # For the phase curves phase, phasedFlux = getPhase(time, flux, period, epoch) phaseModel, phasedFluxModel = getPhase(time, transitModel, period, epoch) # Secondary search secTime, secSNR, secPer, secEpoch, secDur, secModel = findSecondary(time, flux, period, epoch, duration) if secSNR > 5 and abs(period - secPer) < 0.05: secPhase, secPhaseModel = getPhase(secTime, secModel, secPer, epoch) idx = len(secPhase[secPhase < 0]) else: secPhase, secPhaseModel, idx = [], [], 1 # Odd/Even plot fitT_odd, fitT_even = computeOddEvenModels(time, flux, period, epoch) phaseModel_odd, phasedFluxModel_odd = getPhase(time, fitT_odd.transitmodel, period * 2, epoch) phaseModel_even, phasedFluxModel_even = getPhase(time, fitT_even.transitmodel, period * 2, epoch + period) depthOdd = fitT_odd.fitresultplanets['pnum0']['rprs'] ** 2 depthEven = fitT_even.fitresultplanets['pnum0']['rprs'] ** 2 phaseOdd, fluxOdd = getPhase(time, flux, period * 2, epoch) phaseEven, fluxEven = getPhase(time, flux, period * 2, epoch + period) x1, x2 = -duration, duration y1, y2 = -3*np.std(fluxOdd), 3*np.std(fluxOdd) if min(fluxOdd) < y1: y1 = min(fluxOdd) - np.std(fluxOdd) # sigma = abs(depth1 - depth2) / sqrt(u1^2 + u2^2) durOdd = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_odd.fitresultplanets['pnum0']['rprs']) durEven = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_even.fitresultplanets['pnum0']['rprs']) sigma = computePointSigma(time, flux, transitModel, period, epoch, duration) nOddPoints = np.sum((-durOdd*0.5 < phaseOdd) & (phaseOdd < durOdd * 0.5)) nEvenPoints = np.sum((-durEven*0.5 < phaseEven) & (phaseEven < durEven * 0.5)) uOdd, uEven = sigma / np.sqrt(nOddPoints), sigma / np.sqrt(nEvenPoints) depthDiffSigma = abs(depthOdd - depthEven) / np.sqrt(uOdd**2 + uEven**2) if doPlot: gs = gridspec.GridSpec(3,2) ax1 = plt.subplot(gs[0,:]) axOdd = plt.subplot(gs[1,0]) axEven = plt.subplot(gs[1,1]) ax3 = plt.subplot(gs[2,:]) gs.update(wspace = 0, hspace = 0.5) ax1.plot(time, flux, 'k') y1, y2 = ax1.get_ylim() ax1.vlines(np.arange(epoch, time[-1], period), y1, y2, color = 'r', linestyles = 'dashed', linewidth = 0.5) ax1.axis([time[0], time[-1], y1, y2]) ax1.set_title('kplr%s; best period = %8.6g days; SNR = %8.6g' %(name, period, SNR)) ax1.set_xlabel('days') axOdd.set_ylabel('flux') axOdd.scatter(phaseOdd, fluxOdd, marker = '.', s = 1, color = 'k', alpha = 1) axOdd.plot(phaseModel_odd, phasedFluxModel_odd, 'r') axOdd.axhline(-depthOdd, x1, x2) axOdd.axis([x1,x2,y1,y2]) axOdd.set_title('odd') axEven.scatter(phaseEven, fluxEven, marker = '.', s = 1, color = 'k', alpha = 1) axEven.plot(phaseModel_even, phasedFluxModel_even, 'r') axEven.axhline(-depthEven, x1, x2) axEven.yaxis.tick_right() axEven.axis([x1,x2,y1,y2]) axEven.set_title('even') if secondary: plt.plot(secPhase[:idx], secPhaseModel[:idx], 'c') plt.plot(secPhase[idx:], secPhaseModel[idx:], 'c') ax3.scatter(phase, phasedFlux, marker = '.', s = 1, color = 'k') ax3.plot(phaseModel, phasedFluxModel, 'r') y1, y2 = -3*np.std(phasedFlux), 3*np.std(phasedFlux) if min(phasedFlux) < y1: y1 = min(phasedFlux) - np.std(phasedFlux) ax3.axis([phase[0], phase[-1], y1, y2]) ax3.set_xlabel('phase [hours]') ax3.text(0.5, 1.25, 'depth diff sigma = %.3f' %depthDiffSigma, horizontalalignment = 'center', verticalalignment = 'center', transform = ax3.transAxes) if plotOption == 'save': plt.savefig(figureSaveLocation + 
'%s.png' %name, dpi = 200) plt.close() elif plotOption == 'show': plt.show() successString = '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' \ %(name, SNR, period, depth, epoch, duration, secSNR) return successString def getResults(): rfn = '/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/run1/results.txt' names, periods = np.genfromtxt(rfn, usecols = (0,2), unpack = True) return names, periods if __name__ == '__main__': # files = np.array(glob.glob('/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/dataSVD/*.txt')) files = np.genfromtxt('/Users/Yash/Desktop/f0pcs.txt', dtype = 'str') title = '\t'.join(['name\t\t', 'SNR\t', 'period[days]', 'depth\t', 'epoch[day]', 'duration[hours]', 'secondary SNR']) print(title) # Multiprocesses the code into a Pool of 7, while writing the results to # resultsFilename as each iteration of the code completes. Also prints # results to the console and gives an ETA. #-------------------------- MULTIPROCESSING --------------------------# # with open(resultsFilename, 'w') as rf: # rf.write(title + '\n') # p = Pool(7) # start = timeit.default_timer(); progressString = '' # for i, res in enumerate(p.imap_unordered(main, files), 1): # with open(resultsFilename, 'a') as rf: # rf.write(res + '\n') # avg = (timeit.default_timer() - start)/i # eta = (len(files) - i) * avg # sys.stdout.write('\b \b'*len(progressString)) # print(res) # progressString = '%i/%i done, avg %3.2f sec per target, eta: %s' %(i, len(files), # avg, pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(eta))) # sys.stdout.write(progressString); sys.stdout.flush() # p.close() # p.join() # total = timeit.default_timer() - start # print('\ntotal elapsed time: %s' %pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(total)))
medians.append(np.median(flux[i-halfNumPoints : i+halfNumPoints+1]))
conditional_block
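The getPhase docstring in this row explains the fold step by step with epoch = 2 and period = 3. A minimal standalone version of the same fold (note that the code's ±0.5 shifts center the fold on the transit, so unlike the docstring's simplified walkthrough the phases run negative-to-positive around zero):

import numpy as np

def get_phase_sketch(time, flux, period, epoch):
    # Fold times about (epoch, period); phase is returned in hours, as in the source.
    date1 = (time - epoch) / period + 0.5
    phi1 = (date1 - np.floor(date1)) - 0.5
    order = np.argsort(phi1)
    return np.sort(phi1) * period * 24.0, flux[order]

t = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
phase, folded = get_phase_sketch(t, np.arange(7.0), period=3.0, epoch=2.0)
print(np.round(phase))   # -> [-24. -24. -24.   0.   0.  24.  24.] hours from mid-transit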
yash_bls.py
from __future__ import division, print_function import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import clean_and_search from ktransit import FitTransit from multiprocessing import Pool from scipy import ndimage import glob, timeit, sys import time as pythonTime # OPTIONS doPlot = True plotOption = 'save' secondary = True resultsFilename = '/Users/Yash/Desktop/results.txt' figureSaveLocation = '/Users/Yash/Desktop/' # -------- PLOTTING OPTIONS -------- # import matplotlib def plateau(array, threshold): """Find plateaus in an array, i.e continuous regions that exceed threshold Given an array of numbers, return a 2d array such that out[:,0] marks the indices where the array crosses threshold from below, and out[:,1] marks the next time the array crosses that same threshold from below. Inputs: array (1d numpy array) threshold (float or array) If threshold is a single number, any point above that value is above threshold. If it's an array, it must have the same length as the first argument, and an array[i] > threshold[i] to be included as a plateau Returns: Numpy 2d array with 2 columns. Notes: To find the length of the plateaus, use out[:,1] - out[:,0] To find the length of the largest plateau, use np.max(out[:,1] - out[:,0]) The algorithm fails if a value is exactly equal to the threshold. To guard against this, we add a very small amount to threshold to ensure floating point arithmetic prevents two numbers being exactly equal.""" arr = array.astype(np.float32) arr = arr - threshold + 1e-12 arrPlus = np.roll(arr, 1) #Location of changes from -ve to +ve (or vice versa) #Last point is bogus , so we calculate it by hand sgnChange = arr*arrPlus #Roll around can't compute sign change for zeroth elt. sgnChange[0] = +1 if arr[0] > 0: sgnChange[0] = -1 loc = np.where(sgnChange < 0)[0] if np.fmod( len(loc), 2) != 0: loc.resize( (len(loc)+1)) loc[-1] = len(arr) return loc def outlierRemoval(time, flux): fluxDetrended = medianDetrend(flux, 3) out1 = plateau(fluxDetrended, 5 * np.std(fluxDetrended)) out2 = plateau(-fluxDetrended, 5 * np.std(fluxDetrended)) if out1 == [] and out2 == []: singleOutlierIndices = [] else: outliers = np.append(out1, out2).reshape(-1,2) # Only want groups of one outlier, since > 1 may be transit points singleOutlierIndices = np.sort(outliers[(outliers[:,1] - outliers[:,0] == 1)][:,0]) # Check periodicity of outliers, with PRECISION of 0.0205 days # 0.0205 days = 29.52 minutes = ~length of long cadence precision = 0.0205 outlierTimes = time[singleOutlierIndices] diffs = [outlierTimes[i+1] - outlierTimes[i] for i in range(0, len(outlierTimes)-1)] diffs = [round(d, 5) for d in diffs] if len(singleOutlierIndices) >= 4: if len(set(diffs)) == len(diffs): possibleTimes = np.array([]) else: period = max(set(diffs), key = diffs.count) # period = most common difference epoch = outlierTimes[diffs.index(period)] possibleTimes = np.arange(epoch, outlierTimes[-1] + 0.5*period, period) notOutliers = [] for i in range(len(outlierTimes)): if np.any((abs(possibleTimes - outlierTimes[i]) < precision)): notOutliers.append(i) singleOutlierIndices = np.delete(singleOutlierIndices, notOutliers) elif len(singleOutlierIndices) == 3: if abs(diffs[0] - diffs[1]) < precision: singleOutlierIndices = [] # Uncomment to see how the plotting algorithm worked for a lightcurve # ----------------------------- PLOTTING ----------------------------- # # plt.subplot(311) # plt.scatter(time, flux, marker = '.', s = 1, color = 'k', alpha = 1) # 
plt.scatter(time[singleOutlierIndices], flux[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # plt.title('Original') # plt.subplot(312) # plt.scatter(time, fluxDetrended, marker = '.', s = 1, color = 'k', alpha = 1) # plt.scatter(time[singleOutlierIndices], fluxDetrended[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # x1, x2, y1, y2 = plt.axis() # plt.hlines([-5*np.std(fluxDetrended), 5*np.std(fluxDetrended)], x1, x2, # color = 'b', linestyles = 'dashed') # plt.axis([x1, x2, y1, y2]) # plt.title('Detrended') # plt.subplot(313) # plt.scatter(np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices), # marker = '.', s = 1, color = 'k', alpha = 1) # plt.title('Outliers removed: ' + str(len(singleOutlierIndices))) # plt.show() # -------------------------------------------------------------------- # return np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices) def medianDetrend(flux, binWidth): halfNumPoints = binWidth // 2 medians = [] for i in range(len(flux)): if i < halfNumPoints: medians.append(np.median(flux[:i+halfNumPoints+1])) elif i > len(flux) - halfNumPoints - 1: medians.append(np.median(flux[i-halfNumPoints:])) else: medians.append(np.median(flux[i-halfNumPoints : i+halfNumPoints+1])) return flux - medians def getPhase(time, flux, period, epoch, centerPhase = 0): """Get the phase of a lightcurve. How it works using an example where epoch = 2, period = 3: 1. Subtract the epoch from all times [1, 2, 3, 4, 5, 6, 7] to get [-1, 0, 1, 2, 3, 4, 5] then divide by the period [3] to get all time values in phase values which gets you [-0.3, 0, 0.3, 0.6, 1, 1.3, 1.6] 2. Subtract the PHASE NUMBER (floor function) from each PHASE (date1) which gets you [0.7, 0, 0.3, 0.6, 0, 0.3, 0.6] 3. Sort all the adjusted phases to get [0, 0, 0.3, 0.3, 0.6, 0.6, 0.7] THERE WILL BE negative values in the beginning here, just not in this example since no ex. time value divided by the period left a decimal less than 0.25 4. Sort the flux values in the same way the phases were sorted Inputs: time Time values of data. (IN DAYS) flux Flux values of data. period Period of transit. epoch Epoch of transit. centerPhase Which phase should be at the center. Returns: q1 Phase values. (IN HOURS) f1 Flux values for each phase. """ epoch += centerPhase * period date1 = (time - epoch) / period + 0.5 phi1 = ((date1) - np.floor(date1)) - 0.5 q1 = np.sort(phi1) * period * 24. 
f1 = flux[np.argsort(phi1)] return q1, f1 def fitModel(time, flux, guessDict, freeParPlanet, ferr = 0): if not np.all(ferr): ferr = np.ones_like(flux)*1.E-5 freeParStar = ['rho'] # Make the fitting object according to guess dictionary fitT = FitTransit() fitT.add_guess_star(ld1 = 0, ld2 = 0) fitT.add_guess_planet(period = guessDict['period'], T0 = guessDict['T0']) fitT.add_data(time = time, flux = flux, ferr = ferr) fitT.free_parameters(freeParStar, freeParPlanet) fitT.do_fit() return fitT def do_bls_and_fit(time, flux, min_period, max_period): S = clean_and_search.Search(time, flux + 1, np.ones_like(flux)*1.E-5) S.do_bls2(min_period = min_period, max_period = max_period, min_duration_hours = 1.5, max_duration_hours = 6., freq_step = 1.E-4, doplot = False, norm = False) guessDict = {'period': S.periods[0], 'T0': S.epoch} freeParPlanet = ['period', 'T0', 'rprs'] fitT = fitModel(time, flux, guessDict, freeParPlanet) # Readability of output data period = fitT.fitresultplanets['pnum0']['period'] epoch = fitT.fitresultplanets['pnum0']['T0'] k = fitT.fitresultplanets['pnum0']['rprs'] rho = fitT.fitresultstellar['rho'] duration = computeTransitDuration(period, rho, k) if not duration: duration = S.duration * 24 # Calculating transit depth significance ## fitT.transitmodel sometimes has a NaN value sigma = computePointSigma(time, flux, fitT.transitmodel, period, epoch, duration) depth = k ** 2 significance = depth / sigma phase = getPhase(time, flux, period, epoch)[0] nTransitPoints = np.sum((-duration * 0.5 < phase) & (phase < duration * 0.5)) SNR = significance * nTransitPoints**0.5 return SNR, period, epoch, duration, depth, fitT.transitmodel, S.f_1, S.convolved_bls def computePointSigma(time, flux, transitModel, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) mt2, mf2 = removeTransits(time, transitModel, period, epoch, duration) return np.nanstd(f2 - mf2) def
(time, flux, period, epoch, duration): halfDur = 0.5 * duration / 24. bad = np.where(time < epoch - period + halfDur)[0] for p in np.arange(epoch, time[-1] + period, period): bad = np.append(bad, np.where((p - halfDur < time) & (time < p + halfDur))[0]) good = np.setxor1d(range(len(time)), bad) return time[good], flux[good] def computeTransitDuration(period, rho, k): b = 0.1 # Impact parameter (default value in ktransit) G = 6.67384e-11 # Gravitational constant P = period * 86400 # Period in seconds stellarDensity = rho * 1000 rStarOverA = ((4 * np.pi**2) / (G * stellarDensity * P**2))**(1./3.) cosI = b * rStarOverA sinI = np.sqrt(1 - cosI**2) coeff = rStarOverA * np.sqrt((1+k)**2 - b**2) / sinI if coeff > 1: return 0 else: duration = (P / np.pi) * np.arcsin(coeff) return duration / 3600 # Duration in hours def findSecondary(time, flux, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) minp, maxp = period - 0.1, period + 0.1 if t2[-1] - t2[0] == 0 or 1./maxp < 1./(t2[-1] - t2[0]): return (np.nan,)*5 if minp < 0.5: minp = 0.5 planetInfo = do_bls_and_fit(t2, f2, minp, maxp) return (t2,) + planetInfo[0:4] + (planetInfo[5],) def computeOddEvenModels(time, flux, per, epo): gdOdd = {'period': per * 2, 'T0': epo} gdEven = {'period': per * 2, 'T0': epo + per} freeParPlanet = ['rprs'] fitT_odd = fitModel(time, flux, gdOdd, freeParPlanet) fitT_even = fitModel(time, flux, gdEven, freeParPlanet) return fitT_odd, fitT_even def main(filename): """Fit a transit model to a lightcurve. 1. Remove outliers. 2. Detrend the data with a binwidth of 26 cadences. Since MAX_DURATION_HOURS = 6, and 6 hours = ~13 cadences (ceiling of 12.245), detrending with a binwidth of double this value will preserve all events with a duration of 13 cadences or less. 3. Create an "S" object. (???) [1 is added to the flux to avoid a division by zero error] 4. Run the BLS algorithm and Tom's transit fitting algorithm. Since the BLS can lock on to an incorrect, shorter period event, I run it on four different minimum periods, chosen somewhat arbitrarily. These results go into a dictionary sorted by the calculated SNR of each fit, and the parameters which give the maximum SNR are used. 5. Plot the original lightcurve, the BLS statistics from its minimum to maximum period, and a phased lightcurve. 6. Save the plot, and return a string containing the parameters of the fit. """ name = filename[-13:-4] time, flux = np.genfromtxt(filename, unpack = True) if np.all(np.isnan(flux)): return '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' %((name,)+(np.nan,)*6) time, flux = outlierRemoval(time, flux) flux = medianDetrend(flux, 26) # Main transit search minPeriod = 0.5 # Limitations of BLS Fortran code maxPeriod = (time[-1] - time[0]) / 2. 
SNR, period, epoch, duration, depth, transitModel, period_guesses, \ convolved_bls = do_bls_and_fit(time, flux, minPeriod, maxPeriod) # For the phase curves phase, phasedFlux = getPhase(time, flux, period, epoch) phaseModel, phasedFluxModel = getPhase(time, transitModel, period, epoch) # Secondary search secTime, secSNR, secPer, secEpoch, secDur, secModel = findSecondary(time, flux, period, epoch, duration) if secSNR > 5 and abs(period - secPer) < 0.05: secPhase, secPhaseModel = getPhase(secTime, secModel, secPer, epoch) idx = len(secPhase[secPhase < 0]) else: secPhase, secPhaseModel, idx = [], [], 1 # Odd/Even plot fitT_odd, fitT_even = computeOddEvenModels(time, flux, period, epoch) phaseModel_odd, phasedFluxModel_odd = getPhase(time, fitT_odd.transitmodel, period * 2, epoch) phaseModel_even, phasedFluxModel_even = getPhase(time, fitT_even.transitmodel, period * 2, epoch + period) depthOdd = fitT_odd.fitresultplanets['pnum0']['rprs'] ** 2 depthEven = fitT_even.fitresultplanets['pnum0']['rprs'] ** 2 phaseOdd, fluxOdd = getPhase(time, flux, period * 2, epoch) phaseEven, fluxEven = getPhase(time, flux, period * 2, epoch + period) x1, x2 = -duration, duration y1, y2 = -3*np.std(fluxOdd), 3*np.std(fluxOdd) if min(fluxOdd) < y1: y1 = min(fluxOdd) - np.std(fluxOdd) # sigma = abs(depth1 - depth2) / sqrt(u1^2 + u2^2) durOdd = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_odd.fitresultplanets['pnum0']['rprs']) durEven = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_even.fitresultplanets['pnum0']['rprs']) sigma = computePointSigma(time, flux, transitModel, period, epoch, duration) nOddPoints = np.sum((-durOdd*0.5 < phaseOdd) & (phaseOdd < durOdd * 0.5)) nEvenPoints = np.sum((-durEven*0.5 < phaseEven) & (phaseEven < durEven * 0.5)) uOdd, uEven = sigma / np.sqrt(nOddPoints), sigma / np.sqrt(nEvenPoints) depthDiffSigma = abs(depthOdd - depthEven) / np.sqrt(uOdd**2 + uEven**2) if doPlot: gs = gridspec.GridSpec(3,2) ax1 = plt.subplot(gs[0,:]) axOdd = plt.subplot(gs[1,0]) axEven = plt.subplot(gs[1,1]) ax3 = plt.subplot(gs[2,:]) gs.update(wspace = 0, hspace = 0.5) ax1.plot(time, flux, 'k') y1, y2 = ax1.get_ylim() ax1.vlines(np.arange(epoch, time[-1], period), y1, y2, color = 'r', linestyles = 'dashed', linewidth = 0.5) ax1.axis([time[0], time[-1], y1, y2]) ax1.set_title('kplr%s; best period = %8.6g days; SNR = %8.6g' %(name, period, SNR)) ax1.set_xlabel('days') axOdd.set_ylabel('flux') axOdd.scatter(phaseOdd, fluxOdd, marker = '.', s = 1, color = 'k', alpha = 1) axOdd.plot(phaseModel_odd, phasedFluxModel_odd, 'r') axOdd.axhline(-depthOdd, x1, x2) axOdd.axis([x1,x2,y1,y2]) axOdd.set_title('odd') axEven.scatter(phaseEven, fluxEven, marker = '.', s = 1, color = 'k', alpha = 1) axEven.plot(phaseModel_even, phasedFluxModel_even, 'r') axEven.axhline(-depthEven, x1, x2) axEven.yaxis.tick_right() axEven.axis([x1,x2,y1,y2]) axEven.set_title('even') if secondary: plt.plot(secPhase[:idx], secPhaseModel[:idx], 'c') plt.plot(secPhase[idx:], secPhaseModel[idx:], 'c') ax3.scatter(phase, phasedFlux, marker = '.', s = 1, color = 'k') ax3.plot(phaseModel, phasedFluxModel, 'r') y1, y2 = -3*np.std(phasedFlux), 3*np.std(phasedFlux) if min(phasedFlux) < y1: y1 = min(phasedFlux) - np.std(phasedFlux) ax3.axis([phase[0], phase[-1], y1, y2]) ax3.set_xlabel('phase [hours]') ax3.text(0.5, 1.25, 'depth diff sigma = %.3f' %depthDiffSigma, horizontalalignment = 'center', verticalalignment = 'center', transform = ax3.transAxes) if plotOption == 'save': plt.savefig(figureSaveLocation + 
'%s.png' %name, dpi = 200) plt.close() elif plotOption == 'show': plt.show() successString = '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' \ %(name, SNR, period, depth, epoch, duration, secSNR) return successString def getResults(): rfn = '/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/run1/results.txt' names, periods = np.genfromtxt(rfn, usecols = (0,2), unpack = True) return names, periods if __name__ == '__main__': # files = np.array(glob.glob('/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/dataSVD/*.txt')) files = np.genfromtxt('/Users/Yash/Desktop/f0pcs.txt', dtype = 'str') title = '\t'.join(['name\t\t', 'SNR\t', 'period[days]', 'depth\t', 'epoch[day]', 'duration[hours]', 'secondary SNR']) print(title) # Multiprocesses the code into a Pool of 7, while writing the results to # resultsFilename as each iteration of the code completes. Also prints # results to the console and gives an ETA. #-------------------------- MULTIPROCESSING --------------------------# # with open(resultsFilename, 'w') as rf: # rf.write(title + '\n') # p = Pool(7) # start = timeit.default_timer(); progressString = '' # for i, res in enumerate(p.imap_unordered(main, files), 1): # with open(resultsFilename, 'a') as rf: # rf.write(res + '\n') # avg = (timeit.default_timer() - start)/i # eta = (len(files) - i) * avg # sys.stdout.write('\b \b'*len(progressString)) # print(res) # progressString = '%i/%i done, avg %3.2f sec per target, eta: %s' %(i, len(files), # avg, pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(eta))) # sys.stdout.write(progressString); sys.stdout.flush() # p.close() # p.join() # total = timeit.default_timer() - start # print('\ntotal elapsed time: %s' %pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(total)))
removeTransits
identifier_name
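A quick sanity check on the fold used throughout yash_bls.py is to run the getPhase arithmetic by hand on the docstring's own worked example (epoch = 2, period = 3). The sketch below is a minimal standalone version assuming only NumPy; the flux values are made up so the two in-transit samples stand out after folding.

```python
import numpy as np

# Inputs mirroring the getPhase docstring example: epoch = 2, period = 3.
# The flux dips at t = 2 and t = 5 are one "period" apart on purpose.
time = np.array([1., 2., 3., 4., 5., 6., 7.])
flux = np.array([0.1, -0.9, 0.0, 0.2, -0.8, 0.1, 0.3])
period, epoch = 3.0, 2.0

# Same fold-and-sort arithmetic as getPhase, minus the hours conversion.
date1 = (time - epoch) / period + 0.5
phi1 = (date1 - np.floor(date1)) - 0.5      # phases wrapped into [-0.5, 0.5)
order = np.argsort(phi1)
phase_days = np.sort(phi1) * period          # phase in days
folded_flux = flux[order]

print(phase_days)    # [-1. -1. -1.  0.  0.  1.  1.]
print(folded_flux)   # the two dips (-0.9, -0.8) line up at phase 0
```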
yash_bls.py
from __future__ import division, print_function import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import clean_and_search from ktransit import FitTransit from multiprocessing import Pool from scipy import ndimage import glob, timeit, sys import time as pythonTime # OPTIONS doPlot = True plotOption = 'save' secondary = True resultsFilename = '/Users/Yash/Desktop/results.txt' figureSaveLocation = '/Users/Yash/Desktop/' # -------- PLOTTING OPTIONS -------- # import matplotlib def plateau(array, threshold): """Find plateaus in an array, i.e continuous regions that exceed threshold Given an array of numbers, return a 2d array such that out[:,0] marks the indices where the array crosses threshold from below, and out[:,1] marks the next time the array crosses that same threshold from below. Inputs: array (1d numpy array) threshold (float or array) If threshold is a single number, any point above that value is above threshold. If it's an array, it must have the same length as the first argument, and an array[i] > threshold[i] to be included as a plateau Returns: Numpy 2d array with 2 columns. Notes: To find the length of the plateaus, use out[:,1] - out[:,0] To find the length of the largest plateau, use
arr = array.astype(np.float32) arr = arr - threshold + 1e-12 arrPlus = np.roll(arr, 1) #Location of changes from -ve to +ve (or vice versa) #Last point is bogus , so we calculate it by hand sgnChange = arr*arrPlus #Roll around can't compute sign change for zeroth elt. sgnChange[0] = +1 if arr[0] > 0: sgnChange[0] = -1 loc = np.where(sgnChange < 0)[0] if np.fmod( len(loc), 2) != 0: loc.resize( (len(loc)+1)) loc[-1] = len(arr) return loc def outlierRemoval(time, flux): fluxDetrended = medianDetrend(flux, 3) out1 = plateau(fluxDetrended, 5 * np.std(fluxDetrended)) out2 = plateau(-fluxDetrended, 5 * np.std(fluxDetrended)) if out1 == [] and out2 == []: singleOutlierIndices = [] else: outliers = np.append(out1, out2).reshape(-1,2) # Only want groups of one outlier, since > 1 may be transit points singleOutlierIndices = np.sort(outliers[(outliers[:,1] - outliers[:,0] == 1)][:,0]) # Check periodicity of outliers, with PRECISION of 0.0205 days # 0.0205 days = 29.52 minutes = ~length of long cadence precision = 0.0205 outlierTimes = time[singleOutlierIndices] diffs = [outlierTimes[i+1] - outlierTimes[i] for i in range(0, len(outlierTimes)-1)] diffs = [round(d, 5) for d in diffs] if len(singleOutlierIndices) >= 4: if len(set(diffs)) == len(diffs): possibleTimes = np.array([]) else: period = max(set(diffs), key = diffs.count) # period = most common difference epoch = outlierTimes[diffs.index(period)] possibleTimes = np.arange(epoch, outlierTimes[-1] + 0.5*period, period) notOutliers = [] for i in range(len(outlierTimes)): if np.any((abs(possibleTimes - outlierTimes[i]) < precision)): notOutliers.append(i) singleOutlierIndices = np.delete(singleOutlierIndices, notOutliers) elif len(singleOutlierIndices) == 3: if abs(diffs[0] - diffs[1]) < precision: singleOutlierIndices = [] # Uncomment to see how the plotting algorithm worked for a lightcurve # ----------------------------- PLOTTING ----------------------------- # # plt.subplot(311) # plt.scatter(time, flux, marker = '.', s = 1, color = 'k', alpha = 1) # plt.scatter(time[singleOutlierIndices], flux[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # plt.title('Original') # plt.subplot(312) # plt.scatter(time, fluxDetrended, marker = '.', s = 1, color = 'k', alpha = 1) # plt.scatter(time[singleOutlierIndices], fluxDetrended[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # x1, x2, y1, y2 = plt.axis() # plt.hlines([-5*np.std(fluxDetrended), 5*np.std(fluxDetrended)], x1, x2, # color = 'b', linestyles = 'dashed') # plt.axis([x1, x2, y1, y2]) # plt.title('Detrended') # plt.subplot(313) # plt.scatter(np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices), # marker = '.', s = 1, color = 'k', alpha = 1) # plt.title('Outliers removed: ' + str(len(singleOutlierIndices))) # plt.show() # -------------------------------------------------------------------- # return np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices) def medianDetrend(flux, binWidth): halfNumPoints = binWidth // 2 medians = [] for i in range(len(flux)): if i < halfNumPoints: medians.append(np.median(flux[:i+halfNumPoints+1])) elif i > len(flux) - halfNumPoints - 1: medians.append(np.median(flux[i-halfNumPoints:])) else: medians.append(np.median(flux[i-halfNumPoints : i+halfNumPoints+1])) return flux - medians def getPhase(time, flux, period, epoch, centerPhase = 0): """Get the phase of a lightcurve. How it works using an example where epoch = 2, period = 3: 1. 
Subtract the epoch from all times [1, 2, 3, 4, 5, 6, 7] to get [-1, 0, 1, 2, 3, 4, 5] then divide by the period [3] to get all time values in phase values which gets you [-0.3, 0, 0.3, 0.6, 1, 1.3, 1.6] 2. Subtract the PHASE NUMBER (floor function) from each PHASE (date1) which gets you [0.7, 0, 0.3, 0.6, 0, 0.3, 0.6] 3. Sort all the adjusted phases to get [0, 0, 0.3, 0.3, 0.6, 0.6, 0.7] THERE WILL BE negative values in the beginning here, just not in this example since no ex. time value divided by the period left a decimal less than 0.25 4. Sort the flux values in the same way the phases were sorted Inputs: time Time values of data. (IN DAYS) flux Flux values of data. period Period of transit. epoch Epoch of transit. centerPhase Which phase should be at the center. Returns: q1 Phase values. (IN HOURS) f1 Flux values for each phase. """ epoch += centerPhase * period date1 = (time - epoch) / period + 0.5 phi1 = ((date1) - np.floor(date1)) - 0.5 q1 = np.sort(phi1) * period * 24. f1 = flux[np.argsort(phi1)] return q1, f1 def fitModel(time, flux, guessDict, freeParPlanet, ferr = 0): if not np.all(ferr): ferr = np.ones_like(flux)*1.E-5 freeParStar = ['rho'] # Make the fitting object according to guess dictionary fitT = FitTransit() fitT.add_guess_star(ld1 = 0, ld2 = 0) fitT.add_guess_planet(period = guessDict['period'], T0 = guessDict['T0']) fitT.add_data(time = time, flux = flux, ferr = ferr) fitT.free_parameters(freeParStar, freeParPlanet) fitT.do_fit() return fitT def do_bls_and_fit(time, flux, min_period, max_period): S = clean_and_search.Search(time, flux + 1, np.ones_like(flux)*1.E-5) S.do_bls2(min_period = min_period, max_period = max_period, min_duration_hours = 1.5, max_duration_hours = 6., freq_step = 1.E-4, doplot = False, norm = False) guessDict = {'period': S.periods[0], 'T0': S.epoch} freeParPlanet = ['period', 'T0', 'rprs'] fitT = fitModel(time, flux, guessDict, freeParPlanet) # Readability of output data period = fitT.fitresultplanets['pnum0']['period'] epoch = fitT.fitresultplanets['pnum0']['T0'] k = fitT.fitresultplanets['pnum0']['rprs'] rho = fitT.fitresultstellar['rho'] duration = computeTransitDuration(period, rho, k) if not duration: duration = S.duration * 24 # Calculating transit depth significance ## fitT.transitmodel sometimes has a NaN value sigma = computePointSigma(time, flux, fitT.transitmodel, period, epoch, duration) depth = k ** 2 significance = depth / sigma phase = getPhase(time, flux, period, epoch)[0] nTransitPoints = np.sum((-duration * 0.5 < phase) & (phase < duration * 0.5)) SNR = significance * nTransitPoints**0.5 return SNR, period, epoch, duration, depth, fitT.transitmodel, S.f_1, S.convolved_bls def computePointSigma(time, flux, transitModel, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) mt2, mf2 = removeTransits(time, transitModel, period, epoch, duration) return np.nanstd(f2 - mf2) def removeTransits(time, flux, period, epoch, duration): halfDur = 0.5 * duration / 24. bad = np.where(time < epoch - period + halfDur)[0] for p in np.arange(epoch, time[-1] + period, period): bad = np.append(bad, np.where((p - halfDur < time) & (time < p + halfDur))[0]) good = np.setxor1d(range(len(time)), bad) return time[good], flux[good] def computeTransitDuration(period, rho, k): b = 0.1 # Impact parameter (default value in ktransit) G = 6.67384e-11 # Gravitational constant P = period * 86400 # Period in seconds stellarDensity = rho * 1000 rStarOverA = ((4 * np.pi**2) / (G * stellarDensity * P**2))**(1./3.) 
cosI = b * rStarOverA sinI = np.sqrt(1 - cosI**2) coeff = rStarOverA * np.sqrt((1+k)**2 - b**2) / sinI if coeff > 1: return 0 else: duration = (P / np.pi) * np.arcsin(coeff) return duration / 3600 # Duration in hours def findSecondary(time, flux, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) minp, maxp = period - 0.1, period + 0.1 if t2[-1] - t2[0] == 0 or 1./maxp < 1./(t2[-1] - t2[0]): return (np.nan,)*5 if minp < 0.5: minp = 0.5 planetInfo = do_bls_and_fit(t2, f2, minp, maxp) return (t2,) + planetInfo[0:4] + (planetInfo[5],) def computeOddEvenModels(time, flux, per, epo): gdOdd = {'period': per * 2, 'T0': epo} gdEven = {'period': per * 2, 'T0': epo + per} freeParPlanet = ['rprs'] fitT_odd = fitModel(time, flux, gdOdd, freeParPlanet) fitT_even = fitModel(time, flux, gdEven, freeParPlanet) return fitT_odd, fitT_even def main(filename): """Fit a transit model to a lightcurve. 1. Remove outliers. 2. Detrend the data with a binwidth of 26 cadences. Since MAX_DURATION_HOURS = 6, and 6 hours = ~13 cadences (ceiling of 12.245), detrending with a binwidth of double this value will preserve all events with a duration of 13 cadences or less. 3. Create an "S" object. (???) [1 is added to the flux to avoid a division by zero error] 4. Run the BLS algorithm and Tom's transit fitting algorithm. Since the BLS can lock on to an incorrect, shorter period event, I run it on four different minimum periods, chosen somewhat arbitrarily. These results go into a dictionary sorted by the calculated SNR of each fit, and the parameters which give the maximum SNR are used. 5. Plot the original lightcurve, the BLS statistics from its minimum to maximum period, and a phased lightcurve. 6. Save the plot, and return a string containing the parameters of the fit. """ name = filename[-13:-4] time, flux = np.genfromtxt(filename, unpack = True) if np.all(np.isnan(flux)): return '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' %((name,)+(np.nan,)*6) time, flux = outlierRemoval(time, flux) flux = medianDetrend(flux, 26) # Main transit search minPeriod = 0.5 # Limitations of BLS Fortran code maxPeriod = (time[-1] - time[0]) / 2. 
SNR, period, epoch, duration, depth, transitModel, period_guesses, \ convolved_bls = do_bls_and_fit(time, flux, minPeriod, maxPeriod) # For the phase curves phase, phasedFlux = getPhase(time, flux, period, epoch) phaseModel, phasedFluxModel = getPhase(time, transitModel, period, epoch) # Secondary search secTime, secSNR, secPer, secEpoch, secDur, secModel = findSecondary(time, flux, period, epoch, duration) if secSNR > 5 and abs(period - secPer) < 0.05: secPhase, secPhaseModel = getPhase(secTime, secModel, secPer, epoch) idx = len(secPhase[secPhase < 0]) else: secPhase, secPhaseModel, idx = [], [], 1 # Odd/Even plot fitT_odd, fitT_even = computeOddEvenModels(time, flux, period, epoch) phaseModel_odd, phasedFluxModel_odd = getPhase(time, fitT_odd.transitmodel, period * 2, epoch) phaseModel_even, phasedFluxModel_even = getPhase(time, fitT_even.transitmodel, period * 2, epoch + period) depthOdd = fitT_odd.fitresultplanets['pnum0']['rprs'] ** 2 depthEven = fitT_even.fitresultplanets['pnum0']['rprs'] ** 2 phaseOdd, fluxOdd = getPhase(time, flux, period * 2, epoch) phaseEven, fluxEven = getPhase(time, flux, period * 2, epoch + period) x1, x2 = -duration, duration y1, y2 = -3*np.std(fluxOdd), 3*np.std(fluxOdd) if min(fluxOdd) < y1: y1 = min(fluxOdd) - np.std(fluxOdd) # sigma = abs(depth1 - depth2) / sqrt(u1^2 + u2^2) durOdd = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_odd.fitresultplanets['pnum0']['rprs']) durEven = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_even.fitresultplanets['pnum0']['rprs']) sigma = computePointSigma(time, flux, transitModel, period, epoch, duration) nOddPoints = np.sum((-durOdd*0.5 < phaseOdd) & (phaseOdd < durOdd * 0.5)) nEvenPoints = np.sum((-durEven*0.5 < phaseEven) & (phaseEven < durEven * 0.5)) uOdd, uEven = sigma / np.sqrt(nOddPoints), sigma / np.sqrt(nEvenPoints) depthDiffSigma = abs(depthOdd - depthEven) / np.sqrt(uOdd**2 + uEven**2) if doPlot: gs = gridspec.GridSpec(3,2) ax1 = plt.subplot(gs[0,:]) axOdd = plt.subplot(gs[1,0]) axEven = plt.subplot(gs[1,1]) ax3 = plt.subplot(gs[2,:]) gs.update(wspace = 0, hspace = 0.5) ax1.plot(time, flux, 'k') y1, y2 = ax1.get_ylim() ax1.vlines(np.arange(epoch, time[-1], period), y1, y2, color = 'r', linestyles = 'dashed', linewidth = 0.5) ax1.axis([time[0], time[-1], y1, y2]) ax1.set_title('kplr%s; best period = %8.6g days; SNR = %8.6g' %(name, period, SNR)) ax1.set_xlabel('days') axOdd.set_ylabel('flux') axOdd.scatter(phaseOdd, fluxOdd, marker = '.', s = 1, color = 'k', alpha = 1) axOdd.plot(phaseModel_odd, phasedFluxModel_odd, 'r') axOdd.axhline(-depthOdd, x1, x2) axOdd.axis([x1,x2,y1,y2]) axOdd.set_title('odd') axEven.scatter(phaseEven, fluxEven, marker = '.', s = 1, color = 'k', alpha = 1) axEven.plot(phaseModel_even, phasedFluxModel_even, 'r') axEven.axhline(-depthEven, x1, x2) axEven.yaxis.tick_right() axEven.axis([x1,x2,y1,y2]) axEven.set_title('even') if secondary: plt.plot(secPhase[:idx], secPhaseModel[:idx], 'c') plt.plot(secPhase[idx:], secPhaseModel[idx:], 'c') ax3.scatter(phase, phasedFlux, marker = '.', s = 1, color = 'k') ax3.plot(phaseModel, phasedFluxModel, 'r') y1, y2 = -3*np.std(phasedFlux), 3*np.std(phasedFlux) if min(phasedFlux) < y1: y1 = min(phasedFlux) - np.std(phasedFlux) ax3.axis([phase[0], phase[-1], y1, y2]) ax3.set_xlabel('phase [hours]') ax3.text(0.5, 1.25, 'depth diff sigma = %.3f' %depthDiffSigma, horizontalalignment = 'center', verticalalignment = 'center', transform = ax3.transAxes) if plotOption == 'save': plt.savefig(figureSaveLocation + 
'%s.png' %name, dpi = 200) plt.close() elif plotOption == 'show': plt.show() successString = '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' \ %(name, SNR, period, depth, epoch, duration, secSNR) return successString def getResults(): rfn = '/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/run1/results.txt' names, periods = np.genfromtxt(rfn, usecols = (0,2), unpack = True) return names, periods if __name__ == '__main__': # files = np.array(glob.glob('/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/dataSVD/*.txt')) files = np.genfromtxt('/Users/Yash/Desktop/f0pcs.txt', dtype = 'str') title = '\t'.join(['name\t\t', 'SNR\t', 'period[days]', 'depth\t', 'epoch[day]', 'duration[hours]', 'secondary SNR']) print(title) # Multiprocesses the code into a Pool of 7, while writing the results to # resultsFilename as each iteration of the code completes. Also prints # results to the console and gives an ETA. #-------------------------- MULTIPROCESSING --------------------------# # with open(resultsFilename, 'w') as rf: # rf.write(title + '\n') # p = Pool(7) # start = timeit.default_timer(); progressString = '' # for i, res in enumerate(p.imap_unordered(main, files), 1): # with open(resultsFilename, 'a') as rf: # rf.write(res + '\n') # avg = (timeit.default_timer() - start)/i # eta = (len(files) - i) * avg # sys.stdout.write('\b \b'*len(progressString)) # print(res) # progressString = '%i/%i done, avg %3.2f sec per target, eta: %s' %(i, len(files), # avg, pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(eta))) # sys.stdout.write(progressString); sys.stdout.flush() # p.close() # p.join() # total = timeit.default_timer() - start # print('\ntotal elapsed time: %s' %pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(total)))
np.max(out[:,1] - out[:,0])

       The algorithm fails if a value is exactly equal to the threshold.
       To guard against this, a tiny offset (1e-12) is added to the
       threshold so that no value can ever compare exactly equal to it."""
random_line_split
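The sign-change trick at the heart of plateau() is easy to exercise in isolation. Below is a hedged re-implementation under the same assumptions (1-D float array, scalar threshold, tiny offset against exact equality); it returns the crossings already paired into rows and uses len() checks throughout — worth noting because outlierRemoval's `out1 == []` comparison against a NumPy array is fragile in exactly this spot.

```python
import numpy as np

def crossings(arr, threshold):
    """Rows of (rise, fall) indices where arr exceeds threshold.

    Same shift-and-multiply trick as plateau(): a negative product of a
    sample with its predecessor marks a threshold crossing.
    """
    a = arr.astype(np.float64) - threshold + 1e-12   # offset guards exact equality
    sign_change = a * np.roll(a, 1)
    sign_change[0] = 1.0 if a[0] <= 0 else -1.0      # zeroth element done by hand
    loc = np.where(sign_change < 0)[0]
    if len(loc) % 2 != 0:                            # still above threshold at the end
        loc = np.append(loc, len(a))
    return loc.reshape(-1, 2)

flux = np.zeros(20)
flux[5:8] = 10.0                      # one synthetic three-point excursion
out = crossings(flux, 5.0)
print(out)                            # [[5 8]]
print(out[:, 1] - out[:, 0])          # plateau lengths, per the docstring: [3]
```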
yash_bls.py
from __future__ import division, print_function import numpy as np import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import clean_and_search from ktransit import FitTransit from multiprocessing import Pool from scipy import ndimage import glob, timeit, sys import time as pythonTime # OPTIONS doPlot = True plotOption = 'save' secondary = True resultsFilename = '/Users/Yash/Desktop/results.txt' figureSaveLocation = '/Users/Yash/Desktop/' # -------- PLOTTING OPTIONS -------- # import matplotlib def plateau(array, threshold): """Find plateaus in an array, i.e continuous regions that exceed threshold Given an array of numbers, return a 2d array such that out[:,0] marks the indices where the array crosses threshold from below, and out[:,1] marks the next time the array crosses that same threshold from below. Inputs: array (1d numpy array) threshold (float or array) If threshold is a single number, any point above that value is above threshold. If it's an array, it must have the same length as the first argument, and an array[i] > threshold[i] to be included as a plateau Returns: Numpy 2d array with 2 columns. Notes: To find the length of the plateaus, use out[:,1] - out[:,0] To find the length of the largest plateau, use np.max(out[:,1] - out[:,0]) The algorithm fails if a value is exactly equal to the threshold. To guard against this, we add a very small amount to threshold to ensure floating point arithmetic prevents two numbers being exactly equal.""" arr = array.astype(np.float32) arr = arr - threshold + 1e-12 arrPlus = np.roll(arr, 1) #Location of changes from -ve to +ve (or vice versa) #Last point is bogus , so we calculate it by hand sgnChange = arr*arrPlus #Roll around can't compute sign change for zeroth elt. sgnChange[0] = +1 if arr[0] > 0: sgnChange[0] = -1 loc = np.where(sgnChange < 0)[0] if np.fmod( len(loc), 2) != 0: loc.resize( (len(loc)+1)) loc[-1] = len(arr) return loc def outlierRemoval(time, flux): fluxDetrended = medianDetrend(flux, 3) out1 = plateau(fluxDetrended, 5 * np.std(fluxDetrended)) out2 = plateau(-fluxDetrended, 5 * np.std(fluxDetrended)) if out1 == [] and out2 == []: singleOutlierIndices = [] else: outliers = np.append(out1, out2).reshape(-1,2) # Only want groups of one outlier, since > 1 may be transit points singleOutlierIndices = np.sort(outliers[(outliers[:,1] - outliers[:,0] == 1)][:,0]) # Check periodicity of outliers, with PRECISION of 0.0205 days # 0.0205 days = 29.52 minutes = ~length of long cadence precision = 0.0205 outlierTimes = time[singleOutlierIndices] diffs = [outlierTimes[i+1] - outlierTimes[i] for i in range(0, len(outlierTimes)-1)] diffs = [round(d, 5) for d in diffs] if len(singleOutlierIndices) >= 4: if len(set(diffs)) == len(diffs): possibleTimes = np.array([]) else: period = max(set(diffs), key = diffs.count) # period = most common difference epoch = outlierTimes[diffs.index(period)] possibleTimes = np.arange(epoch, outlierTimes[-1] + 0.5*period, period) notOutliers = [] for i in range(len(outlierTimes)): if np.any((abs(possibleTimes - outlierTimes[i]) < precision)): notOutliers.append(i) singleOutlierIndices = np.delete(singleOutlierIndices, notOutliers) elif len(singleOutlierIndices) == 3: if abs(diffs[0] - diffs[1]) < precision: singleOutlierIndices = [] # Uncomment to see how the plotting algorithm worked for a lightcurve # ----------------------------- PLOTTING ----------------------------- # # plt.subplot(311) # plt.scatter(time, flux, marker = '.', s = 1, color = 'k', alpha = 1) # 
plt.scatter(time[singleOutlierIndices], flux[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # plt.title('Original') # plt.subplot(312) # plt.scatter(time, fluxDetrended, marker = '.', s = 1, color = 'k', alpha = 1) # plt.scatter(time[singleOutlierIndices], fluxDetrended[singleOutlierIndices], # s = 30, marker = 'o', facecolors = 'none', edgecolors = 'r') # x1, x2, y1, y2 = plt.axis() # plt.hlines([-5*np.std(fluxDetrended), 5*np.std(fluxDetrended)], x1, x2, # color = 'b', linestyles = 'dashed') # plt.axis([x1, x2, y1, y2]) # plt.title('Detrended') # plt.subplot(313) # plt.scatter(np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices), # marker = '.', s = 1, color = 'k', alpha = 1) # plt.title('Outliers removed: ' + str(len(singleOutlierIndices))) # plt.show() # -------------------------------------------------------------------- # return np.delete(time, singleOutlierIndices), np.delete(flux, singleOutlierIndices) def medianDetrend(flux, binWidth): halfNumPoints = binWidth // 2 medians = [] for i in range(len(flux)): if i < halfNumPoints: medians.append(np.median(flux[:i+halfNumPoints+1])) elif i > len(flux) - halfNumPoints - 1: medians.append(np.median(flux[i-halfNumPoints:])) else: medians.append(np.median(flux[i-halfNumPoints : i+halfNumPoints+1])) return flux - medians def getPhase(time, flux, period, epoch, centerPhase = 0): """Get the phase of a lightcurve. How it works using an example where epoch = 2, period = 3: 1. Subtract the epoch from all times [1, 2, 3, 4, 5, 6, 7] to get [-1, 0, 1, 2, 3, 4, 5] then divide by the period [3] to get all time values in phase values which gets you [-0.3, 0, 0.3, 0.6, 1, 1.3, 1.6] 2. Subtract the PHASE NUMBER (floor function) from each PHASE (date1) which gets you [0.7, 0, 0.3, 0.6, 0, 0.3, 0.6] 3. Sort all the adjusted phases to get [0, 0, 0.3, 0.3, 0.6, 0.6, 0.7] THERE WILL BE negative values in the beginning here, just not in this example since no ex. time value divided by the period left a decimal less than 0.25 4. Sort the flux values in the same way the phases were sorted Inputs: time Time values of data. (IN DAYS) flux Flux values of data. period Period of transit. epoch Epoch of transit. centerPhase Which phase should be at the center. Returns: q1 Phase values. (IN HOURS) f1 Flux values for each phase. """ epoch += centerPhase * period date1 = (time - epoch) / period + 0.5 phi1 = ((date1) - np.floor(date1)) - 0.5 q1 = np.sort(phi1) * period * 24. 
f1 = flux[np.argsort(phi1)] return q1, f1 def fitModel(time, flux, guessDict, freeParPlanet, ferr = 0): if not np.all(ferr): ferr = np.ones_like(flux)*1.E-5 freeParStar = ['rho'] # Make the fitting object according to guess dictionary fitT = FitTransit() fitT.add_guess_star(ld1 = 0, ld2 = 0) fitT.add_guess_planet(period = guessDict['period'], T0 = guessDict['T0']) fitT.add_data(time = time, flux = flux, ferr = ferr) fitT.free_parameters(freeParStar, freeParPlanet) fitT.do_fit() return fitT def do_bls_and_fit(time, flux, min_period, max_period): S = clean_and_search.Search(time, flux + 1, np.ones_like(flux)*1.E-5) S.do_bls2(min_period = min_period, max_period = max_period, min_duration_hours = 1.5, max_duration_hours = 6., freq_step = 1.E-4, doplot = False, norm = False) guessDict = {'period': S.periods[0], 'T0': S.epoch} freeParPlanet = ['period', 'T0', 'rprs'] fitT = fitModel(time, flux, guessDict, freeParPlanet) # Readability of output data period = fitT.fitresultplanets['pnum0']['period'] epoch = fitT.fitresultplanets['pnum0']['T0'] k = fitT.fitresultplanets['pnum0']['rprs'] rho = fitT.fitresultstellar['rho'] duration = computeTransitDuration(period, rho, k) if not duration: duration = S.duration * 24 # Calculating transit depth significance ## fitT.transitmodel sometimes has a NaN value sigma = computePointSigma(time, flux, fitT.transitmodel, period, epoch, duration) depth = k ** 2 significance = depth / sigma phase = getPhase(time, flux, period, epoch)[0] nTransitPoints = np.sum((-duration * 0.5 < phase) & (phase < duration * 0.5)) SNR = significance * nTransitPoints**0.5 return SNR, period, epoch, duration, depth, fitT.transitmodel, S.f_1, S.convolved_bls def computePointSigma(time, flux, transitModel, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) mt2, mf2 = removeTransits(time, transitModel, period, epoch, duration) return np.nanstd(f2 - mf2) def removeTransits(time, flux, period, epoch, duration):
def computeTransitDuration(period, rho, k): b = 0.1 # Impact parameter (default value in ktransit) G = 6.67384e-11 # Gravitational constant P = period * 86400 # Period in seconds stellarDensity = rho * 1000 rStarOverA = ((4 * np.pi**2) / (G * stellarDensity * P**2))**(1./3.) cosI = b * rStarOverA sinI = np.sqrt(1 - cosI**2) coeff = rStarOverA * np.sqrt((1+k)**2 - b**2) / sinI if coeff > 1: return 0 else: duration = (P / np.pi) * np.arcsin(coeff) return duration / 3600 # Duration in hours def findSecondary(time, flux, period, epoch, duration): t2, f2 = removeTransits(time, flux, period, epoch, duration) minp, maxp = period - 0.1, period + 0.1 if t2[-1] - t2[0] == 0 or 1./maxp < 1./(t2[-1] - t2[0]): return (np.nan,)*5 if minp < 0.5: minp = 0.5 planetInfo = do_bls_and_fit(t2, f2, minp, maxp) return (t2,) + planetInfo[0:4] + (planetInfo[5],) def computeOddEvenModels(time, flux, per, epo): gdOdd = {'period': per * 2, 'T0': epo} gdEven = {'period': per * 2, 'T0': epo + per} freeParPlanet = ['rprs'] fitT_odd = fitModel(time, flux, gdOdd, freeParPlanet) fitT_even = fitModel(time, flux, gdEven, freeParPlanet) return fitT_odd, fitT_even def main(filename): """Fit a transit model to a lightcurve. 1. Remove outliers. 2. Detrend the data with a binwidth of 26 cadences. Since MAX_DURATION_HOURS = 6, and 6 hours = ~13 cadences (ceiling of 12.245), detrending with a binwidth of double this value will preserve all events with a duration of 13 cadences or less. 3. Create an "S" object. (???) [1 is added to the flux to avoid a division by zero error] 4. Run the BLS algorithm and Tom's transit fitting algorithm. Since the BLS can lock on to an incorrect, shorter period event, I run it on four different minimum periods, chosen somewhat arbitrarily. These results go into a dictionary sorted by the calculated SNR of each fit, and the parameters which give the maximum SNR are used. 5. Plot the original lightcurve, the BLS statistics from its minimum to maximum period, and a phased lightcurve. 6. Save the plot, and return a string containing the parameters of the fit. """ name = filename[-13:-4] time, flux = np.genfromtxt(filename, unpack = True) if np.all(np.isnan(flux)): return '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' %((name,)+(np.nan,)*6) time, flux = outlierRemoval(time, flux) flux = medianDetrend(flux, 26) # Main transit search minPeriod = 0.5 # Limitations of BLS Fortran code maxPeriod = (time[-1] - time[0]) / 2. 
SNR, period, epoch, duration, depth, transitModel, period_guesses, \ convolved_bls = do_bls_and_fit(time, flux, minPeriod, maxPeriod) # For the phase curves phase, phasedFlux = getPhase(time, flux, period, epoch) phaseModel, phasedFluxModel = getPhase(time, transitModel, period, epoch) # Secondary search secTime, secSNR, secPer, secEpoch, secDur, secModel = findSecondary(time, flux, period, epoch, duration) if secSNR > 5 and abs(period - secPer) < 0.05: secPhase, secPhaseModel = getPhase(secTime, secModel, secPer, epoch) idx = len(secPhase[secPhase < 0]) else: secPhase, secPhaseModel, idx = [], [], 1 # Odd/Even plot fitT_odd, fitT_even = computeOddEvenModels(time, flux, period, epoch) phaseModel_odd, phasedFluxModel_odd = getPhase(time, fitT_odd.transitmodel, period * 2, epoch) phaseModel_even, phasedFluxModel_even = getPhase(time, fitT_even.transitmodel, period * 2, epoch + period) depthOdd = fitT_odd.fitresultplanets['pnum0']['rprs'] ** 2 depthEven = fitT_even.fitresultplanets['pnum0']['rprs'] ** 2 phaseOdd, fluxOdd = getPhase(time, flux, period * 2, epoch) phaseEven, fluxEven = getPhase(time, flux, period * 2, epoch + period) x1, x2 = -duration, duration y1, y2 = -3*np.std(fluxOdd), 3*np.std(fluxOdd) if min(fluxOdd) < y1: y1 = min(fluxOdd) - np.std(fluxOdd) # sigma = abs(depth1 - depth2) / sqrt(u1^2 + u2^2) durOdd = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_odd.fitresultplanets['pnum0']['rprs']) durEven = computeTransitDuration(period, fitT_odd.fitresultstellar['rho'], fitT_even.fitresultplanets['pnum0']['rprs']) sigma = computePointSigma(time, flux, transitModel, period, epoch, duration) nOddPoints = np.sum((-durOdd*0.5 < phaseOdd) & (phaseOdd < durOdd * 0.5)) nEvenPoints = np.sum((-durEven*0.5 < phaseEven) & (phaseEven < durEven * 0.5)) uOdd, uEven = sigma / np.sqrt(nOddPoints), sigma / np.sqrt(nEvenPoints) depthDiffSigma = abs(depthOdd - depthEven) / np.sqrt(uOdd**2 + uEven**2) if doPlot: gs = gridspec.GridSpec(3,2) ax1 = plt.subplot(gs[0,:]) axOdd = plt.subplot(gs[1,0]) axEven = plt.subplot(gs[1,1]) ax3 = plt.subplot(gs[2,:]) gs.update(wspace = 0, hspace = 0.5) ax1.plot(time, flux, 'k') y1, y2 = ax1.get_ylim() ax1.vlines(np.arange(epoch, time[-1], period), y1, y2, color = 'r', linestyles = 'dashed', linewidth = 0.5) ax1.axis([time[0], time[-1], y1, y2]) ax1.set_title('kplr%s; best period = %8.6g days; SNR = %8.6g' %(name, period, SNR)) ax1.set_xlabel('days') axOdd.set_ylabel('flux') axOdd.scatter(phaseOdd, fluxOdd, marker = '.', s = 1, color = 'k', alpha = 1) axOdd.plot(phaseModel_odd, phasedFluxModel_odd, 'r') axOdd.axhline(-depthOdd, x1, x2) axOdd.axis([x1,x2,y1,y2]) axOdd.set_title('odd') axEven.scatter(phaseEven, fluxEven, marker = '.', s = 1, color = 'k', alpha = 1) axEven.plot(phaseModel_even, phasedFluxModel_even, 'r') axEven.axhline(-depthEven, x1, x2) axEven.yaxis.tick_right() axEven.axis([x1,x2,y1,y2]) axEven.set_title('even') if secondary: plt.plot(secPhase[:idx], secPhaseModel[:idx], 'c') plt.plot(secPhase[idx:], secPhaseModel[idx:], 'c') ax3.scatter(phase, phasedFlux, marker = '.', s = 1, color = 'k') ax3.plot(phaseModel, phasedFluxModel, 'r') y1, y2 = -3*np.std(phasedFlux), 3*np.std(phasedFlux) if min(phasedFlux) < y1: y1 = min(phasedFlux) - np.std(phasedFlux) ax3.axis([phase[0], phase[-1], y1, y2]) ax3.set_xlabel('phase [hours]') ax3.text(0.5, 1.25, 'depth diff sigma = %.3f' %depthDiffSigma, horizontalalignment = 'center', verticalalignment = 'center', transform = ax3.transAxes) if plotOption == 'save': plt.savefig(figureSaveLocation + 
'%s.png' %name, dpi = 200) plt.close() elif plotOption == 'show': plt.show() successString = '%s\t\t%-8.6g\t%-8.6g\t%-8.6g\t%-4.3f\t%-8.6g\t%-8.6g' \ %(name, SNR, period, depth, epoch, duration, secSNR) return successString def getResults(): rfn = '/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/run1/results.txt' names, periods = np.genfromtxt(rfn, usecols = (0,2), unpack = True) return names, periods if __name__ == '__main__': # files = np.array(glob.glob('/Users/Yash/Desktop/NASA/Summer2014/k2/changedWhichpix/dataSVD/*.txt')) files = np.genfromtxt('/Users/Yash/Desktop/f0pcs.txt', dtype = 'str') title = '\t'.join(['name\t\t', 'SNR\t', 'period[days]', 'depth\t', 'epoch[day]', 'duration[hours]', 'secondary SNR']) print(title) # Multiprocesses the code into a Pool of 7, while writing the results to # resultsFilename as each iteration of the code completes. Also prints # results to the console and gives an ETA. #-------------------------- MULTIPROCESSING --------------------------# # with open(resultsFilename, 'w') as rf: # rf.write(title + '\n') # p = Pool(7) # start = timeit.default_timer(); progressString = '' # for i, res in enumerate(p.imap_unordered(main, files), 1): # with open(resultsFilename, 'a') as rf: # rf.write(res + '\n') # avg = (timeit.default_timer() - start)/i # eta = (len(files) - i) * avg # sys.stdout.write('\b \b'*len(progressString)) # print(res) # progressString = '%i/%i done, avg %3.2f sec per target, eta: %s' %(i, len(files), # avg, pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(eta))) # sys.stdout.write(progressString); sys.stdout.flush() # p.close() # p.join() # total = timeit.default_timer() - start # print('\ntotal elapsed time: %s' %pythonTime.strftime('%H:%M:%S', pythonTime.gmtime(total)))
halfDur = 0.5 * duration / 24.  # duration is in hours, time is in days
    bad = np.where(time < epoch - period + halfDur)[0]  # any partial event before the first epoch
    for p in np.arange(epoch, time[-1] + period, period):
        bad = np.append(bad, np.where((p - halfDur < time) & (time < p + halfDur))[0])
    good = np.setxor1d(range(len(time)), bad)  # indices never flagged as in-transit
    return time[good], flux[good]
identifier_body
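computeTransitDuration packs the circular-orbit transit geometry into a few lines, and plugging in round numbers makes the unit bookkeeping easier to follow. The inputs below (Sun-like density entered in g/cc to match the code's `rho * 1000` step, a Jupiter-sized radius ratio, and the code's fixed b = 0.1) are illustrative only.

```python
import numpy as np

G = 6.67384e-11                       # same constant as in the module
period_days, rho_cgs, k, b = 365.25, 1.4, 0.1, 0.1

P = period_days * 86400.0             # seconds
rho_si = rho_cgs * 1000.0             # kg / m^3, mirroring `rho * 1000`
r_star_over_a = ((4 * np.pi**2) / (G * rho_si * P**2)) ** (1.0 / 3.0)
cos_i = b * r_star_over_a
sin_i = np.sqrt(1 - cos_i**2)
coeff = r_star_over_a * np.sqrt((1 + k)**2 - b**2) / sin_i
print((P / np.pi) * np.arcsin(coeff) / 3600.0)   # ~23 hours for these inputs
```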
index.js
import Zip from 'adm-zip';

/**
 * Build a scanner over a course export archive.
 * @param {string} filePath - path to the course zip file
 * @return {Object} scanner exposing course-file helpers
 */
export default function (filePath) {
    let courseZip = new Zip(filePath);

    let courseFileScanner = {
        getZipObject: () => {
            return courseZip;
        },
        getCourseFiles: () => {
            return courseZip.getEntries().filter(courseFileScanner._isCourseFile).map(courseFileScanner._getCourseFileId);
        },
        _isCourseFile: (file) => {
            return file.entryName.match(/^(?=.*\bcsfiles\/home_dir\b)(?!.*\b.xml\b).*$/ig);
let fileName = file.entryName; let startIndex = fileName.lastIndexOf('xid-') + 4; let endIndex = fileName.indexOf('_', startIndex); file.courseFileId = fileName.substring(startIndex, endIndex); return file; }, getDatFiles: () => { return courseZip.getEntries().filter(courseFileScanner._isDatFile); }, _isDatFile: (file) => { return file.entryName.match(/^(?=.*res)(?=.*\b\.dat\b).*$/g); } }; return courseFileScanner; }
}, _getCourseFileId: (file) => {
random_line_split
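For readers following the Python records, the id-extraction logic in index.js translates directly: keep zip entries under csfiles/home_dir that are not .xml files, then take the token sitting between 'xid-' and the next underscore. The entry names below are hypothetical.

```python
import re

entries = [
    "csfiles/home_dir/xid-123456_1/notes.pdf",   # made-up archive entries
    "csfiles/home_dir/meta.xml",
    "res00001.dat",
]

course_files = [e for e in entries
                if "csfiles/home_dir" in e and not e.endswith(".xml")]
for name in course_files:
    m = re.search(r"xid-([^_]+)_", name)         # id between 'xid-' and '_'
    if m:
        print(name, "->", m.group(1))            # ... -> 123456
```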
MegareleaseOrg.py
# -*- coding: utf-8 -*- ############################################################################ # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################ from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo class MegareleaseOrg(XFileSharingPro):
getInfo = create_getInfo(MegareleaseOrg)
__name__ = "MegareleaseOrg" __type__ = "hoster" __pattern__ = r'https?://(?:www\.)?megarelease.org/\w{12}' __version__ = "0.01" __description__ = """Megarelease.org hoster plugin""" __author_name__ = ("derek3x", "stickell") __author_mail__ = ("[email protected]", "[email protected]") HOSTER_NAME = "megarelease.org" FILE_INFO_PATTERN = r'<font color="red">%s/(?P<N>.+)</font> \((?P<S>[^)]+)\)</font>' % __pattern__
identifier_body
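The hoster plugin is driven almost entirely by its URL pattern, so a two-line regex check is a handy smoke test. This assumes the escaped-dot form of the pattern (as in the cleaned-up class above); both links are made up.

```python
import re

pattern = r'https?://(?:www\.)?megarelease\.org/\w{12}'
print(bool(re.match(pattern, 'http://megarelease.org/abcdef123456')))    # True
print(bool(re.match(pattern, 'http://megareleaseXorg/abcdef123456')))    # False: the escaped dot no longer matches 'X'
```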
MegareleaseOrg.py
# -*- coding: utf-8 -*- ############################################################################ # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################ from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo class
(XFileSharingPro):
    __name__ = "MegareleaseOrg"
    __type__ = "hoster"
    __pattern__ = r'https?://(?:www\.)?megarelease\.org/\w{12}'
    __version__ = "0.01"
    __description__ = """Megarelease.org hoster plugin"""
    __author_name__ = ("derek3x", "stickell")
    __author_mail__ = ("[email protected]", "[email protected]")

    HOSTER_NAME = "megarelease.org"

    FILE_INFO_PATTERN = r'<font color="red">%s/(?P<N>.+)</font> \((?P<S>[^)]+)\)</font>' % __pattern__


getInfo = create_getInfo(MegareleaseOrg)
MegareleaseOrg
identifier_name
MegareleaseOrg.py
# -*- coding: utf-8 -*- ############################################################################ # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License #
############################################################################

from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo


class MegareleaseOrg(XFileSharingPro):
    __name__ = "MegareleaseOrg"
    __type__ = "hoster"
    __pattern__ = r'https?://(?:www\.)?megarelease\.org/\w{12}'
    __version__ = "0.01"
    __description__ = """Megarelease.org hoster plugin"""
    __author_name__ = ("derek3x", "stickell")
    __author_mail__ = ("[email protected]", "[email protected]")

    HOSTER_NAME = "megarelease.org"

    FILE_INFO_PATTERN = r'<font color="red">%s/(?P<N>.+)</font> \((?P<S>[^)]+)\)</font>' % __pattern__


getInfo = create_getInfo(MegareleaseOrg)
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
random_line_split
comments.component.ts
import {Component, Input} from '@angular/core';
import {MatSnackBar} from '@angular/material';
import {Observable} from 'rxjs/Observable';

import 'rxjs/add/operator/finally';

import {GraphApiError} from '../graph-api-error';
import {GraphApiErrorComponent} from '../graph-api-error.component';
import {Comment} from '../comment';

/*
 * The Component showing the list of comments.
 */
@Component({
    selector: 'comments',
    templateUrl: './_comments.component.html',
    styleUrls: ['./comments.component.css']
})
export class CommentsComponent {
    constructor(protected matSnackBar: MatSnackBar)
    /*
     * All Comments shown by this Component.
     */
    _comments: Comment[];

    /*
     * Whether the comments have finished loading.
     */
    _loaded = false;

    /*
     * Whether to override the loading indicator.
     *
     * If the containing Component knows for a fact that the data to be shown
     * is already available, it can set this flag to cause the component to
     * never show a spinner. This can be helpful in situations where the
     * spinner would otherwise only appear for a few milliseconds, causing an
     * odd-looking twitch in the application.
     */
    @Input()
    loaded = false;

    @Input()
    set comments(comments: Observable<Comment>|undefined) {
        if (comments) {
            this._comments = [];
            this._loaded = this.loaded;
            comments
                .finally(() => this._loaded = true)
                .subscribe(
                    comment => this._comments.push(comment),
                    err => GraphApiErrorComponent.show(this.matSnackBar, err));
        }
    }
}
{}
identifier_body
comments.component.ts
import {Component, Input} from '@angular/core';
import {MatSnackBar} from '@angular/material';
import {Observable} from 'rxjs/Observable';

import 'rxjs/add/operator/finally';

import {GraphApiError} from '../graph-api-error';
import {GraphApiErrorComponent} from '../graph-api-error.component';
import {Comment} from '../comment';

/*
 * The Component showing the list of comments.
 */
@Component({
    selector: 'comments',
    templateUrl: './_comments.component.html',
    styleUrls: ['./comments.component.css']
})
export class CommentsComponent {
    constructor(protected matSnackBar: MatSnackBar) {}

    /*
     * All Comments shown by this Component.
     */
    _comments: Comment[];

    /*
     * Whether the comments have finished loading.
     */
    _loaded = false;

    /*
     * Whether to override the loading indicator.
     *
     * If the containing Component knows for a fact that the data to be shown
     * is already available, it can set this flag to cause the component to
     * never show a spinner. This can be helpful in situations where the
     * spinner would otherwise only appear for a few milliseconds, causing an
     * odd-looking twitch in the application.
     */
    @Input()
    loaded = false;

    @Input()
this._loaded = this.loaded; comments .finally(() => this._loaded = true) .subscribe( comment => this._comments.push(comment), err => GraphApiErrorComponent.show(this.matSnackBar, err)); } } }
set comments(comments: Observable<Comment>|undefined) { if (comments) { this._comments = [];
random_line_split
comments.component.ts
import {Component, Input} from '@angular/core';
import {MatSnackBar} from '@angular/material';
import {Observable} from 'rxjs/Observable';

import 'rxjs/add/operator/finally';

import {GraphApiError} from '../graph-api-error';
import {GraphApiErrorComponent} from '../graph-api-error.component';
import {Comment} from '../comment';

/*
 * The Component showing the list of comments.
 */
@Component({
    selector: 'comments',
    templateUrl: './_comments.component.html',
    styleUrls: ['./comments.component.css']
})
export class CommentsComponent {
    constructor(protected matSnackBar: MatSnackBar) {}

    /*
     * All Comments shown by this Component.
     */
    _comments: Comment[];

    /*
     * Whether the comments have finished loading.
     */
    _loaded = false;

    /*
     * Whether to override the loading indicator.
     *
     * If the containing Component knows for a fact that the data to be shown
     * is already available, it can set this flag to cause the component to
     * never show a spinner. This can be helpful in situations where the
     * spinner would otherwise only appear for a few milliseconds, causing an
     * odd-looking twitch in the application.
     */
    @Input()
    loaded = false;

    @Input()
    set comments(comments: Observable<Comment>|undefined) {
        if (comments)
} }
{ this._comments = []; this._loaded = this.loaded; comments .finally(() => this._loaded = true) .subscribe( comment => this._comments.push(comment), err => GraphApiErrorComponent.show(this.matSnackBar, err)); }
conditional_block
comments.component.ts
import {Component, Input} from '@angular/core';
import {MatSnackBar} from '@angular/material';
import {Observable} from 'rxjs/Observable';

import 'rxjs/add/operator/finally';

import {GraphApiError} from '../graph-api-error';
import {GraphApiErrorComponent} from '../graph-api-error.component';
import {Comment} from '../comment';

/*
 * The Component showing the list of comments.
 */
@Component({
    selector: 'comments',
    templateUrl: './_comments.component.html',
    styleUrls: ['./comments.component.css']
})
export class CommentsComponent {
(protected matSnackBar: MatSnackBar) {}

    /*
     * All Comments shown by this Component.
     */
    _comments: Comment[];

    /*
     * Whether the comments have finished loading.
     */
    _loaded = false;

    /*
     * Whether to override the loading indicator.
     *
     * If the containing Component knows for a fact that the data to be shown
     * is already available, it can set this flag to cause the component to
     * never show a spinner. This can be helpful in situations where the
     * spinner would otherwise only appear for a few milliseconds, causing an
     * odd-looking twitch in the application.
     */
    @Input()
    loaded = false;

    @Input()
    set comments(comments: Observable<Comment>|undefined) {
        if (comments) {
            this._comments = [];
            this._loaded = this.loaded;
            comments
                .finally(() => this._loaded = true)
                .subscribe(
                    comment => this._comments.push(comment),
                    err => GraphApiErrorComponent.show(this.matSnackBar, err));
        }
    }
}
constructor
identifier_name
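The CommentsComponent setter follows a buffer-until-complete pattern: clear the list, push each streamed item, and flip the loaded flag in a finally clause so the spinner disappears even on error. A language-agnostic sketch of the same control flow (Python, with illustrative names standing in for the RxJS calls):

```python
def consume(stream, on_error):
    comments, loaded = [], False
    try:
        for comment in stream:        # stands in for Observable.subscribe
            comments.append(comment)
    except Exception as err:
        on_error(err)                 # stands in for GraphApiErrorComponent.show
    finally:
        loaded = True                 # stands in for .finally(() => this._loaded = true)
    return comments, loaded

print(consume(iter(["first!", "nice post"]), print))   # (['first!', 'nice post'], True)
```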