prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>layout-base.d.ts<|end_file_name|><|fim▁begin|>declare module "ui/layouts/layout-base" {
import view = require("ui/core/view");
import dependencyObservable = require("ui/core/dependency-observable");
/**
* Base class for all views that supports children positioning.
*/
export class LayoutBase extends view.CustomLayoutView {
public static clipToBoundsProperty: dependencyObservable.Property;
/**
* Returns the number of children in this Layout.
*/<|fim▁hole|>
/**
* Returns the view at the specified position.
* @param index The position at which to get the child from.
*/
getChildAt(index: number): view.View;
/**
* Returns the position of the child view
* @param child The child view that we are looking for.
*/
getChildIndex(child: view.View): number;
/**
* Adds the view to children array.
* @param view The view to be added to the end of the children array.
*/
addChild(view: view.View): void;
/**
* Inserts the view to children array at the specified index.
* @param view The view to be added to the end of the children array.
* @param atIndex The insertion index.
*/
insertChild(child: view.View, atIndex: number): void;
/**
* Removes the specified view from the children array.
* @param view The view to remove from the children array.
*/
removeChild(view: view.View): void;
/**
* Removes all views in this layout.
*/
removeChildren(): void;
/**
* Gets or sets padding style property.
*/
padding: string;
/**
* Specify the bottom padding of this layout.
*/
paddingBottom: number;
/**
* Specify the left padding of this layout.
*/
paddingLeft: number;
/**
* Specify the right padding of this layout.
*/
paddingRight: number;
/**
* Specify the top padding of this layout.
*/
paddingTop: number;
}
}<|fim▁end|> | getChildrenCount(): number; |
<|file_name|>stats.operators.data.js<|end_file_name|><|fim▁begin|>'use strict';
var R6MStatsOpData = (function(R6MLangTerms, undefined) {
var WARNING_THRESHOLD = 20,
opStats = {
attackers: [],
defenders: [],
sortInfo: {
field: null,
rank: null,
isDescending: null
}
};
var getAveragesTotals = function getAveragesTotals(opRoleStats) {
var count = 0,
averagesTotals = {};
for (var opKey in opRoleStats) {
for (var sarKey in opRoleStats[opKey].statsAllRanks) {
averagesTotals[sarKey] = averagesTotals[sarKey] || {};
averagesTotals[sarKey].all = averagesTotals[sarKey].all || { total: 0, avg: 0 };
averagesTotals[sarKey].all.total += opRoleStats[opKey].statsAllRanks[sarKey];
}
for (var sbrKey in opRoleStats[opKey].statsByRank) {
for (var key in opRoleStats[opKey].statsByRank[sbrKey]) {
averagesTotals[key] = averagesTotals[key] || {};
averagesTotals[key][sbrKey] = averagesTotals[key][sbrKey] || { total: 0, avg: 0 };
averagesTotals[key][sbrKey].total += opRoleStats[opKey].statsByRank[sbrKey][key];
}
}
count++;
}
for (var statKey in averagesTotals) {
for (var operator in averagesTotals[statKey]) {
averagesTotals[statKey][operator].avg = averagesTotals[statKey][operator].total / count;
}
}
return averagesTotals;
};
var getEmptyStatsObject = function getEmptyStatsObject() {
return {
totalKills: 0,
totalDeaths: 0,
totalPlays: 0,
totalWins: 0,
killsPerRound: 0,
killsPerDeath: 0,
pickRate: 0,
winRate: 0,
survivalRate: 0,
warning: false
};
};
var getCurrentStats = function getCurrentStats() {
return opStats;
};
var getOpRoleStats = function getOpRoleStats(apiOpData, totalRounds, opMetaData) {
var opRoleStats = [],
totalPlaysByRank = {},
totalPlaysAllRanks = 0;
for (var opKey in apiOpData) {
var newOpStats = {
key: opKey,
name: opMetaData[opKey].name,
cssClass: opMetaData[opKey].cssClass,
statsByRank: {},
statsAllRanks: getEmptyStatsObject()
};
for (var rankKey in apiOpData[opKey]) {
var opRankStats = getEmptyStatsObject(),
apiOpRankData = apiOpData[opKey][rankKey];
['totalWins', 'totalKills', 'totalDeaths', 'totalPlays'].forEach(function(statKey) {
opRankStats[statKey] = +apiOpRankData[statKey];
newOpStats.statsAllRanks[statKey] += opRankStats[statKey];
});
totalPlaysByRank[rankKey] = totalPlaysByRank[rankKey] ?
totalPlaysByRank[rankKey] + opRankStats.totalPlays : opRankStats.totalPlays;
totalPlaysAllRanks += opRankStats.totalPlays;
newOpStats.statsByRank[rankKey] = opRankStats;
}
opRoleStats.push(newOpStats);
}
setTallies(opRoleStats, totalRounds, totalPlaysByRank, totalPlaysAllRanks);
setWarnings(opRoleStats);
return {
operators: opRoleStats,
averagesTotals: getAveragesTotals(opRoleStats)
};
};
var set = function set(apiData, totalRounds, opMetaData) {
opStats.attackers = getOpRoleStats(apiData.role.Attacker, totalRounds, opMetaData);
opStats.defenders = getOpRoleStats(apiData.role.Defender, totalRounds, opMetaData);
};
var setTallies = function setTallies(opRoleStats, totalRounds, totalPlaysByRank, totalPlaysAllRanks) {
opRoleStats.forEach(function(operator) {
setTalliesForRank(operator.statsAllRanks);
operator.statsAllRanks.pickRate = (!totalRounds) ? 0 : operator.statsAllRanks.totalPlays / totalRounds;
for (var rankKey in operator.statsByRank) {
var stats = operator.statsByRank[rankKey];
setTalliesForRank(stats);
stats.pickRate = (!totalPlaysByRank[rankKey] || !operator.statsAllRanks.totalPlays || !totalPlaysAllRanks) ? 0 :
(stats.totalPlays / totalPlaysByRank[rankKey]) / (operator.statsAllRanks.totalPlays / totalPlaysAllRanks) * operator.statsAllRanks.pickRate;
stats.pickRate = Math.min(0.99, Math.max(0.001, stats.pickRate));
}
});
};
var setTalliesForRank = function setTalliesForRank(stats) {
stats.killsPerDeath = (!stats.totalDeaths) ? 0 : stats.totalKills / stats.totalDeaths;
stats.killsPerRound = (!stats.totalPlays) ? 0 : stats.totalKills / stats.totalPlays;
stats.survivalRate = (!stats.totalPlays) ? 0 : (stats.totalPlays - stats.totalDeaths) / stats.totalPlays;
stats.winRate = (!stats.totalPlays) ? 0 : stats.totalWins / stats.totalPlays;
};
var setWarnings = function setWarnings(opRoleStats) {
for (var opKey in opRoleStats) {
if (opRoleStats[opKey].statsAllRanks.totalPlays < WARNING_THRESHOLD) {
opRoleStats[opKey].statsAllRanks.warning = true;
}
for (var rankKey in opRoleStats[opKey].statsByRank) {
if (opRoleStats[opKey].statsByRank[rankKey].totalPlays < WARNING_THRESHOLD) {
opRoleStats[opKey].statsByRank[rankKey].warning = true;
}
}
}
};
var trySort = function trySort(sortField, isDescending, optionalRank) {
opStats.sortInfo.field = sortField || 'name';
opStats.sortInfo.rank = optionalRank;
opStats.sortInfo.isDescending = isDescending;
trySortRole(opStats.attackers.operators, sortField, isDescending, optionalRank);
trySortRole(opStats.defenders.operators, sortField, isDescending, optionalRank);
};
var trySortRole = function trySortRole(newOpStats, sortField, isDescending, optionalRank) {
newOpStats.sort(function(a, b) {
var aValue = a.name,
bValue = b.name,
nameCompare = true;
if (sortField != 'name') {
nameCompare = false;
if (!optionalRank) {
aValue = a.statsAllRanks[sortField];<|fim▁hole|> }
if (aValue == bValue) {
aValue = a.name;
bValue = b.name;
nameCompare = true;
}
}
if (nameCompare) {
if (aValue > bValue) {
return 1;
}
if (aValue < bValue) {
return -1;
}
} else {
if (aValue < bValue) {
return 1;
}
if (aValue > bValue) {
return -1;
}
}
return 0;
});
if (isDescending) {
newOpStats.reverse();
}
};
return {
get: getCurrentStats,
set: set,
trySort: trySort
};
})(R6MLangTerms);<|fim▁end|> | bValue = b.statsAllRanks[sortField];
} else {
aValue = (a.statsByRank[optionalRank]) ? a.statsByRank[optionalRank][sortField] : -1;
bValue = (b.statsByRank[optionalRank]) ? b.statsByRank[optionalRank][sortField] : -1; |
<|file_name|>report.py<|end_file_name|><|fim▁begin|>import time
import csv<|fim▁hole|>def report(ob):
#Create log file
log_file_report = ob.file_destination + "/" + "Parameters_Results.log"
log_report = file(log_file_report, 'a' )
#Print parameters
#Batch or single file
log_report.write("\nRun type: %s" % ob.runtype)
if ob.runtype in ["file","pictures"]:
log_report.write("\nInput file path: %s" % ob.inDEST)
else:
log_report.write("\nInput file path: %s" % ob.batchpool)
log_report.write("\nOutput dir: %s" % ob.fileD)
log_report.write("\nAdapt accAvg? %s" % ob.adapt)
if ob.adapt:
log_report.write("\nExpected hitrate: %s" % ob.frameHIT)
log_report.write("\nMinimum accAvg: %s" % ob.floorvalue)
log_report.write("\nThreshold %s" % ob.threshT)
log_report.write("\nMinimum contour area: %s" % ob.minSIZE)
log_report.write("\nBurnin: %s" % ob.burnin)
log_report.write("\nScan frames: %s" % ob.scan)
if ob.frameSET:
log_report.write("\nManual framerate: %s" % ob.frame_rate)
log_report.write("\nSet ROI: %s" % ob.ROI_include)
log_report.write("\nArea counter?: %s" % ob.set_areacounter)
log_report.write("\nOutput type?: %s\n\n" % ob.makeVID)
#Ending time
end=time.time()
#total_time()
total_min=(end-ob.start)/60
#processed frames per second
pfps=float(ob.frame_count)/(total_min*60)
##Write to log file
log_report.write("Total run time (min): %.2f \n " % total_min)
log_report.write("Average frames per second: %.2f \n " % pfps)
#End of program, report some statistic to screen and log
#log
log_report.write("\n Thank you for using MotionMeerkat! \n")
log_report.write("Candidate motion events: %.0f \n " % ob.total_count )
log_report.write("Frames skipped due to Threshold: %.0f \n " % ob.nocountr)
log_report.write("Frames skipped due to minSIZE: %.0f \n " % ob.toosmall)
log_report.write("Total frames in files: %.0f \n " % ob.frame_count)
rate=float(ob.total_count)/ob.frame_count*100
log_report.write("Hitrate: %.2f %% \n" % rate)
log_report.write("Exiting")
#print to screen
print("\n\nThank you for using MotionMeerkat! \n")
print("Total run time (min): %.2f \n " % total_min)
print("Average frames processed per second: %.2f \n " % pfps)
print("Candidate motion events: %.0f \n " % ob.total_count )
print("Frames skipped due to AccAvg: %.0f \n " % ob.nodiff)
print("Frames skipped due to Threshold: %.0f \n " % ob.nocountr)
print("Frames skipped due to minSIZE: %.0f \n " % ob.toosmall)
print("Total frames in files: %.0f \n " % ob.frame_count)
rate=float(ob.total_count)/ob.frame_count*100
print("Hitrate: %.2f %% \n" % rate)
#reset frame count if in batch loop
ob.frame_count=0
ob.total_count=0
ob.toosmall=0
ob.nocountr=0
#Write csv of time stamps and frame counts
#file name
time_stamp_report = ob.file_destination + "/" + "Frames.csv"
with open(time_stamp_report, 'wb') as f:
writer = csv.writer(f)
writer.writerows(ob.stamp)
if ob.set_areacounter:
area_report = ob.file_destination + "/" + "AreaCounter.csv"
with open(area_report, 'wb') as f:
writer = csv.writer(f)
writer.writerows(ob.areaC)<|fim▁end|> | |
<|file_name|>account-level-purchase-links.tsx<|end_file_name|><|fim▁begin|>import { CompactCard } from '@automattic/components';
import { useTranslate } from 'i18n-calypso';<|fim▁hole|> return (
<>
<CompactCard href="/me/purchases">{ translate( 'View all purchases' ) }</CompactCard>
</>
);
}<|fim▁end|> |
export default function AccountLevelPurchaseLinks() {
const translate = useTranslate(); |
<|file_name|>vec-matching-autoslice.rs<|end_file_name|><|fim▁begin|>pub fn main() {
let x = @[1, 2, 3];
match x {
[2, ..] => fail!(),
[1, ..tail] => {
assert_eq!(tail, [2, 3]);
}
[_] => fail!(),
[] => fail!()
}
let y = (~[(1, true), (2, false)], 0.5);
match y {
([_, _, _], 0.5) => fail!(),
([(1, a), (b, false), ..tail], _) => {
assert_eq!(a, true);
assert_eq!(b, 2);
assert!(tail.is_empty());
}
([.._tail], _) => fail!()
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>expr.rs<|end_file_name|><|fim▁begin|>//* This file is part of the uutils coreutils package.
//*
//* (c) Roman Gafiyatullin <[email protected]>
//*
//* For the full copyright and license information, please view the LICENSE
//* file that was distributed with this source code.
use clap::{crate_version, App, AppSettings, Arg};
use uucore::error::{UResult, USimpleError};
use uucore::InvalidEncodingHandling;
mod syntax_tree;
mod tokens;
const VERSION: &str = "version";
const HELP: &str = "help";
static ABOUT: &str = "Print the value of EXPRESSION to standard output";
static USAGE: &str = r#"
expr [EXPRESSION]
expr [OPTIONS]"#;
pub fn uu_app<'a>() -> App<'a> {
App::new(uucore::util_name())
.version(crate_version!())
.about(ABOUT)
.override_usage(USAGE)
.setting(AppSettings::InferLongArgs)
.arg(
Arg::new(VERSION)
.long(VERSION)
.help("output version information and exit"),
)
.arg(Arg::new(HELP).long(HELP).help("display this help and exit"))
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args
.collect_str(InvalidEncodingHandling::ConvertLossy)
.accept_any();
// For expr utility we do not want getopts.
// The following usage should work without escaping hyphens: `expr -15 = 1 + 2 \* \( 3 - -4 \)`
if maybe_handle_help_or_version(&args) {
Ok(())
} else {
let token_strings = args[1..].to_vec();
match process_expr(&token_strings) {
Ok(expr_result) => print_expr_ok(&expr_result),
Err(expr_error) => Err(USimpleError::new(2, &expr_error)),
}
}
}
fn process_expr(token_strings: &[String]) -> Result<String, String> {
let maybe_tokens = tokens::strings_to_tokens(token_strings);
let maybe_ast = syntax_tree::tokens_to_ast(maybe_tokens);
evaluate_ast(maybe_ast)
}
fn print_expr_ok(expr_result: &str) -> UResult<()> {
println!("{}", expr_result);
if expr_result == "0" || expr_result.is_empty() {
Err(1.into())
} else {
Ok(())
}
}
fn evaluate_ast(maybe_ast: Result<Box<syntax_tree::AstNode>, String>) -> Result<String, String> {
maybe_ast.and_then(|ast| ast.evaluate())
}
fn maybe_handle_help_or_version(args: &[String]) -> bool {
if args.len() == 2 {
if args[1] == "--help" {
print_help();
true
} else if args[1] == "--version" {
print_version();
true
} else {
false
}
} else {
false
}
}
fn print_help() {
//! The following is taken from GNU coreutils' "expr --help" output.<|fim▁hole|>
--help display this help and exit
--version output version information and exit
Print the value of EXPRESSION to standard output. A blank line below
separates increasing precedence groups. EXPRESSION may be:
ARG1 | ARG2 ARG1 if it is neither null nor 0, otherwise ARG2
ARG1 & ARG2 ARG1 if neither argument is null or 0, otherwise 0
ARG1 < ARG2 ARG1 is less than ARG2
ARG1 <= ARG2 ARG1 is less than or equal to ARG2
ARG1 = ARG2 ARG1 is equal to ARG2
ARG1 != ARG2 ARG1 is unequal to ARG2
ARG1 >= ARG2 ARG1 is greater than or equal to ARG2
ARG1 > ARG2 ARG1 is greater than ARG2
ARG1 + ARG2 arithmetic sum of ARG1 and ARG2
ARG1 - ARG2 arithmetic difference of ARG1 and ARG2
ARG1 * ARG2 arithmetic product of ARG1 and ARG2
ARG1 / ARG2 arithmetic quotient of ARG1 divided by ARG2
ARG1 % ARG2 arithmetic remainder of ARG1 divided by ARG2
STRING : REGEXP anchored pattern match of REGEXP in STRING
match STRING REGEXP same as STRING : REGEXP
substr STRING POS LENGTH substring of STRING, POS counted from 1
index STRING CHARS index in STRING where any CHARS is found, or 0
length STRING length of STRING
+ TOKEN interpret TOKEN as a string, even if it is a
keyword like 'match' or an operator like '/'
( EXPRESSION ) value of EXPRESSION
Beware that many operators need to be escaped or quoted for shells.
Comparisons are arithmetic if both ARGs are numbers, else lexicographical.
Pattern matches return the string matched between \( and \) or null; if
\( and \) are not used, they return the number of characters matched or 0.
Exit status is 0 if EXPRESSION is neither null nor 0, 1 if EXPRESSION is null
or 0, 2 if EXPRESSION is syntactically invalid, and 3 if an error occurred.
Environment variables:
* EXPR_DEBUG_TOKENS=1 dump expression's tokens
* EXPR_DEBUG_RPN=1 dump expression represented in reverse polish notation
* EXPR_DEBUG_SYA_STEP=1 dump each parser step
* EXPR_DEBUG_AST=1 dump expression represented abstract syntax tree"#
);
}
fn print_version() {
println!("{} {}", uucore::util_name(), crate_version!());
}<|fim▁end|> | println!(
r#"Usage: expr EXPRESSION
or: expr OPTION |
<|file_name|>EditPanel.java<|end_file_name|><|fim▁begin|>package panels;
import javax.swing.JPanel;
import javax.swing.JTextArea;<|fim▁hole|>
public class EditPanel extends JPanel {
JTextArea srcEdit;
public EditPanel() {
srcEdit = new JTextArea(20, 30);
String src = ".data\n"
+ "a: .word 1, 2, 3\n";
srcEdit.setText(src);
add(srcEdit);
}
public JTextArea getSrcEdit() {
return srcEdit;
}
public void setSrcEdit(JTextArea srcEdit) {
this.srcEdit = srcEdit;
}
public String getText() {
return getSrcEdit().getText() ;
}
}<|fim▁end|> | |
<|file_name|>congruences.py<|end_file_name|><|fim▁begin|>import primes as py
def lcm(a, b):
return a * b / gcd(a, b)
def gcd(a, b):
while b != 0:
(a, b) = (b, a % b)
return a
# Returns two integers x, y such that gcd(a, b) = ax + by
def egcd(a, b):
if a == 0:
return (0, 1)
else:
y, x = egcd(b % a, a)
return (x - (b // a) * y, y)
# Returns an integer x such that ax = 1(mod m)
def modInverse(a, m):
x, y = egcd(a, m)
if gcd(a, m) == 1:
return x % m
# Reduces linear congruence to form x = b(mod m)
def reduceCongr(a, b, m):
gcdAB = gcd(a, b)
a /= gcdAB
b /= gcdAB
m /= gcd(gcdAB, m)
modinv = modInverse(a, m)
b *= modinv
return (1, b, m)
# Returns the incongruent solutions to the linear congruence ax = b(mod m)
def linCongr(a, b, m):
solutions = set()
if (b % gcd(a, m) == 0):
numSols = gcd(a, m)
sol = (b * egcd(a, m)[0] / numSols) % m
for i in xrange(0, numSols):
solutions.add((sol + m * i / numSols) % m)
return solutions
# Uses the Chinese Remainder Theorem to solve a system of linear congruences
def crt(congruences):
x = 0
M = 1
for i in xrange(len(congruences)):
M *= congruences[i][2]
congruences[i] = reduceCongr(congruences[i][0], congruences[i][1], congruences[i][2])
for j in xrange(len(congruences)):
m = congruences[j][2]
if gcd(m, M/m) != 1:
return None
x += congruences[j][1] * modInverse(M/m, m) * M / m
return x % M
# Returns the incongruent solution to any system of linear congruences
def linCongrSystem(congruences):
newCongruences = []
for i in xrange(len(congruences)):
congruences[i] = reduceCongr(congruences[i][0], congruences[i][1], congruences[i][2])
# Tests to see whether the system is solvable
for j in xrange(len(congruences)):
if congruences[i] != congruences[j]:
if (congruences[i][1] - congruences[j][1]) % gcd(congruences[i][2], congruences[j][2]) != 0:
return None
# Splits moduli into prime powers
pFactor = py.primeFactorization(congruences[i][2])
for term in pFactor:
newCongruences.append((1, congruences[i][1], term[0] ** term[1]))
# Discards redundant congruences
newCongruences = sorted(newCongruences, key=lambda x: x[2], reverse = True)
finalCongruences = []
for k in xrange(len(newCongruences)):
isRedundant = False
for l in xrange(0, k):
if newCongruences[l][2] % newCongruences[k][2] == 0:
isRedundant = True
if not isRedundant:
finalCongruences.append(newCongruences[k])
return crt(finalCongruences)<|fim▁hole|> solutions = []
for i in xrange(m):
value = 0
for degree in xrange(len(coefficients)):
value += coefficients[degree] * (i ** (len(coefficients) - degree - 1))
if value % m == 0:
solutions.append(i)
return solutions<|fim▁end|> |
# Returns incongruents solutions to a polynomial congruence
def polyCongr(coefficients, m): |
<|file_name|>SystemAttrEnum.java<|end_file_name|><|fim▁begin|>/*
* IRIS -- Intelligent Roadway Information System
* Copyright (C) 2009-2015 Minnesota Department of Transportation
* Copyright (C) 2012 Iteris Inc.
* Copyright (C) 2014 AHMCT, University of California
* Copyright (C) 2015 SRF Consulting Group
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package us.mn.state.dot.tms;
import java.util.HashMap;
import static us.mn.state.dot.tms.SignMessageHelper.DMS_MESSAGE_MAX_PAGES;
import us.mn.state.dot.tms.utils.I18N;
/**
* This enum defines all system attributes.
*
* @author Douglas Lau
* @author Michael Darter
* @author Travis Swanston
*/
public enum SystemAttrEnum {
CAMERA_AUTH_USERNAME(""),
CAMERA_AUTH_PASSWORD(""),
CAMERA_AUTOPLAY(true, Change.RESTART_CLIENT),
CAMERA_ID_BLANK(""),
CAMERA_NUM_PRESET_BTNS(3, 0, 20, Change.RESTART_CLIENT),
CAMERA_PRESET_PANEL_COLUMNS(6, 1, 6, Change.RESTART_CLIENT),
CAMERA_PRESET_PANEL_ENABLE(false, Change.RESTART_CLIENT),
CAMERA_PRESET_STORE_ENABLE(false, Change.RESTART_CLIENT),
CAMERA_PTZ_AXIS_COMPORT(1, 1, 64),
CAMERA_PTZ_AXIS_RESET(""),
CAMERA_PTZ_AXIS_WIPE(""),
CAMERA_PTZ_BLIND(true),
CAMERA_PTZ_PANEL_ENABLE(false, Change.RESTART_CLIENT),
CAMERA_STREAM_CONTROLS_ENABLE(false, Change.RESTART_CLIENT),
CAMERA_UTIL_PANEL_ENABLE(false, Change.RESTART_CLIENT),
CAMERA_WIPER_PRECIP_MM_HR(8, 1, 100),
CLIENT_UNITS_SI(true),
COMM_EVENT_PURGE_DAYS(14, 0, 1000),
DATABASE_VERSION(String.class, Change.RESTART_SERVER),
DETECTOR_AUTO_FAIL_ENABLE(true),
DIALUP_POLL_PERIOD_MINS(60, 2, 1440),
DMS_AWS_ENABLE(false),
DMS_BRIGHTNESS_ENABLE(true, Change.RESTART_CLIENT),
DMS_COMM_LOSS_MINUTES(5, 0, 60),
DMS_COMPOSER_EDIT_MODE(1, 0, 2, Change.RESTART_CLIENT),
DMS_DEFAULT_JUSTIFICATION_LINE(3, 2, 5, Change.RESTART_CLIENT),
DMS_DEFAULT_JUSTIFICATION_PAGE(2, 2, 4, Change.RESTART_CLIENT),
DMS_DURATION_ENABLE(true),
DMS_FONT_SELECTION_ENABLE(false, Change.RESTART_CLIENT),
DMS_FORM(1, 1, 2),
DMS_HIGH_TEMP_CUTOFF(60, 35, 100),
DMS_LAMP_TEST_TIMEOUT_SECS(30, 5, 90),
DMS_MANUFACTURER_ENABLE(true, Change.RESTART_CLIENT),
DMS_MAX_LINES(3, 1, 12, Change.RESTART_CLIENT),
DMS_MESSAGE_MIN_PAGES(1, 1, DMS_MESSAGE_MAX_PAGES,
Change.RESTART_CLIENT),
DMS_OP_STATUS_ENABLE(false, Change.RESTART_CLIENT),
DMS_PAGE_OFF_DEFAULT_SECS(0f, 0f, 60f),
DMS_PAGE_ON_DEFAULT_SECS(2f, 0f, 60f),
DMS_PAGE_ON_MAX_SECS(10.0f, 0f, 100f, Change.RESTART_CLIENT),
DMS_PAGE_ON_MIN_SECS(0.5f, 0f, 100f, Change.RESTART_CLIENT),
DMS_PAGE_ON_SELECTION_ENABLE(false),
DMS_PIXEL_OFF_LIMIT(2, 1),
DMS_PIXEL_ON_LIMIT(1, 1),
DMS_PIXEL_MAINT_THRESHOLD(35, 1),
DMS_PIXEL_STATUS_ENABLE(true, Change.RESTART_CLIENT),
DMS_PIXEL_TEST_TIMEOUT_SECS(30, 5, 90),
DMS_QUERYMSG_ENABLE(false, Change.RESTART_CLIENT),
DMS_QUICKMSG_STORE_ENABLE(false, Change.RESTART_CLIENT),
DMS_RESET_ENABLE(false, Change.RESTART_CLIENT),
DMS_SEND_CONFIRMATION_ENABLE(false, Change.RESTART_CLIENT),
DMS_UPDATE_FONT_TABLE(false),
DMSXML_MODEM_OP_TIMEOUT_SECS(5 * 60 + 5, 5),
DMSXML_OP_TIMEOUT_SECS(60 + 5, 5),
DMSXML_REINIT_DETECT(false),
EMAIL_SENDER_SERVER(String.class),
EMAIL_SMTP_HOST(String.class),
EMAIL_RECIPIENT_AWS(String.class),
EMAIL_RECIPIENT_DMSXML_REINIT(String.class),
EMAIL_RECIPIENT_GATE_ARM(String.class),
GATE_ARM_ALERT_TIMEOUT_SECS(90, 10),
GPS_NTCIP_ENABLE(false),
GPS_NTCIP_JITTER_M(100, 0, 1610),
HELP_TROUBLE_TICKET_ENABLE(false),
HELP_TROUBLE_TICKET_URL(String.class),
INCIDENT_CLEAR_SECS(600, 0, 3600),
LCS_POLL_PERIOD_SECS(30, 0, Change.RESTART_SERVER),
MAP_EXTENT_NAME_INITIAL("Home"),
MAP_ICON_SIZE_SCALE_MAX(30f, 0f, 9000f),
MAP_SEGMENT_MAX_METERS(2000, 100, Change.RESTART_CLIENT),
METER_EVENT_PURGE_DAYS(14, 0, 1000),
METER_GREEN_SECS(1.3f, 0.1f, 10f),
METER_MAX_RED_SECS(13f, 5f, 30f),
METER_MIN_RED_SECS(0.1f, 0.1f, 10f),
METER_YELLOW_SECS(0.7f, 0.1f, 10f),
MSG_FEED_VERIFY(true),
OPERATION_RETRY_THRESHOLD(3, 1, 20),
ROUTE_MAX_LEGS(8, 1, 20),
ROUTE_MAX_MILES(16, 1, 30),
RWIS_HIGH_WIND_SPEED_KPH(40, 0),
RWIS_LOW_VISIBILITY_DISTANCE_M(152, 0),
RWIS_OBS_AGE_LIMIT_SECS(240, 0),
RWIS_MAX_VALID_WIND_SPEED_KPH(282, 0),
SAMPLE_ARCHIVE_ENABLE(true),
SPEED_LIMIT_MIN_MPH(45, 0, 100),
SPEED_LIMIT_DEFAULT_MPH(55, 0, 100),
SPEED_LIMIT_MAX_MPH(75, 0, 100),
TESLA_HOST(String.class),
TRAVEL_TIME_MIN_MPH(15, 1, 50),
UPTIME_LOG_ENABLE(false),
VSA_BOTTLENECK_ID_MPH(55, 10, 65),
VSA_CONTROL_THRESHOLD(-1000, -5000, -200),
VSA_DOWNSTREAM_MILES(0.2f, 0f, 2.0f),
VSA_MAX_DISPLAY_MPH(60, 10, 60),
VSA_MIN_DISPLAY_MPH(30, 10, 55),
VSA_MIN_STATION_MILES(0.1f, 0.01f, 1.0f),
VSA_START_INTERVALS(3, 0, 10),
VSA_START_THRESHOLD(-1500, -5000, -200),
VSA_STOP_THRESHOLD(-750, -5000, -200),
WINDOW_TITLE("IRIS: ", Change.RESTART_CLIENT);
/** Change action, which indicates what action the admin must
* take after changing a system attribute. */
enum Change {
RESTART_SERVER("Restart the server after changing."),
RESTART_CLIENT("Restart the client after changing."),
NONE("A change takes effect immediately.");
/** Change message for user. */
private final String m_msg;<|fim▁hole|>
/** Constructor */
private Change(String msg) {
m_msg = msg;
}
/** Get the restart message. */
public String getMessage() {
return m_msg;
}
}
/** System attribute class */
protected final Class atype;
/** Default value */
protected final Object def_value;
/** Change action */
protected final Change change_action;
/** Minimum value for number attributes */
protected final Number min_value;
/** Maximum value for number attributes */
protected final Number max_value;
/** Create a String attribute with the given default value */
private SystemAttrEnum(String d) {
this(String.class, d, null, null, Change.NONE);
}
/** Create a String attribute with the given default value */
private SystemAttrEnum(String d, Change ca) {
this(String.class, d, null, null, ca);
}
/** Create a Boolean attribute with the given default value */
private SystemAttrEnum(boolean d) {
this(Boolean.class, d, null, null, Change.NONE);
}
/** Create a Boolean attribute with the given default value */
private SystemAttrEnum(boolean d, Change ca) {
this(Boolean.class, d, null, null, ca);
}
/** Create an Integer attribute with default, min and max values */
private SystemAttrEnum(int d, int mn, int mx) {
this(Integer.class, d, mn, mx, Change.NONE);
}
/** Create an Integer attribute with default, min and max values */
private SystemAttrEnum(int d, int mn, int mx, Change ca) {
this(Integer.class, d, mn, mx, ca);
}
/** Create an Integer attribute with default and min values */
private SystemAttrEnum(int d, int mn) {
this(Integer.class, d, mn, null, Change.NONE);
}
/** Create an Integer attribute with default and min values */
private SystemAttrEnum(int d, int mn, Change ca) {
this(Integer.class, d, mn, null, ca);
}
/** Create a Float attribute with default, min and max values */
private SystemAttrEnum(float d, float mn, float mx) {
this(Float.class, d, mn, mx, Change.NONE);
}
/** Create a Float attribute with default, min and max values */
private SystemAttrEnum(float d, float mn, float mx, Change ca) {
this(Float.class, d, mn, mx, ca);
}
/** Create a system attribute with a null default value */
private SystemAttrEnum(Class c) {
this(c, null, null, null, Change.NONE);
}
/** Create a system attribute with a null default value */
private SystemAttrEnum(Class c, Change ca) {
this(c, null, null, null, ca);
}
/** Create a system attribute */
private SystemAttrEnum(Class c, Object d, Number mn, Number mx,
Change ca)
{
atype = c;
def_value = d;
min_value = mn;
max_value = mx;
change_action = ca;
assert isValidBoolean() || isValidFloat() ||
isValidInteger() || isValidString();
}
/** Get a description of the system attribute enum. */
public static String getDesc(String aname) {
String ret = I18N.get(aname);
SystemAttrEnum sae = lookup(aname);
if(sae != null)
ret += " " + sae.change_action.getMessage();
return ret;
}
/** Return true if the value is the default value. */
public boolean equalsDefault() {
return get().toString().equals(getDefault());
}
/** Test if the attribute is a valid boolean */
private boolean isValidBoolean() {
return (atype == Boolean.class) &&
(def_value instanceof Boolean) &&
min_value == null && max_value == null;
}
/** Test if the attribute is a valid float */
private boolean isValidFloat() {
return (atype == Float.class) &&
(def_value instanceof Float) &&
(min_value == null || min_value instanceof Float) &&
(max_value == null || max_value instanceof Float);
}
/** Test if the attribute is a valid integer */
private boolean isValidInteger() {
return (atype == Integer.class) &&
(def_value instanceof Integer) &&
(min_value == null || min_value instanceof Integer) &&
(max_value == null || max_value instanceof Integer);
}
/** Test if the attribute is a valid string */
private boolean isValidString() {
return (atype == String.class) &&
(def_value == null || def_value instanceof String) &&
min_value == null && max_value == null;
}
/** Get the attribute name */
public String aname() {
return toString().toLowerCase();
}
/** Set of all system attributes */
static protected final HashMap<String, SystemAttrEnum> ALL_ATTRIBUTES =
new HashMap<String, SystemAttrEnum>();
static {
for(SystemAttrEnum sa: SystemAttrEnum.values())
ALL_ATTRIBUTES.put(sa.aname(), sa);
}
/** Lookup an attribute by name */
static public SystemAttrEnum lookup(String aname) {
return ALL_ATTRIBUTES.get(aname);
}
/**
* Get the value of the attribute as a string.
* @return The value of the attribute as a string, never null.
*/
public String getString() {
assert atype == String.class;
return (String)get();
}
/** Get the default value as a String. */
public String getDefault() {
if(def_value != null)
return def_value.toString();
else
return "";
}
/** Get the value of the attribute as a boolean */
public boolean getBoolean() {
assert atype == Boolean.class;
return (Boolean)get();
}
/** Get the value of the attribute as an int */
public int getInt() {
assert atype == Integer.class;
return (Integer)get();
}
/** Get the value of the attribute as a float */
public float getFloat() {
assert atype == Float.class;
return (Float)get();
}
/**
* Get the value of the attribute.
* @return The value of the attribute, never null.
*/
protected Object get() {
return getValue(SystemAttributeHelper.get(aname()));
}
/**
* Get the value of a system attribute.
* @param attr System attribute or null.
* @return The attribute value or the default value on error.
* Null is never returned.
*/
private Object getValue(SystemAttribute attr) {
if(attr == null) {
System.err.println(warningDefault());
return def_value;
}
return parseValue(attr.getValue());
}
/**
* Get the value of a system attribute.
* @return The parsed value or the default value on error.
* Null is never returned.
*/
public Object parseValue(String v) {
Object value = parse(v);
if(value == null) {
System.err.println(warningParse());
return def_value;
}
return value;
}
/**
* Parse an attribute value.
* @param v Attribute value, may be null.
* @return The parsed value or null on error.
*/
protected Object parse(String v) {
if(atype == String.class)
return v;
if(atype == Boolean.class)
return parseBoolean(v);
if(atype == Integer.class)
return parseInteger(v);
if(atype == Float.class)
return parseFloat(v);
assert false;
return null;
}
/** Parse a boolean attribute value */
protected Boolean parseBoolean(String v) {
try {
return Boolean.parseBoolean(v);
}
catch(NumberFormatException e) {
return null;
}
}
/** Parse an integer attribute value */
protected Integer parseInteger(String v) {
int i;
try {
i = Integer.parseInt(v);
}
catch(NumberFormatException e) {
return null;
}
if(min_value != null) {
int m = min_value.intValue();
if(i < m) {
System.err.println(warningMinimum());
return m;
}
}
if(max_value != null) {
int m = max_value.intValue();
if(i > m) {
System.err.println(warningMaximum());
return m;
}
}
return i;
}
/** Parse a float attribute value */
protected Float parseFloat(String v) {
float f;
try {
f = Float.parseFloat(v);
}
catch(NumberFormatException e) {
return null;
}
if(min_value != null) {
float m = min_value.floatValue();
if(f < m) {
System.err.println(warningMinimum());
return m;
}
}
if(max_value != null) {
float m = max_value.floatValue();
if(f > m) {
System.err.println(warningMaximum());
return m;
}
}
return f;
}
/** Create a 'missing system attribute' warning message */
protected String warningDefault() {
return "Warning: " + toString() + " system attribute was not " +
"found; using a default value (" + def_value + ").";
}
/** Create a parsing warning message */
protected String warningParse() {
return "Warning: " + toString() + " system attribute could " +
"not be parsed; using a default value (" +
def_value + ").";
}
/** Create a minimum value warning message */
protected String warningMinimum() {
return "Warning: " + toString() + " system attribute was too " +
"low; using a minimum value (" + min_value + ").";
}
/** Create a maximum value warning message */
protected String warningMaximum() {
return "Warning: " + toString() + " system attribute was too " +
"high; using a maximum value (" + max_value + ").";
}
}<|fim▁end|> | |
<|file_name|>role_table.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""Test of table output."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(KeyComboAction("End"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("<Shift>Right"))
sequence.append(KeyComboAction("Down"))
sequence.append(KeyComboAction("Return"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"1. Table Where Am I",
["BRAILLE LINE: 'gtk-demo application Shopping list frame table Number column header 3 bottles of coke'",
" VISIBLE: '3 bottles of coke', cursor=1",
"SPEECH OUTPUT: 'table.'",
"SPEECH OUTPUT: 'Number.'",
"SPEECH OUTPUT: 'table cell.'",
"SPEECH OUTPUT: '3.'",
"SPEECH OUTPUT: 'column 1 of 3'",
"SPEECH OUTPUT: 'row 1 of 5.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))<|fim▁hole|> ["BRAILLE LINE: 'gtk-demo application Shopping list frame table Number column header 5 packages of noodles'",
" VISIBLE: '5 packages of noodles', cursor=1",
"SPEECH OUTPUT: '5 packages of noodles.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"3. Table Where Am I (again)",
["BRAILLE LINE: 'gtk-demo application Shopping list frame table Number column header 5 packages of noodles'",
" VISIBLE: '5 packages of noodles', cursor=1",
"SPEECH OUTPUT: 'table.'",
"SPEECH OUTPUT: 'Number.'",
"SPEECH OUTPUT: 'table cell.'",
"SPEECH OUTPUT: '5.'",
"SPEECH OUTPUT: 'column 1 of 3'",
"SPEECH OUTPUT: 'row 2 of 5.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyPressAction(0, None, "KP_Insert"))
sequence.append(KeyComboAction("F11"))
sequence.append(KeyReleaseAction(0, None, "KP_Insert"))
sequence.append(utils.AssertPresentationAction(
"4. Turn row reading off",
["BRAILLE LINE: 'Speak cell'",
" VISIBLE: 'Speak cell', cursor=0",
"SPEECH OUTPUT: 'Speak cell'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"5. Table Right to the Product column in the packages of noodles row",
["BRAILLE LINE: 'gtk-demo application Shopping list frame table Number column header 5 packages of noodles'",
" VISIBLE: '5 packages of noodles', cursor=1",
"BRAILLE LINE: 'gtk-demo application Shopping list frame table Product column header packages of noodles table cell'",
" VISIBLE: 'packages of noodles table cell', cursor=1",
"SPEECH OUTPUT: 'Product column header packages of noodles.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"6. Table up to bottles of coke",
["BRAILLE LINE: 'gtk-demo application Shopping list frame table Product column header bottles of coke table cell'",
" VISIBLE: 'bottles of coke table cell', cursor=1",
"SPEECH OUTPUT: 'bottles of coke.'"]))
sequence.append(KeyComboAction("<Alt>F4"))
sequence.append(utils.AssertionSummaryAction())
sequence.start()<|fim▁end|> | sequence.append(utils.AssertPresentationAction(
"2. Next row", |
<|file_name|>license-helper-spec.js<|end_file_name|><|fim▁begin|>import {writeArray} from 'event-stream';
import gulp from 'gulp';
import license from '../../tasks/helpers/license-helper';
describe('license', () => {
let result;
beforeEach(done => {
const licenseStream = gulp.src('src/pivotal-ui/components/alerts')
.pipe(license());
licenseStream.on('error', (error) => {
console.error(error);
callback();<|fim▁hole|>
licenseStream.pipe(writeArray((error, data) => {
result = data;
done();
}));
});
it('creates an MIT license for the component', () => {
expect(result[0].path).toEqual('alerts/LICENSE');
expect(result[0].contents.toString()).toContain('The MIT License');
});
});<|fim▁end|> | }); |
<|file_name|>Lower.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.codegen;
import static jdk.nashorn.internal.codegen.CompilerConstants.EVAL;
import static jdk.nashorn.internal.codegen.CompilerConstants.RETURN;
import static jdk.nashorn.internal.ir.Expression.isAlwaysTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.regex.Pattern;
import jdk.nashorn.internal.ir.AccessNode;
import jdk.nashorn.internal.ir.BaseNode;
import jdk.nashorn.internal.ir.BinaryNode;
import jdk.nashorn.internal.ir.Block;
import jdk.nashorn.internal.ir.BlockLexicalContext;
import jdk.nashorn.internal.ir.BlockStatement;
import jdk.nashorn.internal.ir.BreakNode;
import jdk.nashorn.internal.ir.CallNode;
import jdk.nashorn.internal.ir.CaseNode;
import jdk.nashorn.internal.ir.CatchNode;
import jdk.nashorn.internal.ir.DebuggerNode;
import jdk.nashorn.internal.ir.ContinueNode;
import jdk.nashorn.internal.ir.EmptyNode;
import jdk.nashorn.internal.ir.Expression;
import jdk.nashorn.internal.ir.ExpressionStatement;
import jdk.nashorn.internal.ir.ForNode;
import jdk.nashorn.internal.ir.FunctionNode;
import jdk.nashorn.internal.ir.FunctionNode.CompilationState;
import jdk.nashorn.internal.ir.IdentNode;
import jdk.nashorn.internal.ir.IfNode;
import jdk.nashorn.internal.ir.IndexNode;
import jdk.nashorn.internal.ir.JumpStatement;
import jdk.nashorn.internal.ir.JumpToInlinedFinally;
import jdk.nashorn.internal.ir.LabelNode;
import jdk.nashorn.internal.ir.LexicalContext;
import jdk.nashorn.internal.ir.LiteralNode;
import jdk.nashorn.internal.ir.LiteralNode.PrimitiveLiteralNode;
import jdk.nashorn.internal.ir.LoopNode;
import jdk.nashorn.internal.ir.Node;
import jdk.nashorn.internal.ir.ReturnNode;
import jdk.nashorn.internal.ir.RuntimeNode;
import jdk.nashorn.internal.ir.Statement;
import jdk.nashorn.internal.ir.SwitchNode;
import jdk.nashorn.internal.ir.Symbol;
import jdk.nashorn.internal.ir.ThrowNode;
import jdk.nashorn.internal.ir.TryNode;
import jdk.nashorn.internal.ir.VarNode;
import jdk.nashorn.internal.ir.WhileNode;
import jdk.nashorn.internal.ir.WithNode;
import jdk.nashorn.internal.ir.visitor.NodeOperatorVisitor;
import jdk.nashorn.internal.ir.visitor.NodeVisitor;
import jdk.nashorn.internal.parser.Token;
import jdk.nashorn.internal.parser.TokenType;
import jdk.nashorn.internal.runtime.Context;
import jdk.nashorn.internal.runtime.JSType;
import jdk.nashorn.internal.runtime.Source;
import jdk.nashorn.internal.runtime.logging.DebugLogger;
import jdk.nashorn.internal.runtime.logging.Loggable;
import jdk.nashorn.internal.runtime.logging.Logger;
/**
* Lower to more primitive operations. After lowering, an AST still has no symbols
* and types, but several nodes have been turned into more low level constructs
* and control flow termination criteria have been computed.
*
* We do things like code copying/inlining of finallies here, as it is much
* harder and context dependent to do any code copying after symbols have been
* finalized.
*/
@Logger(name="lower")
final class Lower extends NodeOperatorVisitor<BlockLexicalContext> implements Loggable {
private final DebugLogger log;
// Conservative pattern to test if element names consist of characters valid for identifiers.
// This matches any non-zero length alphanumeric string including _ and $ and not starting with a digit.
private static Pattern SAFE_PROPERTY_NAME = Pattern.compile("[a-zA-Z_$][\\w$]*");
/**
* Constructor.
*/
Lower(final Compiler compiler) {
super(new BlockLexicalContext() {
@Override
public List<Statement> popStatements() {
final List<Statement> newStatements = new ArrayList<>();
boolean terminated = false;
final List<Statement> statements = super.popStatements();
for (final Statement statement : statements) {
if (!terminated) {
newStatements.add(statement);
if (statement.isTerminal() || statement instanceof JumpStatement) { //TODO hasGoto? But some Loops are hasGoto too - why?
terminated = true;
}
} else {
statement.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
@Override
public boolean enterVarNode(final VarNode varNode) {
newStatements.add(varNode.setInit(null));
return false;
}
});
}
}
return newStatements;
}
@Override
protected Block afterSetStatements(final Block block) {
final List<Statement> stmts = block.getStatements();
for(final ListIterator<Statement> li = stmts.listIterator(stmts.size()); li.hasPrevious();) {
final Statement stmt = li.previous();
// popStatements() guarantees that the only thing after a terminal statement are uninitialized
// VarNodes. We skip past those, and set the terminal state of the block to the value of the
// terminal state of the first statement that is not an uninitialized VarNode.
if(!(stmt instanceof VarNode && ((VarNode)stmt).getInit() == null)) {
return block.setIsTerminal(this, stmt.isTerminal());
}
}
return block.setIsTerminal(this, false);
}
});
this.log = initLogger(compiler.getContext());
}
@Override
public DebugLogger getLogger() {
return log;
}
@Override
public DebugLogger initLogger(final Context context) {
return context.getLogger(this.getClass());
}
@Override
public boolean enterBreakNode(final BreakNode breakNode) {
addStatement(breakNode);
return false;
}
@Override
public Node leaveCallNode(final CallNode callNode) {
return checkEval(callNode.setFunction(markerFunction(callNode.getFunction())));
}
@Override
public Node leaveCatchNode(final CatchNode catchNode) {
return addStatement(catchNode);
}
@Override
public boolean enterContinueNode(final ContinueNode continueNode) {
addStatement(continueNode);
return false;
}
@Override
public boolean enterDebuggerNode(final DebuggerNode debuggerNode) {
final int line = debuggerNode.getLineNumber();
final long token = debuggerNode.getToken();
final int finish = debuggerNode.getFinish();
addStatement(new ExpressionStatement(line, token, finish, new RuntimeNode(token, finish, RuntimeNode.Request.DEBUGGER, new ArrayList<Expression>())));
return false;
}
@Override
public boolean enterJumpToInlinedFinally(final JumpToInlinedFinally jumpToInlinedFinally) {
addStatement(jumpToInlinedFinally);
return false;
}
@Override
public boolean enterEmptyNode(final EmptyNode emptyNode) {
return false;
}
@Override
public Node leaveIndexNode(final IndexNode indexNode) {
final String name = getConstantPropertyName(indexNode.getIndex());
if (name != null) {
// If index node is a constant property name convert index node to access node.
assert Token.descType(indexNode.getToken()) == TokenType.LBRACKET;
return new AccessNode(indexNode.getToken(), indexNode.getFinish(), indexNode.getBase(), name);
}
return super.leaveIndexNode(indexNode);
}
// If expression is a primitive literal that is not an array index and does return its string value. Else return null.
private static String getConstantPropertyName(final Expression expression) {
if (expression instanceof LiteralNode.PrimitiveLiteralNode) {
final Object value = ((LiteralNode) expression).getValue();
if (value instanceof String && SAFE_PROPERTY_NAME.matcher((String) value).matches()) {
return (String) value;
}
}
return null;
}
@Override
public Node leaveExpressionStatement(final ExpressionStatement expressionStatement) {
final Expression expr = expressionStatement.getExpression();
ExpressionStatement node = expressionStatement;
final FunctionNode currentFunction = lc.getCurrentFunction();
if (currentFunction.isProgram()) {
if (!isInternalExpression(expr) && !isEvalResultAssignment(expr)) {
node = expressionStatement.setExpression(
new BinaryNode(
Token.recast(
expressionStatement.getToken(),
TokenType.ASSIGN),
compilerConstant(RETURN),
expr));
}
}
return addStatement(node);
}
@Override
public Node leaveBlockStatement(final BlockStatement blockStatement) {
return addStatement(blockStatement);
}
@Override
public Node leaveForNode(final ForNode forNode) {
ForNode newForNode = forNode;
final Expression test = forNode.getTest();
if (!forNode.isForIn() && isAlwaysTrue(test)) {
newForNode = forNode.setTest(lc, null);
}
newForNode = checkEscape(newForNode);
if(newForNode.isForIn()) {
// Wrap it in a block so its internally created iterator is restricted in scope
addStatementEnclosedInBlock(newForNode);
} else {
addStatement(newForNode);
}
return newForNode;
}
@Override
public Node leaveFunctionNode(final FunctionNode functionNode) {
log.info("END FunctionNode: ", functionNode.getName());
return functionNode.setState(lc, CompilationState.LOWERED);
}
@Override
public Node leaveIfNode(final IfNode ifNode) {
return addStatement(ifNode);
}
@Override
public Node leaveIN(final BinaryNode binaryNode) {
return new RuntimeNode(binaryNode);
}
@Override
public Node leaveINSTANCEOF(final BinaryNode binaryNode) {
return new RuntimeNode(binaryNode);
}
@Override
public Node leaveLabelNode(final LabelNode labelNode) {
return addStatement(labelNode);
}
@Override
public Node leaveReturnNode(final ReturnNode returnNode) {
addStatement(returnNode); //ReturnNodes are always terminal, marked as such in constructor
return returnNode;
}
@Override
public Node leaveCaseNode(final CaseNode caseNode) {
// Try to represent the case test as an integer
final Node test = caseNode.getTest();
if (test instanceof LiteralNode) {
final LiteralNode<?> lit = (LiteralNode<?>)test;
if (lit.isNumeric() && !(lit.getValue() instanceof Integer)) {
if (JSType.isRepresentableAsInt(lit.getNumber())) {
return caseNode.setTest((Expression)LiteralNode.newInstance(lit, lit.getInt32()).accept(this));
}
}
}
return caseNode;
}
@Override<|fim▁hole|> addStatementEnclosedInBlock(switchNode);
} else {
addStatement(switchNode);
}
return switchNode;
}
@Override
public Node leaveThrowNode(final ThrowNode throwNode) {
return addStatement(throwNode); //ThrowNodes are always terminal, marked as such in constructor
}
@SuppressWarnings("unchecked")
private static <T extends Node> T ensureUniqueNamesIn(final T node) {
return (T)node.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
@Override
public Node leaveFunctionNode(final FunctionNode functionNode) {
final String name = functionNode.getName();
return functionNode.setName(lc, lc.getCurrentFunction().uniqueName(name));
}
@Override
public Node leaveDefault(final Node labelledNode) {
return labelledNode.ensureUniqueLabels(lc);
}
});
}
private static Block createFinallyBlock(final Block finallyBody) {
final List<Statement> newStatements = new ArrayList<>();
for (final Statement statement : finallyBody.getStatements()) {
newStatements.add(statement);
if (statement.hasTerminalFlags()) {
break;
}
}
return finallyBody.setStatements(null, newStatements);
}
private Block catchAllBlock(final TryNode tryNode) {
final int lineNumber = tryNode.getLineNumber();
final long token = tryNode.getToken();
final int finish = tryNode.getFinish();
final IdentNode exception = new IdentNode(token, finish, lc.getCurrentFunction().uniqueName(CompilerConstants.EXCEPTION_PREFIX.symbolName()));
final Block catchBody = new Block(token, finish, new ThrowNode(lineNumber, token, finish, new IdentNode(exception), true));
assert catchBody.isTerminal(); //ends with throw, so terminal
final CatchNode catchAllNode = new CatchNode(lineNumber, token, finish, new IdentNode(exception), null, catchBody, true);
final Block catchAllBlock = new Block(token, finish, catchAllNode);
//catchallblock -> catchallnode (catchnode) -> exception -> throw
return (Block)catchAllBlock.accept(this); //not accepted. has to be accepted by lower
}
private IdentNode compilerConstant(final CompilerConstants cc) {
final FunctionNode functionNode = lc.getCurrentFunction();
return new IdentNode(functionNode.getToken(), functionNode.getFinish(), cc.symbolName());
}
private static boolean isTerminalFinally(final Block finallyBlock) {
return finallyBlock.getLastStatement().hasTerminalFlags();
}
/**
* Splice finally code into all endpoints of a trynode
* @param tryNode the try node
* @param rethrow the rethrowing throw nodes from the synthetic catch block
* @param finallyBody the code in the original finally block
* @return new try node after splicing finally code (same if nop)
*/
private TryNode spliceFinally(final TryNode tryNode, final ThrowNode rethrow, final Block finallyBody) {
assert tryNode.getFinallyBody() == null;
final Block finallyBlock = createFinallyBlock(finallyBody);
final ArrayList<Block> inlinedFinallies = new ArrayList<>();
final FunctionNode fn = lc.getCurrentFunction();
final TryNode newTryNode = (TryNode)tryNode.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
@Override
public boolean enterFunctionNode(final FunctionNode functionNode) {
// do not enter function nodes - finally code should not be inlined into them
return false;
}
@Override
public Node leaveThrowNode(final ThrowNode throwNode) {
if (rethrow == throwNode) {
return new BlockStatement(prependFinally(finallyBlock, throwNode));
}
return throwNode;
}
@Override
public Node leaveBreakNode(final BreakNode breakNode) {
return leaveJumpStatement(breakNode);
}
@Override
public Node leaveContinueNode(final ContinueNode continueNode) {
return leaveJumpStatement(continueNode);
}
private Node leaveJumpStatement(final JumpStatement jump) {
// NOTE: leaveJumpToInlinedFinally deliberately does not delegate to this method, only break and
// continue are edited. JTIF nodes should not be changed, rather the surroundings of
// break/continue/return that were moved into the inlined finally block itself will be changed.
// If this visitor's lc doesn't find the target of the jump, it means it's external to the try block.
if (jump.getTarget(lc) == null) {
return createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, jump));
}
return jump;
}
@Override
public Node leaveReturnNode(final ReturnNode returnNode) {
final Expression expr = returnNode.getExpression();
if (isTerminalFinally(finallyBlock)) {
if (expr == null) {
// Terminal finally; no return expression.
return createJumpToInlinedFinally(fn, inlinedFinallies, ensureUniqueNamesIn(finallyBlock));
}
// Terminal finally; has a return expression.
final List<Statement> newStatements = new ArrayList<>(2);
final int retLineNumber = returnNode.getLineNumber();
final long retToken = returnNode.getToken();
// Expression is evaluated for side effects.
newStatements.add(new ExpressionStatement(retLineNumber, retToken, returnNode.getFinish(), expr));
newStatements.add(createJumpToInlinedFinally(fn, inlinedFinallies, ensureUniqueNamesIn(finallyBlock)));
return new BlockStatement(retLineNumber, new Block(retToken, finallyBlock.getFinish(), newStatements));
} else if (expr == null || expr instanceof PrimitiveLiteralNode<?> || (expr instanceof IdentNode && RETURN.symbolName().equals(((IdentNode)expr).getName()))) {
// Nonterminal finally; no return expression, or returns a primitive literal, or returns :return.
// Just move the return expression into the finally block.
return createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, returnNode));
} else {
// We need to evaluate the result of the return in case it is complex while still in the try block,
// store it in :return, and return it afterwards.
final List<Statement> newStatements = new ArrayList<>();
final int retLineNumber = returnNode.getLineNumber();
final long retToken = returnNode.getToken();
final int retFinish = returnNode.getFinish();
final Expression resultNode = new IdentNode(expr.getToken(), expr.getFinish(), RETURN.symbolName());
// ":return = <expr>;"
newStatements.add(new ExpressionStatement(retLineNumber, retToken, retFinish, new BinaryNode(Token.recast(returnNode.getToken(), TokenType.ASSIGN), resultNode, expr)));
// inline finally and end it with "return :return;"
newStatements.add(createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, returnNode.setExpression(resultNode))));
return new BlockStatement(retLineNumber, new Block(retToken, retFinish, newStatements));
}
}
});
addStatement(inlinedFinallies.isEmpty() ? newTryNode : newTryNode.setInlinedFinallies(lc, inlinedFinallies));
// TODO: if finallyStatement is terminal, we could just have sites of inlined finallies jump here.
addStatement(new BlockStatement(finallyBlock));
return newTryNode;
}
private static JumpToInlinedFinally createJumpToInlinedFinally(final FunctionNode fn, final List<Block> inlinedFinallies, final Block finallyBlock) {
final String labelName = fn.uniqueName(":finally");
final long token = finallyBlock.getToken();
final int finish = finallyBlock.getFinish();
inlinedFinallies.add(new Block(token, finish, new LabelNode(finallyBlock.getFirstStatementLineNumber(),
token, finish, labelName, finallyBlock)));
return new JumpToInlinedFinally(labelName);
}
private static Block prependFinally(final Block finallyBlock, final Statement statement) {
final Block inlinedFinally = ensureUniqueNamesIn(finallyBlock);
if (isTerminalFinally(finallyBlock)) {
return inlinedFinally;
}
final List<Statement> stmts = inlinedFinally.getStatements();
final List<Statement> newStmts = new ArrayList<>(stmts.size() + 1);
newStmts.addAll(stmts);
newStmts.add(statement);
return new Block(inlinedFinally.getToken(), statement.getFinish(), newStmts);
}
@Override
public Node leaveTryNode(final TryNode tryNode) {
final Block finallyBody = tryNode.getFinallyBody();
TryNode newTryNode = tryNode.setFinallyBody(lc, null);
// No finally or empty finally
if (finallyBody == null || finallyBody.getStatementCount() == 0) {
final List<CatchNode> catches = newTryNode.getCatches();
if (catches == null || catches.isEmpty()) {
// A completely degenerate try block: empty finally, no catches. Replace it with try body.
return addStatement(new BlockStatement(tryNode.getBody()));
}
return addStatement(ensureUnconditionalCatch(newTryNode));
}
/*
* create a new trynode
* if we have catches:
*
* try try
* x try
* catch x
* y catch
* finally z y
* catchall
* rethrow
*
* otheriwse
*
* try try
* x x
* finally catchall
* y rethrow
*
*
* now splice in finally code wherever needed
*
*/
final Block catchAll = catchAllBlock(tryNode);
final List<ThrowNode> rethrows = new ArrayList<>(1);
catchAll.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
@Override
public boolean enterThrowNode(final ThrowNode throwNode) {
rethrows.add(throwNode);
return true;
}
});
assert rethrows.size() == 1;
if (!tryNode.getCatchBlocks().isEmpty()) {
final Block outerBody = new Block(newTryNode.getToken(), newTryNode.getFinish(), ensureUnconditionalCatch(newTryNode));
newTryNode = newTryNode.setBody(lc, outerBody).setCatchBlocks(lc, null);
}
newTryNode = newTryNode.setCatchBlocks(lc, Arrays.asList(catchAll));
/*
* Now that the transform is done, we have to go into the try and splice
* the finally block in front of any statement that is outside the try
*/
return (TryNode)lc.replace(tryNode, spliceFinally(newTryNode, rethrows.get(0), finallyBody));
}
private TryNode ensureUnconditionalCatch(final TryNode tryNode) {
final List<CatchNode> catches = tryNode.getCatches();
if(catches == null || catches.isEmpty() || catches.get(catches.size() - 1).getExceptionCondition() == null) {
return tryNode;
}
// If the last catch block is conditional, add an unconditional rethrow block
final List<Block> newCatchBlocks = new ArrayList<>(tryNode.getCatchBlocks());
newCatchBlocks.add(catchAllBlock(tryNode));
return tryNode.setCatchBlocks(lc, newCatchBlocks);
}
@Override
public Node leaveVarNode(final VarNode varNode) {
addStatement(varNode);
if (varNode.getFlag(VarNode.IS_LAST_FUNCTION_DECLARATION) && lc.getCurrentFunction().isProgram()) {
new ExpressionStatement(varNode.getLineNumber(), varNode.getToken(), varNode.getFinish(), new IdentNode(varNode.getName())).accept(this);
}
return varNode;
}
@Override
public Node leaveWhileNode(final WhileNode whileNode) {
final Expression test = whileNode.getTest();
final Block body = whileNode.getBody();
if (isAlwaysTrue(test)) {
//turn it into a for node without a test.
final ForNode forNode = (ForNode)new ForNode(whileNode.getLineNumber(), whileNode.getToken(), whileNode.getFinish(), body, 0).accept(this);
lc.replace(whileNode, forNode);
return forNode;
}
return addStatement(checkEscape(whileNode));
}
@Override
public Node leaveWithNode(final WithNode withNode) {
return addStatement(withNode);
}
/**
* Given a function node that is a callee in a CallNode, replace it with
* the appropriate marker function. This is used by {@link CodeGenerator}
* for fast scope calls
*
* @param function function called by a CallNode
* @return transformed node to marker function or identity if not ident/access/indexnode
*/
private static Expression markerFunction(final Expression function) {
if (function instanceof IdentNode) {
return ((IdentNode)function).setIsFunction();
} else if (function instanceof BaseNode) {
return ((BaseNode)function).setIsFunction();
}
return function;
}
/**
* Calculate a synthetic eval location for a node for the stacktrace, for example src#17<eval>
* @param node a node
* @return eval location
*/
private String evalLocation(final IdentNode node) {
final Source source = lc.getCurrentFunction().getSource();
final int pos = node.position();
return new StringBuilder().
append(source.getName()).
append('#').
append(source.getLine(pos)).
append(':').
append(source.getColumn(pos)).
append("<eval>").
toString();
}
/**
* Check whether a call node may be a call to eval. In that case we
* clone the args in order to create the following construct in
* {@link CodeGenerator}
*
* <pre>
* if (calledFuntion == buildInEval) {
* eval(cloned arg);
* } else {
* cloned arg;
* }
* </pre>
*
* @param callNode call node to check if it's an eval
*/
private CallNode checkEval(final CallNode callNode) {
if (callNode.getFunction() instanceof IdentNode) {
final List<Expression> args = callNode.getArgs();
final IdentNode callee = (IdentNode)callNode.getFunction();
// 'eval' call with at least one argument
if (args.size() >= 1 && EVAL.symbolName().equals(callee.getName())) {
final List<Expression> evalArgs = new ArrayList<>(args.size());
for(final Expression arg: args) {
evalArgs.add((Expression)ensureUniqueNamesIn(arg).accept(this));
}
return callNode.setEvalArgs(new CallNode.EvalArgs(evalArgs, evalLocation(callee)));
}
}
return callNode;
}
    /**
     * Helper that given a loop body makes sure that it is not terminal if it
     * has a continue that leads to the loop header or to outer loops' loop
     * headers. This means that, even if the body ends with a terminal
     * statement, we cannot tag it as terminal
     *
     * @param loopBody the loop body to check
     * @return true if control flow may escape the loop
     */
    private static boolean controlFlowEscapes(final LexicalContext lex, final Block loopBody) {
        final List<Node> escapes = new ArrayList<>();

        loopBody.accept(new NodeVisitor<LexicalContext>(new LexicalContext()) {
            @Override
            public Node leaveBreakNode(final BreakNode node) {
                // Any break in the body may transfer control past the loop.
                escapes.add(node);
                return node;
            }

            @Override
            public Node leaveContinueNode(final ContinueNode node) {
                // all inner loops have been popped.
                if (lex.contains(node.getTarget(lex))) {
                    // Continue targets this loop or an enclosing one, so
                    // control can reach a loop header outside the body.
                    escapes.add(node);
                }
                return node;
            }
        });

        return !escapes.isEmpty();
    }
    // Clears the loop body's terminal flag (and records the escape on the
    // loop) when a break/continue can transfer control out of the loop;
    // otherwise returns the loop node unchanged.
    @SuppressWarnings("unchecked")
    private <T extends LoopNode> T checkEscape(final T loopNode) {
        final boolean escapes = controlFlowEscapes(lc, loopNode.getBody());
        if (escapes) {
            return (T)loopNode.
                setBody(lc, loopNode.getBody().setIsTerminal(lc, false)).
                setControlFlowEscapes(lc, escapes);
        }
        return loopNode;
    }
private Node addStatement(final Statement statement) {
lc.appendStatement(statement);
return statement;
}
private void addStatementEnclosedInBlock(final Statement stmt) {
BlockStatement b = BlockStatement.createReplacement(stmt, Collections.<Statement>singletonList(stmt));
if(stmt.isTerminal()) {
b = b.setBlock(b.getBlock().setIsTerminal(null, true));
}
addStatement(b);
}
/**
* An internal expression has a symbol that is tagged internal. Check if
* this is such a node
*
* @param expression expression to check for internal symbol
* @return true if internal, false otherwise
*/
private static boolean isInternalExpression(final Expression expression) {
if (!(expression instanceof IdentNode)) {
return false;
}
final Symbol symbol = ((IdentNode)expression).getSymbol();
return symbol != null && symbol.isInternal();
}
/**
* Is this an assignment to the special variable that hosts scripting eval
* results, i.e. __return__?
*
* @param expression expression to check whether it is $evalresult = X
* @return true if an assignment to eval result, false otherwise
*/
private static boolean isEvalResultAssignment(final Node expression) {
final Node e = expression;
if (e instanceof BinaryNode) {
final Node lhs = ((BinaryNode)e).lhs();
if (lhs instanceof IdentNode) {
return ((IdentNode)lhs).getName().equals(RETURN.symbolName());
}
}
return false;
}
}<|fim▁end|> | public Node leaveSwitchNode(final SwitchNode switchNode) {
if(!switchNode.isUniqueInteger()) {
// Wrap it in a block so its internally created tag is restricted in scope |
<|file_name|>factory.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
destination.factory
'''<|fim▁hole|>from destination.aws import AwsDestination
from exceptions import AutocertError
from config import CFG
from app import app
class DestinationFactoryError(AutocertError):
def __init__(self, destination):
msg = f'destination factory error with {destination}'
super(DestinationFactoryError, self).__init__(msg)
def create_destination(destination, ar, cfg, timeout, verbosity):
d = None
if destination == 'aws':
d = AwsDestination(ar, cfg, verbosity)
elif destination == 'zeus':
d = ZeusDestination(ar, cfg, verbosity)
else:
raise DestinationFactoryError(destination)
dests = list(CFG.destinations.zeus.keys())
if d.has_connectivity(timeout, dests):
return d<|fim▁end|> |
from destination.zeus import ZeusDestination |
<|file_name|>electrum_es_ES.ts<|end_file_name|><|fim▁begin|><TS language="es_ES" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Haz clic derecho para editar la dirección o la etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Crea una nueva direccióon</translation>
</message>
<message>
<source>&New</source>
<translation>&Nuevo</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la direccón seleccionada al portapapeles del sistema</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Copiar</translation>
</message>
<message>
<source>C&lose</source>
<translation>C&errar</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Copiar Direccón</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Elimina la dirección seleccionada de la lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporta los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Eliminar</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Elige la dirección a la que enviar las monedas</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Elige la direccón con la que recibir monedas</translation>
</message>
<message>
<source>C&hoose</source>
<translation>E&legir</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Enviando direcciones</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Recibiendo direcciones</translation>
</message>
<message>
<source>These are your Electrum addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Estas son tus direcciones de Electrum para enviar pagos. Comprueba siempre la cantidad y la dirección receptora antes de enviar monedas.</translation>
</message>
<message>
<source>These are your Electrum addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Estas son tus direcciones de Electrum para recibir pagos. Se recomienda usar una nueva dirección receptora para cada transacción</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Copiar &Etiqueta</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exportar Lista de Direcciones</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Archivo separado por comas (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exportacón Fallida</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Ha ocurrido un error intentando guardar la lista de direcciones en %1. Por favor intentalo de nuevo.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Dialogo de Contraseña</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Introduzca la contraseña</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repite la nueva contraseña</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Encriptar cartera</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operacón necesita tu contraseña de la cartera para desbloquear la cartera.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Desbloquear cartera</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operación requiere tu contraseña de la cartera para desencriptar la cartera.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Desencriptar cartera</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Cambiar contraseña</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Confirmar encriptación de la cartera</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR ELECTRUMS</b>!</source>
<translation>Advertencia: Si encriptas tu cartera y pierdes tu contraseña, <b>PERDERÁS TODOS TUS ELECTRUMS</B></translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Estás seguro ue deseas encriptar tu cartera?</translation>
</message>
<message>
<source>Electrum Core will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your electrums from being stolen by malware infecting your computer.</source>
<translation>Electrum Core se cerrará ahora para finalizar el proceso de encriptación. Recuerda que encriptar tu cartera no protege completamente tus electrums de ser robados por malware infectando tu ordenador.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Cualquier copia de seguridad anterior del archivo de tu cartera debería ser remplazado con el nuevo archivo encriptado. Por motivos de seguridad, las copias de seguridad anteriores de la cartera desencriptada quedaran inusables tan pronto como empieces a usar la nueva cartera encriptada.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Advertencia: La Tecla de Bloqueo de Mayusculas esta activada!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Cartera encriptada</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introduzca la nueva contraseña de la cartera. <br/>Por favor utilice una contraseña de <b>diez o mas caracteres aleatorios</b>, o <b>ocho o mas palabras</b>.</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Introduzca la antigua contraseña y la nueva contraseña en la cartera.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Encriptación de la cartera fallida</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>La encriptación de la cartera ha fallado debido a un error interno. Tu cartera no ha sido encriptada.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Las contraseñas proporcianadas no se corresponden.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Desbloqueo de la cartera fallido</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contraseña introducida para desencriptar la cartera es incorrecta.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Desencriptación de la cartera fallida</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Contraseña de la cartera cambiada correctamente</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context><|fim▁hole|> <translation>Firmar &mensaje...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Vista general</translation>
</message>
<message>
<source>Node</source>
<translation>Nodo</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Mostrar vista general de la cartera</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transacciones</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Navegar historial de transacciones</translation>
</message>
<message>
<source>E&xit</source>
<translation>S&alir</translation>
</message>
<message>
<source>Quit application</source>
<translation>Salir de la aplicación</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Acerca de &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Muestra información acerca de Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opciones...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Encriptar Cartera...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Hacer copia de seguridad de la cartera...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Cambiar contraseña...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Enviando direcciones...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Recibiendo direcciones..</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Abrir &URI...</translation>
</message>
<message>
<source>Electrum Core client</source>
<translation>Cliente Electrum Core</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Importando bloques desde disco...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindexando bloques en el disco...</translation>
</message>
<message>
<source>Send coins to a Electrum address</source>
<translation>Envia monedas a una dirección Electrum</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Crea una copia de seguridad de tu cartera en otra ubicación</translation>
</message>
</context>
<context>
<name>ClientModel</name>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>&Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>&Address</source>
<translation>Dirección</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
</context>
<context>
<name>Intro</name>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
</context>
<context>
<name>OverviewPage</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
</context>
<context>
<name>ReceiveCoinsDialog</name>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Copy &Address</source>
<translation>&Copiar Direccón</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Exporting Failed</source>
<translation>Exportacón Fallida</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Archivo separado por comas (*.csv)</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporta los datos de la pestaña actual a un archivo</translation>
</message>
</context>
<context>
<name>electrum-core</name>
</context>
</TS><|fim▁end|> | <context>
<name>ElectrumGUI</name>
<message>
<source>Sign &message...</source> |
<|file_name|>cwlutils.py<|end_file_name|><|fim▁begin|>"""Useful utilities for handling CWL inputs and outputs.
This is shared functionality abstracted across multiple approaches, currently
mostly handling CWL records. This needs some generalization to apply across
non-variant calling workflows.<|fim▁hole|>import collections
import pprint
import toolz as tz
def _get_all_cwlkeys(items):
"""Retrieve cwlkeys from inputs, handling defaults which can be null.
When inputs are null in some and present in others, this creates unequal
keys in each sample, confusing decision making about which are primary and extras.
"""
default_keys = set(["metadata__batch", "config__algorithm__validate",
"config__algorithm__validate_regions", "validate__summary",
"validate__tp", "validate__fp", "validate__fn"])
all_keys = set([])
for data in items:
all_keys.update(set(data["cwl_keys"]))
all_keys.update(default_keys)
return all_keys
def split_data_cwl_items(items):
"""Split a set of CWL output dictionaries into data samples and CWL items.
Handles cases where we're arrayed on multiple things, like a set of regional
VCF calls and data objects.
"""
key_lens = set([])
for data in items:
key_lens.add(len(_get_all_cwlkeys([data])))
extra_key_len = min(list(key_lens)) if len(key_lens) > 1 else None
data_out = []
extra_out = []
for data in items:
if extra_key_len and len(_get_all_cwlkeys([data])) == extra_key_len:
extra_out.append(data)
else:
data_out.append(data)
if len(extra_out) == 0:
return data_out, {}
else:
cwl_keys = extra_out[0]["cwl_keys"]
for extra in extra_out[1:]:
cur_cwl_keys = extra["cwl_keys"]
assert cur_cwl_keys == cwl_keys, pprint.pformat(extra_out)
cwl_extras = collections.defaultdict(list)
for data in items:
for key in cwl_keys:
cwl_extras[key].append(data[key])
data_final = []
for data in data_out:
for key in cwl_keys:
data.pop(key)
data_final.append(data)
return data_final, dict(cwl_extras)
def samples_to_records(samples):
"""Convert samples into output CWL records.
"""
from bcbio.pipeline import run_info
RECORD_CONVERT_TO_LIST = set(["config__algorithm__tools_on", "config__algorithm__tools_off",
"reference__genome_context"])
all_keys = _get_all_cwlkeys(samples)
out = []
for data in samples:
for raw_key in sorted(list(all_keys)):
key = raw_key.split("__")
if tz.get_in(key, data) is None:
data = tz.update_in(data, key, lambda x: None)
data["cwl_keys"].append(raw_key)
if raw_key in RECORD_CONVERT_TO_LIST:
val = tz.get_in(key, data)
if not val: val = []
elif not isinstance(val, (list, tuple)): val = [val]
data = tz.update_in(data, key, lambda x: val)
data["metadata"] = run_info.add_metadata_defaults(data.get("metadata", {}))
out.append(data)
return out<|fim▁end|> | """ |
<|file_name|>dump_db_classes.py<|end_file_name|><|fim▁begin|>import shelve
db = shelve.open('class-shelve')
for key in db:<|fim▁hole|>print(bob.lastName())
print(db['tom'].lastName)<|fim▁end|> | print(key, '=>\n)', db[key].name, db[key].pay)
bob = db['bob'] |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>import Vue from 'vue'
import Kanban from './components/kanban-app.vue'
import store from './store.js'
new Vue({
el: '#app',<|fim▁hole|>})<|fim▁end|> | store,
render: h => h(Kanban) |
<|file_name|>basic_services_broker.py<|end_file_name|><|fim▁begin|>from ..broker import Broker
class BasicServicesBroker(Broker):
controller = "basic_services"
def authenticate(self, **kwargs):
"""Authenticates the user with NetMRI.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param username: The username of the user as whom to login.
:type username: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param password: The password of the user as whom to login.
:type password: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` %Y-%m-%d %H:%M:%S
:param datetime_format: The format to use for date/time input and output.
:type datetime_format: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timezone: Date/time input and output will be performed in the specified timezone. Should be specified as HH:MM offset from GMT. For example, -05:00 specified US Eastern Time, whereas +09:00 specifies Tokyo time. Alternatively, a timezone name may be used. See the API Data Structures page for details. If omitted, the server's configured timezone will be used.
:type timezone: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("authenticate"), kwargs)
def base_uri(self, **kwargs):
"""Returns the base URI for the specified version.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param version: The API version for which the base_uri is needed.
:type version: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("base_uri"), kwargs)
def license_info(self, **kwargs):
"""Returns license information for this NetMRI server.
**Inputs**
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False<|fim▁hole|>
:return serial_number: NetMRI serial number.
:rtype serial_number: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return license_id: NetMRI License identifier.
:rtype license_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return license_expiration: NetMRI License expiration.
:rtype license_expiration: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return license_type: NetMRI License type
:rtype license_type: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return mode: NetMRI operation mode. One of 'standalone', 'master' or 'collector'.
:rtype mode: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return maintenance_expiration: Maintenance expiration for appliance.
:rtype maintenance_expiration: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_limit: Licensed limit of devices.
:rtype device_limit: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return interface_limit: Licensed limit of interfaces.
:rtype interface_limit: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return spm_limit: Licensed limit of number of ports controlled by SPM.
:rtype spm_limit: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return modules_short_name: Short symbolic names of licensed features.
:rtype modules_short_name: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return modules_support: Support statuses for corresponding modules in modules_short_names.
:rtype modules_support: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return modules_expiration: Expiration times for corresponding modules in modules_short_names.
:rtype modules_expiration: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return modules_name: Long names for corresponding modules in modules_short_names.
:rtype modules_name: Array of String
"""
return self.api_request(self._get_method_fullname("license_info"), kwargs)
def server_info(self, **kwargs):
"""Returns basic information regarding this NetMRI server.
**Inputs**
| ``api version min:`` 2.6
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param api_versions_only_ind: Only include API version information in the output.
:type api_versions_only_ind: Boolean
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return netmri_version: The NetMRI version number running on this appliance or virtual machine.
:rtype netmri_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return latest_api_version: The most recent API version supported by this NetMRI.
:rtype latest_api_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return requested_api_version: The API version that executed this call.
:rtype requested_api_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return host_name: The configured host name of the NetMRI appliance.
:rtype host_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return operating_mode: Indicates if the NetMRI is running in standalone, collector, or operations center mode.
:rtype operating_mode: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return mgmt_ip: The IPv4 management address of this NetMRI, if configured.
:rtype mgmt_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return mgmt_ip6: The IPv6 management address of this NetMRI, if configured.
:rtype mgmt_ip6: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return scan_ip: The IPv4 SCAN (analysis) address of this NetMRI, if configured.
:rtype scan_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return scan_ip6: The IPv6 SCAN (analysis) address of this NetMRI, if configured.
:rtype scan_ip6: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return operational_status: The status of NetMRI. Usually ready, can also be upgrading. Values might change in the future.
:rtype operational_status: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return supported_api_versions: All API versions supported by this NetMRI.
:rtype supported_api_versions: Array of String
"""
return self.api_request(self._get_method_fullname("server_info"), kwargs)
def server_time(self, **kwargs):
"""Returns the current system time of this NetMRI server.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("server_time"), kwargs)
def restart(self, **kwargs):
"""Restarts the application.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("restart"), kwargs)
def consolidate(self, **kwargs):
"""Runs consolidation
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param managers: Comma-separated list of consolidator managers. Must be one of Aggregate, Config, Event, Group, Issue, Job, Normal, Policy, Routing, Settings, Stats, Subnet, Switching, Time, Topology, Voip, Vulnerability, Wireless
:type managers: Array
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param collector: Collector name. In case when this method called on OC this parameter is required
:type collector: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("consolidate"), kwargs)
def settings_generate(self, **kwargs):
"""Generates xml with current configuration data
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param version: The version of xml to be generated
:type version: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return xml: A string containing the full xml as collected from the running config.
:rtype xml: String
"""
return self.api_request(self._get_method_fullname("settings_generate"), kwargs)
def settings_current(self, **kwargs):
"""Reports the status of an xml configuration file
**Inputs**
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return xml: A string containing the full xml as collected from the running config.
:rtype xml: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return messages: An array of hashes that contain details about the validation process
:rtype messages: Array of Hash
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return status: A string representation of the status of the request. Will be one of; success, error, pending
:rtype status: String
"""
return self.api_request(self._get_method_fullname("settings_current"), kwargs)
def settings_apply(self, **kwargs):
"""Parses the xml provided by config_id, then applies the changes. You should not need to call this directly!
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param config_id: The configuration id reported when the xml was uploaded to the unit
:type config_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param mods: Modifications for applying
:type mods: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return xml: A string containing the full xml as collected from the running config.
:rtype xml: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return messages: An array of hashes that contain details about the validation process
:rtype messages: Array of Hash
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return status: A string representation of the status of the request. Will be one of; success, error, pending
:rtype status: String
"""
return self.api_request(self._get_method_fullname("settings_apply"), kwargs)
def settings_status(self, **kwargs):
"""Reports the status of an xml configuration file
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param config_id: The configuration id reported when the xml was uploaded to the unit
:type config_id: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return messages: An array of hashes that contain details about the validation process
:rtype messages: Array of Hash
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return status: A string representation of the status of the validation. Will be one of; success, error, pending
:rtype status: String
"""
return self.api_request(self._get_method_fullname("settings_status"), kwargs)
def settings_info(self, **kwargs):
"""Shows probe info, running_config, candidate_config, and list of installed dsb
**Inputs**
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return grid_members: Hash of grid members info including master and slaves (probes)
:rtype grid_members: String
"""
return self.api_request(self._get_method_fullname("settings_info"), kwargs)
def set_session_value(self, **kwargs):
"""save data in a cache that is session wise
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param key: key associated with that value - will be used to retrieve the same value
:type key: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param value: value to save in the session cache
:type value: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("set_session_value"), kwargs)
def get_session_value(self, **kwargs):
"""retrieve data in the session cache that formerly saved
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param key: key associated with that value - will be used to retrieve the same value
:type key: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param default_value: Default value in case key/value does not exist in session. If key does not exist and default value is nil the response is 400 with record not found message
:type default_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return value: value associated with that key
:rtype value: String
"""
return self.api_request(self._get_method_fullname("get_session_value"), kwargs)<|fim▁end|> | | ``default:`` None |
<|file_name|>test1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
<|fim▁hole|>from framework import do_exit, get_globals, main
def do_work():
global g_test_import
global globals1
print("do_work")
globals1 = get_globals()
g_test_import = globals1["g_test_import"]
print("do_work: g_test_import = %s" % str(g_test_import))
main(do_work)<|fim▁end|> | |
<|file_name|>iso.hh<|end_file_name|><|fim▁begin|>/*
* The ckFileSystem library provides core software functionality.
* Copyright (C) 2006-2012 Christian Kindahl
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.<|fim▁hole|> * You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <cxxtest/TestSuite.h>
#include <string.h>
#include "ckfilesystem/iso.hh"
using namespace ckfilesystem;
class IsoTestSuite : public CxxTest::TestSuite
{
public:
void test_write_file_name_l1()
{
unsigned char buffer[256];
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT(""), CHARSET_ISO), 0);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcd"), CHARSET_ISO), 4);
TS_ASSERT_SAME_DATA(buffer, "ABCD", 4);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefghijk"), CHARSET_ISO), 8);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH", 8);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefgh.ijk"), CHARSET_ISO), 12);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.IJK", 12);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefgh.ijkl"), CHARSET_ISO), 12);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.IJK", 12);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefghi.jkl"), CHARSET_ISO), 12);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.JKL", 12);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefghij.kl"), CHARSET_ISO), 11);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.KL", 11);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefghijk.l"), CHARSET_ISO), 10);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.L", 10);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, ckT("abcdefghijkl."), CHARSET_ISO), 9);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.", 9);
const char *expected_l1 =
"______________________________________________"
"."
"_"
"0123456789"
"_______"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"____"
"_"
"_"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"_______________________________________________________________"
"_______________________________________________________________"
"______";
for (size_t i = 1; i < 255; i++)
{
ckcore::tchar tmp[2] = { 0,0 };
tmp[0] = static_cast<ckcore::tchar>(i);
TS_ASSERT_EQUALS(iso_write_file_name_l1(buffer, tmp, CHARSET_ISO), 1);
TS_ASSERT_EQUALS(buffer[0], static_cast<unsigned char>(expected_l1[i]));
}
}
void test_write_file_name_l2()
{
unsigned char buffer[256];
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT(""), CHARSET_ISO), 0);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcd"), CHARSET_ISO), 4);
TS_ASSERT_SAME_DATA(buffer, "ABCD", 4);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefghijk"), CHARSET_ISO), 11);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGHIJK", 11);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefgh.ijk"), CHARSET_ISO), 12);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.IJK", 12);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefgh.ijkl"), CHARSET_ISO), 13);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGH.IJKL", 13);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefghi.jkl"), CHARSET_ISO), 13);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGHI.JKL", 13);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefghij.kl"), CHARSET_ISO), 13);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGHIJ.KL", 13);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefghijk.l"), CHARSET_ISO), 13);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGHIJK.L", 13);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, ckT("abcdefghijkl."), CHARSET_ISO), 13);
TS_ASSERT_SAME_DATA(buffer, "ABCDEFGHIJKL.", 13);
const char *expected_l2 =
"______________________________________________"
"."
"_"
"0123456789"
"_______"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"____"
"_"
"_"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"_______________________________________________________________"
"_______________________________________________________________"
"______";
for (size_t i = 1; i < 255; i++)
{
ckcore::tchar tmp[2] = { 0,0 };
tmp[0] = static_cast<ckcore::tchar>(i);
TS_ASSERT_EQUALS(iso_write_file_name_l2(buffer, tmp, CHARSET_ISO), 1);
TS_ASSERT_EQUALS(buffer[0], static_cast<unsigned char>(expected_l2[i]));
}
}
};<|fim▁end|> | * |
<|file_name|>default.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Template Add-on
Copyright (C) 2016 Demo
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib2, urllib, xbmcgui, xbmcplugin, xbmcaddon, xbmc, re, sys, os
try:
import json
except:
import simplejson as json
import yt
# --- Add-on identity and remote content locations ---------------------------
ADDON_NAME = 'Carrera'
addon_id = 'plugin.video.Carrera'
# All menu/search pages are fetched relative to this base URL.
Base_Url = 'http://herovision.x10host.com/carrera/'
Main_Menu_File_Name = 'main.php'
# Page names (without .php) scanned by Search().
search_filenames = ['sv1','teh','s5']
########################################################################################
### FAVOURITES SECTION IS NOT THIS AUTHORS CODE, I COULD NOT GET IT TO REMOVE FAVOURITES SO ALL CREDIT DUE TO THEM, SORRY IM NOT SURE WHERE IT CAME FROM BUT GOOD WORK :) ###
ADDON = xbmcaddon.Addon(id=addon_id)
ADDON_PATH = xbmc.translatePath('special://home/addons/'+addon_id)
# NOTE(review): ADDON_PATH has no trailing separator -- confirm that the
# icon/fanart paths below resolve as intended on the target platform.
ICON = ADDON_PATH + 'icon.png'
FANART = ADDON_PATH + 'fanart.jpg'
PATH = 'Carrera'
VERSION = '0.0.1'
Dialog = xbmcgui.Dialog()
# Per-user data files: favourites (JSON list) and watched history (flat text).
addon_data = xbmc.translatePath('special://home/userdata/addon_data/'+addon_id+'/')
favorites = os.path.join(addon_data, 'favorites.txt')
watched = addon_data + 'watched.txt'
source_file = Base_Url + 'source_file.php'
debug = ADDON.getSetting('debug')
# Ensure the data directory and the watched file exist before first use.
if os.path.exists(addon_data)==False:
    os.makedirs(addon_data)
if not os.path.exists(watched):
    open(watched,'w+')  # NOTE(review): handle is never closed -- confirm intended
if os.path.exists(favorites)==True:
    FAV = open(favorites).read()
else: FAV = []
# Snapshot of the watched history taken once at startup.
watched_read = open(watched).read()
def Main_Menu():
    """Build the root menu by scraping the remote main.php page.

    Each scraped entry is dispatched on its name/url: the favourites entry
    opens mode 6, .php links open a sub-menu (mode 1), the search entry opens
    mode 3, the index entry opens mode 10, and anything else is listed as a
    directly playable item (mode 2).
    """
    OPEN = Open_Url(Base_Url+Main_Menu_File_Name)
    Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
    for url,icon,desc,fanart,name in Regex:
        if name == '[COLORskyblue]F[COLORblue]avourites[/COLOR]':
            Menu(name,url,6,icon,fanart,desc)
        elif 'php' in url:
            # NOTE(review): this branch precedes the search/index name checks,
            # so those entries are only reached when their url lacks 'php'.
            Menu(name,url,1,icon,fanart,desc)
        elif name == '[COLORskyblue]S[COLORblue]earch[/COLOR]':
            Menu('[COLORskyblue]S[COLORblue]earch[/COLOR]',url,3,icon,fanart,desc)
        elif name == '[COLORskyblue]i[COLORblue]dex[/COLOR]':
            Menu('[COLORskyblue]O[COLORblue]nline Lists[/COLOR]',url,10,icon,fanart,desc)
        else:
            Play(name,url,2,icon,fanart,desc)
    setView('tvshows', 'Media Info 3')
def Second_Menu(url):
    """Render a sub-menu page scraped from ``url``.

    .php links become folder entries (mode 1) and everything else becomes a
    playable item (mode 2). Titles whose url appears in the watched file are
    prefixed with a red star.
    """
    OPEN = Open_Url(url)
    Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
    for url,icon,desc,fanart,name in Regex:
        Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
        for item in Watched:
            if item == url:
                # Mark as watched, collapsing a doubled star into a single one.
                name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
                # BUG FIX: the original called 'print_text_file.close' without
                # parentheses, so the handle was never closed; use a context
                # manager instead.
                # NOTE(review): this appends the display *name* while the
                # lookup above compares against the *url* -- confirm intended.
                with open(watched, "a") as print_text_file:
                    print_text_file.write('item="'+name+'"\n')
        if 'php' in url:
            Menu(name,url,1,icon,fanart,desc)
        else:
            Play(name,url,2,icon,fanart,desc)
    setView('tvshows', 'Media Info 3')
def index_Menu():
    """Entry menu for the online-index section (reached via mode 7).

    Most entries are currently disabled (commented out); only the list of
    index sites (mode 10) is exposed.
    """
    #Menu('Favourites','',5,'','','','','')
    Menu('List of Index\'s','',10,'','','')
    # Menu('Search','',6,ICON,FANART,'','','')
    # Menu('[COLORred]Press here to add a source url[/COLOR] ','',2,'','','','','')
def Index_List():
    """List every index site declared in the remote source file as a
    folder entry (mode 8)."""
    page = Open_Url(source_file)
    for url, name in re.compile('url="(.+?)">name="(.+?)"').findall(page):
        Menu(name, url, 8, '', '', '')
#####################################MAIN REGEX LOOP ###############################
def Main_Loop(url):
    """Scrape a plain HTTP directory index page.

    Sub-directories become folder entries (mode 8, recursing back into this
    function) and file entries are handed to Clean_name(); parent links,
    archives, artwork, subtitle files and column-sort links are skipped.
    """
    HTML = Open_Url(url)
    match = re.compile('<a href="(.+?)">(.+?)</a>').findall(HTML)
    # Fragments identifying entries that are never playable video files.
    skip_tokens = ('..', 'rar', 'jpg', 'vtx', 'srt')
    for url2, name in match:
        url3 = url + url2
        # Idiom fix: the original expressed these skips as a chain of elif
        # branches that all did 'pass'; one membership test is equivalent.
        if any(token in url3 for token in skip_tokens) or 'C=' in url2:
            continue
        if '/' in url2:
            # Trailing slash -> sub-directory: list it as a folder entry.
            Menu((name).replace('/',''),url3,8,ICON,FANART,'','','')
        else:
            Clean_name(name,url3)
################################### TIDY UP NAME #############################
def Clean_name(name,url3):
    """Strip release-group tags, container extensions, quality markers and
    years from a scraped file name, then list it as a playable item.

    NOTE(review): ``clean_name`` is taken from ``name15``, so the chains
    name16..name25 only feed ``search_name``. ``name25`` is assigned twice
    (the second assignment, also derived from name24, overwrites the first)
    and ``search_name`` is unused because the Imdb_Scrape call below is
    commented out -- confirm whether that tail is dead code.
    """
    name1 = (name).replace('S01E','S01 E').replace('(MovIran).mkv','').replace('The.Walking.Dead','').replace('.mkv','').replace('Tehmovies.com.mkv','').replace('Nightsdl','').replace('Ganool','')
    name2=(name1).replace('.',' ').replace(' (ParsFilm).mkv','').replace('_TehMovies.Com.mkv','').replace(' (SaberFun.IR).mkv','').replace('[UpFilm].mkv','').replace('(Bia2Movies)','')
    name3=(name2).replace('.mkv','').replace('.Film2Movie_INFO.mkv','').replace('.HEVC.Film2Movie_INFO.mkv','').replace('.ParsFilm.mkv ','').replace('(SaberFunIR)','')
    name4=(name3).replace('.INTERNAL.','').replace('.Film2Movie_INFO.mkv','').replace('.web-dl.Tehmovies.net.mkv','').replace('S01E06','S01 E06').replace('S01E07','S01 E07')
    name5=(name4).replace('S01E08','S01 E08').replace('S01E09','S01 E09').replace('S01E10','S01 E10').replace('.Tehmovies.net','').replace('.WEBRip.Tehmovies.com.mkv','')
    name6=(name5).replace('.mp4','').replace('.mkv','').replace('.Tehmovies.ir','').replace('x265HEVC','').replace('Film2Movie_INFO','').replace('Tehmovies.com.mkv','')
    name7=(name6).replace(' (ParsFilm)','').replace('Tehmovies.ir.mkv','').replace('.480p',' 480p').replace('.WEBrip','').replace('.web-dl','').replace('.WEB-DL','')
    name8=(name7).replace('.','').replace('.Tehmovies.com','').replace('480p.Tehmovies.net</',' 480p').replace('720p.Tehmovies.net','720p').replace('.480p',' 480p')
    name9=(name8).replace('.480p.WEB-DL',' 480p').replace('.mkv','').replace('.INTERNAL.','').replace('720p',' 720p').replace('.Tehmovi..>','').replace('.Tehmovies.net.mkv','')
    name10=(name9).replace('..720p',' 720p').replace('.REPACK.Tehmovies..>','').replace('.Tehmovies.com.mkv','').replace('.Tehmovies..>','').replace('Tehmovies.ir..>','')
    name11=(name10).replace('Tehmovies.ne..>','').replace('.HDTV.x264-mRs','').replace('...>','').replace('.Tehmovies...>','').replace('.Tehmovies.com.mp4','')
    name12=(name11).replace('.Tehmovies.com.mp4','').replace('_MovieFarsi','').replace('_MovieFar','').replace('_com','').replace('>','').replace('avi','').replace('(1)','')
    name13=(name12).replace('(2)','').replace('cd 2','').replace('cd 1','').replace('-dos-xvid','').replace('divx','').replace('Xvid','').replace('DVD','').replace('DVDrip','')
    name14=(name13).replace('DvDrip-aXXo','').replace('[','').replace(']','').replace('(','').replace(')','').replace('XviD-TLF-','').replace('CD1','').replace('CD2','')
    name15=(name14).replace('CD3','').replace('mp4','').replace('&','&').replace('HDRip','').replace('-','').replace(' ',' ').replace('xvid','').replace('1080p','')
    name16=(name15).replace('1970','').replace('1971','').replace('1972','').replace('1973','').replace('1974','').replace('1975','').replace('1976','').replace('1977','')
    name17=(name16).replace('1978','').replace('1979','').replace('1980','').replace('1981','').replace('1982','').replace('1983','').replace('1984','').replace('1985','')
    name18=(name17).replace('1986','').replace('1987','').replace('1988','').replace('1989','').replace('1990','').replace('1991','').replace('1992','').replace('1993','')
    name19=(name18).replace('1994','').replace('1995','').replace('1996','').replace('1997','').replace('1998','').replace('1999','').replace('2000','').replace('2001','')
    name20=(name19).replace('2002','').replace('2003','').replace('2004','').replace('2005','').replace('2006','').replace('2007','').replace('2008','').replace('2009','')
    name21=(name20).replace('2010','').replace('2011','').replace('2012','').replace('2013','').replace('2014','').replace('2015','').replace('2016','').replace('720p','')
    name22=(name21).replace('360p','').replace(' ',' ').replace('BluRay','').replace('rip','').replace('WEBDL','').replace('s01','').replace('s02','').replace('S02','')
    name23=(name22).replace('s03','').replace('s04','').replace('s05','').replace('s06','').replace('s07','').replace('s08','').replace('s09','').replace('S01','')
    name24=(name23).replace('S03','').replace('S04',' ').replace('S05','').replace('S06','').replace('S07','').replace('S08','').replace('S09','').replace('E01','')
    name25=(name24).replace('E02','').replace('E03','').replace('E04','').replace('E05','').replace('E06','').replace('E07','').replace('E08','').replace('E09','').replace('e01','')
    name25=(name24).replace('e02','').replace('e03','').replace('e04','').replace('e05','').replace('e06','').replace('e07','').replace('e08','').replace('e09','').replace('e01','')
    clean_name = name15
    search_name = name25
    #if ADDON.getSetting('Data')=='true':
    #    Imdb_Scrape(url3,clean_name,search_name)
    #if ADDON.getSetting('Data')=='false':
    Play(clean_name,url3,2,ICON,FANART,'','','')
def Search():
    """Prompt the user for a search term and scan every page named in
    ``search_filenames`` for matching entries, listing folders (mode 1)
    and playable items (mode 2) as in Second_Menu().
    """
    Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
    Search_Title = Search_Name.lower()
    if Search_Title == '':
        # Empty query: show nothing.
        pass
    else:
        for file_Name in search_filenames:
            search_URL = Base_Url + file_Name + '.php'
            OPEN = Open_Url(search_URL)
            # Open_Url returns the sentinel 'Opened' when the page is empty
            # or the request failed.
            if OPEN != 'Opened':
                Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(OPEN)
                for url,icon,desc,fanart,name in Regex:
                    if Search_Title in name.lower():
                        Watched = re.compile('item="(.+?)"\n').findall(str(watched_read))
                        for item in Watched:
                            if item == url:
                                # Prefix watched titles with a red star.
                                name = '[COLORred]* [/COLOR]'+(name).replace('[COLORred]* [/COLOR][COLORred]* [/COLOR]','[COLORred]* [/COLOR]')
                                print_text_file = open(watched,"a")
                                print_text_file.write('item="'+name+'"\n')
                                # NOTE(review): '.close' is missing parentheses,
                                # so the handle is never actually closed.
                                print_text_file.close
                        if 'php' in url:
                            Menu(name,url,1,icon,fanart,desc)
                        else:
                            Play(name,url,2,icon,fanart,desc)
        setView('tvshows', 'Media Info 3')
####################################################################PROCESSES###################################################
def Open_Url(url):
    """Fetch ``url`` with a desktop browser user agent.

    Returns the response body, or the sentinel string 'Opened' when the
    request failed or the body was empty (callers test for this sentinel).
    """
    request = urllib2.Request(url)
    request.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
    page = ''
    try:
        handle = urllib2.urlopen(request)
        page = handle.read()
        handle.close()
    except:
        # Best effort: any network/HTTP error falls through to the sentinel.
        pass
    if page != '':
        return page
    return 'Opened'
def setView(content, viewType):
    """Set the directory content type for the current plugin listing.

    ``viewType`` is accepted for call-site compatibility but currently unused.
    """
    if content:
        xbmcplugin.setContent(int(sys.argv[1]), content)
<|fim▁hole|> u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def Play(name,url,mode,iconimage,fanart,description,showcontext=True,allinfo={}):
    """Add a playable (non-folder) item to the current plugin listing.

    :param showcontext: truthy to attach a context menu; the special value
        'fav' additionally offers "Remove from favourites".
    :param allinfo: currently unused; kept for call-site compatibility.
        NOTE(review): mutable default is harmless here since it is never
        mutated, but confirm before extending.
    :returns: the result of xbmcplugin.addDirectoryItem.
    """
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
    ok=True
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    liz.setInfo( type="Video", infoLabels={ "Title": name } )
    liz.setProperty( "Fanart_Image", fanart )
    if showcontext:
        contextMenu = []
        if showcontext == 'fav':
            contextMenu.append(('Remove from '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=5&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(name))))
        if not name in FAV:
            contextMenu.append(('Add to '+ADDON_NAME+' Favorites','XBMC.RunPlugin(%s?mode=4&name=%s&url=%s&iconimage=%s&fav_mode=%s)'
                                %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), mode)))
        liz.addContextMenuItems(contextMenu)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
    # BUG FIX: removed an unreachable xbmcplugin.endOfDirectory() call that
    # followed this return in the original.
    return ok
def GetPlayerCore():
    """Map the 'core-player' setting to the matching xbmc player-core
    constant, defaulting to PLAYER_CORE_AUTO on any error or unknown value.
    """
    try:
        # NOTE(review): getSet is not defined in this file; a NameError here
        # is caught below and falls through to AUTO -- confirm intended.
        PlayerMethod=getSet("core-player")
        if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
        elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
        elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
        else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
    except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
    # BUG FIX: removed the unreachable 'return True' and
    # xbmcplugin.endOfDirectory() statements that followed this return.
    return PlayerMeth
def resolve(url):
    """Record ``url`` in the watched-history file and start playback."""
    # BUG FIX: the original called 'print_text_file.close' without
    # parentheses, so the handle was never closed.
    with open(watched, "a") as print_text_file:
        print_text_file.write('item="'+url+'"\n')
    play=xbmc.Player(GetPlayerCore())
    # NOTE(review): urlresolver is imported but unused; kept in case the
    # import registers resolvers as a side effect -- confirm.
    import urlresolver
    try: play.play(url)
    except: pass
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addon_log(string):
    """Write a line to the Kodi log when the 'debug' setting is enabled."""
    if debug == 'true':
        # BUG FIX: the original formatted two values into a single %s
        # placeholder ('"%s" % (addon_version, string)' -> TypeError) and
        # referenced an undefined name 'addon_version'.
        xbmc.log("["+ADDON_NAME+"]: %s" % string)
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
    """Append one favourite entry to the JSON favourites file, creating the
    file first if needed. Entries are stored as 7-tuples:
    (name, url, iconimage, fanart, mode, playlist, regexs).
    """
    favList = []
    try:
        # Stored names are utf-8 byte strings; ignore undecodable characters.
        name = name.encode('utf-8', 'ignore')
    except:
        pass
    if os.path.exists(favorites)==False:
        addon_log('Making Favorites File')
        favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
        with open(favorites, "w") as a:
            a.write(json.dumps(favList))
    else:
        addon_log('Appending Favorites')
        data = json.loads(open(favorites).read())
        # CONSISTENCY FIX: the append path previously stored a 5-tuple while
        # the create path stores a 7-tuple; store the same shape so
        # getFavorites() finds playlist/regexs without its fallback.
        data.append((name,url,iconimage,fanart,mode,playlist,regexs))
        with open(favorites, "w") as b:
            b.write(json.dumps(data))
def getFavorites():
    """List every saved favourite, creating a placeholder favourites file on
    first use. Entries older than the current format may be 5-tuples, so
    playlist/regexs fall back to None.
    """
    if os.path.exists(favorites)==False:
        favList = []
        addon_log('Making Favorites File')
        # Seed the file with a non-functional header entry.
        favList.append(('[COLORskyblue]C[COLORblue]arrera Favourites Section[/COLOR]','','','','','',''))
        a = open(favorites, "w")
        a.write(json.dumps(favList))
        a.close()
    else:
        items = json.loads(open(favorites).read())
        total = len(items)  # NOTE(review): unused -- confirm it can be removed
        for i in items:
            name = i[0]
            url = i[1]
            iconimage = i[2]
            try:
                fanArt = i[3]
                if fanArt == None:
                    raise
            except:
                if ADDON.getSetting('use_thumb') == "true":
                    fanArt = iconimage
                else:
                    # NOTE(review): module-level 'fanart' is still None at
                    # this point (set later from query params) -- confirm.
                    fanArt = fanart
            try: playlist = i[5]
            except: playlist = None
            try: regexs = i[6]
            except: regexs = None
            # Mode 0 entries are listed with an empty mode string.
            if i[4] == 0:
                Menu(name,url,'',iconimage,fanart,'','fav')
            else:
                Menu(name,url,i[4],iconimage,fanart,'','fav')
def rmFavorite(name):
    """Delete the first favourites entry whose stored name matches ``name``,
    then refresh the current Kodi container."""
    entries = json.loads(open(favorites).read())
    for pos in range(len(entries)):
        if entries[pos][0] == name:
            del entries[pos]
            out = open(favorites, "w")
            out.write(json.dumps(entries))
            out.close()
            break
    xbmc.executebuiltin("XBMC.Container.Refresh")
def get_params():
    """Parse the plugin query string (sys.argv[2]) into a dict of raw
    (still URL-quoted) key/value pairs.

    Returns an empty list when there is no query string (callers only probe
    the result with try/except key access, so the list sentinel is kept for
    backward compatibility).
    """
    param = []
    paramstring = sys.argv[2]
    if len(paramstring) >= 2:
        # DEAD CODE REMOVED: the original also stripped a trailing '/' from a
        # copy of the string that was never read afterwards.
        cleanedparams = paramstring.replace('?', '')
        param = {}
        for pair in cleanedparams.split('&'):
            splitparams = pair.split('=')
            if len(splitparams) == 2:
                param[splitparams[0]] = splitparams[1]
    return param
# --- Extract the current invocation's query parameters -----------------------
# Each parameter is optional; missing keys simply leave the default None.
params=get_params()
url=None
name=None
iconimage=None
mode=None
fanart=None
description=None
fav_mode=None
try:
    fav_mode=int(params["fav_mode"])
except:
    pass
try:
    url=urllib.unquote_plus(params["url"])
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    iconimage=urllib.unquote_plus(params["iconimage"])
except:
    pass
try:
    mode=int(params["mode"])
except:
    pass
try:
    fanart=urllib.unquote_plus(params["fanart"])
except:
    pass
try:
    description=urllib.unquote_plus(params["description"])
except:
    pass
# Trace the decoded invocation to the log/stdout.
print str(PATH)+': '+str(VERSION)
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#####################################################END PROCESSES##############################################################
# --- Dispatch on the requested mode ------------------------------------------
if mode == None: Main_Menu()
elif mode == 1 : Second_Menu(url)
elif mode == 2 :
    # Playback: strip the YouTube watch prefix and hand the id to the yt
    # helper; anything else is resolved/played directly.
    if 'youtube' in url:
        url = (url).replace('https://www.youtube.com/watch?v=','').replace('http://www.youtube.com/watch?v=','')
        yt.PlayVideo(url)
    else:
        resolve(url)
elif mode == 3 : Search()
elif mode==4:
    # Add to favourites: trim the name down to its base title first.
    addon_log("addFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==5:
    # Remove from favourites, using the same name trimming as mode 4.
    addon_log("rmFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    rmFavorite(name)
elif mode==6:
    addon_log("getFavorites")
    getFavorites()
elif mode == 7 : index_Menu()
elif mode == 8 : Main_Loop(url)
# NOTE(review): Source_File is not defined anywhere in this file, so mode 9
# raises NameError -- confirm whether the mode is still reachable.
elif mode == 9 : Source_File()
elif mode ==10 : Index_List()
xbmcplugin.addSortMethod(int(sys.argv[1]), 1)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
<|file_name|>share_tensor_test.py<|end_file_name|><|fim▁begin|># third party
# third party<|fim▁hole|>import numpy as np
import pytest
# syft absolute
# absolute
from syft.core.tensor.smpc.share_tensor import ShareTensor
@pytest.mark.smpc
def test_bit_extraction() -> None:
    """bit_extraction(i) should return the i-th bit of each int32 element."""
    share = ShareTensor(rank=0, parties_info=[], ring_size=2**32)
    data = np.array([[21, 32], [-54, 89]], dtype=np.int32)
    share.child = data
    # Bit 31 is the sign bit: only -54 is negative.
    exp_res1 = np.array([[False, False], [True, False]], dtype=np.bool_)
    res = share.bit_extraction(31).child
    assert (res == exp_res1).all()
    # Bit 2 (value 4) is set only in 21 (0b10101).
    exp_res2 = np.array([[True, False], [False, False]], dtype=np.bool_)
    res = share.bit_extraction(2).child
    assert (res == exp_res2).all()
@pytest.mark.smpc
def test_bit_extraction_exception() -> None:
    """Shifting outside the valid bit range of the ring must raise."""
    share = ShareTensor(rank=0, parties_info=[], ring_size=2**32)
    data = np.array([[21, 32], [-54, 89]], dtype=np.int32)
    share.child = data
    # 33 exceeds the 32-bit ring; -1 is not a valid bit position.
    with pytest.raises(Exception):
        share >> 33
    with pytest.raises(Exception):
        share >> -1
<|file_name|>webgl_paint_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasCommonMsg, CanvasData, CanvasMsg, CanvasPixelData};
use canvas_traits::{FromLayoutMsg, byte_swap};
use euclid::size::Size2D;
use gleam::gl;
use ipc_channel::ipc::{self, IpcSender, IpcSharedMemory};
use offscreen_gl_context::{ColorAttachmentType, GLContext, GLLimits, GLContextAttributes, NativeGLContext};
use std::borrow::ToOwned;
use std::sync::mpsc::channel;
use util::thread::spawn_named;
use webrender_traits;
/// Backend used by the paint thread: either WebRender owns the GL context
/// (identified by a context id), or we own a native context and read pixels
/// back ourselves, optionally publishing them as a WebRender image.
enum WebGLPaintTaskData {
    WebRender(webrender_traits::RenderApi, webrender_traits::WebGLContextId),
    Readback(GLContext<NativeGLContext>, (Option<(webrender_traits::RenderApi, webrender_traits::ImageKey)>)),
}
/// State of one WebGL paint thread: the current drawing-buffer size plus the
/// rendering backend (WebRender-owned context or native readback context).
pub struct WebGLPaintThread {
    size: Size2D<i32>,
    data: WebGLPaintTaskData,
}
fn create_readback_painter(size: Size2D<i32>,
attrs: GLContextAttributes,
webrender_api: Option<webrender_traits::RenderApi>)
-> Result<(WebGLPaintThread, GLLimits), String> {
let context = try!(GLContext::<NativeGLContext>::new(size, attrs, ColorAttachmentType::Texture, None));
let limits = context.borrow_limits().clone();
let webrender_api_and_image_key = webrender_api.map(|wr| {
let key = wr.alloc_image();
(wr, key)
});
let painter = WebGLPaintThread {
size: size,
data: WebGLPaintTaskData::Readback(context, webrender_api_and_image_key)<|fim▁hole|> Ok((painter, limits))
}
impl WebGLPaintThread {
    /// Try to obtain a WebRender-managed WebGL context; on failure (or when
    /// no WebRender API is available) fall back to a native readback context.
    fn new(size: Size2D<i32>,
           attrs: GLContextAttributes,
           webrender_api_sender: Option<webrender_traits::RenderApiSender>)
           -> Result<(WebGLPaintThread, GLLimits), String> {
        if let Some(sender) = webrender_api_sender {
            let wr_api = sender.create_api();
            match wr_api.request_webgl_context(&size, attrs) {
                Ok((id, limits)) => {
                    let painter = WebGLPaintThread {
                        data: WebGLPaintTaskData::WebRender(wr_api, id),
                        size: size
                    };
                    Ok((painter, limits))
                },
                Err(msg) => {
                    warn!("Initial context creation failed, falling back to readback: {}", msg);
                    create_readback_painter(size, attrs, Some(wr_api))
                }
            }
        } else {
            create_readback_painter(size, attrs, None)
        }
    }

    /// Forward a WebGL command to whichever backend owns the GL context.
    fn handle_webgl_message(&self, message: webrender_traits::WebGLCommand) {
        debug!("WebGL message: {:?}", message);
        match self.data {
            WebGLPaintTaskData::WebRender(ref api, id) => {
                api.send_webgl_command(id, message);
            }
            WebGLPaintTaskData::Readback(ref ctx, _) => {
                message.apply(ctx);
            }
        }
    }

    /// Creates a new `WebGLPaintThread` on a dedicated thread and returns an
    /// `IpcSender` to communicate with it, along with the context's GL
    /// limits. Fails with the context-creation error message otherwise.
    pub fn start(size: Size2D<i32>,
                 attrs: GLContextAttributes,
                 webrender_api_sender: Option<webrender_traits::RenderApiSender>)
                 -> Result<(IpcSender<CanvasMsg>, GLLimits), String> {
        let (sender, receiver) = ipc::channel::<CanvasMsg>().unwrap();
        let (result_chan, result_port) = channel();
        spawn_named("WebGLThread".to_owned(), move || {
            // Report creation success/failure back to the caller before
            // entering the message loop.
            let mut painter = match WebGLPaintThread::new(size, attrs, webrender_api_sender) {
                Ok((thread, limits)) => {
                    result_chan.send(Ok(limits)).unwrap();
                    thread
                },
                Err(e) => {
                    result_chan.send(Err(e)).unwrap();
                    return
                }
            };
            painter.init();
            // Serve canvas messages until asked to close.
            loop {
                match receiver.recv().unwrap() {
                    CanvasMsg::WebGL(message) => painter.handle_webgl_message(message),
                    CanvasMsg::Common(message) => {
                        match message {
                            CanvasCommonMsg::Close => break,
                            // TODO(emilio): handle error nicely
                            CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),
                        }
                    },
                    CanvasMsg::FromLayout(message) => {
                        match message {
                            FromLayoutMsg::SendData(chan) =>
                                painter.send_data(chan),
                        }
                    }
                    CanvasMsg::Canvas2d(_) => panic!("Wrong message sent to WebGLThread"),
                }
            }
        });
        result_port.recv().unwrap().map(|limits| (sender, limits))
    }

    /// Send the current frame to layout: for readback contexts this reads
    /// the pixels back from GL (flipping and swizzling them); for WebRender
    /// contexts it just sends the context id.
    fn send_data(&mut self, chan: IpcSender<CanvasData>) {
        match self.data {
            WebGLPaintTaskData::Readback(_, ref webrender_api_and_image_key) => {
                let width = self.size.width as usize;
                let height = self.size.height as usize;
                let mut pixels = gl::read_pixels(0, 0,
                                                 self.size.width as gl::GLsizei,
                                                 self.size.height as gl::GLsizei,
                                                 gl::RGBA, gl::UNSIGNED_BYTE);
                // flip image vertically (texture is upside down)
                let orig_pixels = pixels.clone();
                let stride = width * 4;
                for y in 0..height {
                    let dst_start = y * stride;
                    let src_start = (height - y - 1) * stride;
                    let src_slice = &orig_pixels[src_start .. src_start + stride];
                    (&mut pixels[dst_start .. dst_start + stride]).clone_from_slice(&src_slice[..stride]);
                }
                // rgba -> bgra
                byte_swap(&mut pixels);
                if let Some((ref wr, wr_image_key)) = *webrender_api_and_image_key {
                    // TODO: This shouldn't be a common path, but try to avoid
                    // the spurious clone().
                    wr.update_image(wr_image_key,
                                    width as u32,
                                    height as u32,
                                    webrender_traits::ImageFormat::RGBA8,
                                    pixels.clone());
                }
                let pixel_data = CanvasPixelData {
                    image_data: IpcSharedMemory::from_bytes(&pixels[..]),
                    image_key: webrender_api_and_image_key.as_ref().map(|&(_, key)| key),
                };
                chan.send(CanvasData::Pixels(pixel_data)).unwrap();
            }
            WebGLPaintTaskData::WebRender(_, id) => {
                chan.send(CanvasData::WebGL(id)).unwrap();
            }
        }
    }

    /// Resize the drawing buffer. Growing requires a real GL resize; when
    /// shrinking, only the scissor rectangle is adjusted.
    #[allow(unsafe_code)]
    fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {
        match self.data {
            WebGLPaintTaskData::Readback(ref mut context, _) => {
                if size.width > self.size.width ||
                   size.height > self.size.height {
                    try!(context.resize(size));
                    self.size = context.borrow_draw_buffer().unwrap().size();
                } else {
                    self.size = size;
                    unsafe { gl::Scissor(0, 0, size.width, size.height); }
                }
            }
            WebGLPaintTaskData::WebRender(_, _) => {
                // TODO
            }
        }
        Ok(())
    }

    /// Make the readback GL context current on this thread (no-op for
    /// WebRender-managed contexts).
    fn init(&mut self) {
        if let WebGLPaintTaskData::Readback(ref context, _) = self.data {
            context.make_current().unwrap();
        }
    }
}
impl Drop for WebGLPaintThread {
    /// Release the WebRender image allocated for readback contexts, if any.
    fn drop(&mut self) {
        if let WebGLPaintTaskData::Readback(_, Some((ref mut wr, image_key))) = self.data {
            wr.delete_image(image_key);
        }
    }
}
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
Setup script for the Gimbal package
"""<|fim▁hole|>from setuptools import find_packages
def readme():
    """Return the README contents with its first line (the header image)
    replaced by a plain RST title."""
    title = '======\nGimbal\n======\n'
    with open('README.rst', 'r') as handle:
        handle.readline()  # drop the image header line
        return title + handle.read()
def requirements():
    """Return the requirement specifiers from requirements.txt, one per
    line, with surrounding whitespace stripped."""
    with open('requirements.txt', 'r') as handle:
        return [entry.strip() for entry in handle]
# read the current version number; this defines __version__ in this namespace
exec(open('gimbal/_version.py').read())

setup(
    name='gimbal',
    version=__version__,  # defined by the exec above
    description=('Tools for importing, creating, editing and querying ' +
                 'molecular geometries'),
    long_description=readme(),
    long_description_content_type='text/x-rst',
    keywords='gimbal molecule geometry displacement transformation 3D',
    url='https://github.com/ryjmacdonell/gimbal',
    author='Ryan J. MacDonell',
    author_email='[email protected]',
    license='MIT',
    packages=find_packages(),
    # Command-line entry scripts shipped with the package.
    scripts=['bin/convgeom', 'bin/measure', 'bin/nudge', 'bin/subst'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Scientific/Engineering :: Chemistry'
    ],
    install_requires=requirements()
)
<|file_name|>concurrenttools.py<|end_file_name|><|fim▁begin|>"""
Concurrent downloaders
"""
import os
import sys
import signal
import logging
import itertools
from functools import partial
from concurrent.futures import ProcessPoolExecutor
from pomp.core.base import (
BaseCrawler, BaseDownloader, BaseCrawlException,
)
from pomp.contrib.urllibtools import UrllibDownloadWorker
from pomp.core.utils import iterator, Planned
log = logging.getLogger('pomp.contrib.concurrent')
<|fim▁hole|>def _run_download_worker(params, request):
pid = os.getpid()
log.debug("Download worker pid=%s params=%s", pid, params)
try:
# Initialize worker and call get_one method
return params['worker_class'](
**params.get('worker_kwargs', {})
).process(request)
except Exception:
log.exception(
"Exception on download worker pid=%s request=%s", pid, request
)
raise
def _run_crawler_worker(params, response):
    """Instantiate the configured crawler worker inside this process and
    handle a single response, returning extracted items followed by any
    follow-up requests."""
    pid = os.getpid()
    log.debug("Crawler worker pid=%s params=%s", pid, params)
    try:
        worker_cls = params['worker_class']
        worker = worker_cls(**params.get('worker_kwargs', {}))
        extracted = worker.extract_items(response)
        follow_ups = worker.next_requests(response)
        if not follow_ups:
            return list(iterator(extracted))
        combined = itertools.chain(
            iterator(extracted),
            iterator(follow_ups),
        )
        return list(combined)
    except Exception:
        log.exception(
            "Exception on crawler worker pid=%s request=%s", pid, response
        )
        raise
class ConcurrentMixin(object):
    """Shared future-completion plumbing for the concurrent downloader and
    crawler: transfers an executor future's outcome onto a Planned object."""

    def _done(self, request, done_future, future):
        # Resolve done_future with the worker's result, or wrap any worker
        # exception (with traceback) in a BaseCrawlException.
        try:
            response = future.result()
        except Exception as e:
            log.exception('Exception on %s', request)
            done_future.set_result(BaseCrawlException(
                request,
                exception=e,
                exc_info=sys.exc_info(),
            ))
        else:
            done_future.set_result(response)
class ConcurrentDownloader(BaseDownloader, ConcurrentMixin):
    """Downloader that fetches requests in a ProcessPoolExecutor.

    :param worker_class: class instantiated inside each worker process to
        perform the actual download
    :param worker_kwargs: keyword arguments passed to ``worker_class``
    :param pool_size: number of worker processes
    """
    def __init__(
            self, worker_class,
            worker_kwargs=None, pool_size=5,):

        # configure executor
        self.pool_size = pool_size
        self.executor = ProcessPoolExecutor(max_workers=self.pool_size)

        # prepare worker params
        self.worker_params = {
            'worker_class': worker_class,
            'worker_kwargs': worker_kwargs or {},
        }

        # ctrl-c support for python2.x
        # trap sigint
        signal.signal(signal.SIGINT, lambda s, f: s)

        super(ConcurrentDownloader, self).__init__()

    def process(self, crawler, request):
        # delegate request processing to the executor
        future = self.executor.submit(
            _run_download_worker, self.worker_params, request,
        )

        # build Planned object
        done_future = Planned()

        # when executor finish request - fire done_future
        future.add_done_callback(
            partial(self._done, request, done_future)
        )

        return done_future

    def get_workers_count(self):
        # One logical download slot per pool process.
        return self.pool_size

    def stop(self, crawler):
        self.executor.shutdown()
class ConcurrentUrllibDownloader(ConcurrentDownloader):
    """ConcurrentDownloader preconfigured to fetch data with urllib
    (UrllibDownloadWorker).

    :param pool_size: pool size of ProcessPoolExecutor
    :param timeout: request timeout in seconds, forwarded to each worker
    """
    def __init__(self, pool_size=5, timeout=None):
        super(ConcurrentUrllibDownloader, self).__init__(
            pool_size=pool_size,
            worker_class=UrllibDownloadWorker,
            worker_kwargs={
                'timeout': timeout
            },
        )
class ConcurrentCrawler(BaseCrawler, ConcurrentMixin):
"""Concurrent ProcessPoolExecutor crawler
:param pool_size: pool size of ProcessPoolExecutor
:param timeout: request timeout in seconds
"""
def __init__(self, worker_class, worker_kwargs=None, pool_size=5):
# configure executor
self.pool_size = pool_size
self.executor = ProcessPoolExecutor(max_workers=self.pool_size)
# prepare worker params
self.worker_params = {
'worker_class': worker_class,
'worker_kwargs': worker_kwargs or {},
}
# inherit ENTRY_REQUESTS from worker_class
self.ENTRY_REQUESTS = getattr(worker_class, 'ENTRY_REQUESTS', None)
def process(self, response):
# delegate response processing to the executor
future = self.executor.submit(
_run_crawler_worker, self.worker_params, response,
)
# build Planned object
done_future = Planned()
# when executor finish response processing - fire done_future
future.add_done_callback(
partial(self._done, response, done_future)
)
return done_future<|fim▁end|> | |
<|file_name|>combineLatestWith.ts<|end_file_name|><|fim▁begin|>import { ObservableInputTuple, OperatorFunction, Cons } from '../types';
import { combineLatest } from './combineLatest';
/**
* Create an observable that combines the latest values from all passed observables and the source
* into arrays and emits them.
*
* Returns an observable, that when subscribed to, will subscribe to the source observable and all
* sources provided as arguments. Once all sources emit at least one value, all of the latest values
* will be emitted as an array. After that, every time any source emits a value, all of the latest values
* will be emitted as an array.
*
* This is a useful operator for eagerly calculating values based off of changed inputs.
*
* ## Example
*
* Simple concatenation of values from two inputs
*
* ```ts
* import { fromEvent, combineLatestWith, map } from 'rxjs';
*
* // Setup: Add two inputs to the page
* const input1 = document.createElement('input');<|fim▁hole|> * document.body.appendChild(input2);
*
* // Get streams of changes
* const input1Changes$ = fromEvent(input1, 'change');
* const input2Changes$ = fromEvent(input2, 'change');
*
* // Combine the changes by adding them together
* input1Changes$.pipe(
* combineLatestWith(input2Changes$),
* map(([e1, e2]) => (<HTMLInputElement>e1.target).value + ' - ' + (<HTMLInputElement>e2.target).value)
* )
* .subscribe(x => console.log(x));
* ```
*
* @param otherSources the other sources to subscribe to.
* @return A function that returns an Observable that emits the latest
* emissions from both source and provided Observables.
*/
export function combineLatestWith<T, A extends readonly unknown[]>(
...otherSources: [...ObservableInputTuple<A>]
): OperatorFunction<T, Cons<T, A>> {
return combineLatest(...otherSources);
}<|fim▁end|> | * document.body.appendChild(input1);
* const input2 = document.createElement('input'); |
<|file_name|>validation.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
from __future__ import division, print_function, unicode_literals
import re
import unicodedata
h1_start = re.compile(r"^\s*=(?P<title>[^=]+)=*[ \t]*")
valid_title = re.compile(r"[^=]+")
general_heading = re.compile(r"^\s*(={2,6}(?P<title>" + valid_title.pattern +
")=*)\s*$", flags=re.MULTILINE)
invalid_symbols = re.compile(r"[^\w\-_\s]+")
def strip_accents(s):
return ''.join(
(c for c in unicodedata.normalize('NFD', s) if unicodedata.category(
c) != 'Mn'))
REPLACEMENTS = {
ord('ä'): 'ae',
ord('ö'): 'oe',
ord('ü'): 'ue',
ord('ß'): 'ss',
ord('Ä'): 'Ae',
ord('Ö'): 'Oe',
ord('Ü'): 'Ue',
ord('ẞ'): 'SS'
}
def substitute_umlauts(s):
return s.translate(REPLACEMENTS)
<|fim▁hole|>
def remove_unallowed_chars(s):
s = invalid_symbols.sub('', s)
return s
def remove_and_compress_whitespaces(s):
return '_'.join(s.split()).strip('_')
def turn_into_valid_short_title(title, short_title_set=(), max_length=20):
st = substitute_umlauts(title)
st = strip_accents(st)
st = remove_unallowed_chars(st)
st = remove_and_compress_whitespaces(st)
st = st.lstrip('1234567890-_')
st = st[:min(len(st), max_length)]
if not st:
st = 'sub'
if st not in short_title_set:
return st
else:
i = 0
while True:
i += 1
suffix = str(i)
new_st = st[:min(max_length - len(suffix), len(st))] + suffix
if new_st not in short_title_set:
return new_st
def get_heading_matcher(level=0):
if 0 < level < 7:
s = "%d" % level
elif level == 0:
s = "1, 6"
else:
raise ValueError(
"level must be between 1 and 6 or 0, but was %d." % level)
pattern = r"^\s*={%s}(?P<title>[^=§]+)" \
r"(?:§\s*(?P<short_title>[^=§\s][^=§]*))?=*\s*$"
return re.compile(pattern % s, flags=re.MULTILINE)<|fim▁end|> | |
<|file_name|>slice.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::collections::{BTreeMap, HashMap, HashSet};
use std::sync::Arc;
use anyhow::{anyhow, Error, Result};
use blobrepo::BlobRepo;
use context::CoreContext;
use derived_data_utils::DerivedUtils;
use futures::stream::{self, FuturesUnordered, StreamExt, TryStreamExt};
use mononoke_types::{ChangesetId, Generation};
use skiplist::SkiplistIndex;
use slog::info;
/// Determine which heads are underived in any of the derivers.
async fn underived_heads(
ctx: &CoreContext,
repo: &BlobRepo,
derivers: &[Arc<dyn DerivedUtils>],
heads: &[ChangesetId],
) -> Result<HashSet<ChangesetId>> {
derivers
.iter()
.map(|deriver| async move {
Ok::<_, Error>(stream::iter(
deriver
.pending(ctx.clone(), repo.clone(), heads.to_vec())
.await?
.into_iter()<|fim▁hole|> .collect::<FuturesUnordered<_>>()
.try_flatten()
.try_collect::<HashSet<_>>()
.await
}
/// If skiplist parents are not available, fetch the parents and their
/// generation from the repo.
async fn parents_with_generations(
ctx: &CoreContext,
repo: &BlobRepo,
csid: ChangesetId,
) -> Result<Vec<(ChangesetId, Generation)>> {
let parents = repo
.get_changeset_parents_by_bonsai(ctx.clone(), csid)
.await?;
let parents_with_generations =
stream::iter(parents.into_iter().map(|parent_csid| async move {
match repo.get_generation_number(ctx.clone(), parent_csid).await? {
Some(gen) => Ok(Some((parent_csid, gen))),
None => Err(anyhow!(
"Could not find generation number for commit {} parent {}",
csid,
parent_csid
)),
}
}))
.buffered(100)
.try_filter_map(|maybe_csid_gen| async move { Ok::<_, Error>(maybe_csid_gen) })
.try_collect::<Vec<_>>()
.await?;
Ok(parents_with_generations)
}
/// Slice a respository into a sequence of slices for derivation.
///
/// For large repositories with a long history, computing the full set of
/// commits before beginning backfilling is slow, and cannot be resumed
/// if interrupted.
///
/// This function makes large repositories more tractible by using the
/// skiplist index to divide the repository history into "slices", where
/// each slice consists of the commits known to the skiplist index that
/// are within a range of generations.
///
/// Each slice's heads should be derived together and will be ancestors of
/// subsequent slices. The returned slices consist only of heads which
/// haven't been derived by the provided derivers. Slicing stops once
/// all derived commits are reached.
///
/// For example, given a repository where the skiplists have the structure:
///
/// E (gen 450)
/// :
/// D (gen 350)
/// :
/// : C (gen 275)
/// :/
/// B (gen 180)
/// :
/// A (gen 1)
///
/// And a slice size of 200, this function will generate slices:
///
/// (0, [A, B])
/// (200, [C, D])
/// (400, [E])
///
/// If any of these heads are already derived then they are omitted. Empty
/// slices are also omitted.
///
/// This allows derivation of the first slice with underived commits to begin
/// more quickly, as the rest of the repository history doesn't need to be
/// traversed (just the ancestors of B and A).
///
/// Returns the number of slices, and an iterator where each item is
/// (slice_id, heads).
pub(crate) async fn slice_repository(
ctx: &CoreContext,
repo: &BlobRepo,
skiplist_index: &SkiplistIndex,
derivers: &[Arc<dyn DerivedUtils>],
heads: Vec<ChangesetId>,
slice_size: u64,
) -> Result<(usize, impl Iterator<Item = (u64, Vec<ChangesetId>)>)> {
let heads = underived_heads(ctx, repo, derivers, heads.as_slice()).await?;
if skiplist_index.indexed_node_count() == 0 {
// This skiplist index is not populated. Generate a single
// slice with all heads.
info!(
ctx.logger(),
"Repository not sliced as skiplist index is not populated",
);
let heads = heads.into_iter().collect();
return Ok((1, vec![(0, heads)].into_iter().rev()));
}
// Add any unindexed heads to the skiplist index.
let changeset_fetcher = repo.get_changeset_fetcher();
for head in heads.iter() {
skiplist_index
.add_node(ctx, &changeset_fetcher, *head, std::u64::MAX)
.await?;
}
let mut head_generation_groups: BTreeMap<u64, Vec<ChangesetId>> = BTreeMap::new();
stream::iter(heads.into_iter().map(|csid| async move {
match repo.get_generation_number(ctx.clone(), csid).await? {
Some(gen) => Ok(Some((csid, gen))),
None => Err(anyhow!(
"Could not find generation number for head {}",
csid
)),
}
}))
.buffered(100)
.try_for_each(|maybe_csid_gen| {
if let Some((csid, gen)) = maybe_csid_gen {
let gen_group = (gen.value() / slice_size) * slice_size;
head_generation_groups
.entry(gen_group)
.or_default()
.push(csid);
}
async { Ok::<_, Error>(()) }
})
.await?;
let mut slices = Vec::new();
while let Some((cur_gen, mut heads)) = head_generation_groups.pop_last() {
info!(
ctx.logger(),
"Adding slice starting at generation {} with {} heads ({} slices queued)",
cur_gen,
heads.len(),
head_generation_groups.len()
);
let mut new_heads_groups = HashMap::new();
let mut seen: HashSet<_> = heads.iter().cloned().collect();
while let Some(csid) = heads.pop() {
let skip_parents = match skiplist_index.get_furthest_edges(csid) {
Some(skip_parents) => skip_parents,
None => {
// Ordinarily this shouldn't happen, as the skiplist ought
// to refer to commits that are also in the skiplist.
// However, if the commit is missing from the skiplist, we
// can look up the parents and their generations directly.
parents_with_generations(ctx, repo, csid).await?
}
};
for (parent, gen) in skip_parents {
if gen.value() >= cur_gen {
// This commit is in the same generation group.
if seen.insert(parent) {
heads.push(parent);
}
} else {
// This commit is in a new generation group.
let gen_group = (gen.value() / slice_size) * slice_size;
new_heads_groups.insert(parent, gen_group);
}
}
}
// Add all commits we've seen to the slice. The heads from the start
// of this iteration would be sufficient, however providing additional
// changesets will allow traversal of the graph to find all commits to
// run faster as it can fetch the parents of multiple commits at once.
slices.push((cur_gen, seen.into_iter().collect()));
// For each new head, check if it needs derivation, and if so, add it
// to its generation group.
let new_heads: Vec<_> = new_heads_groups.keys().cloned().collect();
let underived_new_heads =
underived_heads(ctx, repo, derivers, new_heads.as_slice()).await?;
for head in underived_new_heads {
if let Some(gen_group) = new_heads_groups.get(&head) {
head_generation_groups
.entry(*gen_group)
.or_default()
.push(head);
}
}
}
if !slices.is_empty() {
info!(
ctx.logger(),
"Repository sliced into {} slices requiring derivation",
slices.len()
);
}
Ok((slices.len(), slices.into_iter().rev()))
}<|fim▁end|> | .map(Ok::<_, Error>),
))
}) |
<|file_name|>angelloModelSpec.js<|end_file_name|><|fim▁begin|>describe('Service: angelloModel', function() {
//load module for service
beforeEach(module('Angello'));
var modelService;
beforeEach(inject(function(angelloModel) {
modelService = angelloModel;
}));
describe('#getStatuses', function() {
it('should return seven different statuses', function() {
expect(modelService.getStatuses().length).toBe(7);
});
it('should have a status named "To Do"', function() {
expect(modelService.getStatuses().map(function(status) { // get just the name of each status
return status.name;
})).toContain('To Do');
});
describe('#getTypes', function() {
it('should return four different types', function() {
expect(modelService.getTypes().length).toBe(4);
});
it('should have a type named "Bug"', function() {
expect(modelService.getTypes().map(function(status) { // get just the name of each status
return status.name;
})).
toContain('Bug');
});
});
describe('#getStories', function() {
it('should return six different stories', function() {
expect(modelService.getStories().length).toBe(6);
});
it('should return stories that have a description property',
function() {
modelService.getStories().forEach(function(story) {
expect(story.description).toBeDefined();<|fim▁hole|>
});
});<|fim▁end|> | });
});
}); |
<|file_name|>test_polynomial.py<|end_file_name|><|fim▁begin|>"""Tests for polynomial module.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.polynomial.polynomial as poly
from numpy.testing import (
TestCase, assert_almost_equal, assert_raises,
assert_equal, assert_, run_module_suite)
def trim(x):
return poly.polytrim(x, tol=1e-6)
T0 = [1]
T1 = [0, 1]
T2 = [-1, 0, 2]
T3 = [0, -3, 0, 4]
T4 = [1, 0, -8, 0, 8]
T5 = [0, 5, 0, -20, 0, 16]
T6 = [-1, 0, 18, 0, -48, 0, 32]
T7 = [0, -7, 0, 56, 0, -112, 0, 64]
T8 = [1, 0, -32, 0, 160, 0, -256, 0, 128]
T9 = [0, 9, 0, -120, 0, 432, 0, -576, 0, 256]
Tlist = [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9]
class TestConstants(TestCase):
def test_polydomain(self):
assert_equal(poly.polydomain, [-1, 1])
def test_polyzero(self):
assert_equal(poly.polyzero, [0])
def test_polyone(self):
assert_equal(poly.polyone, [1])
def test_polyx(self):
assert_equal(poly.polyx, [0, 1])
class TestArithmetic(TestCase):
def test_polyadd(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] += 1
res = poly.polyadd([0] * i + [1], [0] * j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_polysub(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = poly.polysub([0] * i + [1], [0] * j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_polymulx(self):
assert_equal(poly.polymulx([0]), [0])
assert_equal(poly.polymulx([1]), [0, 1])
for i in range(1, 5):
ser = [0] * i + [1]
tgt = [0] * (i + 1) + [1]
assert_equal(poly.polymulx(ser), tgt)
def test_polymul(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(i + j + 1)
tgt[i + j] += 1
res = poly.polymul([0] * i + [1], [0] * j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_polydiv(self):
# check zero division
assert_raises(ZeroDivisionError, poly.polydiv, [1], [0])
# check scalar division
quo, rem = poly.polydiv([2], [2])
assert_equal((quo, rem), (1, 0))
quo, rem = poly.polydiv([2, 2], [2])
assert_equal((quo, rem), ((1, 1), 0))
# check rest.
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
ci = [0] * i + [1, 2]
cj = [0] * j + [1, 2]
tgt = poly.polyadd(ci, cj)
quo, rem = poly.polydiv(tgt, ci)
res = poly.polyadd(poly.polymul(quo, ci), rem)
assert_equal(res, tgt, err_msg=msg)
class TestEvaluation(TestCase):
# coefficients of 1 + 2*x + 3*x**2
c1d = np.array([1., 2., 3.])
c2d = np.einsum('i,j->ij', c1d, c1d)
c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
# some random values in [-1, 1)
x = np.random.random((3, 5)) * 2 - 1
y = poly.polyval(x, [1., 2., 3.])
def test_polyval(self):
# check empty input
assert_equal(poly.polyval([], [1]).size, 0)
# check normal input)
x = np.linspace(-1, 1)
y = [x ** i for i in range(5)]
for i in range(5):
tgt = y[i]
res = poly.polyval(x, [0] * i + [1])
assert_almost_equal(res, tgt)
tgt = x * (x ** 2 - 1)
res = poly.polyval(x, [0, -1, 0, 1])
assert_almost_equal(res, tgt)
# check that shape is preserved
for i in range(3):
dims = [2] * i
x = np.zeros(dims)
assert_equal(poly.polyval(x, [1]).shape, dims)
assert_equal(poly.polyval(x, [1, 0]).shape, dims)
assert_equal(poly.polyval(x, [1, 0, 0]).shape, dims)
def test_polyvalfromroots(self):
# check exception for broadcasting x values over root array with
# too few dimensions
assert_raises(ValueError, poly.polyvalfromroots,
[1], [1], tensor=False)
# check empty input
assert_equal(poly.polyvalfromroots([], [1]).size, 0)
assert_(poly.polyvalfromroots([], [1]).shape == (0,))
# check empty input + multidimensional roots
assert_equal(poly.polyvalfromroots([], [[1] * 5]).size, 0)
assert_(poly.polyvalfromroots([], [[1] * 5]).shape == (5, 0))
# check scalar input
assert_equal(poly.polyvalfromroots(1, 1), 0)
assert_(poly.polyvalfromroots(1, np.ones((3, 3))).shape == (3,))
# check normal input)
x = np.linspace(-1, 1)
y = [x ** i for i in range(5)]
for i in range(1, 5):
tgt = y[i]
res = poly.polyvalfromroots(x, [0] * i)
assert_almost_equal(res, tgt)
tgt = x * (x - 1) * (x + 1)
res = poly.polyvalfromroots(x, [-1, 0, 1])
assert_almost_equal(res, tgt)
# check that shape is preserved
for i in range(3):
dims = [2] * i
x = np.zeros(dims)
assert_equal(poly.polyvalfromroots(x, [1]).shape, dims)
assert_equal(poly.polyvalfromroots(x, [1, 0]).shape, dims)
assert_equal(poly.polyvalfromroots(x, [1, 0, 0]).shape, dims)
# check compatibility with factorization
ptest = [15, 2, -16, -2, 1]
r = poly.polyroots(ptest)
x = np.linspace(-1, 1)
assert_almost_equal(poly.polyval(x, ptest),
poly.polyvalfromroots(x, r))
# check multidimensional arrays of roots and values
# check tensor=False
rshape = (3, 5)
x = np.arange(-3, 2)
r = np.random.randint(-5, 5, size=rshape)
res = poly.polyvalfromroots(x, r, tensor=False)
tgt = np.empty(r.shape[1:])
for ii in range(tgt.size):
tgt[ii] = poly.polyvalfromroots(x[ii], r[:, ii])
assert_equal(res, tgt)
# check tensor=True
x = np.vstack([x, 2 * x])
res = poly.polyvalfromroots(x, r, tensor=True)
tgt = np.empty(r.shape[1:] + x.shape)
for ii in range(r.shape[1]):
for jj in range(x.shape[0]):
tgt[ii, jj, :] = poly.polyvalfromroots(x[jj], r[:, ii])
assert_equal(res, tgt)
def test_polyval2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
# test exceptions
assert_raises(ValueError, poly.polyval2d, x1, x2[:2], self.c2d)
# test values
tgt = y1 * y2
res = poly.polyval2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
# test shape
z = np.ones((2, 3))
res = poly.polyval2d(z, z, self.c2d)
assert_(res.shape == (2, 3))
def test_polyval3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
# test exceptions
assert_raises(ValueError, poly.polyval3d, x1, x2, x3[:2], self.c3d)
# test values
tgt = y1 * y2 * y3
res = poly.polyval3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
# test shape
z = np.ones((2, 3))
res = poly.polyval3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3))
def test_polygrid2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
# test values
tgt = np.einsum('i,j->ij', y1, y2)
res = poly.polygrid2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
# test shape
z = np.ones((2, 3))
res = poly.polygrid2d(z, z, self.c2d)
assert_(res.shape == (2, 3) * 2)
def test_polygrid3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
# test values
tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
res = poly.polygrid3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
# test shape
z = np.ones((2, 3))
res = poly.polygrid3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3) * 3)
class TestIntegral(TestCase):
def test_polyint(self):
# check exceptions
assert_raises(ValueError, poly.polyint, [0], .5)
assert_raises(ValueError, poly.polyint, [0], -1)
assert_raises(ValueError, poly.polyint, [0], 1, [0, 0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0] * (i - 2) + [1]
res = poly.polyint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
# check single integration with integration constant
for i in range(5):
scl = i + 1
pol = [0] * i + [1]
tgt = [i] + [0] * i + [1 / scl]
res = poly.polyint(pol, m=1, k=[i])
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5):
scl = i + 1
pol = [0] * i + [1]
res = poly.polyint(pol, m=1, k=[i], lbnd=-1)
assert_almost_equal(poly.polyval(-1, res), i)
# check single integration with integration constant and scaling
for i in range(5):
scl = i + 1
pol = [0] * i + [1]
tgt = [i] + [0] * i + [2 / scl]
res = poly.polyint(pol, m=1, k=[i], scl=2)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5):
for j in range(2, 5):
pol = [0] * i + [1]
tgt = pol[:]
for k in range(j):
tgt = poly.polyint(tgt, m=1)
res = poly.polyint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5):
for j in range(2, 5):
pol = [0] * i + [1]
tgt = pol[:]
for k in range(j):
tgt = poly.polyint(tgt, m=1, k=[k])
res = poly.polyint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with lbnd
for i in range(5):
for j in range(2, 5):
pol = [0] * i + [1]
tgt = pol[:]
for k in range(j):
tgt = poly.polyint(tgt, m=1, k=[k], lbnd=-1)
res = poly.polyint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with scaling
for i in range(5):
for j in range(2, 5):
pol = [0] * i + [1]
tgt = pol[:]
for k in range(j):
tgt = poly.polyint(tgt, m=1, k=[k], scl=2)
res = poly.polyint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_polyint_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([poly.polyint(c) for c in c2d.T]).T
res = poly.polyint(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([poly.polyint(c) for c in c2d])
res = poly.polyint(c2d, axis=1)
assert_almost_equal(res, tgt)
tgt = np.vstack([poly.polyint(c, k=3) for c in c2d])
res = poly.polyint(c2d, k=3, axis=1)
assert_almost_equal(res, tgt)
class TestDerivative(TestCase):
def test_polyder(self):
# check exceptions
assert_raises(ValueError, poly.polyder, [0], .5)
assert_raises(ValueError, poly.polyder, [0], -1)
# check that zeroth derivative does nothing
for i in range(5):
tgt = [0] * i + [1]
res = poly.polyder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
# check that derivation is the inverse of integration
for i in range(5):
for j in range(2, 5):
tgt = [0] * i + [1]
res = poly.polyder(poly.polyint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
# check derivation with scaling
for i in range(5):
for j in range(2, 5):
tgt = [0] * i + [1]
res = poly.polyder(poly.polyint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
def test_polyder_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([poly.polyder(c) for c in c2d.T]).T
res = poly.polyder(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([poly.polyder(c) for c in c2d])
res = poly.polyder(c2d, axis=1)
assert_almost_equal(res, tgt)
class TestVander(TestCase):
# some random values in [-1, 1)
x = np.random.random((3, 5)) * 2 - 1
def test_polyvander(self):
# check for 1d x
x = np.arange(3)
v = poly.polyvander(x, 3)
assert_(v.shape == (3, 4))
for i in range(4):
coef = [0] * i + [1]
assert_almost_equal(v[..., i], poly.polyval(x, coef))
# check for 2d x
x = np.array([[1, 2], [3, 4], [5, 6]])
v = poly.polyvander(x, 3)
assert_(v.shape == (3, 2, 4))
for i in range(4):
coef = [0] * i + [1]
assert_almost_equal(v[..., i], poly.polyval(x, coef))
def test_polyvander2d(self):
# also tests polyval2d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3))
van = poly.polyvander2d(x1, x2, [1, 2])
tgt = poly.polyval2d(x1, x2, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = poly.polyvander2d([x1], [x2], [1, 2])
assert_(van.shape == (1, 5, 6))
def test_polyvander3d(self):
# also tests polyval3d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3, 4))
van = poly.polyvander3d(x1, x2, x3, [1, 2, 3])
tgt = poly.polyval3d(x1, x2, x3, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = poly.polyvander3d([x1], [x2], [x3], [1, 2, 3])
assert_(van.shape == (1, 5, 24))
class TestCompanion(TestCase):
def test_raises(self):
assert_raises(ValueError, poly.polycompanion, [])
assert_raises(ValueError, poly.polycompanion, [1])
def test_dimensions(self):
for i in range(1, 5):
coef = [0] * i + [1]
assert_(poly.polycompanion(coef).shape == (i, i))
def test_linear_root(self):
assert_(poly.polycompanion([1, 2])[0, 0] == -.5)
class TestMisc(TestCase):
def test_polyfromroots(self):
res = poly.polyfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1, 5):
roots = np.cos(np.linspace(-np.pi, 0, 2 * i + 1)[1::2])
tgt = Tlist[i]
res = poly.polyfromroots(roots) * 2 ** (i - 1)
assert_almost_equal(trim(res), trim(tgt))
def test_polyroots(self):
assert_almost_equal(poly.polyroots([1]), [])
assert_almost_equal(poly.polyroots([1, 2]), [-.5])
for i in range(2, 5):
tgt = np.linspace(-1, 1, i)
res = poly.polyroots(poly.polyfromroots(tgt))<|fim▁hole|>
def test_polyfit(self):
def f(x):
return x * (x - 1) * (x - 2)
def f2(x):
return x ** 4 + x ** 2 + 1
# Test exceptions
assert_raises(ValueError, poly.polyfit, [1], [1], -1)
assert_raises(TypeError, poly.polyfit, [[1]], [1], 0)
assert_raises(TypeError, poly.polyfit, [], [1], 0)
assert_raises(TypeError, poly.polyfit, [1], [[[1]]], 0)
assert_raises(TypeError, poly.polyfit, [1, 2], [1], 0)
assert_raises(TypeError, poly.polyfit, [1], [1, 2], 0)
assert_raises(TypeError, poly.polyfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, poly.polyfit, [1], [1], 0, w=[1, 1])
assert_raises(ValueError, poly.polyfit, [1], [1], [-1, ])
assert_raises(ValueError, poly.polyfit, [1], [1], [2, -1, 6])
assert_raises(TypeError, poly.polyfit, [1], [1], [])
# Test fit
x = np.linspace(0, 2)
y = f(x)
#
coef3 = poly.polyfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(poly.polyval(x, coef3), y)
coef3 = poly.polyfit(x, y, [0, 1, 2, 3])
assert_equal(len(coef3), 4)
assert_almost_equal(poly.polyval(x, coef3), y)
#
coef4 = poly.polyfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(poly.polyval(x, coef4), y)
coef4 = poly.polyfit(x, y, [0, 1, 2, 3, 4])
assert_equal(len(coef4), 5)
assert_almost_equal(poly.polyval(x, coef4), y)
#
coef2d = poly.polyfit(x, np.array([y, y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
coef2d = poly.polyfit(x, np.array([y, y]).T, [0, 1, 2, 3])
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
# test weighting
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
yw[0::2] = 0
wcoef3 = poly.polyfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
wcoef3 = poly.polyfit(x, yw, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef3, coef3)
#
wcoef2d = poly.polyfit(x, np.array([yw, yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
wcoef2d = poly.polyfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
# test scaling with complex values x points whose square
# is zero when summed.
x = [1, 1j, -1, -1j]
assert_almost_equal(poly.polyfit(x, x, 1), [0, 1])
assert_almost_equal(poly.polyfit(x, x, [0, 1]), [0, 1])
# test fitting only even Polyendre polynomials
x = np.linspace(-1, 1)
y = f2(x)
coef1 = poly.polyfit(x, y, 4)
assert_almost_equal(poly.polyval(x, coef1), y)
coef2 = poly.polyfit(x, y, [0, 2, 4])
assert_almost_equal(poly.polyval(x, coef2), y)
assert_almost_equal(coef1, coef2)
def test_polytrim(self):
coef = [2, -1, 1, 0]
# Test exceptions
assert_raises(ValueError, poly.polytrim, coef, -1)
# Test results
assert_equal(poly.polytrim(coef), coef[:-1])
assert_equal(poly.polytrim(coef, 1), coef[:-3])
assert_equal(poly.polytrim(coef, 2), [0])
def test_polyline(self):
assert_equal(poly.polyline(3, 4), [3, 4])
if __name__ == "__main__":
run_module_suite()<|fim▁end|> | assert_almost_equal(trim(res), trim(tgt)) |
<|file_name|>kakasi.py<|end_file_name|><|fim▁begin|>#===============================================================================
import re
from untwisted.magic import sign
from channel import not_quiet
from runtime import later
import message
import util
import limit
import kakasi_lib
link, install, uninstall = util.LinkSet().triple()
#===============================================================================
'''
@link('MESSAGE')
@not_quiet()
def h_message(bot, id, target, msg):
if limit.mark_activity(bot, id): return
kakasi(bot, id, target or id.nick, msg, target is not None, auto=True)
@link('PROXY_MSG')
@not_quiet()
def h_proxy_message(
bot, id, target, msg, no_kakasi=False, no_auto=False, **kwds
):
if no_kakasi or no_auto: return
kakasi(bot, id, target, msg, target.startswith('#'), auto=True, **kwds)
'''
#===============================================================================
@link('HELP')
def h_help(bot, reply, args):
reply('romaji TEXT',
'Converts Japanese characters to romaji.')
@link(('HELP', 'romaji'))
@link(('HELP', 'rj'))
def h_help_romaji(bot, reply, args):
reply('romaji TEXT')
reply('rj TEXT',
'Shows the Hepburn romanisation(s) of Japanese characters occurring in TEXT,'
' according to KAKASI <http://kakasi.namazu.org>. The command prefix may be'
' omitted, leaving just TEXT, if the majority of TEXT is Japanese.')
@link('!romaji')
@link('!rj')
def h_romaji(bot, id, target, args, full_msg):
kakasi(bot, id, target or id.nick, args, target is not None)
#===============================================================================
def kakasi(bot, id, target, msg, prefix=True, auto=False, **kwds):
if auto and not kakasi_lib.is_ja(msg): return<|fim▁hole|> if auto and len(raw_reply) > 200: return
reply = ('<%s> %s' % (id.nick, raw_reply)) if prefix and id else raw_reply
bot.send_msg(target, reply)
bot.drive('runtime.later', sign(
'PROXY_MSG', bot, id, target, raw_reply, **dict(kwds, no_kakasi=True)))<|fim▁end|> | raw_reply = kakasi_lib.kakasi(msg) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import *
from . import views
from . import z_queries
from rockletonfortune import settings
from django.contrib.auth.views import login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.views.static import serve
from django.db import models
from .views import *
from .z_queries import *
urlpatterns = [
# Show index page
url(r'^$', login_required(views.index), name='index'),
# Show list of transactions
url(r'ajax/transactions/$', login_required(z_queries.q_transaction_data_json), name='q_transaction_data_json'),
url(r'transactions/viz/$', login_required(views.viz_transaction_list), name = 'viz_transaction_list'),
url(r'transactions/$', login_required(views.transaction_list), name = 'transaction_list'),
# Add new transactions
url(r'transactions/new/$', login_required(AddTransactionView.as_view()), name = 'AddTransactionView'),
# Edit transactions
url(r'^transactions/edit/(?P<pk>\d+)/$', login_required(UpdateTransactionView.as_view()), name= 'UpdateTransactionView'),
#FUTURE: There's got to be a better way to handle this 3 part import workflow
#Point to the file to import
url(r'transactions/import/$', login_required(views.import_transactions), name = 'transaction_import_list'),
#Select transactions to import as new or to replace existing
url(r'transactions/import/input/$', login_required(views.import_transaction_input), name = 'transaction_import_input'),
#Enter the percentage per transaction
url(r'transactions/import/save/$', login_required(views.import_transaction_save), name = 'transaction_import_save'),
# Export transactions to csv for Google Sheets
url(r'transactions/csv/$', login_required(views.export_csv), name = 'export_csv'),<|fim▁hole|> #Fetch the weekly spend summary per secondary and primary categories
url('ajax/budget/$', login_required(z_queries.q_budget_view_json), name='q_budget_view_json'),
#Template to show weekly spend summary per secondary and primary categories
url(r'budget/$', login_required(views.budget_view), name = 'budget_view'),
# Media root for js libraries (d3, jquery, css, etc.)
#url(r'^media/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT}),
# Test Area
url('test/$', views.testview, kwargs={'variable': 'there'}, name='test'),
url('transfer/$', views.transfer_amount, name='transfer'),
url(r'^login/$', auth_views.login, name = 'login'),
url(r'^logout/$', auth_views.logout, name = 'logout'),
]<|fim▁end|> |
# Edit the weekly/monthly allocation per secondary transaction
url(r'budget/edit/$', login_required(views.budget_edit), name = 'budget_edit'),
|
<|file_name|>module.js<|end_file_name|><|fim▁begin|>// Copyright 2015 Peter Beverloo. All rights reserved.
// Use of this source code is governed by the MIT license, a copy of which can
// be found in the LICENSE file.
<|fim▁hole|>export class Module {
constructor(env) {
this.env_ = env;
let decoratedRoutes = Object.getPrototypeOf(this).decoratedRoutes_;
if (!decoratedRoutes)
return;
decoratedRoutes.forEach(route => {
env.dispatcher.addRoute(route.method, route.route_path, ::this[route.handler]);
});
}
};
// Annotation that can be used on modules to indicate that the annotated method is the request
// handler for a |method| (GET, POST) request for |route_path|.
export function route(method, route_path) {
return (target, handler) => {
if (!target.hasOwnProperty('decoratedRoutes_'))
target.decoratedRoutes_ = [];
target.decoratedRoutes_.push({ method, route_path, handler });
};
}<|fim▁end|> | // Base class for a module. Stores the environment and handles magic such as route annotations. |
<|file_name|>PopulatePackages.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Populate package directories.
#
# python PopulatePackages.py < packages.csv
#
# The input packages.csv table must have these columns:
#
# Package Name,Directory Name,Prefixes,File Numbers,File Names,Globals
#
# Rows with an empty package name specify additional prefixes and
# globals for the most recently named package. Prepend '!' to exclude
# a prefix.
#
#---------------------------------------------------------------------------
# Copyright 2011 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
import sys
import os
import csv
import glob
class Package:
def __init__(self, name, path):
self.name = name
self.path = path.strip().replace('/',os.path.sep)
self.included = set()
self.excluded = set()
self.globals = set()
def add_namespace(self, ns):
if ns:
if ns[0] in ('-','!'):
self.excluded.add(ns[1:])
else:
self.included.add(ns)
def add_number(self, n):
if n:
if n[0] == '.':
n = '0' + n
self.globals.add(n) # numbers work just like globals
def add_global(self, g):
if g:
self.globals.add(g)
def order_long_to_short(l,r):
if len(l) > len(r):
return -1
elif len(l) < len(r):
return +1
else:
return cmp(l,r)
def place(src,dst):
sys.stdout.write('%s => %s\n' % (src,dst))
d = os.path.dirname(dst)
if d:
try: os.makedirs(d)
except OSError: pass
os.rename(src,dst)
#-----------------------------------------------------------------------------
def populate(input):
packages_csv = csv.DictReader(input)
# Parse packages and namespaces from CSV table on stdin.
packages = []
pkg = None
for fields in packages_csv:
if fields['Package Name']:
pkg = Package(fields['Package Name'], fields['Directory Name'])
packages.append(pkg)
if pkg:
pkg.add_namespace(fields['Prefixes'])
pkg.add_number(fields['File Numbers'])
pkg.add_global(fields['Globals'])
# Construct "namespace => path" map.
namespaces = {}
for p in packages:
for ns in p.included:
namespaces[ns] = p.path
for ns in p.excluded:
if not namespaces.has_key(ns):
namespaces[ns] = None
#-----------------------------------------------------------------------------
# Collect routines and globals in current directory.
routines = set(glob.glob('*.m'))<|fim▁hole|>
# Map by package namespace (prefix).
for ns in sorted(namespaces.keys(),order_long_to_short):
path = namespaces[ns]
gbls = [gbl for gbl in globals if gbl.startswith(ns)]
rtns = [rtn for rtn in routines if rtn.startswith(ns)]
if (rtns or gbls) and not path:
sys.stderr.write('Namespace "%s" has no path!\n' % ns)
continue
routines.difference_update(rtns)
globals.difference_update(gbls)
for src in sorted(rtns):
place(src,os.path.join(path,'Routines',src))
for src in sorted(gbls):
place(src,os.path.join(path,'Globals',src))
# Map globals explicitly listed in each package.
for p in packages:
gbls = [gbl for gbl in globals
if gbl[:-4].split('+')[0].split('-')[0] in p.globals]
globals.difference_update(gbls)
for src in sorted(gbls):
place(src,os.path.join(p.path,'Globals',src))
# Put leftover routines and globals in Uncategorized package.
for src in routines:
place(src,os.path.join('Uncategorized','Routines',src))
for src in globals:
place(src,os.path.join('Uncategorized','Globals',src))
def main():
populate(sys.stdin)
if __name__ == '__main__':
main()<|fim▁end|> | globals = set(glob.glob('*.zwr'))
#----------------------------------------------------------------------------- |
<|file_name|>parametric_attention_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for parametric_attention."""
import numpy as np
import tensorflow as tf
from multiple_user_representations.models import parametric_attention
class ParametricAttentionTest(tf.test.TestCase):
def test_parametric_attention_model_with_single_representation(self):
model = parametric_attention.SimpleParametricAttention(
output_dimension=2,
input_embedding_dimension=2,
vocab_size=10,
num_representations=1,
max_sequence_size=20)
input_batch = tf.convert_to_tensor(
np.random.randint(low=0, high=10, size=(10, 20)))
output = model(input_batch)
self.assertIsInstance(model, tf.keras.Model)
self.assertSequenceEqual(output.numpy().shape, [10, 1, 2])
def test_parametric_attention_model_with_multiple_representations(self):
model = parametric_attention.SimpleParametricAttention(
output_dimension=2,
input_embedding_dimension=2,
vocab_size=10,
num_representations=3,
max_sequence_size=20)<|fim▁hole|>
input_batch = tf.convert_to_tensor(
np.random.randint(low=0, high=10, size=(10, 20)))
output = model(input_batch)
self.assertIsInstance(model, tf.keras.Model)
self.assertSequenceEqual(output.numpy().shape, [10, 3, 2])
if __name__ == '__main__':
tf.test.main()<|fim▁end|> | |
<|file_name|>NWStreamBlockVideo.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of NWFramework.
* Copyright (c) InCrew Software and Others.
* (See the AUTHORS file in the root of this distribution.)
*
* NWFramework is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* NWFramework is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with NWFramework; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "PchNWStream.h"
#include "NWStreamBlockVideo.h"
#include <memory.h>
//********************************************************************
//
//********************************************************************
//--------------------------------------------------------------------
//
//--------------------------------------------------------------------
NWStreamBlockVideo::NWStreamBlockVideo() : Inherited(),
mFrameBuffer(0),
mWidth(0),
mHeight(0),
mStride(0)
{
}
//********************************************************************
//
//********************************************************************
//--------------------------------------------------------------------
//
//--------------------------------------------------------------------
bool NWStreamBlockVideo::init()
{
bool bOK = true;
if (!isOk())
{
mFrameBuffer = 0;
mWidth = 0;
mHeight = 0;
mStride = 0;
bOK = Inherited::init(NWSTREAM_SUBTYPE_MEDIA_VIDEO);
}
return bOK;
}
//--------------------------------------------------------------------
//
//--------------------------------------------------------------------
void NWStreamBlockVideo::done()
{
if (isOk())
{
DISPOSE_ARRAY(mFrameBuffer);
Inherited::done();
}
}
//--------------------------------------------------------------------
//
//--------------------------------------------------------------------
void NWStreamBlockVideo::setFrameBufferData(int _width, int _height, int _stride, unsigned char* _frameBuffer, bool _copy)
{
if ( _frameBuffer && _copy )
{
int frameBufferSize = _height*_stride;
ASSERT(_stride >= (_height*3));
mFrameBuffer = NEW unsigned char[frameBufferSize];
memcpy(mFrameBuffer,_frameBuffer,frameBufferSize);
<|fim▁hole|> mFrameBuffer = _frameBuffer;
}
mWidth = _width;
mHeight = _height;
mStride = _stride;
}<|fim▁end|> | }
else
{
|
<|file_name|>spline.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2015 Airbus
# Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and<|fim▁hole|>
class SPLINE:
Ortho = 'ortho'<|fim▁end|> | # limitations under the License. |
<|file_name|>BudgetsPage.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { Component } from 'react';
import { Button, Checkbox, Icon, Table, Input, Container } from 'semantic-ui-react'
import { observable, action } from "mobx";
import { observer } from "mobx-react";
import { BudgetEditorModal } from '../BudgetEditorModal';
import { lazyInject } from "../../inversify-container";
import { TYPES } from "../../inversify-types";
import { BudgetStore } from "../../stores/budgets/BudgetStore";
import { BudgetTableRow } from "../BudgetTableRow";
import { Budget } from "../../stores/budgets/model/Budget";
import { BudgetRequest } from "../../stores/budgets/model/BudgetRequest";
import * as moment from 'moment';
import { DeleteConfirmModal } from "../DeleteConfirmModal";
import { authorized } from "../authorized";
@authorized({ authenticated: true })
@observer
export class BudgetsPage extends Component<{}, {}> {
@lazyInject(TYPES.BudgetStore) private budgetStore: BudgetStore;
@observable editing?: BudgetRequest | Budget;
@observable budgetToDelete?: Budget;
@action onBudgetEditClick = (budget: Budget) => {
this.editing = budget.copy();
}
@action onBudgetDeleteClick = (budget: Budget) => {
this.budgetToDelete = budget;
}
@action onBudgetCreateClick = () => {
this.editing = new BudgetRequest({ from: moment().startOf('year'), to: moment().endOf('year') });
}
@action onModalClose = (budget: Budget | BudgetRequest) => {
this.editing = undefined;
}
@action onModalSave = (budget: Budget | BudgetRequest) => {
this.budgetStore.save(budget);
this.editing = undefined;
}
@action onDeleteAccept = async () => {
if (!this.budgetToDelete) {
return;
}
await this.budgetStore.delete(this.budgetToDelete);<|fim▁hole|> this.budgetToDelete = undefined;
}
render() {
const rows = this.budgetStore.budgets.map(it => <BudgetTableRow key={it.id} budget={it} onEditClick={this.onBudgetEditClick} onDeleteClick={this.onBudgetDeleteClick} />)
const deleteMessage = this.budgetToDelete ? 'Budget ' + this.budgetToDelete.from.format('YYYY-MM') + ' to ' + this.budgetToDelete.to.format('YYYY-MM') : undefined;
return (
<Container className="BudgetsPage">
<BudgetEditorModal budget={this.editing} onSave={this.onModalSave} onCancel={this.onModalClose} />
<DeleteConfirmModal message={deleteMessage} onAccept={this.onDeleteAccept} onReject={this.onDeleteReject} />
<Table celled compact>
<Table.Header fullWidth>
<Table.Row>
<Table.HeaderCell>From</Table.HeaderCell>
<Table.HeaderCell>To</Table.HeaderCell>
<Table.HeaderCell>Created By</Table.HeaderCell>
<Table.HeaderCell />
</Table.Row>
</Table.Header>
<Table.Body>
{rows}
</Table.Body>
<Table.Footer fullWidth>
<Table.Row>
<Table.HeaderCell />
<Table.HeaderCell colSpan="4">
<Button icon labelPosition="left" floated="right" primary size="small" onClick={this.onBudgetCreateClick}><Icon name="plus" /> Add Budget</Button>
</Table.HeaderCell>
</Table.Row>
</Table.Footer>
</Table>
</Container>
)
}
}<|fim▁end|> | this.budgetToDelete = undefined;
}
@action onDeleteReject = () => { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<https://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from . import core, flrw, funcs, parameter, units, utils
from . import io # needed before 'realizations' # isort: split
from . import realizations
from .core import *
from .flrw import *
from .funcs import *<|fim▁hole|>from .utils import *
__all__ = (core.__all__ + flrw.__all__ # cosmology classes
+ realizations.__all__ # instances thereof
+ funcs.__all__ + parameter.__all__ + utils.__all__) # utils<|fim▁end|> | from .parameter import *
from .realizations import * |
<|file_name|>ui.js<|end_file_name|><|fim▁begin|>var ui = (function () {
function buildOnlineUsersList(users) {
var div = '<h1>Online users</h1>'
var ul = '<ul id="online-users">';
for (var i = users.length - 1; i >= 0; i--) {
var li = $('<li class = "online-user" data-username=' + users[i].username + '/>');
li.text(users[i].username);
li.prepend('<img src="' + users[i].profilePicture + '" width="30" height = "50" />');
if (users[i].messagesState)
{
li.addClass("unread");
}
ul +=li[0].outerHTML;
}
ul += '</ul>';
div += ul;
//div += '</div>';
return div;
}
function buildMessages(users) {
var div = '';
div += '<form id="form-send-message">' +
'<input type = "text" name="content" autofocus/>' +
'</form>';
//div += '<form id="sendFileForm" enctype="multipart/form-data">' +
// '<input name="file" type="file" />' +
// '<input type="submit" id="UploadFile" value="Upload" />' +
// '</form>';
var ul = '<ul id="user-messages">';
for (var i = users.length - 1; i >= 0; i--) {
var li;
if (users[i].content) {
li = appendRecievedMessage(users[i].content, users[i].sender.username);
} else {
li = appendRecievedFile(users[i].filePath, users[i].sender.username);
}
ul += li;
}
ul += '</ul>';
div += ul
div += '<form id="sendFileForm" enctype="multipart/form-data">' +
'<input name="file" type="file" id="chooseFile" /><br/>' +
'<input type="submit" id="UploadFile" value="Upload" />' +
'</form>';
//div += '</div>';
return div;
}
function appendRecievedMessage(messageContent, senderUsername) {
var li = '<li class = "message">';
li += '<div><h2>' + senderUsername + ': ';
<|fim▁hole|> else {
li += '<span class="red-message">';
}
li += messageContent + '</span>' + '</h2></div>';
li += '</li>';
return li;
//var li = '<li class = "message">';
//li += '<div><h2>Sender: ' + senderUsername + '</h2></div>';
//li += '<div class="message-content"><h2>Message: ' + messageContent + '</h2></div>';
//li += '</li>';
//return li;
}
function appendRecievedFile(messageContent, senderUsername) {
var li = '<li class = "message">';
li += '<h2>' + senderUsername ;
li += ': <a href ="' + messageContent + '">CLICK ME</a></h2>';
li += '</li>';
return li;
}
function getProfileInfo(username) {
var div = '';
div += '<h2>' + username + '</h2>';
div += '<img id= "profile-picture" src = "" width= "70" height = "50"/>';
return div;
}
return {
buildOnlineUsersList: buildOnlineUsersList,
buildMessages: buildMessages,
appendRecievedMessage: appendRecievedMessage,
getProfileInfo: getProfileInfo,
appendRecievedFile: appendRecievedFile
};
}());<|fim▁end|> | // Adding colors to the messages
if (localStorage.getItem("username") == senderUsername) {
li += '<span class="blue-message">';
} |
<|file_name|>range_ex.py<|end_file_name|><|fim▁begin|># range_ex.py Test of asynchronous mqtt client with clean session False.
# Extended version publishes SSID
# (C) Copyright Peter Hinch 2017-2019.
# Released under the MIT licence.
# Public brokers https://github.com/mqtt/mqtt.github.io/wiki/public_brokers
# This demo is for wireless range tests. If OOR the red LED will light.
# In range the blue LED will pulse for each received message.
# Uses clean sessions to avoid backlog when OOR.
# red LED: ON == WiFi fail
# blue LED pulse == message received
# Publishes connection statistics.
from mqtt_as import MQTTClient, config
from config import wifi_led, blue_led
import uasyncio as asyncio
import network
import gc
TOPIC = 'shed' # For demo publication and last will use same topic
outages = 0
rssi = -199 # Effectively zero signal in dB.
async def pulse(): # This demo pulses blue LED each time a subscribed msg arrives.
blue_led(True)
await asyncio.sleep(1)
blue_led(False)
def sub_cb(topic, msg, retained):
print((topic, msg))
asyncio.create_task(pulse())
# The only way to measure RSSI is via scan(). Alas scan() blocks so the code
# causes the obvious uasyncio issues.
async def get_rssi():
global rssi
s = network.WLAN()
ssid = config['ssid'].encode('UTF8')
while True:
try:
rssi = [x[3] for x in s.scan() if x[0] == ssid][0]
except IndexError: # ssid not found.
rssi = -199
await asyncio.sleep(30)
<|fim▁hole|> global outages
wifi_led(not state) # Light LED when WiFi down
if state:
print('We are connected to broker.')
else:
outages += 1
print('WiFi or broker is down.')
await asyncio.sleep(1)
async def conn_han(client):
await client.subscribe('foo_topic', 1)
async def main(client):
try:
await client.connect()
except OSError:
print('Connection failed.')
return
n = 0
s = '{} repubs: {} outages: {} rssi: {}dB free: {}bytes'
while True:
await asyncio.sleep(5)
gc.collect()
m = gc.mem_free()
print('publish', n)
# If WiFi is down the following will pause for the duration.
await client.publish(TOPIC, s.format(n, client.REPUB_COUNT, outages, rssi, m), qos = 1)
n += 1
# Define configuration
config['subs_cb'] = sub_cb
config['wifi_coro'] = wifi_han
config['will'] = (TOPIC, 'Goodbye cruel world!', False, 0)
config['connect_coro'] = conn_han
config['keepalive'] = 120
# Set up client. Enable optional debug statements.
MQTTClient.DEBUG = True
client = MQTTClient(config)
asyncio.create_task(get_rssi())
try:
asyncio.run(main(client))
finally: # Prevent LmacRxBlk:1 errors.
client.close()
blue_led(True)
asyncio.new_event_loop()<|fim▁end|> | async def wifi_han(state): |
<|file_name|>auth.module.js<|end_file_name|><|fim▁begin|>/**
* @author shirishgoyal
* created on 16.12.2015
*/
(function () {
'use strict';
angular.module('BlurAdmin.pages.auth', [
'BlurAdmin.services'
])
.config(routeConfig);
/** @ngInject */
function routeConfig($stateProvider) {
$stateProvider
.state('auth', {
url: '/auth',
abstract: true,
views: {
'full_screen': {
templateUrl: 'static/app/pages/auth/auth.html'
}
},
authenticate: false
})
// .state('auth.register', {
// url: '/register',
// templateUrl: 'static/app/pages/auth/register.html',
// controller: 'RegisterPageCtrl',
// controllerAs: 'vm'
// })<|fim▁hole|> // templateUrl: 'static/app/pages/auth/login.html',
// controller: 'LoginPageCtrl',
// controllerAs: 'vm'
// })
;
}
})();<|fim▁end|> | // .state('auth.login', {
// url: '/login', |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate clap;
mod cli;
mod config;
mod core;
mod util;
use std::fs::File;
use std::io::prelude::*;
use dotenv::dotenv;
use crate::config::Config;
fn main() -> std::io::Result<()> {
dotenv().ok();
let matches = cli::app().get_matches();
let default_config_path = dirs::home_dir().unwrap().join(".ledge");
let config_path = matches
.value_of("config")
.unwrap_or(default_config_path.to_str().expect("No config file"));
<|fim▁hole|> config_file.read_to_string(&mut contents)?;
let mut config: Config = toml::from_str(&contents).expect("Failed to parse config file");
cli::exec(&mut config, &matches).unwrap();
Ok(())
}<|fim▁end|> | let mut config_file = File::open(config_path)?;
let mut contents = String::new(); |
<|file_name|>hitbtc_test.go<|end_file_name|><|fim▁begin|>package hitbtc
import (
"context"
"errors"
"log"
"net/http"
"os"
"sync"
"testing"
"time"
"github.com/gorilla/websocket"
"github.com/thrasher-corp/gocryptotrader/common"
"github.com/thrasher-corp/gocryptotrader/config"
"github.com/thrasher-corp/gocryptotrader/core"
"github.com/thrasher-corp/gocryptotrader/currency"
exchange "github.com/thrasher-corp/gocryptotrader/exchanges"
"github.com/thrasher-corp/gocryptotrader/exchanges/asset"
"github.com/thrasher-corp/gocryptotrader/exchanges/kline"
"github.com/thrasher-corp/gocryptotrader/exchanges/order"
"github.com/thrasher-corp/gocryptotrader/exchanges/sharedtestvalues"
"github.com/thrasher-corp/gocryptotrader/exchanges/stream"
"github.com/thrasher-corp/gocryptotrader/portfolio/withdraw"
)
var h HitBTC
var wsSetupRan bool
// Please supply your own APIKEYS here for due diligence testing
const (
apiKey = ""
apiSecret = ""
canManipulateRealOrders = false
)
func TestMain(m *testing.M) {
h.SetDefaults()
cfg := config.GetConfig()
err := cfg.LoadConfig("../../testdata/configtest.json", true)
if err != nil {
log.Fatal("HitBTC load config error", err)
}
hitbtcConfig, err := cfg.GetExchangeConfig("HitBTC")
if err != nil {
log.Fatal("HitBTC Setup() init error")
}
hitbtcConfig.API.AuthenticatedSupport = true
hitbtcConfig.API.AuthenticatedWebsocketSupport = true
hitbtcConfig.API.Credentials.Key = apiKey
hitbtcConfig.API.Credentials.Secret = apiSecret
h.Websocket = sharedtestvalues.NewTestWebsocket()
err = h.Setup(hitbtcConfig)
if err != nil {
log.Fatal("HitBTC setup error", err)
}
os.Exit(m.Run())
}
func TestStart(t *testing.T) {
t.Parallel()
err := h.Start(nil)
if !errors.Is(err, common.ErrNilPointer) {
t.Fatalf("received: '%v' but expected: '%v'", err, common.ErrNilPointer)
}
var testWg sync.WaitGroup
err = h.Start(&testWg)
if err != nil {
t.Fatal(err)
}
testWg.Wait()
}
func TestGetOrderbook(t *testing.T) {
_, err := h.GetOrderbook(context.Background(), "BTCUSD", 50)
if err != nil {
t.Error("Test faild - HitBTC GetOrderbook() error", err)
}
}
func TestGetTrades(t *testing.T) {
_, err := h.GetTrades(context.Background(), "BTCUSD", "", "", 0, 0, 0, 0)
if err != nil {
t.Error("Test faild - HitBTC GetTradeHistory() error", err)
}
}
func TestGetChartCandles(t *testing.T) {
_, err := h.GetCandles(context.Background(),
"BTCUSD", "", "D1", time.Now().Add(-24*time.Hour), time.Now())
if err != nil {
t.Error("Test faild - HitBTC GetChartData() error", err)
}
}
func TestGetHistoricCandles(t *testing.T) {
currencyPair, err := currency.NewPairFromString("BTC-USD")
if err != nil {
t.Fatal(err)
}
startTime := time.Now().Add(-time.Hour * 24)
end := time.Now()
_, err = h.GetHistoricCandles(context.Background(),
currencyPair, asset.Spot, startTime, end, kline.OneMin)
if err != nil {
t.Fatal(err)
}
_, err = h.GetHistoricCandles(context.Background(),
currencyPair, asset.Spot, startTime, end, kline.Interval(time.Hour*7))
if err == nil {
t.Fatal("unexpected result")
}
}
func TestGetHistoricCandlesExtended(t *testing.T) {
currencyPair, err := currency.NewPairFromString("BTC-USD")
if err != nil {
t.Fatal(err)
}
startTime := time.Unix(1546300800, 0)
end := time.Unix(1577836799, 0)
_, err = h.GetHistoricCandlesExtended(context.Background(),
currencyPair, asset.Spot, startTime, end, kline.OneHour)
if err != nil {
t.Fatal(err)
}
_, err = h.GetHistoricCandlesExtended(context.Background(),
currencyPair, asset.Spot, startTime, end, kline.Interval(time.Hour*7))
if err == nil {
t.Fatal("unexpected result")
}
}
func TestGetCurrencies(t *testing.T) {
_, err := h.GetCurrencies(context.Background())
if err != nil {
t.Error("Test faild - HitBTC GetCurrencies() error", err)
}
}
func setFeeBuilder() *exchange.FeeBuilder {
return &exchange.FeeBuilder{
Amount: 1,
FeeType: exchange.CryptocurrencyTradeFee,
Pair: currency.NewPair(currency.ETH, currency.BTC),
PurchasePrice: 1,
FiatCurrency: currency.USD,
BankTransactionType: exchange.WireTransfer,
}
}
// TestGetFeeByTypeOfflineTradeFee logic test
func TestGetFeeByTypeOfflineTradeFee(t *testing.T) {
var feeBuilder = setFeeBuilder()
_, err := h.GetFeeByType(context.Background(), feeBuilder)
if err != nil {
t.Fatal(err)
}
if !areTestAPIKeysSet() {
if feeBuilder.FeeType != exchange.OfflineTradeFee {
t.Errorf("Expected %v, received %v", exchange.OfflineTradeFee, feeBuilder.FeeType)
}
} else {
if feeBuilder.FeeType != exchange.CryptocurrencyTradeFee {
t.Errorf("Expected %v, received %v", exchange.CryptocurrencyTradeFee, feeBuilder.FeeType)
}
}
}
func TestUpdateTicker(t *testing.T) {
pairs, err := currency.NewPairsFromStrings([]string{"BTC-USD", "XRP-USD"})
if err != nil {
t.Fatal(err)
}
h.CurrencyPairs.StorePairs(asset.Spot, pairs, true)
_, err = h.UpdateTicker(context.Background(),
currency.NewPair(currency.BTC, currency.USD),
asset.Spot)
if err != nil {
t.Error(err)
}
_, err = h.FetchTicker(context.Background(),
currency.NewPair(currency.XRP, currency.USD), asset.Spot)
if err != nil {
t.Error(err)
}
}
func TestUpdateTickers(t *testing.T) {
err := h.UpdateTickers(context.Background(), asset.Spot)
if err != nil {
t.Error(err)
}
}
func TestGetAllTickers(t *testing.T) {
_, err := h.GetTickers(context.Background())
if err != nil {
t.Error(err)
}
}
func TestGetSingularTicker(t *testing.T) {
_, err := h.GetTicker(context.Background(), "BTCUSD")
if err != nil {
t.Error(err)
}
}
func TestGetFee(t *testing.T) {
var feeBuilder = setFeeBuilder()
if areTestAPIKeysSet() {
// CryptocurrencyTradeFee Basic
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// CryptocurrencyTradeFee High quantity
feeBuilder = setFeeBuilder()
feeBuilder.Amount = 1000
feeBuilder.PurchasePrice = 1000
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// CryptocurrencyTradeFee IsMaker
feeBuilder = setFeeBuilder()
feeBuilder.IsMaker = true
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// CryptocurrencyTradeFee Negative purchase price
feeBuilder = setFeeBuilder()
feeBuilder.PurchasePrice = -1000
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// CryptocurrencyWithdrawalFee Basic
feeBuilder = setFeeBuilder()
feeBuilder.FeeType = exchange.CryptocurrencyWithdrawalFee
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// CryptocurrencyWithdrawalFee Invalid currency
feeBuilder = setFeeBuilder()
feeBuilder.Pair.Base = currency.NewCode("hello")
feeBuilder.FeeType = exchange.CryptocurrencyWithdrawalFee
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
}
// CryptocurrencyDepositFee Basic
feeBuilder = setFeeBuilder()
feeBuilder.FeeType = exchange.CryptocurrencyDepositFee
feeBuilder.Pair.Base = currency.BTC
feeBuilder.Pair.Quote = currency.LTC
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// InternationalBankDepositFee Basic
feeBuilder = setFeeBuilder()
feeBuilder.FeeType = exchange.InternationalBankDepositFee
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
// InternationalBankWithdrawalFee Basic
feeBuilder = setFeeBuilder()
feeBuilder.FeeType = exchange.InternationalBankWithdrawalFee
feeBuilder.FiatCurrency = currency.USD
if _, err := h.GetFee(context.Background(), feeBuilder); err != nil {
t.Error(err)
}
}
func TestFormatWithdrawPermissions(t *testing.T) {
expectedResult := exchange.AutoWithdrawCryptoText + " & " + exchange.NoFiatWithdrawalsText
withdrawPermissions := h.FormatWithdrawPermissions()
if withdrawPermissions != expectedResult {
t.Errorf("Expected: %s, Received: %s", expectedResult, withdrawPermissions)
}
}
func TestGetActiveOrders(t *testing.T) {
var getOrdersRequest = order.GetOrdersRequest{
Type: order.AnyType,
Pairs: []currency.Pair{currency.NewPair(currency.ETH, currency.BTC)},
AssetType: asset.Spot,
}
_, err := h.GetActiveOrders(context.Background(), &getOrdersRequest)
if areTestAPIKeysSet() && err != nil {
t.Errorf("Could not get open orders: %s", err)
} else if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
}
func TestGetOrderHistory(t *testing.T) {
var getOrdersRequest = order.GetOrdersRequest{
Type: order.AnyType,
AssetType: asset.Spot,
Pairs: []currency.Pair{currency.NewPair(currency.ETH, currency.BTC)},
}
_, err := h.GetOrderHistory(context.Background(), &getOrdersRequest)
if areTestAPIKeysSet() && err != nil {
t.Errorf("Could not get order history: %s", err)
} else if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
}
// Any tests below this line have the ability to impact your orders on the exchange. Enable canManipulateRealOrders to run them
// ----------------------------------------------------------------------------------------------------------------------------
func areTestAPIKeysSet() bool {
return h.ValidateAPICredentials()
}
func TestSubmitOrder(t *testing.T) {
if areTestAPIKeysSet() && !canManipulateRealOrders {
t.Skip("API keys set, canManipulateRealOrders false, skipping test")
}
var orderSubmission = &order.Submit{
Pair: currency.Pair{
Base: currency.DGD,
Quote: currency.BTC,
},
Side: order.Buy,
Type: order.Limit,
Price: 1,
Amount: 1,
ClientID: "meowOrder",
AssetType: asset.Spot,
}
response, err := h.SubmitOrder(context.Background(), orderSubmission)
if areTestAPIKeysSet() && (err != nil || !response.IsOrderPlaced) {
t.Errorf("Order failed to be placed: %v", err)
} else if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
}
func TestCancelExchangeOrder(t *testing.T) {
if areTestAPIKeysSet() && !canManipulateRealOrders {
t.Skip("API keys set, canManipulateRealOrders false, skipping test")
}
currencyPair := currency.NewPair(currency.LTC, currency.BTC)
var orderCancellation = &order.Cancel{
ID: "1",
WalletAddress: core.BitcoinDonationAddress,
AccountID: "1",
Pair: currencyPair,
AssetType: asset.Spot,
}
err := h.CancelOrder(context.Background(), orderCancellation)
if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
if areTestAPIKeysSet() && err != nil {
t.Errorf("Could not cancel orders: %v", err)
}
}
// TestCancelAllExchangeOrders checks bulk cancellation: with credentials every
// order must cancel cleanly; without them the call must error.
func TestCancelAllExchangeOrders(t *testing.T) {
	if areTestAPIKeysSet() && !canManipulateRealOrders {
		t.Skip("API keys set, canManipulateRealOrders false, skipping test")
	}
	currencyPair := currency.NewPair(currency.LTC, currency.BTC)
	var orderCancellation = &order.Cancel{
		ID:            "1",
		WalletAddress: core.BitcoinDonationAddress,
		AccountID:     "1",
		Pair:          currencyPair,
		AssetType:     asset.Spot,
	}
	resp, err := h.CancelAllOrders(context.Background(), orderCancellation)
	if !areTestAPIKeysSet() && err == nil {
		t.Error("Expecting an error when no keys are set")
	}
	if areTestAPIKeysSet() && err != nil {
		t.Errorf("Could not cancel orders: %v", err)
	}
	// resp.Status collects per-order failures; it is empty on the
	// zero-value response returned when the call errors without keys.
	if len(resp.Status) > 0 {
		t.Errorf("%v orders failed to cancel", len(resp.Status))
	}
}
// TestModifyOrder verifies that modifying an order with an empty modification
// request is rejected with an error.
func TestModifyOrder(t *testing.T) {
	if areTestAPIKeysSet() && !canManipulateRealOrders {
		t.Skip("API keys set, canManipulateRealOrders false, skipping test")
	}
	_, err := h.ModifyOrder(context.Background(),
		&order.Modify{AssetType: asset.Spot})
	if err == nil {
		t.Error("ModifyOrder() Expected error")
	}
}
func TestWithdraw(t *testing.T) {
withdrawCryptoRequest := withdraw.Request{
Exchange: h.Name,
Amount: -1,
Currency: currency.BTC,
Description: "WITHDRAW IT ALL",
Crypto: withdraw.CryptoRequest{
Address: core.BitcoinDonationAddress,
},
}
if areTestAPIKeysSet() && !canManipulateRealOrders {
t.Skip("API keys set, canManipulateRealOrders false, skipping test")
}
_, err := h.WithdrawCryptocurrencyFunds(context.Background(),
&withdrawCryptoRequest)
if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
if areTestAPIKeysSet() && err != nil {
t.Errorf("Withdraw failed to be placed: %v", err)
}
}
// TestWithdrawFiat verifies that fiat withdrawal is reported as unsupported
// by this exchange wrapper.
func TestWithdrawFiat(t *testing.T) {
	if areTestAPIKeysSet() && !canManipulateRealOrders {
		t.Skip("API keys set, canManipulateRealOrders false, skipping test")
	}
	var withdrawFiatRequest = withdraw.Request{}
	_, err := h.WithdrawFiatFunds(context.Background(), &withdrawFiatRequest)
	if err != common.ErrFunctionNotSupported {
		t.Errorf("Expected '%v', received: '%v'", common.ErrFunctionNotSupported, err)
	}
}
// TestWithdrawInternationalBank verifies that international bank withdrawal
// is reported as unsupported by this exchange wrapper.
func TestWithdrawInternationalBank(t *testing.T) {
	if areTestAPIKeysSet() && !canManipulateRealOrders {
		t.Skip("API keys set, canManipulateRealOrders false, skipping test")
	}
	var withdrawFiatRequest = withdraw.Request{}
	_, err := h.WithdrawFiatFundsToInternationalBank(context.Background(),
		&withdrawFiatRequest)
	if err != common.ErrFunctionNotSupported {
		t.Errorf("Expected '%v', received: '%v'", common.ErrFunctionNotSupported, err)
	}
}
// TestGetDepositAddress fetches a deposit address when credentials are
// available, and otherwise asserts the unauthenticated call fails.
func TestGetDepositAddress(t *testing.T) {
	if areTestAPIKeysSet() {
		_, err := h.GetDepositAddress(context.Background(), currency.XRP, "", "")
		if err != nil {
			t.Error("GetDepositAddress() error", err)
		}
	} else {
		_, err := h.GetDepositAddress(context.Background(), currency.BTC, "", "")
		if err == nil {
			t.Error("GetDepositAddress() error cannot be nil")
		}
	}
}
// setupWsAuth dials and authenticates the websocket connection once per test
// binary (guarded by wsSetupRan) so the authenticated Ws tests can share it.
func setupWsAuth(t *testing.T) {
	t.Helper()
	if wsSetupRan {
		return
	}
	// NOTE(review): && binds tighter than ||, so this skips when
	// (!enabled && !authSupport) || !keysSet — confirm that matches the
	// intended skip condition.
	if !h.Websocket.IsEnabled() && !h.API.AuthenticatedWebsocketSupport || !areTestAPIKeysSet() {
		t.Skip(stream.WebsocketNotEnabled)
	}
	var dialer websocket.Dialer
	err := h.Websocket.Conn.Dial(&dialer, http.Header{})
	if err != nil {
		t.Fatal(err)
	}
	go h.wsReadData()
	err = h.wsLogin()
	if err != nil {
		t.Fatal(err)
	}
	// A successful login produces no message within one second; any payload
	// arriving on DataHandler in that window is treated as a login failure.
	timer := time.NewTimer(time.Second)
	select {
	case loginError := <-h.Websocket.DataHandler:
		t.Fatal(loginError)
	case <-timer.C:
	}
	timer.Stop()
	wsSetupRan = true
}
// TestWsCancelOrder dials websocket, sends cancel request.
func TestWsCancelOrder(t *testing.T) {
	setupWsAuth(t)
	if !canManipulateRealOrders {
		t.Skip("canManipulateRealOrders false, skipping test")
	}
	// A bogus order ID is used so no live order can actually be cancelled.
	_, err := h.wsCancelOrder("ImNotARealOrderID")
	if err != nil {
		t.Fatal(err)
	}
}
// TestWsPlaceOrder dials websocket, sends order submission.
func TestWsPlaceOrder(t *testing.T) {
	setupWsAuth(t)
	if !canManipulateRealOrders {
		t.Skip("canManipulateRealOrders false, skipping test")
	}
	// Minimal 1 @ 1 limit buy on LTC/BTC over the authenticated socket.
	_, err := h.wsPlaceOrder(currency.NewPair(currency.LTC, currency.BTC),
		order.Buy.String(),
		1,
		1)
	if err != nil {
		t.Fatal(err)
	}
}
// TestWsReplaceOrder dials websocket, sends replace order request.
func TestWsReplaceOrder(t *testing.T) {
setupWsAuth(t)
if !canManipulateRealOrders {
t.Skip("canManipulateRealOrders false, skipping test")
}
_, err := h.wsReplaceOrder("ImNotARealOrderID", 1, 1)
if err != nil {
t.Fatal(err)
}
}
// TestWsGetActiveOrders dials websocket, sends get active orders request.
func TestWsGetActiveOrders(t *testing.T) {
setupWsAuth(t)
if _, err := h.wsGetActiveOrders(); err != nil {
t.Fatal(err)
}
}
// TestWsGetTradingBalance dials websocket, sends get trading balance request.
func TestWsGetTradingBalance(t *testing.T) {
setupWsAuth(t)
if _, err := h.wsGetTradingBalance(); err != nil {
t.Fatal(err)
}
}
// TestWsGetTradingBalance dials websocket, sends get trading balance request.
func TestWsGetTrades(t *testing.T) {
setupWsAuth(t)
_, err := h.wsGetTrades(currency.NewPair(currency.ETH, currency.BTC), 1000, "ASC", "id")
if err != nil {
t.Fatal(err)
}
}
// TestWsGetTradingBalance dials websocket, sends get trading balance request.
func TestWsGetSymbols(t *testing.T) {
setupWsAuth(t)
_, err := h.wsGetSymbols(currency.NewPair(currency.ETH, currency.BTC))
if err != nil {
t.Fatal(err)
}
}
// TestWsGetCurrencies dials websocket, sends get trading balance request.
func TestWsGetCurrencies(t *testing.T) {
setupWsAuth(t)
_, err := h.wsGetCurrencies(currency.BTC)
if err != nil {
t.Fatal(err)
}
}
func TestWsGetActiveOrdersJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"method": "activeOrders",
"params": [
{
"id": "4345613661",
"clientOrderId": "57d5525562c945448e3cbd559bd068c3",
"symbol": "BTCUSD",
"side": "sell",
"status": "new",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.013",
"price": "0.100000",
"cumQuantity": "0.000",
"postOnly": false,
"createdAt": "2017-10-20T12:17:12.245Z",
"updatedAt": "2017-10-20T12:17:12.245Z",
"reportType": "status"
}
]
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsGetCurrenciesJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": {
"id": "ETH",
"fullName": "Ethereum",
"crypto": true,
"payinEnabled": true,
"payinPaymentId": false,
"payinConfirmations": 2,
"payoutEnabled": true,
"payoutIsPaymentId": false,
"transferEnabled": true,
"delisted": false,
"payoutFee": "0.001"
},
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsGetSymbolsJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": {
"id": "ETHBTC",
"baseCurrency": "ETH",
"quoteCurrency": "BTC",
"quantityIncrement": "0.001",
"tickSize": "0.000001",
"takeLiquidityRate": "0.001",
"provideLiquidityRate": "-0.0001",
"feeCurrency": "BTC"
},
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsTicker(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"method": "ticker",
"params": {
"ask": "0.054464",
"bid": "0.054463",
"last": "0.054463",
"open": "0.057133",
"low": "0.053615",
"high": "0.057559",
"volume": "33068.346",
"volumeQuote": "1832.687530809",
"timestamp": "2017-10-19T15:45:44.941Z",
"symbol": "BTCUSD"
}
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsOrderbook(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"method": "snapshotOrderbook",
"params": {
"ask": [
{
"price": "0.054588",
"size": "0.245"
},
{
"price": "0.054590",
"size": "1.000"
},
{
"price": "0.054591",
"size": "2.784"
}
],
"bid": [
{
"price": "0.054558",
"size": "0.500"
},
{
"price": "0.054557",
"size": "0.076"
},
{
"price": "0.054524",
"size": "7.725"
}
],
"symbol": "BTCUSD",
"sequence": 8073827,
"timestamp": "2018-11-19T05:00:28.193Z"
}
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
pressXToJSON = []byte(`{
"jsonrpc": "2.0",
"method": "updateOrderbook",
"params": {
"ask": [
{
"price": "0.054590",
"size": "0.000"
},
{
"price": "0.054591",
"size": "0.000"
}
],
"bid": [
{
"price": "0.054504",
"size": "0.000"
}
],
"symbol": "BTCUSD",
"sequence": 8073830,
"timestamp": "2018-11-19T05:00:28.700Z"
}
}`)
err = h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsOrderNotification(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"method": "report",
"params": {
"id": "4345697765",
"clientOrderId": "53b7cf917963464a811a4af426102c19",
"symbol": "BTCUSD",
"side": "sell",
"status": "filled",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.001",
"price": "0.053868",
"cumQuantity": "0.001",
"postOnly": false,
"createdAt": "2017-10-20T12:20:05.952Z",
"updatedAt": "2017-10-20T12:20:38.708Z",
"reportType": "trade",
"tradeQuantity": "0.001",
"tradePrice": "0.053868",
"tradeId": 55051694,
"tradeFee": "-0.000000005"
}
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsSubmitOrderJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": {
"id": "4345947689",
"clientOrderId": "57d5525562c945448e3cbd559bd068c4",
"symbol": "BTCUSD",
"side": "sell",
"status": "new",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.001",
"price": "0.093837",
"cumQuantity": "0.000",
"postOnly": false,
"createdAt": "2017-10-20T12:29:43.166Z",
"updatedAt": "2017-10-20T12:29:43.166Z",
"reportType": "new"
},
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsCancelOrderJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": {
"id": "4345947689",
"clientOrderId": "57d5525562c945448e3cbd559bd068c4",
"symbol": "BTCUSD",
"side": "sell",
"status": "canceled",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.001",
"price": "0.093837",
"cumQuantity": "0.000",
"postOnly": false,
"createdAt": "2017-10-20T12:29:43.166Z",
"updatedAt": "2017-10-20T12:31:26.174Z",
"reportType": "canceled"
},
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsCancelReplaceJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": {
"id": "4346371528",
"clientOrderId": "9cbe79cb6f864b71a811402a48d4b5b2",
"symbol": "BTCUSD",
"side": "sell",
"status": "new",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.002",
"price": "0.083837",
"cumQuantity": "0.000",
"postOnly": false,
"createdAt": "2017-10-20T12:47:07.942Z",
"updatedAt": "2017-10-20T12:50:34.488Z",
"reportType": "replaced",
"originalRequestClientOrderId": "9cbe79cb6f864b71a811402a48d4b5b1"
},
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsGetTradesRequestResponse(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": [
{
"currency": "BCN",
"available": "100.000000000",
"reserved": "0"
},
{
"currency": "BTC",
"available": "0.013634021",
"reserved": "0"
},
{
"currency": "ETH",
"available": "0",
"reserved": "0.00200000"
}
],
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsGetActiveOrdersRequestJSON(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"result": [
{
"id": "4346371528",
"clientOrderId": "9cbe79cb6f864b71a811402a48d4b5b2",
"symbol": "BTCUSD",
"side": "sell",
"status": "new",
"type": "limit",
"timeInForce": "GTC",
"quantity": "0.002",
"price": "0.083837",
"cumQuantity": "0.000",
"postOnly": false,
"createdAt": "2017-10-20T12:47:07.942Z",
"updatedAt": "2017-10-20T12:50:34.488Z",
"reportType": "replaced",
"originalRequestClientOrderId": "9cbe79cb6f864b71a811402a48d4b5b1"
}
],
"id": 123
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
func TestWsTrades(t *testing.T) {
pressXToJSON := []byte(`{
"jsonrpc": "2.0",
"method": "snapshotTrades",
"params": {
"data": [
{
"id": 54469456,
"price": "0.054656",
"quantity": "0.057",
"side": "buy",
"timestamp": "2017-10-19T16:33:42.821Z"
},
{
"id": 54469497,
"price": "0.054656",
"quantity": "0.092",
"side": "buy",
"timestamp": "2017-10-19T16:33:48.754Z"
},
{
"id": 54469697,
"price": "0.054669",
"quantity": "0.002",
"side": "buy",
"timestamp": "2017-10-19T16:34:13.288Z"
}
],
"symbol": "BTCUSD"
}
}`)
err := h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
pressXToJSON = []byte(`{
"jsonrpc": "2.0",
"method": "updateTrades",
"params": {
"data": [
{
"id": 54469813,
"price": "0.054670",
"quantity": "0.183",
"side": "buy",
"timestamp": "2017-10-19T16:34:25.041Z"
}
],
"symbol": "BTCUSD"
}
} `)
err = h.wsHandleData(pressXToJSON)
if err != nil {
t.Error(err)
}
}
// Test_FormatExchangeKlineInterval checks the mapping from internal kline
// intervals to the exchange's interval strings, including the unsupported
// case which maps to the empty string.
func Test_FormatExchangeKlineInterval(t *testing.T) {
	testCases := []struct {
		name     string
		interval kline.Interval
		output   string
	}{
		{
			"OneMin",
			kline.OneMin,
			"M1",
		},
		{
			"OneDay",
			kline.OneDay,
			"D1",
		},
		{
			"SevenDay",
			kline.SevenDay,
			"D7",
		},
		{
			// Intervals without an exchange equivalent format to "".
			"AllOther",
			kline.OneMonth,
			"",
		},
	}
	for x := range testCases {
		test := testCases[x]
		t.Run(test.name, func(t *testing.T) {
			ret := h.FormatExchangeKlineInterval(test.interval)
			if ret != test.output {
				t.Fatalf("unexpected result return expected: %v received: %v", test.output, ret)
			}
		})
	}
}
func TestGetRecentTrades(t *testing.T) {
t.Parallel()
currencyPair, err := currency.NewPairFromString("BTCUSD")
if err != nil {
t.Fatal(err)
}<|fim▁hole|> _, err = h.GetRecentTrades(context.Background(), currencyPair, asset.Spot)
if err != nil {
t.Error(err)
}
}
// TestGetHistoricTrades fetches trade history over a recent window (where the
// exchange may legitimately report the feature unsupported) and an older
// one-hour window that must succeed.
func TestGetHistoricTrades(t *testing.T) {
	t.Parallel()
	currencyPair, err := currency.NewPairFromString("BTCUSD")
	if err != nil {
		t.Fatal(err)
	}
	_, err = h.GetHistoricTrades(context.Background(),
		currencyPair, asset.Spot, time.Now().Add(-time.Minute*15), time.Now())
	if err != nil && err != common.ErrFunctionNotSupported {
		t.Error(err)
	}
	// longer term
	_, err = h.GetHistoricTrades(context.Background(),
		currencyPair, asset.Spot,
		time.Now().Add(-time.Minute*60*200),
		time.Now().Add(-time.Minute*60*199))
	if err != nil {
		t.Error(err)
	}
}
<|file_name|>1d46e8d4483_add_mod_versioning.py<|end_file_name|><|fim▁begin|>"""Add mod versioning<|fim▁hole|>
"""
# revision identifiers, used by Alembic.
revision = '1d46e8d4483'
down_revision = '2650a2191fe'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Drop the per-mod ksp_version and keywords columns from ``mod``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('mod', 'ksp_version')
    op.drop_column('mod', 'keywords')
    ### end Alembic commands ###
def downgrade():
    """Restore the ksp_version and keywords columns (nullable, data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('mod', sa.Column('keywords', sa.VARCHAR(length=256), autoincrement=False, nullable=True))
    op.add_column('mod', sa.Column('ksp_version', sa.VARCHAR(length=16), autoincrement=False, nullable=True))
    ### end Alembic commands ###
Revision ID: 1d46e8d4483
Revises: 2650a2191fe
Create Date: 2014-06-10 01:29:49.567535 |
<|file_name|>storage.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
author: Jimmy
contact: [email protected]
file: storage.py
time: 2017/9/4 下午3:18
description:
'''
__author__ = 'Jimmy'
import pymongo
from ctp.ctp_struct import *
from bson import json_util as jsonb
from utils.tools import *
def _getDataBase():
    """Return the local MongoDB ``trade`` database.

    NOTE(review): a new MongoClient is created on every call — confirm this
    is acceptable for the call frequency of the insert helpers below.
    """
    client = pymongo.MongoClient(host='127.0.0.1', port=27017)
    return client.trade
# 报单回报 OnRtnOrder
def insertRtnOrder(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.order.insert(dict)
# 报单操作 OnErrRtnOrderAction
def insertErrRtnOrderAction(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.order_action.insert(dict)
# 输入报单操作 OnRspOrderAction
def insertRspOrderAction(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.input_order_action.insert(dict)
# 报单录入 OnRspOrderInsert OnErrRtnOrderInsert
def insertRspOrderInsert(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.input_order.insert(dict)
# 交易回报 OnRtnTrade
def insertRtnTrade(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.trade.insert(dict)
# 请求错误
def insertRspError(event):
db = _getDataBase()
dict = getStrategyInfo(event.dict)
db.error_info.insert(dict)
# db.CThostFtdcRspInfoField.insert(event.dict)
# Persist the arguments of an order-submission request.
def insertSendOrderArgs(event):
    """Store the send-order parameters, timestamped, in ``send_order``."""
    db = _getDataBase()
    # Enum members must be converted to strings before saving; keep only the
    # member name (the part after the dot of e.g. ``Direction.BUY``).
    direction = str(event.dict['direction']).split('.')[-1]
    event.dict['direction'] = direction
    price_type = str(event.dict['price_type']).split('.')[-1]
    event.dict['price_type'] = price_type
    # NOTE(review): stop_price is handled like an enum here; if it is ever a
    # numeric price, splitting on '.' keeps only the fractional digits —
    # confirm its actual type.
    stop_price = str(event.dict['stop_price']).split('.')[-1]
    event.dict['stop_price'] = stop_price
    contingent_condition = str(event.dict['contingent_condition']).split('.')[-1]
    event.dict['contingent_condition'] = contingent_condition
    event.dict = _insertTime(event.dict)
    db.send_order.insert(event.dict)
# 保存撤单参数
def insertCancelOrderArgs(event):
db = _getDataBase()
event.dict = _insertTime(event.dict)
db.cancel_order.insert(event.dict)
# 更新持仓
def insertPosition(event):
db = _getDataBase()
dict = _insertTime(event.dict)
db.position.insert(dict)
# 更新账户
def updateAccount(event):
db = _getDataBase()
dict = _insertTime(event.dict)
if db.account.find().count() > 0:
db.account.update({'AccountID': dict['AccountID']},{"$set": dict})
else:
db.account.insert(dict)
# Stamp a document with the current insert time.
def _insertTime(dict):
    """Add insert_date/insert_time/insert_msec fields in place and return *dict*."""
    for key, value in zip(('insert_date', 'insert_time', 'insert_msec'), getTime()):
        dict[key] = value
    return dict
def getStrategyInfo(dict):
    """Attach strategy name/id to *dict* by matching its OrderRef in send_order.

    The document is also timestamped via _insertTime. When no matching
    send_order record exists, both fields are set to '未知' (unknown).
    """
    db = _getDataBase()
    dict = _insertTime(dict)
    # Look up the original submission by order reference to recover which
    # strategy placed the order.
    result = list(db.send_order.find({'order_ref':int(dict['OrderRef'])}))
    if len(result) > 0:
        result = result[0]
        dict['strategy_name'] = result['strategy_name']
        dict['strategy_id'] = result['strategy_id']
    else:
        dict['strategy_name'] = '未知'
        dict['strategy_id'] = '未知'
    return dict
# 获取最大报单编号<|fim▁hole|> result = list(db.send_order.find({}).sort([('order_ref', -1)]).limit(1))
if len(result) > 0:
result = result[0]
return int(result['order_ref'])
else:
return 0
def getMaxOrderActionRef():
    """Return the largest recorded order_action_ref, or 0 when none exist."""
    db = _getDataBase()
    newest = list(db.cancel_order.find({}).sort([('order_action_ref', -1)]).limit(1))
    if not newest:
        return 0
    return int(newest[0]['order_action_ref'])
if __name__ == '__main__':
def updateAccount(event):
db = _getDataBase()
if db.account.find().count() > 0:
db.account.update({'AccountID': event.dict['AccountID']},
{"$set": event.dict})
else:
db.account.insert(event.dict)<|fim▁end|> | def getMaxOrderRef():
db = _getDataBase() |
<|file_name|>recipe_manager.py<|end_file_name|><|fim▁begin|>import os
from iotile.core.dev import ComponentRegistry
from iotile.ship.recipe import RecipeObject
from iotile.ship.exceptions import RecipeNotFoundError
class RecipeManager:
"""A class that maintains a list of installed recipes and recipe actions.
It allows fetching recipes by name and auotmatically building RecipeObjects
from textual descriptions.
The RecipeManager maintains a dictionary of RecipeAction objects that it
compiles from all installed iotile packages. It passes this dictionary to
any Recipe that is created from it so the recipe can find any recipe
actions that it needs.
The RecipeManager finds RecipeActions by looking for plugins that
are registered with pkg_resources.
"""
def __init__(self):
self._recipe_actions = {}
self._recipe_resources = {}
self._recipes = {}
reg = ComponentRegistry()
for name, action in reg.load_extensions('iotile.recipe_action', product_name='build_step'):
self._recipe_actions[name] = action
for name, resource in reg.load_extensions('iotile.recipe_resource', product_name='build_resource'):
self._recipe_resources[name] = resource
def is_valid_action(self, name):
"""Check if a name describes a valid action.
Args:
name (str): The name of the action to check
Returns:
bool: Whether the action is known and valid.
"""
return self._recipe_actions.get(name, None) is not None
def is_valid_recipe(self, recipe_name):
"""Check if a recipe is known and valid.
Args:
name (str): The name of the recipe to check
Returns:
bool: Whether the recipe is known and valid.
"""
return self._recipes.get(recipe_name, None) is not None
def add_recipe_folder(self, recipe_folder, whitelist=None):
"""Add all recipes inside a folder to this RecipeManager with an optional whitelist.
Args:
recipe_folder (str): The path to the folder of recipes to add.
whitelist (list): Only include files whose os.basename() matches something
on the whitelist
"""
if whitelist is not None:
whitelist = set(whitelist)
if recipe_folder == '':
recipe_folder = '.'
for yaml_file in [x for x in os.listdir(recipe_folder) if x.endswith('.yaml')]:
if whitelist is not None and yaml_file not in whitelist:
continue
recipe = RecipeObject.FromFile(os.path.join(recipe_folder, yaml_file), self._recipe_actions, self._recipe_resources)
self._recipes[recipe.name] = recipe
for ship_file in [x for x in os.listdir(recipe_folder) if x.endswith('.ship')]:
if whitelist is not None and ship_file not in whitelist:
continue
<|fim▁hole|> recipe = RecipeObject.FromArchive(os.path.join(recipe_folder, ship_file), self._recipe_actions, self._recipe_resources)
self._recipes[recipe.name] = recipe
def add_recipe_actions(self, recipe_actions):
"""Add additional valid recipe actions to RecipeManager
args:
recipe_actions (list): List of tuples. First value of tuple is the classname,
second value of tuple is RecipeAction Object
"""
for action_name, action in recipe_actions:
self._recipe_actions[action_name] = action
    def get_recipe(self, recipe_name):
        """Get a recipe by name.

        Args:
            recipe_name (str): The name of the recipe to fetch. Can be either the
                yaml file name or the name of the recipe.

        Returns:
            RecipeObject: The matching recipe.

        Raises:
            RecipeNotFoundError: If no recipe with that name is known.
        """
        if recipe_name.endswith('.yaml'):
            # A file path was given: parse it only to learn the recipe's
            # declared name, then resolve through the loaded-recipe registry.
            recipe = self._recipes.get(RecipeObject.FromFile(recipe_name, self._recipe_actions, self._recipe_resources).name)
        else:
            recipe = self._recipes.get(recipe_name)
        if recipe is None:
            raise RecipeNotFoundError("Could not find recipe", recipe_name=recipe_name, known_recipes=[x for x in self._recipes.keys()])
        return recipe
<|file_name|>overlap.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from ._overlap import _compute_overlap
__all__ = ['compute_overlap']
def compute_overlap(ilon, ilat, olon, olat):
"""Compute the overlap between two 'pixels' in spherical coordinates.
Parameters
----------
ilon : np.ndarray with shape (N, 4)
The longitudes (in radians) defining the four corners of the input pixel
ilat : np.ndarray with shape (N, 4)
The latitudes (in radians) defining the four corners of the input pixel
olon : np.ndarray with shape (N, 4)
The longitudes (in radians) defining the four corners of the output pixel<|fim▁hole|> olat : np.ndarray with shape (N, 4)
The latitudes (in radians) defining the four corners of the output pixel
Returns
-------
overlap : np.ndarray of length N
Pixel overlap solid angle in steradians
area_ratio : np.ndarray of length N
TODO
"""
ilon = np.asarray(ilon, dtype=np.float64)
ilat = np.asarray(ilat, dtype=np.float64)
olon = np.asarray(olon, dtype=np.float64)
olat = np.asarray(olat, dtype=np.float64)
return _compute_overlap(ilon, ilat, olon, olat)<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Watermarks documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 8 16:49:39 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
src_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'src')
sys.path.insert(0, src_dir)
import watermarks
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Watermarks'
copyright = u'2014, Vladimir Chovanec'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = watermarks.__version__
# The full version, including alpha/beta/rc tags.
release = watermarks.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
<|fim▁hole|>#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Watermarksdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Watermarks.tex', u'Watermarks Documentation',
u'Vladimir Chovanec', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'watermarks', u'Watermarks Documentation',
[u'Vladimir Chovanec'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Watermarks', u'Watermarks Documentation',
u'Vladimir Chovanec', 'Watermarks', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | # If false, no index is generated. |
<|file_name|>07_ExampleDoc.py<|end_file_name|><|fim▁begin|>#ImportModules
import ShareYourSystem as SYS
#define and get two children
MyParenter=SYS.ParenterClass(
).array(
[
['-Layers'],
['|First','|Second'],
['-Neurons'],
['|E','|I']
]
).command(
'+-.values+|.values',
'#call:parent',
_AfterWalkRigidBool=True
).command(
'+-.values+|.values',
{
'#bound:recruit':lambda _InstanceVariable:_InstanceVariable[
'/Top/NeuronsDict'
].__setitem__(
_InstanceVariable.ManagementTagStr,
_InstanceVariable
)
if _InstanceVariable['/^/ParentKeyStr']=="Neurons"
else None,
'/Top/LayersDict.__setitem__':{<|fim▁hole|> }
},
_AfterWalkRigidBool=True
)
#print
print('MyParenter.NeuronsDict.keys() is ')
SYS._print(MyParenter.NeuronsDict.keys())
#print
print('MyParenter.LayersDict.keys() is ')
SYS._print(MyParenter.LayersDict.keys())<|fim▁end|> | '#value:#map@get':["/~/ManagementTagStr",">>self"],
'#if':[
('/~/^/ParentKeyStr',SYS.operator.eq,"#direct:Layers")
] |
<|file_name|>step6_file.rs<|end_file_name|><|fim▁begin|>use std::rc::Rc;
//use std::collections::HashMap;
use fnv::FnvHashMap;
use itertools::Itertools;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate itertools;
extern crate fnv;
extern crate rustyline;
use rustyline::error::ReadlineError;
use rustyline::Editor;
#[macro_use]
mod types;
use types::{MalVal,MalArgs,MalRet,MalErr,error,format_error};
use types::MalVal::{Nil,Bool,Str,Sym,List,Vector,Hash,Func,MalFunc};
mod reader;
mod printer;
mod env;
use env::{Env,env_new,env_bind,env_get,env_set,env_sets};
#[macro_use]
mod core;
// read
fn read(str: &str) -> MalRet {
reader::read_str(str.to_string())
}
// eval
fn eval_ast(ast: &MalVal, env: &Env) -> MalRet {
match ast {
Sym(_) => Ok(env_get(&env, &ast)?),
List(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(list!(lst))
},
Vector(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(vector!(lst))
},
Hash(hm,_) => {
let mut new_hm: FnvHashMap<String,MalVal> = FnvHashMap::default();
for (k,v) in hm.iter() {
new_hm.insert(k.to_string(), eval(v.clone(), env.clone())?);
}
Ok(Hash(Rc::new(new_hm),Rc::new(Nil)))
},
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
let ret: MalRet;
'tco: loop {
ret = match ast.clone() {
List(l,_) => {
if l.len() == 0 { return Ok(ast); }
let a0 = &l[0];
match a0 {
Sym(ref a0sym) if a0sym == "def!" => {
env_set(&env, l[1].clone(), eval(l[2].clone(), env.clone())?)
},
Sym(ref a0sym) if a0sym == "let*" => {
env = env_new(Some(env.clone()));
let (a1, a2) = (l[1].clone(), l[2].clone());
match a1 {
List(ref binds,_) | Vector(ref binds,_) => {
for (b, e) in binds.iter().tuples() {
match b {
Sym(_) => {
let _ = env_set(&env, b.clone(),
eval(e.clone(), env.clone())?);
},
_ => {
return error("let* with non-Sym binding");
}
}
}
},
_ => {
return error("let* with non-List bindings");
}
};
ast = a2;
continue 'tco;
},
Sym(ref a0sym) if a0sym == "do" => {
match eval_ast(&list!(l[1..l.len()-1].to_vec()), &env)? {
List(_,_) => {
ast = l.last().unwrap_or(&Nil).clone();
continue 'tco;
},
_ => error("invalid do form"),
}
},
Sym(ref a0sym) if a0sym == "if" => {
let cond = eval(l[1].clone(), env.clone())?;
match cond {
Bool(false) | Nil if l.len() >= 4 => {
ast = l[3].clone();
continue 'tco;
},
Bool(false) | Nil => Ok(Nil),
_ if l.len() >= 3 => {
ast = l[2].clone();
continue 'tco;
},
_ => Ok(Nil)
}
},
Sym(ref a0sym) if a0sym == "fn*" => {
let (a1, a2) = (l[1].clone(), l[2].clone());
Ok(MalFunc{eval: eval, ast: Rc::new(a2), env: env,
params: Rc::new(a1), is_macro: false,
meta: Rc::new(Nil)})<|fim▁hole|> Sym(ref a0sym) if a0sym == "eval" => {
ast = eval(l[1].clone(), env.clone())?;
while let Some(ref e) = env.clone().outer {
env = e.clone();
}
continue 'tco;
},
_ => {
match eval_ast(&ast, &env)? {
List(ref el,_) => {
let ref f = el[0].clone();
let args = el[1..].to_vec();
match f {
Func(_,_) => f.apply(args),
MalFunc{ast: mast, env: menv, params, ..} => {
let a = &**mast;
let p = &**params;
env = env_bind(Some(menv.clone()), p.clone(), args)?;
ast = a.clone();
continue 'tco;
},
_ => error("attempt to call non-function"),
}
},
_ => {
error("expected a list")
}
}
}
}
},
_ => eval_ast(&ast, &env),
};
break;
} // end 'tco loop
ret
}
// print
fn print(ast: &MalVal) -> String {
ast.pr_str(true)
}
fn rep(str: &str, env: &Env) -> Result<String,MalErr> {
let ast = read(str)?;
let exp = eval(ast, env.clone())?;
Ok(print(&exp))
}
fn main() {
let mut args = std::env::args();
let arg1 = args.nth(1);
// `()` can be used when no completer is required
let mut rl = Editor::<()>::new();
if rl.load_history(".mal-history").is_err() {
println!("No previous history.");
}
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns() {
env_sets(&repl_env, k, v);
}
env_sets(&repl_env, "*ARGV*", list!(args.map(Str).collect()));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", &repl_env);
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", &repl_env);
// Invoked with arguments
if let Some(f) = arg1 {
match rep(&format!("(load-file \"{}\")",f), &repl_env) {
Ok(_) => std::process::exit(0),
Err(e) => {
println!("Error: {}", format_error(e));
std::process::exit(1);
}
}
}
// main repl loop
loop {
let readline = rl.readline("user> ");
match readline {
Ok(line) => {
rl.add_history_entry(&line);
rl.save_history(".mal-history").unwrap();
if line.len() > 0 {
match rep(&line, &repl_env) {
Ok(out) => println!("{}", out),
Err(e) => println!("Error: {}", format_error(e)),
}
}
},
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
Err(err) => {
println!("Error: {:?}", err);
break
}
}
}
}
// vim: ts=2:sw=2:expandtab<|fim▁end|> | }, |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#
# Copyright 2017 University of Southern California
#<|fim▁hole|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from distutils.core import setup
setup(name='youtubecli',
description='library for uploading a video file to YouTube',
version='0.1',
packages=['youtubecli'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License"); |
<|file_name|>0002_make_unique_per_user.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.db import migrations
class Migration(migrations.Migration):
    """Add uniqueness constraints to the subscriptions app.

    After this migration a (sub, article) pair may appear at most once in
    PersonalArticle, and an (owner, feed) pair at most once in Subscription.
    """

    dependencies = [
        ('subscriptions', '0001_initial'),
    ]

    operations = [
        # At most one PersonalArticle per (subscription, article) pair.
        migrations.AlterUniqueTogether(
            name='personalarticle',
            unique_together=set([('sub', 'article')]),
        ),
        # At most one Subscription per (owner, feed) pair.
        migrations.AlterUniqueTogether(
            name='subscription',
            unique_together=set([('owner', 'feed')]),
        ),
    ]
from __future__ import unicode_literals
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import cx_Freeze
import sys
<|fim▁hole|>
# Build a single executable from the main script; base=None gives a
# console application (no GUI base).
executables = [cx_Freeze.Executable("MusicCompiler.py", base=None)]

cx_Freeze.setup(
    name= "MusicCompiler",
    description = "Best Program Ever Known To Humanity.",
    author = "Space Sheep Enterprises",
    # Exclude stdlib packages the program does not use to shrink the frozen build.
    options = {"build_exe":{"excludes":["urllib","html","http","tkinter","socket","multiprocessing","threading","email","htmllib"]}},
    version = "1.0",
    executables = executables
)
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>g = 2<|fim▁hole|><|fim▁end|> | i = 2
<warning descr="Non-ASCII character 'ɡ' in the file, but no encoding declared">ɡ</warning> = 1
a = g + i |
<|file_name|>window_platform_event.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Demonstrates how to handle a platform-specific event not defined in
pyglet by subclassing Window. This is not for the faint-hearted!
A message will be printed to stdout when the following events are caught:
- On Mac OS X, the window drag region is clicked.
- On Windows, the display resolution is changed.
- On Linux, the window properties are changed.
'''<|fim▁hole|>import pyglet
# Check for Carbon (OS X)
try:
from pyglet.window.carbon import *
_have_carbon = True
except ImportError:
_have_carbon = False
# Check for Win32
try:
from pyglet.window.win32 import *
from pyglet.window.win32.constants import *
_have_win32 = True
except ImportError:
_have_win32 = False
# Check for Xlib (Linux)
try:
from pyglet.window.xlib import *
_have_xlib = True
except ImportError:
_have_xlib = False
# Subclass Window
class MyWindow(pyglet.window.Window):
    """Window subclass that hooks platform-specific events.

    Each handler below is only defined when the corresponding platform
    module imported successfully (see the _have_* flags above); pyglet
    dispatches the raw OS event to the decorated method.
    """
    if _have_carbon:
        @CarbonEventHandler(kEventClassWindow, kEventWindowClickDragRgn)
        def _on_window_click_drag_rgn(self, next_handler, event, data):
            # Mac OS X: the window's drag region was clicked.
            print 'Clicked drag rgn.'
            # Pass the event along so default drag behaviour still happens.
            carbon.CallNextEventHandler(next_handler, event)
            return noErr

    if _have_win32:
        @Win32EventHandler(WM_DISPLAYCHANGE)
        def _on_window_display_change(self, msg, lParam, wParam):
            # Windows: the display resolution changed.
            print 'Display resolution changed.'
            return 0

    if _have_xlib:
        @XlibEventHandler(xlib.PropertyNotify)
        def _on_window_property_notify(self, event):
            # Linux/X11: a property of the window changed.
            print 'Property notify.'
if __name__ == '__main__':
window = MyWindow()
pyglet.app.run()<|fim▁end|> | |
<|file_name|>EntityRenderEvent.java<|end_file_name|><|fim▁begin|>package de.gurkenlabs.litiengine.entities;
import de.gurkenlabs.litiengine.graphics.RenderEngine;
import java.awt.Graphics2D;
import java.util.EventObject;
/**
* This {@code EventObject} contains data about the rendering process of an entity.
*
* @see RenderEngine#renderEntity(Graphics2D, IEntity)
*/
public class EntityRenderEvent extends EventObject {
private static final long serialVersionUID = 6397005859146712222L;
private final transient Graphics2D graphics;
private final transient IEntity entity;
public EntityRenderEvent(final Graphics2D graphics, final IEntity entity) {
super(entity);
this.graphics = graphics;
this.entity = entity;
}
/**
* Gets the graphics object on which the entity is rendered.
*<|fim▁hole|> return this.graphics;
}
/**
* Get the entity involved with the rendering process.
*
* @return The entity involved with the rendering process.
*/
public IEntity getEntity() {
return this.entity;
}
}<|fim▁end|> | * @return The graphics object on which the entity is rendered.
*/
public Graphics2D getGraphics() { |
<|file_name|>ARDecoder.py<|end_file_name|><|fim▁begin|>__author__ = 'Marko Milutinovic'
"""
This class will implement an Arithmetic Coding decoder
"""
import array
import utils
import math
class ARDecoder:
BITS_IN_BYTE = 8
    def __init__(self, wordSize_, vocabularySize_, terminationSymbol_):
        """
        Initialize the object
        :param wordSize_: The word size (bits) that will be used for compression. Must be greater than 2 and less than 16
        :param vocabularySize_: The size of the vocabulary. Symbols run from 0 to (vocabularySize - 1)
        :param terminationSymbol_: Symbol which indicates the end of encoded data where decoding should stop. This is required to properly terminate decoding
        :raises Exception: if wordSize_ is not supported (utils.calculateMaxBytes returns 0)
        :return: None
        """
        self.mMaxDecodingBytes = utils.calculateMaxBytes(wordSize_)    # The max number of bytes we can decode before the statistics need to be re-normalized
        self.mVocabularySize = vocabularySize_
        self.mTerminationSymbol = terminationSymbol_

        if(self.mMaxDecodingBytes == 0):
            raise Exception("Invalid word size specified")

        self.mWordSize = wordSize_                                     # The tag word size
        self.mWordBitMask = 0x0000                                     # The word size bit-mask
        self.mWordMSBMask = (0x0000 | (1 << (self.mWordSize - 1)))     # The bit mask for the top bit of the word
        self.mWordSecondMSBMask = (0x0000 | (1 << (self.mWordSize - 2))) # The bit mask for the second most significant bit of the word

        # Create bit mask for the word size
        for i in range(0, self.mWordSize):
            self.mWordBitMask = (self.mWordBitMask << 1) | 0x0001

        # We are initializing with an assumption of a value of 1 for the count of each symbol.
        self.mSymbolCount = array.array('i', [1]*self.mVocabularySize)

        # Reset member variables that are not constant
        self.reset()
def reset(self):
""" Reset all the member variables that are not constant for the duration of the object life
:return: None
"""
self.mEncodedData = None # Holds the encoded data that we are un-compressing. Bytearray
self.mEncodedDataCount = 0 # Number of encoded bytes that we are un-compressing
self.mDecodedData = None # Holds the data being decoded
self.mDecodedDataLen = 0 # The number of symbols that have been decoded
self.mCurrentEncodedDataByteIndex = 0 # Index of the encoded data with are currently working with
self.mCurrentEncodedDataBit = 0 # The current bit of the current byte we are using from the encoded data bytearray
self.mTotalSymbolCount = self.mVocabularySize # The total number of symbols encountered
self.mLowerTag = 0 # The lower tag threshold
self.mUpperTag = self.mWordBitMask # The upper tag threshold
self.mCurrentTag = 0 # The current tag we are processing
# We are initializing with an assumption of a value of 1 for the count of each symbol
for i in range(0,self.mVocabularySize):
self.mSymbolCount[i] = 1
def _get_next_bit(self):
"""
Get the next bit from encoded data (MSB first). If we move past the current byte move index over to the next one.
Once there is no more data return None
:return: next bit value or None if there is no more data
"""<|fim▁hole|>
bitValue = (self.mEncodedData[self.mCurrentEncodedDataByteIndex] >> (self.BITS_IN_BYTE - 1 - self.mCurrentEncodedDataBit)) & 0x0001
self.mCurrentEncodedDataBit += 1
# If we have used all the bits in the current byte, move to the next byte
if(self.mCurrentEncodedDataBit == self.BITS_IN_BYTE):
self.mCurrentEncodedDataByteIndex += 1
self.mCurrentEncodedDataBit = 0
return bitValue
def _increment_count(self, indexToIncrement_):
"""
Update the count for the provided index. Update
the total symbol count as well. If we exceed the max symbol count normalize the stats
:param indexToIncrement_: The index which we are updating
:return: None
"""
self.mSymbolCount[indexToIncrement_] += 1
self.mTotalSymbolCount += 1
# If we have reached the max number of bytes, we need to normalize the stats to allow us to continue
if(self.mTotalSymbolCount >= self.mMaxDecodingBytes):
self._normalize_stats()
def _rescale(self):
"""
Perform required rescale operation on the upper, lower and current tags. The following scaling operations are performed:
E1: both the upper and lower ranges fall into the bottom half of full range [0, 0.5). First bit is 0 for both.
Shift out MSB for both and shift in 1 for upper tag and 0 for lower tag. Shift the current tag to left by 1 and move in next bit
E2: both the upper and lower ranges fall into the top half of full range [0.5, 1). First bit is 1 for both.
Shift out MSB for both and shift in 1 for upper tag and 0 for lower tag. Shift the current tag to left by 1 and move in next bit
E3: the upper and lower tag interval lies in the middle [0.25, 0.75). The second MSB of upper tag is 0 and the second bit of the lower tag is 1.
Complement second MSB bit of both and shift in 1 for upper tag and 0 for lower tag. Complement second MSB of the current tag, shift to the left by 1 and move in the next bit
:return:None
"""
sameMSB = ((self.mLowerTag & self.mWordMSBMask) == (self.mUpperTag & self.mWordMSBMask))
valueMSB = ((self.mLowerTag & self.mWordMSBMask) >> (self.mWordSize -1)) & 0x0001
tagRangeInMiddle = (((self.mUpperTag & self.mWordSecondMSBMask) == 0) and ((self.mLowerTag & self.mWordSecondMSBMask) == self.mWordSecondMSBMask))
while(sameMSB or tagRangeInMiddle):
# If the first bit is the same we need to perform E1 or E2 scaling. The same set of steps applies to both. If the range is in the middle we need to perform E3 scaling
if(sameMSB):
self.mLowerTag = (self.mLowerTag << 1) & self.mWordBitMask
self.mUpperTag = ((self.mUpperTag << 1) | 0x0001) & self.mWordBitMask
self.mCurrentTag = ((self.mCurrentTag << 1) | self._get_next_bit()) & self.mWordBitMask
elif(tagRangeInMiddle):
self.mLowerTag = (self.mLowerTag << 1) & self.mWordBitMask
self.mUpperTag = (self.mUpperTag << 1) & self.mWordBitMask
self.mCurrentTag = ((self.mCurrentTag << 1) | self._get_next_bit()) & self.mWordBitMask
self.mLowerTag = ((self.mLowerTag & (~self.mWordMSBMask)) | ((~self.mLowerTag) & self.mWordMSBMask))
self.mUpperTag = ((self.mUpperTag & (~self.mWordMSBMask)) | ((~self.mUpperTag) & self.mWordMSBMask))
self.mCurrentTag = ((self.mCurrentTag & (~self.mWordMSBMask)) | ((~self.mCurrentTag) & self.mWordMSBMask))
sameMSB = ((self.mLowerTag & self.mWordMSBMask) == (self.mUpperTag & self.mWordMSBMask))
valueMSB = ((self.mLowerTag & self.mWordMSBMask) >> (self.mWordSize -1)) & 0x0001
tagRangeInMiddle = (((self.mUpperTag & self.mWordSecondMSBMask) == 0) and ((self.mLowerTag & self.mWordSecondMSBMask) == self.mWordSecondMSBMask))
def _update_range_tags(self, currentSymbolIndex_, cumulativeCountSymbol_):
"""
Update the upper and lower tags according to stats for the incoming symbol
:param newSymbol_: Current symbol being encoded
:param cumulativeCountSymbol_: The cumulative count of the current symbol
:return: None
"""
prevLowerTag = self.mLowerTag
prevUpperTag = self.mUpperTag
rangeDiff = prevUpperTag - prevLowerTag
cumulativeCountPrevSymbol = cumulativeCountSymbol_ - self.mSymbolCount[currentSymbolIndex_]
self.mLowerTag = int((prevLowerTag + math.floor(((rangeDiff + 1)*cumulativeCountPrevSymbol))/self.mTotalSymbolCount))
self.mUpperTag = int((prevLowerTag + math.floor(((rangeDiff + 1)*cumulativeCountSymbol_))/self.mTotalSymbolCount - 1))
self._increment_count(currentSymbolIndex_)
def _normalize_stats(self):
"""
Divide the total count for each symbol by 2 but ensure each symbol count is at least 1.
Get new total symbol count from the entries
:return: None
"""
self.mTotalSymbolCount = 0
# Go through all the entries in the cumulative count array
for i in range(0, self.mVocabularySize):
value = int(self.mSymbolCount[i]/2)
# Ensure the count is at least 1
if(value == 0):
value = 1
self.mSymbolCount[i] = value
self.mTotalSymbolCount += value
def decode(self, encodedData_, encodedDataLen_, decodedData_, maxDecodedDataLen_):
"""
Decompress the data passed in. It is the responsibility of the caller to reset the decoder if required before
calling this function
:param encodedData_: The data that needs to be decoded (bytearray)
:param encodedDataLen_: The length of data that needs to be decoded
:param decodedData_: The decoded data (integer array)
:param maxDecodedDatalen_ : The max number of symbols that can be stored in decodedData_ array
:param firstDataBlock: If this is True then mCurrentTag must be loaded
:return: Returns the number of symbols stored in decodedData_
"""
# If the byte array is smaller than data length pass in throw exception
if(len(encodedData_) < encodedDataLen_):
raise Exception("Data passed in smaller than expected")
# If the byte array is smaller than data length pass in throw exception
if(len(decodedData_) < maxDecodedDataLen_):
raise Exception("Decompressed data byte array passed in smaller than expected")
self.mEncodedData = encodedData_
self.mEncodedDataCount = encodedDataLen_
self.mDecodedData = decodedData_
self.mDecodedDataLen = 0
self.mCurrentEncodedDataByteIndex = 0
self.mCurrentEncodedDataBit = 0
self.mCurrentTag = 0
# Load the first word size bits into the current tag
for i in range(0, self.mWordSize):
self.mCurrentTag = (self.mCurrentTag | (self._get_next_bit() << ((self.mWordSize - 1) - i)))
finished = False
# Until we have reached the end keep decompressing
while(not finished):
currentSymbol = 0
currentCumulativeCount = int(math.floor(((self.mCurrentTag - self.mLowerTag + 1)*self.mTotalSymbolCount - 1)/(self.mUpperTag - self.mLowerTag +1)))
symbolCumulativeCount = self.mSymbolCount[0]
while(currentCumulativeCount >= symbolCumulativeCount):
currentSymbol += 1
if(currentSymbol >= self.mVocabularySize):
raise Exception("Symbol count of out range")
symbolCumulativeCount += self.mSymbolCount[currentSymbol]
# If we have reached the termination symbol then decoding is finished, otherwise store the decompressed symbol
if(currentSymbol == self.mTerminationSymbol):
finished = True
else:
self.mDecodedData[self.mDecodedDataLen] = currentSymbol
self.mDecodedDataLen += 1
# If there is no more room extend the bytearray by BASE_OUT_SIZE bytes
if(self.mDecodedDataLen >= maxDecodedDataLen_):
raise Exception('Not enough space to store decoded data')
self._update_range_tags(currentSymbol, symbolCumulativeCount)
self._rescale()
return self.mDecodedDataLen<|fim▁end|> |
if(self.mCurrentEncodedDataByteIndex >= self.mEncodedDataCount):
raise Exception("Exceeded encoded data buffer") |
<|file_name|>link-input.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007-2022 Crafter Software Corporation. All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
CStudioForms.Controls.LinkInput =
CStudioForms.Controls.LinkInput ||
function (id, form, owner, properties, constraints, readonly) {
this.owner = owner;
this.owner.registerField(this);
this.errors = [];
this.properties = properties;
this.constraints = constraints;
this.inputEl = null;
this.patternErrEl = null;
this.countEl = null;
this.required = false;
this.value = '_not-set';
this.form = form;
this.id = id;
this.readonly = readonly;
return this;
};
YAHOO.extend(CStudioForms.Controls.LinkInput, CStudioForms.CStudioFormField, {
getLabel: function () {
return CMgs.format(langBundle, 'linkInput');
},
_onChange: function (evt, obj) {
obj.value = obj.inputEl.value;
var validationExist = false;
var validationResult = true;
if (obj.required) {
if (obj.inputEl.value == '') {
obj.setError('required', 'Field is Required');
validationExist = true;
validationResult = false;
} else {
obj.clearError('required');
}
} else {
var id = obj.id;
if (obj.inputEl.value != '') {
var fields = obj.owner.fields.forEach(function (item) {
var el = item;
var properties = item.properties.forEach(function (prop) {
if (prop.name == 'depends') {
if (id.includes(prop.value) && prop.value != '' && el.value == '_blank') {
el.required = true;
el.setError('required', 'Field is Required');
el.renderValidation(true, false);
var constraints = el.constraints.forEach(function (constr) {
if (constr.name == 'required') {
constr.value = 'true';
}
});
}
}
});
});
} else {
var fields = obj.owner.fields.forEach(function (item) {
var el = item;
var properties = item.properties.forEach(function (prop) {
if (prop.name == 'depends') {
if (id.includes(prop.value) && prop.value != '') {
el.required = false;
el.clearError('required');
el.renderValidation(false, false);
var constraints = el.constraints.forEach(function (constr) {
if (constr.name == 'required') {
constr.value = 'false';
}
});
}
}
});
});
}
}
if ((!validationExist && obj.inputEl.value != '') || (validationExist && validationResult)) {
for (var i = 0; i < obj.constraints.length; i++) {
var constraint = obj.constraints[i];
if (constraint.name == 'pattern') {
var regex = constraint.value;
if (regex != '') {
if (obj.inputEl.value.match(regex)) {
// only when there is no other validation mark it as passed
obj.clearError('pattern');
YAHOO.util.Dom.removeClass(obj.patternErrEl, 'on');
validationExist = true;
} else {
if (obj.inputEl.value != '') {
YAHOO.util.Dom.addClass(obj.patternErrEl, 'on');
}
obj.setError('pattern', 'The value entered is not allowed in this field.');
validationExist = true;
validationResult = false;
}
}
break;
}
}
}
// actual validation is checked by # of errors
// renderValidation does not require the result being passed
obj.renderValidation(validationExist, validationResult);
obj.owner.notifyValidation();
obj.form.updateModel(obj.id, obj.getValue());
},
_onChangeVal: function (evt, obj) {
obj.edited = true;
this._onChange(evt, obj);
},
  /**
   * Update the character counter and enforce the max length on key/mouse events.
   * @param evt the triggering DOM event (may be null when invoked programmatically)
   * @param countEl element that displays the "used / max" counter
   * @param el the input element; defaults to `this` when bound as a DOM handler
   */
  count: function (evt, countEl, el) {
    // 'this' is the input box when bound as an event handler
    el = el ? el : this;
    var text = el.value;
    var charCount = text.length ? text.length : el.textLength ? el.textLength : 0;
    // A maxlength of -1 means "unlimited"
    var maxlength = el.maxlength && el.maxlength != '' ? el.maxlength : -1;

    if (maxlength != -1) {
      if (charCount > el.maxlength) {
        // truncate if exceeds max chars
        if (charCount > el.maxlength) {
          this.value = text.substr(0, el.maxlength);
          charCount = el.maxlength;
        }

        if (
          evt &&
          evt != null &&
          evt.keyCode != 8 &&
          evt.keyCode != 46 &&
          evt.keyCode != 37 &&
          evt.keyCode != 38 &&
          evt.keyCode != 39 &&
          evt.keyCode != 40 && // arrow keys
          evt.keyCode != 88 &&
          evt.keyCode != 86
        ) {
          // Swallow further typing once full, but still allow backspace (8),
          // delete (46), arrow keys (37-40), and X (88) / V (86) for cut/paste.
          if (evt) YAHOO.util.Event.stopEvent(evt);
        }
      }
    }

    // Render "n / max" when a limit exists, otherwise just the count.
    if (maxlength != -1) {
      countEl.innerHTML = charCount + ' / ' + el.maxlength;
    } else {
      countEl.innerHTML = charCount;
    }
  },
render: function (config, containerEl) {
// we need to make the general layout of a control inherit from common
// you should be able to override it -- but most of the time it wil be the same
containerEl.id = this.id;
var titleEl = document.createElement('span');
YAHOO.util.Dom.addClass(titleEl, 'cstudio-form-field-title');
titleEl.textContent = config.title;
var controlWidgetContainerEl = document.createElement('div');
YAHOO.util.Dom.addClass(controlWidgetContainerEl, 'cstudio-form-control-link-input-container');
var validEl = document.createElement('span');
YAHOO.util.Dom.addClass(validEl, 'validation-hint');
YAHOO.util.Dom.addClass(validEl, 'cstudio-form-control-validation fa fa-check');
controlWidgetContainerEl.appendChild(validEl);
var inputEl = document.createElement('input');
this.inputEl = inputEl;
YAHOO.util.Dom.addClass(inputEl, 'datum');
YAHOO.util.Dom.addClass(inputEl, 'cstudio-form-control-input');
inputEl.value = (this.value = '_not-set') ? config.defaultValue : this.value;
controlWidgetContainerEl.appendChild(inputEl);
YAHOO.util.Event.on(
inputEl,
'focus',
function (evt, context) {
context.form.setFocusedField(context);
},
this<|fim▁hole|> YAHOO.util.Event.on(inputEl, 'change', this._onChangeVal, this);
YAHOO.util.Event.on(inputEl, 'blur', this._onChange, this);
for (var i = 0; i < config.properties.length; i++) {
var prop = config.properties[i];
if (prop.name == 'size') {
inputEl.size = prop.value;
}
if (prop.name == 'maxlength') {
inputEl.maxlength = prop.value;
}
if (prop.name == 'readonly' && prop.value == 'true') {
this.readonly = true;
}
}
if (this.readonly == true) {
inputEl.disabled = true;
}
var countEl = document.createElement('div');
YAHOO.util.Dom.addClass(countEl, 'char-count');
YAHOO.util.Dom.addClass(countEl, 'cstudio-form-control-input-count');
controlWidgetContainerEl.appendChild(countEl);
this.countEl = countEl;
var patternErrEl = document.createElement('div');
patternErrEl.innerHTML = 'The value entered is not allowed in this field.';
YAHOO.util.Dom.addClass(patternErrEl, 'cstudio-form-control-input-url-err');
controlWidgetContainerEl.appendChild(patternErrEl);
this.patternErrEl = patternErrEl;
YAHOO.util.Event.on(inputEl, 'keyup', this.count, countEl);
YAHOO.util.Event.on(inputEl, 'keypress', this.count, countEl);
YAHOO.util.Event.on(inputEl, 'mouseup', this.count, countEl);
this.renderHelp(config, controlWidgetContainerEl);
var descriptionEl = document.createElement('span');
YAHOO.util.Dom.addClass(descriptionEl, 'description');
YAHOO.util.Dom.addClass(descriptionEl, 'cstudio-form-field-description');
descriptionEl.textContent = config.description;
containerEl.appendChild(titleEl);
containerEl.appendChild(controlWidgetContainerEl);
containerEl.appendChild(descriptionEl);
},
getValue: function () {
return this.value;
},
  /**
   * Programmatically set the control's value: syncs the model value and the
   * input element, refreshes the character counter, and re-runs validation.
   * Clears the `edited` flag since this was not a user edit.
   */
  setValue: function (value) {
    this.value = value;
    this.inputEl.value = value;
    this.count(null, this.countEl, this.inputEl);
    this._onChange(null, this);
    this.edited = false;
  },
getName: function () {
return 'link-input';
},
getSupportedProperties: function () {
return [
{
label: CMgs.format(langBundle, 'displaySize'),
name: 'size',
type: 'int',
defaultValue: '50'
},
{
label: CMgs.format(langBundle, 'maxLength'),
name: 'maxlength',
type: 'int',
defaultValue: '50'
},
{ label: CMgs.format(langBundle, 'readonly'), name: 'readonly', type: 'boolean' },
{ label: 'Tokenize for Indexing', name: 'tokenize', type: 'boolean', defaultValue: 'false' }
];
},
getSupportedConstraints: function () {
return [
{ label: CMgs.format(langBundle, 'required'), name: 'required', type: 'boolean' },
{ label: CMgs.format(langBundle, 'matchPattern'), name: 'pattern', type: 'string' }
];
}
});
CStudioAuthoring.Module.moduleLoaded('cstudio-forms-controls-link-input', CStudioForms.Controls.LinkInput);<|fim▁end|> | );
|
<|file_name|>test_conversion.py<|end_file_name|><|fim▁begin|># Licensed under the GPLv3 - see LICENSE
import pytest
import numpy as np
import astropy.units as u
from astropy.time import Time
from .. import vdif
from .. import mark4
from .. import mark5b
from .. import dada
from ..base.encoding import EIGHT_BIT_1_SIGMA
from ..data import (SAMPLE_MARK4 as SAMPLE_M4, SAMPLE_MARK5B as SAMPLE_M5B,
SAMPLE_VDIF, SAMPLE_MWA_VDIF as SAMPLE_MWA, SAMPLE_DADA,
SAMPLE_BPS1_VDIF)
class TestVDIFMark5B:
"""Simplest conversion: VDIF frame containing Mark5B data (EDV 0xab)."""
def test_header(self):
"""Check Mark 5B header information can be stored in a VDIF header."""
with open(SAMPLE_M5B, 'rb') as fh:
# Start time kiloday is needed for Mark 5B to calculate time.
m5h1 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# For the payload, pass in how data is encoded.
m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
# A not-at-the-start header for checking times.
m5h2 = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
# Create VDIF headers based on both the Mark 5B header and payload.
header1 = vdif.VDIFHeader.from_mark5b_header(
m5h1, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
header2 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
for i, (m5h, header) in enumerate(((m5h1, header1), (m5h2, header2))):
assert m5h['frame_nr'] == i
# Check all direct information is set correctly.
assert all(m5h[key] == header[key] for key in m5h.keys())
assert header['mark5b_frame_nr'] == m5h['frame_nr']
assert header.kday == m5h.kday
# As well as the time calculated from the header information.
assert header.time == m5h.time
# Check information on the payload is also correct.
assert header.nchan == 8
assert header.bps == 2
assert not header['complex_data']
assert header.frame_nbytes == 10032
assert header.nbytes == 32
assert header.payload_nbytes == m5h.payload_nbytes
assert (header.samples_per_frame
== 10000 * 8 // m5pl.bps // m5pl.sample_shape.nchan)
# Check that we can handle > 512 Mbps sampling rate.
header3 = vdif.VDIFHeader.from_mark5b_header(
m5h2, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps,
sample_rate=64*u.MHz)
assert header3.time == header2.time
assert header3['frame_nr'] == m5h2['frame_nr']
# A copy might remove any `kday` keywords set, but should still work
# (Regression test for #34)
header_copy = header2.copy()
assert header_copy == header2
header_copy.verify()
# But it should not remove `kday` to start with (#35)
assert header_copy.kday == header2.kday
# May as well check that with a corrupt 'bcd_fraction' we can still
# get the right time using the frame number.
header_copy['bcd_fraction'] = 0
# This is common enough that we should not fail verification.
header_copy.verify()
# However, it should also cause just getting the time to fail
# unless we pass in a frame rate.
with pytest.raises(ValueError):
header_copy.time
frame_rate = 32. * u.MHz / header.samples_per_frame
assert abs(header_copy.get_time(frame_rate=frame_rate)
- m5h2.time) < 1.*u.ns
    def test_payload(self):
        """Check that Mark 5B payloads can be used in a Mark 5B VDIF payload.

        Verifies that the raw encoded words, the decoded data, and the
        re-encoding round-trip all agree between the two formats, and that
        complex data is rejected (Mark 5B carries real-valued samples only).
        """
        # Get Mark 5B header, payload, and construct VDIF header, as above.
        with open(SAMPLE_M5B, 'rb') as fh:
            m5h = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
            m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
        header = vdif.VDIFHeader.from_mark5b_header(
            m5h, nchan=m5pl.sample_shape.nchan, bps=m5pl.bps)
        # Create VDIF payload from the Mark 5B encoded payload.
        payload = vdif.VDIFPayload(m5pl.words, header)
        # Check that the payload (i.e., encoded data) is the same.
        assert np.all(payload.words == m5pl.words)
        # And check that if we decode the payload, we get the same result.
        assert np.all(payload.data == m5pl.data)
        # Now construct a VDIF payload from the Mark 5B data, checking that
        # the encoding works correctly too.
        payload2 = vdif.VDIFPayload.fromdata(m5pl.data, header)
        assert np.all(payload2.words == m5pl.words)
        assert np.all(payload2.data == m5pl.data)
        # Mark 5B data cannot be complex.  Check that this raises an
        # exception, both via the attribute and the item interface.
        header2 = header.copy()
        with pytest.raises(ValueError):
            header2.complex_data = True
        with pytest.raises(ValueError):
            header2['complex_data'] = True
        with pytest.raises(ValueError):
            vdif.VDIFPayload.fromdata(m5pl.data.view(complex), bps=2, edv=0xab)
    def test_frame(self):
        """Check a whole Mark 5B frame can be translated to VDIF.

        Reads the second frame of the sample file and checks that the
        converted VDIF frame preserves size, shape, data, and time.
        """
        with mark5b.open(SAMPLE_M5B, 'rb', ref_time=Time(57000, format='mjd'),
                         nchan=8, bps=2) as fh:
            # pick second frame just to be different from header checks above.
            # (each Mark 5B frame is 10016 bytes: 16-byte header + payload)
            fh.seek(10016)
            m5f = fh.read_frame()
        assert m5f['frame_nr'] == 1
        frame = vdif.VDIFFrame.from_mark5b_frame(m5f)
        # 10032 = 10016 plus the extra 16 bytes of the larger VDIF header.
        assert frame.nbytes == 10032
        assert frame.shape == (5000, 8)
        assert np.all(frame.data == m5f.data)
        assert frame.time == m5f.time
def test_stream(self):
"""Check we can encode a whole stream."""
class TestVDIF0VDIF1:
    """Conversion between EDV versions (EDV 0 sample file written as EDV 1)."""

    def test_stream(self, tmpdir):
        """Read an EDV 0 stream and rewrite it as EDV 1; data must survive."""
        with vdif.open(SAMPLE_MWA, 'rs', sample_rate=1.28*u.MHz) as f0:
            h0 = f0.header0
            d0 = f0.read(1024)
        # Reuse all original header values, only switching the EDV.
        kwargs = dict(h0)
        kwargs['edv'] = 1
        fl = str(tmpdir.join('test1.vdif'))
        with vdif.open(fl, 'ws', sample_rate=1.28*u.MHz, **kwargs) as f1w:
            h1w = f1w.header0
            # The first four header words are EDV-independent.
            assert list(h1w.words[:4]) == list(h0.words[:4])
            assert h1w.sample_rate == 1.28*u.MHz
            f1w.write(d0)
        with vdif.open(fl, 'rs') as f1r:
            h1r = f1r.header0
            d1r = f1r.read(1024)
        assert h1r.words[:4] == h0.words[:4]
        # NOTE(review): this repeats the h1w check above; presumably h1r
        # was intended here -- confirm before changing.
        assert h1w.sample_rate == 1.28*u.MHz
        assert np.all(d1r == d0)
class TestMark5BToVDIF3:
"""Real conversion: Mark5B to VDIF EDV 3, and back to Mark5B"""
    def test_header(self):
        """Check a VDIF EDV 3 header can be built from Mark 5B information."""
        with open(SAMPLE_M5B, 'rb') as fh:
            m5h = mark5b.Mark5BHeader.fromfile(fh, kday=56000)
            m5pl = mark5b.Mark5BPayload.fromfile(fh, sample_shape=(8,), bps=2)
        # check that we have enough information to create VDIF EDV 3 header.
        header = vdif.VDIFHeader.fromvalues(
            edv=3, bps=m5pl.bps, sample_shape=(1,), station='WB',
            time=m5h.time, sample_rate=32.*u.MHz, complex_data=False)
        assert header.time == m5h.time
    def test_stream(self, tmpdir):
        """Convert Mark 5B data stream to VDIF, then back, byte-for-byte."""
        # Here, we need to give how the data is encoded, since the data do not
        # self-describe this.  Furthermore, we need to pass in a rough time,
        # and the rate at which samples were taken, so that absolute times can
        # be calculated.
        with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
                         nchan=8, bps=2) as fr:
            m5h = fr.header0
            # create VDIF header from Mark 5B stream information.
            header = vdif.VDIFHeader.fromvalues(
                edv=3, bps=fr.bps, nchan=1, station='WB', time=m5h.time,
                sample_rate=32.*u.MHz, complex_data=False)
            data = fr.read(20000)  # enough to fill one EDV3 frame.
            time1 = fr.tell(unit='time')
        # Get a file name in our temporary testing directory.
        vdif_file = str(tmpdir.join('converted.vdif'))
        # create and fill vdif file with converted data; times at the start
        # and end of the write should match the source stream to < 2 ns.
        with vdif.open(vdif_file, 'ws', header0=header,
                       nthread=data.shape[1]) as fw:
            assert (fw.tell(unit='time') - m5h.time) < 2. * u.ns
            fw.write(data)
            assert (fw.tell(unit='time') - time1) < 2. * u.ns
        # Check two files contain same information.
        with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
                         nchan=8, bps=2) as fm, vdif.open(vdif_file,
                                                          'rs') as fv:
            assert fm.header0.time == fv.header0.time
            dm = fm.read(20000)
            dv = fv.read(20000)
            assert np.all(dm == dv)
            assert fm.offset == fv.offset
            assert fm.tell(unit='time') == fv.tell(unit='time')
            # Convert VDIF file back to Mark 5B
            mark5b_new_file = str(tmpdir.join('reconverted.mark5b'))
            hv = fv.header0
            hm = fm.header0
            # Here, we fill some unimportant Mark 5B header information by
            # hand, so we can compare byte-for-byte.
            with mark5b.open(mark5b_new_file, 'ws', sample_rate=hv.sample_rate,
                             nchan=dv.shape[1], bps=hv.bps,
                             time=hv.time, user=hm['user'],
                             internal_tvg=hm['internal_tvg']) as fw:
                fw.write(dv)
            with open(SAMPLE_M5B, 'rb') as fh_orig, open(mark5b_new_file,
                                                         'rb') as fh_new:
                assert fh_orig.read() == fh_new.read()
class TestVDIF3ToMark5B:
    """Real conversion: VDIF EDV 3 to Mark5B."""

    def test_header(self):
        """A Mark 5B header built from a VDIF header keeps the same time."""
        with open(SAMPLE_VDIF, 'rb') as fh:
            vh = vdif.VDIFHeader.fromfile(fh)
        header = mark5b.Mark5BHeader.fromvalues(time=vh.time)
        assert header.time == vh.time

    def test_stream(self, tmpdir):
        """Write a VDIF stream out as Mark 5B and compare both streams."""
        with vdif.open(SAMPLE_VDIF, 'rs') as fr:
            vh = fr.header0
            data = fr.read(20000)  # enough to fill two Mark 5B frames.
        fl = str(tmpdir.join('test.m5b'))
        with mark5b.open(fl, 'ws', sample_rate=vh.sample_rate,
                         nchan=data.shape[1], bps=vh.bps, time=vh.time) as fw:
            fw.write(data)
        # Read both files back in parallel; data, offsets, and times must
        # agree sample for sample.
        with vdif.open(SAMPLE_VDIF, 'rs') as fv, mark5b.open(
                fl, 'rs', sample_rate=32.*u.MHz,
                ref_time=Time(57000, format='mjd'), nchan=8, bps=2) as fm:
            assert fv.header0.time == fm.header0.time
            dv = fv.read(20000)
            dm = fm.read(20000)
            assert np.all(dm == dv)
            assert fm.offset == fv.offset
            assert fm.tell(unit='time') == fv.tell(unit='time')
class TestVDIF0BPS1ToMark5B:
    """Real conversion: VDIF EDV 3, BPS 1 to Mark 5B."""

    def test_stream(self, tmpdir):
        """Write one frame's worth of 1-bit data twice and read it back."""
        with vdif.open(SAMPLE_BPS1_VDIF, 'rs', sample_rate=8*u.MHz) as fr:
            start_time = fr.start_time
            data = fr.read(5000)  # Just one Mark 5B frame.
        fl = str(tmpdir.join('test.m5b'))
        with mark5b.open(fl, 'ws', sample_rate=8.*u.MHz, nchan=data.shape[1],
                         bps=1, time=start_time) as fw:
            # Write the same block twice, so the file holds two frames.
            fw.write(data)
            fw.write(data)
        with vdif.open(SAMPLE_BPS1_VDIF, 'rs',
                       sample_rate=8*u.MHz) as fv, mark5b.open(
                           fl, 'rs', sample_rate=8.*u.MHz, nchan=16, bps=1,
                           ref_time=Time('2018-09-01')) as fm:
            assert fv.start_time == fm.start_time
            dv = fv.read(5000)
            dm = fm.read(5000)
            assert np.all(dm == dv)
            assert fm.offset == fv.offset
            assert fm.tell(unit='time') == fv.tell(unit='time')
            # The second frame was written from the same data, so it must
            # decode identically as well.
            dm = fm.read(5000)
            assert np.all(dm == dv)
class TestMark4ToVDIF1:
    """Real conversion: Mark 4 to VDIF EDV 1, and back to Mark 4.

    Here, need to use a VDIF format with a flexible size, since we want
    to create invalid frames corresponding to the pieces of data overwritten
    by the Mark 4 header.
    """

    def test_header(self):
        """A VDIF EDV 1 header built from Mark 4 values keeps its time."""
        with open(SAMPLE_M4, 'rb') as fh:
            fh.seek(0xa88)
            m4h = mark4.Mark4Header.fromfile(fh, ntrack=64, decade=2010)
        # Check that we have enough information to create VDIF EDV 1 header.
        header = vdif.VDIFHeader.fromvalues(
            edv=1, bps=m4h.bps, nchan=1, station='Ar', time=m4h.time,
            sample_rate=32.*u.MHz, payload_nbytes=640*2//8, complex_data=False)
        assert abs(header.time - m4h.time) < 2. * u.ns

    def test_stream(self, tmpdir):
        """Round-trip Mark 4 -> VDIF -> Mark 4, checking bytes match."""
        with mark4.open(SAMPLE_M4, 'rs', sample_rate=32.*u.MHz,
                        ntrack=64, decade=2010) as fr:
            m4header0 = fr.header0
            start_time = fr.start_time
            vheader0 = vdif.VDIFHeader.fromvalues(
                edv=1, bps=m4header0.bps, nchan=1, station='Ar',
                time=start_time, sample_rate=32.*u.MHz,
                payload_nbytes=640*2//8, complex_data=False)
            assert abs(vheader0.time - start_time) < 2. * u.ns
            data = fr.read(80000)  # full Mark 4 frame
            offset1 = fr.tell()
            time1 = fr.tell(unit='time')
            number_of_bytes = fr.fh_raw.tell() - 0xa88
        with open(SAMPLE_M4, 'rb') as fh:
            fh.seek(0xa88)
            orig_bytes = fh.read(number_of_bytes)
        fl = str(tmpdir.join('test.vdif'))
        with vdif.open(fl, 'ws', header0=vheader0,
                       nthread=data.shape[1]) as fw:
            assert (fw.tell(unit='time') - start_time) < 2. * u.ns
            # Write first VDIF frame, matching Mark 4 Header, hence invalid.
            fw.write(data[:160], valid=False)
            # Write remaining VDIF frames, with valid data.
            fw.write(data[160:])
            assert (fw.tell(unit='time') - time1) < 2. * u.ns
        with vdif.open(fl, 'rs') as fv:
            assert abs(fv.header0.time - start_time) < 2. * u.ns
            # The first frame was written invalid, so the first header read
            # back should match the template with 'invalid_data' set.
            expected = vheader0.copy()
            expected['invalid_data'] = True
            assert fv.header0 == expected
            dv = fv.read(80000)
            assert np.all(dv == data)
            assert fv.offset == offset1
            assert abs(fv.tell(unit='time') - time1) < 2.*u.ns
        # Convert VDIF file back to Mark 4, and check byte-for-byte.
        fl2 = str(tmpdir.join('test.m4'))
        with mark4.open(fl2, 'ws', sample_rate=vheader0.sample_rate,
                        ntrack=64, bps=2, fanout=4, time=vheader0.time,
                        system_id=108) as fw:
            fw.write(dv)
        with open(fl2, 'rb') as fh:
            conv_bytes = fh.read()
        # Bug fix: this previously compared len(conv_bytes) with itself,
        # which is always true; the reconverted file must match the size
        # of the original data read from the sample file.
        assert len(orig_bytes) == len(conv_bytes)
        assert orig_bytes == conv_bytes
class TestDADAToVDIF1:
"""Real conversion: DADA to VDIF EDV 1, and back to DADA.
Here, we use a VDIF format with a flexible size so it is easier to fit
the dada file inside the VDIF one.
"""
    def get_vdif_header(self, header):
        """Build a VDIF EDV 1 header from a DADA header.

        The payload is half the DADA payload size because the two
        polarisations become separate VDIF threads.
        """
        return vdif.VDIFHeader.fromvalues(
            edv=1, time=header.time, sample_rate=header.sample_rate,
            bps=header.bps, nchan=header['NCHAN'],
            complex_data=header.complex_data,
            payload_nbytes=header.payload_nbytes // 2,
            station=header['TELESCOPE'][:2])

    def get_vdif_data(self, dada_data):
        """Rescale DADA samples to the VDIF 8-bit encoding convention."""
        return (dada_data + 0.5 + 0.5j) / EIGHT_BIT_1_SIGMA

    def get_dada_data(self, vdif_data):
        """Inverse of get_vdif_data: rescale VDIF samples back to DADA."""
        return vdif_data * EIGHT_BIT_1_SIGMA - 0.5 - 0.5j
    def test_header(self):
        """Check a VDIF EDV 1 header can be derived from a DADA header."""
        with open(SAMPLE_DADA, 'rb') as fh:
            ddh = dada.DADAHeader.fromfile(fh)
        # Check that we have enough information to create VDIF EDV 1 header.
        header = self.get_vdif_header(ddh)
        assert abs(header.time - ddh.time) < 2. * u.ns
        assert header.payload_nbytes == ddh.payload_nbytes // 2
def test_payload(self):
with open(SAMPLE_DADA, 'rb') as fh:
fh.seek(4096)
ddp = dada.DADAPayload.fromfile(fh, payload_nbytes=64000,
sample_shape=(2, 1),
complex_data=True, bps=8)
dada_data = ddp.data
# Check that conversion between scalings works.
vdif_data = self.get_vdif_data(dada_data)
assert np.allclose(self.get_dada_data(vdif_data), dada_data)
# Check that we can create correct payloads.
vdif_payload0 = vdif.VDIFPayload.fromdata(vdif_data[:, 0, :], bps=8)
vdif_payload1 = vdif.VDIFPayload.fromdata(vdif_data[:, 1, :], bps=8)<|fim▁hole|> vd[:, 0] = vd0
vd[:, 1] = vd1
dd_new = self.get_dada_data(vd)
ddp2 = dada.DADAPayload.fromdata(dd_new, bps=8)
assert ddp2 == ddp
    def test_stream(self, tmpdir):
        """Round-trip a DADA stream through VDIF and back to DADA."""
        with dada.open(SAMPLE_DADA, 'rs') as fr:
            ddh = fr.header0
            dada_data = fr.read()
            offset1 = fr.tell()
            stop_time = fr.tell(unit='time')
        header = self.get_vdif_header(ddh)
        data = self.get_vdif_data(dada_data)
        assert abs(header.time - ddh.time) < 2. * u.ns
        vdif_file = str(tmpdir.join('converted_dada.vdif'))
        with vdif.open(vdif_file, 'ws', header0=header,
                       nthread=data.shape[1]) as fw:
            assert (fw.tell(unit='time') - header.time) < 2. * u.ns
            # Write all data in a single frameset, made of two frames.
            fw.write(data)
            assert (fw.tell(unit='time') - stop_time) < 2. * u.ns
            assert fw.offset == offset1
        with vdif.open(vdif_file, 'rs') as fv:
            assert abs(fv.header0.time - ddh.time) < 2. * u.ns
            dv = fv.read()
            assert fv.offset == offset1
            assert np.abs(fv.tell(unit='time') - stop_time) < 2.*u.ns
            vh = fv.header0
            vnthread = fv.sample_shape.nthread
        assert np.allclose(dv, data)
        # Convert VDIF file back to DADA.
        dada_file = str(tmpdir.join('reconverted.dada'))
        dv_data = self.get_dada_data(dv)
        assert np.allclose(dv_data, dada_data)
        # The DADA payload is twice the per-thread VDIF payload, since the
        # two VDIF threads are written back as the two polarisations.
        with dada.open(dada_file, 'ws', sample_rate=vh.sample_rate,
                       time=vh.time, npol=vnthread, bps=vh.bps,
                       payload_nbytes=vh.payload_nbytes*2, nchan=vh.nchan,
                       telescope=vh.station,
                       complex_data=vh['complex_data']) as fw:
            new_header = fw.header0
            fw.write(dv_data)
        assert self.get_vdif_header(new_header) == vh
        with dada.open(dada_file, 'rs') as fh:
            header = fh.header0
            new_dada_data = fh.read()
        assert header == new_header
        assert self.get_vdif_header(header) == vh
assert np.allclose(new_dada_data, dada_data)<|fim▁end|> | vd0, vd1 = vdif_payload0.data, vdif_payload1.data
assert np.allclose(vd0, vdif_data[:, 0, :])
assert np.allclose(vd1, vdif_data[:, 1, :])
vd = np.zeros((vd0.shape[0], 2, vd0.shape[1]), vd0.dtype) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var _ = require('underscore');
var keystone = require('../../');
var utils = keystone.utils;
/**
* Content Class
*
* Accessed via `Keystone.content`
*
* @api public
*/
var Content = function () {};
/**
* Loads page content by page key (optional).
*
* If page key is not provided, returns a hash of all page contents in the database.
*
* ####Example:
*
* keystone.content.fetch('home', function(err, content) { ... });
*
* @param {String} key (optional)
* @param {Function} callback
* @api public
*/
Content.prototype.fetch = function (page, callback) {
if (utils.isFunction(page)) {
callback = page;
page = null;
}
var content = this;
if (!this.AppContent) {
return callback({ error: 'invalid page', message: 'No pages have been registered.' });
}
if (page) {
if (!this.pages[page]) {
return callback({ error: 'invalid page', message: 'The page ' + page + ' does not exist.' });
}
this.AppContent.findOne({ key: page }, function (err, result) {
if (err) return callback(err);
return callback(null, content.pages[page].populate(result ? result.content.data : {}));
});
} else {
this.AppContent.find(function (err, results) {
if (err) return callback(err);
var data = {};
results.forEach(function (i) {
if (content.pages[i.key]) {
data[i.key] = content.pages[i.key].populate(i.content.data);<|fim▁hole|> });
_.each(content.pages, function (i) {
if (!data[i.key]) {
data[i.key] = i.populate();
}
});
return data;
});
}
};
/**
* Sets page content by page key.
*
* Merges content with existing content.
*
* ####Example:
*
* keystone.content.store('home', { title: 'Welcome' }, function(err) { ... });
*
* @param {String} key
* @param {Object} content
* @param {Function} callback
* @api public
*/
Content.prototype.store = function (page, content, callback) {
	// Keep a reference to the Content instance: inside the findOne callback
	// below, `this` is no longer bound to it.
	var self = this;
	if (!self.pages[page]) {
		return callback({ error: 'invalid page', message: 'The page ' + page + ' does not exist.' });
	}
	content = self.pages[page].validate(content);
	// TODO: Handle validation errors
	self.AppContent.findOne({ key: page }, function (err, doc) {
		if (err) return callback(err);
		if (doc) {
			// Preserve the previous content in the document history.
			if (doc.content) {
				doc.history.push(doc.content);
			}
			_.defaults(content, doc.content);
		} else {
			// Bug fix: this previously read `new content.AppContent(...)`,
			// but `content` here is the page data, not the Content instance,
			// so creating a new document always threw.
			doc = new self.AppContent({ key: page });
		}
		doc.content = { data: self.pages[page].clean(content) };
		doc.lastChangeDate = Date.now();
		doc.save(callback);
	});
};
/**
* Registers a page. Should not be called directly, use Page.register() instead.
*
* @param {Page} page
* @api private
*/
Content.prototype.page = function (key, page) {
	if (!this.pages) {
		this.pages = {};
	}
	// Getter form: page(key) returns a page registered earlier.
	if (arguments.length === 1) {
		if (!this.pages[key]) {
			// Bug fix: the old message ("cannot be registered more than
			// once") was copy-pasted from the duplicate-registration branch
			// below; this branch fires when the page was never registered.
			throw new Error('keystone.content.page() Error: page ' + key + ' has not been registered.');
		}
		return this.pages[key];
	}
	// Setter form: page(key, page) registers a new page exactly once.
	this.initModel();
	if (this.pages[key]) {
		throw new Error('keystone.content.page() Error: page ' + key + ' cannot be registered more than once.');
	}
	this.pages[key] = page;
	return page;
};
/**
* Ensures the Mongoose model for storing content is initialised.
*
* Called automatically when pages are added.
*
* @api private
*/
Content.prototype.initModel = function () {
	// Idempotent: only create the model once.
	if (this.AppContent) return;
	// Schema for a single content revision (also reused for history entries).
	var contentSchemaDef = {
		createdAt: { type: Date, default: Date.now },
		data: { type: keystone.mongoose.Schema.Types.Mixed },
	};
	var ContentSchema = new keystone.mongoose.Schema(contentSchemaDef);
	// NOTE(review): fetch()/store() query on { key: page }, but this schema
	// declares the field as `page` -- confirm which name is intended.
	var PageSchema = new keystone.mongoose.Schema({
		page: { type: String, index: true },
		lastChangeDate: { type: Date, index: true },
		content: contentSchemaDef,
		history: [ContentSchema],
	}, { collection: 'app_content' });
	this.AppContent = keystone.mongoose.model('App_Content', PageSchema);
};
/**
* Outputs client-side editable data for content management
*
* Called automatically when pages are added.
*
* @api private
*/
Content.prototype.editable = function (user, options) {
	// Only users with Keystone access get editable metadata.
	if (!user || !user.canAccessKeystone) {
		return undefined;
	}
	if (options.list) {
		var list = keystone.list(options.list);
		if (!list) {
			return JSON.stringify({ type: 'error', err: 'list not found' });
		}
		// Serialise the list descriptor for the client-side editor.
		var data = {
			type: 'list',
			path: list.path,
			singular: list.singular,
			plural: list.plural,
		};
		if (options.id) {
			data.id = options.id;
		}
		return JSON.stringify(data);
	}
	// Note: when options.list is not given, this returns undefined.
};
/**
* The exports object is an instance of Content.
*
* @api public
*/
module.exports = new Content();

// Expose Classes
// Bug fix: after `module.exports` is reassigned above, the original
// `exports` alias no longer points at the exported object, so assignments
// like `exports.Page = ...` were silently dropped. Attach the classes to
// the new export object instead.
module.exports.Page = require('./page');
module.exports.Types = require('./types');
<|file_name|>String.js<|end_file_name|><|fim▁begin|>/*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.<|fim▁hole|> * LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/***************************************************
* Color utilities
***************************************************/
define('TYPO3/CMS/Rtehtmlarea/HTMLArea/Util/String',
['TYPO3/CMS/Rtehtmlarea/HTMLArea/UserAgent/UserAgent'],
function (UserAgent) {
// Create the ruler
if (!document.getElementById('htmlarea-ruler')) {
// Insert the css rule in the stylesheet
var styleSheet = document.styleSheets[0];
var selector = '#htmlarea-ruler';
var style = 'visibility: hidden; white-space: nowrap;';
var rule = selector + ' { ' + style + ' }';
if (!UserAgent.isIEBeforeIE9) {
try {
styleSheet.insertRule(rule, styleSheet.cssRules.length);
} catch (e) {}
} else {
styleSheet.addRule(selector, style);
}
//Insert the ruler on the document
var ruler = document.createElement('span');
ruler.setAttribute('id', 'htmlarea-ruler');
document.body.appendChild(ruler);
}
/**
* Get the visual length of a string
*/
String.prototype.visualLength = function() {
var ruler = document.getElementById('htmlarea-ruler');
ruler.innerHTML = this;
return ruler.offsetWidth;
};
/**
* Set an ellipsis on a string
*/
String.prototype.ellipsis = function(length) {
var temp = this;
var trimmed = this;
if (temp.visualLength() > length) {
trimmed += "...";
while (trimmed.visualLength() > length) {
temp = temp.substring(0, temp.length-1);
trimmed = temp + "...";
}
}
return trimmed;
};
});<|fim▁end|> | *
* For the full copyright and license information, please read the |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# main.py, copyright 2014 by Marko Čibej <[email protected]>
#
# This file is part of SvgMapper. Full sources and documentation
# are available here: https://github.com/tumbislav/SvgMapper
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
# Full licence is in the file LICENSE and at http://www.gnu.org/copyleft/gpl.html
__author__ = 'Marko Čibej'
import argparse
from svgmapper import *
from helper import logger
def main(config, resources=None, maps=None, simulate=False):
    """Run an SvgMapper job.

    :param config: path of the main configuration file.
    :param resources: optional additional resource file(s), passed on to
        ``SvgMapper.load_config``.
    :param maps: optional map name(s) that replace the targets listed in
        the configuration (via ``replace_targets``).
    :param simulate: when True, only load and validate the configuration
        without running any maps.
    """
    logger.info('Starting job')
    with SvgMapper() as mapper:
        mapper.load_config(config, resources)
        if maps:
            mapper.replace_targets(maps)
        if not simulate:
            mapper.run()
    logger.info('Finished')
def parse_args():
    """Parse the SvgMapper command line.

    Returns an argparse.Namespace with attributes: config_file,
    resource (list or None), map, verbosity (0-3 or None), log, simulate.
    """
    parser = argparse.ArgumentParser(description='Transform maps in SVG format in various ways.')
    parser.add_argument('config_file', help='The name of the configuration file')
    parser.add_argument('-r', '--resource', help='Additional resource file(s)',
                        action='append', metavar='resource_file')
    parser.add_argument('-m', '--map', help='Map(s) to run instead of those listed in config file', metavar='map_name')
    # Bug fix: choices was range(0, 3), which rejected '-v 3' even though
    # the help text documents level 3 (debug) and set_logging supports it.
    parser.add_argument('-v', '--verbosity', help='Set verbosity: 0=errors only, 1=warnings, 2=info, 3=debug',
                        type=int, choices=range(0, 4), dest='verbosity')
    parser.add_argument('-l', '--log', help='Output to named log file', metavar=('level(0-3)', 'logFile'), nargs=2)
    parser.add_argument('-s', '--simulate', help='Don\'t actually do anything, just parse all the configurations',
                        action='store_true')
    return parser.parse_args()
def set_logging(the_log, verbosity):
log_levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
logger.setLevel(logging.DEBUG)<|fim▁hole|> lf = logging.FileHandler(the_log[1], mode='w')
lf.setLevel(level)
lf.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
logger.addHandler(lf)
lc = logging.StreamHandler()
if verbosity:
lc.setLevel(log_levels[verbosity])
else:
lc.setLevel(log_levels[2])
logger.addHandler(lc)<|fim▁end|> | if the_log:
level = log_levels[int(the_log[0])] |
<|file_name|>unifi.py<|end_file_name|><|fim▁begin|>"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
    """Setup Unifi device_tracker.

    Validates the platform configuration, connects to the Unifi
    controller, and returns a UnifiScanner (or False on any failure).
    """
    # Imported lazily so the dependency is only needed when the platform
    # is actually configured.
    from unifi.controller import Controller
    if not validate_config(config, {DOMAIN: [CONF_USERNAME,
                                             CONF_PASSWORD]},
                           _LOGGER):
        _LOGGER.error('Invalid configuration')
        return False
    this_config = config[DOMAIN]
    host = this_config.get(CONF_HOST, 'localhost')
    username = this_config.get(CONF_USERNAME)
    password = this_config.get(CONF_PASSWORD)
    site_id = this_config.get(CONF_SITE_ID, 'default')
    try:
        port = int(this_config.get(CONF_PORT, 8443))
    except ValueError:
        _LOGGER.error('Invalid port (must be numeric like 8443)')
        return False
    try:
        # 'v4' is the controller API version used by this platform.
        ctrl = Controller(host, username, password, port, 'v4', site_id)
    except urllib.error.HTTPError as ex:
        _LOGGER.error('Failed to connect to unifi: %s', ex)
        return False
    return UnifiScanner(ctrl)
class UnifiScanner(object):
    """Provide device_tracker support from Unifi WAP client data."""

    def __init__(self, controller):
        """Initialize the scanner and do an initial client fetch."""
        self._controller = controller
        self._update()

    def _update(self):
        """Get the clients from the device.

        On HTTP errors the client list is treated as empty rather than
        raising, so a flaky controller does not break tracking.
        """
        try:
            clients = self._controller.get_clients()
        except urllib.error.HTTPError as ex:
            _LOGGER.error('Failed to scan clients: %s', ex)
            clients = []
        # Map of MAC address -> client record from the controller.
        self._clients = {client['mac']: client for client in clients}

    def scan_devices(self):
        """Refresh the client list and return the known MAC addresses."""
        self._update()
        return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.<|fim▁hole|> _LOGGER.debug('Device %s name %s', mac, name)
return name<|fim▁end|> | """
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname') |
<|file_name|>httpclient.go<|end_file_name|><|fim▁begin|>package httpclient
//Verbatim from: https://gist.github.com/dmichael/5710968
import (<|fim▁hole|> "net/http"
"net/http/httputil"
"time"
"github.com/sirupsen/logrus"
)
// Config encapsulates the basic settings for the HTTPClient
type Config struct {
ConnectTimeout time.Duration
ReadWriteTimeout time.Duration
}
func timeoutDialer(config *Config) func(net, addr string) (c net.Conn, err error) {
return func(netw, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(netw, addr, config.ConnectTimeout)
if err != nil {
return nil, err
}
conn.SetDeadline(time.Now().Add(config.ReadWriteTimeout))
return conn, nil
}
}
// NewTimeoutClient returns a new *http.Client with timeout set on connection
// read and write operations.
func NewTimeoutClient(args ...interface{}) *http.Client {
// Default configuration
config := &Config{
ConnectTimeout: 1 * time.Second,
ReadWriteTimeout: 1 * time.Second,
}
// merge the default with user input if there is one
if len(args) == 1 {
timeout := args[0].(time.Duration)
config.ConnectTimeout = timeout
config.ReadWriteTimeout = timeout
}
if len(args) == 2 {
config.ConnectTimeout = args[0].(time.Duration)
config.ReadWriteTimeout = args[1].(time.Duration)
}
return &http.Client{
Transport: &http.Transport{
Dial: timeoutDialer(config),
},
CheckRedirect: func(req *http.Request, via []*http.Request) error {
// Copied from default function
if len(via) >= 10 {
return errors.New("stopped after 10 redirects")
}
lastURL := via[len(via)-1].URL
logrus.Debugf("GOT REDIRECT FROM %v TO: %v\n", lastURL, req.URL)
requestDump, err := httputil.DumpRequest(req, true)
if err != nil {
logrus.Errorf("Couldn't dump request: %s", err)
} else {
logrus.Debugln(string(requestDump))
}
return nil
},
}
}<|fim▁end|> | "errors"
"net" |
<|file_name|>TypeTextAtCefTest.ts<|end_file_name|><|fim▁begin|>import { Keys } from '@ephox/agar';
import { describe, it } from '@ephox/bedrock-client';
import { TinyAssertions, TinyContentActions, TinyHooks, TinySelections } from '@ephox/mcagar';
import Editor from 'tinymce/core/api/Editor';
import Theme from 'tinymce/themes/silver/Theme';
describe('browser.tinymce.core.keyboard.TypeTextAtCef', () => {
const hook = TinyHooks.bddSetupLight<Editor>({
add_unload_trigger: false,
base_url: '/project/tinymce/js/tinymce'
}, [ Theme ], true);
  it('Type text before cef inline element', () => {
    const editor = hook.editor();
    // One non-editable (cef) span inside a paragraph.
    editor.setContent('<p><span contenteditable="false">a</span></p>');
    TinySelections.select(editor, 'p', [ 1 ]);
    // Move the caret to just before the cef element, then type.
    TinyContentActions.keystroke(editor, Keys.left());
    TinyContentActions.type(editor, 'bc');
    // The typed text is inserted before the cef span, caret after 'bc'.
    TinyAssertions.assertCursor(editor, [ 0, 0 ], 2);
    TinyAssertions.assertContent(editor, '<p>bc<span contenteditable="false">a</span></p>');
});<|fim▁hole|> TinySelections.select(editor, 'p', [ 1 ]);
TinyContentActions.keystroke(editor, Keys.right());
TinyContentActions.type(editor, 'bc');
TinyAssertions.assertCursor(editor, [ 0, 1 ], 3);
TinyAssertions.assertContent(editor, '<p><span contenteditable="false">a</span>bc</p>');
});
it('Type between cef inline elements', () => {
const editor = hook.editor();
editor.setContent('<p><span contenteditable="false">a</span> <span contenteditable="false">b</span></p>');
TinySelections.select(editor, 'p', [ 3 ]);
TinyContentActions.keystroke(editor, Keys.left());
TinyContentActions.keystroke(editor, Keys.left());
TinyContentActions.type(editor, 'bc');
TinyAssertions.assertSelection(editor, [ 0, 1 ], 3, [ 0, 1 ], 3);
TinyAssertions.assertContent(editor, '<p><span contenteditable="false">a</span>bc <span contenteditable="false">b</span></p>');
});
});<|fim▁end|> |
it('Type after cef inline element', () => {
const editor = hook.editor();
editor.setContent('<p><span contenteditable="false">a</span></p>'); |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/*
* TLRW algorithm implementation for Rust.
* Let's see what we can do here.
*
* (c) Silly Hat, 2016.
*/
/*
* Features, additional crates and constants go here.
*/
#![allow(dead_code)]
extern crate libc;
extern crate rand;
extern crate time;
use rand::Rng;
const NELEMS: usize = 4;
/*
* External C functions.
*/
extern {
/* tut budet tlrw */
}
/* =======
* MAIN!<|fim▁hole|> * =======
*/
fn main() {
println!("hellooooo");
let mut stuff = vec![0; NELEMS];
print_vec(&stuff);
}
/* ==============
* USEFUL STUFF
* ==============
*/
/*
* Getting current time.
*/
fn nownow() -> f64 {
let timetime = time::get_time();
let current: f64 = timetime.sec as f64 + (timetime.nsec as f64 /
1000.0 / 1000.0 / 1000.0);
current
}
/*
* Printing a vector of unsigned 32-bit integers.
*/
fn print_vec(vec: &Vec<u32>) {
for item in vec.iter() {
print!("{}\t", item);
}
print!("\n");
}<|fim▁end|> | |
<|file_name|>zkocc_structs.go<|end_file_name|><|fim▁begin|>// Copyright 2012, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package zk
// contains the structures used for RPC calls to zkocc.
import (
"time"
)
type ZkStat struct {
czxid int64
mzxid int64
cTime time.Time
mTime time.Time
version int
cVersion int
aVersion int
ephemeralOwner int64
dataLength int
numChildren int
pzxid int64
}
type ZkPath struct {
Path string
}
type ZkPathV struct {
Paths []string
}
type ZkNode struct {
Path string
Data string
Stat ZkStat
Children []string
Cached bool // the response comes from the zkocc cache
Stale bool // the response is stale because we're not connected
}
type ZkNodeV struct {
Nodes []*ZkNode
}
// ZkStat methods to match zk.Stat interface
func (zkStat *ZkStat) Czxid() int64 {
return zkStat.czxid
}
func (zkStat *ZkStat) Mzxid() int64 {
return zkStat.mzxid
}
func (zkStat *ZkStat) CTime() time.Time {
return zkStat.cTime
}
func (zkStat *ZkStat) MTime() time.Time {
return zkStat.mTime
}
func (zkStat *ZkStat) Version() int {
return zkStat.version
}
func (zkStat *ZkStat) CVersion() int {
return zkStat.cVersion
}
func (zkStat *ZkStat) AVersion() int {
return zkStat.aVersion
}
func (zkStat *ZkStat) EphemeralOwner() int64 {
return zkStat.ephemeralOwner
}
func (zkStat *ZkStat) DataLength() int {
return zkStat.dataLength
}
<|fim▁hole|>
func (zkStat *ZkStat) Pzxid() int64 {
return zkStat.pzxid
}
// helper method
func (zkStat *ZkStat) FromZookeeperStat(zStat Stat) {
zkStat.czxid = zStat.Czxid()
zkStat.mzxid = zStat.Mzxid()
zkStat.cTime = zStat.CTime()
zkStat.mTime = zStat.MTime()
zkStat.version = zStat.Version()
zkStat.cVersion = zStat.CVersion()
zkStat.aVersion = zStat.AVersion()
zkStat.ephemeralOwner = zStat.EphemeralOwner()
zkStat.dataLength = zStat.DataLength()
zkStat.numChildren = zStat.NumChildren()
zkStat.pzxid = zStat.Pzxid()
}<|fim▁end|> | func (zkStat *ZkStat) NumChildren() int {
return zkStat.numChildren
} |
<|file_name|>splashscreen.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2013 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "splashscreen.h"
#include "clientversion.h"
#include "util.h"
#include <QPainter>
#undef loop /* ugh, remove this when the #define loop is gone from util.h */
#include <QApplication>
SplashScreen::SplashScreen(const QPixmap &pixmap, Qt::WindowFlags f) :
QSplashScreen(pixmap, f)
{
// set reference point, paddings
//int paddingLeftCol2 = 230;
//int paddingTopCol2 = 376;
//int line1 = 0;
//int line2 = 13;
//int line3 = 26;
//int line4 = 26;
float fontFactor = 1.0;
// define text to place
QString titleText = QString(QApplication::applicationName()).replace(QString("-testnet"), QString(""), Qt::CaseSensitive); // cut of testnet, place it as single object further down
QString versionText = QString("Version %1 ").arg(QString::fromStdString(FormatFullVersion()));
QString copyrightText1 = QChar(0xA9)+QString(" 2009-%1 ").arg(COPYRIGHT_YEAR) + QString(tr("The Bitcoin developers"));
QString copyrightText2 = QChar(0xA9)+QString(" 2011-%1 ").arg(COPYRIGHT_YEAR) + QString(tr("The Litecoin developers"));
QString copyrightText3 = QChar(0xA9)+QString(" 2013-%1 ").arg(COPYRIGHT_YEAR) + QString(tr("The Quarkcoin developers"));
QString font = "Arial";
// load the bitmap for writing some text over it
QPixmap newPixmap;
if(GetBoolArg("-testnet")) {
newPixmap = QPixmap(":/images/splash_testnet");
}
else {<|fim▁hole|> QPainter pixPaint(&newPixmap);
pixPaint.setPen(QColor(70,70,70));
pixPaint.setFont(QFont(font, 9*fontFactor));
// pixPaint.drawText(paddingLeftCol2,paddingTopCol2+line3,versionText);
// draw copyright stuff
pixPaint.setFont(QFont(font, 9*fontFactor));
// pixPaint.drawText(paddingLeftCol2,paddingTopCol2+line1,copyrightText1);
// pixPaint.drawText(paddingLeftCol2,paddingTopCol2+line2,copyrightText2);
// pixPaint.drawText(paddingLeftCol2,paddingTopCol2+line2+line2,copyrightText3);
pixPaint.end();
this->setPixmap(newPixmap);
}<|fim▁end|> | newPixmap = QPixmap(":/images/splash");
}
|
<|file_name|>today.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { ViewChild } from '@angular/core';
import { Router } from '@angular/router';
import * as moment from 'moment';
import { SlimLoadingBarService } from 'ng2-slim-loading-bar';
import { AppState } from '../app.service';
import { Appointment } from '../api/model/appointment';
import { ViewAppointmentService } from './appointment.service';
@Component({
templateUrl: './today.component.html',
styleUrls: [ './today.component.scss' ]
})
export class AppointmentTodayComponent implements OnInit {
public appointments: Appointment[];
public locale: string;
public defaultView: string = 'basicDay';
constructor(<|fim▁hole|> ) {}
public ngOnInit() {
// Mouseflow integration
if ((window as any)._mfq) {
(window as any)._mfq.push(['newPageView', '/appointment/today']);
}
// Set up page
this._state.isSubPage.next(false);
this._state.title.next(
localStorage.getItem('locale').startsWith('de') ? 'Heute' : 'Today');
this._state.actions.next();
this._state.primaryAction.next({
icon: 'add',
routerLink: 'appointment/add'
});
// Retrieve data
this.getTodaysAppointments();
// Set up calendar view
this.locale = localStorage.getItem('locale').startsWith('de') ? 'de' : 'en';
}
/**
* Triggered when a calendar event is clicked.
*/
public handleEventClick(event) {
this.router.navigate(['appointment', event.calEvent.id]);
}
private getTodaysAppointments(): void {
this.slimLoadingBarService.start();
const start = moment.utc().startOf('day');
const end = moment.utc().endOf('day');
this.viewAppointmentService
.appointmentFind(`{"where": {"start": {"between": ["${start.format()}", "${end.format()}"]}}}`)
.subscribe(
(x) => this.appointments = x,
(e) => console.log(e),
() => {
console.log('Get today\'s appointments complete');
this.slimLoadingBarService.complete();
}
);
}
}<|fim▁end|> | private _state: AppState,
private router: Router,
private slimLoadingBarService: SlimLoadingBarService,
private viewAppointmentService: ViewAppointmentService |
<|file_name|>download.rs<|end_file_name|><|fim▁begin|>use async_trait::async_trait;
use cucumber::given;
use snafu::ResultExt;
use crate::error;
use crate::error::Error;
use crate::state::{GlobalState, State, Step, StepStatus};
use tests::download;
// Download OSM
#[given(regex = r"osm file has been downloaded for (\S+)$")]
pub async fn download_osm(state: &mut GlobalState, region: String) {
state
.execute_once(DownloadOsm(region))
.await
.expect("failed to download OSM file");
}
#[derive(PartialEq)]
pub struct DownloadOsm(pub String);
#[async_trait(?Send)]
impl Step for DownloadOsm {
async fn execute(&mut self, _state: &State) -> Result<StepStatus, Error> {
let Self(region) = self;
download::osm(region)
.await
.map(|status| status.into())
.context(error::DownloadSnafu)
}
}
// Download bano
#[given(regex = r"bano files have been downloaded for (.+) into (\S+)$")]
pub async fn download_bano(state: &mut GlobalState, departments: String, region: String) {
let departments = departments
.split(',')
.map(str::trim)
.map(str::to_string)
.collect();
state
.execute_once(DownloadBano {
departments,
region,<|fim▁hole|> .expect("failed to download OSM file");
}
#[derive(PartialEq)]
pub struct DownloadBano {
pub departments: Vec<String>,
pub region: String,
}
#[async_trait(?Send)]
impl Step for DownloadBano {
async fn execute(&mut self, _state: &State) -> Result<StepStatus, Error> {
let Self {
departments,
region,
} = self;
download::bano(region, departments)
.await
.map(|status| status.into())
.context(error::DownloadSnafu)
}
}
// Download NTFS
#[given(regex = r"ntfs file has been downloaded for (\S+)$")]
pub async fn download_ntfs(state: &mut GlobalState, region: String) {
state
.execute_once(DownloadNTFS { region })
.await
.expect("failed to download NTFS file");
}
#[derive(Debug, PartialEq)]
pub struct DownloadNTFS {
pub region: String,
}
#[async_trait(?Send)]
impl Step for DownloadNTFS {
async fn execute(&mut self, _state: &State) -> Result<StepStatus, Error> {
let Self { region } = self;
download::ntfs(region)
.await
.map(|status| status.into())
.context(error::DownloadSnafu)
}
}<|fim▁end|> | })
.await |
<|file_name|>sort.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
'''A simple implementation of a sorting algorithm, meant to allow
people to manually rank a list of items using whatever subjective or
objective criteria they want.
This program can be called as a script and used interactively. You
can provide the list of things to sort as command line arguments, or
if there are no arguments provided, you can provide the list in stdin,
one item per line.
Example run:
$ ./sort.py 'ice cream' falafel hamburgers pizza
Which is greater, falafel or ice cream (<, =, or >)? <
Which is greater, hamburgers or ice cream (<, =, or >)? <
Which is greater, hamburgers or falafel (<, =, or >)? >
Which is greater, pizza or hamburgers (<, =, or >)? >
Which is greater, pizza or ice cream (<, =, or >)? <
* ice cream
* pizza
* hamburgers
* falafel
Author: Adam Mesha <[email protected]>
License: MIT
'''
from functools import cmp_to_key
class memoize:
'''We really want to be sure that we don't ask people to compare the
same two items twice, so we cache the result.
'''
def __init__(self, func):<|fim▁hole|> self.func = func
self.cache = {}
def __call__(self, *args):
key = tuple(args)
if key not in self.cache:
self.cache[key] = self.func(*args)
return self.cache[key]
@memoize
def cmpfunc(a, b):
result = None
s = 'Which is greater, {a} or {b} (<, =, or >)? '.format(a=a, b=b)
while result is None or result not in '<=>':
result = input(s).strip()
return '<=>'.index(result) - 1
keyfunc = cmp_to_key(cmpfunc)
def binary_insertion_sort(seq, keyfunc):
'''Insertion sort, using binary search to insert each element. Runs
in O(n**2) time, but the use case is when a human is manually
deciding on the ordering, so the most important thing is to reduce
the number of comparisons.
'''
def mv(srcidx, dstidx):
while srcidx > dstidx:
seq[srcidx], seq[srcidx - 1] = seq[srcidx - 1], seq[srcidx]
srcidx -= 1
i = 1
while i < len(seq):
lower = 0; upper = i
while lower < upper:
j = (upper + lower) // 2
key1, key2 = keyfunc(seq[i]), keyfunc(seq[j])
if key1 == key2:
mv(i, j+1) # XXX this is not stable
i += 1
break
if key1 < key2:
upper = j
else: # >
lower = j + 1
else:
mv(i, upper)
i += 1
class SortableWithHeuristic:
def __init__(self, val, heur):
self.val = val
self.heur = heur
def __str__(self):
return '{val}: {heur}'.format(val=self.val, heur=self.heur)
def __repr__(self):
return '{}(val={}, heur={})'.format(self.__class__.__name__,
repr(self.val),
repr(self.heur))
def get_heuristic_func(val):
result = None
s = 'Give an approximate numeric score to item {}: '.format(val)
while result is None:
try:
result = float(input(s).strip())
except ValueError:
pass
return result
def heuristic_sort(seq, get_heuristic_func, cmpfunc):
def swap(a, b):
seq[a], seq[b] = seq[b], seq[a]
idx = 0
while idx < len(seq):
val = seq[idx]
heur = get_heuristic_func(val)
seq[idx] = SortableWithHeuristic(val, heur)
# find the current location
j = idx
while j > 0 and seq[j].heur < seq[j-1].heur:
swap(j, j-1)
j -= 1
moved = False
while j < idx and cmpfunc(seq[j].val, seq[j+1].val) == 1:
swap(j, j+1)
j += 1
moved = True
if not moved:
while j > 0 and cmpfunc(seq[j].val, seq[j-1].val) == -1:
swap(j, j-1)
j -= 1
if 0 < j < idx:
seq[j].heur = (seq[j-1].heur + seq[j+1].heur) / 2
elif idx > 0:
if j == 0 and seq[j].heur > seq[j+1].heur:
seq[j].heur = seq[j+1].heur - 1
elif j == idx and seq[j].heur < seq[j-1].heur:
seq[j].heur = seq[j-1].heur + 1
idx += 1
def main():
import sys
seq = []
if len(sys.argv) > 1:
seq.extend(sys.argv[1:])
if not seq:
seq.extend(x.strip() for x in sys.stdin.readlines())
heuristic_sort(seq, get_heuristic_func, cmpfunc)
print('\n'.join('* {}'.format(item) for item in reversed(seq)))
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>ItemElectricEngine.java<|end_file_name|><|fim▁begin|>package adamros.mods.transducers.item;
import java.util.List;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import adamros.mods.transducers.Transducers;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
public class ItemElectricEngine extends ItemBlock
{
public ItemElectricEngine(int par1)
{
super(par1);
setHasSubtypes(true);
setMaxDamage(0);
setUnlocalizedName("itemElectricEngine");
setCreativeTab(Transducers.tabTransducers);
}
@Override
public int getMetadata(int meta)
{
return meta;
}
@Override
public String getUnlocalizedName(ItemStack is)
{
String suffix;
switch (is.getItemDamage())
{
case 0:
suffix = "lv";
break;
case 1:
suffix = "mv";
break;
case 2:
suffix = "hv";
break;
case 3:
suffix = "ev";
break;
default:
suffix = "lv";
break;
}
return getUnlocalizedName() + "." + suffix;
}
@Override
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack par1ItemStack, EntityPlayer par2EntityPlayer, List par3List, boolean par4)
{
super.addInformation(par1ItemStack, par2EntityPlayer, par3List, par4);
String type;
switch (par1ItemStack.getItemDamage())
{
case 0:
type = "lv";
break;
case 1:
type = "mv";
break;
<|fim▁hole|> case 2:
type = "hv";
break;
case 3:
type = "ev";
break;
default:
type = "lv";
break;
}
par3List.add(StatCollector.translateToLocal("tip.electricEngine." + type).trim());
}
}<|fim▁end|> | |
<|file_name|>SelectionSort9.java<|end_file_name|><|fim▁begin|>package main.java;
public class SelectionSort9 {
public static <T extends Comparable<T>> void sort(final T[] a) {
for (int i = 0; i < a.length - 1; i++) {
int min = i;
for (int j = i + 1; j < a.length; j++) {
if (a[j].compareTo(a[min]) < 0) {
min = j;
}<|fim▁hole|>
if (i != min) {
final T tmp = a[min];
a[min] = a[i];
a[i] = tmp;
}
}
}
}<|fim▁end|> | } |
<|file_name|>TaskListSoapInterface.java<|end_file_name|><|fim▁begin|>/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|> */
package com.tle.web.workflow.soap;
public interface TaskListSoapInterface {
String getTaskFilterCounts(boolean ignoreZero);
String[] getTaskFilterNames();
String getTaskList(String filterName, int start, int numResults) throws Exception;
}<|fim▁end|> | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. |
<|file_name|>rest.go<|end_file_name|><|fim▁begin|>package imagestreamimport
import (
"fmt"
"net/http"
"time"
"github.com/golang/glog"
gocontext "golang.org/x/net/context"
kapierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/diff"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/validation/field"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
kapi "k8s.io/kubernetes/pkg/api"
authorizationapi "github.com/openshift/origin/pkg/authorization/api"
"github.com/openshift/origin/pkg/client"
serverapi "github.com/openshift/origin/pkg/cmd/server/api"
"github.com/openshift/origin/pkg/dockerregistry"
"github.com/openshift/origin/pkg/image/api"
imageapiv1 "github.com/openshift/origin/pkg/image/api/v1"
"github.com/openshift/origin/pkg/image/importer"
"github.com/openshift/origin/pkg/image/registry/imagestream"
quotautil "github.com/openshift/origin/pkg/quota/util"
)
// ImporterFunc returns an instance of the importer that should be used per invocation.
type ImporterFunc func(r importer.RepositoryRetriever) importer.Interface
// ImporterDockerRegistryFunc returns an instance of a docker client that should be used per invocation of import,
// may be nil if no legacy import capability is required.
type ImporterDockerRegistryFunc func() dockerregistry.Client
// REST implements the RESTStorage interface for ImageStreamImport
type REST struct {
importFn ImporterFunc
streams imagestream.Registry
internalStreams rest.CreaterUpdater
images rest.Creater
secrets client.ImageStreamSecretsNamespacer
transport http.RoundTripper
insecureTransport http.RoundTripper
clientFn ImporterDockerRegistryFunc
strategy *strategy
sarClient client.SubjectAccessReviewInterface
}
// NewREST returns a REST storage implementation that handles importing images. The clientFn argument is optional
// if v1 Docker Registry importing is not required. Insecure transport is optional, and both transports should not
// include client certs unless you wish to allow the entire cluster to import using those certs.
func NewREST(importFn ImporterFunc, streams imagestream.Registry, internalStreams rest.CreaterUpdater,
images rest.Creater, secrets client.ImageStreamSecretsNamespacer,
transport, insecureTransport http.RoundTripper,
clientFn ImporterDockerRegistryFunc,
allowedImportRegistries *serverapi.AllowedRegistries,
registryFn api.DefaultRegistryFunc,
sarClient client.SubjectAccessReviewInterface,
) *REST {
return &REST{
importFn: importFn,
streams: streams,
internalStreams: internalStreams,
images: images,
secrets: secrets,
transport: transport,
insecureTransport: insecureTransport,
clientFn: clientFn,
strategy: NewStrategy(allowedImportRegistries, registryFn),
sarClient: sarClient,
}
}
// New is only implemented to make REST implement RESTStorage
func (r *REST) New() runtime.Object {
return &api.ImageStreamImport{}
}
func (r *REST) Create(ctx apirequest.Context, obj runtime.Object) (runtime.Object, error) {
isi, ok := obj.(*api.ImageStreamImport)
if !ok {
return nil, kapierrors.NewBadRequest(fmt.Sprintf("obj is not an ImageStreamImport: %#v", obj))
}
inputMeta := isi.ObjectMeta
if err := rest.BeforeCreate(r.strategy, ctx, obj); err != nil {
return nil, err
}
// Check if the user is allowed to create Images or ImageStreamMappings.
// In case the user is allowed to create them, do not validate the ImageStreamImport
// registry location against the registry whitelist, but instead allow to create any
// image from any registry.
user, ok := apirequest.UserFrom(ctx)
if !ok {
return nil, kapierrors.NewBadRequest("unable to get user from context")
}
isCreateImage, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user,
&authorizationapi.SubjectAccessReview{
Action: authorizationapi.Action{
Verb: "create",
Group: api.GroupName,
Resource: "images",
},
},
))
if err != nil {
return nil, err
}
isCreateImageStreamMapping, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user,
&authorizationapi.SubjectAccessReview{
Action: authorizationapi.Action{
Verb: "create",
Group: api.GroupName,
Resource: "imagestreammapping",
},
},
))
if err != nil {
return nil, err
}
if !isCreateImage.Allowed && !isCreateImageStreamMapping.Allowed {
if errs := r.strategy.ValidateAllowedRegistries(isi); len(errs) != 0 {
return nil, kapierrors.NewInvalid(api.Kind("ImageStreamImport"), isi.Name, errs)
}
}
namespace, ok := apirequest.NamespaceFrom(ctx)
if !ok {
return nil, kapierrors.NewBadRequest("a namespace must be specified to import images")
}
if r.clientFn != nil {
if client := r.clientFn(); client != nil {
ctx = apirequest.WithValue(ctx, importer.ContextKeyV1RegistryClient, client)
}
}
// only load secrets if we need them
credentials := importer.NewLazyCredentialsForSecrets(func() ([]kapi.Secret, error) {
secrets, err := r.secrets.ImageStreamSecrets(namespace).Secrets(isi.Name, metav1.ListOptions{})
if err != nil {
return nil, err
}
return secrets.Items, nil
})
importCtx := importer.NewContext(r.transport, r.insecureTransport).WithCredentials(credentials)
imports := r.importFn(importCtx)
if err := imports.Import(ctx.(gocontext.Context), isi); err != nil {
return nil, kapierrors.NewInternalError(err)
}
// if we encountered an error loading credentials and any images could not be retrieved with an access
// related error, modify the message.
// TODO: set a status cause
if err := credentials.Err(); err != nil {
for i, image := range isi.Status.Images {
switch image.Status.Reason {
case metav1.StatusReasonUnauthorized, metav1.StatusReasonForbidden:
isi.Status.Images[i].Status.Message = fmt.Sprintf("Unable to load secrets for this image: %v; (%s)", err, image.Status.Message)
}
}
if r := isi.Status.Repository; r != nil {
switch r.Status.Reason {
case metav1.StatusReasonUnauthorized, metav1.StatusReasonForbidden:
r.Status.Message = fmt.Sprintf("Unable to load secrets for this repository: %v; (%s)", err, r.Status.Message)
}
}
}
// TODO: perform the transformation of the image stream and return it with the ISI if import is false
// so that clients can see what the resulting object would look like.
if !isi.Spec.Import {
clearManifests(isi)
return isi, nil
}
create := false
stream, err := r.streams.GetImageStream(ctx, isi.Name, &metav1.GetOptions{})
if err != nil {
if !kapierrors.IsNotFound(err) {
return nil, err
}
// consistency check, stream must exist
if len(inputMeta.ResourceVersion) > 0 || len(inputMeta.UID) > 0 {
return nil, err
}
create = true
stream = &api.ImageStream{
ObjectMeta: metav1.ObjectMeta{
Name: isi.Name,
Namespace: namespace,
Generation: 0,
},
}
} else {
if len(inputMeta.ResourceVersion) > 0 && inputMeta.ResourceVersion != stream.ResourceVersion {
glog.V(4).Infof("DEBUG: mismatch between requested ResourceVersion %s and located ResourceVersion %s", inputMeta.ResourceVersion, stream.ResourceVersion)
return nil, kapierrors.NewConflict(api.Resource("imagestream"), inputMeta.Name, fmt.Errorf("the image stream was updated from %q to %q", inputMeta.ResourceVersion, stream.ResourceVersion))
}
if len(inputMeta.UID) > 0 && inputMeta.UID != stream.UID {
glog.V(4).Infof("DEBUG: mismatch between requested UID %s and located UID %s", inputMeta.UID, stream.UID)
return nil, kapierrors.NewNotFound(api.Resource("imagestream"), inputMeta.Name)
}
}
if stream.Annotations == nil {
stream.Annotations = make(map[string]string)
}
now := metav1.Now()
_, hasAnnotation := stream.Annotations[api.DockerImageRepositoryCheckAnnotation]
nextGeneration := stream.Generation + 1
original, err := kapi.Scheme.DeepCopy(stream)
if err != nil {
return nil, err
}
// walk the retrieved images, ensuring each one exists in etcd
importedImages := make(map[string]error)
updatedImages := make(map[string]*api.Image)
if spec := isi.Spec.Repository; spec != nil {
for i, status := range isi.Status.Repository.Images {
if checkImportFailure(status, stream, status.Tag, nextGeneration, now) {
continue
}
image := status.Image
ref, err := api.ParseDockerImageReference(image.DockerImageReference)
if err != nil {
utilruntime.HandleError(fmt.Errorf("unable to parse image reference during import: %v", err))
continue
}
from, err := api.ParseDockerImageReference(spec.From.Name)
if err != nil {
utilruntime.HandleError(fmt.Errorf("unable to parse from reference during import: %v", err))
continue
}
tag := ref.Tag
if len(status.Tag) > 0 {
tag = status.Tag
}
// we've imported a set of tags, ensure spec tag will point to this for later imports
from.ID, from.Tag = "", tag
if updated, ok := r.importSuccessful(ctx, image, stream, tag, from.Exact(), nextGeneration,
now, spec.ImportPolicy, spec.ReferencePolicy, importedImages, updatedImages); ok {
isi.Status.Repository.Images[i].Image = updated
}
}
}
for i, spec := range isi.Spec.Images {
if spec.To == nil {
continue
}
tag := spec.To.Name
// record a failure condition
status := isi.Status.Images[i]
if checkImportFailure(status, stream, tag, nextGeneration, now) {
// ensure that we have a spec tag set
ensureSpecTag(stream, tag, spec.From.Name, spec.ImportPolicy, spec.ReferencePolicy, false)
continue
}
// record success
image := status.Image
if updated, ok := r.importSuccessful(ctx, image, stream, tag, spec.From.Name, nextGeneration,
now, spec.ImportPolicy, spec.ReferencePolicy, importedImages, updatedImages); ok {
isi.Status.Images[i].Image = updated
}
}
// TODO: should we allow partial failure?
for _, err := range importedImages {
if err != nil {
return nil, err
}
}
clearManifests(isi)
// ensure defaulting is applied by round trip converting
// TODO: convert to using versioned types.
external, err := kapi.Scheme.ConvertToVersion(stream, imageapiv1.SchemeGroupVersion)
if err != nil {
return nil, err
}
kapi.Scheme.Default(external)
internal, err := kapi.Scheme.ConvertToVersion(external, api.SchemeGroupVersion)
if err != nil {
return nil, err
}
stream = internal.(*api.ImageStream)
// if and only if we have changes between the original and the imported stream, trigger
// an import
hasChanges := !kapi.Semantic.DeepEqual(original, stream)
if create {
stream.Annotations[api.DockerImageRepositoryCheckAnnotation] = now.UTC().Format(time.RFC3339)
glog.V(4).Infof("create new stream: %#v", stream)
obj, err = r.internalStreams.Create(ctx, stream)
} else {
if hasAnnotation && !hasChanges {
glog.V(4).Infof("stream did not change: %#v", stream)
obj, err = original.(*api.ImageStream), nil
} else {
if glog.V(4) {
glog.V(4).Infof("updating stream %s", diff.ObjectDiff(original, stream))
}
stream.Annotations[api.DockerImageRepositoryCheckAnnotation] = now.UTC().Format(time.RFC3339)
obj, _, err = r.internalStreams.Update(ctx, stream.Name, rest.DefaultUpdatedObjectInfo(stream, kapi.Scheme))
}
}
if err != nil {
// if we have am admission limit error then record the conditions on the original stream. Quota errors
// will be recorded by the importer.
if quotautil.IsErrorLimitExceeded(err) {
originalStream := original.(*api.ImageStream)
recordLimitExceededStatus(originalStream, stream, err, now, nextGeneration)
var limitErr error
obj, _, limitErr = r.internalStreams.Update(ctx, stream.Name, rest.DefaultUpdatedObjectInfo(originalStream, kapi.Scheme))
if limitErr != nil {
utilruntime.HandleError(fmt.Errorf("failed to record limit exceeded status in image stream %s/%s: %v", stream.Namespace, stream.Name, limitErr))
}
}
return nil, err
}
isi.Status.Import = obj.(*api.ImageStream)
return isi, nil
}
// recordLimitExceededStatus adds the limit err to any new tag.
func recordLimitExceededStatus(originalStream *api.ImageStream, newStream *api.ImageStream, err error, now metav1.Time, nextGeneration int64) {
for tag := range newStream.Status.Tags {
if _, ok := originalStream.Status.Tags[tag]; !ok {
api.SetTagConditions(originalStream, tag, newImportFailedCondition(err, nextGeneration, now))
}
}
}
func checkImportFailure(status api.ImageImportStatus, stream *api.ImageStream, tag string, nextGeneration int64, now metav1.Time) bool {
if status.Image != nil && status.Status.Status == metav1.StatusSuccess {
return false
}
message := status.Status.Message
if len(message) == 0 {
message = "unknown error prevented import"
}
condition := api.TagEventCondition{
Type: api.ImportSuccess,
Status: kapi.ConditionFalse,
Message: message,
Reason: string(status.Status.Reason),
Generation: nextGeneration,
LastTransitionTime: now,
}
if tag == "" {
if len(status.Tag) > 0 {
tag = status.Tag
} else if status.Image != nil {
if ref, err := api.ParseDockerImageReference(status.Image.DockerImageReference); err == nil {
tag = ref.Tag
}
}
}
if !api.HasTagCondition(stream, tag, condition) {
api.SetTagConditions(stream, tag, condition)
if tagRef, ok := stream.Spec.Tags[tag]; ok {
zero := int64(0)
tagRef.Generation = &zero
stream.Spec.Tags[tag] = tagRef
}
}
return true
}
// ensureSpecTag guarantees that the spec tag is set with the provided from, importPolicy and referencePolicy.
// If reset is passed, the tag will be overwritten.
func ensureSpecTag(stream *api.ImageStream, tag, from string, importPolicy api.TagImportPolicy,
referencePolicy api.TagReferencePolicy, reset bool) api.TagReference {
if stream.Spec.Tags == nil {
stream.Spec.Tags = make(map[string]api.TagReference)
}
specTag, ok := stream.Spec.Tags[tag]
if ok && !reset {
return specTag
}
specTag.From = &kapi.ObjectReference{
Kind: "DockerImage",
Name: from,
}
zero := int64(0)
specTag.Generation = &zero
specTag.ImportPolicy = importPolicy<|fim▁hole|> return specTag
}
// importSuccessful records a successful import into an image stream, setting the spec tag, status tag or conditions, and ensuring
// the image is created in etcd. Images are cached so they are not created multiple times in a row (when multiple tags point to the
// same image), and a failure to persist the image will be summarized before we update the stream. If an image was imported by this
// operation, it *replaces* the imported image (from the remote repository) with the updated image.
func (r *REST) importSuccessful(
ctx apirequest.Context,
image *api.Image, stream *api.ImageStream, tag string, from string, nextGeneration int64, now metav1.Time,
importPolicy api.TagImportPolicy, referencePolicy api.TagReferencePolicy,
importedImages map[string]error, updatedImages map[string]*api.Image,
) (*api.Image, bool) {
r.strategy.PrepareImageForCreate(image)
pullSpec, _ := api.MostAccuratePullSpec(image.DockerImageReference, image.Name, "")
tagEvent := api.TagEvent{
Created: now,
DockerImageReference: pullSpec,
Image: image.Name,
Generation: nextGeneration,
}
if stream.Spec.Tags == nil {
stream.Spec.Tags = make(map[string]api.TagReference)
}
// ensure the spec and status tag match the imported image
changed := api.DifferentTagEvent(stream, tag, tagEvent) || api.DifferentTagGeneration(stream, tag)
specTag, ok := stream.Spec.Tags[tag]
if changed || !ok {
specTag = ensureSpecTag(stream, tag, from, importPolicy, referencePolicy, true)
api.AddTagEventToImageStream(stream, tag, tagEvent)
}
// always reset the import policy
specTag.ImportPolicy = importPolicy
stream.Spec.Tags[tag] = specTag
// import or reuse the image, and ensure tag conditions are set
importErr, alreadyImported := importedImages[image.Name]
if importErr != nil {
api.SetTagConditions(stream, tag, newImportFailedCondition(importErr, nextGeneration, now))
} else {
api.SetTagConditions(stream, tag)
}
// create the image if it does not exist, otherwise cache the updated status from the store for use by other tags
if alreadyImported {
if updatedImage, ok := updatedImages[image.Name]; ok {
return updatedImage, true
}
return nil, false
}
updated, err := r.images.Create(ctx, image)
switch {
case kapierrors.IsAlreadyExists(err):
if err := api.ImageWithMetadata(image); err != nil {
glog.V(4).Infof("Unable to update image metadata during image import when image already exists %q: err", image.Name, err)
}
updated = image
fallthrough
case err == nil:
updatedImage := updated.(*api.Image)
updatedImages[image.Name] = updatedImage
//isi.Status.Repository.Images[i].Image = updatedImage
importedImages[image.Name] = nil
return updatedImage, true
default:
importedImages[image.Name] = err
}
return nil, false
}
// clearManifests unsets the manifest for each object that does not request it
func clearManifests(isi *api.ImageStreamImport) {
for i := range isi.Status.Images {
if !isi.Spec.Images[i].IncludeManifest {
if isi.Status.Images[i].Image != nil {
isi.Status.Images[i].Image.DockerImageManifest = ""
isi.Status.Images[i].Image.DockerImageConfig = ""
}
}
}
if isi.Spec.Repository != nil && !isi.Spec.Repository.IncludeManifest {
for i := range isi.Status.Repository.Images {
if isi.Status.Repository.Images[i].Image != nil {
isi.Status.Repository.Images[i].Image.DockerImageManifest = ""
isi.Status.Repository.Images[i].Image.DockerImageConfig = ""
}
}
}
}
func newImportFailedCondition(err error, gen int64, now metav1.Time) api.TagEventCondition {
c := api.TagEventCondition{
Type: api.ImportSuccess,
Status: kapi.ConditionFalse,
Message: err.Error(),
Generation: gen,
LastTransitionTime: now,
}
if status, ok := err.(kapierrors.APIStatus); ok {
s := status.Status()
c.Reason, c.Message = string(s.Reason), s.Message
}
return c
}
func invalidStatus(kind, position string, errs ...*field.Error) metav1.Status {
return kapierrors.NewInvalid(api.Kind(kind), position, errs).ErrStatus
}<|fim▁end|> | specTag.ReferencePolicy = referencePolicy
stream.Spec.Tags[tag] = specTag |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Provides NN for a CUDA backend.
#![allow(missing_docs)]
use ::plugin::*;
use co::prelude::*;
use co::plugin::Error as PluginError;
use cudnn::*;
#[macro_use]
pub mod helper;
lazy_static! {
static ref CUDNN: Cudnn = Cudnn::new().unwrap();
}
pub trait ICudnnDesc<T> {
fn cudnn_tensor_desc(&self) -> Result<TensorDescriptor, PluginError>;
/// Creates a TensorDescriptor similar to `cudnn_tensor_desc`,
/// but will create a fitting 4D tensor if the actual tensor would be 1D-3D.
fn cudnn_tensor_desc_softmax(&self) -> Result<TensorDescriptor, PluginError>;<|fim▁hole|> /// e.g. activation like ReLU.
fn cudnn_tensor_desc_flat(&self) -> Result<TensorDescriptor, PluginError>;
fn cudnn_filter_desc(&self) -> Result<FilterDescriptor, PluginError>;
fn cudnn_convolution_desc(&self, filter: &SharedTensor<T>) -> Result<ConvolutionDescriptor, PluginError>;
}
macro_rules! impl_icudnndesc_for_sharedtensor {
($t:ty, $cutype:path) => (
impl ICudnnDesc<$t> for SharedTensor<$t> {
fn cudnn_tensor_desc(&self) -> Result<TensorDescriptor, PluginError> {
match TensorDescriptor::new(&self.desc().dims_i32().clone(), &self.desc().default_stride_i32().clone(), $cutype) {
Ok(desc) => Ok(desc),
Err(_) => {
Err(PluginError::Plugin("Unable to create CuDNN TensorDescriptor."))
}
}
}
fn cudnn_tensor_desc_softmax(&self) -> Result<TensorDescriptor, PluginError> {
let actual_desc = self.desc().clone();
let override_desc = match actual_desc.len() {
// not batched and single dimension softmax
1 => vec![1, actual_desc[0], 1, 1],
// batched and single dimension softmax
2 => vec![actual_desc[0], actual_desc[1], 1, 1],
// neither batched nor single dimension
3 => vec![1, actual_desc[0], actual_desc[1], actual_desc[2]],
_ => actual_desc
};
match TensorDescriptor::new(&override_desc.dims_i32().clone(),
&override_desc.default_stride_i32().clone(),
$cutype) {
Ok(desc) => Ok(desc),
Err(_) => {
Err(PluginError::Plugin("Unable to create CuDNN TensorDescriptor."))
}
}
}
fn cudnn_tensor_desc_flat(&self) -> Result<TensorDescriptor, PluginError> {
let actual_desc = self.desc().clone();
let mut override_desc = match actual_desc.len() {
1 => vec![1, 1],
2 => vec![1],
_ => vec![]
};
for dim in actual_desc {
override_desc.push(dim);
}
match TensorDescriptor::new(&override_desc.dims_i32().clone(),
&override_desc.default_stride_i32().clone(),
$cutype) {
Ok(desc) => Ok(desc),
Err(_) => {
Err(PluginError::Plugin("Unable to create CuDNN TensorDescriptor."))
}
}
}
fn cudnn_filter_desc(&self) -> Result<FilterDescriptor, PluginError> {
match FilterDescriptor::new(&self.desc().dims_i32().clone(), $cutype) {
Ok(desc) => Ok(desc),
Err(_) => {
Err(PluginError::Plugin("Unable to create CuDNN FilterDescriptor."))
}
}
}
fn cudnn_convolution_desc(&self, filter: &SharedTensor<$t>) -> Result<ConvolutionDescriptor, PluginError> {
match ConvolutionDescriptor::new(&self.desc().dims_i32().clone(), &filter.desc().default_stride_i32().clone(), $cutype) {
Ok(desc) => Ok(desc),
Err(_) => {
Err(PluginError::Plugin("Unable to create CuDNN ConvolutionDescriptor."))
}
}
}
}
)
}
impl_icudnndesc_for_sharedtensor!(f32, ::cudnn::utils::DataType::Float);
impl_icudnndesc_for_sharedtensor!(f64, ::cudnn::utils::DataType::Double);
impl_oconf_for_cc!(f32, f64);
impl_oconf_for_clrn!(f32, f64);
impl_oconf_for_pooling!(f32, f64);
impl ConvForwardAlgo {
/// Tries to return the matching cuDNN type for the enum value.
fn as_cudnn(&self) -> Result<cudnnConvolutionFwdAlgo_t, ::co::error::Error> {
Ok(match *self {
ConvForwardAlgo::Auto => return Err(::co::error::Error::Plugin(::co::plugin::Error::Plugin("Can't create cuDNN convolution forward algorithm from ConvForwardAlgo::Auto. Use `find_cudnn_algo` to find an algorithm."))),
ConvForwardAlgo::GEMM => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
ConvForwardAlgo::ImplicitGEMM => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM,
ConvForwardAlgo::ImplicitPrecompiledGEMM => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM,
ConvForwardAlgo::FFT => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_FFT,
ConvForwardAlgo::FFTTiling => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_FFT_TILING,
ConvForwardAlgo::Direct => ::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_DIRECT,
})
}
/// Returns the matching enum value for a cuDNN algo.
fn from_cudnn(algo: &cudnnConvolutionFwdAlgo_t) -> ConvForwardAlgo {
match *algo {
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_GEMM => ConvForwardAlgo::GEMM,
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM => ConvForwardAlgo::ImplicitGEMM,
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM => ConvForwardAlgo::ImplicitPrecompiledGEMM,
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_FFT => ConvForwardAlgo::FFT,
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_FFT_TILING => ConvForwardAlgo::FFTTiling,
::cudnn::cudnnConvolutionFwdAlgo_t::CUDNN_CONVOLUTION_FWD_ALGO_DIRECT => ConvForwardAlgo::Direct,
}
}
/// Try to find best algorithm for a operation that uses the provided descriptors.
fn find_cudnn_algo(
&self,
filter_desc: &FilterDescriptor,
conv_desc: &ConvolutionDescriptor,
src_desc: &TensorDescriptor,
dest_desc: &TensorDescriptor,
) -> Result<ConvForwardAlgo, ::co::error::Error> {
if !self.is_auto() {
return Ok(*self);
}
let algos = API::find_convolution_forward_algorithm(*CUDNN.id_c(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
let algo = match algos.len() {
0 => return Err(::co::error::Error::Plugin(::co::plugin::Error::Operation("Unable to find CUDA cuDNN convolution forward algorithm."))),
_ => algos[0].algo
};
Ok(ConvForwardAlgo::from_cudnn(&algo))
}
}
impl ConvBackwardFilterAlgo {
/// Tries to return the matching cuDNN type for the enum value.
fn as_cudnn(&self) -> Result<cudnnConvolutionBwdFilterAlgo_t, ::co::error::Error> {
Ok(match *self {
ConvBackwardFilterAlgo::Auto => return Err(::co::error::Error::Plugin(::co::plugin::Error::Plugin("Can't create cuDNN convolution backward filter algorithm from ConvBackwardFilterAlgo::Auto. Use `find_cudnn_algo` to find an algorithm."))),
ConvBackwardFilterAlgo::ImplicitGEMM => ::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_1,
ConvBackwardFilterAlgo::ImplicitGEMMSum => ::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_0,
ConvBackwardFilterAlgo::ImplicitPrecompiledGEMMSum => ::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_3,
ConvBackwardFilterAlgo::FFT => ::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_FFT,
})
}
/// Returns the matching enum value for a cuDNN algo.
fn from_cudnn(algo: &cudnnConvolutionBwdFilterAlgo_t) -> ConvBackwardFilterAlgo {
match *algo {
::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_0 => ConvBackwardFilterAlgo::ImplicitGEMMSum,
::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_1 => ConvBackwardFilterAlgo::ImplicitGEMM,
::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_FFT => ConvBackwardFilterAlgo::FFT,
::cudnn::cudnnConvolutionBwdFilterAlgo_t::CUDNN_CONVOLUTION_BWD_FILTER_ALGO_3 => ConvBackwardFilterAlgo::ImplicitPrecompiledGEMMSum,
}
}
/// Try to find best algorithm for a operation that uses the provided descriptors.
fn find_cudnn_algo(
&self,
filter_desc: &FilterDescriptor,
conv_desc: &ConvolutionDescriptor,
src_desc: &TensorDescriptor,
dest_desc: &TensorDescriptor,
) -> Result<ConvBackwardFilterAlgo, ::co::error::Error> {
if !self.is_auto() {
return Ok(*self);
}
let algos = API::find_convolution_backward_filter_algorithm(*CUDNN.id_c(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
let algo = match algos.len() {
0 => return Err(::co::error::Error::Plugin(::co::plugin::Error::Operation("Unable to find CUDA cuDNN convolution backward filter algorithm."))),
_ => algos[0].algo
};
Ok(ConvBackwardFilterAlgo::from_cudnn(&algo))
}
}
impl ConvBackwardDataAlgo {
/// Tries to return the matching cuDNN type for the enum value.
fn as_cudnn(&self) -> Result<cudnnConvolutionBwdDataAlgo_t, ::co::error::Error> {
Ok(match *self {
ConvBackwardDataAlgo::Auto => return Err(::co::error::Error::Plugin(::co::plugin::Error::Plugin("Can't create cuDNN convolution backward data algorithm from ConvBackwardDataAlgo::Auto. Use `find_cudnn_algo` to find an algorithm."))),
ConvBackwardDataAlgo::ImplicitGEMM => ::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_1,
ConvBackwardDataAlgo::ImplicitGEMMSum => ::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_0,
ConvBackwardDataAlgo::FFT => ::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_FFT,
ConvBackwardDataAlgo::FFTTiling => ::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_FFT_TILING,
})
}
/// Returns the matching enum value for a cuDNN algo.
fn from_cudnn(algo: &cudnnConvolutionBwdDataAlgo_t) -> ConvBackwardDataAlgo {
match *algo {
::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_0 => ConvBackwardDataAlgo::ImplicitGEMMSum,
::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_1 => ConvBackwardDataAlgo::ImplicitGEMM,
::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_FFT => ConvBackwardDataAlgo::FFT,
::cudnn::cudnnConvolutionBwdDataAlgo_t::CUDNN_CONVOLUTION_BWD_DATA_ALGO_FFT_TILING => ConvBackwardDataAlgo::FFTTiling,
}
}
/// Try to find best algorithm for a operation that uses the provided descriptors.
fn find_cudnn_algo(
&self,
filter_desc: &FilterDescriptor,
conv_desc: &ConvolutionDescriptor,
src_desc: &TensorDescriptor,
dest_desc: &TensorDescriptor,
) -> Result<ConvBackwardDataAlgo, ::co::error::Error> {
if !self.is_auto() {
return Ok(*self);
}
let algos = API::find_convolution_backward_data_algorithm(*CUDNN.id_c(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
let algo = match algos.len() {
0 => return Err(::co::error::Error::Plugin(::co::plugin::Error::Operation("Unable to find CUDA cuDNN convolution backward data algorithm."))),
_ => algos[0].algo
};
Ok(ConvBackwardDataAlgo::from_cudnn(&algo))
}
}
macro_rules! impl_convolution_for_cuda_backend {
($t:ty, $cutype:path) => (
impl ConvolutionConfig<$t> for ::cudnn::utils::ConvolutionConfig {
fn workspace_size(&self) -> usize {
*self.largest_workspace_size()
}
}
impl Convolution<$t> for Backend<Cuda> {
fn new_convolution_config(
&self,
src: &::co::tensor::SharedTensor<$t>,
dest: &::co::tensor::SharedTensor<$t>,
filter: &mut ::co::tensor::SharedTensor<$t>,
algo_fwd: ConvForwardAlgo,
algo_bwd_filter: ConvBackwardFilterAlgo,
algo_bwd_data: ConvBackwardDataAlgo,
stride: &[i32],
zero_padding: &[i32],
) -> Result<Self::CC, ::co::error::Error> {
let src_desc = try!(src.cudnn_tensor_desc());
let dest_desc = try!(dest.cudnn_tensor_desc());
let filter_desc = try!(filter.cudnn_filter_desc());
let conv_desc = ::cudnn::ConvolutionDescriptor::new(zero_padding, stride, $cutype).unwrap();
let useable_algo_fwd = try!(algo_fwd.find_cudnn_algo(&filter_desc, &conv_desc, &src_desc, &dest_desc));
let useable_algo_bwd_filter = try!(algo_bwd_filter.find_cudnn_algo(&filter_desc, &conv_desc, &src_desc, &dest_desc));
let useable_algo_bwd_data = try!(algo_bwd_data.find_cudnn_algo(&filter_desc, &conv_desc, &src_desc, &dest_desc));
let mut workspace_size_fwd = API::get_convolution_forward_workspace_size(*CUDNN.id_c(), useable_algo_fwd.as_cudnn().unwrap(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
let mut workspace_size_bwd_filter = API::get_convolution_backward_filter_workspace_size(*CUDNN.id_c(), useable_algo_bwd_filter.as_cudnn().unwrap(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
let mut workspace_size_bwd_data = API::get_convolution_backward_data_workspace_size(*CUDNN.id_c(), useable_algo_bwd_data.as_cudnn().unwrap(), *filter_desc.id_c(), *conv_desc.id_c(), *src_desc.id_c(), *dest_desc.id_c()).unwrap();
if workspace_size_fwd == 0 {
workspace_size_fwd = 8;
}
if workspace_size_bwd_filter == 0 {
workspace_size_bwd_filter = 8;
}
if workspace_size_bwd_data == 0 {
workspace_size_bwd_data = 8;
}
Ok(
::cudnn::utils::ConvolutionConfig::new(
useable_algo_fwd.as_cudnn().unwrap(), workspace_size_fwd,
useable_algo_bwd_filter.as_cudnn().unwrap(), workspace_size_bwd_filter,
useable_algo_bwd_data.as_cudnn().unwrap(), workspace_size_bwd_data,
conv_desc, filter_desc
)
)
}
impl_ops_convolution_for!($t, Backend<Cuda>);
}
)
}
impl NN<f32> for Backend<Cuda> {
type CC = utils::ConvolutionConfig;
type CLRN = utils::NormalizationConfig;
type CPOOL = utils::PoolingConfig;
fn init_nn() { let _ = CUDNN.id_c(); }
fn device(&self) -> &DeviceType { self.device() }
}
impl_convolution_for_cuda_backend!(f32, ::cudnn::utils::DataType::Float);
impl_ops_sigmoid_for!(f32, Backend<Cuda>);
impl_ops_relu_for!(f32, Backend<Cuda>);
impl_ops_tanh_for!(f32, Backend<Cuda>);
impl_ops_softmax_for!(f32, Backend<Cuda>);
impl_ops_log_softmax_for!(f32, Backend<Cuda>);
impl_ops_lrn_for!(f32, Backend<Cuda>);
impl_ops_pooling_for!(f32, Backend<Cuda>);
impl_ops_sigmoid_pointwise_for!(f32, Backend<Cuda>);
impl_ops_relu_pointwise_for!(f32, Backend<Cuda>);
impl_ops_tanh_pointwise_for!(f32, Backend<Cuda>);
impl NN<f64> for Backend<Cuda> {
type CC = utils::ConvolutionConfig;
type CLRN = utils::NormalizationConfig;
type CPOOL = utils::PoolingConfig;
fn init_nn() { let _ = CUDNN.id_c(); }
fn device(&self) -> &DeviceType { self.device() }
}
impl_convolution_for_cuda_backend!(f64, ::cudnn::utils::DataType::Double);
impl_ops_sigmoid_for!(f64, Backend<Cuda>);
impl_ops_relu_for!(f64, Backend<Cuda>);
impl_ops_tanh_for!(f64, Backend<Cuda>);
impl_ops_softmax_for!(f64, Backend<Cuda>);
impl_ops_log_softmax_for!(f64, Backend<Cuda>);
impl_ops_lrn_for!(f64, Backend<Cuda>);
impl_ops_pooling_for!(f64, Backend<Cuda>);
impl_ops_sigmoid_pointwise_for!(f64, Backend<Cuda>);
impl_ops_relu_pointwise_for!(f64, Backend<Cuda>);
impl_ops_tanh_pointwise_for!(f64, Backend<Cuda>);<|fim▁end|> | /// Creates a TensorDescriptor similar to `cudnn_tensor_desc`,
/// but will create a fitting 3D tensor if the actual tensor would be 1D/2D.
///
/// This should be used in operations where the shape doesn't really matter |
<|file_name|>i18n_subsites.py<|end_file_name|><|fim▁begin|>"""i18n_subsites plugin creates i18n-ized subsites of the default site
This plugin is designed for Pelican 3.4 and later
"""
import os
import six
import logging
import posixpath
from copy import copy
from itertools import chain
from operator import attrgetter
from collections import OrderedDict
from contextlib import contextmanager
from six.moves.urllib.parse import urlparse
import gettext
import locale
from pelican import signals
from pelican.generators import ArticlesGenerator, PagesGenerator
from pelican.settings import configure_settings
from pelican.contents import Draft
# Global vars
_MAIN_SETTINGS = None # settings dict of the main Pelican instance<|fim▁hole|>_MAIN_STATIC_FILES = None # list of Static instances the main Pelican instance
_SUBSITE_QUEUE = {} # map: lang -> settings overrides
_SITE_DB = OrderedDict() # OrderedDict: lang -> siteurl
_SITES_RELPATH_DB = {} # map: (lang, base_lang) -> relpath
# map: generator -> list of removed contents that need interlinking
_GENERATOR_DB = {}
_NATIVE_CONTENT_URL_DB = {} # map: source_path -> content in its native lang
_LOGGER = logging.getLogger(__name__)
@contextmanager
def temporary_locale(temp_locale=None):
'''Enable code to run in a context with a temporary locale
Resets the locale back when exiting context.
Can set a temporary locale if provided
'''
orig_locale = locale.setlocale(locale.LC_ALL)
if temp_locale is not None:
locale.setlocale(locale.LC_ALL, temp_locale)
yield
locale.setlocale(locale.LC_ALL, orig_locale)
def initialize_dbs(settings):
'''Initialize internal DBs using the Pelican settings dict
This clears the DBs for e.g. autoreload mode to work
'''
global _MAIN_SETTINGS, _MAIN_SITEURL, _MAIN_LANG, _SUBSITE_QUEUE
_MAIN_SETTINGS = settings
_MAIN_LANG = settings['DEFAULT_LANG']
_MAIN_SITEURL = settings['SITEURL']
_SUBSITE_QUEUE = settings.get('I18N_SUBSITES', {}).copy()
prepare_site_db_and_overrides()
# clear databases in case of autoreload mode
_SITES_RELPATH_DB.clear()
_NATIVE_CONTENT_URL_DB.clear()
_GENERATOR_DB.clear()
def prepare_site_db_and_overrides():
'''Prepare overrides and create _SITE_DB
_SITE_DB.keys() need to be ready for filter_translations
'''
_SITE_DB.clear()
_SITE_DB[_MAIN_LANG] = _MAIN_SITEURL
# make sure it works for both root-relative and absolute
main_siteurl = '/' if _MAIN_SITEURL == '' else _MAIN_SITEURL
for lang, overrides in _SUBSITE_QUEUE.items():
if 'SITEURL' not in overrides:
overrides['SITEURL'] = posixpath.join(main_siteurl, lang)
_SITE_DB[lang] = overrides['SITEURL']
# default subsite hierarchy
if 'OUTPUT_PATH' not in overrides:
overrides['OUTPUT_PATH'] = os.path.join(
_MAIN_SETTINGS['OUTPUT_PATH'], lang)
if 'CACHE_PATH' not in overrides:
overrides['CACHE_PATH'] = os.path.join(
_MAIN_SETTINGS['CACHE_PATH'], lang)
if 'STATIC_PATHS' not in overrides:
overrides['STATIC_PATHS'] = []
if ('THEME' not in overrides and 'THEME_STATIC_DIR' not in overrides and
'THEME_STATIC_PATHS' not in overrides):
relpath = relpath_to_site(lang, _MAIN_LANG)
overrides['THEME_STATIC_DIR'] = posixpath.join(
relpath, _MAIN_SETTINGS['THEME_STATIC_DIR'])
overrides['THEME_STATIC_PATHS'] = []
# to change what is perceived as translations
overrides['DEFAULT_LANG'] = lang
def subscribe_filter_to_signals(settings):
'''Subscribe content filter to requested signals'''
for sig in settings.get('I18N_FILTER_SIGNALS', []):
sig.connect(filter_contents_translations)
def initialize_plugin(pelican_obj):
'''Initialize plugin variables and Pelican settings'''
if _MAIN_SETTINGS is None:
initialize_dbs(pelican_obj.settings)
subscribe_filter_to_signals(pelican_obj.settings)
def get_site_path(url):
'''Get the path component of an url, excludes siteurl
also normalizes '' to '/' for relpath to work,
otherwise it could be interpreted as a relative filesystem path
'''
path = urlparse(url).path
if path == '':
path = '/'
return path
def relpath_to_site(lang, target_lang):
'''Get relative path from siteurl of lang to siteurl of base_lang
the output is cached in _SITES_RELPATH_DB
'''
path = _SITES_RELPATH_DB.get((lang, target_lang), None)
if path is None:
siteurl = _SITE_DB.get(lang, _MAIN_SITEURL)
target_siteurl = _SITE_DB.get(target_lang, _MAIN_SITEURL)
path = posixpath.relpath(get_site_path(target_siteurl),
get_site_path(siteurl))
_SITES_RELPATH_DB[(lang, target_lang)] = path
return path
def save_generator(generator):
'''Save the generator for later use
initialize the removed content list
'''
_GENERATOR_DB[generator] = []
def article2draft(article):
'''Transform an Article to Draft'''
draft = Draft(article._content, article.metadata, article.settings,
article.source_path, article._context)
draft.status = 'draft'
return draft
def page2hidden_page(page):
'''Transform a Page to a hidden Page'''
page.status = 'hidden'
return page
class GeneratorInspector(object):
'''Inspector of generator instances'''
generators_info = {
ArticlesGenerator: {
'translations_lists': ['translations', 'drafts_translations'],
'contents_lists': [('articles', 'drafts')],
'hiding_func': article2draft,
'policy': 'I18N_UNTRANSLATED_ARTICLES',
},
PagesGenerator: {
'translations_lists': ['translations', 'hidden_translations'],
'contents_lists': [('pages', 'hidden_pages')],
'hiding_func': page2hidden_page,
'policy': 'I18N_UNTRANSLATED_PAGES',
},
}
def __init__(self, generator):
'''Identify the best known class of the generator instance
The class '''
self.generator = generator
self.generators_info.update(generator.settings.get(
'I18N_GENERATORS_INFO', {}))
for cls in generator.__class__.__mro__:
if cls in self.generators_info:
self.info = self.generators_info[cls]
break
else:
self.info = {}
def translations_lists(self):
'''Iterator over lists of content translations'''
return (getattr(self.generator, name) for name in
self.info.get('translations_lists', []))
def contents_list_pairs(self):
'''Iterator over pairs of normal and hidden contents'''
return (tuple(getattr(self.generator, name) for name in names)
for names in self.info.get('contents_lists', []))
def hiding_function(self):
'''Function for transforming content to a hidden version'''
hiding_func = self.info.get('hiding_func', lambda x: x)
return hiding_func
def untranslated_policy(self, default):
'''Get the policy for untranslated content'''
return self.generator.settings.get(self.info.get('policy', None),
default)
def all_contents(self):
'''Iterator over all contents'''
translations_iterator = chain(*self.translations_lists())
return chain(translations_iterator,
*(pair[i] for pair in self.contents_list_pairs()
for i in (0, 1)))
def filter_contents_translations(generator):
'''Filter the content and translations lists of a generator
Filters out
1) translations which will be generated in a different site
2) content that is not in the language of the currently
generated site but in that of a different site, content in a
language which has no site is generated always. The filtering
method bay be modified by the respective untranslated policy
'''
inspector = GeneratorInspector(generator)
current_lang = generator.settings['DEFAULT_LANG']
langs_with_sites = _SITE_DB.keys()
removed_contents = _GENERATOR_DB[generator]
for translations in inspector.translations_lists():
for translation in translations[:]: # copy to be able to remove
if translation.lang in langs_with_sites:
translations.remove(translation)
removed_contents.append(translation)
hiding_func = inspector.hiding_function()
untrans_policy = inspector.untranslated_policy(default='hide')
for (contents, other_contents) in inspector.contents_list_pairs():
for content in other_contents: # save any hidden native content first
if content.lang == current_lang: # in native lang
# save the native URL attr formatted in the current locale
_NATIVE_CONTENT_URL_DB[content.source_path] = content.url
for content in contents[:]: # copy for removing in loop
if content.lang == current_lang: # in native lang
# save the native URL attr formatted in the current locale
_NATIVE_CONTENT_URL_DB[content.source_path] = content.url
elif content.lang in langs_with_sites and untrans_policy != 'keep':
contents.remove(content)
if untrans_policy == 'hide':
other_contents.append(hiding_func(content))
elif untrans_policy == 'remove':
removed_contents.append(content)
def install_templates_translations(generator):
'''Install gettext translations in the jinja2.Environment
Only if the 'jinja2.ext.i18n' jinja2 extension is enabled
the translations for the current DEFAULT_LANG are installed.
'''
if 'jinja2.ext.i18n' in generator.settings['JINJA_EXTENSIONS']:
domain = generator.settings.get('I18N_GETTEXT_DOMAIN', 'messages')
localedir = generator.settings.get('I18N_GETTEXT_LOCALEDIR')
if localedir is None:
localedir = os.path.join(generator.theme, 'translations')
current_lang = generator.settings['DEFAULT_LANG']
if current_lang == generator.settings.get('I18N_TEMPLATES_LANG',
_MAIN_LANG):
translations = gettext.NullTranslations()
else:
langs = [current_lang]
try:
translations = gettext.translation(domain, localedir, langs)
except (IOError, OSError):
_LOGGER.error((
"Cannot find translations for language '{}' in '{}' with "
"domain '{}'. Installing NullTranslations.").format(
langs[0], localedir, domain))
translations = gettext.NullTranslations()
newstyle = generator.settings.get('I18N_GETTEXT_NEWSTYLE', True)
generator.env.install_gettext_translations(translations, newstyle)
def add_variables_to_context(generator):
'''Adds useful iterable variables to template context'''
context = generator.context # minimize attr lookup
context['relpath_to_site'] = relpath_to_site
context['main_siteurl'] = _MAIN_SITEURL
context['main_lang'] = _MAIN_LANG
context['lang_siteurls'] = _SITE_DB
current_lang = generator.settings['DEFAULT_LANG']
extra_siteurls = _SITE_DB.copy()
extra_siteurls.pop(current_lang)
context['extra_siteurls'] = extra_siteurls
def interlink_translations(content):
'''Link content to translations in their main language
so the URL (including localized month names) of the different subsites
will be honored
'''
lang = content.lang
# sort translations by lang
content.translations.sort(key=attrgetter('lang'))
for translation in content.translations:
relpath = relpath_to_site(lang, translation.lang)
url = _NATIVE_CONTENT_URL_DB[translation.source_path]
translation.override_url = posixpath.join(relpath, url)
def interlink_translated_content(generator):
'''Make translations link to the native locations
for generators that may contain translated content
'''
inspector = GeneratorInspector(generator)
for content in inspector.all_contents():
interlink_translations(content)
def interlink_removed_content(generator):
'''For all contents removed from generation queue update interlinks
link to the native location
'''
current_lang = generator.settings['DEFAULT_LANG']
for content in _GENERATOR_DB[generator]:
url = _NATIVE_CONTENT_URL_DB[content.source_path]
relpath = relpath_to_site(current_lang, content.lang)
content.override_url = posixpath.join(relpath, url)
def interlink_static_files(generator):
'''Add links to static files in the main site if necessary'''
if generator.settings['STATIC_PATHS'] != []:
return # customized STATIC_PATHS
filenames = generator.context['filenames'] # minimize attr lookup
relpath = relpath_to_site(generator.settings['DEFAULT_LANG'], _MAIN_LANG)
for staticfile in _MAIN_STATIC_FILES:
if staticfile.get_relative_source_path() not in filenames:
staticfile = copy(staticfile) # prevent override in main site
staticfile.override_url = posixpath.join(relpath, staticfile.url)
generator.add_source_path(staticfile)
def save_main_static_files(static_generator):
'''Save the static files generated for the main site'''
global _MAIN_STATIC_FILES
# test just for current lang as settings change in autoreload mode
if static_generator.settings['DEFAULT_LANG'] == _MAIN_LANG:
_MAIN_STATIC_FILES = static_generator.staticfiles
def update_generators():
'''Update the context of all generators
Ads useful variables and translations into the template context
and interlink translations
'''
for generator in _GENERATOR_DB.keys():
install_templates_translations(generator)
add_variables_to_context(generator)
interlink_static_files(generator)
interlink_removed_content(generator)
interlink_translated_content(generator)
def get_pelican_cls(settings):
'''Get the Pelican class requested in settings'''
cls = settings['PELICAN_CLASS']
if isinstance(cls, six.string_types):
module, cls_name = cls.rsplit('.', 1)
module = __import__(module)
cls = getattr(module, cls_name)
return cls
def create_next_subsite(pelican_obj):
'''Create the next subsite using the lang-specific config
If there are no more subsites in the generation queue, update all
the generators (interlink translations and removed content, add
variables and translations to template context). Otherwise get the
language and overrides for next the subsite in the queue and apply
overrides. Then generate the subsite using a PELICAN_CLASS
instance and its run method. Finally, restore the previous locale.
'''
global _MAIN_SETTINGS
if len(_SUBSITE_QUEUE) == 0:
_LOGGER.debug(
'i18n: Updating cross-site links and context of all generators.')
update_generators()
_MAIN_SETTINGS = None # to initialize next time
else:
with temporary_locale():
settings = _MAIN_SETTINGS.copy()
lang, overrides = _SUBSITE_QUEUE.popitem()
settings.update(overrides)
settings = configure_settings(settings) # to set LOCALE, etc.
cls = get_pelican_cls(settings)
new_pelican_obj = cls(settings)
_LOGGER.debug(("Generating i18n subsite for language '{}' "
"using class {}").format(lang, cls))
new_pelican_obj.run()
# map: signal name -> function name
_SIGNAL_HANDLERS_DB = {
'get_generators': initialize_plugin,
'article_generator_pretaxonomy': filter_contents_translations,
'page_generator_finalized': filter_contents_translations,
'get_writer': create_next_subsite,
'static_generator_finalized': save_main_static_files,
'generator_init': save_generator,
}
def register():
'''Register the plugin only if required signals are available'''
for sig_name in _SIGNAL_HANDLERS_DB.keys():
if not hasattr(signals, sig_name):
_LOGGER.error((
'The i18n_subsites plugin requires the {} '
'signal available for sure in Pelican 3.4.0 and later, '
'plugin will not be used.').format(sig_name))
return
for sig_name, handler in _SIGNAL_HANDLERS_DB.items():
sig = getattr(signals, sig_name)
sig.connect(handler)<|fim▁end|> | _MAIN_LANG = None # lang of the main Pelican instance
_MAIN_SITEURL = None # siteurl of the main Pelican instance |
<|file_name|>bpath.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <string>
#include <boost/filesystem.hpp>
int main() {
std::string ps = "/usr/local/dayu/scripts"; <|fim▁hole|> p /= "kk.py";
std::cout << p.string() << std::endl;
std::cout << p.stem() << std::endl;
std::cout << p.parent_path() << std::endl;
std::cout << p.filename() << std::endl;
std::cout << p.extension() << std::endl;
std::cerr << "Something error occurred." << std::endl;
return 0;
}<|fim▁end|> | boost::filesystem::path p(ps); |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#
# This file is part of pysmi software.
#
# Copyright (c) 2015-2020, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysmi/license.html
#
<|fim▁hole|>
class AbstractSearcher(object):
def setOptions(self, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
return self
def fileExists(self, mibname, mtime, rebuild=False):
raise NotImplementedError()<|fim▁end|> | |
<|file_name|>workerglobalscope.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::FunctionBinding::Function;
use dom::bindings::codegen::Bindings::WorkerGlobalScopeBinding::WorkerGlobalScopeMethods;
use dom::bindings::codegen::InheritTypes::DedicatedWorkerGlobalScopeCast;
use dom::bindings::error::{ErrorResult, Fallible, report_pending_exception};
use dom::bindings::error::Error::{Syntax, Network, JSFailed};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root, MutNullableHeap};
use dom::bindings::utils::Reflectable;
use dom::console::Console;
use dom::crypto::Crypto;
use dom::dedicatedworkerglobalscope::DedicatedWorkerGlobalScopeHelpers;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::workerlocation::WorkerLocation;
use dom::workernavigator::WorkerNavigator;
use dom::window::{base64_atob, base64_btoa};
use script_task::{CommonScriptMsg, ScriptChan, TimerSource, ScriptPort};
use timers::{IsInterval, TimerId, TimerManager, TimerCallback};
use devtools_traits::{ScriptToDevtoolsControlMsg, DevtoolScriptControlMsg};
use msg::constellation_msg::{ConstellationChan, PipelineId, WorkerId};
use profile_traits::mem;
use net_traits::{load_whole_resource, ResourceTask};
use util::str::DOMString;
use ipc_channel::ipc::IpcSender;
use js::jsapi::{JSContext, HandleValue, JSAutoRequest};
use js::rust::Runtime;
use url::{Url, UrlParser};
use std::default::Default;
use std::cell::Cell;
use std::rc::Rc;
use std::sync::mpsc::Receiver;
#[derive(JSTraceable, Copy, Clone, PartialEq, HeapSizeOf)]
pub enum WorkerGlobalScopeTypeId {
DedicatedGlobalScope,
}<|fim▁hole|> pub devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
pub devtools_sender: Option<IpcSender<DevtoolScriptControlMsg>>,
pub constellation_chan: ConstellationChan,
pub worker_id: WorkerId,
}
// https://html.spec.whatwg.org/multipage/#the-workerglobalscope-common-interface
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct WorkerGlobalScope {
eventtarget: EventTarget,
worker_id: WorkerId,
worker_url: Url,
#[ignore_heap_size_of = "Defined in std"]
runtime: Rc<Runtime>,
next_worker_id: Cell<WorkerId>,
#[ignore_heap_size_of = "Defined in std"]
resource_task: ResourceTask,
location: MutNullableHeap<JS<WorkerLocation>>,
navigator: MutNullableHeap<JS<WorkerNavigator>>,
console: MutNullableHeap<JS<Console>>,
crypto: MutNullableHeap<JS<Crypto>>,
timers: TimerManager,
#[ignore_heap_size_of = "Defined in std"]
mem_profiler_chan: mem::ProfilerChan,
#[ignore_heap_size_of = "Defined in ipc-channel"]
devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
/// Optional `IpcSender` for sending the `DevtoolScriptControlMsg`
/// to the server from within the worker
devtools_sender: Option<IpcSender<DevtoolScriptControlMsg>>,
#[ignore_heap_size_of = "Defined in std"]
/// This `Receiver` will be ignored later if the corresponding
/// `IpcSender` doesn't exist
devtools_receiver: Receiver<DevtoolScriptControlMsg>,
/// A flag to indicate whether the developer tools has requested live updates
/// from the worker
devtools_wants_updates: Cell<bool>,
#[ignore_heap_size_of = "Defined in std"]
constellation_chan: ConstellationChan,
}
impl WorkerGlobalScope {
    /// Builds the shared state for a worker global scope. Called by a concrete
    /// scope type (e.g. a dedicated worker) which supplies the JS runtime and
    /// the devtools receiver paired with `init.devtools_sender`.
    pub fn new_inherited(type_id: WorkerGlobalScopeTypeId,
                         init: WorkerGlobalScopeInit,
                         worker_url: Url,
                         runtime: Rc<Runtime>,
                         devtools_receiver: Receiver<DevtoolScriptControlMsg>)
                         -> WorkerGlobalScope {
        WorkerGlobalScope {
            eventtarget: EventTarget::new_inherited(EventTargetTypeId::WorkerGlobalScope(type_id)),
            // Counter for ids handed to nested workers spawned from this scope.
            next_worker_id: Cell::new(WorkerId(0)),
            worker_id: init.worker_id,
            worker_url: worker_url,
            runtime: runtime,
            resource_task: init.resource_task,
            // Lazily-created DOM objects; see the corresponding getters below.
            location: Default::default(),
            navigator: Default::default(),
            console: Default::default(),
            crypto: Default::default(),
            timers: TimerManager::new(),
            mem_profiler_chan: init.mem_profiler_chan,
            devtools_chan: init.devtools_chan,
            devtools_sender: init.devtools_sender,
            devtools_receiver: devtools_receiver,
            // Live updates are off until devtools explicitly requests them.
            devtools_wants_updates: Cell::new(false),
            constellation_chan: init.constellation_chan,
        }
    }

    /// Returns a clone of the memory profiler channel.
    pub fn mem_profiler_chan(&self) -> mem::ProfilerChan {
        self.mem_profiler_chan.clone()
    }

    /// Channel for sending script-to-devtools notifications, if devtools is attached.
    pub fn devtools_chan(&self) -> Option<IpcSender<ScriptToDevtoolsControlMsg>> {
        self.devtools_chan.clone()
    }

    /// Sender half matching `devtools_port()`, if devtools is attached.
    pub fn devtools_sender(&self) -> Option<IpcSender<DevtoolScriptControlMsg>> {
        self.devtools_sender.clone()
    }

    /// Receiver for devtools control messages addressed to this worker.
    pub fn devtools_port(&self) -> &Receiver<DevtoolScriptControlMsg> {
        &self.devtools_receiver
    }

    /// Returns a clone of the constellation channel.
    pub fn constellation_chan(&self) -> ConstellationChan {
        self.constellation_chan.clone()
    }

    /// Raw JS context of this worker's runtime.
    pub fn get_cx(&self) -> *mut JSContext {
        self.runtime.cx()
    }

    /// Task used for fetching resources (e.g. `importScripts`).
    pub fn resource_task<'a>(&'a self) -> &'a ResourceTask {
        &self.resource_task
    }

    /// URL the worker script was loaded from; also the base for relative URLs.
    pub fn get_url<'a>(&'a self) -> &'a Url {
        &self.worker_url
    }

    /// Id assigned to this worker by its creator.
    pub fn get_worker_id(&self) -> WorkerId {
        self.worker_id.clone()
    }

    /// Hands out a fresh id for a nested worker and advances the counter.
    pub fn get_next_worker_id(&self) -> WorkerId {
        let worker_id = self.next_worker_id.get();
        let WorkerId(id_num) = worker_id;
        self.next_worker_id.set(WorkerId(id_num + 1));
        worker_id
    }
}
/// WebIDL-generated interface methods for `WorkerGlobalScope`.
impl<'a> WorkerGlobalScopeMethods for &'a WorkerGlobalScope {
    // https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-self
    fn Self_(self) -> Root<WorkerGlobalScope> {
        Root::from_ref(self)
    }

    // https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-location
    fn Location(self) -> Root<WorkerLocation> {
        // Lazily created on first access.
        self.location.or_init(|| {
            WorkerLocation::new(self, self.worker_url.clone())
        })
    }

    // https://html.spec.whatwg.org/multipage/#dom-workerglobalscope-importscripts
    fn ImportScripts(self, url_strings: Vec<DOMString>) -> ErrorResult {
        // Per spec: first resolve every URL (failing fast with a SyntaxError),
        // then fetch and evaluate them in order.
        let mut urls = Vec::with_capacity(url_strings.len());
        for url in url_strings {
            let url = UrlParser::new().base_url(&self.worker_url)
                                      .parse(&url);
            match url {
                Ok(url) => urls.push(url),
                Err(_) => return Err(Syntax),
            };
        }

        for url in urls {
            // Synchronously fetch the whole script; any fetch failure is a
            // NetworkError.
            let (url, source) = match load_whole_resource(&self.resource_task, url) {
                Err(_) => return Err(Network),
                Ok((metadata, bytes)) => {
                    (metadata.final_url, String::from_utf8(bytes).unwrap())
                }
            };

            match self.runtime.evaluate_script(
                self.reflector().get_jsobject(), source, url.serialize(), 1) {
                Ok(_) => (),
                Err(_) => {
                    println!("evaluate_script failed");
                    return Err(JSFailed);
                }
            }
        }

        Ok(())
    }

    // https://html.spec.whatwg.org/multipage/#dom-worker-navigator
    fn Navigator(self) -> Root<WorkerNavigator> {
        self.navigator.or_init(|| WorkerNavigator::new(self))
    }

    // https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/console
    fn Console(self) -> Root<Console> {
        self.console.or_init(|| Console::new(GlobalRef::Worker(self)))
    }

    // https://html.spec.whatwg.org/multipage/#dfn-Crypto
    fn Crypto(self) -> Root<Crypto> {
        self.crypto.or_init(|| Crypto::new(GlobalRef::Worker(self)))
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowbase64-btoa
    fn Btoa(self, btoa: DOMString) -> Fallible<DOMString> {
        base64_btoa(btoa)
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowbase64-atob
    fn Atob(self, atob: DOMString) -> Fallible<DOMString> {
        base64_atob(atob)
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-settimeout
    fn SetTimeout(self, _cx: *mut JSContext, callback: Rc<Function>, timeout: i32, args: Vec<HandleValue>) -> i32 {
        self.timers.set_timeout_or_interval(TimerCallback::FunctionTimerCallback(callback),
                                            args,
                                            timeout,
                                            IsInterval::NonInterval,
                                            TimerSource::FromWorker,
                                            self.script_chan())
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-settimeout
    // (string-callback overload)
    fn SetTimeout_(self, _cx: *mut JSContext, callback: DOMString, timeout: i32, args: Vec<HandleValue>) -> i32 {
        self.timers.set_timeout_or_interval(TimerCallback::StringTimerCallback(callback),
                                            args,
                                            timeout,
                                            IsInterval::NonInterval,
                                            TimerSource::FromWorker,
                                            self.script_chan())
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-cleartimeout
    fn ClearTimeout(self, handle: i32) {
        self.timers.clear_timeout_or_interval(handle);
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
    fn SetInterval(self, _cx: *mut JSContext, callback: Rc<Function>, timeout: i32, args: Vec<HandleValue>) -> i32 {
        self.timers.set_timeout_or_interval(TimerCallback::FunctionTimerCallback(callback),
                                            args,
                                            timeout,
                                            IsInterval::Interval,
                                            TimerSource::FromWorker,
                                            self.script_chan())
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-setinterval
    // (string-callback overload)
    fn SetInterval_(self, _cx: *mut JSContext, callback: DOMString, timeout: i32, args: Vec<HandleValue>) -> i32 {
        self.timers.set_timeout_or_interval(TimerCallback::StringTimerCallback(callback),
                                            args,
                                            timeout,
                                            IsInterval::Interval,
                                            TimerSource::FromWorker,
                                            self.script_chan())
    }

    // https://html.spec.whatwg.org/multipage/#dom-windowtimers-clearinterval
    // Timeouts and intervals share one handle namespace, so this delegates.
    fn ClearInterval(self, handle: i32) {
        self.ClearTimeout(handle);
    }
}
/// Non-WebIDL helpers available on any worker global scope. Several of these
/// delegate to the concrete scope type (see the impl below).
pub trait WorkerGlobalScopeHelpers {
    /// Evaluates `source` in this worker's JS runtime.
    fn execute_script(self, source: DOMString);
    /// Fires the timer identified by `timer_id`.
    fn handle_fire_timer(self, timer_id: TimerId);
    /// Channel for sending messages to this worker's event loop.
    fn script_chan(self) -> Box<ScriptChan + Send>;
    /// Pipeline this worker belongs to.
    fn pipeline(self) -> PipelineId;
    /// Creates a fresh (sender, receiver) pair for this worker's event loop.
    fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>);
    /// Processes a single script message.
    fn process_event(self, msg: CommonScriptMsg);
    /// Raw JS context of this worker's runtime.
    fn get_cx(self) -> *mut JSContext;
    /// Toggles whether devtools receives live updates from this worker.
    fn set_devtools_wants_updates(self, value: bool);
}
impl<'a> WorkerGlobalScopeHelpers for &'a WorkerGlobalScope {
    fn execute_script(self, source: DOMString) {
        match self.runtime.evaluate_script(
            self.reflector().get_jsobject(), source, self.worker_url.serialize(), 1) {
            Ok(_) => (),
            Err(_) => {
                // TODO: An error needs to be dispatched to the parent.
                // https://github.com/servo/servo/issues/6422
                println!("evaluate_script failed");
                // Report the pending JS exception; entering the JS engine
                // requires an active request on the context.
                let _ar = JSAutoRequest::new(self.runtime.cx());
                report_pending_exception(self.runtime.cx(), self.reflector().get_jsobject().get());
            }
        }
    }

    // The four methods below downcast to DedicatedWorkerGlobalScope and panic
    // otherwise: SharedWorker support is not implemented yet.
    fn script_chan(self) -> Box<ScriptChan + Send> {
        let dedicated =
            DedicatedWorkerGlobalScopeCast::to_ref(self);
        match dedicated {
            Some(dedicated) => dedicated.script_chan(),
            None => panic!("need to implement a sender for SharedWorker"),
        }
    }

    fn pipeline(self) -> PipelineId {
        let dedicated =
            DedicatedWorkerGlobalScopeCast::to_ref(self);
        match dedicated {
            Some(dedicated) => dedicated.pipeline(),
            None => panic!("need to add a pipeline for SharedWorker"),
        }
    }

    fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
        let dedicated =
            DedicatedWorkerGlobalScopeCast::to_ref(self);
        match dedicated {
            Some(dedicated) => dedicated.new_script_pair(),
            None => panic!("need to implement creating isolated event loops for SharedWorker"),
        }
    }

    fn process_event(self, msg: CommonScriptMsg) {
        let dedicated =
            DedicatedWorkerGlobalScopeCast::to_ref(self);
        match dedicated {
            Some(dedicated) => dedicated.process_event(msg),
            None => panic!("need to implement processing single events for SharedWorker"),
        }
    }

    fn handle_fire_timer(self, timer_id: TimerId) {
        self.timers.fire_timer(timer_id, self);
    }

    fn get_cx(self) -> *mut JSContext {
        self.runtime.cx()
    }

    fn set_devtools_wants_updates(self, value: bool) {
        self.devtools_wants_updates.set(value);
    }
}
pub struct WorkerGlobalScopeInit {
pub resource_task: ResourceTask,
pub mem_profiler_chan: mem::ProfilerChan, |
<|file_name|>discovery.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs
A client library for Google's discovery based APIs.
"""
__all__ = [
'build',
'build_from_document'
'fix_method_name',
'key2param'
]
import copy
import httplib2
import logging
import mimetypes
import os
import random
import re
import urllib
import urlparse

import mimeparse
import uritemplate

try:
  from urlparse import parse_qsl
except ImportError:
  from cgi import parse_qsl

from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownFileType
from apiclient.errors import UnknownLinkType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import MediaModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from oauth2client.anyjson import simplejson
logger = logging.getLogger(__name__)

# Matches one URI Template expression, e.g. '{userId}'.
URITEMPLATE = re.compile('{[^}]*}')
# Matches a variable name inside a URI Template expression.
VARNAME = re.compile('[a-zA-Z0-9_-]+')
# URI Template for the discovery document of any Google API.
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
                 '{api}/{apiVersion}/rest')
# Fallback docstring for generated methods with no 'description'.
DEFAULT_METHOD_DOC = 'A description of how to use this function'

# Parameters accepted by the stack, but not visible via discovery.
STACK_QUERY_PARAMETERS = ['trace', 'pp', 'userip', 'strict']

# Python reserved words.
RESERVED_WORDS = ['and', 'assert', 'break', 'class', 'continue', 'def', 'del',
                  'elif', 'else', 'except', 'exec', 'finally', 'for', 'from',
                  'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or',
                  'pass', 'print', 'raise', 'return', 'try', 'while' ]
def fix_method_name(name):
  """Fix method names to avoid reserved word conflicts.

  Args:
    name: string, method name.

  Returns:
    The name with a '_' appended if the name is a Python reserved word,
    otherwise the name unchanged.
  """
  return name + '_' if name in RESERVED_WORDS else name
def _add_query_parameter(url, name, value):
  """Adds a query parameter to a url.

  Replaces the current value if it already exists in the URL.

  Args:
    url: string, url to add the query parameter to.
    name: string, query parameter name.
    value: string, query parameter value.

  Returns:
    Updated query parameter. Does not update the url if value is None.
  """
  if value is None:
    return url
  else:
    # Split the URL, rewrite its query component, and reassemble it.
    parsed = list(urlparse.urlparse(url))
    # Note: dict() collapses repeated query keys, keeping only one value each.
    q = dict(parse_qsl(parsed[4]))
    q[name] = value
    parsed[4] = urllib.urlencode(q)
    return urlparse.urlunparse(parsed)
def key2param(key):
  """Converts key names into parameter names.

  For example, converting "max-results" -> "max_results".

  Args:
    key: string, the method key name.

  Returns:
    A safe method name based on the key name.
  """
  # Prefix with 'x' when the key does not start with a letter, then map every
  # non-alphanumeric character to an underscore.
  prefix = '' if key[0].isalpha() else 'x'
  return prefix + ''.join(c if c.isalnum() else '_' for c in key)
def build(serviceName,
          version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest):
  """Construct a Resource for interacting with an API.

  Construct a Resource object for interacting with an API. The serviceName and
  version are the names from the Discovery service.

  Args:
    serviceName: string, name of the service.
    version: string, the version of the service.
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    discoveryServiceUrl: string, a URI Template that points to the location of
      the discovery service. It should have two parameters {api} and
      {apiVersion} that when filled in produce an absolute URI to the discovery
      document for that service.
    developerKey: string, key obtained from
      https://code.google.com/apis/console.
    model: apiclient.Model, converts to and from the wire format.
    requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
      request.

  Returns:
    A Resource object with methods for interacting with the service.

  Raises:
    UnknownApiNameOrVersion: if the discovery service returns 404 for the
      requested service/version.
    HttpError: on any other HTTP error from the discovery service.
    InvalidJsonError: if the discovery document is not valid JSON.
  """
  params = {
      'api': serviceName,
      'apiVersion': version
      }

  if http is None:
    http = httplib2.Http()

  requested_url = uritemplate.expand(discoveryServiceUrl, params)

  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
  # variable that contains the network address of the client sending the
  # request. If it exists then add that to the request for the discovery
  # document to avoid exceeding the quota on discovery requests.
  if 'REMOTE_ADDR' in os.environ:
    requested_url = _add_query_parameter(requested_url, 'userIp',
                                         os.environ['REMOTE_ADDR'])
  logger.info('URL being requested: %s' % requested_url)

  resp, content = http.request(requested_url)

  # 404 specifically means the service/version pair is unknown.
  if resp.status == 404:
    raise UnknownApiNameOrVersion("name: %s  version: %s" % (serviceName,
                                                            version))
  if resp.status >= 400:
    raise HttpError(resp, content, requested_url)

  # Parse only to validate; the raw JSON string is what gets passed on.
  try:
    service = simplejson.loads(content)
  except ValueError, e:
    logger.error('Failed to parse as JSON: ' + content)
    raise InvalidJsonError()

  return build_from_document(content, discoveryServiceUrl, http=http,
      developerKey=developerKey, model=model, requestBuilder=requestBuilder)
def build_from_document(
    service,
    base,
    future=None,
    http=None,
    developerKey=None,
    model=None,
    requestBuilder=HttpRequest):
  """Create a Resource for interacting with an API.

  Same as `build()`, but constructs the Resource object from a discovery
  document that is it given, as opposed to retrieving one over HTTP.

  Args:
    service: string, discovery document (JSON text, not a parsed dict).
    base: string, base URI for all HTTP requests, usually the discovery URI.
    future: string, discovery document with future capabilities (deprecated).
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    developerKey: string, Key for controlling API usage, generated
      from the API Console.
    model: Model class instance that serializes and de-serializes requests and
      responses.
    requestBuilder: Takes an http request and packages it up to be executed.

  Returns:
    A Resource object with methods for interacting with the service.
  """

  # future is no longer used.
  future = {}

  service = simplejson.loads(service)
  # Resolve the service's basePath against the discovery URI.
  base = urlparse.urljoin(base, service['basePath'])
  schema = Schemas(service)

  if model is None:
    # 'dataWrapper' means responses are wrapped in a {'data': ...} envelope.
    features = service.get('features', [])
    model = JsonModel('dataWrapper' in features)
  # The top-level discovery document is itself treated as a resource
  # description, hence service is passed as both resourceDesc and rootDesc.
  resource = _createResource(http, base, model, requestBuilder, developerKey,
                             service, service, schema)

  return resource
def _cast(value, schema_type):
  """Convert value to a string based on JSON Schema type.

  See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
  JSON Schema.

  Args:
    value: any, the value to convert
    schema_type: string, the type that value should be interpreted as

  Returns:
    A string representation of 'value' based on the schema_type.
  """
  if schema_type == 'string':
    # Exact type checks against str ('') and unicode (u'') — Python 2
    # semantics; deliberately not isinstance, so subclasses fall through
    # to str().
    if type(value) == type('') or type(value) == type(u''):
      return value
    else:
      return str(value)
  elif schema_type == 'integer':
    return str(int(value))
  elif schema_type == 'number':
    return str(float(value))
  elif schema_type == 'boolean':
    # JSON booleans are lowercase 'true'/'false'.
    return str(bool(value)).lower()
  else:
    # Unknown schema types: pass strings through, stringify everything else.
    if type(value) == type('') or type(value) == type(u''):
      return value
    else:
      return str(value)
MULTIPLIERS = {
"KB": 2 ** 10,
"MB": 2 ** 20,
"GB": 2 ** 30,
"TB": 2 ** 40,
}
def _media_size_to_long(maxSize):
"""Convert a string media size, such as 10GB or 3TB into an integer.
Args:
maxSize: string, size as a string, such as 2MB or 7GB.
Returns:
The size as an integer value.
"""
if len(maxSize) < 2:
return 0
units = maxSize[-2:].upper()
multiplier = MULTIPLIERS.get(units, 0)
if multiplier:
return int(maxSize[:-2]) * multiplier
else:
return int(maxSize)
def _createResource(http, baseUrl, model, requestBuilder,
                    developerKey, resourceDesc, rootDesc, schema):
  """Build a Resource from the API description.

  Args:
    http: httplib2.Http, Object to make http requests with.
    baseUrl: string, base URL for the API. All requests are relative to this
        URI.
    model: apiclient.Model, converts to and from the wire format.
    requestBuilder: class or callable that instantiates an
        apiclient.HttpRequest object.
    developerKey: string, key obtained from
        https://code.google.com/apis/console
    resourceDesc: object, section of deserialized discovery document that
        describes a resource. Note that the top level discovery document
        is considered a resource.
    rootDesc: object, the entire deserialized discovery document.
    schema: object, mapping of schema names to schema descriptions.

  Returns:
    An instance of Resource with all the methods attached for interacting with
    that resource.
  """

  class Resource(object):
    """A class for interacting with a resource."""

    def __init__(self):
      self._http = http
      self._baseUrl = baseUrl
      self._model = model
      self._developerKey = developerKey
      self._requestBuilder = requestBuilder

  def createMethod(theclass, methodName, methodDesc, rootDesc):
    """Creates a method for attaching to a Resource.

    Args:
      theclass: type, the class to attach methods to.
      methodName: string, name of the method to use.
      methodDesc: object, fragment of deserialized discovery document that
        describes the method.
      rootDesc: object, the entire deserialized discovery document.
    """
    methodName = fix_method_name(methodName)
    pathUrl = methodDesc['path']
    httpMethod = methodDesc['httpMethod']
    methodId = methodDesc['id']

    mediaPathUrl = None
    accept = []
    maxSize = 0
    if 'mediaUpload' in methodDesc:
      mediaUpload = methodDesc['mediaUpload']
      # TODO(user) Use URLs from discovery once it is updated.
      parsed = list(urlparse.urlparse(baseUrl))
      basePath = parsed[2]
      mediaPathUrl = '/upload' + basePath + pathUrl
      accept = mediaUpload['accept']
      maxSize = _media_size_to_long(mediaUpload.get('maxSize', ''))

    if 'parameters' not in methodDesc:
      methodDesc['parameters'] = {}

    # Add in the parameters common to all methods.
    for name, desc in rootDesc.get('parameters', {}).iteritems():
      methodDesc['parameters'][name] = desc

    # Add in undocumented query parameters.
    for name in STACK_QUERY_PARAMETERS:
      methodDesc['parameters'][name] = {
          'type': 'string',
          'location': 'query'
          }

    # Synthesize a 'body' parameter for request-bearing verbs, merging in the
    # schema reference from the 'request' description when present.
    if httpMethod in ['PUT', 'POST', 'PATCH'] and 'request' in methodDesc:
      methodDesc['parameters']['body'] = {
          'description': 'The request body.',
          'type': 'object',
          'required': True,
          }
      if 'request' in methodDesc:
        methodDesc['parameters']['body'].update(methodDesc['request'])
      else:
        methodDesc['parameters']['body']['type'] = 'object'
    if 'mediaUpload' in methodDesc:
      methodDesc['parameters']['media_body'] = {
          'description': 'The filename of the media request body.',
          'type': 'string',
          'required': False,
          }
      # The body becomes optional when a media upload can replace it.
      if 'body' in methodDesc['parameters']:
        methodDesc['parameters']['body']['required'] = False

    argmap = {} # Map from method parameter name to query parameter name
    required_params = [] # Required parameters
    repeated_params = [] # Repeated parameters
    pattern_params = {}  # Parameters that must match a regex
    query_params = [] # Parameters that will be used in the query string
    path_params = {} # Parameters that will be used in the base URL
    param_type = {} # The type of the parameter
    enum_params = {} # Allowable enumeration values for each parameter

    if 'parameters' in methodDesc:
      for arg, desc in methodDesc['parameters'].iteritems():
        param = key2param(arg)
        argmap[param] = arg

        if desc.get('pattern', ''):
          pattern_params[param] = desc['pattern']
        if desc.get('enum', ''):
          enum_params[param] = desc['enum']
        if desc.get('required', False):
          required_params.append(param)
        if desc.get('repeated', False):
          repeated_params.append(param)
        if desc.get('location') == 'query':
          query_params.append(param)
        if desc.get('location') == 'path':
          path_params[param] = param
        param_type[param] = desc.get('type', 'string')

    # Every variable mentioned in the URI Template is a path parameter, even
    # if discovery listed it as a query parameter.
    for match in URITEMPLATE.finditer(pathUrl):
      for namematch in VARNAME.finditer(match.group(0)):
        name = key2param(namematch.group(0))
        path_params[name] = name
        if name in query_params:
          query_params.remove(name)

    def method(self, **kwargs):
      # Don't bother with doc string, it will be over-written by createMethod.

      for name in kwargs.iterkeys():
        if name not in argmap:
          raise TypeError('Got an unexpected keyword argument "%s"' % name)

      # Remove args that have a value of None.
      keys = kwargs.keys()
      for name in keys:
        if kwargs[name] is None:
          del kwargs[name]

      for name in required_params:
        if name not in kwargs:
          raise TypeError('Missing required parameter "%s"' % name)

      for name, regex in pattern_params.iteritems():
        if name in kwargs:
          if isinstance(kwargs[name], basestring):
            pvalues = [kwargs[name]]
          else:
            pvalues = kwargs[name]
          for pvalue in pvalues:
            if re.match(regex, pvalue) is None:
              raise TypeError(
                  'Parameter "%s" value "%s" does not match the pattern "%s"' %
                  (name, pvalue, regex))

      for name, enums in enum_params.iteritems():
        if name in kwargs:
          # We need to handle the case of a repeated enum
          # name differently, since we want to handle both
          # arg='value' and arg=['value1', 'value2']
          if (name in repeated_params and
              not isinstance(kwargs[name], basestring)):
            values = kwargs[name]
          else:
            values = [kwargs[name]]
          for value in values:
            if value not in enums:
              raise TypeError(
                  'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                  (name, value, str(enums)))

      actual_query_params = {}
      actual_path_params = {}
      for key, value in kwargs.iteritems():
        to_type = param_type.get(key, 'string')
        # For repeated parameters we cast each member of the list.
        if key in repeated_params and type(value) == type([]):
          cast_value = [_cast(x, to_type) for x in value]
        else:
          cast_value = _cast(value, to_type)
        if key in query_params:
          actual_query_params[argmap[key]] = cast_value
        if key in path_params:
          actual_path_params[argmap[key]] = cast_value
      body_value = kwargs.get('body', None)
      media_filename = kwargs.get('media_body', None)

      if self._developerKey:
        actual_query_params['key'] = self._developerKey

      model = self._model
      # If there is no schema for the response then presume a binary blob.
      if methodName.endswith('_media'):
        model = MediaModel()
      elif 'response' not in methodDesc:
        model = RawModel()

      headers = {}
      headers, params, query, body = model.request(headers,
          actual_path_params, actual_query_params, body_value)

      expanded_url = uritemplate.expand(pathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)

      resumable = None
      multipart_boundary = ''

      if media_filename:
        # Ensure we end up with a valid MediaUpload object.
        if isinstance(media_filename, basestring):
          (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
          if media_mime_type is None:
            # NOTE(review): UnknownFileType must be importable from
            # apiclient.errors for this raise to work (see module imports).
            raise UnknownFileType(media_filename)
          if not mimeparse.best_match([media_mime_type], ','.join(accept)):
            raise UnacceptableMimeTypeError(media_mime_type)
          media_upload = MediaFileUpload(media_filename, media_mime_type)
        elif isinstance(media_filename, MediaUpload):
          media_upload = media_filename
        else:
          raise TypeError('media_filename must be str or MediaUpload.')

        # Check the maxSize
        if maxSize > 0 and media_upload.size() > maxSize:
          raise MediaUploadSizeError("Media larger than: %s" % maxSize)

        # Use the media path uri for media uploads
        expanded_url = uritemplate.expand(mediaPathUrl, params)
        url = urlparse.urljoin(self._baseUrl, expanded_url + query)
        if media_upload.resumable():
          url = _add_query_parameter(url, 'uploadType', 'resumable')

        if media_upload.resumable():
          # This is all we need to do for resumable, if the body exists it gets
          # sent in the first request, otherwise an empty body is sent.
          resumable = media_upload
        else:
          # A non-resumable upload
          if body is None:
            # This is a simple media upload
            headers['content-type'] = media_upload.mimetype()
            body = media_upload.getbytes(0, media_upload.size())
            url = _add_query_parameter(url, 'uploadType', 'media')
          else:
            # This is a multipart/related upload.
            msgRoot = MIMEMultipart('related')
            # msgRoot should not write out its own headers
            setattr(msgRoot, '_write_headers', lambda self: None)

            # attach the body as one part
            msg = MIMENonMultipart(*headers['content-type'].split('/'))
            msg.set_payload(body)
            msgRoot.attach(msg)

            # attach the media as the second part
            msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
            msg['Content-Transfer-Encoding'] = 'binary'
            payload = media_upload.getbytes(0, media_upload.size())
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()

            multipart_boundary = msgRoot.get_boundary()
            headers['content-type'] = ('multipart/related; '
                                       'boundary="%s"') % multipart_boundary
            url = _add_query_parameter(url, 'uploadType', 'multipart')

      logger.info('URL being requested: %s' % url)
      return self._requestBuilder(self._http,
                                  model.response,
                                  url,
                                  method=httpMethod,
                                  body=body,
                                  headers=headers,
                                  methodId=methodId,
                                  resumable=resumable)

    docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
    if len(argmap) > 0:
      docs.append('Args:\n')

    # Skip undocumented params and params common to all methods.
    skip_parameters = rootDesc.get('parameters', {}).keys()
    # Bug fix: this previously used append(), which added the whole list as a
    # single element so the stack parameters were never actually skipped.
    skip_parameters.extend(STACK_QUERY_PARAMETERS)

    for arg in argmap.iterkeys():
      if arg in skip_parameters:
        continue
      repeated = ''
      if arg in repeated_params:
        repeated = ' (repeated)'
      required = ''
      if arg in required_params:
        required = ' (required)'
      paramdesc = methodDesc['parameters'][argmap[arg]]
      paramdoc = paramdesc.get('description', 'A parameter')
      if '$ref' in paramdesc:
        docs.append(
            ('  %s: object, %s%s%s\n    The object takes the'
             ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
               schema.prettyPrintByName(paramdesc['$ref'])))
      else:
        paramtype = paramdesc.get('type', 'string')
        docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
                                            repeated))
      enum = paramdesc.get('enum', [])
      enumDesc = paramdesc.get('enumDescriptions', [])
      if enum and enumDesc:
        docs.append('    Allowed values\n')
        for (name, desc) in zip(enum, enumDesc):
          docs.append('      %s - %s\n' % (name, desc))
    if 'response' in methodDesc:
      if methodName.endswith('_media'):
        docs.append('\nReturns:\n  The media object as a string.\n\n    ')
      else:
        docs.append('\nReturns:\n  An object of the form:\n\n    ')
        docs.append(schema.prettyPrintSchema(methodDesc['response']))

    setattr(method, '__doc__', ''.join(docs))
    setattr(theclass, methodName, method)

  def createNextMethod(theclass, methodName, methodDesc, rootDesc):
    """Creates any _next methods for attaching to a Resource.

    The _next methods allow for easy iteration through list() responses.

    Args:
      theclass: type, the class to attach methods to.
      methodName: string, name of the method to use.
      methodDesc: object, fragment of deserialized discovery document that
        describes the method.
      rootDesc: object, the entire deserialized discovery document.
    """
    methodName = fix_method_name(methodName)
    methodId = methodDesc['id'] + '.next'

    def methodNext(self, previous_request, previous_response):
      """Retrieves the next page of results.

      Args:
        previous_request: The request for the previous page.
        previous_response: The response from the request for the previous page.

      Returns:
        A request object that you can call 'execute()' on to request the next
        page. Returns None if there are no more items in the collection.
      """
      # Retrieve nextPageToken from previous_response
      # Use as pageToken in previous_request to create new request.

      if 'nextPageToken' not in previous_response:
        return None

      request = copy.copy(previous_request)

      pageToken = previous_response['nextPageToken']
      parsed = list(urlparse.urlparse(request.uri))
      q = parse_qsl(parsed[4])

      # Find and remove old 'pageToken' value from URI
      newq = [(key, value) for (key, value) in q if key != 'pageToken']
      newq.append(('pageToken', pageToken))
      parsed[4] = urllib.urlencode(newq)
      uri = urlparse.urlunparse(parsed)

      request.uri = uri

      logger.info('URL being requested: %s' % uri)

      return request

    setattr(theclass, methodName, methodNext)

  # Add basic methods to Resource
  if 'methods' in resourceDesc:
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      createMethod(Resource, methodName, methodDesc, rootDesc)
      # Add in _media methods. The functionality of the attached method will
      # change when it sees that the method name ends in _media.
      if methodDesc.get('supportsMediaDownload', False):
        createMethod(Resource, methodName + '_media', methodDesc, rootDesc)

  # Add in nested resources
  if 'resources' in resourceDesc:

    def createResourceMethod(theclass, methodName, methodDesc, rootDesc):
      """Create a method on the Resource to access a nested Resource.

      Args:
        theclass: type, the class to attach methods to.
        methodName: string, name of the method to use.
        methodDesc: object, fragment of deserialized discovery document that
          describes the method.
        rootDesc: object, the entire deserialized discovery document.
      """
      methodName = fix_method_name(methodName)

      def methodResource(self):
        return _createResource(self._http, self._baseUrl, self._model,
                               self._requestBuilder, self._developerKey,
                               methodDesc, rootDesc, schema)

      setattr(methodResource, '__doc__', 'A collection resource.')
      setattr(methodResource, '__is_resource__', True)
      setattr(theclass, methodName, methodResource)

    for methodName, methodDesc in resourceDesc['resources'].iteritems():
      createResourceMethod(Resource, methodName, methodDesc, rootDesc)

  # Add _next() methods
  # Look for response bodies in schema that contain nextPageToken, and methods
  # that take a pageToken parameter.
  if 'methods' in resourceDesc:
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      if 'response' in methodDesc:
        responseSchema = methodDesc['response']
        if '$ref' in responseSchema:
          responseSchema = schema.get(responseSchema['$ref'])
        hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
                                                                 {})
        hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
        if hasNextPageToken and hasPageToken:
          # Bug fix: the fourth argument is rootDesc; previously methodName
          # (a string) was passed in its place.
          createNextMethod(Resource, methodName + '_next',
                           resourceDesc['methods'][methodName],
                           rootDesc)

  return Resource()
<|file_name|>modifiedCluster.py<|end_file_name|><|fim▁begin|>import numpy as np
from sklearn import cluster, datasets, preprocessing
import pickle
import gensim
import time
import re
import tokenize
from scipy import spatial
def save_obj(obj, name):
    """Pickle ``obj`` to the file ``<name>.pkl`` using protocol 2
    (readable from Python 2 as well)."""
    with open(name + '.pkl', 'wb') as handle:
        pickle.dump(obj, handle, protocol=2)
def load_obj(name):
    """Unpickle and return the object stored at ``<name>.pkl``."""
    with open(name + '.pkl', 'rb') as handle:
        return pickle.load(handle)
def combine(v1, v2):
    """Element-wise sum of two vectors, scaled by the sum of the squared
    components of that sum (i.e. divided by the squared L2 norm for 1-D
    input).

    NOTE(review): a unit-length normalization would divide by the square
    root of this quantity instead — confirm the intended scaling.
    """
    total = np.add(v1, v2)
    scale = np.multiply(total, total).sum(axis=0)
    return np.divide(total, scale)
# 3M word google dataset of pretrained 300D vectors
model = gensim.models.Word2Vec.load_word2vec_format('vectors.bin', binary=True)
model.init_sims(replace=True)
#### getting all vecs from w2v using the inbuilt syn0 list see code
# X_Scaled_Feature_Vecs = []
# for w in model.vocab:
# X_Scaled_Feature_Vecs.append(model.syn0[model.vocab[w].index])
# model.syn0 = X_Scaled_Feature_Vecs
# X_Scaled_Feature_Vecs = None
# X_Scaled_Feature_Vecs = model.syn0
# ### scaling feature vecs
# min_max_scaler = preprocessing.MinMaxScaler()
# X_Scaled_Feature_Vecs = min_max_scaler.fit_transform(X)
# X_Scaled_Feature_Vecs = X
# W2V = dict(zip(model.vocab, X_Scaled_Feature_Vecs))
#Cosine Distance
# from scipy import spatial
# dataSetI = model["travel"]
# dataSetII = model["travelling"]
# result = 1 - spatial.distance.cosine(dataSetI, dataSetII)
# print(result)
<|fim▁hole|># ######## Interested Categories
cat = ["advertising","beauty","business","celebrity","diy craft","entertainment","family","fashion","food","general","health","lifestyle","music","news","pop","culture","social","media","sports","technology","travel","video games"]
nums = range(0,22)
num2cat = dict(zip(nums, cat))
# new Categories Seeds (787 seeds) DICT [seed: cat]
Word2CatMap = load_obj("baseWord2CatMap")
baseWords = Word2CatMap.keys()
catVec=[]
newBaseWords =[]
# load from C file output
for bw in baseWords:
try:
catVec.append(np.array(model[bw]))
newBaseWords.append(bw)
except:
words = bw.split()
try:
vec = np.array(model[words[0]])
for word in words[1:]:
try:
vec = combine(vec,np.array(model[word]))
except:
#print(word + " Skipped!")
continue
catVec.append(vec)
newBaseWords.append(bw)
except:
#print(words)
continue
# print(len(catVec))
# print(len(newBaseWords))
#cluster Size
# newBaseWords has the list of new base words that are in word2vec vocab
k = len(catVec)
# form a num(k) to cat(22) mapping
numK2CatMap = dict()
for w in newBaseWords:
numK2CatMap[newBaseWords.index(w)] = Word2CatMap[w]
# kmeans
##### better code
t0 = time.time()
# Assign Max_Iter to 1 (ONE) if u just want to fit vectors around seeds
kmeans = cluster.KMeans(n_clusters=k, init=np.array(catVec), max_iter=1).fit(X_Scaled_Feature_Vecs)
#kmeans = cluster.KMeans(n_clusters=22, init=np.array(catVec), max_iter=900).fit(X_Scaled_Feature_Vecs)
print(str(time.time()-t0))
print(kmeans.inertia_)
###### After Fiting the Cluster Centers are recomputed : update catVec (Order Preserved)
catVec = kmeans.cluster_centers_
# #test
# for c in catVec:
# print(num2cat[kmeans.predict(c)[0]])
##### save best for future use
save_obj(kmeans,"clusterLarge")
KM = kmeans
# Cluster_lookUP = dict(zip(model.vocab, KM.labels_))
Cluster_lookUP = dict()
Cluster_KlookUP = dict()
for word in model.vocab:
kmap = KM.predict(model[word])[0]
Cluster_lookUP[word] = numK2CatMap[kmap]
Cluster_KlookUP[word] = kmap
## Precomputing the cosine similarities
Cosine_Similarity = dict()
for k in Cluster_lookUP.keys():
# if len(Cluster_lookUP[k]) == 1:
Cosine_Similarity[k] = 1 - spatial.distance.cosine(model[k], catVec[Cluster_KlookUP[k]])
# else:
# Cosine_Similarity[k] = [1 - spatial.distance.cosine(model[k], catVec[wk]) for wk in Cluster_KlookUP[k]]
#check
print(num2cat[Cluster_lookUP["flight"][0]] + " "+str(Cosine_Similarity["flight"]))
print(num2cat[Cluster_lookUP["gamecube"][0]] +" "+str(Cosine_Similarity["gamecube"]))
#Saving Models
# for 22 topics
save_obj(Cluster_lookUP,"Cluster_lookUP")
save_obj(Cosine_Similarity,"Cosine_Similarity")
save_obj(num2cat,"num2cat")
save_obj(catVec,"catVec")
save_obj(numK2CatMap,"numK2CatMap")<|fim▁end|> | X_Scaled_Feature_Vecs=[]
for word in model.vocab:
X_Scaled_Feature_Vecs.append(model[word])
|
<|file_name|>GlobalExceptionHandler.java<|end_file_name|><|fim▁begin|>package com.raymond.entrypoint;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.HttpStatus;
import org.springframework.security.core.AuthenticationException;<|fim▁hole|>import javax.servlet.http.HttpServletResponse;
/**
* Created by Raymond Kwong on 12/1/2018.
*/
@Qualifier("handlerExceptionResolver")
@RestControllerAdvice
public class GlobalExceptionHandler extends ResponseEntityExceptionHandler {
@ExceptionHandler(AuthenticationException.class)
public ErrorResponseBean handleAuthenticationException(AuthenticationException exception, HttpServletResponse response){
ErrorResponseBean errorResponseBean = new ErrorResponseBean();
errorResponseBean.setError(HttpStatus.UNAUTHORIZED.getReasonPhrase());
errorResponseBean.setMessage(exception.getMessage());
response.setStatus(HttpStatus.UNAUTHORIZED.value());
return errorResponseBean;
}
}<|fim▁end|> | import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
|
<|file_name|>ScriptLanguageActionExecutor.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.action.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.util.XLog;
import org.jdom.Element;
import org.jdom.Namespace;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
public abstract class ScriptLanguageActionExecutor extends JavaActionExecutor {
public ScriptLanguageActionExecutor(String type) {
super(type);
}
@Override
public List<Class<?>> getLauncherClasses() {
return null;
}
protected boolean shouldAddScriptToCache(){
return true;
}
@Override
protected Configuration setupLauncherConf(Configuration conf, Element actionXml, Path appPath, Context context)
throws ActionExecutorException {
super.setupLauncherConf(conf, actionXml, appPath, context);
if(shouldAddScriptToCache()) {
addScriptToCache(conf, actionXml, appPath, context);
}
return conf;
}
protected void addScriptToCache(Configuration conf, Element actionXml, Path appPath, Context context)
throws ActionExecutorException {
Namespace ns = actionXml.getNamespace();
String script = actionXml.getChild("script", ns).getTextTrim();
String name = new Path(script).getName();
String scriptContent = context.getProtoActionConf().get(this.getScriptName());
Path scriptFile = null;
if (scriptContent != null) { // Create script on filesystem if this is
// an http submission job;
FSDataOutputStream dos = null;
try {
Path actionPath = context.getActionDir();
scriptFile = new Path(actionPath, script);
FileSystem fs = context.getAppFileSystem();
dos = fs.create(scriptFile);
dos.write(scriptContent.getBytes(StandardCharsets.UTF_8));
addToCache(conf, actionPath, script + "#" + name, false);
}
catch (Exception ex) {
throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FAILED_OPERATION", XLog
.format("Not able to write script file {0} on hdfs", scriptFile), ex);
}
finally {
try {
if (dos != null) {
dos.close();
}
}
catch (IOException ex) {
XLog.getLog(getClass()).error("Error: " + ex.getMessage());
}
}
}
else {
addToCache(conf, appPath, script + "#" + name, false);
}
}
protected abstract String getScriptName();
}<|fim▁end|> | *
* Unless required by applicable law or agreed to in writing, software |
<|file_name|>error.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "com4j.h"
void error( JNIEnv* env, const char* file, int line, HRESULT hr, const char* msg ... ) {
// format the message
va_list va;
va_start(va,msg);<|fim▁hole|> vsprintf(w,msg,va);
env->ExceptionClear();
env->Throw( (jthrowable)comexception_new_hr( env, env->NewStringUTF(w), hr, env->NewStringUTF(file), line ) );
}
void error( JNIEnv* env, const char* file, int line, const char* msg ... ) {
// format the message
va_list va;
va_start(va,msg);
int len = _vscprintf(msg,va);
char* w = reinterpret_cast<char*>(alloca(len+1)); // +1 for '\0'
vsprintf(w,msg,va);
env->ExceptionClear();
env->Throw( (jthrowable)comexception_new( env, env->NewStringUTF(w), env->NewStringUTF(file), line ) );
}<|fim▁end|> |
int len = _vscprintf(msg,va);
char* w = reinterpret_cast<char*>(alloca(len+1)); // +1 for '\0' |
<|file_name|>fluxx.visualizations.js<|end_file_name|><|fim▁begin|>(function($){
$.fn.extend({
renderChart: function() {
return this.each(function() {
var $chart = $(this);
if ($chart.children().length > 0)
return;
var data = $.parseJSON($chart.html());
var saveHTML = $chart.html();
$chart.html('').show().parent();
var chartID = 'chart' + $.fluxx.visualizations.counter++;
if (data) {
var $card;
if (typeof $chart.fluxxCard == 'function') {
$card = $chart.fluxxCard();
} else {
$card = $('#hand');
}
if (data.hasOwnProperty('class'))
$card.fluxxCardDetail().addClass(data['class']);
if (data.hasOwnProperty('width'))
$card.fluxxCardDetail().width(data.width);
$chart.html("").append('<div id="' + chartID + '"></div>');
$.jqplot.config.enablePlugins = true;
if (data.type == 'bar') {
if (!data.seriesDefaults)
data.seriesDefaults = {};
data.seriesDefaults.renderer = $.jqplot.BarRenderer;
}
if (data.series) {
$.each(data.series, function(i, s) {
if (s.renderer) {
s.renderer = eval(s.renderer);
}
});
}
if (data.axes && data.axes.xaxis && data.axes.xaxis.ticks.length > 0 && !$.isArray(data.axes.xaxis.ticks[0]))
data.axes.xaxis.renderer = $.jqplot.CategoryAxisRenderer;
var error = false;
try {
plot = $.jqplot(chartID, data.data, {
axesDefaults: {
tickRenderer: $.jqplot.CanvasAxisTickRenderer ,
tickOptions: {
fontSize: '10pt'
}
},
title: {show: false},
width: $chart.css('width'),
stackSeries: data.stackSeries,
// grid:{background:'#fefbf3', borderWidth:2.5},
grid:{background:'#ffffff', borderWidth:0, gridLineColor: '#ffffff', shadow: false},
seriesDefaults: data.seriesDefaults,
axes: data.axes,
series: data.series
});
} catch(e) {
// $.fluxx.log('error', e);
$chart.html('<h4>No data available</h4>').height(50).css({"text-align": "center"});
error = true;
}
if (!error) {
var legend = {};
$.each(plot.series, function(index, key) {
legend[key.label] = key;
});
var $table = $('.legend table.legend-table', $card);
if ($table.hasClass('single-row-legend')) {
$table.find('.category').each(function () {
var $cat = $(this);
var l = legend[$.trim($cat.text())];
if (l)
$cat.prepend('<span class="legend-color-swatch" style="background-color: ' + l.color + '"/>');
});
} else {
$table.find('tr').each(function() {
var $td = $('td:first', $(this));
if ($td.length) {
var l = legend[$.trim($td.text())];
if (l)
$td.prepend('<span class="legend-color-swatch" style="background-color: ' + l.color + '"/>');
}
})
.hover(function(e) {<|fim▁hole|> var $td = $('td:first', $(this));
if (legend[$.trim($td.text())])
legend[$.trim($td.text())].canvas._elem.css('opacity', '1');
});
}
}
}
});
}
});
$.extend(true, {
fluxx: {
visualizations: {
counter: 0
}
}
});
})(jQuery);<|fim▁end|> | var $td = $('td:first', $(this));
if (legend[$.trim($td.text())])
legend[$.trim($td.text())].canvas._elem.css('opacity', '.5');
}, function(e) { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Module computes indentation for block
It contains implementation of indenters, which are supported by katepart xml files
"""
import logging
logger = logging.getLogger('qutepart')
from PyQt4.QtGui import QTextCursor
def _getSmartIndenter(indenterName, qpart, indenter):
"""Get indenter by name.
Available indenters are none, normal, cstyle, haskell, lilypond, lisp, python, ruby, xml
Indenter name is not case sensitive
Raise KeyError if not found
indentText is indentation, which shall be used. i.e. '\t' for tabs, ' ' for 4 space symbols
"""
indenterName = indenterName.lower()
if indenterName in ('haskell', 'lilypond'): # not supported yet
logger.warning('Smart indentation for %s not supported yet. But you could be a hero who implemented it' % indenterName)
from qutepart.indenter.base import IndentAlgNormal as indenterClass
elif 'none' == indenterName:
from qutepart.indenter.base import IndentAlgBase as indenterClass
elif 'normal' == indenterName:
from qutepart.indenter.base import IndentAlgNormal as indenterClass
elif 'cstyle' == indenterName:
from qutepart.indenter.cstyle import IndentAlgCStyle as indenterClass
elif 'python' == indenterName:
from qutepart.indenter.python import IndentAlgPython as indenterClass
elif 'ruby' == indenterName:
from qutepart.indenter.ruby import IndentAlgRuby as indenterClass
elif 'xml' == indenterName:
from qutepart.indenter.xmlindent import IndentAlgXml as indenterClass
elif 'haskell' == indenterName:
from qutepart.indenter.haskell import IndenterHaskell as indenterClass
elif 'lilypond' == indenterName:
from qutepart.indenter.lilypond import IndenterLilypond as indenterClass
elif 'lisp' == indenterName:
from qutepart.indenter.lisp import IndentAlgLisp as indenterClass
elif 'scheme' == indenterName:
from qutepart.indenter.scheme import IndentAlgScheme as indenterClass
else:
raise KeyError("Indenter %s not found" % indenterName)
return indenterClass(qpart, indenter)
class Indenter:
"""Qutepart functionality, related to indentation
Public attributes:
width Indent width
useTabs Indent uses Tabs (instead of spaces)
"""
_DEFAULT_INDENT_WIDTH = 4
_DEFAULT_INDENT_USE_TABS = False
def __init__(self, qpart):
self._qpart = qpart
self.width = self._DEFAULT_INDENT_WIDTH
self.useTabs = self._DEFAULT_INDENT_USE_TABS
self._smartIndenter = _getSmartIndenter('normal', self._qpart, self)
def setSyntax(self, syntax):
"""Choose smart indentation algorithm according to syntax"""
self._smartIndenter = self._chooseSmartIndenter(syntax)
def text(self):
"""Get indent text as \t or string of spaces
"""
if self.useTabs:
return '\t'
else:
return ' ' * self.width
def triggerCharacters(self):
"""Trigger characters for smart indentation"""
return self._smartIndenter.TRIGGER_CHARACTERS
def autoIndentBlock(self, block, char = '\n'):
"""Indent block after Enter pressed or trigger character typed
"""
cursor = QTextCursor(block)
currentText = block.text()
spaceAtStartLen = len(currentText) - len(currentText.lstrip())
currentIndent = currentText[:spaceAtStartLen]
indent = self._smartIndenter.computeIndent(block, char)
if indent is not None and indent != currentIndent:
self._qpart.replaceText(block.position(), spaceAtStartLen, indent)
def onChangeSelectedBlocksIndent(self, increase, withSpace=False):
"""Tab or Space pressed and few blocks are selected, or Shift+Tab pressed
Insert or remove text from the beginning of blocks
"""
def blockIndentation(block):
text = block.text()
return text[:len(text) - len(text.lstrip())]
def cursorAtSpaceEnd(block):
cursor = QTextCursor(block)
cursor.setPosition(block.position() + len(blockIndentation(block)))
return cursor
def indentBlock(block):
cursor = cursorAtSpaceEnd(block)
cursor.insertText(' ' if withSpace else self.text())
def spacesCount(text):
return len(text) - len(text.rstrip(' '))
def unIndentBlock(block):
currentIndent = blockIndentation(block)
if currentIndent.endswith('\t'):
charsToRemove = 1
elif withSpace:
charsToRemove = 1 if currentIndent else 0
else:
if self.useTabs:
charsToRemove = min(spacesCount(currentIndent), self.width)
else: # spaces
if currentIndent.endswith(self.text()): # remove indent level
charsToRemove = self.width
else: # remove all spaces
charsToRemove = min(spacesCount(currentIndent), self.width)
if charsToRemove:
cursor = cursorAtSpaceEnd(block)
cursor.setPosition(cursor.position() - charsToRemove, QTextCursor.KeepAnchor)
cursor.removeSelectedText()
cursor = self._qpart.textCursor()
startBlock = self._qpart.document().findBlock(cursor.selectionStart())
endBlock = self._qpart.document().findBlock(cursor.selectionEnd())
# If end is positioned in the beginning of a block, do not indent this
# block, since no text is selected in it (beginning of line)
if endBlock.position()==cursor.selectionEnd():
endBlock=endBlock.previous()
indentFunc = indentBlock if increase else unIndentBlock
if startBlock != endBlock: # indent multiply lines
stopBlock = endBlock.next()
block = startBlock
with self._qpart:
while block != stopBlock:
indentFunc(block)
block = block.next()
newCursor = QTextCursor(startBlock)
newCursor.setPosition(endBlock.position() + len(endBlock.text()), QTextCursor.KeepAnchor)
self._qpart.setTextCursor(newCursor)
else: # indent 1 line
indentFunc(startBlock)
def onShortcutIndentAfterCursor(self):
"""Tab pressed and no selection. Insert text after cursor
"""
cursor = self._qpart.textCursor()
def insertIndent():
if self.useTabs:
cursor.insertText('\t')
else: # indent to integer count of indents from line start
charsToInsert = self.width - (len(self._qpart.textBeforeCursor()) % self.width)
cursor.insertText(' ' * charsToInsert)
if cursor.positionInBlock() == 0: # if no any indent - indent smartly
block = cursor.block()
self.autoIndentBlock(block, '')
# if no smart indentation - just insert one indent
if self._qpart.textBeforeCursor() == '':<|fim▁hole|> insertIndent()
def onShortcutUnindentWithBackspace(self):
"""Backspace pressed, unindent
"""
assert self._qpart.textBeforeCursor().endswith(self.text())
charsToRemove = len(self._qpart.textBeforeCursor()) % len(self.text())
if charsToRemove == 0:
charsToRemove = len(self.text())
cursor = self._qpart.textCursor()
cursor.setPosition(cursor.position() - charsToRemove, QTextCursor.KeepAnchor)
cursor.removeSelectedText()
def onAutoIndentTriggered(self):
"""Indent current line or selected lines
"""
cursor = self._qpart.textCursor()
startBlock = self._qpart.document().findBlock(cursor.selectionStart())
endBlock = self._qpart.document().findBlock(cursor.selectionEnd())
if startBlock != endBlock: # indent multiply lines
stopBlock = endBlock.next()
block = startBlock
with self._qpart:
while block != stopBlock:
self.autoIndentBlock(block, '')
block = block.next()
else: # indent 1 line
self.autoIndentBlock(startBlock, '')
def _chooseSmartIndenter(self, syntax):
"""Get indenter for syntax
"""
if syntax.indenter is not None:
try:
return _getSmartIndenter(syntax.indenter, self._qpart, self)
except KeyError:
logger.error("Indenter '%s' is not finished yet. But you can do it!" % syntax.indenter)
try:
return _getSmartIndenter(syntax.name, self._qpart, self)
except KeyError:
pass
return _getSmartIndenter('normal', self._qpart, self)<|fim▁end|> | insertIndent()
else: |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from app.core.helper import create_app
from app.core.db import db
from app.core.json import json_respon
from app.user.views import user_views
from app.user.models import*
from app.user.loginmanager import login_manager
from app.hotel.views import hotel_views
from app.hotel.models import*
from app.reservation.views import reservation_views
from app.reservation.models import*
config = 'app.config'
app = create_app(config)
db.init_app(app)
login_manager.init_app(app)
# register blueprint
app.register_blueprint(user_views)
app.register_blueprint(hotel_views)
app.register_blueprint(reservation_views)
@app.errorhandler(401)<|fim▁hole|>def say_401(error):
return json_respon(code=401, msg="You must login to access this url")
@app.errorhandler(404)
def say_404(error):
return json_respon(code=404, msg=error.description)
@app.errorhandler(405)
def say_405(error):
return json_respon(code=405, msg=error.description)
@app.errorhandler(500)
def say_500(error):
return json_respon(code=500, msg=error.description)<|fim▁end|> | |
<|file_name|>AllPlot.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2006 Sean C. Rhea ([email protected])
* Copyright (c) 2010 Mark Liversedge ([email protected])
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "AllPlot.h"
#include "Context.h"
#include "Athlete.h"
#include "AllPlotWindow.h"
#include "AllPlotSlopeCurve.h"
#include "ReferenceLineDialog.h"
#include "RideFile.h"
#include "RideItem.h"
#include "IntervalItem.h"
#include "IntervalTreeView.h"
#include "Settings.h"
#include "Units.h"
#include "Zones.h"
#include "Colors.h"
#include "WPrime.h"
#include "IndendPlotMarker.h"
#include <qwt_plot_curve.h>
#include <qwt_plot_canvas.h>
#include <qwt_plot_intervalcurve.h>
#include <qwt_plot_grid.h>
#include <qwt_plot_layout.h>
#include <qwt_plot_marker.h>
#include <qwt_scale_div.h>
#include <qwt_scale_widget.h>
#include <qwt_compat.h>
#include <qwt_text.h>
#include <qwt_legend.h>
#include <qwt_series_data.h>
#include <QMultiMap>
#include <string.h> // for memcpy
class IntervalPlotData : public QwtSeriesData<QPointF>
{
public:
IntervalPlotData(AllPlot *allPlot, Context *context, AllPlotWindow *window) :
allPlot(allPlot), context(context), window(window) {}
double x(size_t i) const ;
double y(size_t i) const ;
size_t size() const ;
//virtual QwtData *copy() const ;
void init() ;
IntervalItem *intervalNum(int n) const;
int intervalCount() const;
AllPlot *allPlot;
Context *context;
AllPlotWindow *window;
virtual QPointF sample(size_t i) const;
virtual QRectF boundingRect() const;
};
// define a background class to handle shading of power zones
// draws power zone bands IF zones are defined and the option
// to draw bonds has been selected
class AllPlotBackground: public QwtPlotItem
{
private:
AllPlot *parent;
public:
AllPlotBackground(AllPlot *_parent)
{
setZ(-100.0);
parent = _parent;
}
virtual int rtti() const
{
return QwtPlotItem::Rtti_PlotUserItem;
}
virtual void draw(QPainter *painter,
const QwtScaleMap &, const QwtScaleMap &yMap,
const QRectF &rect) const
{
RideItem *rideItem = parent->rideItem;
// get zone data from ride or athlete ...
const Zones *zones;
int zone_range = -1;
if (parent->context->isCompareIntervals) {
zones = parent->context->athlete->zones();
if (!zones) return;
// use first compare interval date
if (parent->context->compareIntervals.count())
zone_range = zones->whichRange(parent->context->compareIntervals[0].data->startTime().date());
// still not set
if (zone_range == -1)
zone_range = zones->whichRange(QDate::currentDate());
} else if (rideItem && parent->context->athlete->zones()) {
zones = parent->context->athlete->zones();
zone_range = parent->context->athlete->zones()->whichRange(rideItem->dateTime.date());
} else {
return; // nulls
}
if (parent->shadeZones() && (zone_range >= 0)) {
QList <int> zone_lows = zones->getZoneLows(zone_range);
int num_zones = zone_lows.size();
if (num_zones > 0) {
for (int z = 0; z < num_zones; z ++) {
QRect r = rect.toRect();
QColor shading_color = zoneColor(z, num_zones);
shading_color.setHsv(
shading_color.hue(),
shading_color.saturation() / 4,
shading_color.value()
);
r.setBottom(yMap.transform(zone_lows[z]));
if (z + 1 < num_zones)
r.setTop(yMap.transform(zone_lows[z + 1]));
if (r.top() <= r.bottom())
painter->fillRect(r, shading_color);
}
}
} else {
}
}
};
// Zone labels are drawn if power zone bands are enabled, automatically
// at the center of the plot
class AllPlotZoneLabel: public QwtPlotItem
{
private:
AllPlot *parent;
int zone_number;
double watts;
QwtText text;
public:
AllPlotZoneLabel(AllPlot *_parent, int _zone_number)
{
parent = _parent;
zone_number = _zone_number;
RideItem *rideItem = parent->rideItem;
// get zone data from ride or athlete ...
const Zones *zones;
int zone_range = -1;
if (parent->context->isCompareIntervals) {
zones = parent->context->athlete->zones();
if (!zones) return;
// use first compare interval date
if (parent->context->compareIntervals.count())
zone_range = zones->whichRange(parent->context->compareIntervals[0].data->startTime().date());
// still not set
if (zone_range == -1)
zone_range = zones->whichRange(QDate::currentDate());
} else if (rideItem && parent->context->athlete->zones()) {
zones = parent->context->athlete->zones();
zone_range = parent->context->athlete->zones()->whichRange(rideItem->dateTime.date());
} else {
return; // nulls
}
// create new zone labels if we're shading
if (parent->shadeZones() && (zone_range >= 0)) {
QList <int> zone_lows = zones->getZoneLows(zone_range);
QList <QString> zone_names = zones->getZoneNames(zone_range);
int num_zones = zone_lows.size();
if (zone_names.size() != num_zones) return;
if (zone_number < num_zones) {
watts =
(
(zone_number + 1 < num_zones) ?
0.5 * (zone_lows[zone_number] + zone_lows[zone_number + 1]) :
(
(zone_number > 0) ?
(1.5 * zone_lows[zone_number] - 0.5 * zone_lows[zone_number - 1]) :
2.0 * zone_lows[zone_number]
)
);
text = QwtText(zone_names[zone_number]);
if (_parent->referencePlot == NULL) {
text.setFont(QFont("Helvetica",24, QFont::Bold));
} else {
text.setFont(QFont("Helvetica",12, QFont::Bold));
}
QColor text_color = zoneColor(zone_number, num_zones);
text_color.setAlpha(64);
text.setColor(text_color);
}
}
setZ(-99.00 + zone_number / 100.0);
}
virtual int rtti() const
{
return QwtPlotItem::Rtti_PlotUserItem;
}
void draw(QPainter *painter,
const QwtScaleMap &, const QwtScaleMap &yMap,
const QRectF &rect) const
{
if (parent->shadeZones()) {
int x = (rect.left() + rect.right()) / 2;
int y = yMap.transform(watts);
// the following code based on source for QwtPlotMarker::draw()
QRect tr(QPoint(0, 0), text.textSize(painter->font()).toSize());
tr.moveCenter(QPoint(x, y));
text.draw(painter, tr);
}
}
};
class TimeScaleDraw: public ScaleScaleDraw
{
public:
TimeScaleDraw(bool *bydist) : ScaleScaleDraw(), bydist(bydist) {}
virtual QwtText label(double v) const
{
if (*bydist) {
return QString("%1").arg(v);
} else {
QTime t = QTime(0,0,0,0).addSecs(v*60.00);
if (scaleMap().sDist() > 5)
return t.toString("hh:mm");
return t.toString("hh:mm:ss");
}
}
private:
bool *bydist;
};
static inline double
max(double a, double b) { if (a > b) return a; else return b; }
AllPlotObject::AllPlotObject(AllPlot *plot) : plot(plot)
{
maxKM = maxSECS = 0;
wattsCurve = new QwtPlotCurve(tr("Power"));
wattsCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
wattsCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
antissCurve = new QwtPlotCurve(tr("anTISS"));
antissCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
antissCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 3));
atissCurve = new QwtPlotCurve(tr("aTISS"));
atissCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
atissCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 3));
npCurve = new QwtPlotCurve(tr("NP"));
npCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
npCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
rvCurve = new QwtPlotCurve(tr("Vertical Oscillation"));
rvCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rvCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
rcadCurve = new QwtPlotCurve(tr("Run Cadence"));
rcadCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rcadCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
rgctCurve = new QwtPlotCurve(tr("GCT"));
rgctCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rgctCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
gearCurve = new QwtPlotCurve(tr("Gear Ratio"));
gearCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
gearCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
gearCurve->setStyle(QwtPlotCurve::Steps);
gearCurve->setCurveAttribute(QwtPlotCurve::Inverted);
smo2Curve = new QwtPlotCurve(tr("SmO2"));
smo2Curve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
smo2Curve->setYAxis(QwtAxisId(QwtAxis::yLeft, 1));
thbCurve = new QwtPlotCurve(tr("tHb"));
thbCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thbCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
o2hbCurve = new QwtPlotCurve(tr("O2Hb"));
o2hbCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
o2hbCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
hhbCurve = new QwtPlotCurve(tr("HHb"));
hhbCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
hhbCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
xpCurve = new QwtPlotCurve(tr("xPower"));
xpCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
xpCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
apCurve = new QwtPlotCurve(tr("aPower"));
apCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
apCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 0));
hrCurve = new QwtPlotCurve(tr("Heart Rate"));
hrCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
hrCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 1));
tcoreCurve = new QwtPlotCurve(tr("Core Temp"));
tcoreCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
tcoreCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 1));
accelCurve = new QwtPlotCurve(tr("Acceleration"));
accelCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
accelCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
wattsDCurve = new QwtPlotCurve(tr("Power Delta"));
wattsDCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
wattsDCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
cadDCurve = new QwtPlotCurve(tr("Cadence Delta"));
cadDCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
cadDCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
nmDCurve = new QwtPlotCurve(tr("Torque Delta"));
nmDCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
nmDCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
hrDCurve = new QwtPlotCurve(tr("Heartrate Delta"));
hrDCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
hrDCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
speedCurve = new QwtPlotCurve(tr("Speed"));
speedCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
speedCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
cadCurve = new QwtPlotCurve(tr("Cadence"));
cadCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
cadCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 1));
altCurve = new QwtPlotCurve(tr("Altitude"));
altCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
// standard->altCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
altCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 1));
altCurve->setZ(-10); // always at the back.
altSlopeCurve = new AllPlotSlopeCurve(tr("Alt/Slope"));
altSlopeCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
altSlopeCurve->setStyle(AllPlotSlopeCurve::SlopeDist1);
altSlopeCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 1));
altSlopeCurve->setZ(-5); // always at the back.
slopeCurve = new QwtPlotCurve(tr("Slope"));
slopeCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
slopeCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
tempCurve = new QwtPlotCurve(tr("Temperature"));
tempCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
if (plot->context->athlete->useMetricUnits)
tempCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
else
tempCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 1)); // with cadence
windCurve = new QwtPlotIntervalCurve(tr("Wind"));
windCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
torqueCurve = new QwtPlotCurve(tr("Torque"));
torqueCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
torqueCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 0));
balanceLCurve = new QwtPlotCurve(tr("Left Balance"));
balanceLCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
balanceLCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
balanceRCurve = new QwtPlotCurve(tr("Right Balance"));
balanceRCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
balanceRCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
lteCurve = new QwtPlotCurve(tr("Left Torque Efficiency"));
lteCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
lteCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
rteCurve = new QwtPlotCurve(tr("Right Torque Efficiency"));
rteCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rteCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
lpsCurve = new QwtPlotCurve(tr("Left Pedal Smoothness"));
lpsCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
lpsCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
rpsCurve = new QwtPlotCurve(tr("Right Pedal Smoothness"));
rpsCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rpsCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
lpcoCurve = new QwtPlotCurve(tr("Left Pedal Center Offset"));
lpcoCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
lpcoCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
rpcoCurve = new QwtPlotCurve(tr("Right Pedal Center Offset"));
rpcoCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
rpcoCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
lppCurve = new QwtPlotIntervalCurve(tr("Left Pedal Power Phase"));
lppCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
rppCurve = new QwtPlotIntervalCurve(tr("Right Pedal Power Phase"));
rppCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
lpppCurve = new QwtPlotIntervalCurve(tr("Left Peak Pedal Power Phase"));
lpppCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
rpppCurve = new QwtPlotIntervalCurve(tr("Right Peak Pedal Power Phase"));
rpppCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 3));
wCurve = new QwtPlotCurve(tr("W' Balance (kJ)"));
wCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
wCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 2));
mCurve = new QwtPlotCurve(tr("Matches"));
mCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
mCurve->setStyle(QwtPlotCurve::Dots);
mCurve->setYAxis(QwtAxisId(QwtAxis::yRight, 2));
curveTitle.attach(plot);
curveTitle.setLabelAlignment(Qt::AlignRight);
intervalHighlighterCurve = new QwtPlotCurve();
intervalHighlighterCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 2));
intervalHighlighterCurve->setBaseline(-20); // go below axis
intervalHighlighterCurve->setZ(-20); // behind alt but infront of zones
intervalHighlighterCurve->attach(plot);
intervalHoverCurve = new QwtPlotCurve();
intervalHoverCurve->setYAxis(QwtAxisId(QwtAxis::yLeft, 2));
intervalHoverCurve->setBaseline(-20); // go below axis
intervalHoverCurve->setZ(-20); // behind alt but infront of zones
intervalHoverCurve->attach(plot);
// setup that standard->grid
grid = new QwtPlotGrid();
grid->enableX(false); // not needed
grid->enableY(true);
grid->attach(plot);
}
// Recolour every curve from a single base colour; used mainly for the
// compare-mode objects so each compared ride gets its own colour family.
void
AllPlotObject::setColor(QColor color)
{
    // curves are recoloured in this fixed order, each one slightly darker
    // and more transparent than the one before it
    QList<QwtPlotCurve*> curves;
    curves << mCurve << wCurve << wattsCurve << atissCurve << antissCurve << npCurve << xpCurve << speedCurve << accelCurve
           << wattsDCurve << cadDCurve << nmDCurve << hrDCurve
           << apCurve << cadCurve << tempCurve << hrCurve << tcoreCurve << torqueCurve << balanceLCurve
           << balanceRCurve << lteCurve << rteCurve << lpsCurve << rpsCurve
           << lpcoCurve << rpcoCurve
           << altCurve << slopeCurve << altSlopeCurve
           << rvCurve << rcadCurve << rgctCurve << gearCurve
           << smo2Curve << thbCurve << o2hbCurve << hhbCurve;

    QPen curvePen;
    curvePen.setWidth(1.0);
    int fade = 200;
    bool smoothed = appsettings->value(this, GC_ANTIALIAS, true).toBool();

    for (int i = 0; i < curves.count(); i++) {
        QwtPlotCurve *curve = curves[i];
        // NOTE(review): the pen takes `color` before this step's alpha is
        // applied to it, so the alpha change only shows on the NEXT curve;
        // preserved as-is since it is long-standing behaviour.
        curvePen.setColor(color);
        color.setAlpha(fade);
        curve->setPen(curvePen);
        if (smoothed) curve->setRenderHint(QwtPlotItem::RenderAntialiased);

        // progressively darker and more transparent for the next curve
        color = color.darker(110);
        if (fade > 10) fade -= 10;
    }

    // interval curves are not QwtPlotCurve so are handled separately;
    // they all take the final (darkest) pen from the loop above
    windCurve->setPen(curvePen);
    if (smoothed) windCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
    lppCurve->setPen(curvePen);
    if (smoothed) lppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
    rppCurve->setPen(curvePen);
    if (smoothed) rppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
    lpppCurve->setPen(curvePen);
    if (smoothed) lpppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
    rpppCurve->setPen(curvePen);
    if (smoothed) rpppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);

    // altitude keeps a faint fill below its line
    altCurve->setBrush(QBrush(altCurve->pen().color().lighter(150)));
}
// wipe those curves
//
// The plot has auto-delete disabled (see the AllPlot constructor), so every
// item this object created must be detached and freed here by hand.
AllPlotObject::~AllPlotObject()
{
    grid->detach(); delete grid;
    mCurve->detach(); delete mCurve;
    wCurve->detach(); delete wCurve;
    wattsCurve->detach(); delete wattsCurve;
    atissCurve->detach(); delete atissCurve;
    antissCurve->detach(); delete antissCurve;
    npCurve->detach(); delete npCurve;
    rcadCurve->detach(); delete rcadCurve;
    rvCurve->detach(); delete rvCurve;
    rgctCurve->detach(); delete rgctCurve;
    gearCurve->detach(); delete gearCurve;
    smo2Curve->detach(); delete smo2Curve;
    thbCurve->detach(); delete thbCurve;
    o2hbCurve->detach(); delete o2hbCurve;
    hhbCurve->detach(); delete hhbCurve;
    xpCurve->detach(); delete xpCurve;
    apCurve->detach(); delete apCurve;
    hrCurve->detach(); delete hrCurve;
    tcoreCurve->detach(); delete tcoreCurve;
    speedCurve->detach(); delete speedCurve;
    accelCurve->detach(); delete accelCurve;
    wattsDCurve->detach(); delete wattsDCurve;
    cadDCurve->detach(); delete cadDCurve;
    nmDCurve->detach(); delete nmDCurve;
    hrDCurve->detach(); delete hrDCurve;
    cadCurve->detach(); delete cadCurve;
    altCurve->detach(); delete altCurve;
    slopeCurve->detach(); delete slopeCurve;
    altSlopeCurve->detach(); delete altSlopeCurve;
    tempCurve->detach(); delete tempCurve;
    windCurve->detach(); delete windCurve;
    torqueCurve->detach(); delete torqueCurve;
    balanceLCurve->detach(); delete balanceLCurve;
    balanceRCurve->detach(); delete balanceRCurve;
    lteCurve->detach(); delete lteCurve;
    rteCurve->detach(); delete rteCurve;
    lpsCurve->detach(); delete lpsCurve;
    rpsCurve->detach(); delete rpsCurve;
    lpcoCurve->detach(); delete lpcoCurve;
    rpcoCurve->detach(); delete rpcoCurve;
    lppCurve->detach(); delete lppCurve;
    rppCurve->detach(); delete rppCurve;
    lpppCurve->detach(); delete lpppCurve;
    rpppCurve->detach(); delete rpppCurve;
    // BUGFIX: these two curves are allocated in the constructor but were
    // previously never freed — with plot auto-delete off they leaked on
    // every AllPlotObject destruction.
    intervalHighlighterCurve->detach(); delete intervalHighlighterCurve;
    intervalHoverCurve->detach(); delete intervalHoverCurve;
    // NOTE(review): d_mrk markers and referenceLines are still not freed
    // here; their ownership appears to live elsewhere — confirm before
    // adding deletes for them.
}
// Bulk attach/detach of every plot item owned by this object. Used when
// swapping between plot objects (e.g. compare mode) so one set of curves
// can be hidden and another shown without destroying any state.
// NOTE: attach order matters for paint layering in Qwt, so the attach
// branch must not be reordered casually.
void
AllPlotObject::setVisible(bool show)
{
    if (show == false) {
        grid->detach();
        mCurve->detach();
        wCurve->detach();
        wattsCurve->detach();
        npCurve->detach();
        rcadCurve->detach();
        rvCurve->detach();
        rgctCurve->detach();
        gearCurve->detach();
        smo2Curve->detach();
        thbCurve->detach();
        o2hbCurve->detach();
        hhbCurve->detach();
        atissCurve->detach();
        antissCurve->detach();
        xpCurve->detach();
        apCurve->detach();
        hrCurve->detach();
        tcoreCurve->detach();
        speedCurve->detach();
        accelCurve->detach();
        wattsDCurve->detach();
        cadDCurve->detach();
        nmDCurve->detach();
        hrDCurve->detach();
        cadCurve->detach();
        altCurve->detach();
        slopeCurve->detach();
        altSlopeCurve->detach();
        tempCurve->detach();
        windCurve->detach();
        torqueCurve->detach();
        lteCurve->detach();
        rteCurve->detach();
        lpsCurve->detach();
        rpsCurve->detach();
        lpcoCurve->detach();
        rpcoCurve->detach();
        lppCurve->detach();
        rppCurve->detach();
        lpppCurve->detach();
        rpppCurve->detach();
        balanceLCurve->detach();
        balanceRCurve->detach();
        intervalHighlighterCurve->detach();
        intervalHoverCurve->detach();
        // marks, calibrations and reference lines
        foreach(QwtPlotMarker *mrk, d_mrk) {
            mrk->detach();
        }
        foreach(QwtPlotCurve *referenceLine, referenceLines) {
            referenceLine->detach();
        }
    } else {
        // attach everything back; this re-shows ALL series — callers that
        // want user-configured visibility must follow up (see hideUnwanted)
        altCurve->attach(plot); // always do first as it hasa brush
        grid->attach(plot);
        mCurve->attach(plot);
        wCurve->attach(plot);
        wattsCurve->attach(plot);
        slopeCurve->attach(plot);
        altSlopeCurve->attach(plot);
        npCurve->attach(plot);
        rvCurve->attach(plot);
        rcadCurve->attach(plot);
        rgctCurve->attach(plot);
        gearCurve->attach(plot);
        smo2Curve->attach(plot);
        thbCurve->attach(plot);
        o2hbCurve->attach(plot);
        hhbCurve->attach(plot);
        atissCurve->attach(plot);
        antissCurve->attach(plot);
        xpCurve->attach(plot);
        apCurve->attach(plot);
        hrCurve->attach(plot);
        tcoreCurve->attach(plot);
        speedCurve->attach(plot);
        accelCurve->attach(plot);
        wattsDCurve->attach(plot);
        cadDCurve->attach(plot);
        nmDCurve->attach(plot);
        hrDCurve->attach(plot);
        cadCurve->attach(plot);
        tempCurve->attach(plot);
        windCurve->attach(plot);
        torqueCurve->attach(plot);
        lteCurve->attach(plot);
        rteCurve->attach(plot);
        lpsCurve->attach(plot);
        rpsCurve->attach(plot);
        lpcoCurve->attach(plot);
        rpcoCurve->attach(plot);
        lppCurve->attach(plot);
        rppCurve->attach(plot);
        lpppCurve->attach(plot);
        rpppCurve->attach(plot);
        balanceLCurve->attach(plot);
        balanceRCurve->attach(plot);
        intervalHighlighterCurve->attach(plot);
        intervalHoverCurve->attach(plot);
        // marks, calibrations and reference lines
        foreach(QwtPlotMarker *mrk, d_mrk) {
            mrk->attach(plot);
        }
        foreach(QwtPlotCurve *referenceLine, referenceLines) {
            referenceLine->attach(plot);
        }
    }
}
// Detach every curve whose series the user has switched off in the chart
// settings; the curves left attached are the ones that actually paint.
void
AllPlotObject::hideUnwanted()
{
    // power uses a tri-state setting: any value above 1 means hidden
    if (plot->showPowerState > 1) wattsCurve->detach();
    if (plot->showNP == false) npCurve->detach();
    if (plot->showRV == false) rvCurve->detach();
    if (plot->showRCad == false) rcadCurve->detach();
    if (plot->showRGCT == false) rgctCurve->detach();
    if (plot->showGear == false) gearCurve->detach();
    if (plot->showSmO2 == false) smo2Curve->detach();
    if (plot->showtHb == false) thbCurve->detach();
    if (plot->showO2Hb == false) o2hbCurve->detach();
    if (plot->showHHb == false) hhbCurve->detach();
    if (plot->showATISS == false) atissCurve->detach();
    if (plot->showANTISS == false) antissCurve->detach();
    if (plot->showXP == false) xpCurve->detach();
    if (plot->showAP == false) apCurve->detach();
    // W'bal controls both the balance curve and the match dots
    if (plot->showW == false) {
        wCurve->detach();
        mCurve->detach();
    }
    if (plot->showHr == false) hrCurve->detach();
    if (plot->showTcore == false) tcoreCurve->detach();
    if (plot->showSpeed == false) speedCurve->detach();
    if (plot->showAccel == false) accelCurve->detach();
    if (plot->showPowerD == false) wattsDCurve->detach();
    if (plot->showCadD == false) cadDCurve->detach();
    if (plot->showTorqueD == false) nmDCurve->detach();
    if (plot->showHrD == false) hrDCurve->detach();
    if (plot->showCad == false) cadCurve->detach();
    if (plot->showAlt == false) altCurve->detach();
    if (plot->showSlope == false) slopeCurve->detach();
    if (plot->showAltSlopeState == 0) altSlopeCurve->detach(); // 0 == off
    if (plot->showTemp == false) tempCurve->detach();
    if (plot->showWind == false) windCurve->detach();
    if (plot->showTorque == false) torqueCurve->detach();
    // the pedal metrics all come in left/right pairs
    if (plot->showTE == false) { lteCurve->detach(); rteCurve->detach(); }
    if (plot->showPS == false) { lpsCurve->detach(); rpsCurve->detach(); }
    if (plot->showPCO == false) { lpcoCurve->detach(); rpcoCurve->detach(); }
    if (plot->showDC == false) { lppCurve->detach(); rppCurve->detach(); }
    if (plot->showPPP == false) { lpppCurve->detach(); rpppCurve->detach(); }
    if (plot->showBalance == false) { balanceLCurve->detach(); balanceRCurve->detach(); }
}
// Construct a ride plot.
//   parent/window/context - owning widget, chart window and athlete context
//   scope/secScope        - primary/secondary data series when used as a
//                           single-series plot
//   wanttext              - whether in-plot text (titles etc.) is wanted
// The long initialiser list sets the default visibility for each data
// series; most are then overridden from user settings by the chart window.
AllPlot::AllPlot(QWidget *parent, AllPlotWindow *window, Context *context, RideFile::SeriesType scope, RideFile::SeriesType secScope, bool wanttext):
    QwtPlot(parent),
    rideItem(NULL),
    shade_zones(true),
    showPowerState(3),
    showAltSlopeState(0),
    showATISS(false),
    showANTISS(false),
    showNP(false),
    showXP(false),
    showAP(false),
    showHr(true),
    showTcore(true),
    showSpeed(true),
    showAccel(false),
    showPowerD(false),
    showCadD(false),
    showTorqueD(false),
    showHrD(false),
    showCad(true),
    showAlt(true),
    showSlope(false),
    showTemp(true),
    showWind(true),
    showTorque(true),
    showBalance(true),
    showRV(true),
    showRGCT(true),
    showRCad(true),
    showSmO2(true),
    showtHb(true),
    showO2Hb(true),
    showHHb(true),
    showGear(true),
    bydist(false),
    scope(scope),
    secondaryScope(secScope),
    context(context),
    parent(parent),
    window(window),
    wanttext(wanttext),
    isolation(false)
{
    // zone shading is on by default but honours the global setting
    if (appsettings->value(this, GC_SHADEZONES, true).toBool()==false)
        shade_zones = false;

    smooth = 1;
    wantxaxis = wantaxis = true;
    setAutoDelete(false); // no - we are managing it via the AllPlotObjects now

    referencePlot = NULL;
    tooltip = NULL;
    _canvasPicker = NULL;

    // curve color object
    curveColors = new CurveColors(this, true);

    // create a background object for shading
    bg = new AllPlotBackground(this);
    bg->attach(this);

    //insertLegend(new QwtLegend(), QwtPlot::BottomLegend);
    setCanvasBackground(GColor(CRIDEPLOTBACKGROUND));
    static_cast<QwtPlotCanvas*>(canvas())->setFrameStyle(QFrame::NoFrame);

    // set the axes that we use.. yLeft 3 is ALWAYS the highlighter axes and never visible
    // yLeft 4 is balance stuff
    setAxesCount(QwtAxis::yLeft, 4);
    setAxesCount(QwtAxis::yRight, 4);
    setAxesCount(QwtAxis::xBottom, 1);

    setXTitle();

    // the standard (non-compare) set of curves, grid and highlighters
    standard = new AllPlotObject(this);
    standard->intervalHighlighterCurve->setSamples(new IntervalPlotData(this, context, window));

    setAxisMaxMinor(xBottom, 0);
    enableAxis(xBottom, true);
    setAxisVisible(xBottom, true);

    // highlighter
    ScaleScaleDraw *sd = new ScaleScaleDraw;
    sd->setTickLength(QwtScaleDiv::MajorTick, 2);
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    setAxisScaleDraw(QwtAxisId(QwtAxis::yLeft, 2), sd);

    QPalette pal = palette();
    pal.setBrush(QPalette::Background, QBrush(GColor(CRIDEPLOTBACKGROUND)));
    pal.setColor(QPalette::WindowText, QColor(Qt::gray));
    pal.setColor(QPalette::Text, QColor(Qt::gray));
    axisWidget(QwtAxisId(QwtAxis::yLeft, 2))->setPalette(pal);
    setAxisScale(QwtAxisId(QwtAxis::yLeft, 2), 0, 100);
    setAxisVisible(QwtAxisId(QwtAxis::yLeft, 2), false); // hide interval axis

    // no minor ticks on any y axis
    setAxisMaxMinor(yLeft, 0);
    setAxisMaxMinor(QwtAxisId(QwtAxis::yLeft, 1), 0);
    setAxisMaxMinor(QwtAxisId(QwtAxis::yLeft, 3), 0);
    setAxisMaxMinor(yRight, 0);
    setAxisMaxMinor(QwtAxisId(QwtAxis::yRight, 1), 0);
    setAxisMaxMinor(QwtAxisId(QwtAxis::yRight, 2), 0);
    setAxisMaxMinor(QwtAxisId(QwtAxis::yRight, 3), 0);

    // watch events on the axis widgets (e.g. for click-to-isolate)
    axisWidget(QwtPlot::yLeft)->installEventFilter(this);
    axisWidget(QwtPlot::yRight)->installEventFilter(this);
    axisWidget(QwtAxisId(QwtAxis::yLeft, 1))->installEventFilter(this);
    axisWidget(QwtAxisId(QwtAxis::yLeft, 3))->installEventFilter(this);
    axisWidget(QwtAxisId(QwtAxis::yRight, 1))->installEventFilter(this);
    axisWidget(QwtAxisId(QwtAxis::yRight, 2))->installEventFilter(this);
    axisWidget(QwtAxisId(QwtAxis::yRight, 3))->installEventFilter(this);

    configChanged(CONFIG_APPEARANCE); // set colors
}
// Free compare-mode curves, the standard plot objects and any helpers.
AllPlot::~AllPlot()
{
    // drain and free any comparison curves
    while (compares.isEmpty() == false) {
        QwtPlotCurve *curve = compares.takeFirst();
        curve->detach();
        delete curve;
    }

    // the standard curves, grid and highlighters
    delete standard;

    // deleting a NULL pointer is a no-op, so no guards required
    delete tooltip;
    delete _canvasPicker;
}
// Re-apply user configuration to the whole plot: antialiasing, per-series
// pen colours and widths, optional fills below curves, axis scale draws
// and axis widget palettes. Called on construction and whenever the
// appearance settings change.
void
AllPlot::configChanged(qint32)
{
    // NOTE(review): QPen::setWidth() takes an int, so the 0.5 default
    // truncates to 0 which Qt treats as a 1px "cosmetic" pen — presumably
    // intentional, confirm.
    double width = appsettings->value(this, GC_LINEWIDTH, 0.5).toDouble();

    labelFont.fromString(appsettings->value(this, GC_FONT_CHARTLABELS, QFont().toString()).toString());
    labelFont.setPointSize(appsettings->value(NULL, GC_FONT_CHARTLABELS_SIZE, 8).toInt());

    // antialias every curve when the global setting is enabled
    if (appsettings->value(this, GC_ANTIALIAS, true).toBool() == true) {
        standard->wattsCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->atissCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->antissCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->npCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rvCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rcadCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rgctCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->gearCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->smo2Curve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->thbCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->o2hbCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->hhbCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->xpCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->apCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->wCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->mCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->hrCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->tcoreCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->speedCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->accelCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->wattsDCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->cadDCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->nmDCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->hrDCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->cadCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->altCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->slopeCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->altSlopeCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->tempCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->windCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->torqueCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->lteCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rteCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->lpsCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rpsCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->lpcoCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rpcoCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->lppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->lpppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->rpppCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->balanceLCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->balanceRCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->intervalHighlighterCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
        standard->intervalHoverCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
    }

    // altSlope has its own plot-style handling
    setAltSlopePlotStyle(standard->altSlopeCurve);

    setCanvasBackground(GColor(CRIDEPLOTBACKGROUND));

    // one pen per data series, coloured from the configured palette
    QPen wattsPen = QPen(GColor(CPOWER));
    wattsPen.setWidth(width);
    standard->wattsCurve->setPen(wattsPen);
    standard->wattsDCurve->setPen(wattsPen);
    QPen npPen = QPen(GColor(CNPOWER));
    npPen.setWidth(width);
    standard->npCurve->setPen(npPen);
    QPen rvPen = QPen(GColor(CRV));
    rvPen.setWidth(width);
    standard->rvCurve->setPen(rvPen);
    QPen rcadPen = QPen(GColor(CRCAD));
    rcadPen.setWidth(width);
    standard->rcadCurve->setPen(rcadPen);
    QPen rgctPen = QPen(GColor(CRGCT));
    rgctPen.setWidth(width);
    standard->rgctCurve->setPen(rgctPen);
    QPen gearPen = QPen(GColor(CGEAR));
    gearPen.setWidth(width);
    standard->gearCurve->setPen(gearPen);
    QPen smo2Pen = QPen(GColor(CSMO2));
    smo2Pen.setWidth(width);
    standard->smo2Curve->setPen(smo2Pen);
    QPen thbPen = QPen(GColor(CTHB));
    thbPen.setWidth(width);
    standard->thbCurve->setPen(thbPen);
    QPen o2hbPen = QPen(GColor(CO2HB));
    o2hbPen.setWidth(width);
    standard->o2hbCurve->setPen(o2hbPen);
    QPen hhbPen = QPen(GColor(CHHB));
    hhbPen.setWidth(width);
    standard->hhbCurve->setPen(hhbPen);
    QPen antissPen = QPen(GColor(CANTISS));
    antissPen.setWidth(width);
    standard->antissCurve->setPen(antissPen);
    QPen atissPen = QPen(GColor(CATISS));
    atissPen.setWidth(width);
    standard->atissCurve->setPen(atissPen);
    QPen xpPen = QPen(GColor(CXPOWER));
    xpPen.setWidth(width);
    standard->xpCurve->setPen(xpPen);
    QPen apPen = QPen(GColor(CAPOWER));
    apPen.setWidth(width);
    standard->apCurve->setPen(apPen);
    QPen hrPen = QPen(GColor(CHEARTRATE));
    hrPen.setWidth(width);
    standard->hrCurve->setPen(hrPen);
    QPen tcorePen = QPen(GColor(CCORETEMP));
    tcorePen.setWidth(width);
    standard->tcoreCurve->setPen(tcorePen);
    standard->hrDCurve->setPen(hrPen);
    QPen speedPen = QPen(GColor(CSPEED));
    speedPen.setWidth(width);
    standard->speedCurve->setPen(speedPen);
    QPen accelPen = QPen(GColor(CACCELERATION));
    accelPen.setWidth(width);
    standard->accelCurve->setPen(accelPen);
    QPen cadPen = QPen(GColor(CCADENCE));
    cadPen.setWidth(width);
    standard->cadCurve->setPen(cadPen);
    standard->cadDCurve->setPen(cadPen);
    QPen slopePen(GColor(CSLOPE));
    slopePen.setWidth(width);
    standard->slopeCurve->setPen(slopePen);
    QPen altPen(GColor(CALTITUDE));
    altPen.setWidth(width);
    standard->altCurve->setPen(altPen);
    QColor brush_color = GColor(CALTITUDEBRUSH);
    brush_color.setAlpha(200);
    standard->altCurve->setBrush(brush_color);   // fill below the line
    QPen altSlopePen(GCColor::invertColor(GColor(CPLOTBACKGROUND)));
    altSlopePen.setWidth(width);
    standard->altSlopeCurve->setPen(altSlopePen);
    QPen tempPen = QPen(GColor(CTEMP));
    tempPen.setWidth(width);
    standard->tempCurve->setPen(tempPen);
    //QPen windPen = QPen(GColor(CWINDSPEED));
    //windPen.setWidth(width);
    // wind is drawn as a filled region only, no outline
    standard->windCurve->setPen(QPen(Qt::NoPen));
    QColor wbrush_color = GColor(CWINDSPEED);
    wbrush_color.setAlpha(200);
    standard->windCurve->setBrush(wbrush_color);   // fill below the line
    QPen torquePen = QPen(GColor(CTORQUE));
    torquePen.setWidth(width);
    standard->torqueCurve->setPen(torquePen);
    standard->nmDCurve->setPen(torquePen);
    // L/R balance curves swap colours deliberately (left curve drawn in
    // the "right" colour and vice versa) — preserved as-is
    QPen balanceLPen = QPen(GColor(CBALANCERIGHT));
    balanceLPen.setWidth(width);
    standard->balanceLCurve->setPen(balanceLPen);
    QColor brbrush_color = GColor(CBALANCERIGHT);
    brbrush_color.setAlpha(200);
    standard->balanceLCurve->setBrush(brbrush_color);   // fill below the line
    QPen balanceRPen = QPen(GColor(CBALANCELEFT));
    balanceRPen.setWidth(width);
    standard->balanceRCurve->setPen(balanceRPen);
    QColor blbrush_color = GColor(CBALANCELEFT);
    blbrush_color.setAlpha(200);
    standard->balanceRCurve->setBrush(blbrush_color);   // fill below the line
    QPen ltePen = QPen(GColor(CLTE));
    ltePen.setWidth(width);
    standard->lteCurve->setPen(ltePen);
    QPen rtePen = QPen(GColor(CRTE));
    rtePen.setWidth(width);
    standard->rteCurve->setPen(rtePen);
    QPen lpsPen = QPen(GColor(CLPS));
    lpsPen.setWidth(width);
    standard->lpsCurve->setPen(lpsPen);
    QPen rpsPen = QPen(GColor(CRPS));
    rpsPen.setWidth(width);
    standard->rpsCurve->setPen(rpsPen);
    // pedal centre offset and power phase reuse the L/R smoothness colours
    QPen lpcoPen = QPen(GColor(CLPS));
    lpcoPen.setWidth(width);
    standard->lpcoCurve->setPen(lpcoPen);
    QPen rpcoPen = QPen(GColor(CRPS));
    rpcoPen.setWidth(width);
    standard->rpcoCurve->setPen(rpcoPen);
    QPen ldcPen = QPen(GColor(CLPS));
    ldcPen.setWidth(width);
    standard->lppCurve->setPen(ldcPen);
    QPen rdcPen = QPen(GColor(CRPS));
    rdcPen.setWidth(width);
    standard->rppCurve->setPen(rdcPen);
    QPen lpppPen = QPen(GColor(CLPS));
    lpppPen.setWidth(width);
    standard->lpppCurve->setPen(lpppPen);
    QPen rpppPen = QPen(GColor(CRPS));
    rpppPen.setWidth(width);
    standard->rpppCurve->setPen(rpppPen);
    QPen wPen = QPen(GColor(CWBAL));
    wPen.setWidth(width); // don't thicken
    standard->wCurve->setPen(wPen);
    // matches are drawn as small orange squares
    QwtSymbol *sym = new QwtSymbol;
    sym->setStyle(QwtSymbol::Rect);
    sym->setPen(QPen(QColor(255,127,0))); // orange like a match, will make configurable later
    sym->setSize(4);
    standard->mCurve->setSymbol(sym);
    // interval highlight/hover are brush-filled regions with no outline
    QPen ihlPen = QPen(GColor(CINTERVALHIGHLIGHTER));
    ihlPen.setWidth(width);
    standard->intervalHighlighterCurve->setPen(QPen(Qt::NoPen));
    standard->intervalHoverCurve->setPen(QPen(Qt::NoPen));
    QColor ihlbrush = QColor(GColor(CINTERVALHIGHLIGHTER));
    ihlbrush.setAlpha(128);
    standard->intervalHighlighterCurve->setBrush(ihlbrush);   // fill below the line
    QColor hbrush = QColor(Qt::lightGray);
    hbrush.setAlpha(64);
    standard->intervalHoverCurve->setBrush(hbrush);   // fill below the line
    //this->legend()->remove(intervalHighlighterCurve); // don't show in legend
    QPen gridPen(GColor(CPLOTGRID));
    //gridPen.setStyle(Qt::DotLine); // solid line is nicer
    standard->grid->setPen(gridPen);

    // curve brushes
    // when the 'fill' option is on, every curve gets a translucent fill
    // of its own pen colour below the line; otherwise fills are cleared
    if (fill) {
        QColor p;
        p = standard->wattsCurve->pen().color();
        p.setAlpha(64);
        standard->wattsCurve->setBrush(QBrush(p));

        p = standard->atissCurve->pen().color();
        p.setAlpha(64);
        standard->atissCurve->setBrush(QBrush(p));

        p = standard->antissCurve->pen().color();
        p.setAlpha(64);
        standard->antissCurve->setBrush(QBrush(p));

        p = standard->npCurve->pen().color();
        p.setAlpha(64);
        standard->npCurve->setBrush(QBrush(p));

        p = standard->rvCurve->pen().color();
        p.setAlpha(64);
        standard->rvCurve->setBrush(QBrush(p));

        p = standard->rcadCurve->pen().color();
        p.setAlpha(64);
        standard->rcadCurve->setBrush(QBrush(p));

        p = standard->rgctCurve->pen().color();
        p.setAlpha(64);
        standard->rgctCurve->setBrush(QBrush(p));

        p = standard->gearCurve->pen().color();
        p.setAlpha(64);
        standard->gearCurve->setBrush(QBrush(p));

        p = standard->smo2Curve->pen().color();
        p.setAlpha(64);
        standard->smo2Curve->setBrush(QBrush(p));

        p = standard->thbCurve->pen().color();
        p.setAlpha(64);
        standard->thbCurve->setBrush(QBrush(p));

        p = standard->o2hbCurve->pen().color();
        p.setAlpha(64);
        standard->o2hbCurve->setBrush(QBrush(p));

        p = standard->hhbCurve->pen().color();
        p.setAlpha(64);
        standard->hhbCurve->setBrush(QBrush(p));

        p = standard->xpCurve->pen().color();
        p.setAlpha(64);
        standard->xpCurve->setBrush(QBrush(p));

        p = standard->apCurve->pen().color();
        p.setAlpha(64);
        standard->apCurve->setBrush(QBrush(p));

        p = standard->wCurve->pen().color();
        p.setAlpha(64);
        standard->wCurve->setBrush(QBrush(p));

        p = standard->tcoreCurve->pen().color();
        p.setAlpha(64);
        standard->tcoreCurve->setBrush(QBrush(p));

        p = standard->hrCurve->pen().color();
        p.setAlpha(64);
        standard->hrCurve->setBrush(QBrush(p));

        p = standard->accelCurve->pen().color();
        p.setAlpha(64);
        standard->accelCurve->setBrush(QBrush(p));

        p = standard->wattsDCurve->pen().color();
        p.setAlpha(64);
        standard->wattsDCurve->setBrush(QBrush(p));

        p = standard->cadDCurve->pen().color();
        p.setAlpha(64);
        standard->cadDCurve->setBrush(QBrush(p));

        p = standard->nmDCurve->pen().color();
        p.setAlpha(64);
        standard->nmDCurve->setBrush(QBrush(p));

        p = standard->hrDCurve->pen().color();
        p.setAlpha(64);
        standard->hrDCurve->setBrush(QBrush(p));

        p = standard->speedCurve->pen().color();
        p.setAlpha(64);
        standard->speedCurve->setBrush(QBrush(p));

        p = standard->cadCurve->pen().color();
        p.setAlpha(64);
        standard->cadCurve->setBrush(QBrush(p));

        p = standard->torqueCurve->pen().color();
        p.setAlpha(64);
        standard->torqueCurve->setBrush(QBrush(p));

        p = standard->tempCurve->pen().color();
        p.setAlpha(64);
        standard->tempCurve->setBrush(QBrush(p));

        p = standard->lteCurve->pen().color();
        p.setAlpha(64);
        standard->lteCurve->setBrush(QBrush(p));

        p = standard->rteCurve->pen().color();
        p.setAlpha(64);
        standard->rteCurve->setBrush(QBrush(p));

        p = standard->lpsCurve->pen().color();
        p.setAlpha(64);
        standard->lpsCurve->setBrush(QBrush(p));

        p = standard->rpsCurve->pen().color();
        p.setAlpha(64);
        standard->rpsCurve->setBrush(QBrush(p));

        p = standard->lpcoCurve->pen().color();
        p.setAlpha(64);
        standard->lpcoCurve->setBrush(QBrush(p));

        p = standard->rpcoCurve->pen().color();
        p.setAlpha(64);
        standard->rpcoCurve->setBrush(QBrush(p));

        p = standard->lppCurve->pen().color();
        p.setAlpha(64);
        standard->lppCurve->setBrush(QBrush(p));

        p = standard->rppCurve->pen().color();
        p.setAlpha(64);
        standard->rppCurve->setBrush(QBrush(p));

        p = standard->lpppCurve->pen().color();
        p.setAlpha(64);
        standard->lpppCurve->setBrush(QBrush(p));

        p = standard->rpppCurve->pen().color();
        p.setAlpha(64);
        standard->rpppCurve->setBrush(QBrush(p));

        p = standard->slopeCurve->pen().color();
        p.setAlpha(64);
        standard->slopeCurve->setBrush(QBrush(p));

        /*p = standard->altSlopeCurve->pen().color();
        p.setAlpha(64);
        standard->altSlopeCurve->setBrush(QBrush(p));

        p = standard->balanceLCurve->pen().color();
        p.setAlpha(64);
        standard->balanceLCurve->setBrush(QBrush(p));

        p = standard->balanceRCurve->pen().color();
        p.setAlpha(64);
        standard->balanceRCurve->setBrush(QBrush(p));*/
    } else {
        standard->wattsCurve->setBrush(Qt::NoBrush);
        standard->atissCurve->setBrush(Qt::NoBrush);
        standard->antissCurve->setBrush(Qt::NoBrush);
        standard->rvCurve->setBrush(Qt::NoBrush);
        standard->rcadCurve->setBrush(Qt::NoBrush);
        standard->rgctCurve->setBrush(Qt::NoBrush);
        standard->gearCurve->setBrush(Qt::NoBrush);
        standard->smo2Curve->setBrush(Qt::NoBrush);
        standard->thbCurve->setBrush(Qt::NoBrush);
        standard->o2hbCurve->setBrush(Qt::NoBrush);
        standard->hhbCurve->setBrush(Qt::NoBrush);
        standard->npCurve->setBrush(Qt::NoBrush);
        standard->xpCurve->setBrush(Qt::NoBrush);
        standard->apCurve->setBrush(Qt::NoBrush);
        standard->wCurve->setBrush(Qt::NoBrush);
        standard->hrCurve->setBrush(Qt::NoBrush);
        standard->tcoreCurve->setBrush(Qt::NoBrush);
        standard->speedCurve->setBrush(Qt::NoBrush);
        standard->accelCurve->setBrush(Qt::NoBrush);
        standard->wattsDCurve->setBrush(Qt::NoBrush);
        standard->cadDCurve->setBrush(Qt::NoBrush);
        standard->nmDCurve->setBrush(Qt::NoBrush);
        standard->hrDCurve->setBrush(Qt::NoBrush);
        standard->cadCurve->setBrush(Qt::NoBrush);
        standard->torqueCurve->setBrush(Qt::NoBrush);
        standard->tempCurve->setBrush(Qt::NoBrush);
        standard->lteCurve->setBrush(Qt::NoBrush);
        standard->rteCurve->setBrush(Qt::NoBrush);
        standard->lpsCurve->setBrush(Qt::NoBrush);
        standard->rpsCurve->setBrush(Qt::NoBrush);
        standard->lpcoCurve->setBrush(Qt::NoBrush);
        standard->rpcoCurve->setBrush(Qt::NoBrush);
        standard->lppCurve->setBrush(Qt::NoBrush);
        standard->rppCurve->setBrush(Qt::NoBrush);
        standard->lpppCurve->setBrush(Qt::NoBrush);
        standard->rpppCurve->setBrush(Qt::NoBrush);
        standard->slopeCurve->setBrush(Qt::NoBrush);
        //standard->altSlopeCurve->setBrush((Qt::NoBrush));
        //standard->balanceLCurve->setBrush(Qt::NoBrush);
        //standard->balanceRCurve->setBrush(Qt::NoBrush);
    }

    QPalette pal = palette();
    pal.setBrush(QPalette::Background, QBrush(GColor(CRIDEPLOTBACKGROUND)));
    setPalette(pal);

    // tick draw
    TimeScaleDraw *tsd = new TimeScaleDraw(&this->bydist) ;
    tsd->setTickLength(QwtScaleDiv::MajorTick, 3);
    setAxisScaleDraw(QwtPlot::xBottom, tsd);
    pal.setColor(QPalette::WindowText, GColor(CPLOTMARKER));
    pal.setColor(QPalette::Text, GColor(CPLOTMARKER));
    axisWidget(QwtPlot::xBottom)->setPalette(pal);
    enableAxis(xBottom, true);
    setAxisVisible(xBottom, true);

    // yLeft 0: power — coloured to the power series
    ScaleScaleDraw *sd = new ScaleScaleDraw;
    sd->setTickLength(QwtScaleDiv::MajorTick, 3);
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    setAxisScaleDraw(QwtPlot::yLeft, sd);
    pal.setColor(QPalette::WindowText, GColor(CPOWER));
    pal.setColor(QPalette::Text, GColor(CPOWER));
    axisWidget(QwtPlot::yLeft)->setPalette(pal);

    // some axis show multiple things so color them
    // to match up if only one curve is selected;
    // e.g. left, 1 typically has HR, Cadence
    // on the same curve but can also have SmO2 and Temp
    // since it gets set a few places we do it with
    // a special method
    setLeftOnePalette();
    setRightPalette();

    // yLeft 3: balance and pedal metrics
    sd = new ScaleScaleDraw;
    sd->setTickLength(QwtScaleDiv::MajorTick, 3);
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    setAxisScaleDraw(QwtAxisId(QwtAxis::yLeft, 3), sd);
    pal.setColor(QPalette::WindowText, GColor(CPLOTMARKER));
    pal.setColor(QPalette::Text, GColor(CPLOTMARKER));
    axisWidget(QwtAxisId(QwtAxis::yLeft, 3))->setPalette(pal);

    // yRight 1: altitude
    sd = new ScaleScaleDraw;
    sd->setTickLength(QwtScaleDiv::MajorTick, 3);
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    setAxisScaleDraw(QwtAxisId(QwtAxis::yRight, 1), sd);
    pal.setColor(QPalette::WindowText, GColor(CALTITUDE));
    pal.setColor(QPalette::Text, GColor(CALTITUDE));
    axisWidget(QwtAxisId(QwtAxis::yRight, 1))->setPalette(pal);

    // yRight 2: W' balance, scaled to kJ via the 0.001 factor
    sd = new ScaleScaleDraw;
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    sd->setTickLength(QwtScaleDiv::MajorTick, 3);
    sd->setFactor(0.001f); // in kJ
    setAxisScaleDraw(QwtAxisId(QwtAxis::yRight, 2), sd);
    pal.setColor(QPalette::WindowText, GColor(CWBAL));
    pal.setColor(QPalette::Text, GColor(CWBAL));
    axisWidget(QwtAxisId(QwtAxis::yRight, 2))->setPalette(pal);

    // yRight 3: aerobic/anaerobic TISS
    sd = new ScaleScaleDraw;
    sd->enableComponent(ScaleScaleDraw::Ticks, false);
    sd->enableComponent(ScaleScaleDraw::Backbone, false);
    sd->setTickLength(QwtScaleDiv::MajorTick, 3);
    setAxisScaleDraw(QwtAxisId(QwtAxis::yRight, 3), sd);
    pal.setColor(QPalette::WindowText, GColor(CATISS));
    pal.setColor(QPalette::Text, GColor(CATISS));
    axisWidget(QwtAxisId(QwtAxis::yRight, 3))->setPalette(pal);

    // remember the colours we just applied (for isolate/restore)
    curveColors->saveState();
}
void
AllPlot::setLeftOnePalette()
{
// always use the last, so BPM overrides
// Cadence then Temp then SmO2 ...
QColor single = QColor(Qt::red);
if (standard->smo2Curve->isVisible()) {
single = GColor(CSMO2);
}
if (standard->tempCurve->isVisible() && !context->athlete->useMetricUnits) {
single = GColor(CTEMP);
}
if (standard->cadCurve->isVisible()) {
single = GColor(CCADENCE);
}
if (standard->hrCurve->isVisible()) {
single = GColor(CHEARTRATE);
}
if (standard->tcoreCurve->isVisible()) {
single = GColor(CCORETEMP);
}
// lets go
ScaleScaleDraw *sd = new ScaleScaleDraw;
sd->setTickLength(QwtScaleDiv::MajorTick, 3);
sd->enableComponent(ScaleScaleDraw::Ticks, false);
sd->enableComponent(ScaleScaleDraw::Backbone, false);
setAxisScaleDraw(QwtAxisId(QwtAxis::yLeft, 1), sd);
QPalette pal = palette();
pal.setBrush(QPalette::Background, QBrush(GColor(CRIDEPLOTBACKGROUND)));
pal.setColor(QPalette::WindowText, single);
pal.setColor(QPalette::Text, single);
// now work it out ....
axisWidget(QwtAxisId(QwtAxis::yLeft, 1))->setPalette(pal);
}
void
AllPlot::setRightPalette()
{
// always use the last, so BPM overrides
// Cadence then Temp then SmO2 ...
QColor single = QColor(Qt::green);
if (standard->speedCurve->isVisible()) {
single = GColor(CSPEED);
}
if (standard->tempCurve->isVisible() && context->athlete->useMetricUnits) {
single = GColor(CTEMP);
}
if (standard->o2hbCurve->isVisible()) {
single = GColor(CO2HB);
}
if (standard->hhbCurve->isVisible()) {
single = GColor(CHHB);
}
if (standard->thbCurve->isVisible()) {
single = GColor(CTHB);
}
if (standard->torqueCurve->isVisible()) {
single = GColor(CTORQUE);
}
// lets go
ScaleScaleDraw *sd = new ScaleScaleDraw;
sd->setTickLength(QwtScaleDiv::MajorTick, 3);
sd->enableComponent(ScaleScaleDraw::Ticks, false);
sd->enableComponent(ScaleScaleDraw::Backbone, false);
sd->setDecimals(2);
setAxisScaleDraw(QwtAxisId(QwtAxis::yRight, 0), sd);
QPalette pal = palette();
pal.setBrush(QPalette::Background, QBrush(GColor(CRIDEPLOTBACKGROUND)));
pal.setColor(QPalette::WindowText, single);
pal.setColor(QPalette::Text, single);
// now work it out ....
axisWidget(QwtAxisId(QwtAxis::yRight, 0))->setPalette(pal);
}
// Toggle the interval highlight/hover overlays on or off by attaching or
// detaching their curves from this plot.
void
AllPlot::setHighlightIntervals(bool state)
{
    if (state == false) {
        standard->intervalHighlighterCurve->detach();
        standard->intervalHoverCurve->detach();
    } else {
        standard->intervalHighlighterCurve->attach(this);
        standard->intervalHoverCurve->attach(this);
    }
}
// Plain value holder for one sample of ride data, one member per series
// (time plus power, HR, running, muscle-oxygen, pedal metrics etc.).
// Presumably used while smoothing/resampling series for display — confirm
// at the call sites.
struct DataPoint {

    double time, hr, watts, atiss, antiss, np, rv, rcad, rgct,
           smo2, thb, o2hb, hhb, ap, xp, speed, cad,
           alt, temp, wind, torque, lrbalance, lte, rte, lps, rps,
           lpco, rpco, lppb, rppb, lppe, rppe, lpppb, rpppb, lpppe, rpppe,
           kphd, wattsd, cadd, nmd, hrd, slope, tcore;

    // straight memberwise constructor; parameter order matches the member
    // declaration order above
    DataPoint(double t, double h, double w, double at, double an, double n, double rv, double rcad, double rgct,
              double smo2, double thb, double o2hb, double hhb, double l, double x, double s, double c,
              double a, double te, double wi, double tq, double lrb, double lte, double rte, double lps, double rps,
              double lpco, double rpco, double lppb, double rppb, double lppe, double rppe, double lpppb, double rpppb, double lpppe, double rpppe,
              double kphd, double wattsd, double cadd, double nmd, double hrd, double sl, double tcore) :

        time(t), hr(h), watts(w), atiss(at), antiss(an), np(n), rv(rv), rcad(rcad), rgct(rgct),
        smo2(smo2), thb(thb), o2hb(o2hb), hhb(hhb), ap(l), xp(x), speed(s), cad(c),
        alt(a), temp(te), wind(wi), torque(tq), lrbalance(lrb), lte(lte), rte(rte), lps(lps), rps(rps),
        lpco(lpco), rpco(rpco), lppb(lppb), rppb(rppb), lppe(lppe), rppe(rppe), lpppb(lpppb), rpppb(rpppb), lpppe(lpppe), rpppe(rpppe),
        kphd(kphd), wattsd(wattsd), cadd(cadd), nmd(nmd), hrd(hrd), slope(sl), tcore(tcore) {}
};
// Accessor: whether power-zone shading is currently enabled for this plot.
bool AllPlot::shadeZones() const
{
    const bool enabled = shade_zones;
    return enabled;
}
void
AllPlot::setAxisTitle(QwtAxisId axis, QString label)
{
    // Build the user-configured chart-label font (family from the per-chart
    // setting, point size from the global setting, default 8pt).
    QFont chartFont;
    chartFont.fromString(appsettings->value(this, GC_FONT_CHARTLABELS, QFont().toString()).toString());
    chartFont.setPointSize(appsettings->value(NULL, GC_FONT_CHARTLABELS_SIZE, 8).toInt());

    // Apply the font to both the tick labels and the axis title text.
    QwtPlot::setAxisFont(axis, chartFont);

    QwtText titleText(label);
    titleText.setFont(chartFont);
    QwtPlot::setAxisTitle(axis, titleText);
}
void AllPlot::refreshZoneLabels()
{
foreach(AllPlotZoneLabel *label, zoneLabels) {
label->detach();
delete label;
}
zoneLabels.clear();
if (rideItem && context->athlete->zones()) {
int zone_range = context->athlete->zones()->whichRange(rideItem->dateTime.date());
// generate labels for existing zones
if (zone_range >= 0) {
int num_zones = context->athlete->zones()->numZones(zone_range);
for (int z = 0; z < num_zones; z ++) {
AllPlotZoneLabel *label = new AllPlotZoneLabel(this, z);
label->attach(this);
zoneLabels.append(label);
}
}
}
}
// Rebuild the text labels shown next to each W'bal "match" marker on the
// given plot-object set. Labels are cleared unconditionally and recreated
// only when W' display is enabled and match data exists.
void
AllPlot::setMatchLabels(AllPlotObject *objects)
{
    // clear anyway — stale labels must not survive a data refresh
    foreach(QwtPlotMarker *p, objects->matchLabels) {
        p->detach();
        delete p;
    }
    objects->matchLabels.clear();

    // add new ones, but only if showW
    if (showW && objects->mCurve) {

        // alternate label placement above/below successive matches so
        // neighbouring labels don't overlap
        bool below = false;

        // zip through the matches and add a label
        for (size_t i=0; i<objects->mCurve->data()->size(); i++) {

            // mCurve->data()->sample(i);

            // Qwt uses its own text objects; y value is in joules so /1000
            // renders kJ with one decimal place
            QwtText text(QString("%1").arg(objects->mCurve->data()->sample(i).y()/1000.00f, 4, 'f', 1));
            text.setFont(labelFont);
            text.setColor(QColor(255,127,0)); // supposed to be configurable !

            // make that mark -- always above with topN
            QwtPlotMarker *label = new QwtPlotMarker();
            label->setLabel(text);
            label->setValue(objects->mCurve->data()->sample(i).x(), objects->mCurve->data()->sample(i).y());
            label->setYAxis(objects->mCurve->yAxis());
            label->setSpacing(6); // not px but by yaxis value !? mad.
            label->setLabelAlignment(below ? (Qt::AlignBottom | Qt::AlignCenter) :
                                             (Qt::AlignTop | Qt::AlignCenter));

            // and attach
            label->attach(this);
            objects->matchLabels << label;

            // toggle top / bottom
            below = !below;
        }
    }
}
void
AllPlot::recalc(AllPlotObject *objects)
{
if (referencePlot !=NULL){
return;
}
if (objects->timeArray.empty())
return;
// skip null rides
if (!rideItem || !rideItem->ride()) return;
int rideTimeSecs = (int) ceil(objects->timeArray[objects->timeArray.count()-1]);
if (rideTimeSecs > 7*24*60*60) {
// clear all the curves
QwtArray<double> data;
QVector<QwtIntervalSample> intData;
objects->wCurve->setSamples(data,data);
objects->mCurve->setSamples(data,data);
setMatchLabels(objects);
if (!objects->atissArray.empty()) objects->atissCurve->setSamples(data, data);
if (!objects->antissArray.empty()) objects->antissCurve->setSamples(data, data);
if (!objects->npArray.empty()) objects->npCurve->setSamples(data, data);
if (!objects->rvArray.empty()) objects->rvCurve->setSamples(data, data);
if (!objects->rcadArray.empty()) objects->rcadCurve->setSamples(data, data);
if (!objects->rgctArray.empty()) objects->rgctCurve->setSamples(data, data);
if (!objects->gearArray.empty()) objects->gearCurve->setSamples(data, data);
if (!objects->smo2Array.empty()) objects->smo2Curve->setSamples(data, data);
if (!objects->thbArray.empty()) objects->thbCurve->setSamples(data, data);
if (!objects->o2hbArray.empty()) objects->o2hbCurve->setSamples(data, data);
if (!objects->hhbArray.empty()) objects->hhbCurve->setSamples(data, data);
if (!objects->xpArray.empty()) objects->xpCurve->setSamples(data, data);
if (!objects->apArray.empty()) objects->apCurve->setSamples(data, data);
if (!objects->wattsArray.empty()) objects->wattsCurve->setSamples(data, data);
if (!objects->hrArray.empty()) objects->hrCurve->setSamples(data, data);
if (!objects->tcoreArray.empty()) objects->tcoreCurve->setSamples(data, data);
if (!objects->speedArray.empty()) objects->speedCurve->setSamples(data, data);
// deltas
if (!objects->accelArray.empty()) objects->accelCurve->setSamples(data, data);
if (!objects->wattsDArray.empty()) objects->wattsDCurve->setSamples(data, data);
if (!objects->cadDArray.empty()) objects->cadDCurve->setSamples(data, data);
if (!objects->nmDArray.empty()) objects->nmDCurve->setSamples(data, data);
if (!objects->hrDArray.empty()) objects->hrDCurve->setSamples(data, data);
if (!objects->cadArray.empty()) objects->cadCurve->setSamples(data, data);
if (!objects->altArray.empty()) {
objects->altCurve->setSamples(data, data);
objects->altSlopeCurve->setSamples(data, data);
}
if (!objects->slopeArray.empty()) objects->slopeCurve->setSamples(data, data);
if (!objects->tempArray.empty()) objects->tempCurve->setSamples(data, data);
if (!objects->windArray.empty()) objects->windCurve->setSamples(new QwtIntervalSeriesData(intData));
if (!objects->torqueArray.empty()) objects->torqueCurve->setSamples(data, data);
// left/right data
if (!objects->balanceArray.empty()) {
objects->balanceLCurve->setSamples(data, data);
objects->balanceRCurve->setSamples(data, data);
}
if (!objects->lteArray.empty()) objects->lteCurve->setSamples(data, data);
if (!objects->rteArray.empty()) objects->rteCurve->setSamples(data, data);
if (!objects->lpsArray.empty()) objects->lpsCurve->setSamples(data, data);
if (!objects->rpsArray.empty()) objects->rpsCurve->setSamples(data, data);
if (!objects->lpcoArray.empty()) objects->lpcoCurve->setSamples(data, data);
if (!objects->rpcoArray.empty()) objects->rpcoCurve->setSamples(data, data);
if (!objects->lppbArray.empty()) objects->lppCurve->setSamples(new QwtIntervalSeriesData(intData));
if (!objects->rppbArray.empty()) objects->rppCurve->setSamples(new QwtIntervalSeriesData(intData));;
if (!objects->lpppbArray.empty()) objects->lpppCurve->setSamples(new QwtIntervalSeriesData(intData));
if (!objects->rpppbArray.empty()) objects->rpppCurve->setSamples(new QwtIntervalSeriesData(intData));
return;
}
// if recintsecs is longer than the smoothing, or equal to the smoothing there is no point in even trying
int applysmooth = smooth <= rideItem->ride()->recIntSecs() ? 0 : smooth;
// compare mode breaks
if (context->isCompareIntervals && applysmooth == 0) applysmooth = 1;
// we should only smooth the curves if objects->smoothed rate is greater than sample rate
if (applysmooth > 0) {
double totalWatts = 0.0;
double totalNP = 0.0;
double totalRCad = 0.0;
double totalRV = 0.0;
double totalRGCT = 0.0;
double totalSmO2 = 0.0;
double totaltHb = 0.0;
double totalO2Hb = 0.0;
double totalHHb = 0.0;
double totalATISS = 0.0;
double totalANTISS = 0.0;
double totalXP = 0.0;
double totalAP = 0.0;
double totalHr = 0.0;
double totalTcore = 0.0;
double totalSpeed = 0.0;
double totalAccel = 0.0;
double totalWattsD = 0.0;
double totalCadD = 0.0;
double totalNmD = 0.0;
double totalHrD = 0.0;
double totalCad = 0.0;
double totalDist = 0.0;
double totalAlt = 0.0;
double totalSlope = 0.0;
double totalTemp = 0.0;
double totalWind = 0.0;
double totalTorque = 0.0;
double totalBalance = 0.0;
double totalLTE = 0.0;
double totalRTE = 0.0;
double totalLPS = 0.0;
double totalRPS = 0.0;
double totalLPCO = 0.0;
double totalRPCO = 0.0;
double totalLPPB = 0.0;
double totalRPPB = 0.0;
double totalLPPE = 0.0;
double totalRPPE = 0.0;
double totalLPPPB = 0.0;
double totalRPPPB = 0.0;
double totalLPPPE = 0.0;
double totalRPPPE = 0.0;
QList<DataPoint> list;
objects->smoothWatts.resize(rideTimeSecs + 1);
objects->smoothNP.resize(rideTimeSecs + 1);
objects->smoothGear.resize(rideTimeSecs + 1);
objects->smoothRV.resize(rideTimeSecs + 1);
objects->smoothRCad.resize(rideTimeSecs + 1);
objects->smoothRGCT.resize(rideTimeSecs + 1);
objects->smoothSmO2.resize(rideTimeSecs + 1);
objects->smoothtHb.resize(rideTimeSecs + 1);
objects->smoothO2Hb.resize(rideTimeSecs + 1);
objects->smoothHHb.resize(rideTimeSecs + 1);
objects->smoothAT.resize(rideTimeSecs + 1);
objects->smoothANT.resize(rideTimeSecs + 1);
objects->smoothXP.resize(rideTimeSecs + 1);
objects->smoothAP.resize(rideTimeSecs + 1);
objects->smoothHr.resize(rideTimeSecs + 1);
objects->smoothTcore.resize(rideTimeSecs + 1);
objects->smoothSpeed.resize(rideTimeSecs + 1);
objects->smoothAccel.resize(rideTimeSecs + 1);
objects->smoothWattsD.resize(rideTimeSecs + 1);
objects->smoothCadD.resize(rideTimeSecs + 1);
objects->smoothNmD.resize(rideTimeSecs + 1);
objects->smoothHrD.resize(rideTimeSecs + 1);
objects->smoothCad.resize(rideTimeSecs + 1);
objects->smoothTime.resize(rideTimeSecs + 1);
objects->smoothDistance.resize(rideTimeSecs + 1);
objects->smoothAltitude.resize(rideTimeSecs + 1);
objects->smoothSlope.resize(rideTimeSecs + 1);
objects->smoothTemp.resize(rideTimeSecs + 1);
objects->smoothWind.resize(rideTimeSecs + 1);
objects->smoothRelSpeed.resize(rideTimeSecs + 1);
objects->smoothTorque.resize(rideTimeSecs + 1);
objects->smoothBalanceL.resize(rideTimeSecs + 1);
objects->smoothBalanceR.resize(rideTimeSecs + 1);
objects->smoothLTE.resize(rideTimeSecs + 1);
objects->smoothRTE.resize(rideTimeSecs + 1);
objects->smoothLPS.resize(rideTimeSecs + 1);
objects->smoothRPS.resize(rideTimeSecs + 1);
objects->smoothLPCO.resize(rideTimeSecs + 1);
objects->smoothRPCO.resize(rideTimeSecs + 1);
objects->smoothLPP.resize(rideTimeSecs + 1);
objects->smoothRPP.resize(rideTimeSecs + 1);
objects->smoothLPPP.resize(rideTimeSecs + 1);
objects->smoothRPPP.resize(rideTimeSecs + 1);
// do the smoothing by calculating the average of the "applysmooth" values left
// of the current data point - for points in time smaller than "applysmooth"
// only the available datapoints left are used to build the average
int i = 0;
for (int secs = 0; secs <= rideTimeSecs; ++secs) {
while ((i < objects->timeArray.count()) && (objects->timeArray[i] <= secs)) {
DataPoint dp(objects->timeArray[i],
(!objects->hrArray.empty() ? objects->hrArray[i] : 0),
(!objects->wattsArray.empty() ? objects->wattsArray[i] : 0),
(!objects->atissArray.empty() ? objects->atissArray[i] : 0),
(!objects->antissArray.empty() ? objects->antissArray[i] : 0),
(!objects->npArray.empty() ? objects->npArray[i] : 0),
(!objects->rvArray.empty() ? objects->rvArray[i] : 0),
(!objects->rcadArray.empty() ? objects->rcadArray[i] : 0),
(!objects->rgctArray.empty() ? objects->rgctArray[i] : 0),
(!objects->smo2Array.empty() ? objects->smo2Array[i] : 0),
(!objects->thbArray.empty() ? objects->thbArray[i] : 0),
(!objects->o2hbArray.empty() ? objects->o2hbArray[i] : 0),
(!objects->hhbArray.empty() ? objects->hhbArray[i] : 0),
(!objects->apArray.empty() ? objects->apArray[i] : 0),
(!objects->xpArray.empty() ? objects->xpArray[i] : 0),
(!objects->speedArray.empty() ? objects->speedArray[i] : 0),
(!objects->cadArray.empty() ? objects->cadArray[i] : 0),
(!objects->altArray.empty() ? objects->altArray[i] : 0),
(!objects->tempArray.empty() ? objects->tempArray[i] : 0),
(!objects->windArray.empty() ? objects->windArray[i] : 0),
(!objects->torqueArray.empty() ? objects->torqueArray[i] : 0),
(!objects->balanceArray.empty() ? objects->balanceArray[i] : 0),
(!objects->lteArray.empty() ? objects->lteArray[i] : 0),
(!objects->rteArray.empty() ? objects->rteArray[i] : 0),
(!objects->lpsArray.empty() ? objects->lpsArray[i] : 0),
(!objects->rpsArray.empty() ? objects->rpsArray[i] : 0),
(!objects->lpcoArray.empty() ? objects->lpcoArray[i] : 0),
(!objects->rpcoArray.empty() ? objects->rpcoArray[i] : 0),
(!objects->lppbArray.empty() ? objects->lppbArray[i] : 0),
(!objects->rppbArray.empty() ? objects->rppbArray[i] : 0),
(!objects->lppeArray.empty() ? objects->lppeArray[i] : 0),
(!objects->rppeArray.empty() ? objects->rppeArray[i] : 0),
(!objects->lpppbArray.empty() ? objects->lpppbArray[i] : 0),
(!objects->rpppbArray.empty() ? objects->rpppbArray[i] : 0),
(!objects->lpppeArray.empty() ? objects->lpppeArray[i] : 0),
(!objects->rpppeArray.empty() ? objects->rpppeArray[i] : 0),
(!objects->accelArray.empty() ? objects->accelArray[i] : 0),
(!objects->wattsDArray.empty() ? objects->wattsDArray[i] : 0),
(!objects->cadDArray.empty() ? objects->cadDArray[i] : 0),
(!objects->nmDArray.empty() ? objects->nmDArray[i] : 0),
(!objects->hrDArray.empty() ? objects->hrDArray[i] : 0),
(!objects->slopeArray.empty() ? objects->slopeArray[i] : 0),
(!objects->tcoreArray.empty() ? objects->tcoreArray[i] : 0));
if (!objects->wattsArray.empty()) totalWatts += objects->wattsArray[i];
if (!objects->npArray.empty()) totalNP += objects->npArray[i];
if (!objects->rvArray.empty()) totalRV += objects->rvArray[i];
if (!objects->rcadArray.empty()) totalRCad += objects->rcadArray[i];
if (!objects->rgctArray.empty()) totalRGCT += objects->rgctArray[i];
if (!objects->smo2Array.empty()) totalSmO2 += objects->smo2Array[i];
if (!objects->thbArray.empty()) totaltHb += objects->thbArray[i];
if (!objects->o2hbArray.empty()) totalO2Hb += objects->o2hbArray[i];
if (!objects->hhbArray.empty()) totalHHb += objects->hhbArray[i];
if (!objects->atissArray.empty()) totalATISS += objects->atissArray[i];
if (!objects->antissArray.empty()) totalANTISS += objects->antissArray[i];
if (!objects->xpArray.empty()) totalXP += objects->xpArray[i];
if (!objects->apArray.empty()) totalAP += objects->apArray[i];
if (!objects->tcoreArray.empty()) totalTcore += objects->tcoreArray[i];
if (!objects->hrArray.empty()) totalHr += objects->hrArray[i];
if (!objects->accelArray.empty()) totalAccel += objects->accelArray[i];
if (!objects->wattsDArray.empty()) totalWattsD += objects->wattsDArray[i];
if (!objects->cadDArray.empty()) totalCadD += objects->cadDArray[i];
if (!objects->nmDArray.empty()) totalNmD += objects->nmDArray[i];
if (!objects->hrDArray.empty()) totalHrD += objects->hrDArray[i];
if (!objects->speedArray.empty()) totalSpeed += objects->speedArray[i];
if (!objects->cadArray.empty()) totalCad += objects->cadArray[i];
if (!objects->altArray.empty()) totalAlt += objects->altArray[i];
if (!objects->slopeArray.empty()) totalSlope += objects->slopeArray[i];
if (!objects->windArray.empty()) totalWind += objects->windArray[i];
if (!objects->torqueArray.empty()) totalTorque += objects->torqueArray[i];
if (!objects->tempArray.empty() ) {
if (objects->tempArray[i] == RideFile::NoTemp) {
dp.temp = (i>0 && !list.empty()?list.back().temp:0.0);
totalTemp += dp.temp;
}
else {
totalTemp += objects->tempArray[i];
}
}
// left/right pedal data
if (!objects->balanceArray.empty())
totalBalance += (objects->balanceArray[i]>0?objects->balanceArray[i]:50);
if (!objects->lteArray.empty())
totalLTE += (objects->lteArray[i]>0?objects->lteArray[i]:0);
if (!objects->rteArray.empty())
totalRTE += (objects->rteArray[i]>0?objects->rteArray[i]:0);
if (!objects->lpsArray.empty())
totalLPS += (objects->lpsArray[i]>0?objects->lpsArray[i]:0);
if (!objects->rpsArray.empty())
totalRPS += (objects->rpsArray[i]>0?objects->rpsArray[i]:0);
if (!objects->lpcoArray.empty())
totalLPCO += objects->lpcoArray[i];
if (!objects->rpcoArray.empty())
totalRPCO += objects->rpcoArray[i];
if (!objects->lppbArray.empty())
totalLPPB += (objects->lppbArray[i]>0?objects->lppbArray[i]:0);
if (!objects->rppbArray.empty())
totalRPPB += (objects->rppbArray[i]>0?objects->rppbArray[i]:0);
if (!objects->lppeArray.empty())
totalLPPE += (objects->lppeArray[i]>0?objects->lppeArray[i]:0);
if (!objects->rppeArray.empty())
totalRPPE += (objects->rppeArray[i]>0?objects->rppeArray[i]:0);
if (!objects->lpppbArray.empty())
totalLPPPB += (objects->lpppbArray[i]>0?objects->lpppbArray[i]:0);
if (!objects->rpppbArray.empty())
totalRPPPB += (objects->rpppbArray[i]>0?objects->rpppbArray[i]:0);
if (!objects->lpppeArray.empty())
totalLPPPE += (objects->lpppeArray[i]>0?objects->lpppeArray[i]:0);
if (!objects->rpppeArray.empty())
totalRPPPE += (objects->rpppeArray[i]>0?objects->rpppeArray[i]:0);
totalDist = objects->distanceArray[i];
list.append(dp);
++i;
}
while (!list.empty() && (list.front().time < secs - applysmooth)) {
DataPoint &dp = list.front();
totalWatts -= dp.watts;
totalNP -= dp.np;
totalRV -= dp.rv;
totalRCad -= dp.rcad;
totalRGCT -= dp.rgct;
totalSmO2 -= dp.smo2;
totaltHb -= dp.thb;
totalO2Hb -= dp.o2hb;
totalHHb -= dp.hhb;
totalATISS -= dp.atiss;
totalANTISS -= dp.antiss;
totalAP -= dp.ap;
totalXP -= dp.xp;
totalHr -= dp.hr;
totalTcore -= dp.tcore;
totalSpeed -= dp.speed;
totalAccel -= dp.kphd;
totalWattsD -= dp.wattsd;
totalCadD -= dp.cadd;
totalNmD -= dp.nmd;
totalHrD -= dp.hrd;
totalCad -= dp.cad;
totalAlt -= dp.alt;
totalSlope -= dp.slope;
totalTemp -= dp.temp;
totalWind -= dp.wind;
totalTorque -= dp.torque;
totalLTE -= dp.lte;
totalRTE -= dp.rte;
totalLPS -= dp.lps;
totalRPS -= dp.rps;
totalLPCO -= dp.lpco;
totalRPCO -= dp.rpco;
totalLPPB -= dp.lppb;
totalRPPB -= dp.rppb;
totalLPPE -= dp.lppe;
totalRPPE -= dp.rppe;
totalLPPPB -= dp.lpppb;
totalRPPPB -= dp.rpppb;
totalLPPPE -= dp.lpppe;
totalRPPPE -= dp.rpppe;
totalBalance -= (dp.lrbalance>0?dp.lrbalance:50);
list.removeFirst();
}
// TODO: this is wrong. We should do a weighted average over the
// seconds represented by each point...
if (list.empty()) {
objects->smoothWatts[secs] = 0.0;
objects->smoothNP[secs] = 0.0;
objects->smoothRV[secs] = 0.0;
objects->smoothRCad[secs] = 0.0;
objects->smoothRGCT[secs] = 0.0;
objects->smoothSmO2[secs] = 0.0;
objects->smoothtHb[secs] = 0.0;
objects->smoothO2Hb[secs] = 0.0;
objects->smoothHHb[secs] = 0.0;
objects->smoothAT[secs] = 0.0;
objects->smoothANT[secs] = 0.0;
objects->smoothXP[secs] = 0.0;
objects->smoothAP[secs] = 0.0;
objects->smoothHr[secs] = 0.0;
objects->smoothTcore[secs] = 0.0;
objects->smoothSpeed[secs] = 0.0;
objects->smoothAccel[secs] = 0.0;
objects->smoothWattsD[secs] = 0.0;
objects->smoothCadD[secs] = 0.0;
objects->smoothNmD[secs] = 0.0;
objects->smoothHrD[secs] = 0.0;
objects->smoothCad[secs] = 0.0;
objects->smoothAltitude[secs] = ((secs > 0) ? objects->smoothAltitude[secs - 1] : objects->altArray[secs] ) ;
objects->smoothSlope[secs] = 0.0;
objects->smoothTemp[secs] = 0.0;
objects->smoothWind[secs] = 0.0;
objects->smoothRelSpeed[secs] = QwtIntervalSample();
objects->smoothTorque[secs] = 0.0;
objects->smoothLTE[secs] = 0.0;
objects->smoothRTE[secs] = 0.0;
objects->smoothLPS[secs] = 0.0;
objects->smoothRPS[secs] = 0.0;
objects->smoothLPCO[secs] = 0.0;
objects->smoothRPCO[secs] = 0.0;
objects->smoothLPP[secs] = QwtIntervalSample();
objects->smoothRPP[secs] = QwtIntervalSample();
objects->smoothLPPP[secs] = QwtIntervalSample();
objects->smoothRPPP[secs] = QwtIntervalSample();
objects->smoothBalanceL[secs] = 50;
objects->smoothBalanceR[secs] = 50;
}
else {
objects->smoothWatts[secs] = totalWatts / list.size();
objects->smoothNP[secs] = totalNP / list.size();
objects->smoothRV[secs] = totalRV / list.size();
objects->smoothRCad[secs] = totalRCad / list.size();
objects->smoothRGCT[secs] = totalRGCT / list.size();
objects->smoothSmO2[secs] = totalSmO2 / list.size();
objects->smoothtHb[secs] = totaltHb / list.size();
objects->smoothO2Hb[secs] = totalO2Hb / list.size();
objects->smoothHHb[secs] = totalHHb / list.size();
objects->smoothAT[secs] = totalATISS / list.size();
objects->smoothANT[secs] = totalANTISS / list.size();
objects->smoothXP[secs] = totalXP / list.size();
objects->smoothAP[secs] = totalAP / list.size();
objects->smoothHr[secs] = totalHr / list.size();
objects->smoothTcore[secs] = totalTcore / list.size();
objects->smoothSpeed[secs] = totalSpeed / list.size();
objects->smoothAccel[secs] = totalAccel / double(list.size());
objects->smoothWattsD[secs] = totalWattsD / double(list.size());
objects->smoothCadD[secs] = totalCadD / double(list.size());
objects->smoothNmD[secs] = totalNmD / double(list.size());
objects->smoothHrD[secs] = totalHrD / double(list.size());
objects->smoothCad[secs] = totalCad / list.size();
objects->smoothAltitude[secs] = totalAlt / list.size();
objects->smoothSlope[secs] = totalSlope / double(list.size());
objects->smoothTemp[secs] = totalTemp / list.size();
objects->smoothWind[secs] = totalWind / list.size();
objects->smoothRelSpeed[secs] = QwtIntervalSample( bydist ? totalDist : secs / 60.0, QwtInterval(qMin(totalWind / list.size(), totalSpeed / list.size()), qMax(totalWind / list.size(), totalSpeed / list.size()) ) );
objects->smoothTorque[secs] = totalTorque / list.size();
// left /right pedal data
double balance = totalBalance / list.size();
if (balance == 0) {
objects->smoothBalanceL[secs] = 50;
objects->smoothBalanceR[secs] = 50;
} else if (balance >= 50) {
objects->smoothBalanceL[secs] = balance;
objects->smoothBalanceR[secs] = 50;
}
else {
objects->smoothBalanceL[secs] = 50;
objects->smoothBalanceR[secs] = balance;
}
objects->smoothLTE[secs] = totalLTE / list.size();
objects->smoothRTE[secs] = totalRTE / list.size();
objects->smoothLPS[secs] = totalLPS / list.size();
objects->smoothRPS[secs] = totalRPS / list.size();
objects->smoothLPCO[secs] = totalLPCO / list.size();<|fim▁hole|> objects->smoothLPPP[secs] = QwtIntervalSample( bydist ? totalDist : secs / 60.0, QwtInterval(totalLPPPB / list.size(), totalLPPPE / list.size() ) );
objects->smoothRPPP[secs] = QwtIntervalSample( bydist ? totalDist : secs / 60.0, QwtInterval(totalRPPPB / list.size(), totalRPPPE / list.size() ) );
}
objects->smoothDistance[secs] = totalDist;
objects->smoothTime[secs] = secs / 60.0;
// set data series (gearRatio) which are not smoothed at all
if (objects->gearArray.empty() || secs >= objects->gearArray.count()) {
objects->smoothGear[secs] = 0.0f;
} else {
objects->smoothGear[secs] = objects->gearArray[secs];
}
}
} else {
// no standard->smoothing .. just raw data
objects->smoothWatts.resize(0);
objects->smoothNP.resize(0);
objects->smoothGear.resize(0);
objects->smoothRV.resize(0);
objects->smoothRCad.resize(0);
objects->smoothRGCT.resize(0);
objects->smoothSmO2.resize(0);
objects->smoothtHb.resize(0);
objects->smoothO2Hb.resize(0);
objects->smoothHHb.resize(0);
objects->smoothAT.resize(0);
objects->smoothANT.resize(0);
objects->smoothXP.resize(0);
objects->smoothAP.resize(0);
objects->smoothHr.resize(0);
objects->smoothTcore.resize(0);
objects->smoothSpeed.resize(0);
objects->smoothAccel.resize(0);
objects->smoothWattsD.resize(0);
objects->smoothCadD.resize(0);
objects->smoothNmD.resize(0);
objects->smoothHrD.resize(0);
objects->smoothCad.resize(0);
objects->smoothTime.resize(0);
objects->smoothDistance.resize(0);
objects->smoothAltitude.resize(0);
objects->smoothSlope.resize(0);
objects->smoothTemp.resize(0);
objects->smoothWind.resize(0);
objects->smoothRelSpeed.resize(0);
objects->smoothTorque.resize(0);
objects->smoothLTE.resize(0);
objects->smoothRTE.resize(0);
objects->smoothLPS.resize(0);
objects->smoothRPS.resize(0);
objects->smoothLPCO.resize(0);
objects->smoothRPCO.resize(0);
objects->smoothLPP.resize(0);
objects->smoothRPP.resize(0);
objects->smoothLPPP.resize(0);
objects->smoothRPPP.resize(0);
objects->smoothBalanceL.resize(0);
objects->smoothBalanceR.resize(0);
foreach (RideFilePoint *dp, rideItem->ride()->dataPoints()) {
objects->smoothWatts.append(dp->watts);
objects->smoothNP.append(dp->np);
objects->smoothRV.append(dp->rvert);
objects->smoothRCad.append(dp->rcad);
objects->smoothRGCT.append(dp->rcontact);
objects->smoothGear.append(dp->gear);
objects->smoothSmO2.append(dp->smo2);
objects->smoothtHb.append(dp->thb);
objects->smoothO2Hb.append(dp->o2hb);
objects->smoothHHb.append(dp->hhb);
objects->smoothAT.append(dp->atiss);
objects->smoothANT.append(dp->antiss);
objects->smoothXP.append(dp->xp);
objects->smoothAP.append(dp->apower);
objects->smoothHr.append(dp->hr);
objects->smoothTcore.append(dp->tcore);
objects->smoothSpeed.append(context->athlete->useMetricUnits ? dp->kph : dp->kph * MILES_PER_KM);
objects->smoothAccel.append(dp->kphd);
objects->smoothWattsD.append(dp->wattsd);
objects->smoothCadD.append(dp->cadd);
objects->smoothNmD.append(dp->nmd);
objects->smoothHrD.append(dp->hrd);
objects->smoothCad.append(dp->cad);
objects->smoothTime.append(dp->secs/60);
objects->smoothDistance.append(context->athlete->useMetricUnits ? dp->km : dp->km * MILES_PER_KM);
objects->smoothAltitude.append(context->athlete->useMetricUnits ? dp->alt : dp->alt * FEET_PER_METER);
objects->smoothSlope.append(dp->slope);
if (dp->temp == RideFile::NoTemp && !objects->smoothTemp.empty())
dp->temp = objects->smoothTemp.last();
objects->smoothTemp.append(context->athlete->useMetricUnits ? dp->temp : dp->temp * FAHRENHEIT_PER_CENTIGRADE + FAHRENHEIT_ADD_CENTIGRADE);
objects->smoothWind.append(context->athlete->useMetricUnits ? dp->headwind : dp->headwind * MILES_PER_KM);
objects->smoothTorque.append(dp->nm);
if (dp->lrbalance == 0) {
objects->smoothBalanceL.append(50);
objects->smoothBalanceR.append(50);
}
else if (dp->lrbalance >= 50) {
objects->smoothBalanceL.append(dp->lrbalance);
objects->smoothBalanceR.append(50);
}
else {
objects->smoothBalanceL.append(50);
objects->smoothBalanceR.append(dp->lrbalance);
}
objects->smoothLTE.append(dp->lte);
objects->smoothRTE.append(dp->rte);
objects->smoothLPS.append(dp->lps);
objects->smoothRPS.append(dp->rps);
objects->smoothLPCO.append(dp->lpco);
objects->smoothRPCO.append(dp->rpco);
objects->smoothLPP.append(QwtIntervalSample( bydist ? objects->smoothDistance.last() : objects->smoothTime.last(), QwtInterval(dp->lppb , dp->rppe ) ));
objects->smoothRPP.append(QwtIntervalSample( bydist ? objects->smoothDistance.last() : objects->smoothTime.last(), QwtInterval(dp->rppb , dp->rppe ) ));
objects->smoothLPPP.append(QwtIntervalSample( bydist ? objects->smoothDistance.last() : objects->smoothTime.last(), QwtInterval(dp->lpppb , dp->lpppe ) ));
objects->smoothRPPP.append(QwtIntervalSample( bydist ? objects->smoothDistance.last() : objects->smoothTime.last(), QwtInterval(dp->rpppb , dp->rpppe ) ));
double head = dp->headwind * (context->athlete->useMetricUnits ? 1.0f : MILES_PER_KM);
double speed = dp->kph * (context->athlete->useMetricUnits ? 1.0f : MILES_PER_KM);
objects->smoothRelSpeed.append(QwtIntervalSample( bydist ? objects->smoothDistance.last() : objects->smoothTime.last(), QwtInterval(qMin(head, speed) , qMax(head, speed) ) ));
}
}
QVector<double> &xaxis = bydist ? objects->smoothDistance : objects->smoothTime;
int startingIndex = qMin(smooth, xaxis.count());
int totalPoints = xaxis.count() - startingIndex;
// set curves - we set the intervalHighlighter to whichver is available
//W' curve set to whatever data we have
if (!objects->wprime.empty()) {
objects->wCurve->setSamples(bydist ? objects->wprimeDist.data() : objects->wprimeTime.data(),
objects->wprime.data(), objects->wprime.count());
objects->mCurve->setSamples(bydist ? objects->matchDist.data() : objects->matchTime.data(),
objects->match.data(), objects->match.count());
setMatchLabels(objects);
}
if (!objects->wattsArray.empty()) {
objects->wattsCurve->setSamples(xaxis.data() + startingIndex, objects->smoothWatts.data() + startingIndex, totalPoints);
}
if (!objects->antissArray.empty()) {
objects->antissCurve->setSamples(xaxis.data() + startingIndex, objects->smoothANT.data() + startingIndex, totalPoints);
}
if (!objects->atissArray.empty()) {
objects->atissCurve->setSamples(xaxis.data() + startingIndex, objects->smoothAT.data() + startingIndex, totalPoints);
}
if (!objects->rvArray.empty()) {
objects->rvCurve->setSamples(xaxis.data() + startingIndex, objects->smoothRV.data() + startingIndex, totalPoints);
}
if (!objects->rcadArray.empty()) {
objects->rcadCurve->setSamples(xaxis.data() + startingIndex, objects->smoothRCad.data() + startingIndex, totalPoints);
}
if (!objects->rgctArray.empty()) {
objects->rgctCurve->setSamples(xaxis.data() + startingIndex, objects->smoothRGCT.data() + startingIndex, totalPoints);
}
if (!objects->gearArray.empty()) {
objects->gearCurve->setSamples(xaxis.data() + startingIndex, objects->smoothGear.data() + startingIndex, totalPoints);
}
if (!objects->smo2Array.empty()) {
objects->smo2Curve->setSamples(xaxis.data() + startingIndex, objects->smoothSmO2.data() + startingIndex, totalPoints);
}
if (!objects->thbArray.empty()) {
objects->thbCurve->setSamples(xaxis.data() + startingIndex, objects->smoothtHb.data() + startingIndex, totalPoints);
}
if (!objects->o2hbArray.empty()) {
objects->o2hbCurve->setSamples(xaxis.data() + startingIndex, objects->smoothO2Hb.data() + startingIndex, totalPoints);
}
if (!objects->hhbArray.empty()) {
objects->hhbCurve->setSamples(xaxis.data() + startingIndex, objects->smoothHHb.data() + startingIndex, totalPoints);
}
if (!objects->npArray.empty()) {
objects->npCurve->setSamples(xaxis.data() + startingIndex, objects->smoothNP.data() + startingIndex, totalPoints);
}
if (!objects->xpArray.empty()) {
objects->xpCurve->setSamples(xaxis.data() + startingIndex, objects->smoothXP.data() + startingIndex, totalPoints);
}
if (!objects->apArray.empty()) {
objects->apCurve->setSamples(xaxis.data() + startingIndex, objects->smoothAP.data() + startingIndex, totalPoints);
}
if (!objects->hrArray.empty()) {
objects->hrCurve->setSamples(xaxis.data() + startingIndex, objects->smoothHr.data() + startingIndex, totalPoints);
}
if (!objects->tcoreArray.empty()) {
objects->tcoreCurve->setSamples(xaxis.data() + startingIndex, objects->smoothTcore.data() + startingIndex, totalPoints);
}
if (!objects->speedArray.empty()) {
objects->speedCurve->setSamples(xaxis.data() + startingIndex, objects->smoothSpeed.data() + startingIndex, totalPoints);
}
if (!objects->accelArray.empty()) {
objects->accelCurve->setSamples(xaxis.data() + startingIndex, objects->smoothAccel.data() + startingIndex, totalPoints);
}
if (!objects->wattsDArray.empty()) {
objects->wattsDCurve->setSamples(xaxis.data() + startingIndex, objects->smoothWattsD.data() + startingIndex, totalPoints);
}
if (!objects->cadDArray.empty()) {
objects->cadDCurve->setSamples(xaxis.data() + startingIndex, objects->smoothCadD.data() + startingIndex, totalPoints);
}
if (!objects->nmDArray.empty()) {
objects->nmDCurve->setSamples(xaxis.data() + startingIndex, objects->smoothNmD.data() + startingIndex, totalPoints);
}
if (!objects->hrDArray.empty()) {
objects->hrDCurve->setSamples(xaxis.data() + startingIndex, objects->smoothHrD.data() + startingIndex, totalPoints);
}
if (!objects->cadArray.empty()) {
objects->cadCurve->setSamples(xaxis.data() + startingIndex, objects->smoothCad.data() + startingIndex, totalPoints);
}
if (!objects->altArray.empty()) {
objects->altCurve->setSamples(xaxis.data() + startingIndex, objects->smoothAltitude.data() + startingIndex, totalPoints);
objects->altSlopeCurve->setSamples(xaxis.data() + startingIndex, objects->smoothAltitude.data() + startingIndex, totalPoints);
}
if (!objects->slopeArray.empty()) {
objects->slopeCurve->setSamples(xaxis.data() + startingIndex, objects->smoothSlope.data() + startingIndex, totalPoints);
}
if (!objects->tempArray.empty()) {
objects->tempCurve->setSamples(xaxis.data() + startingIndex, objects->smoothTemp.data() + startingIndex, totalPoints);
}
if (!objects->windArray.empty()) {
objects->windCurve->setSamples(new QwtIntervalSeriesData(objects->smoothRelSpeed));
}
if (!objects->torqueArray.empty()) {
objects->torqueCurve->setSamples(xaxis.data() + startingIndex, objects->smoothTorque.data() + startingIndex, totalPoints);
}
// left/right pedals
if (!objects->balanceArray.empty()) {
objects->balanceLCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothBalanceL.data() + startingIndex, totalPoints);
objects->balanceRCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothBalanceR.data() + startingIndex, totalPoints);
}
if (!objects->lteArray.empty()) objects->lteCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothLTE.data() + startingIndex, totalPoints);
if (!objects->rteArray.empty()) objects->rteCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothRTE.data() + startingIndex, totalPoints);
if (!objects->lpsArray.empty()) objects->lpsCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothLPS.data() + startingIndex, totalPoints);
if (!objects->rpsArray.empty()) objects->rpsCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothRPS.data() + startingIndex, totalPoints);
if (!objects->lpcoArray.empty()) objects->lpcoCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothLPCO.data() + startingIndex, totalPoints);
if (!objects->rpcoArray.empty()) objects->rpcoCurve->setSamples(xaxis.data() + startingIndex,
objects->smoothRPCO.data() + startingIndex, totalPoints);
if (!objects->lppbArray.empty()) {
objects->lppCurve->setSamples(new QwtIntervalSeriesData(objects->smoothLPP));
}
if (!objects->rppbArray.empty()) {
objects->rppCurve->setSamples(new QwtIntervalSeriesData(objects->smoothRPP));
}
if (!objects->lpppbArray.empty()) {
objects->lpppCurve->setSamples(new QwtIntervalSeriesData(objects->smoothLPPP));
}
if (!objects->rpppbArray.empty()) {
objects->rpppCurve->setSamples(new QwtIntervalSeriesData(objects->smoothRPPP));
}
setAltSlopePlotStyle(objects->altSlopeCurve);
setYMax();
if (!context->isCompareIntervals) {
refreshReferenceLines();
refreshIntervalMarkers();
refreshCalibrationMarkers();
refreshZoneLabels();
}
// remember the curves and colors
isolation = false;
curveColors->saveState();
replot();
}
void
AllPlot::refreshIntervalMarkers()
{
QwtIndPlotMarker::resetDrawnLabels();
foreach(QwtPlotMarker *mrk, standard->d_mrk) {
mrk->detach();
delete mrk;
}
standard->d_mrk.clear();
if (rideItem && rideItem->ride()) {
foreach(IntervalItem *interval, rideItem->intervals()) {
bool nolabel = false;
// no label, but do add
if (interval->type != RideFileInterval::USER) nolabel = true;
QwtPlotMarker *mrk = new QwtIndPlotMarker;
standard->d_mrk.append(mrk);
mrk->attach(this);
mrk->setLineStyle(QwtPlotMarker::VLine);
mrk->setLabelAlignment(Qt::AlignRight | Qt::AlignTop);
if (nolabel) mrk->setLinePen(QPen(QColor(127,127,127,64), 0, Qt::DashLine));
else mrk->setLinePen(QPen(GColor(CPLOTMARKER), 0, Qt::DashLine));
// put matches on second line down
QString name(interval->name);
if (interval->name.startsWith(tr("Match"))) name = QString("\n%1").arg(interval->name);
QwtText text(wanttext && !nolabel ? name : "");
if (!nolabel) {
text.setFont(QFont("Helvetica", 10, QFont::Bold));
if (interval->name.startsWith(tr("Match")))
text.setColor(GColor(CWBAL));
else
text.setColor(GColor(CPLOTMARKER));
}
if (!bydist)
mrk->setValue(interval->start / 60.0, 0.0);
else
mrk->setValue((context->athlete->useMetricUnits ? 1 : MILES_PER_KM) *
interval->startKM, 0.0);
mrk->setLabel(text);
}
}
}
void
AllPlot::refreshCalibrationMarkers()
{
foreach(QwtPlotMarker *mrk, standard->cal_mrk) {
mrk->detach();
delete mrk;
}
standard->cal_mrk.clear();
// only on power based charts
if (scope != RideFile::none && scope != RideFile::watts && scope != RideFile::aTISS && scope != RideFile::anTISS &&
scope != RideFile::NP && scope != RideFile::aPower && scope != RideFile::xPower) return;
QColor color = GColor(CPOWER);
color.setAlpha(15); // almost invisible !
if (rideItem && rideItem->ride()) {
foreach(const RideFileCalibration *calibration, rideItem->ride()->calibrations()) {
QwtPlotMarker *mrk = new QwtPlotMarker;
standard->cal_mrk.append(mrk);
mrk->attach(this);
mrk->setLineStyle(QwtPlotMarker::VLine);
mrk->setLabelAlignment(Qt::AlignRight | Qt::AlignTop);
mrk->setLinePen(QPen(color, 0, Qt::SolidLine));
if (!bydist)
mrk->setValue(calibration->start / 60.0, 0.0);
else
mrk->setValue((context->athlete->useMetricUnits ? 1 : MILES_PER_KM) *
rideItem->ride()->timeToDistance(calibration->start), 0.0);
//Lots of markers can clutter things, so avoid texts for now
//QwtText text(false ? ("\n\n"+calibration.name) : "");
//text.setFont(QFont("Helvetica", 9, QFont::Bold));
//text.setColor(Qt::gray);
//mrk->setLabel(text);
}
}
}
void
AllPlot::refreshReferenceLines()
{
// not supported in compare mode
if (context->isCompareIntervals) return;
// only on power based charts
if (scope != RideFile::none && scope != RideFile::watts && scope != RideFile::aTISS && scope != RideFile::anTISS &&
scope != RideFile::NP && scope != RideFile::aPower && scope != RideFile::xPower) return;
foreach(QwtPlotCurve *referenceLine, standard->referenceLines) {
curveColors->remove(referenceLine);
referenceLine->detach();
delete referenceLine;
}
standard->referenceLines.clear();
if (rideItem && rideItem->ride()) {
foreach(const RideFilePoint *referencePoint, rideItem->ride()->referencePoints()) {
QwtPlotCurve *referenceLine = plotReferenceLine(referencePoint);
if (referenceLine) standard->referenceLines.append(referenceLine);
}
}
}
QwtPlotCurve*
AllPlot::plotReferenceLine(const RideFilePoint *referencePoint)
{
    // not supported in compare mode
    if (context->isCompareIntervals) return NULL;

    // only on power based charts
    if (scope != RideFile::none && scope != RideFile::watts && scope != RideFile::aTISS && scope != RideFile::anTISS &&
        scope != RideFile::NP && scope != RideFile::aPower && scope != RideFile::xPower) return NULL;

    // the line spans the full visible x range at a constant y value
    QVector<double> xvals;
    QVector<double> yvals;
    xvals << axisScaleDiv(QwtPlot::xBottom).lowerBound();
    xvals << axisScaleDiv(QwtPlot::xBottom).upperBound();

    // pick title, pen color and level from whichever channel
    // carries the reference value (first non-zero wins)
    QwtPlotCurve *line = NULL;
    QColor penColor;
    double level = 0;

    if (referencePoint->watts != 0) {
        line = new QwtPlotCurve(tr("Power Ref"));
        penColor = GColor(CPOWER);
        level = referencePoint->watts;
    } else if (referencePoint->hr != 0) {
        line = new QwtPlotCurve(tr("Heart Rate Ref"));
        penColor = GColor(CHEARTRATE);
        level = referencePoint->hr;
    } else if (referencePoint->cad != 0) {
        line = new QwtPlotCurve(tr("Cadence Ref"));
        penColor = GColor(CCADENCE);
        level = referencePoint->cad;
    }

    if (line) {
        line->setYAxis(yLeft);
        QPen pen(penColor);
        pen.setWidth(1);
        pen.setStyle(Qt::DashLine);
        line->setPen(pen);
        yvals.append(level);
        yvals.append(level);

        curveColors->insert(line);
        line->setSamples(xvals,yvals);
        line->attach(this);
        line->setVisible(true);
    }

    return line;
}
void
AllPlot::replot() {

    // clear the interval-label layout state before Qwt redraws, so
    // markers can lay out their labels afresh on every repaint
    QwtIndPlotMarker::resetDrawnLabels();
    QwtPlot::replot();
}
// Recompute visibility, titles and scales for every y axis based on which
// curves are currently shown. Must run after curve data/visibility changes
// and before replot(); changes no state other than the plot's axes.
void
AllPlot::setYMax()
{
    // first lets show or hide, otherwise all the efforts to set scales
    // etc are ignored because the axis is not visible
    if (wantaxis) {

        setAxisVisible(yLeft, standard->wattsCurve->isVisible() ||
                              standard->atissCurve->isVisible() ||
                              standard->antissCurve->isVisible() ||
                              standard->npCurve->isVisible() ||
                              standard->rvCurve->isVisible() ||
                              standard->rcadCurve->isVisible() ||
                              standard->rgctCurve->isVisible() ||
                              standard->gearCurve->isVisible() ||
                              standard->xpCurve->isVisible() ||
                              standard->apCurve->isVisible());

        setAxisVisible(QwtAxisId(QwtAxis::yLeft, 1), standard->hrCurve->isVisible() || standard->tcoreCurve->isVisible() ||
                                                     standard->cadCurve->isVisible() || standard->smo2Curve->isVisible());
        setAxisVisible(QwtAxisId(QwtAxis::yLeft, 2), false);
        setAxisVisible(QwtAxisId(QwtAxis::yLeft, 3), standard->balanceLCurve->isVisible() ||
                                                     standard->lteCurve->isVisible() ||
                                                     standard->lpsCurve->isVisible() ||
                                                     standard->slopeCurve->isVisible() );
        setAxisVisible(yRight, standard->speedCurve->isVisible() || standard->torqueCurve->isVisible() ||
                               standard->thbCurve->isVisible() || standard->o2hbCurve->isVisible() || standard->hhbCurve->isVisible());
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 1), standard->altCurve->isVisible() ||
                                                      standard->altSlopeCurve->isVisible());
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 2), standard->wCurve->isVisible());
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 3), standard->atissCurve->isVisible() || standard->antissCurve->isVisible());

    } else {

        setAxisVisible(yLeft, false);
        setAxisVisible(QwtAxisId(QwtAxis::yLeft,1), false);
        setAxisVisible(QwtAxisId(QwtAxis::yLeft,2), false);
        setAxisVisible(QwtAxisId(QwtAxis::yLeft,3), false);
        setAxisVisible(yRight, false);
        setAxisVisible(QwtAxisId(QwtAxis::yRight,1), false); // normalized from QwtPlot::yRight (same axis id)
        setAxisVisible(QwtAxisId(QwtAxis::yRight,2), false);
        setAxisVisible(QwtAxisId(QwtAxis::yRight,3), false);
    }

    // might want xaxis
    if (wantxaxis) setAxisVisible(xBottom, true);
    else setAxisVisible(xBottom, false);

    // set axis scales
    // QwtAxis::yRight, 3: aerobic/anaerobic TISS
    if (((showATISS && standard->atissCurve->isVisible()) || (showANTISS && standard->antissCurve->isVisible()))
         && rideItem && rideItem->ride()) {

        setAxisTitle(QwtAxisId(QwtAxis::yRight, 3), tr("TISS"));
        // the scale must cover BOTH series; previously the second qMax
        // argument repeated atissCurve, ignoring the anaerobic maximum
        setAxisScale(QwtAxisId(QwtAxis::yRight, 3),0, qMax(standard->atissCurve->maxYValue(), standard->antissCurve->maxYValue()) * 1.05);
        setAxisLabelAlignment(QwtAxisId(QwtAxis::yRight, 3),Qt::AlignVCenter);
    }

    // QwtAxis::yRight, 2: W' balance, padded by 1kJ either side
    if (showW && standard->wCurve->isVisible() && rideItem && rideItem->ride()) {

        setAxisTitle(QwtAxisId(QwtAxis::yRight, 2), tr("W' Balance (kJ)"));
        setAxisScale(QwtAxisId(QwtAxis::yRight, 2), qMin(int(standard->wCurve->minYValue()-1000), 0),
                                                    qMax(int(standard->wCurve->maxYValue()+1000), 0));
        setAxisLabelAlignment(QwtAxisId(QwtAxis::yRight, 2),Qt::AlignVCenter);
    }

    // QwtAxis::yLeft: power
    if (standard->wattsCurve->isVisible()) {
        double maxY = (referencePlot == NULL) ? (1.05 * standard->wattsCurve->maxYValue()) :
                                             (1.05 * referencePlot->standard->wattsCurve->maxYValue());

        int axisHeight = qRound( plotLayout()->canvasRect().height() );
        int step = 100;

        // axisHeight will be zero before first show, so only do this if its non-zero!
        if (axisHeight) {
            QFontMetrics labelWidthMetric = QFontMetrics( QwtPlot::axisFont(yLeft) );
            int labelWidth = labelWidthMetric.width( (maxY > 1000) ? " 8,888 " : " 888 " );
            // grow the tick step until all labels fit in the canvas height
            while( ( qCeil(maxY / step) * labelWidth ) > axisHeight ) nextStep(step);
        }

        QwtValueList xytick[QwtScaleDiv::NTickTypes];
        for (int i=0;i<maxY && i<2000;i+=step)
            xytick[QwtScaleDiv::MajorTick]<<i;

        setAxisTitle(yLeft, tr("Watts"));
        setAxisScaleDiv(QwtPlot::yLeft,QwtScaleDiv(0.0,maxY,xytick));
        axisWidget(yLeft)->update();
    }

    // QwtAxis::yLeft, 1: HR / core temp / SmO2 / cadence (and temp in °F)
    if (standard->hrCurve->isVisible() || standard->tcoreCurve->isVisible() ||
        standard->cadCurve->isVisible() || standard->smo2Curve->isVisible() ||
       (!context->athlete->useMetricUnits && standard->tempCurve->isVisible())) {

        double ymin = 0;
        double ymax = 0;

        QStringList labels;

        if (standard->hrCurve->isVisible()) {
            labels << tr("BPM");
            if (referencePlot == NULL)
                ymax = standard->hrCurve->maxYValue();
            else
                ymax = referencePlot->standard->hrCurve->maxYValue();
        }

        if (standard->tcoreCurve->isVisible()) {
            labels << tr("Core Temperature");
            if (referencePlot == NULL)
                ymax = qMax(ymax, standard->tcoreCurve->maxYValue());
            else
                ymax = qMax(ymax, referencePlot->standard->tcoreCurve->maxYValue());
        }

        if (standard->smo2Curve->isVisible()) {
            labels << tr("SmO2");
            if (referencePlot == NULL)
                ymax = qMax(ymax, standard->smo2Curve->maxYValue());
            else
                ymax = qMax(ymax, referencePlot->standard->smo2Curve->maxYValue());
        }

        if (standard->cadCurve->isVisible()) {
            labels << tr("RPM");
            if (referencePlot == NULL)
                ymax = qMax(ymax, standard->cadCurve->maxYValue());
            else
                ymax = qMax(ymax, referencePlot->standard->cadCurve->maxYValue());
        }

        if (standard->tempCurve->isVisible() && !context->athlete->useMetricUnits) {

            labels << QString::fromUtf8("°F");

            if (referencePlot == NULL) {
                ymin = qMin(ymin, standard->tempCurve->minYValue());
                ymax = qMax(ymax, standard->tempCurve->maxYValue());
            }
            else {
                ymin = qMin(ymin, referencePlot->standard->tempCurve->minYValue());
                ymax = qMax(ymax, referencePlot->standard->tempCurve->maxYValue());
            }
        }

        int axisHeight = qRound( plotLayout()->canvasRect().height() );
        int step = 10;

        if (axisHeight) {
            QFontMetrics labelWidthMetric = QFontMetrics( QwtPlot::axisFont(yLeft) );
            int labelWidth = labelWidthMetric.width( "888 " );
            ymax *= 1.05;
            // grow the tick step until all labels fit in the canvas height
            while((qCeil(ymax / step) * labelWidth) > axisHeight) nextStep(step);
        }

        QwtValueList xytick[QwtScaleDiv::NTickTypes];
        for (int i=0;i<ymax;i+=step)
            xytick[QwtScaleDiv::MajorTick]<<i;

        setAxisTitle(QwtAxisId(QwtAxis::yLeft, 1), labels.join(" / "))
;
        if (labels.count() == 1 && labels[0] == tr("Core Temperature")) {

            // core temp on its own gets a narrow physiological range
            // instead of the generic zero-based scale
            double ymin=36.5f;
            if (ymax < 39.0f) ymax = 39.0f;
            if (standard->tcoreCurve->isVisible() && standard->tcoreCurve->minYValue() < ymin)
                ymin = standard->tcoreCurve->minYValue();
            double step = 0.00f;
            if (ymin < 100.00f) step = (ymax - ymin) / 4;

            // we just have Core Temp ...
            setAxisScale(QwtAxisId(QwtAxis::yLeft, 1),ymin<100.0f?ymin:0, ymax, step);

        } else {

            setAxisScaleDiv(QwtAxisId(QwtAxis::yLeft, 1),QwtScaleDiv(ymin, ymax, xytick));
        }
    }

    // QwtAxis::yLeft, 3: percent based series (balance/TE/PS) and slope
    if ((standard->balanceLCurve->isVisible() || standard->lteCurve->isVisible() ||
        standard->lpsCurve->isVisible()) || standard->slopeCurve->isVisible()){

        QStringList labels;
        double ymin = 0;
        double ymax = 0;

        if (standard->balanceLCurve->isVisible() || standard->lteCurve->isVisible() ||
            standard->lpsCurve->isVisible()) {
            labels << tr("Percent");
            ymin = 0;
            ymax = 100;
        };

        if (standard->slopeCurve->isVisible()) {
            labels << tr("Slope");
            ymin = qMin(standard->slopeCurve->minYValue() * 1.1, ymin);
            ymax = qMax(standard->slopeCurve->maxYValue() * 1.1, ymax);
        };

        // Set range from the curves
        setAxisTitle(QwtAxisId(QwtAxis::yLeft, 3), labels.join(" / "));
        setAxisScale(QwtAxisId(QwtAxis::yLeft, 3), ymin, ymax);

        // not sure about this .. should be done on creation (?)
        standard->balanceLCurve->setBaseline(50);
        standard->balanceRCurve->setBaseline(50);
    }

    // QwtAxis::yRight, 0: speed / temp (°C) / Hb / torque share one axis
    if (standard->speedCurve->isVisible() || standard->thbCurve->isVisible() ||
        standard->o2hbCurve->isVisible() || standard->hhbCurve->isVisible() ||
       (context->athlete->useMetricUnits && standard->tempCurve->isVisible()) ||
        standard->torqueCurve->isVisible()) {

        double ymin = -10;
        double ymax = 0;

        QStringList labels;

        // axis scale draw precision
        static_cast<ScaleScaleDraw*>(axisScaleDraw(QwtAxisId(QwtAxis::yRight, 0)))->setDecimals(2);

        if (standard->speedCurve->isVisible()) {
            labels << (context->athlete->useMetricUnits ? tr("KPH") : tr("MPH"));

            if (referencePlot == NULL)
                ymax = standard->speedCurve->maxYValue();
            else
                ymax = referencePlot->standard->speedCurve->maxYValue();
        }

        if (standard->tempCurve->isVisible() && context->athlete->useMetricUnits) {

            labels << QString::fromUtf8("°C");

            if (referencePlot == NULL) {
                ymin = qMin(ymin, standard->tempCurve->minYValue());
                ymax = qMax(ymax, standard->tempCurve->maxYValue());
            }
            else {
                ymin = qMin(ymin, referencePlot->standard->tempCurve->minYValue());
                ymax = qMax(ymax, referencePlot->standard->tempCurve->maxYValue());
            }
        }

        if (standard->thbCurve->isVisible() || standard->o2hbCurve->isVisible() || standard->hhbCurve->isVisible()) {

            labels << tr("Hb");

            if (referencePlot == NULL)
                ymax = qMax(ymax, standard->thbCurve->maxYValue());
            else
                ymax = qMax(ymax, referencePlot->standard->thbCurve->maxYValue());
        }

        if (standard->torqueCurve->isVisible()) {
            labels << (context->athlete->useMetricUnits ? tr("Nm") : tr("ftLb"));

            if (referencePlot == NULL)
                ymax = qMax(ymax, standard->torqueCurve->maxYValue());
            else
                ymax = qMax(ymax, referencePlot->standard->torqueCurve->maxYValue());
        }

        int axisHeight = qRound( plotLayout()->canvasRect().height() );
        int step = 10;

        if (axisHeight) {
            QFontMetrics labelWidthMetric = QFontMetrics( QwtPlot::axisFont(yRight) );
            int labelWidth = labelWidthMetric.width( "888 " );
            ymax *= 1.05;
            // grow the tick step until all labels fit in the canvas height
            while((qCeil(ymax / step) * labelWidth) > axisHeight) nextStep(step);
        }

        QwtValueList xytick[QwtScaleDiv::NTickTypes];
        for (int i=0;i<ymax;i+=step)
            xytick[QwtScaleDiv::MajorTick]<<i;

        setAxisTitle(QwtAxisId(QwtAxis::yRight, 0), labels.join(" / "));

        // we just have Hb ?
        if (labels.count() == 1 && labels[0] == tr("Hb")) {

            // Hb on its own gets a scale fitted to the data rather
            // than the generic zero-based tick list above
            double ymin=100;
            ymax /= 1.05;
            ymax += .10f;
            if (standard->thbCurve->isVisible() && standard->thbCurve->minYValue() < ymin)
                ymin = standard->thbCurve->minYValue();
            if (standard->o2hbCurve->isVisible() && standard->o2hbCurve->minYValue() < ymin)
                ymin = standard->o2hbCurve->minYValue();
            if (standard->hhbCurve->isVisible() && standard->hhbCurve->minYValue() < ymin)
                ymin = standard->hhbCurve->minYValue();
            double step = 0.00f;
            if (ymin < 100.00f) step = (ymax - ymin) / 5;

            // we just have Hb ...
            setAxisScale(QwtAxisId(QwtAxis::yRight, 0),ymin<100.0f?ymin:0, ymax, step);

        } else

            setAxisScaleDiv(QwtAxisId(QwtAxis::yRight, 0),QwtScaleDiv(0, ymax, xytick));
    }

    // QwtAxis::yRight, 1: altitude, snapped to 100m/ft boundaries
    if (standard->altCurve->isVisible() || standard->altSlopeCurve->isVisible()) {

        setAxisTitle(QwtAxisId(QwtAxis::yRight, 1), context->athlete->useMetricUnits ? tr("Meters") : tr("Feet"));
        double ymin,ymax;

        if (referencePlot == NULL) {
            ymin = standard->altCurve->minYValue();
            ymax = qMax(500.000, 1.05 * standard->altCurve->maxYValue());
        } else {
            ymin = referencePlot->standard->altCurve->minYValue();
            ymax = qMax(500.000, 1.05 * referencePlot->standard->altCurve->maxYValue());
        }
        // round down to the enclosing multiple of 100
        ymin = (ymin < 0 ? -100 : 0) + ( qRound(ymin) / 100 ) * 100;

        int axisHeight = qRound( plotLayout()->canvasRect().height() );
        int step = 100;

        if (axisHeight) {
            QFontMetrics labelWidthMetric = QFontMetrics( QwtPlot::axisFont(yLeft) );
            int labelWidth = labelWidthMetric.width( (ymax > 1000) ? " 8888 " : " 888 " );
            // grow the tick step until all labels fit in the canvas height
            while( ( qCeil( (ymax - ymin ) / step) * labelWidth ) > axisHeight ) nextStep(step);
        }

        QwtValueList xytick[QwtScaleDiv::NTickTypes];
        for (int i=ymin;i<ymax;i+=step)
            xytick[QwtScaleDiv::MajorTick]<<i;

        setAxisScaleDiv(QwtAxisId(QwtAxis::yRight, 1),QwtScaleDiv(ymin,ymax,xytick));
        standard->altCurve->setBaseline(ymin);
    }
}
void
AllPlot::setXTitle()
{
    // x axis is either distance (in the athlete's units) or time
    if (!bydist)
        setAxisTitle(xBottom, tr("")); // time is bloody obvious, less noise
    else
        setAxisTitle(xBottom, context->athlete->useMetricUnits ? "KM" : "Miles");

    enableAxis(xBottom, true);
    setAxisVisible(xBottom, true);
}
// we do this a lot so trying to save a bit of cut and paste woe
static void setSymbol(QwtPlotCurve *curve, int points)
{
QwtSymbol *sym = new QwtSymbol;
sym->setPen(QPen(GColor(CPLOTMARKER)));
if (points < 150) {
sym->setStyle(QwtSymbol::Ellipse);
sym->setSize(3);
} else {
sym->setStyle(QwtSymbol::NoSymbol);
sym->setSize(0);
}
curve->setSymbol(sym);
}
void
AllPlot::setDataFromPlot(AllPlot *plot, int startidx, int stopidx)
{
if (plot == NULL) {
rideItem = NULL;
return;
}
referencePlot = plot;
isolation = false;
curveColors->saveState();
// You got to give me some data first!
if (!plot->standard->distanceArray.count() || !plot->standard->timeArray.count()) return;
// reference the plot for data and state
rideItem = plot->rideItem;
bydist = plot->bydist;
//arrayLength = stopidx-startidx;
if (bydist) {
startidx = plot->distanceIndex(plot->standard->distanceArray[startidx]);
stopidx = plot->distanceIndex(plot->standard->distanceArray[(stopidx>=plot->standard->distanceArray.size()?plot->standard->distanceArray.size()-1:stopidx)]);
} else {
startidx = plot->timeIndex(plot->standard->timeArray[startidx]/60);
stopidx = plot->timeIndex(plot->standard->timeArray[(stopidx>=plot->standard->timeArray.size()?plot->standard->timeArray.size()-1:stopidx)]/60);
}
// center the curve title
standard->curveTitle.setYValue(30);
standard->curveTitle.setXValue(2);
// make sure indexes are still valid
if (startidx > stopidx || startidx < 0 || stopidx < 0) return;
double *smoothW = &plot->standard->smoothWatts[startidx];
double *smoothN = &plot->standard->smoothNP[startidx];
double *smoothRV = &plot->standard->smoothRV[startidx];
double *smoothRCad = &plot->standard->smoothRCad[startidx];
double *smoothRGCT = &plot->standard->smoothRGCT[startidx];
double *smoothGear = &plot->standard->smoothGear[startidx];
double *smoothSmO2 = &plot->standard->smoothSmO2[startidx];
double *smoothtHb = &plot->standard->smoothtHb[startidx];
double *smoothO2Hb = &plot->standard->smoothO2Hb[startidx];
double *smoothHHb = &plot->standard->smoothHHb[startidx];
double *smoothAT = &plot->standard->smoothAT[startidx];
double *smoothANT = &plot->standard->smoothANT[startidx];
double *smoothX = &plot->standard->smoothXP[startidx];
double *smoothL = &plot->standard->smoothAP[startidx];
double *smoothT = &plot->standard->smoothTime[startidx];
double *smoothHR = &plot->standard->smoothHr[startidx];
double *smoothTCORE = &plot->standard->smoothTcore[startidx];
double *smoothS = &plot->standard->smoothSpeed[startidx];
double *smoothC = &plot->standard->smoothCad[startidx];
double *smoothA = &plot->standard->smoothAltitude[startidx];
double *smoothSL = &plot->standard->smoothSlope[startidx];
double *smoothD = &plot->standard->smoothDistance[startidx];
double *smoothTE = &plot->standard->smoothTemp[startidx];
//double *standard->smoothWND = &plot->standard->smoothWind[startidx];
double *smoothNM = &plot->standard->smoothTorque[startidx];
// left/right
double *smoothBALL = &plot->standard->smoothBalanceL[startidx];
double *smoothBALR = &plot->standard->smoothBalanceR[startidx];
double *smoothLTE = &plot->standard->smoothLTE[startidx];
double *smoothRTE = &plot->standard->smoothRTE[startidx];
double *smoothLPS = &plot->standard->smoothLPS[startidx];
double *smoothRPS = &plot->standard->smoothRPS[startidx];
double *smoothLPCO = &plot->standard->smoothLPCO[startidx];
double *smoothRPCO = &plot->standard->smoothRPCO[startidx];
QwtIntervalSample *smoothLPP = &plot->standard->smoothLPP[startidx];
QwtIntervalSample *smoothRPP = &plot->standard->smoothRPP[startidx];
QwtIntervalSample *smoothLPPP = &plot->standard->smoothLPPP[startidx];
QwtIntervalSample *smoothRPPP = &plot->standard->smoothRPPP[startidx];
// deltas
double *smoothAC = &plot->standard->smoothAccel[startidx];
double *smoothWD = &plot->standard->smoothWattsD[startidx];
double *smoothCD = &plot->standard->smoothCadD[startidx];
double *smoothND = &plot->standard->smoothNmD[startidx];
double *smoothHD = &plot->standard->smoothHrD[startidx];
QwtIntervalSample *smoothRS = &plot->standard->smoothRelSpeed[startidx];
double *xaxis = bydist ? smoothD : smoothT;
// attach appropriate curves
//if (this->legend()) this->legend()->hide();
if (showW && rideItem->ride()->wprimeData()->TAU > 0) {
// matches cost
double burnt=0;
int count=0;
foreach(struct Match match, rideItem->ride()->wprimeData()->matches)
if (match.cost > 2000) { //XXX how to decide the threshold for a match?
burnt += match.cost;
count++;
}
QString warn;
if (rideItem->ride()->wprimeData()->minY < 0) {
int minCP = rideItem->ride()->wprimeData()->PCP();
if (minCP)
warn = QString(tr("** Minimum CP=%1 **")).arg(rideItem->ride()->wprimeData()->PCP());
else
warn = QString(tr("** Check W' is set correctly **"));
}
QString matchesText; // consider Singular/Plural in Text / Zero is in most languages handled like Plural
if (count == 1) {
matchesText = tr("Tau=%1, CP=%2, W'=%3, %4 match >2kJ (%5 kJ) %6");
}
else {
matchesText = tr("Tau=%1, CP=%2, W'=%3, %4 matches >2kJ (%5 kJ) %6");
}
QwtText text(matchesText.arg(rideItem->ride()->wprimeData()->TAU)
.arg(rideItem->ride()->wprimeData()->CP)
.arg(rideItem->ride()->wprimeData()->WPRIME)
.arg(count)
.arg(burnt/1000.00, 0, 'f', 1)
.arg(warn));
text.setFont(QFont("Helvetica", 10, QFont::Bold));
text.setColor(GColor(CWBAL));
standard->curveTitle.setLabel(text);
} else {
standard->curveTitle.setLabel(QwtText(""));
}
standard->wCurve->detach();
standard->mCurve->detach();
standard->wattsCurve->detach();
standard->atissCurve->detach();
standard->antissCurve->detach();
standard->npCurve->detach();
standard->rvCurve->detach();
standard->rcadCurve->detach();
standard->rgctCurve->detach();
standard->gearCurve->detach();
standard->smo2Curve->detach();
standard->thbCurve->detach();
standard->o2hbCurve->detach();
standard->hhbCurve->detach();
standard->xpCurve->detach();
standard->apCurve->detach();
standard->hrCurve->detach();
standard->tcoreCurve->detach();
standard->speedCurve->detach();
standard->accelCurve->detach();
standard->wattsDCurve->detach();
standard->cadDCurve->detach();
standard->nmDCurve->detach();
standard->hrDCurve->detach();
standard->cadCurve->detach();
standard->altCurve->detach();
standard->altSlopeCurve->detach();
standard->slopeCurve->detach();
standard->tempCurve->detach();
standard->windCurve->detach();
standard->torqueCurve->detach();
standard->lteCurve->detach();
standard->rteCurve->detach();
standard->lpsCurve->detach();
standard->rpsCurve->detach();
standard->balanceLCurve->detach();
standard->balanceRCurve->detach();
standard->lpcoCurve->detach();
standard->rpcoCurve->detach();
standard->lppCurve->detach();
standard->rppCurve->detach();
standard->lpppCurve->detach();
standard->rpppCurve->detach();
standard->wattsCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showPowerState < 2);
standard->atissCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showATISS);
standard->antissCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showANTISS);
standard->npCurve->setVisible(rideItem->ride()->areDataPresent()->np && showNP);
standard->rvCurve->setVisible(rideItem->ride()->areDataPresent()->rvert && showRV);
standard->rcadCurve->setVisible(rideItem->ride()->areDataPresent()->rcad && showRCad);
standard->gearCurve->setVisible(rideItem->ride()->areDataPresent()->gear && showGear);
standard->smo2Curve->setVisible(rideItem->ride()->areDataPresent()->smo2 && showSmO2);
standard->thbCurve->setVisible(rideItem->ride()->areDataPresent()->thb && showtHb);
standard->o2hbCurve->setVisible(rideItem->ride()->areDataPresent()->o2hb && showO2Hb);
standard->hhbCurve->setVisible(rideItem->ride()->areDataPresent()->hhb && showHHb);
standard->rgctCurve->setVisible(rideItem->ride()->areDataPresent()->rcontact && showRGCT);
standard->xpCurve->setVisible(rideItem->ride()->areDataPresent()->xp && showXP);
standard->apCurve->setVisible(rideItem->ride()->areDataPresent()->apower && showAP);
standard->wCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showW);
standard->mCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showW);
standard->hrCurve->setVisible(rideItem->ride()->areDataPresent()->hr && showHr);
standard->tcoreCurve->setVisible(rideItem->ride()->areDataPresent()->hr && showTcore);
standard->speedCurve->setVisible(rideItem->ride()->areDataPresent()->kph && showSpeed);
standard->accelCurve->setVisible(rideItem->ride()->areDataPresent()->kph && showAccel);
standard->wattsDCurve->setVisible(rideItem->ride()->areDataPresent()->watts && showPowerD);
standard->cadDCurve->setVisible(rideItem->ride()->areDataPresent()->cad && showCadD);
standard->nmDCurve->setVisible(rideItem->ride()->areDataPresent()->nm && showTorqueD);
standard->hrDCurve->setVisible(rideItem->ride()->areDataPresent()->hr && showHrD);
standard->cadCurve->setVisible(rideItem->ride()->areDataPresent()->cad && showCad);
standard->altCurve->setVisible(rideItem->ride()->areDataPresent()->alt && showAlt);
standard->altSlopeCurve->setVisible(rideItem->ride()->areDataPresent()->alt && showAltSlopeState > 0);
standard->slopeCurve->setVisible(rideItem->ride()->areDataPresent()->slope && showSlope);
standard->tempCurve->setVisible(rideItem->ride()->areDataPresent()->temp && showTemp);
standard->windCurve->setVisible(rideItem->ride()->areDataPresent()->headwind && showWind);
standard->torqueCurve->setVisible(rideItem->ride()->areDataPresent()->nm && showTorque);
standard->lteCurve->setVisible(rideItem->ride()->areDataPresent()->lte && showTE);
standard->rteCurve->setVisible(rideItem->ride()->areDataPresent()->rte && showTE);
standard->lpsCurve->setVisible(rideItem->ride()->areDataPresent()->lps && showPS);
standard->rpsCurve->setVisible(rideItem->ride()->areDataPresent()->rps && showPS);
standard->balanceLCurve->setVisible(rideItem->ride()->areDataPresent()->lrbalance && showBalance);
standard->balanceRCurve->setVisible(rideItem->ride()->areDataPresent()->lrbalance && showBalance);
standard->lpcoCurve->setVisible(rideItem->ride()->areDataPresent()->lpco && showPCO);
standard->rpcoCurve->setVisible(rideItem->ride()->areDataPresent()->rpco && showPCO);
standard->lppCurve->setVisible(rideItem->ride()->areDataPresent()->lppb && showDC);
standard->rppCurve->setVisible(rideItem->ride()->areDataPresent()->rppb && showDC);
standard->lpppCurve->setVisible(rideItem->ride()->areDataPresent()->lpppb && showPPP);
standard->rpppCurve->setVisible(rideItem->ride()->areDataPresent()->rpppb && showPPP);
if (showW) {
standard->wCurve->setSamples(bydist ? plot->standard->wprimeDist.data() : plot->standard->wprimeTime.data(),
plot->standard->wprime.data(), plot->standard->wprime.count());
standard->mCurve->setSamples(bydist ? plot->standard->matchDist.data() : plot->standard->matchTime.data(),
plot->standard->match.data(), plot->standard->match.count());
setMatchLabels(standard);
}
int points = stopidx - startidx + 1; // e.g. 10 to 12 is 3 points 10,11,12, so not 12-10 !
standard->wattsCurve->setSamples(xaxis,smoothW,points);
standard->atissCurve->setSamples(xaxis,smoothAT,points);
standard->antissCurve->setSamples(xaxis,smoothANT,points);
standard->npCurve->setSamples(xaxis,smoothN,points);
standard->rvCurve->setSamples(xaxis,smoothRV,points);
standard->rcadCurve->setSamples(xaxis,smoothRCad,points);
standard->rgctCurve->setSamples(xaxis,smoothRGCT,points);
standard->gearCurve->setSamples(xaxis,smoothGear,points);
standard->smo2Curve->setSamples(xaxis,smoothSmO2,points);
standard->thbCurve->setSamples(xaxis,smoothtHb,points);
standard->o2hbCurve->setSamples(xaxis,smoothO2Hb,points);
standard->hhbCurve->setSamples(xaxis,smoothHHb,points);
standard->xpCurve->setSamples(xaxis,smoothX,points);
standard->apCurve->setSamples(xaxis,smoothL,points);
standard->hrCurve->setSamples(xaxis, smoothHR,points);
standard->tcoreCurve->setSamples(xaxis, smoothTCORE,points);
standard->speedCurve->setSamples(xaxis, smoothS, points);
standard->accelCurve->setSamples(xaxis, smoothAC, points);
standard->wattsDCurve->setSamples(xaxis, smoothWD, points);
standard->cadDCurve->setSamples(xaxis, smoothCD, points);
standard->nmDCurve->setSamples(xaxis, smoothND, points);
standard->hrDCurve->setSamples(xaxis, smoothHD, points);
standard->cadCurve->setSamples(xaxis, smoothC, points);
standard->altCurve->setSamples(xaxis, smoothA, points);
standard->altSlopeCurve->setSamples(xaxis, smoothA, points);
standard->slopeCurve->setSamples(xaxis, smoothSL, points);
standard->tempCurve->setSamples(xaxis, smoothTE, points);
QVector<QwtIntervalSample> tmpWND(points);
memcpy(tmpWND.data(), smoothRS, (points) * sizeof(QwtIntervalSample));
standard->windCurve->setSamples(new QwtIntervalSeriesData(tmpWND));
standard->torqueCurve->setSamples(xaxis, smoothNM, points);
standard->balanceLCurve->setSamples(xaxis, smoothBALL, points);
standard->balanceRCurve->setSamples(xaxis, smoothBALR, points);
standard->lteCurve->setSamples(xaxis, smoothLTE, points);
standard->rteCurve->setSamples(xaxis, smoothRTE, points);
standard->lpsCurve->setSamples(xaxis, smoothLPS, points);
standard->rpsCurve->setSamples(xaxis, smoothRPS, points);
standard->lpcoCurve->setSamples(xaxis, smoothLPCO, points);
standard->rpcoCurve->setSamples(xaxis, smoothRPCO, points);
QVector<QwtIntervalSample> tmpLDC(points);
memcpy(tmpLDC.data(), smoothLPP, (points) * sizeof(QwtIntervalSample));
standard->lppCurve->setSamples(new QwtIntervalSeriesData(tmpLDC));
QVector<QwtIntervalSample> tmpRDC(points);
memcpy(tmpRDC.data(), smoothRPP, (points) * sizeof(QwtIntervalSample));
standard->rppCurve->setSamples(new QwtIntervalSeriesData(tmpRDC));
QVector<QwtIntervalSample> tmpLPPP(points);
memcpy(tmpLPPP.data(), smoothLPPP, (points) * sizeof(QwtIntervalSample));
standard->lpppCurve->setSamples(new QwtIntervalSeriesData(tmpLPPP));
QVector<QwtIntervalSample> tmpRPPP(points);
memcpy(tmpRPPP.data(), smoothRPPP, (points) * sizeof(QwtIntervalSample));
standard->rpppCurve->setSamples(new QwtIntervalSeriesData(tmpRPPP));
/*QVector<double> _time(stopidx-startidx);
qMemCopy( _time.data(), xaxis, (stopidx-startidx) * sizeof( double ) );
QVector<QwtIntervalSample> tmpWND(stopidx-startidx);
for (int i=0;i<_time.count();i++) {
QwtIntervalSample inter = QwtIntervalSample(_time.at(i), 20,50);
tmpWND.append(inter); // plot->standard->smoothRelSpeed.at(i)
}*/
setSymbol(standard->wCurve, points);
setSymbol(standard->wattsCurve, points);
setSymbol(standard->antissCurve, points);
setSymbol(standard->atissCurve, points);
setSymbol(standard->npCurve, points);
setSymbol(standard->rvCurve, points);
setSymbol(standard->rcadCurve, points);
setSymbol(standard->rgctCurve, points);
setSymbol(standard->gearCurve, points);
setSymbol(standard->smo2Curve, points);
setSymbol(standard->thbCurve, points);
setSymbol(standard->o2hbCurve, points);
setSymbol(standard->hhbCurve, points);
setSymbol(standard->xpCurve, points);
setSymbol(standard->apCurve, points);
setSymbol(standard->hrCurve, points);
setSymbol(standard->tcoreCurve, points);
setSymbol(standard->speedCurve, points);
setSymbol(standard->accelCurve, points);
setSymbol(standard->wattsDCurve, points);
setSymbol(standard->cadDCurve, points);
setSymbol(standard->nmDCurve, points);
setSymbol(standard->hrDCurve, points);
setSymbol(standard->cadCurve, points);
setSymbol(standard->altCurve, points);
setSymbol(standard->altSlopeCurve, points);
setSymbol(standard->slopeCurve, points);
setSymbol(standard->tempCurve, points);
setSymbol(standard->torqueCurve, points);
setSymbol(standard->balanceLCurve, points);
setSymbol(standard->balanceRCurve, points);
setSymbol(standard->lteCurve, points);
setSymbol(standard->rteCurve, points);
setSymbol(standard->lpsCurve, points);
setSymbol(standard->rpsCurve, points);
setSymbol(standard->lpcoCurve, points);
setSymbol(standard->rpcoCurve, points);
if (!plot->standard->smoothAltitude.empty()) {
standard->altCurve->attach(this);
standard->altSlopeCurve->attach(this);
}
if (!plot->standard->smoothSlope.empty()) {
standard->slopeCurve->attach(this);
}
if (showW && plot->standard->wprime.count()) {
standard->wCurve->attach(this);
standard->mCurve->attach(this);
}
if (!plot->standard->smoothWatts.empty()) {
standard->wattsCurve->attach(this);
}
if (!plot->standard->smoothANT.empty()) {
standard->antissCurve->attach(this);
}
if (!plot->standard->smoothAT.empty()) {
standard->atissCurve->attach(this);
}
if (!plot->standard->smoothNP.empty()) {
standard->npCurve->attach(this);
}
if (!plot->standard->smoothRV.empty()) {
standard->rvCurve->attach(this);
}
if (!plot->standard->smoothRCad.empty()) {
standard->rcadCurve->attach(this);
}
if (!plot->standard->smoothRGCT.empty()) {
standard->rgctCurve->attach(this);
}
if (!plot->standard->smoothGear.empty()) {
standard->gearCurve->attach(this);
}
if (!plot->standard->smoothSmO2.empty()) {
standard->smo2Curve->attach(this);
}
if (!plot->standard->smoothtHb.empty()) {
standard->thbCurve->attach(this);
}
if (!plot->standard->smoothO2Hb.empty()) {
standard->o2hbCurve->attach(this);
}
if (!plot->standard->smoothHHb.empty()) {
standard->hhbCurve->attach(this);
}
if (!plot->standard->smoothXP.empty()) {
standard->xpCurve->attach(this);
}
if (!plot->standard->smoothAP.empty()) {
standard->apCurve->attach(this);
}
if (!plot->standard->smoothTcore.empty()) {
standard->tcoreCurve->attach(this);
}
if (!plot->standard->smoothHr.empty()) {
standard->hrCurve->attach(this);
}
if (!plot->standard->smoothAccel.empty()) {
standard->accelCurve->attach(this);
}
if (!plot->standard->smoothWattsD.empty()) {
standard->wattsDCurve->attach(this);
}
if (!plot->standard->smoothCadD.empty()) {
standard->cadDCurve->attach(this);
}
if (!plot->standard->smoothNmD.empty()) {
standard->nmDCurve->attach(this);
}
if (!plot->standard->smoothHrD.empty()) {
standard->hrDCurve->attach(this);
}
if (!plot->standard->smoothSpeed.empty()) {
standard->speedCurve->attach(this);
}
if (!plot->standard->smoothCad.empty()) {
standard->cadCurve->attach(this);
}
if (!plot->standard->smoothTemp.empty()) {
standard->tempCurve->attach(this);
}
if (!plot->standard->smoothWind.empty()) {
standard->windCurve->attach(this);
}
if (!plot->standard->smoothTorque.empty()) {
standard->torqueCurve->attach(this);
}
if (!plot->standard->smoothBalanceL.empty()) {
standard->balanceLCurve->attach(this);
standard->balanceRCurve->attach(this);
}
if (!plot->standard->smoothLTE.empty()) {
standard->lteCurve->attach(this);
standard->rteCurve->attach(this);
}
if (!plot->standard->smoothLPS.empty()) {
standard->lpsCurve->attach(this);
standard->rpsCurve->attach(this);
}
if (!plot->standard->smoothLPCO.empty()) {
standard->lpcoCurve->attach(this);
standard->rpcoCurve->attach(this);
}
if (!plot->standard->smoothLPP.empty()) {
standard->lppCurve->attach(this);
standard->rppCurve->attach(this);
}
if (!plot->standard->smoothLPPP.empty()) {
standard->lpppCurve->attach(this);
standard->rpppCurve->attach(this);
}
setAltSlopePlotStyle(standard->altSlopeCurve);
setYMax();
setAxisScale(xBottom, xaxis[0], xaxis[stopidx-startidx]);
enableAxis(xBottom, true);
setAxisVisible(xBottom, true);
refreshReferenceLines();
refreshIntervalMarkers();
refreshCalibrationMarkers();
refreshZoneLabels();
// set all the colors ?
configChanged(CONFIG_APPEARANCE);
// remember the curves and colors
isolation = false;
curveColors->saveState();
replot();
}
// Configure this plot to show a single data series cloned from another,
// fully-populated AllPlot. The member 'scope' (set elsewhere) selects which
// series to display; 'secondaryScope' selects an optional companion series
// (e.g. headwind alongside speed, slope alongside altitude).
// Every curve is first detached and hidden, then only the selected curve(s)
// are re-attached with data copied point-by-point from the source plot --
// Qwt offers no safe way to share sample data between two curves.
// NOTE(review): assumes 'standard' and 'plot->standard' are fully
// constructed (all curve pointers non-null) -- confirm against the ctor.
void
AllPlot::setDataFromPlot(AllPlot *plot)
{
    // a null source means "show nothing": drop the ride reference and bail
    if (plot == NULL) {
        rideItem = NULL;
        return;
    }

    referencePlot = plot;

    // we are no longer isolating a single highlighted curve; remember the
    // current curve/color state before we start changing things
    isolation = false;
    curveColors->saveState();

    // reference the plot for data and state
    rideItem = plot->rideItem;
    bydist = plot->bydist;

    // remove all curves from the plot
    standard->wCurve->detach();
    standard->mCurve->detach();
    standard->wattsCurve->detach();
    standard->atissCurve->detach();
    standard->antissCurve->detach();
    standard->npCurve->detach();
    standard->rvCurve->detach();
    standard->rcadCurve->detach();
    standard->rgctCurve->detach();
    standard->gearCurve->detach();
    standard->smo2Curve->detach();
    standard->thbCurve->detach();
    standard->o2hbCurve->detach();
    standard->hhbCurve->detach();
    standard->xpCurve->detach();
    standard->apCurve->detach();
    standard->hrCurve->detach();
    standard->tcoreCurve->detach();
    standard->speedCurve->detach();
    standard->accelCurve->detach();
    standard->wattsDCurve->detach();
    standard->cadDCurve->detach();
    standard->nmDCurve->detach();
    standard->hrDCurve->detach();
    standard->cadCurve->detach();
    standard->altCurve->detach();
    standard->altSlopeCurve->detach();
    standard->slopeCurve->detach();
    standard->tempCurve->detach();
    standard->windCurve->detach();
    standard->torqueCurve->detach();
    standard->balanceLCurve->detach();
    standard->balanceRCurve->detach();
    standard->lteCurve->detach();
    standard->rteCurve->detach();
    standard->lpsCurve->detach();
    standard->rpsCurve->detach();
    standard->lpcoCurve->detach();
    standard->rpcoCurve->detach();
    standard->lppCurve->detach();
    standard->rppCurve->detach();
    standard->lpppCurve->detach();
    standard->rpppCurve->detach();

    // ... and hide them all; only the selected series is made visible below
    standard->wCurve->setVisible(false);
    standard->mCurve->setVisible(false);
    standard->wattsCurve->setVisible(false);
    standard->atissCurve->setVisible(false);
    standard->antissCurve->setVisible(false);
    standard->npCurve->setVisible(false);
    standard->rvCurve->setVisible(false);
    standard->rcadCurve->setVisible(false);
    standard->rgctCurve->setVisible(false);
    standard->gearCurve->setVisible(false);
    standard->smo2Curve->setVisible(false);
    standard->thbCurve->setVisible(false);
    standard->o2hbCurve->setVisible(false);
    standard->hhbCurve->setVisible(false);
    standard->xpCurve->setVisible(false);
    standard->apCurve->setVisible(false);
    standard->hrCurve->setVisible(false);
    standard->tcoreCurve->setVisible(false);
    standard->speedCurve->setVisible(false);
    standard->accelCurve->setVisible(false);
    standard->wattsDCurve->setVisible(false);
    standard->cadDCurve->setVisible(false);
    standard->nmDCurve->setVisible(false);
    standard->hrDCurve->setVisible(false);
    standard->cadCurve->setVisible(false);
    standard->altCurve->setVisible(false);
    standard->altSlopeCurve->setVisible(false);
    standard->slopeCurve->setVisible(false);
    standard->tempCurve->setVisible(false);
    standard->windCurve->setVisible(false);
    standard->torqueCurve->setVisible(false);
    standard->balanceLCurve->setVisible(false);
    standard->balanceRCurve->setVisible(false);
    standard->lteCurve->setVisible(false);
    standard->rteCurve->setVisible(false);
    standard->lpsCurve->setVisible(false);
    standard->rpsCurve->setVisible(false);
    standard->lpcoCurve->setVisible(false);
    standard->rpcoCurve->setVisible(false);
    standard->lppCurve->setVisible(false);
    standard->rppCurve->setVisible(false);
    standard->lpppCurve->setVisible(false);
    standard->rpppCurve->setVisible(false);

    // the source/destination pairs we may clone: a plain curve, an optional
    // second plain curve (L/R pairs, W'bal matches), an interval curve
    // (headwind / power-phase bands) and the special alt/slope curve
    QwtPlotCurve *ourCurve = NULL, *thereCurve = NULL;
    QwtPlotCurve *ourCurve2 = NULL, *thereCurve2 = NULL;
    AllPlotSlopeCurve *ourASCurve = NULL, *thereASCurve = NULL;
    QwtPlotIntervalCurve *ourICurve = NULL, *thereICurve = NULL;
    QString title;

    // which curve are we interested in ?
    switch (scope) {

    case RideFile::cad:
        {
        ourCurve = standard->cadCurve;
        thereCurve = referencePlot->standard->cadCurve;
        title = tr("Cadence");
        }
        break;

    case RideFile::tcore:
        {
        ourCurve = standard->tcoreCurve;
        thereCurve = referencePlot->standard->tcoreCurve;
        title = tr("Core Temp");
        }
        break;

    case RideFile::hr:
        {
        ourCurve = standard->hrCurve;
        thereCurve = referencePlot->standard->hrCurve;
        title = tr("Heartrate");
        }
        break;

    case RideFile::kphd:
        {
        ourCurve = standard->accelCurve;
        thereCurve = referencePlot->standard->accelCurve;
        title = tr("Acceleration");
        }
        break;

    case RideFile::wattsd:
        {
        ourCurve = standard->wattsDCurve;
        thereCurve = referencePlot->standard->wattsDCurve;
        title = tr("Power Delta");
        }
        break;

    case RideFile::cadd:
        {
        ourCurve = standard->cadDCurve;
        thereCurve = referencePlot->standard->cadDCurve;
        title = tr("Cadence Delta");
        }
        break;

    case RideFile::nmd:
        {
        ourCurve = standard->nmDCurve;
        thereCurve = referencePlot->standard->nmDCurve;
        title = tr("Torque Delta");
        }
        break;

    case RideFile::hrd:
        {
        ourCurve = standard->hrDCurve;
        thereCurve = referencePlot->standard->hrDCurve;
        title = tr("Heartrate Delta");
        }
        break;

    case RideFile::kph:
        {
        ourCurve = standard->speedCurve;
        thereCurve = referencePlot->standard->speedCurve;
        // speed optionally carries the headwind band as a companion series
        if (secondaryScope == RideFile::headwind) {
            ourICurve = standard->windCurve;
            thereICurve = referencePlot->standard->windCurve;
        }
        title = tr("Speed");
        }
        break;

    case RideFile::nm:
        {
        ourCurve = standard->torqueCurve;
        thereCurve = referencePlot->standard->torqueCurve;
        title = tr("Torque");
        }
        break;

    case RideFile::watts:
        {
        ourCurve = standard->wattsCurve;
        thereCurve = referencePlot->standard->wattsCurve;
        title = tr("Power");
        }
        break;

    case RideFile::wprime:
        {
        // W'bal always shows its matches curve alongside
        ourCurve = standard->wCurve;
        ourCurve2 = standard->mCurve;
        thereCurve = referencePlot->standard->wCurve;
        thereCurve2 = referencePlot->standard->mCurve;
        title = tr("W'bal");
        }
        break;

    case RideFile::alt:
        {
        // plain altitude, or the combined alt/slope rendering when a
        // secondary scope is requested
        if (secondaryScope == RideFile::none) {
            ourCurve = standard->altCurve;
            thereCurve = referencePlot->standard->altCurve;
            title = tr("Altitude");
        } else {
            ourASCurve = standard->altSlopeCurve;
            thereASCurve = referencePlot->standard->altSlopeCurve;
            title = tr("Alt/Slope");
        }
        }
        break;

    case RideFile::slope:
        {
        ourCurve = standard->slopeCurve;
        thereCurve = referencePlot->standard->slopeCurve;
        title = tr("Slope");
        }
        break;

    case RideFile::headwind:
        {
        ourICurve = standard->windCurve;
        thereICurve = referencePlot->standard->windCurve;
        title = tr("Headwind");
        }
        break;

    case RideFile::temp:
        {
        ourCurve = standard->tempCurve;
        thereCurve = referencePlot->standard->tempCurve;
        title = tr("Temperature");
        }
        break;

    case RideFile::anTISS:
        {
        ourCurve = standard->antissCurve;
        thereCurve = referencePlot->standard->antissCurve;
        title = tr("Anaerobic TISS");
        }
        break;

    case RideFile::aTISS:
        {
        ourCurve = standard->atissCurve;
        thereCurve = referencePlot->standard->atissCurve;
        title = tr("Aerobic TISS");
        }
        break;

    case RideFile::NP:
        {
        ourCurve = standard->npCurve;
        thereCurve = referencePlot->standard->npCurve;
        title = tr("NP");
        }
        break;

    case RideFile::rvert:
        {
        ourCurve = standard->rvCurve;
        thereCurve = referencePlot->standard->rvCurve;
        title = tr("Vertical Oscillation");
        }
        break;

    case RideFile::rcad:
        {
        ourCurve = standard->rcadCurve;
        thereCurve = referencePlot->standard->rcadCurve;
        title = tr("Run Cadence");
        }
        break;

    case RideFile::rcontact:
        {
        ourCurve = standard->rgctCurve;
        thereCurve = referencePlot->standard->rgctCurve;
        title = tr("GCT");
        }
        break;

    case RideFile::gear:
        {
        ourCurve = standard->gearCurve;
        thereCurve = referencePlot->standard->gearCurve;
        title = tr("Gear Ratio");
        }
        break;

    case RideFile::smo2:
        {
        ourCurve = standard->smo2Curve;
        thereCurve = referencePlot->standard->smo2Curve;
        title = tr("SmO2");
        }
        break;

    case RideFile::thb:
        {
        ourCurve = standard->thbCurve;
        thereCurve = referencePlot->standard->thbCurve;
        title = tr("tHb");
        }
        break;

    case RideFile::o2hb:
        {
        ourCurve = standard->o2hbCurve;
        thereCurve = referencePlot->standard->o2hbCurve;
        title = tr("O2Hb");
        }
        break;

    case RideFile::hhb:
        {
        ourCurve = standard->hhbCurve;
        thereCurve = referencePlot->standard->hhbCurve;
        title = tr("HHb");
        }
        break;

    case RideFile::xPower:
        {
        ourCurve = standard->xpCurve;
        thereCurve = referencePlot->standard->xpCurve;
        title = tr("xPower");
        }
        break;

    case RideFile::lps:
        {
        ourCurve = standard->lpsCurve;
        thereCurve = referencePlot->standard->lpsCurve;
        title = tr("Left Pedal Smoothness");
        }
        break;

    case RideFile::rps:
        {
        ourCurve = standard->rpsCurve;
        thereCurve = referencePlot->standard->rpsCurve;
        title = tr("Right Pedal Smoothness");
        }
        break;

    case RideFile::lte:
        {
        ourCurve = standard->lteCurve;
        thereCurve = referencePlot->standard->lteCurve;
        title = tr("Left Torque Efficiency");
        }
        break;

    case RideFile::rte:
        {
        ourCurve = standard->rteCurve;
        thereCurve = referencePlot->standard->rteCurve;
        title = tr("Right Torque Efficiency");
        }
        break;

    // pedal center offset is always shown as a left/right pair, whichever
    // side was requested
    case RideFile::lpco:
    case RideFile::rpco:
        {
        ourCurve = standard->lpcoCurve;
        ourCurve2 = standard->rpcoCurve;
        thereCurve = referencePlot->standard->lpcoCurve;
        thereCurve2 = referencePlot->standard->rpcoCurve;
        title = tr("Left/Right Pedal Center Offset");
        }
        break;

    case RideFile::lppb:
        {
        ourICurve = standard->lppCurve;
        thereICurve = referencePlot->standard->lppCurve;
        title = tr("Left Power Phase");
        }
        break;

    case RideFile::rppb:
        {
        ourICurve = standard->rppCurve;
        thereICurve = referencePlot->standard->rppCurve;
        title = tr("Right Power Phase");
        }
        break;

    case RideFile::lpppb:
        {
        ourICurve = standard->lpppCurve;
        thereICurve = referencePlot->standard->lpppCurve;
        title = tr("Left Peak Power Phase");
        }
        break;

    case RideFile::rpppb:
        {
        ourICurve = standard->rpppCurve;
        thereICurve = referencePlot->standard->rpppCurve;
        title = tr("Right Peak Power Phase");
        }
        break;

    case RideFile::lrbalance:
        {
        ourCurve = standard->balanceLCurve;
        ourCurve2 = standard->balanceRCurve;
        thereCurve = referencePlot->standard->balanceLCurve;
        thereCurve2 = referencePlot->standard->balanceRCurve;
        title = tr("L/R Balance");
        }
        break;

    case RideFile::aPower:
        {
        ourCurve = standard->apCurve;
        thereCurve = referencePlot->standard->apCurve;
        title = tr("aPower");
        }
        break;

    // these scopes have no single-series representation here; leave all
    // curve pointers NULL so the clone step below is skipped entirely
    default:
    case RideFile::interval:
    case RideFile::vam:
    case RideFile::wattsKg:
    case RideFile::km:
    case RideFile::lon:
    case RideFile::lat:
    case RideFile::none:
        break;
    }

    // lets clone !
    if ((ourCurve && thereCurve) || (ourICurve && thereICurve) || (ourASCurve && thereASCurve)) {

        if (ourCurve && thereCurve) {

            // no way to get values, so we run through them
            ourCurve->setVisible(true);
            ourCurve->attach(this);

            // lets clone the data
            QVector<QPointF> array;
            for (size_t i=0; i<thereCurve->data()->size(); i++) array << thereCurve->data()->sample(i);

            ourCurve->setSamples(array);
            ourCurve->setYAxis(yLeft);
            ourCurve->setBaseline(thereCurve->baseline());
            ourCurve->setStyle(thereCurve->style());

            // symbol when zoomed in super close (Qwt curve takes ownership
            // of the symbol, so no explicit delete is needed)
            if (array.size() < 150) {
                QwtSymbol *sym = new QwtSymbol;
                sym->setPen(QPen(GColor(CPLOTMARKER)));
                sym->setStyle(QwtSymbol::Ellipse);
                sym->setSize(3);
                ourCurve->setSymbol(sym);
            } else {
                QwtSymbol *sym = new QwtSymbol;
                sym->setStyle(QwtSymbol::NoSymbol);
                sym->setSize(0);
                ourCurve->setSymbol(sym);
            }
        }

        // secondary curve (matches / right-side of an L-R pair), same
        // treatment as above but the source style is not copied
        if (ourCurve2 && thereCurve2) {

            ourCurve2->setVisible(true);
            ourCurve2->attach(this);

            // lets clone the data
            QVector<QPointF> array;
            for (size_t i=0; i<thereCurve2->data()->size(); i++) array << thereCurve2->data()->sample(i);

            ourCurve2->setSamples(array);
            ourCurve2->setYAxis(yLeft);
            ourCurve2->setBaseline(thereCurve2->baseline());

            // symbol when zoomed in super close
            if (array.size() < 150) {
                QwtSymbol *sym = new QwtSymbol;
                sym->setPen(QPen(GColor(CPLOTMARKER)));
                sym->setStyle(QwtSymbol::Ellipse);
                sym->setSize(3);
                ourCurve2->setSymbol(sym);
            } else {
                QwtSymbol *sym = new QwtSymbol;
                sym->setStyle(QwtSymbol::NoSymbol);
                sym->setSize(0);
                ourCurve2->setSymbol(sym);
            }
        }

        // interval (band) curves: headwind and power-phase series
        if (ourICurve && thereICurve) {

            ourICurve->setVisible(true);
            ourICurve->attach(this);

            // lets clone the data
            QVector<QwtIntervalSample> array;
            for (size_t i=0; i<thereICurve->data()->size(); i++) array << thereICurve->data()->sample(i);

            ourICurve->setSamples(array);
            ourICurve->setYAxis(yLeft);
        }

        // the combined alt/slope curve; never shows point symbols
        if (ourASCurve && thereASCurve) {

            // no way to get values, so we run through them
            ourASCurve->setVisible(true);
            ourASCurve->attach(this);

            // lets clone the data
            QVector<QPointF> array;
            for (size_t i=0; i<thereASCurve->data()->size(); i++) array << thereASCurve->data()->sample(i);

            ourASCurve->setSamples(array);
            ourASCurve->setYAxis(yLeft);
            ourASCurve->setBaseline(thereASCurve->baseline());
            ourASCurve->setStyle(thereASCurve->style());

            QwtSymbol *sym = new QwtSymbol;
            sym->setStyle(QwtSymbol::NoSymbol);
            sym->setSize(0);
            ourASCurve->setSymbol(sym);
        }

        // x-axis: mirror the reference plot's scale for point curves,
        // fall back to the interval curve's bounding rect for band curves
        if (thereCurve || thereASCurve)
            setAxisScale(QwtPlot::xBottom, referencePlot->axisScaleDiv(xBottom).lowerBound(),
                         referencePlot->axisScaleDiv(xBottom).upperBound());
        else if (thereICurve)
            setAxisScale(QwtPlot::xBottom, thereICurve->boundingRect().left(), thereICurve->boundingRect().right());

        enableAxis(QwtPlot::xBottom, true);
        setAxisVisible(QwtPlot::xBottom, true);
        setXTitle();

        // y-axis yLeft
        setAxisVisible(yLeft, true);
        if (scope == RideFile::thb && thereCurve) {

            // minimum non-zero value... worst case its zero !
            // NOTE(review): a zero sample occurring after the first non-zero
            // one still pulls the minimum down to zero, per the scan below
            double minNZ = 0.00f;
            for (size_t i=0; i<thereCurve->data()->size(); i++) {
                if (!minNZ) minNZ = thereCurve->data()->sample(i).y();
                else if (thereCurve->data()->sample(i).y()<minNZ) minNZ = thereCurve->data()->sample(i).y();
            }
            setAxisScale(QwtPlot::yLeft, minNZ, thereCurve->maxYValue() + 0.10f);

        } else if (scope == RideFile::wprime) {

            // always zero or lower (don't truncate)
            double min = thereCurve->minYValue();
            setAxisScale(QwtPlot::yLeft, min > 0 ? 0 : min * 1.1f, 1.1f * thereCurve->maxYValue());

        } else if (scope == RideFile::tcore) {

            // clamp the scale to a sensible core-temperature window:
            // never narrower than 36.5 .. 39.0, widened if the data exceeds it
            double min = qMin(36.5f, float(thereCurve->minYValue()));
            double max = qMax(39.0f, float(thereCurve->maxYValue()+0.5f));
            setAxisScale(QwtPlot::yLeft, min, max);

        } else if (scope != RideFile::lrbalance) {

            // generic case: data range with 10% headroom at the top
            if (thereCurve)
                setAxisScale(QwtPlot::yLeft, thereCurve->minYValue(), 1.1f * thereCurve->maxYValue());
            if (thereICurve)
                setAxisScale(QwtPlot::yLeft, thereICurve->boundingRect().top(), 1.1f * thereICurve->boundingRect().bottom());
            if (thereASCurve)
                setAxisScale(QwtPlot::yLeft, thereASCurve->minYValue(), 1.1f * thereASCurve->maxYValue());

        } else {
            setAxisScale(QwtPlot::yLeft, 0, 100); // 100 %
        }

        // minimalist scale rendering: no ticks, no backbone, fixed extent
        ScaleScaleDraw *sd = new ScaleScaleDraw;
        sd->setTickLength(QwtScaleDiv::MajorTick, 3);
        sd->enableComponent(ScaleScaleDraw::Ticks, false);
        sd->enableComponent(ScaleScaleDraw::Backbone, false);
        sd->setMinimumExtent(24);
        sd->setSpacing(0);

        if (scope == RideFile::wprime) sd->setFactor(0.001f); // Kj
        if (scope == RideFile::thb || scope == RideFile::smo2
            || scope == RideFile::o2hb || scope == RideFile::hhb) // Hb
            sd->setDecimals(2);
        if (scope == RideFile::tcore) sd->setDecimals(1);

        setAxisScaleDraw(QwtPlot::yLeft, sd);

        // title and colour: the axis label adopts the pen colour of
        // whichever source curve was cloned
        setAxisTitle(yLeft, title);
        QPalette pal = palette();
        if (thereCurve) {
            pal.setColor(QPalette::WindowText, thereCurve->pen().color());
            pal.setColor(QPalette::Text, thereCurve->pen().color());
        } else if (thereICurve) {
            pal.setColor(QPalette::WindowText, thereICurve->pen().color());
            pal.setColor(QPalette::Text, thereICurve->pen().color());
        } else if (thereASCurve) {
            pal.setColor(QPalette::WindowText, thereASCurve->pen().color());
            pal.setColor(QPalette::Text, thereASCurve->pen().color());
        }
        axisWidget(QwtPlot::yLeft)->setPalette(pal);

        // hide other y axes -- only yLeft,0 is in use in this mode
        setAxisVisible(QwtAxisId(QwtAxis::yLeft, 1), false);
        setAxisVisible(QwtAxisId(QwtAxis::yLeft, 3), false);
        setAxisVisible(yRight, false);
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 1), false);
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 2), false);
        setAxisVisible(QwtAxisId(QwtAxis::yRight, 3), false);

        // plot standard->grid
        standard->grid->setVisible(referencePlot->standard->grid->isVisible());

        // plot markers etc
        refreshIntervalMarkers();
        refreshCalibrationMarkers();
        refreshReferenceLines();

#if 0
        // zone labels are deliberately not refreshed in this mode
        refreshZoneLabels();
#endif
    }

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();
}
void
AllPlot::setDataFromPlots(QList<AllPlot *> plots)
{
isolation = false;
curveColors->saveState();
// remove all curves from the plot
standard->wCurve->detach();
standard->mCurve->detach();
standard->wattsCurve->detach();
standard->atissCurve->detach();
standard->antissCurve->detach();
standard->npCurve->detach();
standard->rvCurve->detach();
standard->rcadCurve->detach();
standard->rgctCurve->detach();
standard->gearCurve->detach();
standard->smo2Curve->detach();
standard->thbCurve->detach();
standard->o2hbCurve->detach();
standard->hhbCurve->detach();
standard->xpCurve->detach();
standard->apCurve->detach();
standard->hrCurve->detach();
standard->tcoreCurve->detach();
standard->speedCurve->detach();
standard->accelCurve->detach();
standard->wattsDCurve->detach();
standard->cadDCurve->detach();
standard->nmDCurve->detach();
standard->hrDCurve->detach();
standard->cadCurve->detach();
standard->altCurve->detach();
standard->altSlopeCurve->detach();
standard->slopeCurve->detach();
standard->tempCurve->detach();
standard->windCurve->detach();
standard->torqueCurve->detach();
standard->balanceLCurve->detach();
standard->balanceRCurve->detach();
standard->lteCurve->detach();
standard->rteCurve->detach();
standard->lpsCurve->detach();
standard->rpsCurve->detach();
standard->lpcoCurve->detach();
standard->rpcoCurve->detach();
standard->lppCurve->detach();
standard->rppCurve->detach();
standard->lpppCurve->detach();
standard->rpppCurve->detach();
standard->wCurve->setVisible(false);
standard->mCurve->setVisible(false);
standard->wattsCurve->setVisible(false);
standard->atissCurve->setVisible(false);
standard->antissCurve->setVisible(false);
standard->npCurve->setVisible(false);
standard->rvCurve->setVisible(false);
standard->rcadCurve->setVisible(false);
standard->rgctCurve->setVisible(false);
standard->gearCurve->setVisible(false);
standard->smo2Curve->setVisible(false);
standard->thbCurve->setVisible(false);
standard->o2hbCurve->setVisible(false);
standard->hhbCurve->setVisible(false);
standard->xpCurve->setVisible(false);
standard->apCurve->setVisible(false);
standard->hrCurve->setVisible(false);
standard->tcoreCurve->setVisible(false);
standard->speedCurve->setVisible(false);
standard->accelCurve->setVisible(false);
standard->wattsDCurve->setVisible(false);
standard->cadDCurve->setVisible(false);
standard->nmDCurve->setVisible(false);
standard->hrDCurve->setVisible(false);
standard->cadCurve->setVisible(false);
standard->altCurve->setVisible(false);
standard->altSlopeCurve->setVisible(false);
standard->slopeCurve->setVisible(false);
standard->tempCurve->setVisible(false);
standard->windCurve->setVisible(false);
standard->torqueCurve->setVisible(false);
standard->balanceLCurve->setVisible(false);
standard->balanceRCurve->setVisible(false);
standard->lteCurve->setVisible(false);
standard->rteCurve->setVisible(false);
standard->lpsCurve->setVisible(false);
standard->rpsCurve->setVisible(false);
standard->lpcoCurve->setVisible(false);
standard->rpcoCurve->setVisible(false);
standard->lppCurve->setVisible(false);
standard->rppCurve->setVisible(false);
standard->lpppCurve->setVisible(false);
standard->rpppCurve->setVisible(false);
// clear previous curves
foreach(QwtPlotCurve *prior, compares) {
prior->detach();
delete prior;
}
compares.clear();
double MAXY = -100;
double MINY = 0;
// add all the curves
int index=0;
foreach (AllPlot *plot, plots) {
if (context->compareIntervals[index].isChecked() == false) {
index++;
continue; // ignore if not shown
}
referencePlot = plot;
QwtPlotCurve *ourCurve = NULL, *thereCurve = NULL;
QwtPlotCurve *ourCurve2 = NULL, *thereCurve2 = NULL;
AllPlotSlopeCurve *ourASCurve = NULL, *thereASCurve = NULL;
QwtPlotIntervalCurve *ourICurve = NULL, *thereICurve = NULL;
QString title;
// which curve are we interested in ?
switch (scope) {
case RideFile::cad:
{
ourCurve = new QwtPlotCurve(tr("Cadence"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->cadCurve;
title = tr("Cadence");
}
break;
case RideFile::tcore:
{
ourCurve = new QwtPlotCurve(tr("Core Temp"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->tcoreCurve;
title = tr("Core Temp");
}
break;
case RideFile::hr:
{
ourCurve = new QwtPlotCurve(tr("Heart Rate"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->hrCurve;
title = tr("Heartrate");
}
break;
case RideFile::kphd:
{
ourCurve = new QwtPlotCurve(tr("Acceleration"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->accelCurve;
title = tr("Acceleration");
}
break;
case RideFile::wattsd:
{
ourCurve = new QwtPlotCurve(tr("Power Delta"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->wattsDCurve;
title = tr("Power Delta");
}
break;
case RideFile::cadd:
{
ourCurve = new QwtPlotCurve(tr("Cadence Delta"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->cadDCurve;
title = tr("Cadence Delta");
}
break;
case RideFile::nmd:
{
ourCurve = new QwtPlotCurve(tr("Torque Delta"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->nmDCurve;
title = tr("Torque Delta");
}
break;
case RideFile::hrd:
{
ourCurve = new QwtPlotCurve(tr("Heartrate Delta"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->hrDCurve;
title = tr("Heartrate Delta");
}
break;
case RideFile::kph:
{
ourCurve = new QwtPlotCurve(tr("Speed"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->speedCurve;
if (secondaryScope == RideFile::headwind) {
ourICurve = standard->windCurve;
thereICurve = referencePlot->standard->windCurve;
}
title = tr("Speed");
}
break;
case RideFile::nm:
{
ourCurve = new QwtPlotCurve(tr("Torque"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->torqueCurve;
title = tr("Torque");
}
break;
case RideFile::watts:
{
ourCurve = new QwtPlotCurve(tr("Power"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->wattsCurve;
title = tr("Power");
}
break;
case RideFile::wprime:
{
ourCurve = new QwtPlotCurve(tr("W' Balance (kJ)"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
ourCurve2 = new QwtPlotCurve(tr("Matches"));
ourCurve2->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
ourCurve2->setStyle(QwtPlotCurve::Dots);
ourCurve2->setYAxis(QwtAxisId(QwtAxis::yRight, 2));
thereCurve = referencePlot->standard->wCurve;
thereCurve2 = referencePlot->standard->mCurve;
title = tr("W'bal");
}
break;
case RideFile::alt:
{
if (secondaryScope != RideFile::slope) {
ourCurve = new QwtPlotCurve(tr("Altitude"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
ourCurve->setZ(-10); // always at the back.
thereCurve = referencePlot->standard->altCurve;
title = tr("Altitude");
} else {
ourASCurve = new AllPlotSlopeCurve(tr("Alt/Slope"));
ourASCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
ourASCurve->setZ(-5); //
thereASCurve = referencePlot->standard->altSlopeCurve;
title = tr("Alt/Slope");
}
}
break;
case RideFile::slope:
{
ourCurve = new QwtPlotCurve(tr("Slope"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->slopeCurve;
title = tr("Slope");
}
break;
case RideFile::headwind:
{
ourICurve = new QwtPlotIntervalCurve(tr("Headwind"));
thereICurve = referencePlot->standard->windCurve;
title = tr("Headwind");
}
break;
case RideFile::temp:
{
ourCurve = new QwtPlotCurve(tr("Temperature"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->tempCurve;
title = tr("Temperature");
}
break;
case RideFile::anTISS:
{
ourCurve = new QwtPlotCurve(tr("Anaerobic TISS"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->antissCurve;
title = tr("Anaerobic TISS");
}
break;
case RideFile::aTISS:
{
ourCurve = new QwtPlotCurve(tr("Aerobic TISS"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->atissCurve;
title = tr("Aerobic TISS");
}
break;
case RideFile::NP:
{
ourCurve = new QwtPlotCurve(tr("NP"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->npCurve;
title = tr("NP");
}
break;
case RideFile::rvert:
{
ourCurve = new QwtPlotCurve(tr("Vertical Oscillation"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->rvCurve;
title = tr("Vertical Oscillation");
}
break;
case RideFile::rcad:
{
ourCurve = new QwtPlotCurve(tr("Run Cadence"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->rcadCurve;
title = tr("Run Cadence");
}
break;
case RideFile::rcontact:
{
ourCurve = new QwtPlotCurve(tr("GCT"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->rgctCurve;
title = tr("GCT");
}
break;
case RideFile::gear:
{
ourCurve = new QwtPlotCurve(tr("Gear Ratio"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->gearCurve;
title = tr("Gear Ratio");
}
break;
case RideFile::smo2:
{
ourCurve = new QwtPlotCurve(tr("SmO2"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->smo2Curve;
title = tr("SmO2");
}
break;
case RideFile::thb:
{
ourCurve = new QwtPlotCurve(tr("tHb"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->thbCurve;
title = tr("tHb");
}
break;
case RideFile::o2hb:
{
ourCurve = new QwtPlotCurve(tr("O2Hb"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->o2hbCurve;
title = tr("O2Hb");
}
break;
case RideFile::hhb:
{
ourCurve = new QwtPlotCurve(tr("HHb"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->hhbCurve;
title = tr("HHb");
}
break;
case RideFile::xPower:
{
ourCurve = new QwtPlotCurve(tr("xPower"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->xpCurve;
title = tr("xPower");
}
break;
case RideFile::lps:
{
ourCurve = new QwtPlotCurve(tr("Left Pedal Smoothness"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->lpsCurve;
title = tr("Left Pedal Smoothness");
}
break;
case RideFile::rps:
{
ourCurve = new QwtPlotCurve(tr("Right Pedal Smoothness"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->rpsCurve;
title = tr("Right Pedal Smoothness");
}
break;
case RideFile::lte:
{
ourCurve = new QwtPlotCurve(tr("Left Torque Efficiency"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->lteCurve;
title = tr("Left Torque Efficiency");
}
break;
case RideFile::rte:
{
ourCurve = new QwtPlotCurve(tr("Right Torque Efficiency"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->rteCurve;
title = tr("Right Torque Efficiency");
}
break;
case RideFile::rpco:
case RideFile::lpco:
{
ourCurve = new QwtPlotCurve(tr("Left Pedal Center Offset"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->lpcoCurve;
ourCurve2 = new QwtPlotCurve(tr("Right Pedal Center Offset"));
ourCurve2->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve2 = referencePlot->standard->rpcoCurve;
title = tr("Left/Right Pedal Center Offset");
}
break;
case RideFile::lppb:
case RideFile::lppe:
{
ourICurve = new QwtPlotIntervalCurve(tr("Left Power Phase"));
thereICurve = referencePlot->standard->lppCurve;
title = tr("Left Power Phase");
}
break;
case RideFile::rppb:
case RideFile::rppe:
{
ourICurve = new QwtPlotIntervalCurve(tr("Right Power Phase"));
thereICurve = referencePlot->standard->rppCurve;
title = tr("Right Power Phase");
}
break;
case RideFile::lpppb:
case RideFile::lpppe:
{
ourICurve = new QwtPlotIntervalCurve(tr("Left Peak Power Phase"));
thereICurve = referencePlot->standard->lpppCurve;
title = tr("Left Peak Power Phase");
}
break;
case RideFile::rpppb:
case RideFile::rpppe:
{
ourICurve = new QwtPlotIntervalCurve(tr("Right Peak Power Phase"));
thereICurve = referencePlot->standard->rpppCurve;
title = tr("Right Peak Power Phase");
}
break;
case RideFile::lrbalance:
{
ourCurve = new QwtPlotCurve(tr("Left Balance"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
ourCurve2 = new QwtPlotCurve(tr("Right Balance"));
ourCurve2->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->balanceLCurve;
thereCurve2 = referencePlot->standard->balanceRCurve;
title = tr("L/R Balance");
}
break;
case RideFile::aPower:
{
ourCurve = new QwtPlotCurve(tr("aPower"));
ourCurve->setPaintAttribute(QwtPlotCurve::FilterPoints, true);
thereCurve = referencePlot->standard->apCurve;
title = tr("aPower");
}
break;
default:
case RideFile::interval:
case RideFile::vam:
case RideFile::wattsKg:
case RideFile::km:
case RideFile::lon:
case RideFile::lat:
case RideFile::none:
break;
}
bool antialias = appsettings->value(this, GC_ANTIALIAS, true).toBool();
// lets clone !
if ((ourCurve && thereCurve) || (ourICurve && thereICurve) || (ourASCurve && thereASCurve)) {
if (ourCurve && thereCurve) {
// remember for next time...
compares << ourCurve;
// colours etc
if (antialias) ourCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
QPen pen = thereCurve->pen();
pen.setColor(context->compareIntervals[index].color);
ourCurve->setPen(pen);
ourCurve->setVisible(true);
ourCurve->attach(this);
// lets clone the data
QVector<QPointF> array;
for (size_t i=0; i<thereCurve->data()->size(); i++) array << thereCurve->data()->sample(i);
ourCurve->setSamples(array);
ourCurve->setYAxis(yLeft);
ourCurve->setBaseline(thereCurve->baseline());
if (ourCurve->maxYValue() > MAXY) MAXY = ourCurve->maxYValue();
if (ourCurve->minYValue() < MINY) MINY = ourCurve->minYValue();
// symbol when zoomed in super close
if (array.size() < 150) {
QwtSymbol *sym = new QwtSymbol;
sym->setPen(QPen(GColor(CPLOTMARKER)));
sym->setStyle(QwtSymbol::Ellipse);
sym->setSize(3);
ourCurve->setSymbol(sym);
} else {
QwtSymbol *sym = new QwtSymbol;
sym->setStyle(QwtSymbol::NoSymbol);
sym->setSize(0);
ourCurve->setSymbol(sym);
}
}
if (ourCurve2 && thereCurve2) {
// remember for next time...
compares << ourCurve2;
ourCurve2->setVisible(true);
ourCurve2->attach(this);
if (antialias) ourCurve2->setRenderHint(QwtPlotItem::RenderAntialiased);
QPen pen = thereCurve2->pen();
pen.setColor(context->compareIntervals[index].color);
ourCurve2->setPen(pen);
// lets clone the data
QVector<QPointF> array;
for (size_t i=0; i<thereCurve2->data()->size(); i++) array << thereCurve2->data()->sample(i);
ourCurve2->setSamples(array);
ourCurve2->setYAxis(yLeft);
ourCurve2->setBaseline(thereCurve2->baseline());
if (ourCurve2->maxYValue() > MAXY) MAXY = ourCurve2->maxYValue();
if (ourCurve2->minYValue() < MINY) MINY = ourCurve2->minYValue();
// symbol when zoomed in super close
if (array.size() < 150) {
QwtSymbol *sym = new QwtSymbol;
sym->setPen(QPen(GColor(CPLOTMARKER)));
sym->setStyle(QwtSymbol::Ellipse);
sym->setSize(3);
ourCurve2->setSymbol(sym);
} else {
QwtSymbol *sym = new QwtSymbol;
sym->setStyle(QwtSymbol::NoSymbol);
sym->setSize(0);
ourCurve2->setSymbol(sym);
}
}
if (ourICurve && thereICurve) {
ourICurve->setVisible(true);
ourICurve->attach(this);
QPen pen = thereICurve->pen();
pen.setColor(context->compareIntervals[index].color);
ourICurve->setPen(pen);
if (antialias) ourICurve->setRenderHint(QwtPlotItem::RenderAntialiased);
// lets clone the data
QVector<QwtIntervalSample> array;
for (size_t i=0; i<thereICurve->data()->size(); i++) array << thereICurve->data()->sample(i);
ourICurve->setSamples(array);
ourICurve->setYAxis(yLeft);
//XXXX ???? DUNNO ?????
//XXXX FIX LATER XXXX if (ourICurve->maxYValue() > MAXY) MAXY = ourICurve->maxYValue();
}
if (ourASCurve && thereASCurve) {
// remember for next time...
compares << ourASCurve;
// colours etc
if (antialias) ourASCurve->setRenderHint(QwtPlotItem::RenderAntialiased);
QPen pen = thereASCurve->pen();
pen.setColor(context->compareIntervals[index].color);
ourASCurve->setPen(pen);
ourASCurve->setVisible(true);
ourASCurve->attach(this);
// lets clone the data
QVector<QPointF> array;
for (size_t i=0; i<thereASCurve->data()->size(); i++) array << thereASCurve->data()->sample(i);
ourASCurve->setSamples(array);
ourASCurve->setYAxis(yLeft);
ourASCurve->setBaseline(thereASCurve->baseline());
setAltSlopePlotStyle (ourASCurve);
if (ourASCurve->maxYValue() > MAXY) MAXY = ourASCurve->maxYValue();
if (ourASCurve->minYValue() < MINY) MINY = ourASCurve->minYValue();
QwtSymbol *sym = new QwtSymbol;
sym->setStyle(QwtSymbol::NoSymbol);
sym->setSize(0);
ourASCurve->setSymbol(sym);
}
}
// move on -- this is used to reference into the compareIntervals
// array to get the colors predominantly!
index++;
}
// x-axis
enableAxis(QwtPlot::xBottom, true);
setAxisVisible(QwtPlot::xBottom, true);
setAxisVisible(yLeft, true);
// prettify the chart at the end
ScaleScaleDraw *sd = new ScaleScaleDraw;
sd->setTickLength(QwtScaleDiv::MajorTick, 3);
sd->enableComponent(ScaleScaleDraw::Ticks, false);
sd->enableComponent(ScaleScaleDraw::Backbone, false);
if (scope == RideFile::wprime) sd->setFactor(0.001f); // Kj
setAxisScaleDraw(QwtPlot::yLeft, sd);
// set the y-axis for largest value we saw +10%
setAxisScale(QwtPlot::yLeft, MINY * 1.10f , MAXY * 1.10f);
// hide other y axes
setAxisVisible(QwtAxisId(QwtAxis::yLeft, 1), false);
setAxisVisible(QwtAxisId(QwtAxis::yLeft, 3), false);
setAxisVisible(yRight, false);
setAxisVisible(QwtAxisId(QwtAxis::yRight, 1), false);
setAxisVisible(QwtAxisId(QwtAxis::yRight, 2), false);
setAxisVisible(QwtAxisId(QwtAxis::yRight, 3), false);
// refresh zone background (if needed)
if (shade_zones) {
bg->attach(this);
refreshZoneLabels();
} else
bg->detach();
#if 0
// plot standard->grid
standard->grid->setVisible(referencePlot->standard->grid->isVisible());
// plot markers etc
refreshIntervalMarkers();
refreshCalibrationMarkers();
refreshReferenceLines();
// always draw against yLeft in series mode
intervalHighlighterCurve->setYAxis(yLeft);
if (thereCurve)
intervalHighlighterCurve->setBaseline(thereCurve->minYValue());
else if (thereICurve)
intervalHighlighterCurve->setBaseline(thereICurve->boundingRect().top());
#if 0
#endif
#endif
// remember the curves and colors
isolation = false;
curveColors->saveState();
}
// used to setup array of allplots where there is one for
// each interval in compare mode
// used to setup array of allplots where there is one for
// each interval in compare mode
//
// Copies the pre-computed (smoothed) series held in an AllPlotObject
// into this plot's standard curves. No smoothing or recomputation is
// done here: curves are detached/hidden, re-pointed at the object's
// data arrays, then shown or hidden according to the *reference*
// plot's user preferences (show flags, x-axis mode).
void
AllPlot::setDataFromObject(AllPlotObject *object, AllPlot *reference)
{
    // mirror the reference plot's settings (x-axis mode, show flags)
    referencePlot = reference;
    bydist = reference->bydist;

    // remove all curves from the plot
    // NOTE(review): lpcoCurve/rpcoCurve and the power-phase interval
    // curves (lpp/rpp/lppp/rppp) are not detached or hidden here,
    // unlike every other curve -- confirm whether that is intentional.
    standard->wCurve->detach();
    standard->mCurve->detach();
    standard->wattsCurve->detach();
    standard->atissCurve->detach();
    standard->antissCurve->detach();
    standard->npCurve->detach();
    standard->rvCurve->detach();
    standard->rcadCurve->detach();
    standard->rgctCurve->detach();
    standard->gearCurve->detach();
    standard->smo2Curve->detach();
    standard->thbCurve->detach();
    standard->o2hbCurve->detach();
    standard->hhbCurve->detach();
    standard->xpCurve->detach();
    standard->apCurve->detach();
    standard->hrCurve->detach();
    standard->tcoreCurve->detach();
    standard->speedCurve->detach();
    standard->accelCurve->detach();
    standard->wattsDCurve->detach();
    standard->cadDCurve->detach();
    standard->nmDCurve->detach();
    standard->hrDCurve->detach();
    standard->cadCurve->detach();
    standard->altCurve->detach();
    standard->altSlopeCurve->detach();
    standard->slopeCurve->detach();
    standard->tempCurve->detach();
    standard->windCurve->detach();
    standard->torqueCurve->detach();
    standard->balanceLCurve->detach();
    standard->balanceRCurve->detach();
    standard->lteCurve->detach();
    standard->rteCurve->detach();
    standard->lpsCurve->detach();
    standard->rpsCurve->detach();
    standard->intervalHighlighterCurve->detach();
    standard->intervalHoverCurve->detach();

    // hide everything; curves with data are re-shown below
    standard->wCurve->setVisible(false);
    standard->mCurve->setVisible(false);
    standard->wattsCurve->setVisible(false);
    standard->atissCurve->setVisible(false);
    standard->antissCurve->setVisible(false);
    standard->npCurve->setVisible(false);
    standard->rvCurve->setVisible(false);
    standard->rcadCurve->setVisible(false);
    standard->rgctCurve->setVisible(false);
    standard->gearCurve->setVisible(false);
    standard->smo2Curve->setVisible(false);
    standard->thbCurve->setVisible(false);
    standard->o2hbCurve->setVisible(false);
    standard->hhbCurve->setVisible(false);
    standard->xpCurve->setVisible(false);
    standard->apCurve->setVisible(false);
    standard->hrCurve->setVisible(false);
    standard->tcoreCurve->setVisible(false);
    standard->speedCurve->setVisible(false);
    standard->accelCurve->setVisible(false);
    standard->wattsDCurve->setVisible(false);
    standard->cadDCurve->setVisible(false);
    standard->nmDCurve->setVisible(false);
    standard->hrDCurve->setVisible(false);
    standard->cadCurve->setVisible(false);
    standard->altCurve->setVisible(false);
    standard->altSlopeCurve->setVisible(false);
    standard->slopeCurve->setVisible(false);
    standard->tempCurve->setVisible(false);
    standard->windCurve->setVisible(false);
    standard->torqueCurve->setVisible(false);
    standard->balanceLCurve->setVisible(false);
    standard->balanceRCurve->setVisible(false);
    standard->lteCurve->setVisible(false);
    standard->rteCurve->setVisible(false);
    standard->lpsCurve->setVisible(false);
    standard->rpsCurve->setVisible(false);
    standard->intervalHighlighterCurve->setVisible(false);
    standard->intervalHoverCurve->setVisible(false);

    // NOW SET OUR CURVES USING THEIR DATA ...
    // pick the x-axis series to plot against: distance or time
    QVector<double> &xaxis = referencePlot->bydist ? object->smoothDistance : object->smoothTime;
    int totalPoints = xaxis.count();

    //W' curve set to whatever data we have
    if (!object->wprime.empty()) {
        standard->wCurve->setSamples(bydist ? object->wprimeDist.data() : object->wprimeTime.data(),
                                     object->wprime.data(), object->wprime.count());
        standard->mCurve->setSamples(bydist ? object->matchDist.data() : object->matchTime.data(),
                                     object->match.data(), object->match.count());
        setMatchLabels(standard);
    }

    // for each data series present in the object: load samples,
    // attach and show the matching curve
    if (!object->wattsArray.empty()) {
        standard->wattsCurve->setSamples(xaxis.data(), object->smoothWatts.data(), totalPoints);
        standard->wattsCurve->attach(this);
        standard->wattsCurve->setVisible(true);
    }

    if (!object->antissArray.empty()) {
        standard->antissCurve->setSamples(xaxis.data(), object->smoothANT.data(), totalPoints);
        standard->antissCurve->attach(this);
        standard->antissCurve->setVisible(true);
    }

    if (!object->atissArray.empty()) {
        standard->atissCurve->setSamples(xaxis.data(), object->smoothAT.data(), totalPoints);
        standard->atissCurve->attach(this);
        standard->atissCurve->setVisible(true);
    }

    if (!object->npArray.empty()) {
        standard->npCurve->setSamples(xaxis.data(), object->smoothNP.data(), totalPoints);
        standard->npCurve->attach(this);
        standard->npCurve->setVisible(true);
    }

    if (!object->rvArray.empty()) {
        standard->rvCurve->setSamples(xaxis.data(), object->smoothRV.data(), totalPoints);
        standard->rvCurve->attach(this);
        standard->rvCurve->setVisible(true);
    }

    if (!object->rcadArray.empty()) {
        standard->rcadCurve->setSamples(xaxis.data(), object->smoothRCad.data(), totalPoints);
        standard->rcadCurve->attach(this);
        standard->rcadCurve->setVisible(true);
    }

    if (!object->rgctArray.empty()) {
        standard->rgctCurve->setSamples(xaxis.data(), object->smoothRGCT.data(), totalPoints);
        standard->rgctCurve->attach(this);
        standard->rgctCurve->setVisible(true);
    }

    if (!object->gearArray.empty()) {
        standard->gearCurve->setSamples(xaxis.data(), object->smoothGear.data(), totalPoints);
        standard->gearCurve->attach(this);
        standard->gearCurve->setVisible(true);
    }

    if (!object->smo2Array.empty()) {
        standard->smo2Curve->setSamples(xaxis.data(), object->smoothSmO2.data(), totalPoints);
        standard->smo2Curve->attach(this);
        standard->smo2Curve->setVisible(true);
    }

    if (!object->thbArray.empty()) {
        standard->thbCurve->setSamples(xaxis.data(), object->smoothtHb.data(), totalPoints);
        standard->thbCurve->attach(this);
        standard->thbCurve->setVisible(true);
    }

    if (!object->o2hbArray.empty()) {
        standard->o2hbCurve->setSamples(xaxis.data(), object->smoothO2Hb.data(), totalPoints);
        standard->o2hbCurve->attach(this);
        standard->o2hbCurve->setVisible(true);
    }

    if (!object->hhbArray.empty()) {
        standard->hhbCurve->setSamples(xaxis.data(), object->smoothHHb.data(), totalPoints);
        standard->hhbCurve->attach(this);
        standard->hhbCurve->setVisible(true);
    }

    if (!object->xpArray.empty()) {
        standard->xpCurve->setSamples(xaxis.data(), object->smoothXP.data(), totalPoints);
        standard->xpCurve->attach(this);
        standard->xpCurve->setVisible(true);
    }

    if (!object->apArray.empty()) {
        standard->apCurve->setSamples(xaxis.data(), object->smoothAP.data(), totalPoints);
        standard->apCurve->attach(this);
        standard->apCurve->setVisible(true);
    }

    if (!object->tcoreArray.empty()) {
        standard->tcoreCurve->setSamples(xaxis.data(), object->smoothTcore.data(), totalPoints);
        standard->tcoreCurve->attach(this);
        standard->tcoreCurve->setVisible(true);
    }

    if (!object->hrArray.empty()) {
        standard->hrCurve->setSamples(xaxis.data(), object->smoothHr.data(), totalPoints);
        standard->hrCurve->attach(this);
        standard->hrCurve->setVisible(true);
    }

    if (!object->speedArray.empty()) {
        standard->speedCurve->setSamples(xaxis.data(), object->smoothSpeed.data(), totalPoints);
        standard->speedCurve->attach(this);
        standard->speedCurve->setVisible(true);
    }

    if (!object->accelArray.empty()) {
        standard->accelCurve->setSamples(xaxis.data(), object->smoothAccel.data(), totalPoints);
        standard->accelCurve->attach(this);
        standard->accelCurve->setVisible(true);
    }

    if (!object->wattsDArray.empty()) {
        standard->wattsDCurve->setSamples(xaxis.data(), object->smoothWattsD.data(), totalPoints);
        standard->wattsDCurve->attach(this);
        standard->wattsDCurve->setVisible(true);
    }

    if (!object->cadDArray.empty()) {
        standard->cadDCurve->setSamples(xaxis.data(), object->smoothCadD.data(), totalPoints);
        standard->cadDCurve->attach(this);
        standard->cadDCurve->setVisible(true);
    }

    if (!object->nmDArray.empty()) {
        standard->nmDCurve->setSamples(xaxis.data(), object->smoothNmD.data(), totalPoints);
        standard->nmDCurve->attach(this);
        standard->nmDCurve->setVisible(true);
    }

    if (!object->hrDArray.empty()) {
        standard->hrDCurve->setSamples(xaxis.data(), object->smoothHrD.data(), totalPoints);
        standard->hrDCurve->attach(this);
        standard->hrDCurve->setVisible(true);
    }

    if (!object->cadArray.empty()) {
        standard->cadCurve->setSamples(xaxis.data(), object->smoothCad.data(), totalPoints);
        standard->cadCurve->attach(this);
        standard->cadCurve->setVisible(true);
    }

    if (!object->altArray.empty()) {
        // altitude drives both the plain altitude curve and the
        // alt/slope overlay curve
        standard->altCurve->setSamples(xaxis.data(), object->smoothAltitude.data(), totalPoints);
        standard->altCurve->attach(this);
        standard->altCurve->setVisible(true);

        standard->altSlopeCurve->setSamples(xaxis.data(), object->smoothAltitude.data(), totalPoints);
        standard->altSlopeCurve->attach(this);
        standard->altSlopeCurve->setVisible(true);
    }

    if (!object->slopeArray.empty()) {
        standard->slopeCurve->setSamples(xaxis.data(), object->smoothSlope.data(), totalPoints);
        standard->slopeCurve->attach(this);
        standard->slopeCurve->setVisible(true);
    }

    if (!object->tempArray.empty()) {
        standard->tempCurve->setSamples(xaxis.data(), object->smoothTemp.data(), totalPoints);
        standard->tempCurve->attach(this);
        standard->tempCurve->setVisible(true);
    }

    if (!object->windArray.empty()) {
        // headwind is an interval curve (band), not a line curve
        standard->windCurve->setSamples(new QwtIntervalSeriesData(object->smoothRelSpeed));
        standard->windCurve->attach(this);
        standard->windCurve->setVisible(true);
    }

    if (!object->torqueArray.empty()) {
        standard->torqueCurve->setSamples(xaxis.data(), object->smoothTorque.data(), totalPoints);
        standard->torqueCurve->attach(this);
        standard->torqueCurve->setVisible(true);
    }

    if (!object->balanceArray.empty()) {
        // left/right balance always come as a pair
        standard->balanceLCurve->setSamples(xaxis.data(), object->smoothBalanceL.data(), totalPoints);
        standard->balanceRCurve->setSamples(xaxis.data(), object->smoothBalanceR.data(), totalPoints);
        standard->balanceLCurve->attach(this);
        standard->balanceLCurve->setVisible(true);
        standard->balanceRCurve->attach(this);
        standard->balanceRCurve->setVisible(true);
    }

    if (!object->lteArray.empty()) {
        // left/right torque efficiency come as a pair
        standard->lteCurve->setSamples(xaxis.data(), object->smoothLTE.data(), totalPoints);
        standard->rteCurve->setSamples(xaxis.data(), object->smoothRTE.data(), totalPoints);
        standard->lteCurve->attach(this);
        standard->lteCurve->setVisible(true);
        standard->rteCurve->attach(this);
        standard->rteCurve->setVisible(true);
    }

    if (!object->lpsArray.empty()) {
        // left/right pedal smoothness come as a pair
        standard->lpsCurve->setSamples(xaxis.data(), object->smoothLPS.data(), totalPoints);
        standard->rpsCurve->setSamples(xaxis.data(), object->smoothRPS.data(), totalPoints);
        standard->lpsCurve->attach(this);
        standard->lpsCurve->setVisible(true);
        standard->rpsCurve->attach(this);
        standard->rpsCurve->setVisible(true);
    }

    if (!object->lpcoArray.empty()) {
        // left/right pedal center offset come as a pair
        standard->lpcoCurve->setSamples(xaxis.data(), object->smoothLPCO.data(), totalPoints);
        standard->rpcoCurve->setSamples(xaxis.data(), object->smoothRPCO.data(), totalPoints);
        standard->lpcoCurve->attach(this);
        standard->lpcoCurve->setVisible(true);
        standard->rpcoCurve->attach(this);
        standard->rpcoCurve->setVisible(true);
    }

    if (!object->lppbArray.empty()) {
        standard->lppCurve->setSamples(new QwtIntervalSeriesData(object->smoothLPP));
        standard->lppCurve->attach(this);
        standard->lppCurve->setVisible(true);
    }

    if (!object->rppbArray.empty()) {
        standard->rppCurve->setSamples(new QwtIntervalSeriesData(object->smoothRPP));
        standard->rppCurve->attach(this);
        standard->rppCurve->setVisible(true);
    }

    if (!object->lpppbArray.empty()) {
        standard->lpppCurve->setSamples(new QwtIntervalSeriesData(object->smoothLPPP));
        standard->lpppCurve->attach(this);
        standard->lpppCurve->setVisible(true);
    }

    if (!object->rpppbArray.empty()) {
        standard->rpppCurve->setSamples(new QwtIntervalSeriesData(object->smoothRPPP));
        standard->rpppCurve->attach(this);
        standard->rpppCurve->setVisible(true);
    }

    // to the max / min
    standard->grid->detach();

    // honour user preferences -- a curve only stays visible if the
    // reference plot's corresponding show flag is set
    standard->wCurve->setVisible(referencePlot->showW);
    standard->mCurve->setVisible(referencePlot->showW);
    standard->wattsCurve->setVisible(referencePlot->showPowerState < 2);
    standard->npCurve->setVisible(referencePlot->showNP);
    standard->rvCurve->setVisible(referencePlot->showRV);
    standard->rcadCurve->setVisible(referencePlot->showRCad);
    standard->rgctCurve->setVisible(referencePlot->showRGCT);
    standard->gearCurve->setVisible(referencePlot->showGear);
    standard->smo2Curve->setVisible(referencePlot->showSmO2);
    standard->thbCurve->setVisible(referencePlot->showtHb);
    standard->o2hbCurve->setVisible(referencePlot->showO2Hb);
    standard->hhbCurve->setVisible(referencePlot->showHHb);
    standard->atissCurve->setVisible(referencePlot->showATISS);
    standard->antissCurve->setVisible(referencePlot->showANTISS);
    standard->xpCurve->setVisible(referencePlot->showXP);
    standard->apCurve->setVisible(referencePlot->showAP);
    standard->hrCurve->setVisible(referencePlot->showHr);
    standard->tcoreCurve->setVisible(referencePlot->showTcore);
    standard->speedCurve->setVisible(referencePlot->showSpeed);
    standard->accelCurve->setVisible(referencePlot->showAccel);
    standard->wattsDCurve->setVisible(referencePlot->showPowerD);
    standard->cadDCurve->setVisible(referencePlot->showCadD);
    standard->nmDCurve->setVisible(referencePlot->showTorqueD);
    standard->hrDCurve->setVisible(referencePlot->showHrD);
    standard->cadCurve->setVisible(referencePlot->showCad);
    standard->altCurve->setVisible(referencePlot->showAlt);
    standard->altSlopeCurve->setVisible(referencePlot->showAltSlopeState > 0);
    standard->slopeCurve->setVisible(referencePlot->showSlope);
    standard->tempCurve->setVisible(referencePlot->showTemp);
    standard->windCurve->setVisible(referencePlot->showWind);
    // FIX: torque visibility previously copied the wind flag
    // (copy-paste from the line above); it must follow the torque
    // preference instead.
    standard->torqueCurve->setVisible(referencePlot->showTorque);
    standard->balanceLCurve->setVisible(referencePlot->showBalance);
    standard->balanceRCurve->setVisible(referencePlot->showBalance);
    standard->lteCurve->setVisible(referencePlot->showTE);
    standard->rteCurve->setVisible(referencePlot->showTE);
    standard->lpsCurve->setVisible(referencePlot->showPS);
    standard->rpsCurve->setVisible(referencePlot->showPS);
    standard->lpcoCurve->setVisible(referencePlot->showPCO);
    standard->rpcoCurve->setVisible(referencePlot->showPCO);
    standard->lppCurve->setVisible(referencePlot->showDC);
    standard->rppCurve->setVisible(referencePlot->showDC);
    standard->lpppCurve->setVisible(referencePlot->showPPP);
    standard->rpppCurve->setVisible(referencePlot->showPPP);

    // set xaxis -- but not min/max as we get called during smoothing
    // and massively quicker to reuse data and replot
    setXTitle();
    enableAxis(xBottom, true);
    setAxisVisible(xBottom, true);

    // set the y-axis scales now
    referencePlot = NULL;
    setAltSlopePlotStyle(standard->altSlopeCurve);
    setYMax();

    // refresh zone background (if needed)
    if (shade_zones) {
        bg->attach(this);
        refreshZoneLabels();
    } else
        bg->detach();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show a ride directly on this plot: remember the ride item, load its
// data file into the standard plot object, and snapshot curve colours.
void
AllPlot::setDataFromRide(RideItem *_rideItem)
{
    // remember the ride we are displaying; nothing to do without one
    rideItem = _rideItem;
    if (!_rideItem) return;

    // plotting a ride directly, so no reference plot is involved
    referencePlot = NULL;

    // populate the standard plot object from the ride's data file
    setDataFromRideFile(rideItem->ride(), standard);

    // snapshot curve state so highlight/isolation can restore it later
    isolation = false;
    curveColors->saveState();
}
void
AllPlot::setDataFromRideFile(RideFile *ride, AllPlotObject *here)
{
if (ride && ride->dataPoints().size()) {
const RideFileDataPresent *dataPresent = ride->areDataPresent();
int npoints = ride->dataPoints().size();
// fetch w' bal data
here->match = ride->wprimeData()->mydata();
here->matchTime = ride->wprimeData()->mxdata(false);
here->matchDist = ride->wprimeData()->mxdata(true);
here->wprime = ride->wprimeData()->ydata();
here->wprimeTime = ride->wprimeData()->xdata(false);
here->wprimeDist = ride->wprimeData()->xdata(true);
here->wattsArray.resize(dataPresent->watts ? npoints : 0);
here->atissArray.resize(dataPresent->watts ? npoints : 0);
here->antissArray.resize(dataPresent->watts ? npoints : 0);
here->npArray.resize(dataPresent->np ? npoints : 0);
here->rcadArray.resize(dataPresent->rcad ? npoints : 0);
here->rvArray.resize(dataPresent->rvert ? npoints : 0);
here->rgctArray.resize(dataPresent->rcontact ? npoints : 0);
here->smo2Array.resize(dataPresent->smo2 ? npoints : 0);
here->thbArray.resize(dataPresent->thb ? npoints : 0);
here->o2hbArray.resize(dataPresent->o2hb ? npoints : 0);
here->hhbArray.resize(dataPresent->hhb ? npoints : 0);
here->gearArray.resize(dataPresent->gear ? npoints : 0);
here->xpArray.resize(dataPresent->xp ? npoints : 0);
here->apArray.resize(dataPresent->apower ? npoints : 0);
here->hrArray.resize(dataPresent->hr ? npoints : 0);
here->tcoreArray.resize(dataPresent->hr ? npoints : 0);
here->speedArray.resize(dataPresent->kph ? npoints : 0);
here->accelArray.resize(dataPresent->kph ? npoints : 0);
here->wattsDArray.resize(dataPresent->watts ? npoints : 0);
here->cadDArray.resize(dataPresent->cad ? npoints : 0);
here->nmDArray.resize(dataPresent->nm ? npoints : 0);
here->hrDArray.resize(dataPresent->hr ? npoints : 0);
here->cadArray.resize(dataPresent->cad ? npoints : 0);
here->altArray.resize(dataPresent->alt ? npoints : 0);
here->slopeArray.resize(dataPresent->slope ? npoints : 0);
here->tempArray.resize(dataPresent->temp ? npoints : 0);
here->windArray.resize(dataPresent->headwind ? npoints : 0);
here->torqueArray.resize(dataPresent->nm ? npoints : 0);
here->balanceArray.resize(dataPresent->lrbalance ? npoints : 0);
here->lteArray.resize(dataPresent->lte ? npoints : 0);
here->rteArray.resize(dataPresent->rte ? npoints : 0);
here->lpsArray.resize(dataPresent->lps ? npoints : 0);
here->rpsArray.resize(dataPresent->rps ? npoints : 0);
here->lpcoArray.resize(dataPresent->lpco ? npoints : 0);
here->rpcoArray.resize(dataPresent->rpco ? npoints : 0);
here->lppbArray.resize(dataPresent->lppb ? npoints : 0);
here->rppbArray.resize(dataPresent->rppb ? npoints : 0);
here->lppeArray.resize(dataPresent->lppe ? npoints : 0);
here->rppeArray.resize(dataPresent->rppe ? npoints : 0);
here->lpppbArray.resize(dataPresent->lpppb ? npoints : 0);
here->rpppbArray.resize(dataPresent->rpppb ? npoints : 0);
here->lpppeArray.resize(dataPresent->lpppe ? npoints : 0);
here->rpppeArray.resize(dataPresent->rpppe ? npoints : 0);
here->timeArray.resize(npoints);
here->distanceArray.resize(npoints);
// attach appropriate curves
here->wCurve->detach();
here->mCurve->detach();
here->wattsCurve->detach();
here->atissCurve->detach();
here->antissCurve->detach();
here->npCurve->detach();
here->rcadCurve->detach();
here->rvCurve->detach();
here->rgctCurve->detach();
here->gearCurve->detach();
here->smo2Curve->detach();
here->thbCurve->detach();
here->o2hbCurve->detach();
here->hhbCurve->detach();
here->xpCurve->detach();
here->apCurve->detach();
here->hrCurve->detach();
here->tcoreCurve->detach();
here->speedCurve->detach();
here->accelCurve->detach();
here->wattsDCurve->detach();
here->cadDCurve->detach();
here->nmDCurve->detach();
here->hrDCurve->detach();
here->cadCurve->detach();
here->altCurve->detach();
here->altSlopeCurve->detach();
here->slopeCurve->detach();
here->tempCurve->detach();
here->windCurve->detach();
here->torqueCurve->detach();
here->balanceLCurve->detach();
here->balanceRCurve->detach();
here->lteCurve->detach();
here->rteCurve->detach();
here->lpsCurve->detach();
here->rpsCurve->detach();
here->lpcoCurve->detach();
here->rpcoCurve->detach();
here->lppCurve->detach();
here->rppCurve->detach();
here->lpppCurve->detach();
here->rpppCurve->detach();
if (!here->altArray.empty()) {
here->altCurve->attach(this);
here->altSlopeCurve->attach(this);
}
if (!here->slopeArray.empty()) here->slopeCurve->attach(this);
if (!here->wattsArray.empty()) here->wattsCurve->attach(this);
if (!here->atissArray.empty()) here->atissCurve->attach(this);
if (!here->antissArray.empty()) here->antissCurve->attach(this);
if (!here->npArray.empty()) here->npCurve->attach(this);
if (!here->rvArray.empty()) here->rvCurve->attach(this);
if (!here->rcadArray.empty()) here->rcadCurve->attach(this);
if (!here->rgctArray.empty()) here->rgctCurve->attach(this);
if (!here->gearArray.empty()) here->gearCurve->attach(this);
if (!here->smo2Array.empty()) here->smo2Curve->attach(this);
if (!here->thbArray.empty()) here->thbCurve->attach(this);
if (!here->o2hbArray.empty()) here->o2hbCurve->attach(this);
if (!here->hhbArray.empty()) here->hhbCurve->attach(this);
if (!here->xpArray.empty()) here->xpCurve->attach(this);
if (!here->apArray.empty()) here->apCurve->attach(this);
if (showW && ride && !here->wprime.empty()) {
here->wCurve->attach(this);
here->mCurve->attach(this);
}
if (!here->hrArray.empty()) here->hrCurve->attach(this);
if (!here->tcoreArray.empty()) here->tcoreCurve->attach(this);
if (!here->speedArray.empty()) here->speedCurve->attach(this);
// deltas
if (!here->accelArray.empty()) here->accelCurve->attach(this);
if (!here->wattsDArray.empty()) here->wattsDCurve->attach(this);
if (!here->cadDArray.empty()) here->cadDCurve->attach(this);
if (!here->nmDArray.empty()) here->nmDCurve->attach(this);
if (!here->hrDArray.empty()) here->hrDCurve->attach(this);
if (!here->cadArray.empty()) here->cadCurve->attach(this);
if (!here->tempArray.empty()) here->tempCurve->attach(this);
if (!here->windArray.empty()) here->windCurve->attach(this);
if (!here->torqueArray.empty()) here->torqueCurve->attach(this);
if (!here->lteArray.empty()) {
here->lteCurve->attach(this);
here->rteCurve->attach(this);
}
if (!here->lpsArray.empty()) {
here->lpsCurve->attach(this);
here->rpsCurve->attach(this);
}
if (!here->balanceArray.empty()) {
here->balanceLCurve->attach(this);
here->balanceRCurve->attach(this);
}
if (!here->lpcoArray.empty()) {
here->lpcoCurve->attach(this);
here->rpcoCurve->attach(this);
}
if (!here->lppbArray.empty()) {
here->lppCurve->attach(this);
here->rppCurve->attach(this);
}
if (!here->lpppbArray.empty()) {
here->lpppCurve->attach(this);
here->rpppCurve->attach(this);
}
here->wCurve->setVisible(dataPresent->watts && showW);
here->mCurve->setVisible(dataPresent->watts && showW);
here->wattsCurve->setVisible(dataPresent->watts && showPowerState < 2);
here->atissCurve->setVisible(dataPresent->watts && showATISS);
here->antissCurve->setVisible(dataPresent->watts && showANTISS);
here->npCurve->setVisible(dataPresent->np && showNP);
here->rcadCurve->setVisible(dataPresent->rcad && showRCad);
here->rvCurve->setVisible(dataPresent->rvert && showRV);
here->rgctCurve->setVisible(dataPresent->rcontact && showRGCT);
here->gearCurve->setVisible(dataPresent->gear && showGear);
here->smo2Curve->setVisible(dataPresent->smo2 && showSmO2);
here->thbCurve->setVisible(dataPresent->thb && showtHb);
here->o2hbCurve->setVisible(dataPresent->o2hb && showO2Hb);
here->hhbCurve->setVisible(dataPresent->hhb && showHHb);
here->xpCurve->setVisible(dataPresent->xp && showXP);
here->apCurve->setVisible(dataPresent->apower && showAP);
here->hrCurve->setVisible(dataPresent->hr && showHr);
here->tcoreCurve->setVisible(dataPresent->hr && showTcore);
here->speedCurve->setVisible(dataPresent->kph && showSpeed);
here->cadCurve->setVisible(dataPresent->cad && showCad);
here->altCurve->setVisible(dataPresent->alt && showAlt);
here->altSlopeCurve->setVisible(dataPresent->alt && showAltSlopeState > 0);
here->slopeCurve->setVisible(dataPresent->slope && showSlope);
here->tempCurve->setVisible(dataPresent->temp && showTemp);
here->windCurve->setVisible(dataPresent->headwind && showWind);
here->torqueCurve->setVisible(dataPresent->nm && showWind);
here->lteCurve->setVisible(dataPresent->lte && showTE);
here->rteCurve->setVisible(dataPresent->rte && showTE);
here->lpsCurve->setVisible(dataPresent->lps && showPS);
here->rpsCurve->setVisible(dataPresent->rps && showPS);
here->balanceLCurve->setVisible(dataPresent->lrbalance && showBalance);
here->balanceRCurve->setVisible(dataPresent->lrbalance && showBalance);
here->lpcoCurve->setVisible(dataPresent->lpco && showPCO);
here->rpcoCurve->setVisible(dataPresent->rpco && showPCO);
here->lppCurve->setVisible(dataPresent->lppb && showDC);
here->rppCurve->setVisible(dataPresent->rppb && showDC);
here->lpppCurve->setVisible(dataPresent->lpppb && showPPP);
here->rpppCurve->setVisible(dataPresent->rpppb && showPPP);
// deltas
here->accelCurve->setVisible(dataPresent->kph && showAccel);
here->wattsDCurve->setVisible(dataPresent->watts && showPowerD);
here->cadDCurve->setVisible(dataPresent->cad && showCadD);
here->nmDCurve->setVisible(dataPresent->nm && showTorqueD);
here->hrDCurve->setVisible(dataPresent->hr && showHrD);
int arrayLength = 0;
foreach (const RideFilePoint *point, ride->dataPoints()) {
// we round the time to nearest 100th of a second
// before adding to the array, to avoid situation
// where 'high precision' time slice is an artefact
// of double precision or slight timing anomalies
// e.g. where realtime gives timestamps like
// 940.002 followed by 940.998 and were previously
// both rounded to 940s
//
// NOTE: this rounding mechanism is identical to that
// used by the Ride Editor.
double secs = floor(point->secs);
double msecs = round((point->secs - secs) * 100) * 10;
here->timeArray[arrayLength] = secs + msecs/1000;
if (!here->wattsArray.empty()) here->wattsArray[arrayLength] = max(0, point->watts);
if (!here->atissArray.empty()) here->atissArray[arrayLength] = max(0, point->atiss);
if (!here->antissArray.empty()) here->antissArray[arrayLength] = max(0, point->antiss);
if (!here->npArray.empty()) here->npArray[arrayLength] = max(0, point->np);
if (!here->rvArray.empty()) here->rvArray[arrayLength] = max(0, point->rvert);
if (!here->rcadArray.empty()) here->rcadArray[arrayLength] = max(0, point->rcad);
if (!here->rgctArray.empty()) here->rgctArray[arrayLength] = max(0, point->rcontact);
if (!here->gearArray.empty()) here->gearArray[arrayLength] = max(0, point->gear);
if (!here->smo2Array.empty()) here->smo2Array[arrayLength] = max(0, point->smo2);
if (!here->thbArray.empty()) here->thbArray[arrayLength] = max(0, point->thb);
if (!here->o2hbArray.empty()) here->o2hbArray[arrayLength] = max(0, point->o2hb);
if (!here->hhbArray.empty()) here->hhbArray[arrayLength] = max(0, point->hhb);
if (!here->xpArray.empty()) here->xpArray[arrayLength] = max(0, point->xp);
if (!here->apArray.empty()) here->apArray[arrayLength] = max(0, point->apower);
if (!here->hrArray.empty()) here->hrArray[arrayLength] = max(0, point->hr);
if (!here->tcoreArray.empty()) here->tcoreArray[arrayLength] = max(0, point->tcore);
// delta series
if (!here->accelArray.empty()) here->accelArray[arrayLength] = point->kphd;
if (!here->wattsDArray.empty()) here->wattsDArray[arrayLength] = point->wattsd;
if (!here->cadDArray.empty()) here->cadDArray[arrayLength] = point->cadd;
if (!here->nmDArray.empty()) here->nmDArray[arrayLength] = point->nmd;
if (!here->hrDArray.empty()) here->hrDArray[arrayLength] = point->hrd;
if (!here->speedArray.empty())
here->speedArray[arrayLength] = max(0,
(context->athlete->useMetricUnits
? point->kph
: point->kph * MILES_PER_KM));
if (!here->cadArray.empty())
here->cadArray[arrayLength] = max(0, point->cad);
if (!here->altArray.empty())
here->altArray[arrayLength] = (context->athlete->useMetricUnits
? point->alt
: point->alt * FEET_PER_METER);
if (!here->slopeArray.empty()) here->slopeArray[arrayLength] = point->slope;
if (!here->tempArray.empty())
here->tempArray[arrayLength] = point->temp;
if (!here->windArray.empty())
here->windArray[arrayLength] = max(0,
(context->athlete->useMetricUnits
? point->headwind
: point->headwind * MILES_PER_KM));
// pedal data
if (!here->balanceArray.empty()) here->balanceArray[arrayLength] = point->lrbalance;
if (!here->lteArray.empty()) here->lteArray[arrayLength] = point->lte;
if (!here->rteArray.empty()) here->rteArray[arrayLength] = point->rte;
if (!here->lpsArray.empty()) here->lpsArray[arrayLength] = point->lps;
if (!here->rpsArray.empty()) here->rpsArray[arrayLength] = point->rps;
if (!here->lpcoArray.empty()) here->lpcoArray[arrayLength] = point->lpco;
if (!here->rpcoArray.empty()) here->rpcoArray[arrayLength] = point->rpco;
if (!here->lppbArray.empty()) here->lppbArray[arrayLength] = point->lppb;
if (!here->rppbArray.empty()) here->rppbArray[arrayLength] = point->rppb;
if (!here->lppeArray.empty()) here->lppeArray[arrayLength] = point->lppe;
if (!here->rppeArray.empty()) here->rppeArray[arrayLength] = point->rppe;
if (!here->lpppbArray.empty()) here->lpppbArray[arrayLength] = point->lpppb;
if (!here->rpppbArray.empty()) here->rpppbArray[arrayLength] = point->rpppb;
if (!here->lpppeArray.empty()) here->lpppeArray[arrayLength] = point->lpppe;
if (!here->rpppeArray.empty()) here->rpppeArray[arrayLength] = point->rpppe;
here->distanceArray[arrayLength] = max(0,
(context->athlete->useMetricUnits
? point->km
: point->km * MILES_PER_KM));
if (!here->torqueArray.empty())
here->torqueArray[arrayLength] = max(0,
(context->athlete->useMetricUnits
? point->nm
: point->nm * FEET_LB_PER_NM));
++arrayLength;
}
recalc(here);
}
else {
//setTitle("no data");
here->wCurve->detach();
here->mCurve->detach();
here->wattsCurve->detach();
here->atissCurve->detach();
here->antissCurve->detach();
here->npCurve->detach();
here->rvCurve->detach();
here->rcadCurve->detach();
here->rgctCurve->detach();
here->gearCurve->detach();
here->smo2Curve->detach();
here->thbCurve->detach();
here->o2hbCurve->detach();
here->hhbCurve->detach();
here->xpCurve->detach();
here->apCurve->detach();
here->hrCurve->detach();
here->tcoreCurve->detach();
here->speedCurve->detach();
here->accelCurve->detach();
here->wattsDCurve->detach();
here->cadDCurve->detach();
here->nmDCurve->detach();
here->hrDCurve->detach();
here->cadCurve->detach();
here->altCurve->detach();
here->altSlopeCurve->detach();
here->slopeCurve->detach();
here->tempCurve->detach();
here->windCurve->detach();
here->torqueCurve->detach();
here->lteCurve->detach();
here->rteCurve->detach();
here->lpsCurve->detach();
here->rpsCurve->detach();
here->balanceLCurve->detach();
here->balanceRCurve->detach();
here->lpcoCurve->detach();
here->rpcoCurve->detach();
here->lppCurve->detach();
here->rppCurve->detach();
here->lpppCurve->detach();
here->rpppCurve->detach();
foreach(QwtPlotMarker *mrk, here->d_mrk)
delete mrk;
here->d_mrk.clear();
foreach(QwtPlotMarker *mrk, here->cal_mrk)
delete mrk;
here->cal_mrk.clear();
foreach(QwtPlotCurve *referenceLine, here->referenceLines) {
curveColors->remove(referenceLine);
referenceLine->detach();
delete referenceLine;
}
here->referenceLines.clear();
}
// record the max x value
if (here->timeArray.count() && here->distanceArray.count()) {
int maxSECS = here->timeArray[here->timeArray.count()-1];
int maxKM = here->distanceArray[here->distanceArray.count()-1];
if (maxKM > here->maxKM) here->maxKM = maxKM;
if (maxSECS > here->maxSECS) here->maxSECS = maxSECS;
}
setAltSlopePlotStyle(here->altSlopeCurve);
// set the axis
setYMax();
// remember the curves and colors
isolation = false;
curveColors->saveState();
}
// Toggle the display of a single data series on this plot.
// RideFile::none is a special case that switches every series off at once.
// Paired left/right pedal series (torque effectiveness, pedal smoothness,
// platform centre offset, power phase, peak power phase) share one toggle.
// Axis-only or unplotted series (secs, km, lon, lat, interval, vam,
// wattsKg, wbal) are deliberately no-ops.
void
AllPlot::setShow(RideFile::SeriesType type, bool state)
{
    switch(type) {

    case RideFile::none: // switch all series off
        setShowAccel(false);
        setShowPowerD(false);
        setShowCadD(false);
        setShowTorqueD(false);
        setShowHrD(false);
        // NOTE(review): setShowPower(0) selects "shown with zone shading",
        // not "hidden" (that is state 2) -- confirm intended here.
        setShowPower(0);
        setShowAltSlope(0); // 0 == hidden for alt-slope
        setShowSlope(false);
        setShowNP(false);
        setShowATISS(false);
        setShowANTISS(false);
        setShowXP(false);
        setShowAP(false);
        setShowHr(false);
        setShowTcore(false);
        setShowSpeed(false);
        setShowCad(false);
        setShowAlt(false);
        setShowTemp(false);
        setShowWind(false);
        setShowRV(false);
        setShowRGCT(false);
        setShowRCad(false);
        setShowSmO2(false);
        setShowtHb(false);
        setShowO2Hb(false);
        setShowHHb(false);
        setShowGear(false);
        setShowW(false);
        setShowTorque(false);
        setShowBalance(false);
        setShowTE(false);
        setShowPS(false);
        setShowPCO(false);
        setShowDC(false);
        setShowPPP(false);
        break;

    case RideFile::secs: // x-axis series, nothing to toggle
        break;

    case RideFile::cad:
        setShowCad(state);
        break;

    case RideFile::tcore:
        setShowTcore(state);
        break;

    case RideFile::hr:
        setShowHr(state);
        break;

    case RideFile::km: // x-axis series, nothing to toggle
        break;

    case RideFile::kph:
        setShowSpeed(state);
        break;

    case RideFile::kphd:
        setShowAccel(state);
        break;

    case RideFile::wattsd:
        setShowPowerD(state);
        break;

    case RideFile::cadd:
        setShowCadD(state);
        break;

    case RideFile::nmd:
        setShowTorqueD(state);
        break;

    case RideFile::hrd:
        setShowHrD(state);
        break;

    case RideFile::nm:
        setShowTorque(state);
        break;

    case RideFile::watts:
        // map bool onto the tri-state power mode:
        // true -> 0 (shown, zone shaded), false -> 2 (hidden)
        setShowPower(state ? 0 : 2);
        break;

    case RideFile::xPower:
        setShowXP(state);
        break;

    case RideFile::aPower:
        setShowAP(state);
        break;

    case RideFile::aTISS:
        setShowATISS(state);
        break;

    case RideFile::anTISS:
        setShowANTISS(state);
        break;

    case RideFile::NP:
        setShowNP(state);
        break;

    case RideFile::alt:
        setShowAlt(state);
        break;

    case RideFile::lon: // map data, not plotted here
        break;

    case RideFile::lat: // map data, not plotted here
        break;

    case RideFile::headwind:
        setShowWind(state);
        break;

    case RideFile::slope:
        setShowSlope(state);
        break;

    case RideFile::temp:
        setShowTemp(state);
        break;

    case RideFile::lrbalance:
        setShowBalance(state);
        break;

    // left/right pairs share one toggle each
    case RideFile::lte:
    case RideFile::rte:
        setShowTE(state);
        break;

    case RideFile::lps:
    case RideFile::rps:
        setShowPS(state);
        break;

    case RideFile::lpco:
    case RideFile::rpco:
        setShowPCO(state);
        break;

    case RideFile::lppb:
    case RideFile::rppb:
    case RideFile::lppe:
    case RideFile::rppe:
        setShowDC(state);
        break;

    case RideFile::lpppb:
    case RideFile::rpppb:
    case RideFile::lpppe:
    case RideFile::rpppe:
        setShowPPP(state);
        break;

    case RideFile::interval:
        break;

    case RideFile::vam:
        break;

    case RideFile::wattsKg:
        break;

    case RideFile::wprime:
        setShowW(state);
        break;

    case RideFile::smo2:
        setShowSmO2(state);
        break;

    case RideFile::thb:
        setShowtHb(state);
        break;

    case RideFile::o2hb:
        setShowO2Hb(state);
        break;

    case RideFile::hhb:
        setShowHHb(state);
        break;

    case RideFile::rvert:
        setShowRV(state);
        break;

    case RideFile::rcad:
        setShowRCad(state);
        break;

    case RideFile::rcontact:
        setShowRGCT(state);
        break;

    case RideFile::gear:
        setShowGear(state);
        break;

    case RideFile::wbal:
        break;
    }
}
// Set the power display mode: 0 = curve shown with zone shading,
// 1 = curve shown without shading, 2 = curve hidden.
// Does nothing when the mode is unchanged.
void
AllPlot::setShowPower(int id)
{
    if (id == showPowerState)
        return;

    showPowerState = id;

    // the watts curve itself is drawn for modes 0 and 1 only
    standard->wattsCurve->setVisible(id < 2);

    // zone shading applies in mode 0 only
    shade_zones = (id == 0);

    setYMax();

    if (!shade_zones) {
        bg->detach();
    } else {
        bg->attach(this);
        refreshZoneLabels();
    }

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();
}
// Show/hide the Normalised Power curve, then rescale the Y axes and redraw.
void
AllPlot::setShowNP(bool show)
{
    showNP = show;
    standard->npCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the running vertical-oscillation curve.
void
AllPlot::setShowRV(bool show)
{
    showRV = show;
    standard->rvCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the running cadence curve.
void
AllPlot::setShowRCad(bool show)
{
    showRCad = show;
    standard->rcadCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the running ground-contact-time curve.
void
AllPlot::setShowRGCT(bool show)
{
    showRGCT = show;
    standard->rgctCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the gear-ratio curve.
void
AllPlot::setShowGear(bool show)
{
    showGear = show;
    standard->gearCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the muscle oxygen saturation (SmO2) curve.
void
AllPlot::setShowSmO2(bool show)
{
    showSmO2 = show;
    standard->smo2Curve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the total haemoglobin (tHb) curve.
void
AllPlot::setShowtHb(bool show)
{
    showtHb = show;
    standard->thbCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the oxygenated haemoglobin (O2Hb) curve.
void
AllPlot::setShowO2Hb(bool show)
{
    showO2Hb = show;
    standard->o2hbCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the deoxygenated haemoglobin (HHb) curve.
void
AllPlot::setShowHHb(bool show)
{
    showHHb = show;
    standard->hhbCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show/hide the anaerobic TISS curve, then rescale the Y axes and redraw.
void
AllPlot::setShowANTISS(bool show)
{
    showANTISS = show;
    standard->antissCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the aerobic TISS curve.
void
AllPlot::setShowATISS(bool show)
{
    showATISS = show;
    standard->atissCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the xPower curve.
void
AllPlot::setShowXP(bool show)
{
    showXP = show;
    standard->xpCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the altitude-adjusted power (aPower) curve.
void
AllPlot::setShowAP(bool show)
{
    showAP = show;
    standard->apCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the core temperature curve.
void
AllPlot::setShowTcore(bool show)
{
    showTcore = show;
    standard->tcoreCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the heart-rate curve.
void
AllPlot::setShowHr(bool show)
{
    showHr = show;
    standard->hrCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the speed curve.
void
AllPlot::setShowSpeed(bool show)
{
    showSpeed = show;
    standard->speedCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the acceleration (speed delta) curve.
void
AllPlot::setShowAccel(bool show)
{
    showAccel = show;
    standard->accelCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the power delta curve.
void
AllPlot::setShowPowerD(bool show)
{
    showPowerD = show;
    standard->wattsDCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show/hide the cadence delta curve, then rescale the Y axes and redraw.
void
AllPlot::setShowCadD(bool show)
{
    showCadD = show;
    standard->cadDCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the torque delta curve.
void
AllPlot::setShowTorqueD(bool show)
{
    showTorqueD = show;
    standard->nmDCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the heart-rate delta curve.
void
AllPlot::setShowHrD(bool show)
{
    showHrD = show;
    standard->hrDCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the cadence curve.
void
AllPlot::setShowCad(bool show)
{
    showCad = show;
    standard->cadCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the altitude curve.
void
AllPlot::setShowAlt(bool show)
{
    showAlt = show;
    standard->altCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the slope (gradient) curve.
void
AllPlot::setShowSlope(bool show)
{
    showSlope = show;
    standard->slopeCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Set the altitude-coloured-by-slope display mode:
// 0 = hidden, higher values select a plot style applied via
// setAltSlopePlotStyle(). Does nothing when the mode is unchanged.
void
AllPlot::setShowAltSlope(int id)
{
    if (showAltSlopeState == id) return;

    showAltSlopeState = id;
    standard->altSlopeCurve->setVisible(id > 0);
    setAltSlopePlotStyle(standard->altSlopeCurve);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the temperature curve.
void
AllPlot::setShowTemp(bool show)
{
    showTemp = show;
    standard->tempCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the headwind curve.
void
AllPlot::setShowWind(bool show)
{
    showWind = show;
    standard->windCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show/hide the W'bal model curves (wCurve = W' balance, mCurve = matches).
// Also blanks the curve title when hidden or when no valid W' decay
// constant (TAU) is available, and deletes the match labels when hiding.
void
AllPlot::setShowW(bool show)
{
    showW = show;
    standard->wCurve->setVisible(show);
    standard->mCurve->setVisible(show);

    // blank the title (which displays TAU) when hidden or TAU is invalid
    if (!showW || (rideItem && rideItem->ride() && rideItem->ride()->wprimeData()->TAU <= 0)) {
        standard->curveTitle.setLabel(QwtText(""));
    }
    setYMax();

    // clear labels ?
    if (show == false) {
        foreach(QwtPlotMarker *p, standard->matchLabels) {
            p->detach();
            delete p;
        }
        standard->matchLabels.clear();
    }

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show/hide the torque curve, then rescale the Y axes and redraw.
void
AllPlot::setShowTorque(bool show)
{
    showTorque = show;
    standard->torqueCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right balance pair of curves.
void
AllPlot::setShowBalance(bool show)
{
    showBalance = show;
    standard->balanceLCurve->setVisible(show);
    standard->balanceRCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right torque-effectiveness pair of curves.
void
AllPlot::setShowTE(bool show)
{
    showTE = show;
    standard->lteCurve->setVisible(show);
    standard->rteCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right pedal-smoothness pair of curves.
void
AllPlot::setShowPS(bool show)
{
    showPS = show;
    standard->lpsCurve->setVisible(show);
    standard->rpsCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right power-phase pair of curves.
void
AllPlot::setShowDC(bool show)
{
    showDC = show;
    standard->lppCurve->setVisible(show);
    standard->rppCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right peak-power-phase pair of curves.
void
AllPlot::setShowPPP(bool show)
{
    showPPP = show;
    standard->lpppCurve->setVisible(show);
    standard->rpppCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}

// Show/hide the left/right platform-centre-offset pair of curves.
void
AllPlot::setShowPCO(bool show)
{
    showPCO = show;
    standard->lpcoCurve->setVisible(show);
    standard->rpcoCurve->setVisible(show);
    setYMax();

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Show/hide the background grid; no axis rescale is needed.
void
AllPlot::setShowGrid(bool show)
{
    standard->grid->setVisible(show);

    // remember the curves and colors
    isolation = false;
    curveColors->saveState();

    replot();
}
// Toggle the translucent area fill under the curves.
// When state is non-zero each curve is filled with its own pen colour
// at alpha 64; when zero all brushes are cleared (outline only).
void
AllPlot::setPaintBrush(int state)
{
    fill = state;
    if (state) {

        QColor p;
        p = standard->wCurve->pen().color();
        p.setAlpha(64);
        standard->wCurve->setBrush(QBrush(p));

        p = standard->wattsCurve->pen().color();
        p.setAlpha(64);
        standard->wattsCurve->setBrush(QBrush(p));

        p = standard->npCurve->pen().color();
        p.setAlpha(64);
        standard->npCurve->setBrush(QBrush(p));
        // NOTE(review): atiss/antiss reuse the NP pen colour computed
        // just above rather than their own pen colours -- confirm this
        // is intentional and not a copy-paste slip.
        standard->atissCurve->setBrush(QBrush(p));
        standard->antissCurve->setBrush(QBrush(p));

        p = standard->rvCurve->pen().color();
        p.setAlpha(64);
        standard->rvCurve->setBrush(QBrush(p));

        p = standard->rgctCurve->pen().color();
        p.setAlpha(64);
        standard->rgctCurve->setBrush(QBrush(p));

        p = standard->rcadCurve->pen().color();
        p.setAlpha(64);
        standard->rcadCurve->setBrush(QBrush(p));

        p = standard->gearCurve->pen().color();
        p.setAlpha(64);
        standard->gearCurve->setBrush(QBrush(p));

        p = standard->smo2Curve->pen().color();
        p.setAlpha(64);
        standard->smo2Curve->setBrush(QBrush(p));

        p = standard->thbCurve->pen().color();
        p.setAlpha(64);
        standard->thbCurve->setBrush(QBrush(p));

        p = standard->o2hbCurve->pen().color();
        p.setAlpha(64);
        standard->o2hbCurve->setBrush(QBrush(p));

        p = standard->hhbCurve->pen().color();
        p.setAlpha(64);
        standard->hhbCurve->setBrush(QBrush(p));

        p = standard->xpCurve->pen().color();
        p.setAlpha(64);
        standard->xpCurve->setBrush(QBrush(p));

        p = standard->apCurve->pen().color();
        p.setAlpha(64);
        standard->apCurve->setBrush(QBrush(p));

        p = standard->tcoreCurve->pen().color();
        p.setAlpha(64);
        standard->tcoreCurve->setBrush(QBrush(p));

        p = standard->hrCurve->pen().color();
        p.setAlpha(64);
        standard->hrCurve->setBrush(QBrush(p));

        p = standard->accelCurve->pen().color();
        p.setAlpha(64);
        standard->accelCurve->setBrush(QBrush(p));

        p = standard->wattsDCurve->pen().color();
        p.setAlpha(64);
        standard->wattsDCurve->setBrush(QBrush(p));

        p = standard->cadDCurve->pen().color();
        p.setAlpha(64);
        standard->cadDCurve->setBrush(QBrush(p));

        p = standard->nmDCurve->pen().color();
        p.setAlpha(64);
        standard->nmDCurve->setBrush(QBrush(p));

        p = standard->hrDCurve->pen().color();
        p.setAlpha(64);
        standard->hrDCurve->setBrush(QBrush(p));

        p = standard->speedCurve->pen().color();
        p.setAlpha(64);
        standard->speedCurve->setBrush(QBrush(p));

        p = standard->cadCurve->pen().color();
        p.setAlpha(64);
        standard->cadCurve->setBrush(QBrush(p));

        p = standard->tempCurve->pen().color();
        p.setAlpha(64);
        standard->tempCurve->setBrush(QBrush(p));

        p = standard->torqueCurve->pen().color();
        p.setAlpha(64);
        standard->torqueCurve->setBrush(QBrush(p));

        p = standard->lteCurve->pen().color();
        p.setAlpha(64);
        standard->lteCurve->setBrush(QBrush(p));

        p = standard->rteCurve->pen().color();
        p.setAlpha(64);
        standard->rteCurve->setBrush(QBrush(p));

        p = standard->lpsCurve->pen().color();
        p.setAlpha(64);
        standard->lpsCurve->setBrush(QBrush(p));

        p = standard->rpsCurve->pen().color();
        p.setAlpha(64);
        standard->rpsCurve->setBrush(QBrush(p));

        p = standard->lpcoCurve->pen().color();
        p.setAlpha(64);
        standard->lpcoCurve->setBrush(QBrush(p));

        p = standard->rpcoCurve->pen().color();
        p.setAlpha(64);
        standard->rpcoCurve->setBrush(QBrush(p));

        p = standard->lppCurve->pen().color();
        p.setAlpha(64);
        standard->lppCurve->setBrush(QBrush(p));

        p = standard->rppCurve->pen().color();
        p.setAlpha(64);
        standard->rppCurve->setBrush(QBrush(p));

        p = standard->lpppCurve->pen().color();
        p.setAlpha(64);
        standard->lpppCurve->setBrush(QBrush(p));

        p = standard->rpppCurve->pen().color();
        p.setAlpha(64);
        standard->rpppCurve->setBrush(QBrush(p));

        p = standard->slopeCurve->pen().color();
        p.setAlpha(64);
        standard->slopeCurve->setBrush(QBrush(p));

        // alt-slope and balance curves are deliberately left unfilled
        /*p = standard->altSlopeCurve->pen().color();
        p.setAlpha(64);
        standard->altSlopeCurve->setBrush(QBrush(p));

        p = standard->balanceLCurve->pen().color();
        p.setAlpha(64);
        standard->balanceLCurve->setBrush(QBrush(p));

        p = standard->balanceRCurve->pen().color();
        p.setAlpha(64);
        standard->balanceRCurve->setBrush(QBrush(p));*/
    } else {
        // clear every brush; curves revert to outline-only rendering
        standard->wCurve->setBrush(Qt::NoBrush);
        standard->wattsCurve->setBrush(Qt::NoBrush);
        standard->npCurve->setBrush(Qt::NoBrush);
        standard->rvCurve->setBrush(Qt::NoBrush);
        standard->rgctCurve->setBrush(Qt::NoBrush);
        standard->rcadCurve->setBrush(Qt::NoBrush);
        standard->gearCurve->setBrush(Qt::NoBrush);
        standard->smo2Curve->setBrush(Qt::NoBrush);
        standard->thbCurve->setBrush(Qt::NoBrush);
        standard->o2hbCurve->setBrush(Qt::NoBrush);
        standard->hhbCurve->setBrush(Qt::NoBrush);
        standard->atissCurve->setBrush(Qt::NoBrush);
        standard->antissCurve->setBrush(Qt::NoBrush);
        standard->xpCurve->setBrush(Qt::NoBrush);
        standard->apCurve->setBrush(Qt::NoBrush);
        standard->hrCurve->setBrush(Qt::NoBrush);
        standard->tcoreCurve->setBrush(Qt::NoBrush);
        standard->speedCurve->setBrush(Qt::NoBrush);
        standard->accelCurve->setBrush(Qt::NoBrush);
        standard->wattsDCurve->setBrush(Qt::NoBrush);
        standard->cadDCurve->setBrush(Qt::NoBrush);
        standard->hrDCurve->setBrush(Qt::NoBrush);
        standard->nmDCurve->setBrush(Qt::NoBrush);
        standard->cadCurve->setBrush(Qt::NoBrush);
        standard->tempCurve->setBrush(Qt::NoBrush);
        standard->torqueCurve->setBrush(Qt::NoBrush);
        standard->lteCurve->setBrush(Qt::NoBrush);
        standard->rteCurve->setBrush(Qt::NoBrush);
        standard->lpsCurve->setBrush(Qt::NoBrush);
        standard->rpsCurve->setBrush(Qt::NoBrush);
        standard->lpcoCurve->setBrush(Qt::NoBrush);
        standard->rpcoCurve->setBrush(Qt::NoBrush);
        standard->lppCurve->setBrush(Qt::NoBrush);
        standard->rppCurve->setBrush(Qt::NoBrush);
        standard->lpppCurve->setBrush(Qt::NoBrush);
        standard->rpppCurve->setBrush(Qt::NoBrush);
        standard->slopeCurve->setBrush(Qt::NoBrush);
        //standard->altSlopeCurve->setBrush(Qt::NoBrush);
        //standard->balanceLCurve->setBrush(Qt::NoBrush);
        //standard->balanceRCurve->setBrush(Qt::NoBrush);
    }
    replot();
}
// Change the smoothing window and recompute the smoothed plot arrays.
void
AllPlot::setSmoothing(int value)
{
    smooth = value;

    // if anything is going on, lets stop it now!
    // (it is actually quite handy to adjust smoothing interactively)
    isolation = false;
    // NOTE(review): this uses restoreState() where the setShow* setters
    // use saveState() -- confirm that is intended.
    curveColors->restoreState();
    recalc(standard);
}

// Switch the x-axis between time (id == 0) and distance (id == 1)
// and recompute the plot data.
void
AllPlot::setByDistance(int id)
{
    bydist = (id == 1);
    setXTitle();

    // if anything is going on, lets stop it now!
    isolation = false;
    curveColors->restoreState();
    recalc(standard);
}
// Strict-weak ordering on sample values, used as the comparator for
// std::lower_bound over the smoothed time/distance arrays.
// (Functionally equivalent to std::less<double>.)
struct ComparePoints {
    bool operator()(const double lhs, const double rhs) {
        return lhs < rhs;
    }
};
int
AllPlot::timeIndex(double min) const
{
// return index offset for specified time
QVector<double>::const_iterator i = std::lower_bound(
standard->smoothTime.begin(), standard->smoothTime.end(), min, ComparePoints());
if (i == standard->smoothTime.end())
return standard->smoothTime.size();
return i - standard->smoothTime.begin();
}
int
AllPlot::distanceIndex(double km) const
{
// return index offset for specified distance in km
QVector<double>::const_iterator i = std::lower_bound(
standard->smoothDistance.begin(), standard->smoothDistance.end(), km, ComparePoints());
if (i == standard->smoothDistance.end())
return standard->smoothDistance.size();
return i - standard->smoothDistance.begin();
}
/*----------------------------------------------------------------------
* Interval plotting
*--------------------------------------------------------------------*/
/*
* HELPER FUNCTIONS:
* intervalNum - returns a pointer to the nth selected interval
* intervalCount - returns the number of highlighted intervals
*/
// note this is operating on the children of allIntervals and not the
// intervalWidget (QTreeWidget) -- this is why we do not use the
// selectedItems() member. N starts at one, not zero.
// Return the n-th *selected* interval of the current ride (1-based),
// or NULL when there is no current ride or fewer than n are selected.
IntervalItem *IntervalPlotData::intervalNum(int n) const
{
    RideItem *rideItem = window->current;
    if (!rideItem) return NULL;

    // walk the intervals counting only the selected ones
    int highlighted=0;
    foreach(IntervalItem *p, rideItem->intervals()) {
        if (p->selected) highlighted++;
        if (highlighted == n) return p;
    }
    return NULL;
}

// how many intervals selected?
int IntervalPlotData::intervalCount() const
{
    RideItem *rideItem = window->current;
    if (!rideItem) return 0;

    int highlighted=0;
    foreach(IntervalItem *p, rideItem->intervals())
        if (p->selected) highlighted++;
    return highlighted;
}
/*
* INTERVAL HIGHLIGHTING CURVE
* IntervalPlotData - implements the qwtdata interface where
* x,y return point co-ordinates and
* size returns the number of points
*/
// The interval curve data is derived from the intervals that have
// been selected in the Context leftlayout for each selected
// interval we return 4 data points; bottomleft, topleft, topright
// and bottom right.
//
// the points correspond to:
// bottom left = interval start, 0 watts
// top left = interval start, maxwatts
// top right = interval stop, maxwatts
// bottom right = interval stop, 0 watts
//
// X co-ordinate of point i in the interval highlighter curve.  Each
// selected interval contributes four corner points (bottom-left,
// top-left, top-right, bottom-right); for right-hand corners the edge
// is pulled in when another selected interval overlaps this one, so
// the shaded rectangles do not double-paint the overlap.
double IntervalPlotData::x(size_t i) const
{
    // for each interval there are four points, which interval is this for?
    int interval = i ? i/4 : 0;
    interval += 1; // interval numbers start at 1 not ZERO in the utility functions

    // convert km to miles when imperial units are in use
    double multiplier = context->athlete->useMetricUnits ? 1 : MILES_PER_KM;

    // get the interval
    IntervalItem *current = intervalNum(interval);
    if (current == NULL) return 0; // out of bounds !?

    // overlap at right ?
    double right = allPlot->bydist ? multiplier * current->stopKM : current->stop/60;

    // only the two right-hand corners (i%4 == 2 or 3) need overlap checks
    if (i%4 == 2 || i%4 == 3) {
        for (int n=1; n<=intervalCount(); n++) {
            IntervalItem *other = intervalNum(n);
            if (other != current) {
                // does the other interval straddle our stop point?
                if (other->start < current->stop && other->stop > current->stop) {
                    if (other->start < current->start) {
                        // other fully covers our start: shrink to our own start edge
                        double _right = allPlot->bydist ? multiplier * current->startKM : current->start/60;
                        if (_right<right)
                            right = _right;
                    } else {
                        // otherwise stop where the other interval begins
                        double _right = allPlot->bydist ? multiplier * other->startKM : other->start/60;
                        if (_right<right)
                            right = _right;
                    }
                }
            }
        }
    }

    // which point are we returning?
    switch (i%4) {
    case 0 : return allPlot->bydist ? multiplier * current->startKM : current->start/60; // bottom left
    case 1 : return allPlot->bydist ? multiplier * current->startKM : current->start/60; // top left
    case 2 : return right; // top right
    case 3 : return right; // bottom right
    }
    return 0; // shouldn't get here, but keeps compiler happy
}
// Y co-ordinate of point i: the highlighter rectangles run from -20 at
// the bottom to 100 at the top, both deliberately outside the plot's
// visible range so the shading always spans the full height.
double IntervalPlotData::y(size_t i) const
{
    // points cycle bottom-left, top-left, top-right, bottom-right
    const size_t corner = i % 4;
    if (corner == 1 || corner == 2) return 100;  // top edge (out of bounds)
    if (corner == 0 || corner == 3) return -20;  // bottom edge
    return 0; // unreachable, keeps the compiler happy
}
// Four corner points are generated per selected interval.
size_t IntervalPlotData::size() const { return intervalCount()*4; }

// Point i as an (x, y) sample for Qwt.
QPointF IntervalPlotData::sample(size_t i) const {
    return QPointF(x(i), y(i));
}

QRectF IntervalPlotData::boundingRect() const
{
    // NOTE(review): QRectF(left, top, width, height) anchored at y=5000 with
    // a 5100x5100 extent looks arbitrary -- presumably just "large enough"
    // that Qwt never clips the highlighter.  Confirm before changing.
    return QRectF(0, 5000, 5100, 5100);
}
// Mouse-move handler wired to the Qwt picker: when the cursor sits on a
// real data curve, show a value tooltip (adding pace for runs/swims);
// when it does not, find the smallest user interval under the cursor,
// shade it, and notify the other charts of the hover.
void
AllPlot::pointHover(QwtPlotCurve *curve, int index)
{
    double X=0.0f;

    if (index >= 0 && curve != standard->intervalHighlighterCurve &&
        curve != standard->intervalHoverCurve && curve->isVisible()) {

        double yvalue = curve->sample(index).y();
        double xvalue = curve->sample(index).x();
        X = xvalue;

        QString xstring;
        if (bydist) {
            xstring = QString("%1").arg(xvalue);
        } else {
            // x is in minutes when plotting by time
            QTime t = QTime(0,0,0).addSecs(xvalue*60.00);
            xstring = t.toString("hh:mm:ss");
        }

        // for speed curve add pace with units according to settings
        // only when the activity is a run.
        QString paceStr;
        if (curve->title() == tr("Speed") && rideItem && rideItem->isRun) {
            bool metricPace = appsettings->value(this, GC_PACE, true).toBool();
            QString paceunit = metricPace ? tr("min/km") : tr("min/mile");
            paceStr = tr("\n%1 %2").arg(context->athlete->useMetricUnits ? kphToPace(yvalue, metricPace, false) : mphToPace(yvalue, metricPace, false)).arg(paceunit);
        }

        // swims use per-100m / per-100yd pace units instead
        if (curve->title() == tr("Speed") && rideItem && rideItem->isSwim) {
            bool metricPace = appsettings->value(this, GC_SWIMPACE, true).toBool();
            QString paceunit = metricPace ? tr("min/100m") : tr("min/100yd");
            paceStr = tr("\n%1 %2").arg(context->athlete->useMetricUnits ? kphToPace(yvalue, metricPace, true) : mphToPace(yvalue, metricPace, true)).arg(paceunit);
        }

        // haemoglobin curves get two decimal places, everything else one
        bool isHB= curve->title().text().contains("Hb");

        // need to scale for W' bal
        if (curve->title().text().contains("W'")) yvalue /= 1000.0f;

        // output the tooltip
        QString text = QString("%1 %2%5\n%3 %4")
                        .arg(yvalue, 0, 'f', isHB ? 2 : 1)
                        .arg(this->axisTitle(curve->yAxis()).text())
                        .arg(xstring)
                        .arg(this->axisTitle(curve->xAxis()).text())
                        .arg(paceStr);

        // set that text up
        tooltip->setText(text);

        // isolate me -- maybe do this via the legend ?
        //curveColors->isolate(curve);
        //replot();

    } else {

        // no point
        tooltip->setText("");

        // ok now we highlight intervals
        QPoint cursor = QCursor::pos();
        X = tooltip->invTransform(canvas()->mapFromGlobal(cursor)).x();

        // get colors back -- maybe do this via the legend?
        //curveColors->restoreState();
        //replot();
    }

    // we don't want hovering or we have intervals selected so no need to mouse over
    if (!window->showHover->isChecked() || (rideItem && rideItem->intervalsSelected().count())) return;

    if (!context->isCompareIntervals && rideItem && rideItem->ride()) {

        // convert from distance to time
        if (bydist) X = rideItem->ride()->distanceToTime(X) / 60.00f;

        QVector<double>xdata, ydata;
        IntervalItem *chosen = NULL;

        if (rideItem->ride()->dataPoints().count() > 1) {

            // set duration to length of ride, and keep the value to compare
            int rideduration = rideItem->ride()->dataPoints().last()->secs -
                               rideItem->ride()->dataPoints().first()->secs;

            int duration = rideduration;

            // loop through intervals and select FIRST we are in
            foreach(IntervalItem *i, rideItem->intervals()) {

                // ignore peaks and all, they are really distracting
                if (i->type == RideFileInterval::ALL || i->type == RideFileInterval::PEAKPOWER)
                    continue;

                if (i->start < (X*60.00f) && i->stop > (X*60.00f)) {
                    // prefer the smallest enclosing interval found so far
                    if ((i->stop-i->start) < duration) {
                        duration = i->stop - i->start;
                        chosen = i;
                    }
                }
            }

            // we already chose it!
            if (chosen == NULL || chosen == hovered) return;

            if (duration < rideduration) {

                // hover curve color aligns to the type of interval we are highlighting
                QColor hbrush = chosen->color;
                hbrush.setAlpha(64);
                standard->intervalHoverCurve->setBrush(hbrush); // fill below the line

                // we chose one? build the four rectangle corners,
                // with y values deliberately off-scale (-20 .. 100)
                if (bydist) {
                    double multiplier = context->athlete->useMetricUnits ? 1 : MILES_PER_KM;
                    double start = multiplier * chosen->startKM;
                    double stop = multiplier * chosen->stopKM;

                    xdata << start;
                    ydata << -20;
                    xdata << start;
                    ydata << 100;
                    xdata << stop;
                    ydata << 100;
                    xdata << stop;
                    ydata << -20;
                } else {
                    xdata << chosen->start / 60.00f;
                    ydata << -20;
                    xdata << chosen->start / 60.00f;
                    ydata << 100;
                    xdata << chosen->stop / 60.00f;
                    ydata << 100;
                    xdata << chosen->stop / 60.00f;
                    ydata << -20;
                }
            }
        }

        standard->intervalHoverCurve->setSamples(xdata,ydata);
        replot();

        // remember for next time!
        hovered = chosen;

        // tell the charts -- and block signals whilst they occur
        blockSignals(true);
        context->notifyIntervalHover(hovered);
        blockSignals(false);
    }
}
// External hover entry point: another chart reported the user hovering
// an interval, so shade the same full-height rectangle that a local
// pointHover() would have drawn.
void
AllPlot::intervalHover(IntervalItem *chosen)
{
    // no point!
    if (!isVisible() || chosen == hovered) return;

    // don't highlight the all or all the peak intervals
    // NOTE(review): unlike pointHover(), only RideFileInterval::ALL is
    // skipped here, so PEAKPOWER intervals still highlight -- confirm
    // whether that asymmetry with pointHover() is intended.
    if (chosen && chosen->type == RideFileInterval::ALL) return;

    QVector<double>xdata, ydata;
    if (chosen) {
        // hover curve color aligns to the type of interval we are highlighting
        QColor hbrush = chosen->color;
        hbrush.setAlpha(64);
        standard->intervalHoverCurve->setBrush(hbrush); // fill below the line

        // four corners of the highlight rectangle; y values are
        // deliberately off-scale so the shading spans the full height
        if (bydist) {
            double multiplier = context->athlete->useMetricUnits ? 1 : MILES_PER_KM;
            double start = multiplier * chosen->startKM;
            double stop = multiplier * chosen->stopKM;
            xdata << start;
            ydata << -20;
            xdata << start;
            ydata << 100;
            xdata << stop;
            ydata << 100;
            xdata << stop;
            ydata << -20;
        } else {
            xdata << chosen->start / 60.00f;
            ydata << -20;
            xdata << chosen->start / 60.00f;
            ydata << 100;
            xdata << chosen->stop / 60.00f;
            ydata << 100;
            xdata << chosen->stop / 60.00f;
            ydata << -20;
        }
    }

    // update state
    hovered = chosen;
    standard->intervalHoverCurve->setSamples(xdata,ydata);
    replot();
}
// Grow a reference-line step size through progressively coarser
// increments: +10 up to 50, a jump from 50 straight to 100, then +100
// below 1000, +500 below 5000, and +1000 from there on.
void
AllPlot::nextStep( int& step )
{
    // the jump from exactly 50 to 100 is a special case
    if (step == 50) {
        step = 100;
        return;
    }

    int increment = 1000; // default for >= 5000 (and the odd 51-99 range)
    if (step < 50) increment = 10;
    else if (step >= 100 && step < 1000) increment = 100;
    else if (step >= 1000 && step < 5000) increment = 500;

    step += increment;
}
// Event filter for the y-axis widgets: double-click/drag/release on the
// power axis manages reference lines; enter/leave/click on any axis
// drives curve isolation (dimming curves not on that axis).
bool
AllPlot::eventFilter(QObject *obj, QEvent *event)
{
    // if power is going on we worry about reference lines
    // otherwise not so much ..
    if ((showPowerState<2 && scope == RideFile::none) || scope == RideFile::watts || scope == RideFile::aTISS ||
        scope == RideFile::anTISS || scope == RideFile::NP || scope == RideFile::aPower || scope == RideFile::xPower) {

        int axis = -1;
        if (obj == axisWidget(QwtPlot::yLeft))
            axis=QwtPlot::yLeft;

        // double-click: place a reference line, offering delete UI
        if (axis>-1 && event->type() == QEvent::MouseButtonDblClick) {
            QMouseEvent *m = static_cast<QMouseEvent*>(event);
            confirmTmpReference(invTransform(axis, m->y()),axis, true); // do show delete stuff
            return false;
        }
        // drag: preview a temporary reference line while moving
        if (axis>-1 && event->type() == QEvent::MouseMove) {
            QMouseEvent *m = static_cast<QMouseEvent*>(event);
            plotTmpReference(axis, m->x()-axisWidget(axis)->width(), m->y());
            return false;
        }
        // release: confirm when dropped on the canvas, cancel on the axis
        if (axis>-1 && event->type() == QEvent::MouseButtonRelease) {
            QMouseEvent *m = static_cast<QMouseEvent*>(event);
            if (m->x()>axisWidget(axis)->width()) {
                confirmTmpReference(invTransform(axis, m->y()),axis,false); // don't show delete stuff
                return false;
            } else if (standard->tmpReferenceLines.count()) {
                plotTmpReference(axis, 0, 0); //unplot
                return true;
            }
        }
    }

    // is it for other objects ?
    // parallel lists: widget pointer at index k maps to axis id at index k
    QList<QObject*> axes;
    QList<QwtAxisId> axesId;

    axes << axisWidget(QwtPlot::yLeft);
    axesId << QwtPlot::yLeft;

    axes << axisWidget(QwtAxisId(QwtAxis::yLeft, 1));
    axesId << QwtAxisId(QwtAxis::yLeft, 1);

    axes << axisWidget(QwtAxisId(QwtAxis::yLeft, 3));
    axesId << QwtAxisId(QwtAxis::yLeft, 3);

    axes << axisWidget(QwtPlot::yRight);
    axesId << QwtPlot::yRight;

    axes << axisWidget(QwtAxisId(QwtAxis::yRight, 1));
    axesId << QwtAxisId(QwtAxis::yRight, 1);

    axes << axisWidget(QwtAxisId(QwtAxis::yRight, 2));
    axesId << QwtAxisId(QwtAxis::yRight, 2);

    axes << axisWidget(QwtAxisId(QwtAxis::yRight, 3));
    axesId << QwtAxisId(QwtAxis::yRight, 3);

    if (axes.contains(obj)) {

        QwtAxisId id = axesId.at(axes.indexOf(obj));

        // this is an axes widget
        //qDebug()<<this<<"event on="<<id<< static_cast<QwtScaleWidget*>(obj)->title().text() <<"event="<<event->type();

        // isolate / restore on mouse enter leave
        if (!isolation && event->type() == QEvent::Enter) {

            // isolate curve on hover
            curveColors->isolateAxis(id);
            replot();

        } else if (!isolation && event->type() == QEvent::Leave) {

            // return to normal when leave
            curveColors->restoreState();
            replot();

        } else if (event->type() == QEvent::MouseButtonRelease) {

            // click on any axis to toggle isolation
            // if isolation is on, just turns it off
            // if isolation is off, turns it on for the axis clicked
            if (isolation) {
                isolation = false;
                curveColors->restoreState();
                replot();
            } else {
                isolation = true;
                curveColors->isolateAxis(id, true); // with scale adjust
                replot();
            }
        }
    }

    // turn off hover when mouse leaves
    if (event->type() == QEvent::Leave) context->notifyIntervalHover(NULL);

    return false;
}
// Plot (x>0) or clear (x<=0) a temporary power reference line on the
// main plot, then mirror the change onto every series and stack plot
// owned by the hosting AllPlotWindow.
void
AllPlot::plotTmpReference(int axis, int x, int y)
{
    // only if on allplotwindow
    if (window==NULL) return;

    // not supported in compare mode
    if (context->isCompareIntervals) return;

    // only on power based charts
    if (scope != RideFile::none && scope != RideFile::watts && scope != RideFile::aTISS && scope != RideFile::anTISS &&
        scope != RideFile::NP && scope != RideFile::aPower && scope != RideFile::xPower) return;

    if (x>0) {

        // NOTE(review): referencePoint is heap-allocated and never freed in
        // this function -- presumably plotReferenceLine() copies what it
        // needs; confirm ownership, otherwise this leaks one RideFilePoint
        // per mouse-move while dragging.
        RideFilePoint *referencePoint = new RideFilePoint();
        referencePoint->watts = invTransform(axis, y);

        // wipe the previous temporary lines on the main plot
        foreach(QwtPlotCurve *curve, standard->tmpReferenceLines) {
            if (curve) {
                curveColors->remove(curve); // ignored if not already there
                curve->detach();
                delete curve;
            }
        }
        standard->tmpReferenceLines.clear();

        // only plot if they are relevant to the plot.
        QwtPlotCurve *referenceLine = window->allPlot->plotReferenceLine(referencePoint);
        if (referenceLine) {
            standard->tmpReferenceLines.append(referenceLine);
            window->allPlot->replot();
        }

        // now do the series plots
        foreach(AllPlot *plot, window->seriesPlots) {
            plot->replot();
            foreach(QwtPlotCurve *curve, plot->standard->tmpReferenceLines) {
                if (curve) {
                    plot->curveColors->remove(curve); // ignored if not already there
                    curve->detach();
                    delete curve;
                }
            }
            plot->standard->tmpReferenceLines.clear();
        }

        foreach(AllPlot *plot, window->seriesPlots) {
            QwtPlotCurve *referenceLine = plot->plotReferenceLine(referencePoint);
            if (referenceLine) {
                plot->standard->tmpReferenceLines.append(referenceLine);
                plot->replot();
            }
        }

        // now the stack plots
        foreach(AllPlot *plot, window->allPlots) {
            plot->replot();
            foreach(QwtPlotCurve *curve, plot->standard->tmpReferenceLines) {
                if (curve) {
                    plot->curveColors->remove(curve); // ignored if not already there
                    curve->detach();
                    delete curve;
                }
            }
            plot->standard->tmpReferenceLines.clear();
        }

        foreach(AllPlot *plot, window->allPlots) {
            QwtPlotCurve *referenceLine = plot->plotReferenceLine(referencePoint);
            if (referenceLine) {
                plot->standard->tmpReferenceLines.append(referenceLine);
                plot->replot();
            }
        }

    } else {

        // wipe any we don't want
        foreach(QwtPlotCurve *curve, standard->tmpReferenceLines) {
            if (curve) {
                curveColors->remove(curve); // ignored if not already there
                curve->detach();
                delete curve;
            }
        }
        standard->tmpReferenceLines.clear();
        window->allPlot->replot();

        foreach(AllPlot *plot, window->seriesPlots) {
            plot->replot();
            foreach(QwtPlotCurve *curve, plot->standard->tmpReferenceLines) {
                if (curve) {
                    plot->curveColors->remove(curve); // ignored if not already there
                    curve->detach();
                    delete curve;
                }
                // NOTE(review): this clear() sits inside the curve loop here,
                // unlike the matching loops above/below where it is outside.
                // Qt's foreach iterates over a copy so the net effect is the
                // same, but the placement looks accidental -- worth normalising.
                plot->standard->tmpReferenceLines.clear();
            }
        }
        window->allPlot->replot();

        foreach(AllPlot *plot, window->allPlots) {
            plot->replot();
            foreach(QwtPlotCurve *curve, plot->standard->tmpReferenceLines) {
                if (curve) {
                    plot->curveColors->remove(curve); // ignored if not already there
                    curve->detach();
                    delete curve;
                }
            }
            plot->standard->tmpReferenceLines.clear();
        }
    }
}
void
AllPlot::refreshReferenceLinesForAllPlots()
{
// not supported in compare mode
if (window == NULL || context->isCompareIntervals) return;
window->allPlot->refreshReferenceLines();
foreach(AllPlot *plot, window->allPlots) {
plot->refreshReferenceLines();
}
foreach(AllPlot *plot, window->seriesPlots) {
plot->refreshReferenceLines();
}
}
// Pop up the modal dialog that lets the user confirm (and optionally
// delete) a reference line at the given axis value.
//
// Fix: the dialog used to be heap-allocated with `new` and never
// deleted, leaking one ReferenceLineDialog per invocation.  A stack
// object is sufficient since exec() blocks until the dialog closes.
void
AllPlot::confirmTmpReference(double value, int axis, bool allowDelete)
{
    // not supported in compare mode
    if (window == NULL || context->isCompareIntervals) return;

    ReferenceLineDialog p(this, context, allowDelete);
    p.setWindowModality(Qt::ApplicationModal); // don't allow select other ride or it all goes wrong!
    p.setValueForAxis(value, axis);
    p.move(QCursor::pos()-QPoint(40,40));
    p.exec();
}
// Pick the altitude-slope curve style matching the current alt/slope
// display state, using the distance variants when plotting by distance
// and the time variants otherwise.  States 0 and 1 share a style.
void
AllPlot::setAltSlopePlotStyle (AllPlotSlopeCurve *curve){
    switch (showAltSlopeState) {
    case 0:
    case 1:
        curve->setStyle(bydist ? AllPlotSlopeCurve::SlopeDist1 : AllPlotSlopeCurve::SlopeTime1);
        break;
    case 2:
        curve->setStyle(bydist ? AllPlotSlopeCurve::SlopeDist2 : AllPlotSlopeCurve::SlopeTime2);
        break;
    case 3:
        curve->setStyle(bydist ? AllPlotSlopeCurve::SlopeDist3 : AllPlotSlopeCurve::SlopeTime3);
        break;
    }
}
objects->smoothLPP[secs] = QwtIntervalSample( bydist ? totalDist : secs / 60.0, QwtInterval(totalLPPB / list.size(), totalLPPE / list.size() ) );
objects->smoothRPP[secs] = QwtIntervalSample( bydist ? totalDist : secs / 60.0, QwtInterval(totalRPPB / list.size(), totalRPPE / list.size() ) ); |
<|file_name|>convert.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package yaml
import (
"encoding/json"
"fmt"
"gopkg.in/yaml.v3"
)
// ToYAML converts some object that serializes to JSON into a YAML node tree.
// It's useful since it pays attention to JSON tags, unlike yaml.Unmarshal or
// yaml.Node.Decode.
func ToYAML(rawObj interface{}) (*yaml.Node, error) {
if rawObj == nil {
return &yaml.Node{Kind: yaml.ScalarNode, Value: "null", Tag: "!!null"}, nil
}
rawJSON, err := json.Marshal(rawObj)
if err != nil {
return nil, fmt.Errorf("failed to marshal object: %v", err)
}<|fim▁hole|> var out yaml.Node
if err := yaml.Unmarshal(rawJSON, &out); err != nil {
return nil, fmt.Errorf("unable to unmarshal marshalled object: %v", err)
}
return &out, nil
}
// changeAll invokes cb on root and then, recursively, on every node in
// root's subtree (pre-order traversal).
func changeAll(root *yaml.Node, cb func(*yaml.Node)) {
	cb(root)
	for i := range root.Content {
		changeAll(root.Content[i], cb)
	}
}
// SetStyle sets the style for all nodes in the given
// node tree to the given style.
func SetStyle(root *yaml.Node, style yaml.Style) {
changeAll(root, func(node *yaml.Node) {
node.Style = style
})
}<|fim▁end|> | |
<|file_name|>uniprot_parser_v01.py<|end_file_name|><|fim▁begin|>__author__ = 'nicolas'
# coding=utf-8
from os.path import expanduser
from ordereddict import OrderedDict
from Bio import SwissProt
import time
import MySQLdb as mdb
"""
Fuck!
from ordereddict import OrderedDict
import MySQLdb as mdb
dicc = {}
dictdebug_empty = OrderedDict()
dictdebug = dictdebug_empty
dictdebug['hola'] = 'chau'
print(dictdebug.items())
print(dictdebug_empty.items())
dictdebug_empty.clear()
print(dictdebug_empty.items())
print(dictdebug.items())
"""
# Record script start time (for optional runtime reporting at the end)
start_time = time.time()

# Script configuration.
# Fix: this section was garbled by a stray marker; the `database` and
# `tabla_cuentas` assignments (referenced below for the DB connection and
# the generated DDL/INSERTs) are restored here.
database = "ptmdb"
tabla_cuentas = "sprot_count1"   # amino-acid count table
tabla_ptms = "sprot_ptms1"       # PTM feature table
file_name = "uniprot_sprot.dat"
desde = 0          # first entry index to emit (allows resuming in parts)
hasta = 542783     # stop bound; SwissProt has ~542782 AC entries

# Connect to the database
con = mdb.connect('localhost', 'nicolas', passwd="nicolaslfp", db=database)
cur = con.cursor()
cur.execute("SELECT VERSION()")
cur.execute("USE " + database)
print("USE ptmdb;")

# Paths: the UniProt .dat input and the output file
uniprot_file = expanduser("~") + '/QB9_Files/' + file_name
output_file = expanduser("~") + '/QB9-git/QB9/resources/output.txt'
def count_amino_acids_ext(seq, alphabet='ABCDEFGHIJKLMNOPQRSTUVWXYZ'):
    """Count occurrences of each letter of `alphabet` in `seq`.

    Returns a fresh OrderedDict mapping letter -> count, preserving
    alphabet order (A..Z by default, matching the count-table columns).

    Fix: the previous implementation did `prot_dic2 = prot_dic`, which
    aliases -- not copies -- the module-level dict, so every call mutated
    the shared `prot_dic` in place and clobbered any previously returned
    result (the very pitfall the module's top docstring complains about).
    Building a new dict per call removes that shared state; the optional
    `alphabet` parameter also drops the dependency on the global.
    """
    return OrderedDict((aa, seq.count(aa)) for aa in alphabet)
# Alphabet of amino-acid letters whose per-sequence counts we will store
abc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
prot_dic = OrderedDict((k, 0) for k in abc)
# Interesting feature types
ptmrecords = ["MOD_RES", "LIPID", "CARBOHYD", "DISULFID", "CROSSLNK"]
# Non-experimental qualifiers for feature annotations
neqs = ["Probable", "Potential", "By similarity"]  # plus the implicit "Experimental"
# Column name -> MySQL type for the PTM table  # TODO: tidy this up
categories = OrderedDict()
categories['AC'] = "varchar(30) NOT NULL"  # accession number
categories['FT'] = "varchar(30) NOT NULL"  # feature key (MOD_RES, LIPID, ...)
categories['STATUS'] = "varchar(30) NOT NULL"  # Experimental / Probable / ...
categories['PTM'] = "varchar(100) NOT NULL"  # modification description
categories['FROM_RES'] = "varchar(10) NOT NULL"  # first residue position
categories['TO_RES'] = "varchar(10) NOT NULL"  # last residue position
categories['FROM_AA'] = "varchar(10) NOT NULL"  # target amino acid at FROM_RES
categories['TO_AA'] = "varchar(10) NOT NULL"  # target amino acid at TO_RES
categories['SQ'] = "text(45000) NOT NULL"  # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['LENGTH'] = "varchar(200) NOT NULL"  # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['ORG'] = "text(500) NOT NULL"  # organism
categories['OC'] = "varchar(30) NOT NULL"  # organism classification, domain only
categories['OX'] = "varchar(200) NOT NULL"  # taxonomic ID
categories['HO'] = "text(500)"  # host organism
categories['inumber'] = "varchar(200) NOT NULL"  # entry index (debug aid)
# categories['CC'] = "varchar(200)" # comments section, the "PTM" field is the interesting one
# categories['SQi'] = "varchar(200)" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
# Template record holding default values for every column we will extract
empty_data = OrderedDict()
for gato in categories:  # same keys as categories, placeholder default value
    empty_data[gato] = 'NOFT'
empty_data['FROM_RES'] = '?'
empty_data['TO_RES'] = '?'
empty_data['FROM_AA'] = '?'
empty_data['TO_AA'] = '?'
data = empty_data.copy()  # working record dict (a copy, so the template stays pristine)
print("DROP TABLE " + tabla_cuentas + ";")
print("DROP TABLE " + tabla_ptms + ";")
# Build and emit the DDL for the amino-acid count table
prot_dic_def_items = []
prot_dic_def = OrderedDict((k, 'SMALLINT') for k in abc)
for cat, value in prot_dic_def.items():  # join each column name with its type
    prot_dic_def_items.append(cat + ' ' + value)  # collected in a list
table_def = ', '.join(prot_dic_def_items)  # column definitions for the table
print("CREATE TABLE IF NOT EXISTS "
      + tabla_cuentas
      + " (AC VARCHAR(30) UNIQUE, OC_ID VARCHAR(30), LENGTH MEDIUMINT,"
      + table_def
      + ") ENGINE=InnoDB;")
print("commit;")
# con.commit()
# Build and emit the DDL for the PTM table
table_def_items = []  # list for the name/type concatenations
for cat, value in categories.items():  # join each column name with its type
    table_def_items.append(cat + ' ' + value)  # collected in the list
table_def_2 = ', '.join(table_def_items)  # column definitions for the table
print("CREATE TABLE IF NOT EXISTS " + tabla_ptms + " (" + table_def_2 + ") ENGINE=InnoDB;")
print("commit;")
# con.commit()
# Loop state and scratch lists reused across records
i = 0  # record counter
j = 0  # NOTE(review): never used below -- candidate for removal
ptm = ''  # current modification description
out = []  # feature keys of the current record
listap = []  # escaped PTM row values
listaq = []  # amino-acid counts as strings
listar = []  # escaped default row values (entries without interesting features)
olista = []  # host organism names
interes = []  # interesting feature keys present in the current record
with open(uniprot_file) as uniprot:  # opens the file and closes it at the end
    for record in SwissProt.parse(uniprot):  # iterate the UniProt records
        i += 1
        if i % 100 == 0:
            print("commit;")
        # Fresh copy of the defaults (not a reference to the empty template)
        data = empty_data.copy()
        # Per-entry fields shared by all PTMs of this protein; they must be
        # loaded in the same column order as the table and the INSERT
        data['AC'] = record.accessions[0]  # primary accession only
        data['SQ'] = record.sequence
        data['LENGTH'] = record.sequence_length  # TODO: check entries with > 999 residues
        data['ORG'] = record.organism  # the organism
        data['OC'] = record.organism_classification[0]  # its domain
        data['OX'] = record.taxonomy_id[0]  # its taxonomic ID
        del olista[:]
        if not record.host_organism:
            data['HO'] = 'No host'
        else:
            for o in record.host_organism:
                olista.append((o.split(";"))[0])
            data['HO'] = ', '.join(olista)  # host of the virus (or parasite?)
        data['inumber'] = str(i)  # debugging aid: shows how far the run got
        # Build and emit the INSERT with the amino-acid counts of this sequence
        del listaq[:]
        contenido_aa = count_amino_acids_ext(record.sequence)  # dict of AA -> count pairs
        for q in contenido_aa.itervalues():
            listaq.append(str(q))  # counts as strings
        sql_insert_values_q = ', '.join(listaq)
        if i >= desde:
            print("INSERT INTO " + tabla_cuentas + " VALUES ('"
                  + record.accessions[0] + "', '"
                  + record.organism_classification[0] + "', "
                  + str(record.sequence_length)
                  + ", " + sql_insert_values_q + ");")
        # Now the features: is there anything interesting here?
        features = record.features  # TODO: insert FTs in another table with OC, OX, OR...?
        del out[:]
        del interes[:]
        for a in range(0, len(features)):  # collect the feature keys in `out`
            out.append(features[a][0])
        interes = list(set(out).intersection(ptmrecords))  # interesting keys present here
        if interes:  # non-empty means there is something to load
            # TODO: avoid duplicated sequences, relate via AC?
            # Load each PTM into `data` (one emitted row per feature)
            for feature in features:
                if feature[0] in interes:  # only features with an interesting key
                    for tipo in interes:
                        if feature[0] in tipo:  # matching type: extract this feature
                            A = feature[1]  # from-residue (equals B for single-residue features)
                            B = feature[2]  # to-residue; unknown positions show as "?"
                            C = feature[3]  # description: PTM name plus status qualifier
                            D = feature[4]  # sometimes present; unused  # TODO: what is it?
                            # reset FT, FROM and TO
                            data['FT'] = 'NOFT'
                            data['FROM_RES'] = '?'
                            data['TO_RES'] = '?'
                            data['FROM_AA'] = '?'
                            data['TO_AA'] = '?'
                            # assign FT and the residue range
                            data['FT'] = feature[0]
                            data['FROM_RES'] = A
                            data['TO_RES'] = B
                            # reset PTM and STATUS
                            ptm = ''
                            data['PTM'] = 'NOFT'
                            data['STATUS'] = "Experimental"
                            # assign STATUS and PTM
                            if C:  # C carries the PTM name and the status qualifier
                                for neq in neqs:  # try each possible qualifier
                                    if neq in C:  # qualifier found in the description
                                        data['STATUS'] = neq
                                        C = C.replace('(' + neq + ")", '')  # strip "(qualifier)"
                                        C = C.replace(neq, '')
                                        # also strip it when it appears without parentheses
                                        break  # stop at the first qualifier found
                                ptm = ((C.split(" /"))[0].split(';')[0]). \
                                    rstrip(" ").rstrip(".").rstrip(" ")
                                # Some mods carry stable identifiers after "/" and extra
                                # notes after ";": drop both, then trim spaces and dots.
                                # CROSSLNK entries may carry extra annotations that make
                                # nearly identical mods look unique when counted.
                                # See http://web.expasy.org/docs/userman.html#FT_line
                            if tipo == 'DISULFID':  # disulfide bridges are always Cys-Cys
                                ptm = "S-cysteinyl 3-(oxidosulfanyl)alanine (Cys-Cys)"
                                data['FROM_AA'] = 'C'
                                data['TO_AA'] = 'C'
                            else:  # otherwise read the target residues off the sequence
                                # assign target residue (1-based positions in the FT line)
                                if A != '?':
                                    data['FROM_AA'] = data['SQ'][int(data['FROM_RES'])-1]
                                else:
                                    data['FROM_AA'] = '?'
                                if B != '?':
                                    data['TO_AA'] = data['SQ'][int(data['TO_RES'])-1]
                                else:
                                    data['TO_AA'] = '?'
                            if ptm.find("with") != -1:  # crosslink case: "... (with ...)"
                                ptm = ptm.split(" (with")[0].split(" (int")[0]  # trim partner info
                            data['PTM'] = ptm
                            del listap[:]
                            for p in data.itervalues():  # iterate the loaded values
                                listap.append(str(p).replace("'", "''"))  # escape ' as '' for SQL
                            sql_insert_values_p = '\'' + \
                                '\', \''.join(listap) + \
                                '\''
                            # values joined with commas, as they go in the INSERT
                            if i >= desde:  # allows emitting the dump in parts
                                print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
                                       % sql_insert_values_p).replace("-...", "").replace("\"", '').replace('.', ''))
                            # print("commit;")
                            # con.commit()
        else:
            # No interesting FT in this entry: emit the general data plus defaults
            del listar[:]
            for r in data.itervalues():
                listar.append(str(r).replace("'", "''"))
            sql_insert_values_r = '\'' + '\', \''.join(listar) + '\''
            if i >= desde:  # allows emitting the dump in parts
                print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
                       % sql_insert_values_r).replace("\"", '').replace('.', ''))
            # print("commit;")
            # con.commit()
        if i >= hasta:  # per UniProt the total number of sequence entries is 54247468
            # print("\n")
            # print(i)
            break
# The sequence counts 60 amino acids per line, in groups of 10 amino acids, beginning in position 6 of the line.
# http://www.uniprot.org/manual/
# General Annotation: cofactores, mass spectrometry data, PTM (complementario al MOD_RES y otras PTMs..?)
# Sequence Annotation (Features): Sites (cleavage sites?), non-standard residue,
# MOD_RES (excluye lipidos, crosslinks y glycanos), lipidación, puente disulfuro, cross-link, glycosylation
# todo consider PE "protein existence", KW contiene "glycoprotein" qué otros?
# todo también dentro de FT
# output.close()
# print('\n')
# print(time.time() - start_time)
# """
<|file_name|>uhd_fft_qt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
##################################################
# Gnuradio Python Flow Graph
# Title: UHD FFT Qt
# Author: Johannes Demel
# Generated: Wed Jan 29 13:51:16 2014
##################################################
from PyQt4 import Qt
from gnuradio import eng_notation
from gnuradio import gr
from gnuradio import qtgui
from gnuradio import uhd
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from optparse import OptionParser
import PyQt4.Qwt5 as Qwt
import sip
import sys
import threading
import time
class uhd_fft_qt(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "UHD FFT Qt")
Qt.QWidget.__init__(self)
self.setWindowTitle("UHD FFT Qt")
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "uhd_fft_qt")
self.restoreGeometry(self.settings.value("geometry").toByteArray())
##################################################
# Variables
##################################################
self.z_info = z_info = {"mboard_id":"id","mboard_serial":"serial","rx_serial":"rx","rx_subdev_name":"subname", "rx_subdev_spec":"spec","rx_antenna":"antenna"}
self.usrp_serial = usrp_serial = z_info["mboard_serial"]
self.usrp_id = usrp_id = z_info["mboard_id"]
self.db_spec = db_spec = z_info["rx_subdev_spec"]
self.db_serial = db_serial = z_info["rx_serial"]
self.db_name = db_name = z_info["rx_subdev_name"]
self.db_antenna = db_antenna = z_info["rx_antenna"]
self.catch_result = catch_result = uhd.tune_result()
self.usrp_type = usrp_type = "usrp2"
self.usrp_text = usrp_text = usrp_id + " (" + usrp_serial + ")"
self.master_clock_rate = master_clock_rate = 40e6
self.db_text = db_text = db_name + " (" + db_serial + " ," + db_spec + " ," + db_antenna + ")"
self.actual_rf = actual_rf = catch_result.actual_rf_freq
self.actual_dsp = actual_dsp = catch_result.actual_dsp_freq
self.uhd_version = uhd_version = uhd.get_version_string()
self.samp_rate = samp_rate = 10e6
self.rf_label = rf_label = actual_rf
self.myzero = myzero = 0
self.gain = gain = 50
self.dsp_label = dsp_label = actual_dsp
self.dev_args = dev_args = "type=" + usrp_type + ",master_clock_rate=" + str(master_clock_rate) + ", recv_buff_size=32768e6"
self.center_freq = center_freq = 900e6
self.a_usrp = a_usrp = usrp_text
self.a_db_label = a_db_label = db_text
##################################################
# Blocks
##################################################
self._samp_rate_tool_bar = Qt.QToolBar(self)
self._samp_rate_tool_bar.addWidget(Qt.QLabel("Sample Rate"+": "))
self._samp_rate_line_edit = Qt.QLineEdit(str(self.samp_rate))
self._samp_rate_tool_bar.addWidget(self._samp_rate_line_edit)
self._samp_rate_line_edit.returnPressed.connect(
lambda: self.set_samp_rate(eng_notation.str_to_num(self._samp_rate_line_edit.text().toAscii())))
self.top_grid_layout.addWidget(self._samp_rate_tool_bar, 4, 0, 1, 3)
self._gain_layout = Qt.QVBoxLayout()
self._gain_tool_bar = Qt.QToolBar(self)
self._gain_layout.addWidget(self._gain_tool_bar)
self._gain_tool_bar.addWidget(Qt.QLabel("Gain"+": "))
self._gain_counter = Qwt.QwtCounter()
self._gain_counter.setRange(0, 100, 1)
self._gain_counter.setNumButtons(2)
self._gain_counter.setValue(self.gain)
self._gain_tool_bar.addWidget(self._gain_counter)
self._gain_counter.valueChanged.connect(self.set_gain)
self._gain_slider = Qwt.QwtSlider(None, Qt.Qt.Horizontal, Qwt.QwtSlider.BottomScale, Qwt.QwtSlider.BgSlot)
self._gain_slider.setRange(0, 100, 1)
self._gain_slider.setValue(self.gain)
self._gain_slider.setMinimumWidth(200)
self._gain_slider.valueChanged.connect(self.set_gain)
self._gain_layout.addWidget(self._gain_slider)
self.top_grid_layout.addLayout(self._gain_layout, 5, 0, 1, 5)
self._center_freq_tool_bar = Qt.QToolBar(self)
self._center_freq_tool_bar.addWidget(Qt.QLabel("Center Frequency"+": "))
self._center_freq_line_edit = Qt.QLineEdit(str(self.center_freq))
self._center_freq_tool_bar.addWidget(self._center_freq_line_edit)
self._center_freq_line_edit.returnPressed.connect(
lambda: self.set_center_freq(eng_notation.str_to_num(self._center_freq_line_edit.text().toAscii())))
self.top_grid_layout.addWidget(self._center_freq_tool_bar, 4, 3, 1, 2)
self.usrp_dev = uhd.usrp_source(
device_addr=dev_args,
stream_args=uhd.stream_args(
cpu_format="fc32",
args="calibration-file=/home/johannes/tests/calibration-rx_B210_150N15_FE-RX2_integrated_TX-RX_1387571801.csv",
channels=range(1),
),
)
self.usrp_dev.set_samp_rate(samp_rate)
self.usrp_dev.set_center_freq(center_freq, 0)
self.usrp_dev.set_gain(gain, 0)
self.usrp_dev.set_antenna("RX2", 0)
self.z_info = val = self.usrp_dev.get_usrp_info(0)
def _z_info_probe():
notset = True
while notset:
try:
self.set_z_info(self.z_info)
notset = False
except:
notset = True
time.sleep(1.0/10.0)
self._z_info_thread = threading.Thread(target=_z_info_probe)
self._z_info_thread.daemon = True
self._z_info_thread.start()
self._uhd_version_tool_bar = Qt.QToolBar(self)
self._uhd_version_tool_bar.addWidget(Qt.QLabel("UHD"+": "))
self._uhd_version_label = Qt.QLabel(str(self.uhd_version))
self._uhd_version_tool_bar.addWidget(self._uhd_version_label)
self.top_grid_layout.addWidget(self._uhd_version_tool_bar, 3, 0, 1, 1)
self._rf_label_tool_bar = Qt.QToolBar(self)
self._rf_label_tool_bar.addWidget(Qt.QLabel("RF Freq"+": "))
self._rf_label_label = Qt.QLabel(str(self.rf_label))
self._rf_label_tool_bar.addWidget(self._rf_label_label)
self.top_grid_layout.addWidget(self._rf_label_tool_bar, 3, 3, 1, 1)
self.qtgui_sink_x_0 = qtgui.sink_c(
1024, #fftsize
firdes.WIN_BLACKMAN_hARRIS, #wintype
center_freq, #fc
samp_rate, #bw
"QT GUI Plot", #name
True, #plotfreq
True, #plotwaterfall
True, #plottime
True, #plotconst
)
self.qtgui_sink_x_0.set_update_time(1.0/10)
self._qtgui_sink_x_0_win = sip.wrapinstance(self.qtgui_sink_x_0.pyqwidget(), Qt.QWidget)
self.top_grid_layout.addWidget(self._qtgui_sink_x_0_win, 0, 0, 3, 5)
self._dsp_label_tool_bar = Qt.QToolBar(self)
self._dsp_label_tool_bar.addWidget(Qt.QLabel("DSP Freq"+": "))
self._dsp_label_label = Qt.QLabel(str(self.dsp_label))
self._dsp_label_tool_bar.addWidget(self._dsp_label_label)
self.top_grid_layout.addWidget(self._dsp_label_tool_bar, 3, 4, 1, 1)
self.catch_result = val = self.usrp_dev.set_center_freq(center_freq, myzero)
def _catch_result_probe():
notset = True
while notset:
try:
self.set_catch_result(self.catch_result)
notset = False
except:
notset = True
time.sleep(1.0/10.0)
self._catch_result_thread = threading.Thread(target=_catch_result_probe)
self._catch_result_thread.daemon = True
self._catch_result_thread.start()
self._a_usrp_tool_bar = Qt.QToolBar(self)
self._a_usrp_tool_bar.addWidget(Qt.QLabel("USRP"+": "))
self._a_usrp_label = Qt.QLabel(str(self.a_usrp))
self._a_usrp_tool_bar.addWidget(self._a_usrp_label)
self.top_grid_layout.addWidget(self._a_usrp_tool_bar, 3, 1, 1, 1)
self._a_db_label_tool_bar = Qt.QToolBar(self)
self._a_db_label_tool_bar.addWidget(Qt.QLabel("Daughterboard"+": "))
self._a_db_label_label = Qt.QLabel(str(self.a_db_label))
self._a_db_label_tool_bar.addWidget(self._a_db_label_label)
self.top_grid_layout.addWidget(self._a_db_label_tool_bar, 3, 2, 1, 1)
##################################################
# Connections
##################################################
self.connect((self.usrp_dev, 0), (self.qtgui_sink_x_0, 0))
# QT sink close method reimplementation
    def closeEvent(self, event):
        """Qt close handler: persist the window geometry to QSettings, then accept."""
        self.settings = Qt.QSettings("GNU Radio", "uhd_fft_qt")
        self.settings.setValue("geometry", self.saveGeometry())
        event.accept()
    def get_z_info(self):
        """Return the cached usrp_info dict for channel 0."""
        return self.z_info
    def set_z_info(self, z_info):
        """Store the USRP info dict and fan its fields out to the GUI labels."""
        self.z_info = z_info
        # Daughterboard details come from the rx_* keys of get_usrp_info().
        self.set_db_name(self.z_info["rx_subdev_name"])
        self.set_db_antenna(self.z_info["rx_antenna"])
        self.set_db_serial(self.z_info["rx_serial"])
        self.set_db_spec(self.z_info["rx_subdev_spec"])
        # Motherboard identity drives the "USRP" summary label.
        self.set_usrp_serial(self.z_info["mboard_serial"])
        self.set_usrp_id(self.z_info["mboard_id"])
    # GRC-generated variable accessors: each setter rebuilds the composite label
    # text that depends on it ("id (serial)" for the USRP, the db summary string
    # for daughterboard fields).
    def get_usrp_serial(self):
        return self.usrp_serial
    def set_usrp_serial(self, usrp_serial):
        self.usrp_serial = usrp_serial
        self.set_usrp_text(self.usrp_id + " (" + self.usrp_serial + ")")
    def get_usrp_id(self):
        return self.usrp_id
    def set_usrp_id(self, usrp_id):
        self.usrp_id = usrp_id
        self.set_usrp_text(self.usrp_id + " (" + self.usrp_serial + ")")
    def get_db_spec(self):
        return self.db_spec
    def set_db_spec(self, db_spec):
        self.db_spec = db_spec
        self.set_db_text(self.db_name + " (" + self.db_serial + " ," + self.db_spec + " ," + self.db_antenna + ")")
    def get_db_serial(self):
        return self.db_serial
    def set_db_serial(self, db_serial):
        self.db_serial = db_serial
        self.set_db_text(self.db_name + " (" + self.db_serial + " ," + self.db_spec + " ," + self.db_antenna + ")")
    def get_db_name(self):
        return self.db_name
    def set_db_name(self, db_name):
        self.db_name = db_name
        self.set_db_text(self.db_name + " (" + self.db_serial + " ," + self.db_spec + " ," + self.db_antenna + ")")
    def get_db_antenna(self):
        return self.db_antenna
    def set_db_antenna(self, db_antenna):
        self.db_antenna = db_antenna
        self.set_db_text(self.db_name + " (" + self.db_serial + " ," + self.db_spec + " ," + self.db_antenna + ")")
    def get_catch_result(self):
        return self.catch_result
    def set_catch_result(self, catch_result):
        # catch_result holds the tune result returned by set_center_freq();
        # split it into the RF and DSP frequency readouts.
        self.catch_result = catch_result
        self.set_actual_rf(self.catch_result.actual_rf_freq)
        self.set_actual_dsp(self.catch_result.actual_dsp_freq)
    def get_usrp_type(self):
        return self.usrp_type
    def set_usrp_type(self, usrp_type):
        # Changing the device type regenerates the UHD device-args string.
        self.usrp_type = usrp_type
        self.set_dev_args("type=" + self.usrp_type + ",master_clock_rate=" + str(self.master_clock_rate) + ", recv_buff_size=32768e6")
    def get_usrp_text(self):
        return self.usrp_text
    def set_usrp_text(self, usrp_text):
        self.usrp_text = usrp_text
        self.set_a_usrp(self.usrp_text)
    def get_master_clock_rate(self):
        return self.master_clock_rate
    def set_master_clock_rate(self, master_clock_rate):
        self.master_clock_rate = master_clock_rate
        self.set_dev_args("type=" + self.usrp_type + ",master_clock_rate=" + str(self.master_clock_rate) + ", recv_buff_size=32768e6")
    def get_db_text(self):
        return self.db_text
    def set_db_text(self, db_text):
        self.db_text = db_text
        self.set_a_db_label(self.db_text)
    def get_actual_rf(self):
        return self.actual_rf
    def set_actual_rf(self, actual_rf):
        self.actual_rf = actual_rf
        self.set_rf_label(self.actual_rf)
    def get_actual_dsp(self):
        return self.actual_dsp
    def set_actual_dsp(self, actual_dsp):
        self.actual_dsp = actual_dsp
        self.set_dsp_label(self.actual_dsp)
    def get_uhd_version(self):
        return self.uhd_version
    def set_uhd_version(self, uhd_version):
        self.uhd_version = uhd_version
        self._uhd_version_label.setText(str(self.uhd_version))
    def get_samp_rate(self):
        return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self._samp_rate_line_edit.setText(eng_notation.num_to_str(self.samp_rate))
self.qtgui_sink_x_0.set_frequency_range(self.center_freq, self.samp_rate)<|fim▁hole|> return self.rf_label
    def set_rf_label(self, rf_label):
        self.rf_label = rf_label
        self._rf_label_label.setText(eng_notation.num_to_str(self.rf_label))
    def get_myzero(self):
        return self.myzero
    def set_myzero(self, myzero):
        # NOTE(review): myzero appears to be the channel index (0) used for
        # tuning; setting it re-tunes and refreshes the labels — confirm intent.
        self.myzero = myzero
        self.set_catch_result(self.usrp_dev.set_center_freq(self.center_freq, self.myzero))
    def get_gain(self):
        return self.gain
    def set_gain(self, gain):
        # Keep counter widget, slider widget and hardware gain in sync.
        self.gain = gain
        self._gain_counter.setValue(self.gain)
        self._gain_slider.setValue(self.gain)
        self.usrp_dev.set_gain(self.gain, 0)
    def get_dsp_label(self):
        return self.dsp_label
    def set_dsp_label(self, dsp_label):
        self.dsp_label = dsp_label
        self._dsp_label_label.setText(eng_notation.num_to_str(self.dsp_label))
    def get_dev_args(self):
        return self.dev_args
    def set_dev_args(self, dev_args):
        self.dev_args = dev_args
    def get_center_freq(self):
        return self.center_freq
    def set_center_freq(self, center_freq):
        # Re-tune the device, update the frequency text box, and re-center the
        # FFT/waterfall display around the new frequency.
        self.center_freq = center_freq
        self.set_catch_result(self.usrp_dev.set_center_freq(self.center_freq, self.myzero))
        self._center_freq_line_edit.setText(eng_notation.num_to_str(self.center_freq))
        self.qtgui_sink_x_0.set_frequency_range(self.center_freq, self.samp_rate)
        self.usrp_dev.set_center_freq(self.center_freq, 0)
    def get_a_usrp(self):
        return self.a_usrp
    def set_a_usrp(self, a_usrp):
        self.a_usrp = a_usrp
        self._a_usrp_label.setText(repr(self.a_usrp))
    def get_a_db_label(self):
        return self.a_db_label
    def set_a_db_label(self, a_db_label):
        self.a_db_label = a_db_label
        self._a_db_label_label.setText(str(self.a_db_label))
if __name__ == '__main__':
    import ctypes
    import os
    if os.name == 'posix':
        try:
            # Qt on X11 needs XInitThreads for thread-safe X calls.
            x11 = ctypes.cdll.LoadLibrary('libX11.so')
            x11.XInitThreads()
        except:
            # Best-effort: the GUI may still work without it.
            print "Warning: failed to XInitThreads()"
    parser = OptionParser(option_class=eng_option, usage="%prog: [options]")
    (options, args) = parser.parse_args()
    qapp = Qt.QApplication(sys.argv)
    tb = uhd_fft_qt()
    tb.start()
    tb.show()
    def quitting():
        # Stop and join the GNU Radio flowgraph before the Qt app exits.
        tb.stop()
        tb.wait()
    qapp.connect(qapp, Qt.SIGNAL("aboutToQuit()"), quitting)
    qapp.exec_()
tb = None #to clean up Qt widgets<|fim▁end|> | self.usrp_dev.set_samp_rate(self.samp_rate)
def get_rf_label(self): |
<|file_name|>sonnet_predict_bed.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2017 Calico LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import json
import os
import pdb
import pickle
import sys
import h5py
import numpy as np
import pandas as pd
import pysam
import pyBigWig
import tensorflow as tf
if tf.__version__[0] == '1':
tf.compat.v1.enable_eager_execution()
from basenji import bed
from basenji import dna_io
from basenji import seqnn
from basenji import stream
'''
basenji_predict_bed.py
Predict sequences from a BED file.
'''
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <model_file> <bed_file>'
parser = OptionParser(usage)
parser.add_option('-b', dest='bigwig_indexes',
default=None, help='Comma-separated list of target indexes to write BigWigs')
parser.add_option('-e', dest='embed_layer',
default=None, type='int',
help='Embed sequences using the specified layer index.')
parser.add_option('-f', dest='genome_fasta',
default=None,
help='Genome FASTA for sequences [Default: %default]')
parser.add_option('-g', dest='genome_file',
default=None,
help='Chromosome length information [Default: %default]')
parser.add_option('-l', dest='site_length',
default=None, type='int',
help='Prediction site length. [Default: model seq_length]')
parser.add_option('-o', dest='out_dir',
default='pred_out',
help='Output directory [Default: %default]')
# parser.add_option('--plots', dest='plots',
# default=False, action='store_true',
# help='Make heatmap plots [Default: %default]')
parser.add_option('-p', dest='processes',
default=None, type='int',
help='Number of processes, passed by multi script')
parser.add_option('--rc', dest='rc',
default=False, action='store_true',
help='Ensemble forward and reverse complement predictions [Default: %default]')
parser.add_option('-s', dest='sum',
default=False, action='store_true',
help='Sum site predictions [Default: %default]')
parser.add_option('--shifts', dest='shifts',
default='0',
help='Ensemble prediction shifts [Default: %default]')
parser.add_option('--species', dest='species',
default='human')
parser.add_option('-t', dest='targets_file',
default=None, type='str',
help='File specifying target indexes and labels in table format')
(options, args) = parser.parse_args()
if len(args) == 2:
model_file = args[0]
bed_file = args[1]
elif len(args) == 4:
# multi worker
options_pkl_file = args[0]
model_file = args[1]
bed_file = args[2]
worker_index = int(args[3])
# load options
options_pkl = open(options_pkl_file, 'rb')
options = pickle.load(options_pkl)
options_pkl.close()
# update output directory
options.out_dir = '%s/job%d' % (options.out_dir, worker_index)
else:
parser.error('Must provide parameter and model files and BED file')
if not os.path.isdir(options.out_dir):
os.mkdir(options.out_dir)
options.shifts = [int(shift) for shift in options.shifts.split(',')]
if options.bigwig_indexes is not None:
options.bigwig_indexes = [int(bi) for bi in options.bigwig_indexes.split(',')]
else:
options.bigwig_indexes = []
if len(options.bigwig_indexes) > 0:
bigwig_dir = '%s/bigwig' % options.out_dir
if not os.path.isdir(bigwig_dir):
os.mkdir(bigwig_dir)
#################################################################
# read parameters and collet target information
if options.targets_file is None:
target_slice = None
else:
targets_df = pd.read_table(options.targets_file, index_col=0)
target_slice = targets_df.index
#################################################################
# setup model
seqnn_model = tf.saved_model.load(model_file).model
# query num model targets
seq_length = seqnn_model.predict_on_batch.input_signature[0].shape[1]
null_1hot = np.zeros((1,seq_length,4))
null_preds = seqnn_model.predict_on_batch(null_1hot)
null_preds = null_preds[options.species].numpy()
_, preds_length, preds_depth = null_preds.shape
# hack sizes
preds_window = 128
seq_crop = (seq_length - preds_length*preds_window) // 2
#################################################################
# sequence dataset
if options.site_length is None:
options.site_length = preds_window*preds_length
print('site_length: %d' % options.site_length)
# construct model sequences
model_seqs_dna, model_seqs_coords = bed.make_bed_seqs(
bed_file, options.genome_fasta,
seq_length, stranded=False)
# construct site coordinates
site_seqs_coords = bed.read_bed_coords(bed_file, options.site_length)
# filter for worker SNPs
if options.processes is not None:
worker_bounds = np.linspace(0, len(model_seqs_dna), options.processes+1, dtype='int')
model_seqs_dna = model_seqs_dna[worker_bounds[worker_index]:worker_bounds[worker_index+1]]
model_seqs_coords = model_seqs_coords[worker_bounds[worker_index]:worker_bounds[worker_index+1]]
site_seqs_coords = site_seqs_coords[worker_bounds[worker_index]:worker_bounds[worker_index+1]]
num_seqs = len(model_seqs_dna)
#################################################################
# setup output
assert(preds_length % 2 == 0)
preds_mid = preds_length // 2
assert(options.site_length % preds_window == 0)
site_preds_length = options.site_length // preds_window
assert(site_preds_length % 2 == 0)
site_preds_start = preds_mid - site_preds_length//2
site_preds_end = site_preds_start + site_preds_length
# initialize HDF5
out_h5_file = '%s/predict.h5' % options.out_dir
if os.path.isfile(out_h5_file):
os.remove(out_h5_file)
out_h5 = h5py.File(out_h5_file, 'w')
# create predictions
if options.sum:
out_h5.create_dataset('preds', shape=(num_seqs, preds_depth), dtype='float16')
else:
out_h5.create_dataset('preds', shape=(num_seqs, site_preds_length, preds_depth), dtype='float16')
# store site coordinates
site_seqs_chr, site_seqs_start, site_seqs_end = zip(*site_seqs_coords)
site_seqs_chr = np.array(site_seqs_chr, dtype='S')
site_seqs_start = np.array(site_seqs_start)
site_seqs_end = np.array(site_seqs_end)
out_h5.create_dataset('chrom', data=site_seqs_chr)<|fim▁hole|> out_h5.create_dataset('end', data=site_seqs_end)
#################################################################
# predict scores, write output
# define sequence generator
def seqs_gen():
for seq_dna in model_seqs_dna:
yield dna_io.dna_1hot(seq_dna)
# initialize predictions stream
preds_stream = stream.PredStreamSonnet(seqnn_model, seqs_gen(),
rc=options.rc, shifts=options.shifts, species=options.species)
for si in range(num_seqs):
preds_seq = preds_stream[si]
# slice site
preds_site = preds_seq[site_preds_start:site_preds_end,:]
# write
if options.sum:
out_h5['preds'][si] = preds_site.sum(axis=0)
else:
out_h5['preds'][si] = preds_site
# write bigwig
for ti in options.bigwig_indexes:
bw_file = '%s/s%d_t%d.bw' % (bigwig_dir, si, ti)
bigwig_write(preds_seq[:,ti], model_seqs_coords[si], bw_file,
options.genome_file, seq_crop)
# close output HDF5
out_h5.close()
def bigwig_open(bw_file, genome_file):
  """Open a BigWig file for writing and write its chromosome-size header.

  Args:
    bw_file: Path of the BigWig file to create.
    genome_file: Whitespace-delimited file of "<chrom> <length>" lines.

  Returns:
    An open pyBigWig handle, ready for addEntries(); the caller must close it.
  """
  bw_out = pyBigWig.open(bw_file, 'w')

  # Read chromosome lengths inside a context manager so the genome file
  # handle is always closed (the original left it open).
  chrom_sizes = []
  with open(genome_file) as genome_open:
    for line in genome_open:
      a = line.split()
      chrom_sizes.append((a[0], int(a[1])))

  bw_out.addHeader(chrom_sizes)
  return bw_out
def bigwig_write(signal, seq_coords, bw_file, genome_file, seq_crop=0):
  """Write one signal track to a BigWig file across the given region.

  Args:
    signal: per-bin signal values (length L).
    seq_coords: (chrom, start, end) of the full model sequence.
    bw_file: output BigWig filename.
    genome_file: chromosome lengths file.
    seq_crop: bases cropped from each side of the sequence.
  """
  # Open the output (writes the chromosome header).
  bw_out = bigwig_open(bw_file, genome_file)

  chrm, seq_start, seq_end = seq_coords
  num_bins = len(signal)

  # Each prediction bin covers an equal slice of the cropped span, so the
  # entry coordinates form an arithmetic progression.
  bin_width = (seq_end - seq_start - 2 * seq_crop) // num_bins
  first_pos = seq_start + seq_crop
  entry_starts = [first_pos + bi * bin_width for bi in range(num_bins)]
  entry_ends = [pos + bin_width for pos in entry_starts]

  bw_out.addEntries(
      [chrm] * num_bins,
      entry_starts,
      ends=entry_ends,
      values=[float(s) for s in signal])
  bw_out.close()
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()<|fim▁end|> | out_h5.create_dataset('start', data=site_seqs_start) |
<|file_name|>HarmonyApplication.java<|end_file_name|><|fim▁begin|>package org.mariotaku.harmony.app;
import android.app.Application;
import android.content.Context;
import com.nostra13.universalimageloader.cache.disc.impl.UnlimitedDiscCache;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;
import com.nostra13.universalimageloader.core.download.BaseImageDownloader;
import com.nostra13.universalimageloader.core.download.HttpClientImageDownloader;
import java.io.File;
import org.mariotaku.harmony.Constants;
import org.mariotaku.harmony.util.ImageLoaderWrapper;
import org.mariotaku.harmony.util.ImageMemoryCache;
import org.mariotaku.harmony.util.URLFileNameGenerator;
<|fim▁hole|> private ImageLoader mImageLoader;
	/**
	 * Returns the lazily-initialized, process-wide {@link ImageLoader}.
	 * On first use, configures a disc cache for album art under the app
	 * cache directory, a bounded memory cache, and an HTTP downloader.
	 * NOTE(review): the lazy init is not synchronized — fine if only called
	 * from the main thread; confirm callers.
	 */
	public ImageLoader getImageLoader() {
		if (mImageLoader != null) return mImageLoader;
		// Album art disc cache lives under the application cache directory.
		final File cache_dir = new File(getCacheDir(), CACHE_DIR_NAME_ALBUMART);
		if (!cache_dir.exists()) {
			cache_dir.mkdirs();
		}
		final ImageLoader loader = ImageLoader.getInstance();
		final ImageLoaderConfiguration.Builder cb = new ImageLoaderConfiguration.Builder(this);
		cb.threadPoolSize(8);
		cb.memoryCache(new ImageMemoryCache(40));
		// File names in the disc cache are derived from the source URL.
		cb.discCache(new UnlimitedDiscCache(cache_dir, new URLFileNameGenerator()));
		cb.imageDownloader(new BaseImageDownloader(this));
		loader.init(cb.build());
		return mImageLoader = loader;
	}
public ImageLoaderWrapper getImageLoaderWrapper() {
if (mImageLoaderWrapper != null) return mImageLoaderWrapper;
return mImageLoaderWrapper = new ImageLoaderWrapper(getImageLoader());
}
public static HarmonyApplication getInstance(final Context context) {
final Context app = context != null ? context.getApplicationContext() : null;
return app instanceof HarmonyApplication ? (HarmonyApplication) app : null;
}
}<|fim▁end|> | public class HarmonyApplication extends Application implements Constants {
private ImageLoaderWrapper mImageLoaderWrapper; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'
const path = require('path')
const Generator = require('yeoman-generator')
const chalk = require('chalk')
const _ = require('lodash')
_.templateSettings.interpolate = /<%=([\s\S]+?)%>/g<|fim▁hole|> this.props = {}
},
paths: function () {
this.sourceRoot(path.normalize(path.join(__dirname, '/../../templates')))
},
writing: function () {
const props = this.config.getAll()
const newVersion = require('../../package.json').version
if (!this.fs.exists(this.destinationPath('.yo-rc.json'))) {
this.log(chalk.red('Refusing to update, a .yo-rc.json file is required.'))
return
}
const cpTpl = (from, to) => {
this.fs.copyTpl(
this.templatePath(from),
this.destinationPath(to),
props
)
}
const cp = (from, to) => {
this.fs.copy(
this.templatePath(from),
this.destinationPath(to)
)
}
const rm = (p) => {
this.fs.delete(this.destinationPath(p))
}
const pkgTpl = _.template(
this.fs.read(this.templatePath('_package.json'))
)
const pkg = JSON.parse(pkgTpl(props))
// No longer using eslint
pkg.dependencies['eslint'] = undefined
pkg.dependencies['eslint-config-ivantage'] = undefined
pkg.dependencies['eslint-loader'] = undefined
pkg.devDependencies['eslint'] = undefined
pkg.devDependencies['eslint-config-ivantage'] = undefined
pkg.devDependencies['eslint-loader'] = undefined
// React update 15.4 --> 15.5
pkg.devDependencies['react-addons-shallow-compare'] = undefined
pkg.devDependencies['react-addons-test-utils'] = undefined
pkg.devDependencies['prop-types'] = undefined
// Removed postcss plugins
pkg.devDependencies['postcss-custom-properties'] = undefined
// @todo - extendJSON will merge properties, for some things
// (devDependencies) we probably just want to set them so as to not carry
// forward cruft we don't need anymore.
this.fs.extendJSON(this.destinationPath('package.json'), _.pick(pkg, [
'name',
'main',
'description',
'scripts',
'license',
'jest',
'peerDependencies',
'devDependencies'
]))
cpTpl('webpack.config.js', 'webpack.config.js')
if (props.useDotFiles) {
cp('_editorconfig', '.editorconfig')
cp('_gitignore', '.gitignore')
cp('_babelrc', '.babelrc')
} else {
[
'.editorconfig',
'.gitignore',
'.babelrc'
].forEach(rm)
}
// Standard over eslint!
rm('.eslintrc.js')
// No longer using explicit mock files
rm('src/mocks')
this.config.set('generatorVersion', newVersion)
},
end: function () {
const msg = chalk.green('Done.')
this.log(msg)
}
})<|fim▁end|> |
module.exports = Generator.extend({
initializing: function () { |
<|file_name|>CommonProxy.java<|end_file_name|><|fim▁begin|>package com.technode.terrafirmastuff.core.proxy;
import com.bioxx.tfc.api.Tools.ChiselManager;
import com.technode.terrafirmastuff.handler.ServerTickHandler;
import com.technode.terrafirmastuff.tileentity.TEOilLampMod;
import com.technode.terrafirmastuff.tools.ChiselMode_Chiseled;
import com.technode.terrafirmastuff.tools.ChiselMode_Circle;<|fim▁hole|>import cpw.mods.fml.common.event.FMLInterModComms;
import cpw.mods.fml.common.registry.GameRegistry;
public abstract class CommonProxy
{
public void registerChiselModes()
{
ChiselManager.getInstance().addChiselMode(new ChiselMode_Paver("Paver"));
ChiselManager.getInstance().addChiselMode(new ChiselMode_Circle("Circle"));
ChiselManager.getInstance().addChiselMode(new ChiselMode_Chiseled("Chiseled"));
ChiselManager.getInstance().addChiselMode(new ChiselMode_Pillar("Pillar"));
}
public void hideNEIItems() {}
public void registerRenderInformation()
{
// NOOP on server
}
public void registerWailaClasses()
{
FMLInterModComms.sendMessage("Waila", "register", "com.technode.terrafirmastuff.core.compat.WAILADataMod.onCallbackRegister");// Block
}
public void registerTileEntities(boolean b)
{
GameRegistry.registerTileEntity(TEOilLampMod.class, "Oil Lamp Mod");
}
public void registerTickHandler()
{
FMLCommonHandler.instance().bus().register(new ServerTickHandler());
}
}<|fim▁end|> | import com.technode.terrafirmastuff.tools.ChiselMode_Paver;
import com.technode.terrafirmastuff.tools.ChiselMode_Pillar;
import cpw.mods.fml.common.FMLCommonHandler; |
<|file_name|>test_calc.py<|end_file_name|><|fim▁begin|>import pytest
from calc import INTEGER, EOF, PLUS, Calc, CalcError
def test_calc_raises_error_on_invalid_tokens():
"""
Test that invalid tokens cause a ``CalcError`` and that the exception stack
trace contains useful information.
"""
input_text = "lumberjack" # Now with 100% more Monty Python references.
calc = Calc(text=input_text)
with pytest.raises(CalcError) as err:
calc.parse()
assert "Invalid token at position 0" in str(err.value)
def test_calc_raises_error_on_unexepected_syntax():
"""
Test that unexpected syntax causes a ``CalcError`` and that the exception
stack trace contains useful information.
"""
input_text = "+"
calc = Calc(text=input_text)
with pytest.raises(CalcError) as err:
calc.parse()
assert "Expected INTEGER at position 1, found PLUS" in str(err.value)
def test_calc_finds_eof_token_at_end_of_line():
"""
Test that, upon finding an end of line, a :class:`Calc` correctly tokenizes
an EOF :class:`Token`.
"""
input_text = ""
calc = Calc(text=input_text)
assert calc._next_token().type == EOF
<|fim▁hole|> :class:`Calc` will correctly tokenize an EOF :class:`Token`.
"""
input_text = "1"
calc = Calc(text=input_text)
token = calc._next_token()
assert token.type == INTEGER
assert token.value == 1
assert calc._next_token().type == EOF
def test_calc_can_consume_valid_token():
"""Test that a :class:`Calc` can consume a valid :class:`Token`."""
input_text = "1+1"
calc = Calc(text=input_text)
# Note: Since _next_token advances position one cannot simply
# >>> calc.current_token = Token(INTEGER, 1)
# The _next_token method MUST be called or this test will fail.
calc.current_token = calc._next_token()
calc._consume_token(INTEGER)
assert calc.current_token.type == PLUS
def test_parse_supports_addition():
"""Test that a :class:`Calc` can correctly parse the addition operation."""
# Note: This function name was briefly duplicated and therefore didn't run.
input_text = "1+1"
calc = Calc(text=input_text)
assert calc.parse() == 2
def test_parse_sets_eof():
"""
Test that successfully parsing an arithmetic expression sets the
``current_token`` attribute of a :class:`Calc` to EOF.
"""
input_text = "1+1"
calc = Calc(text=input_text)
calc.parse()
assert calc.current_token.type == EOF
def test_parse_raises_error_on_invalid_expression():
"""
Test that attempting to parse an invalid expression allows a ``CalcError``
to propagate correctly.
"""
input_text = "+1"
calc = Calc(text=input_text)
with pytest.raises(CalcError):
calc.parse()<|fim▁end|> | def test_calc_finds_eof_token_after_int():
"""
Test that after consuming a solitary an INTEGER :class:`Token` a |
<|file_name|>azure_rm_managed_disk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright (c) 2017 Bruno Medina Bolanos Cacho <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_managed_disk
version_added: "2.4"
short_description: Manage Azure Manage Disks
description:
- Create, update and delete an Azure Managed Disk
options:
resource_group:
description:
- Name of a resource group where the managed disk exists or will be created.
required: true
name:
description:
- Name of the managed disk.
required: true
state:
description:
- Assert the state of the managed disk. Use C(present) to create or update a managed disk and C(absent) to delete a managed disk.
default: present
choices:
- absent
- present
location:
description:
- Valid Azure location. Defaults to location of the resource group.
storage_account_type:
description:
- "Type of storage for the managed disk: C(Standard_LRS) or C(Premium_LRS). If not specified the disk is created C(Standard_LRS)."
choices:
- Standard_LRS
- Premium_LRS
create_option:
description:
- "Allowed values: empty, import, copy.
- C(import) from a VHD file in I(source_uri) and C(copy) from previous managed disk I(source_uri)."
choices:
- empty
- import
- copy
source_uri:
description:
- URI to a valid VHD file to be used or the resource ID of the managed disk to copy.
aliases:
- source_resource_uri
os_type:
description:
- "Type of Operating System: C(linux) or C(windows)."
- "Used when I(create_option) is either C(copy) or C(import) and the source is an OS disk."
- "If omitted during creation, no value is set."
- "If omitted during an update, no change is made."
- "Once set, this value cannot be cleared."
choices:
- linux
- windows
disk_size_gb:
description:
- "Size in GB of the managed disk to be created."
- "If I(create_option) is C(copy) then the value must be greater than or equal to the source's size."
managed_by:
description:
- Name of an existing virtual machine with which the disk is or will be associated, this VM should be in the same resource group.
- To detach a disk from a vm, explicitly set to ''.
- If this option is unset, the value will not be changed.
version_added: 2.5
tags:
description:
- Tags to assign to the managed disk.
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Bruno Medina (@brusMX)"
'''
EXAMPLES = '''
- name: Create managed disk
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: myResourceGroup
disk_size_gb: 4
- name: Create managed operating system disk from page blob
azure_rm_managed_disk:
name: mymanageddisk
location: eastus2
resource_group: myResourceGroup
create_option: import
source_uri: https://storageaccountname.blob.core.windows.net/containername/blob-name.vhd
os_type: windows
storage_account_type: Premium_LRS
- name: Mount the managed disk to VM
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: myResourceGroup
disk_size_gb: 4
managed_by: testvm001
- name: Unmount the managed disk to VM
azure_rm_managed_disk:
name: mymanageddisk
location: eastus
resource_group: myResourceGroup
disk_size_gb: 4
- name: Delete managed disk
azure_rm_manage_disk:
name: mymanageddisk
location: eastus
resource_group: myResourceGroup
state: absent
'''
RETURN = '''
id:
description: The managed disk resource ID.
returned: always
type: dict
state:
description: Current state of the managed disk
returned: always
type: dict
changed:
description: Whether or not the resource has changed
returned: always
type: bool
'''
import re
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.tools import parse_resource_id
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
# duplicated in azure_rm_managed_disk_facts
def managed_disk_to_dict(managed_disk):
create_data = managed_disk.creation_data
return dict(
id=managed_disk.id,
name=managed_disk.name,
location=managed_disk.location,
tags=managed_disk.tags,
create_option=create_data.create_option.lower(),
source_uri=create_data.source_uri or create_data.source_resource_id,
disk_size_gb=managed_disk.disk_size_gb,
os_type=managed_disk.os_type.lower() if managed_disk.os_type else None,
storage_account_type=managed_disk.sku.name if managed_disk.sku else None,
managed_by=managed_disk.managed_by
)
class AzureRMManagedDisk(AzureRMModuleBase):
"""Configuration class for an Azure RM Managed Disk resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
location=dict(
type='str'
),
storage_account_type=dict(
type='str',
choices=['Standard_LRS', 'Premium_LRS']
),
create_option=dict(
type='str',
choices=['empty', 'import', 'copy']
),
source_uri=dict(
type='str',
aliases=['source_resource_uri']
),
os_type=dict(
type='str',
choices=['linux', 'windows']
),
disk_size_gb=dict(
type='int'
),
managed_by=dict(
type='str'
)
)
required_if = [
('create_option', 'import', ['source_uri']),
('create_option', 'copy', ['source_uri']),
('create_option', 'empty', ['disk_size_gb'])
]
self.results = dict(
changed=False,
state=dict())
self.resource_group = None
self.name = None
self.location = None
self.storage_account_type = None
self.create_option = None
self.source_uri = None
self.os_type = None
self.disk_size_gb = None
self.tags = None
self.managed_by = None
super(AzureRMManagedDisk, self).__init__(
derived_arg_spec=self.module_arg_spec,
required_if=required_if,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
result = None
changed = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
disk_instance = self.get_managed_disk()
result = disk_instance
# need create or update
if self.state == 'present':
parameter = self.generate_managed_disk_property()
if not disk_instance or self.is_different(disk_instance, parameter):
changed = True
if not self.check_mode:
result = self.create_or_update_managed_disk(parameter)
else:
result = True
# unmount from the old virtual machine and mount to the new virtual machine
if self.managed_by or self.managed_by == '':
vm_name = parse_resource_id(disk_instance.get('managed_by', '')).get('name') if disk_instance else None
vm_name = vm_name or ''
if self.managed_by != vm_name:
changed = True
if not self.check_mode:
if vm_name:
self.detach(vm_name, result)
if self.managed_by:
self.attach(self.managed_by, result)
result = self.get_managed_disk()
if self.state == 'absent' and disk_instance:
changed = True
if not self.check_mode:
self.delete_managed_disk()
result = True
self.results['changed'] = changed
self.results['state'] = result
return self.results
def attach(self, vm_name, disk):
vm = self._get_vm(vm_name)
# find the lun
luns = ([d.lun for d in vm.storage_profile.data_disks]
if vm.storage_profile.data_disks else [])
lun = max(luns) + 1 if luns else 0
# prepare the data disk
params = self.compute_models.ManagedDiskParameters(id=disk.get('id'), storage_account_type=disk.get('storage_account_type'))
data_disk = self.compute_models.DataDisk(lun=lun, create_option=self.compute_models.DiskCreateOptionTypes.attach, managed_disk=params)
vm.storage_profile.data_disks.append(data_disk)
self._update_vm(vm_name, vm)
def detach(self, vm_name, disk):
vm = self._get_vm(vm_name)
leftovers = [d for d in vm.storage_profile.data_disks if d.name.lower() != disk.get('name').lower()]
if len(vm.storage_profile.data_disks) == len(leftovers):
self.fail("No disk with the name '{0}' was found".format(disk.get('name')))
vm.storage_profile.data_disks = leftovers
self._update_vm(vm_name, vm)
def _update_vm(self, name, params):
try:
poller = self.compute_client.virtual_machines.create_or_update(self.resource_group, name, params)
self.get_poller_result(poller)
except Exception as exc:
self.fail("Error updating virtual machine {0} - {1}".format(name, str(exc)))
<|fim▁hole|> def _get_vm(self, name):
try:
return self.compute_client.virtual_machines.get(self.resource_group, name, expand='instanceview')
except Exception as exc:
self.fail("Error getting virtual machine {0} - {1}".format(name, str(exc)))
def generate_managed_disk_property(self):
# TODO: Add support for EncryptionSettings, DiskIOPSReadWrite, DiskMBpsReadWrite, Zones
disk_params = {}
creation_data = {}
disk_params['location'] = self.location
disk_params['tags'] = self.tags
if self.storage_account_type:
storage_account_type = self.compute_models.DiskSku(name=self.storage_account_type)
disk_params['sku'] = storage_account_type
disk_params['disk_size_gb'] = self.disk_size_gb
creation_data['create_option'] = self.compute_models.DiskCreateOption.empty
if self.create_option == 'import':
creation_data['create_option'] = self.compute_models.DiskCreateOption.import_enum
creation_data['source_uri'] = self.source_uri
elif self.create_option == 'copy':
creation_data['create_option'] = self.compute_models.DiskCreateOption.copy
creation_data['source_resource_id'] = self.source_uri
if self.os_type:
typecon = {
'linux': self.compute_models.OperatingSystemTypes.linux,
'windows': self.compute_models.OperatingSystemTypes.windows
}
disk_params['os_type'] = typecon[self.os_type]
else:
disk_params['os_type'] = None
disk_params['creation_data'] = creation_data
return disk_params
def create_or_update_managed_disk(self, parameter):
try:
poller = self.compute_client.disks.create_or_update(
self.resource_group,
self.name,
parameter)
aux = self.get_poller_result(poller)
return managed_disk_to_dict(aux)
except CloudError as e:
self.fail("Error creating the managed disk: {0}".format(str(e)))
# This method accounts for the difference in structure between the
# Azure retrieved disk and the parameters for the new disk to be created.
def is_different(self, found_disk, new_disk):
resp = False
if new_disk.get('disk_size_gb'):
if not found_disk['disk_size_gb'] == new_disk['disk_size_gb']:
resp = True
if new_disk.get('os_type'):
if not found_disk['os_type'] == new_disk['os_type']:
resp = True
if new_disk.get('sku'):
if not found_disk['storage_account_type'] == new_disk['sku'].name:
resp = True
# Check how to implement tags
if new_disk.get('tags') is not None:
if not found_disk['tags'] == new_disk['tags']:
resp = True
return resp
def delete_managed_disk(self):
try:
poller = self.compute_client.disks.delete(
self.resource_group,
self.name)
return self.get_poller_result(poller)
except CloudError as e:
self.fail("Error deleting the managed disk: {0}".format(str(e)))
def get_managed_disk(self):
try:
resp = self.compute_client.disks.get(
self.resource_group,
self.name)
return managed_disk_to_dict(resp)
except CloudError as e:
self.log('Did not find managed disk')
def main():
    """Module entry point: instantiating the class runs the module."""
    AzureRMManagedDisk()
# Run the module when executed directly (trailing dataset-marker residue removed).
if __name__ == '__main__':
    main()
import random
import time
import sys
import Csound
import subprocess
import base64
import hashlib
import matrixmusic
# Module-level handles shared by the functions below; populated at startup.
csd = None  # current Csound.CSD document being built by handle_create_song
oscillator = None  # Csound oscillator instrument
buzzer = None  # Csound buzz instrument
voice = None  # Csound fmvoice instrument
truevoice = None  # appears unused in this file
song_publisher = None  # only named in 'global' statements here; appears unused
def add_motif(instrument, req):
    """Schedule one motif's notes into the global Csound score.

    A "P" entry is a rest: no note event is emitted, but the time cursor
    still advances by the inter-note delay.
    """
    global csd
    cursor = req.motif_start_time
    for pitch in req.score:
        if pitch != "P":
            event = instrument.note(cursor, req.note_duration, pitch, req.motif_amplitude)
            csd.score(event)
        cursor += req.internote_delay
def handle_create_song(req):
    """Render *req* to a tagged Ogg Vorbis file and play it.

    Pipeline: build a Csound .csd from the request's motifs, render it to
    WAV with csound, encode to Ogg with oggenc, embed metadata (including
    the full .csd source, so the song can be regenerated) with
    vorbiscomment, then play the result with ogg123.

    Note: the 'voice' branch and csd.output() call below were swallowed by
    a dataset FIM-hole marker in the original listing; they are restored
    here from the dangling completion text at the end of the file.
    """
    global csd, oscillator, buzzer, voice
    global song_publisher
    s = 'temp'
    csd = Csound.CSD('%s.csd' % s)
    csd.orchestra(oscillator, buzzer, voice)
    for motif in req.motifs:
        if motif.instrument == 'oscil':
            add_motif(oscillator, motif)
        elif motif.instrument == 'buzzer':
            add_motif(buzzer, motif)
        elif motif.instrument == 'voice':
            add_motif(voice, motif)
    csd.output()
    args = ['csound', '-d', '%s.csd' % s]
    subprocess.call(args)
    # Read the generated .csd back so it can be embedded in the tags.
    f = open('%s.csd' % s)
    csd_string = f.read()
    f.close()
    song_name = '%s.ogg' % req.song_name
    args = ['oggenc', '-o', song_name, '%s.wav' % s]
    subprocess.call(args)
    args = ['vorbiscomment', '-a', song_name,
            '-t', "ARTIST=%s" % req.artist,
            '-t', "TITLE=%s" % req.song_name,
            '-t', "ALBUM=%s" % req.album,
            '-t', "GENRE=%s" % 'Electronica',
            '-t', "CSOUND=%s" % csd_string]
    subprocess.call(args)
    args = ['ogg123', song_name]
    subprocess.call(args)
class Motif(object):
    """Value object describing one repeated melodic fragment."""

    def __init__(self, motif_start_time, motif_repeat, motif_amplitude, score,
                 note_duration, internote_delay, instrument):
        # Timing parameters (seconds).
        self.motif_start_time = motif_start_time
        self.note_duration = note_duration
        self.internote_delay = internote_delay
        # Playback parameters.
        self.motif_repeat = motif_repeat
        self.motif_amplitude = motif_amplitude
        # Content: note names (with "P" rests) and the instrument key.
        self.score = score
        self.instrument = instrument
class Request(object):
    """Song-rendering request: output metadata plus the motifs to render."""

    def __init__(self, song_name, artist, album, motifs):
        # Ogg tag metadata.
        self.song_name = song_name
        self.artist = artist
        self.album = album
        # Sequence of Motif objects to schedule.
        self.motifs = motifs
def heads():
    """Fair coin flip: True with probability 0.5."""
    return random.random() < 0.5
def biasedFlip(p):
    """Biased coin flip: True with probability p (never for p <= 0)."""
    return random.random() < p
def selectInstrument():
    """Pick one of the two instrument names at random (50/50)."""
    return 'oscil' if heads() else 'buzzer'
def selectInterval():
    """Fixed (note_duration, internote_delay) pair, in seconds."""
    return 0.15, 0.05
def triggerCreate(song_name, artist, album, motifs):
    """Wrap the arguments in a Request and render it immediately."""
    request = Request(song_name, artist, album, motifs)
    handle_create_song(request)
def random_note():
    """Return a random note name such as "C#4".

    Picks a base letter A-G, optionally adds a sharp (E and B never get
    one in this scheme), and appends an octave digit from 2 to 5.

    Fixes versus the original: the dead ``mods = ["", "#"]`` assignment
    (overwritten before use) and the unused ``unflatable`` list are
    removed, and ``map()`` is replaced by a list comprehension so the
    octave pool is a real list on Python 3 too (random.choice needs a
    sequence); behaviour on Python 2 is unchanged.
    """
    bases = ["A", "B", "C", "D", "E", "F", "G"]
    unsharpable = ["E", "B"]  # E# and B# are not used
    octaves = [str(o) for o in range(2, 6)]
    base = random.choice(bases)
    mods = [""]
    if base not in unsharpable:
        mods.append("#")
    mod = random.choice(mods)
    octave = random.choice(octaves)
    return base + mod + octave
def random_motif(start_time):
    """Build a Motif with a pair-matrix-generated score starting at *start_time*."""
    # Pitch pool fed to the matrixmusic generator. (Earlier experimental
    # pools from the original listing are omitted here.)
    notes = "C3 C#3 E3 F3 G3 G#3 B4 C4 C#4 E4 F4 G4 G#4".split(" ")
    score = matrixmusic.create_pair_score(notes, 15) * 5
    print("Random score: " + str(score))
    # Candidate (instrument, note_duration, internote_delay) triples.
    opts = [("voice", 1.0, 1.5),
            ("voice", 3.0, 1.5)]
    instrument, duration, delay = random.choice(opts)
    return Motif(start_time, 12, 0.05, score, duration, delay, instrument)
if __name__ == "__main__":
if len(sys.argv) < 3:
print "Usage: %s <artist> <album name>" % sys.argv[0]
exit()
else:
artist = sys.argv[1]
album = sys.argv[2]
global song_publisher, oscillator, buzzer, voice
oscillator = Csound.oscil()
buzzer = Csound.buzz()
voice = Csound.fmvoice()
#voice = Csound.voice()
for i in xrange(1, 16384):
song_title = "song_%d" % i
#motifs = [ Motif(0.0, 12, 0.32, "A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5 A5 B5 D6 E6 F#6", 0.15, 0.05, selectInstrument()) ]
motifs = [random_motif(i*0.8) for i in range(3)]
# if biasedFlip(0.8):
# motifs.append(Motif(3.0, 10, 0.32, "A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5 A5 B5 D6 E6 F#6", a, b, selectInstrument()))
# if biasedFlip(0.9):
# motifs.append(Motif(6.0, 4, 0.10, "A2 B2 D3 D3 F#3 A3 B3 D4 E4 F#4 A4 B4 D5 E5 F#5", 0.3, 0.1, selectInstrument()))
triggerCreate(song_title, artist, album, motifs)
print "Created song %s" % song_title
time.sleep(10)<|fim▁end|> | elif motif.instrument == 'voice':
add_motif(voice, motif)
csd.output() |