file_name (string, 4-140 chars) | prefix (string, 0-39k chars) | suffix (string, 0-36.1k chars) | middle (string, 0-29.4k chars) | fim_type (string, 4 classes)
---|---|---|---|---|
core.js | define(["jquery", "underscore", "backbone"], function ($,_,Backbone) {
/* *****************************************************************************************************************
Prototype Inheritance
**************************************************************************************************************** */
$.curCSS = $.css; // back-port jquery 1.8+
/* *****************************************************************************************************************
**************************************************************************************************************** */
return {
DEBUG: false,
idAttribute: "id", labelAttribute: "label", typeAttribute: "type", commentAttribute: "comment",
fact: {
},
ux: {
i18n: {},
types: {},
mixin: {},
view: { field: {} }
},
iq: {
},
NS: {
"owl": "http://www.w3.org/2002/07/owl#",
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"xsd": "http://www.w3.org/2001/XMLSchema#",
"ux": "meta4:ux:",
"iq": "meta4:iq:",
"fact": "meta4:fact:",
"asq": "meta4:asq:",
},
/**
curried fn()
when invoked returns a fn() that in turns calls a named fn() on a context object
e.g: dispatch(source, "doSomething")(a,b,c) -> source.doSomething(a,b,c)
**/
dispatch: function(self, event) {
return function() {
return self[event] && self[event].apply(self, arguments)
}
},
resolve: function(options, modelled) {
if (!options) throw "meta4:ux:oops:missing-options"
var _DEBUG = options.debug || this.DEBUG
modelled = modelled || _.extend({},options);
// Resolve Backbone Model - last resort, use 'options'
if (_.isString(options.model)) {
modelled.model = this.fact.models.get(options.model);
//_DEBUG &&
console.warn("Model$ (%s) %o %o -> %o", options.model, this.fact.models, options, modelled);
} else if ( options.model instanceof Backbone.Model ) {
modelled.model = options.model;
} else if (_.isFunction(options.model)) {
modelled.model = options.model(options);
} else if (_.isObject(options.model)) {
modelled.model = new this.fact.Model( options.model );
} else if ( options.model === false ) {
// modelled.model = new Backbone.Model()
_DEBUG && console.debug("No Model: %o %o", options, modelled)
} else if ( options.model === true || options.model == undefined) {
var _options = { label: options.label, comment: (options.comment || ""), icon: (options.icon || "") };
_options.idAttribute = options[this.ux.idAttribute]
modelled.model = new this.fact.Model({})
modelled.model.set(_options)
_DEBUG && console.debug("View Model (%s): %o %o", modelled.id, _options, modelled.model)
} else throw "meta4:ux:oops:invalid-model#"+options.model
// Resolve Backbone Collection
if (_.isString(options.collection)) {
// recursively re-model ... check if a sub-model first
var _collection = false
// nested-collection
if (options.collection.indexOf(".")==0) {
var cid = options.collection.substring(1)
_collection = modelled.model.get(cid)
if (!_collection) {
_collection = new Backbone.Collection()
_DEBUG && console.log("New Local Collection (%s): %o %o %o", cid, options, modelled, _collection)
modelled.model.set(cid, _collection)
} else {
_DEBUG && console.log("Existing Local Collection (%s): %o %o %o", cid, options, modelled, _collection)
}
} else if (options.collection.indexOf("!")==0) {
// global-collection
var cid = options.collection.substring(1)
_collection = this.fact.models.get(cid)
_DEBUG && console.log("Global Collection (%s): %o %o %o", cid, options, modelled, _collection)
} else {
var cid = options.collection
_collection = modelled.model.get(cid) || this.fact.models.get(cid)
_DEBUG && console.log("Local/Global Collection (%s): %o %o %o", cid, options, modelled, _collection)
}
if (!_collection) {
_collection = this.fact.factory.Local({ id: options.collection, fetch: false })
_DEBUG && console.log("Local Collection: %o %o %o %o", options.collection, options, modelled, _collection)
}
// resolve any string models
this.ux.model( { model: modelled.model, collection: _collection }, modelled);
_DEBUG && console.log("String Modelled: %o", modelled)
} else if (_.isArray(options.collection)) {
_DEBUG && console.log("Array Collection", options.collection, this.fact)
modelled.collection = this.fact.Collection(options.collection);
} else if (_.isObject(options.collection) && options.collection instanceof Backbone.Collection ) {
_DEBUG && console.log("Existing Collection: %o", options.collection)
modelled.collection = options.collection;
} else if (_.isObject(options.collection) && _.isString(options.collection.id) ) {
//_DEBUG &&
console.log("Register Collection: %s -> %o / %o", options.collection.id, options.collection, this.fact)
modelled.collection = this.fact.models.get(options.collection.id) || this.fact.register(options.collection)
} else if (_.isFunction(options.collection)) {
_DEBUG && console.log("Function Collection", options.collection, this.fact)
modelled.collection = options.collection(options);
}
// cloned original options - with resolved Model, optionally a Collection
return modelled;
},
/**
Uses a curried fn() to replace key/values in options{}.
If a matching option key exists within mix_ins{} and the mixin is a fn(),
it is executed and its returned value bound.
**/
curry: function(options, mix_ins, _options) {
if (!options || !mix_ins) return options;
_options = _options || {} // cloned object
_.each(options, function(option,key) {
var mixin = mix_ins[key]
_options[key] = _.isFunction(mixin)?mixin(option):option
})
return _options;
},
/**
Rename/Replace the keys in a key/value Object using a re-mapping object
@param: options - Object of key/values
@param: remap - Object of key1/key2
**/
remap: function(options, remap) {
if (!options || !remap) return options;
var map = {}
_.each(remap, function(v,k) {
var n = options[k]
if (_.isFunction(v)) map[v] = v(n, k, map)
else if ( !_.isUndefined(n) && !_.isUndefined(v) ) map[v] = n
})
return map;
},
/**
De-reference string-based 'values' to a mix-in fn()
Replaces string values in options with corresponding fn() from mixin
**/
mixin: function(options, mix_ins, _options) {
if (!options || !mix_ins) return options;
_options = _options || options // default original
_.each(options, function(value,key) {
if (_.isString(value)) {
var mixin = mix_ins[value]
if (mixin && _.isFunction(mixin)) {
options[key] = mixin
}
}
})
return _options;
},
isDefaultTrue: function(options, key) {
if (_.isUndefined(options)) return true;
return options[key]?true:false
},
/**
Utilities to deal with Strings (including special cases for 'id' strings)
**/
/**
Generate a reasonably unique UUID
**/
uuid: function() {
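// Concatenates eight random 4-hex-digit segments (not an RFC 4122 UUID).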
function | () { return (((1+Math.random())*0x10000)|0).toString(16).substring(1); };
return (_id()+"-"+_id()+"-"+_id()+"-"+_id()+"-"+_id()+"-"+_id()+"-"+_id()+"-"+_id());
},
/**
Generate a scoped UUID by pre-pending a prefix
**/
urn: function(prefix) {
return (prefix || this.idAttribute)+"#"+this.uuid();
},
/**
Turn camel-cased strings into a capitalised, space-separated string
**/
humanize: function(s) {
return s.replace(/\W+|_|-/g, " ").toLowerCase().replace(/(^[a-z]| [a-z]|-[a-z])/g, function($1) { return $1.toUpperCase() });
},
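// Recursively serialises a (possibly nested) object into a URL query string,
// using bracket syntax for nested keys, e.g. {a: {b: 1}} -> "a%5Bb%5D=1".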
toQueryString: function(obj, prefix) {
var serialize = function(obj, prefix) {
var str = [];
for(var p in obj) {
if (obj.hasOwnProperty(p)) {
var k = prefix ? prefix + "[" + p + "]" : p, v = obj[p];
str.push(typeof v == "object" ?
serialize(v, k) :
encodeURIComponent(k) + "=" + encodeURIComponent(v));
}
}
return str.join("&");
}
return serialize(obj, prefix)
}
}
}); | _id | identifier_name |
vfs.py | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def _insert(node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
| def libtree(lib):
"""Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id)
return root | random_line_split |
|
vfs.py | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def _insert(node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
def libtree(lib):
"""Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
|
return root
| dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id) | conditional_block |
vfs.py | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def | (node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
def libtree(lib):
"""Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id)
return root
| _insert | identifier_name |
vfs.py | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def _insert(node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
def libtree(lib):
| """Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id)
return root | identifier_body |
|
DeviceStream.js | import React, { Component, PropTypes } from 'react'
import { Card, CardHeader, CardText } from 'material-ui/Card'
import Avatar from 'material-ui/Avatar'
import randomMC from 'random-material-color'
import SelectField from 'material-ui/SelectField'
import MenuItem from 'material-ui/MenuItem'
import Graph from '../Graph/Graph'
export default class DeviceStream extends Component {
static propTypes = {
id: PropTypes.string,
unit: PropTypes.string,
color: PropTypes.string,
limit: PropTypes.number,
series: PropTypes.array
}
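// Pick a random material color once, before the component first renders.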
componentWillMount() {
this.setState({color: randomMC.getColor()})
}
render () {
const childNode = <p>Last updated {this.props.lastUpdate ? this.props.lastUpdate.date: '' }: unit: {this.props.unit}</p>
return ( | <Card>
<CardHeader
title={this.props.id}
subtitle={childNode}
avatar={<Avatar backgroundColor={this.state.color}>
{this.props.lastUpdate ? this.props.lastUpdate.value: '' }</Avatar>}
/>
<CardText>
<SelectField value={this.props.limit}
onChange={(event, index, value) => this.props.deviceStreamFetch(this.props.device, this.props, value)}>
<MenuItem value={10} primaryText="Hour" />
<MenuItem value={50} primaryText="Day" />
<MenuItem value={100} primaryText="Week" />
</SelectField>
<Graph series={this.props.series} streamid={this.props.id} color={this.state.color} />
</CardText>
</Card>
)
}
} | random_line_split |
|
DeviceStream.js | import React, { Component, PropTypes } from 'react'
import { Card, CardHeader, CardText } from 'material-ui/Card'
import Avatar from 'material-ui/Avatar'
import randomMC from 'random-material-color'
import SelectField from 'material-ui/SelectField'
import MenuItem from 'material-ui/MenuItem'
import Graph from '../Graph/Graph'
export default class DeviceStream extends Component {
static propTypes = {
id: PropTypes.string,
unit: PropTypes.string,
color: PropTypes.string,
limit: PropTypes.number,
series: PropTypes.array
}
componentWillMount() |
render () {
const childNode = <p>Last updated {this.props.lastUpdate ? this.props.lastUpdate.date: '' }: unit: {this.props.unit}</p>
return (
<Card>
<CardHeader
title={this.props.id}
subtitle={childNode}
avatar={<Avatar backgroundColor={this.state.color}>
{this.props.lastUpdate ? this.props.lastUpdate.value: '' }</Avatar>}
/>
<CardText>
<SelectField value={this.props.limit}
onChange={(event, index, value) => this.props.deviceStreamFetch(this.props.device, this.props, value)}>
<MenuItem value={10} primaryText="Hour" />
<MenuItem value={50} primaryText="Day" />
<MenuItem value={100} primaryText="Week" />
</SelectField>
<Graph series={this.props.series} streamid={this.props.id} color={this.state.color} />
</CardText>
</Card>
)
}
}
| {
this.setState({color: randomMC.getColor()})
} | identifier_body |
DeviceStream.js | import React, { Component, PropTypes } from 'react'
import { Card, CardHeader, CardText } from 'material-ui/Card'
import Avatar from 'material-ui/Avatar'
import randomMC from 'random-material-color'
import SelectField from 'material-ui/SelectField'
import MenuItem from 'material-ui/MenuItem'
import Graph from '../Graph/Graph'
export default class DeviceStream extends Component {
static propTypes = {
id: PropTypes.string,
unit: PropTypes.string,
color: PropTypes.string,
limit: PropTypes.number,
series: PropTypes.array
}
| () {
this.setState({color: randomMC.getColor()})
}
render () {
const childNode = <p>Last updated {this.props.lastUpdate ? this.props.lastUpdate.date: '' }: unit: {this.props.unit}</p>
return (
<Card>
<CardHeader
title={this.props.id}
subtitle={childNode}
avatar={<Avatar backgroundColor={this.state.color}>
{this.props.lastUpdate ? this.props.lastUpdate.value: '' }</Avatar>}
/>
<CardText>
<SelectField value={this.props.limit}
onChange={(event, index, value) => this.props.deviceStreamFetch(this.props.device, this.props, value)}>
<MenuItem value={10} primaryText="Hour" />
<MenuItem value={50} primaryText="Day" />
<MenuItem value={100} primaryText="Week" />
</SelectField>
<Graph series={this.props.series} streamid={this.props.id} color={this.state.color} />
</CardText>
</Card>
)
}
}
| componentWillMount | identifier_name |
forms.py | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from wtforms.fields import StringField
from wtforms.validators import DataRequired
from wtforms_sqlalchemy.fields import QuerySelectField
from indico.core.db.sqlalchemy.descriptions import RenderMode
from indico.modules.events.sessions.models.sessions import Session
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import IndicoMarkdownField
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
track_group = QuerySelectField(_('Track group'), default='', allow_blank=True, get_label='title',
description=_('Select a track group to which this track should belong'))
default_session = QuerySelectField(_('Default session'), default='', allow_blank=True, get_label='title',
description=_('Indico will preselect this session whenever an abstract is '
'accepted for the track'))
description = IndicoMarkdownField(_('Description'), editor=True)
def __init__(self, *args, **kwargs):
event = kwargs.pop('event')
super().__init__(*args, **kwargs)
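# Limit the selectable default sessions and track groups to this event.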
self.default_session.query = Session.query.with_parent(event)
self.track_group.query = TrackGroup.query.with_parent(event)
class ProgramForm(IndicoForm):
program = IndicoMarkdownField(_('Program'), editor=True, mathjax=True)
@generated_data
def | (self):
return RenderMode.markdown
class TrackGroupForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
description = IndicoMarkdownField(_('Description'), editor=True)
| program_render_mode | identifier_name |
forms.py | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from wtforms.fields import StringField
from wtforms.validators import DataRequired
from wtforms_sqlalchemy.fields import QuerySelectField
from indico.core.db.sqlalchemy.descriptions import RenderMode
from indico.modules.events.sessions.models.sessions import Session
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import IndicoMarkdownField
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
track_group = QuerySelectField(_('Track group'), default='', allow_blank=True, get_label='title',
description=_('Select a track group to which this track should belong'))
default_session = QuerySelectField(_('Default session'), default='', allow_blank=True, get_label='title',
description=_('Indico will preselect this session whenever an abstract is '
'accepted for the track'))
description = IndicoMarkdownField(_('Description'), editor=True)
def __init__(self, *args, **kwargs):
event = kwargs.pop('event')
super().__init__(*args, **kwargs)
self.default_session.query = Session.query.with_parent(event)
self.track_group.query = TrackGroup.query.with_parent(event)
class ProgramForm(IndicoForm):
program = IndicoMarkdownField(_('Program'), editor=True, mathjax=True)
@generated_data
def program_render_mode(self):
return RenderMode.markdown
class TrackGroupForm(IndicoForm):
| title = StringField(_('Title'), [DataRequired()])
description = IndicoMarkdownField(_('Description'), editor=True) | identifier_body |
|
forms.py | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from wtforms.fields import StringField
from wtforms.validators import DataRequired
from wtforms_sqlalchemy.fields import QuerySelectField
from indico.core.db.sqlalchemy.descriptions import RenderMode
from indico.modules.events.sessions.models.sessions import Session
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm, generated_data | code = StringField(_('Code'))
track_group = QuerySelectField(_('Track group'), default='', allow_blank=True, get_label='title',
description=_('Select a track group to which this track should belong'))
default_session = QuerySelectField(_('Default session'), default='', allow_blank=True, get_label='title',
description=_('Indico will preselect this session whenever an abstract is '
'accepted for the track'))
description = IndicoMarkdownField(_('Description'), editor=True)
def __init__(self, *args, **kwargs):
event = kwargs.pop('event')
super().__init__(*args, **kwargs)
self.default_session.query = Session.query.with_parent(event)
self.track_group.query = TrackGroup.query.with_parent(event)
class ProgramForm(IndicoForm):
program = IndicoMarkdownField(_('Program'), editor=True, mathjax=True)
@generated_data
def program_render_mode(self):
return RenderMode.markdown
class TrackGroupForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
description = IndicoMarkdownField(_('Description'), editor=True) | from indico.web.forms.fields import IndicoMarkdownField
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()]) | random_line_split |
update_replace_rollback.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def | (expected_count):
test.assertEqual(expected_count,
len(reality.resources_by_logical_name('C')))
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
example_template2 = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template2)
engine.noop(4)
engine.rollback_stack('foo')
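# The replacement of C was in flight, so two C resources exist until rollback cleans up.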
engine.call(check_c_count, 2)
engine.noop(11)
engine.call(verify, example_template)
engine.delete_stack('foo')
engine.noop(12)
engine.call(verify, Template({}))
| check_c_count | identifier_name |
update_replace_rollback.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def check_c_count(expected_count):
test.assertEqual(expected_count,
len(reality.resources_by_logical_name('C')))
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
example_template2 = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template2)
engine.noop(4)
engine.rollback_stack('foo')
engine.call(check_c_count, 2)
engine.noop(11)
engine.call(verify, example_template)
| engine.delete_stack('foo')
engine.noop(12)
engine.call(verify, Template({})) | random_line_split |
|
update_replace_rollback.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def check_c_count(expected_count):
|
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
example_template2 = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template2)
engine.noop(4)
engine.rollback_stack('foo')
engine.call(check_c_count, 2)
engine.noop(11)
engine.call(verify, example_template)
engine.delete_stack('foo')
engine.noop(12)
engine.call(verify, Template({}))
| test.assertEqual(expected_count,
len(reality.resources_by_logical_name('C'))) | identifier_body |
qNickInputWidget.py | from PyQt4.QtCore import Qt
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QLineEdit
from PyQt4.QtGui import QMessageBox
from PyQt4.QtGui import QPixmap
from PyQt4.QtGui import QPushButton
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QWidget
import qtUtils
from utils import constants
from utils import errors
from utils import utils
class QNickInputWidget(QWidget):
def | (self, image, imageWidth, connectClickedSlot, nick='', parent=None):
QWidget.__init__(self, parent)
self.connectClickedSlot = connectClickedSlot
# Image
self.image = QLabel(self)
self.image.setPixmap(QPixmap(qtUtils.getAbsoluteImagePath(image)).scaledToWidth(imageWidth, Qt.SmoothTransformation))
# Nick field
self.nickLabel = QLabel("Nickname:", self)
self.nickEdit = QLineEdit(nick, self)
self.nickEdit.setMaxLength(constants.NICK_MAX_LEN)
self.nickEdit.returnPressed.connect(self.__connectClicked)
# Connect button
self.connectButton = QPushButton("Connect", self)
self.connectButton.resize(self.connectButton.sizeHint())
self.connectButton.setAutoDefault(False)
self.connectButton.clicked.connect(self.__connectClicked)
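# Layout: nick label/field row stacked above the connect button, image on the left.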
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.nickLabel)
hbox.addWidget(self.nickEdit)
hbox.addStretch(1)
vbox = QVBoxLayout()
vbox.addStretch(1)
vbox.addLayout(hbox)
vbox.addWidget(self.connectButton)
vbox.addStretch(1)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.image)
hbox.addSpacing(10)
hbox.addLayout(vbox)
hbox.addStretch(1)
self.setLayout(hbox)
def __connectClicked(self):
nick = str(self.nickEdit.text()).lower()
# Validate the given nick
nickStatus = utils.isValidNick(nick)
if nickStatus == errors.VALID_NICK:
self.connectClickedSlot(nick)
elif nickStatus == errors.INVALID_NICK_CONTENT:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_CONTENT)
elif nickStatus == errors.INVALID_NICK_LENGTH:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_LENGTH)
elif nickStatus == errors.INVALID_EMPTY_NICK:
QMessageBox.warning(self, errors.TITLE_EMPTY_NICK, errors.EMPTY_NICK)
| __init__ | identifier_name |
qNickInputWidget.py | from PyQt4.QtCore import Qt
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QLineEdit
from PyQt4.QtGui import QMessageBox
from PyQt4.QtGui import QPixmap
from PyQt4.QtGui import QPushButton
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QWidget
import qtUtils
from utils import constants
from utils import errors
from utils import utils
class QNickInputWidget(QWidget):
def __init__(self, image, imageWidth, connectClickedSlot, nick='', parent=None):
QWidget.__init__(self, parent)
self.connectClickedSlot = connectClickedSlot
# Image
self.image = QLabel(self)
self.image.setPixmap(QPixmap(qtUtils.getAbsoluteImagePath(image)).scaledToWidth(imageWidth, Qt.SmoothTransformation))
# Nick field
self.nickLabel = QLabel("Nickname:", self)
self.nickEdit = QLineEdit(nick, self)
self.nickEdit.setMaxLength(constants.NICK_MAX_LEN)
self.nickEdit.returnPressed.connect(self.__connectClicked)
| self.connectButton.resize(self.connectButton.sizeHint())
self.connectButton.setAutoDefault(False)
self.connectButton.clicked.connect(self.__connectClicked)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.nickLabel)
hbox.addWidget(self.nickEdit)
hbox.addStretch(1)
vbox = QVBoxLayout()
vbox.addStretch(1)
vbox.addLayout(hbox)
vbox.addWidget(self.connectButton)
vbox.addStretch(1)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.image)
hbox.addSpacing(10)
hbox.addLayout(vbox)
hbox.addStretch(1)
self.setLayout(hbox)
def __connectClicked(self):
nick = str(self.nickEdit.text()).lower()
# Validate the given nick
nickStatus = utils.isValidNick(nick)
if nickStatus == errors.VALID_NICK:
self.connectClickedSlot(nick)
elif nickStatus == errors.INVALID_NICK_CONTENT:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_CONTENT)
elif nickStatus == errors.INVALID_NICK_LENGTH:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_LENGTH)
elif nickStatus == errors.INVALID_EMPTY_NICK:
QMessageBox.warning(self, errors.TITLE_EMPTY_NICK, errors.EMPTY_NICK) | # Connect button
self.connectButton = QPushButton("Connect", self) | random_line_split |
qNickInputWidget.py | from PyQt4.QtCore import Qt
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QLineEdit
from PyQt4.QtGui import QMessageBox
from PyQt4.QtGui import QPixmap
from PyQt4.QtGui import QPushButton
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QWidget
import qtUtils
from utils import constants
from utils import errors
from utils import utils
class QNickInputWidget(QWidget):
def __init__(self, image, imageWidth, connectClickedSlot, nick='', parent=None):
QWidget.__init__(self, parent)
self.connectClickedSlot = connectClickedSlot
# Image
self.image = QLabel(self)
self.image.setPixmap(QPixmap(qtUtils.getAbsoluteImagePath(image)).scaledToWidth(imageWidth, Qt.SmoothTransformation))
# Nick field
self.nickLabel = QLabel("Nickname:", self)
self.nickEdit = QLineEdit(nick, self)
self.nickEdit.setMaxLength(constants.NICK_MAX_LEN)
self.nickEdit.returnPressed.connect(self.__connectClicked)
# Connect button
self.connectButton = QPushButton("Connect", self)
self.connectButton.resize(self.connectButton.sizeHint())
self.connectButton.setAutoDefault(False)
self.connectButton.clicked.connect(self.__connectClicked)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.nickLabel)
hbox.addWidget(self.nickEdit)
hbox.addStretch(1)
vbox = QVBoxLayout()
vbox.addStretch(1)
vbox.addLayout(hbox)
vbox.addWidget(self.connectButton)
vbox.addStretch(1)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.image)
hbox.addSpacing(10)
hbox.addLayout(vbox)
hbox.addStretch(1)
self.setLayout(hbox)
def __connectClicked(self):
| nick = str(self.nickEdit.text()).lower()
# Validate the given nick
nickStatus = utils.isValidNick(nick)
if nickStatus == errors.VALID_NICK:
self.connectClickedSlot(nick)
elif nickStatus == errors.INVALID_NICK_CONTENT:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_CONTENT)
elif nickStatus == errors.INVALID_NICK_LENGTH:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_LENGTH)
elif nickStatus == errors.INVALID_EMPTY_NICK:
QMessageBox.warning(self, errors.TITLE_EMPTY_NICK, errors.EMPTY_NICK) | identifier_body |
|
qNickInputWidget.py | from PyQt4.QtCore import Qt
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QLineEdit
from PyQt4.QtGui import QMessageBox
from PyQt4.QtGui import QPixmap
from PyQt4.QtGui import QPushButton
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QWidget
import qtUtils
from utils import constants
from utils import errors
from utils import utils
class QNickInputWidget(QWidget):
def __init__(self, image, imageWidth, connectClickedSlot, nick='', parent=None):
QWidget.__init__(self, parent)
self.connectClickedSlot = connectClickedSlot
# Image
self.image = QLabel(self)
self.image.setPixmap(QPixmap(qtUtils.getAbsoluteImagePath(image)).scaledToWidth(imageWidth, Qt.SmoothTransformation))
# Nick field
self.nickLabel = QLabel("Nickname:", self)
self.nickEdit = QLineEdit(nick, self)
self.nickEdit.setMaxLength(constants.NICK_MAX_LEN)
self.nickEdit.returnPressed.connect(self.__connectClicked)
# Connect button
self.connectButton = QPushButton("Connect", self)
self.connectButton.resize(self.connectButton.sizeHint())
self.connectButton.setAutoDefault(False)
self.connectButton.clicked.connect(self.__connectClicked)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.nickLabel)
hbox.addWidget(self.nickEdit)
hbox.addStretch(1)
vbox = QVBoxLayout()
vbox.addStretch(1)
vbox.addLayout(hbox)
vbox.addWidget(self.connectButton)
vbox.addStretch(1)
hbox = QHBoxLayout()
hbox.addStretch(1)
hbox.addWidget(self.image)
hbox.addSpacing(10)
hbox.addLayout(vbox)
hbox.addStretch(1)
self.setLayout(hbox)
def __connectClicked(self):
nick = str(self.nickEdit.text()).lower()
# Validate the given nick
nickStatus = utils.isValidNick(nick)
if nickStatus == errors.VALID_NICK:
self.connectClickedSlot(nick)
elif nickStatus == errors.INVALID_NICK_CONTENT:
QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_CONTENT)
elif nickStatus == errors.INVALID_NICK_LENGTH:
|
elif nickStatus == errors.INVALID_EMPTY_NICK:
QMessageBox.warning(self, errors.TITLE_EMPTY_NICK, errors.EMPTY_NICK)
| QMessageBox.warning(self, errors.TITLE_INVALID_NICK, errors.INVALID_NICK_LENGTH) | conditional_block |
controller_analyze.py | import base64
import csv
import io
import multiprocessing
import numpy as np
import sys
from collections import defaultdict
from io import StringIO
from pathlib import Path
# Import matplotlib ourselves and make it use agg (not any GUI anything)
# before the analyze module pulls it in.
import matplotlib
matplotlib.use('Agg')
from bottle import get, post, redirect, request, response, jinja2_template as template # noqa: E402
from analysis import heatmaps, process, plot # noqa: E402
from web.error_handlers import TrackParseError # noqa: E402
from common import mkdir # noqa: E402
import config # noqa: E402
def _make_stats_output(stats, all_keys, do_csv):
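# Normalize cell values: format floats to three decimals and blank out missing keys.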
for i in range(len(stats)):
stat = stats[i]
for k in all_keys:
if k in stat:
val = stat[k]
if isinstance(val, (np.float32, np.float64)):
stat[k] = "%0.3f" % val
else:
stat[k] = ""
all_keys.remove('Track file') # will be added as first column
all_keys = sorted(list(all_keys))
all_keys[:0] = ['Track file'] # prepend 'Track file' header
if do_csv:
output = StringIO()
writer = csv.DictWriter(output, fieldnames=all_keys)
writer.writeheader()
for stat in stats:
writer.writerow(stat)
csvstring = output.getvalue()
output.close()
response.content_type = 'text/csv'
response.headers['Content-Disposition'] = 'attachment; filename=atles_stats.csv'
return csvstring
else:
return template('stats', keys=all_keys, stats=stats)
@get('/stats/')
def get_stats():
trackrels = request.query.tracks.split('|')
exp_type = request.query.exp_type
stats = []
all_keys = set()
for trackrel in trackrels:
curstats = {}
curstats['Track file'] = trackrel
try:
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
curstats.update(processor.get_setup(['experiment', 'phases', 'general']))
curstats.update(processor.get_stats_single_table(include_phases=True))
if exp_type:
curstats.update(processor.get_exp_stats(exp_type))
except (ValueError, IndexError):
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
all_keys.update(curstats.keys())
stats.append(curstats)
return _make_stats_output(stats, all_keys, do_csv=request.query.csv)
def _do_analyze(trackrel):
trackrel = Path(trackrel)
# ensure directories exist for plot creation
trackreldir = trackrel.parent
mkdir(config.PLOTDIR / trackreldir)
# look for debug frames to create links in the trace plot
trackname = trackrel.name.replace('-track.csv', '')
dbgframedir = config.DBGFRAMEDIR / trackreldir / trackname
dbgframes = list(dbgframedir.glob("subframe*.png")) # list so TrackPlotter can re-use (instead of exhausting the iterable)
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
plotter = plot.TrackPlotter(processor, dbgframes)
plotter.plot_heatmap()
def saveplot(filename):
plot.savefig(str(config.PLOTDIR / filename))
saveplot("{}.10.heat.png".format(trackrel))
plotter.plot_invalidheatmap()
saveplot("{}.12.heat.invalid.png".format(trackrel))
if processor.num_phases() > 1:
plotter.plot_heatmap(plot_type='per-phase')
saveplot("{}.14.heat.perphase.png".format(trackrel))
plotter.plot_heatmap(plot_type='per-minute')
saveplot("{}.15.heat.perminute.png".format(trackrel))
plotter.plot_trace()
saveplot("{}.20.plot.svg".format(trackrel))
@post('/analyze/')
def post_analyze():
trackrel = request.query.trackrel
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
redirect("/view/{}".format(trackrel))
def | (trackrels):
for trackrel in trackrels:
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
pass # nothing to be done here; we're processing in the background
@post('/analyze_selection/')
def post_analyze_selection():
trackrels = request.query.trackrels.split('|')
p = multiprocessing.Process(target=_analyze_selection, args=(trackrels,))
p.start()
@get('/heatmaps/')
def get_heatmaps():
trackrels = request.query.tracks.split('|')
processors = []
# to verify all phases are equivalent
plength_map = defaultdict(list)
for trackrel in trackrels:
try:
p = process.TrackProcessor(str(config.TRACKDIR / trackrel), just_raw_data=True)
processors.append(p)
plength_map[tuple(phase.length for phase in p.phase_list)].append(trackrel)
except ValueError:
raise(TrackParseError(trackrel, sys.exc_info()))
if len(plength_map) > 1:
lengths_string = '\n'.join(
"{} in:\n {}\n".format(
str(lengths),
"\n ".join(trackrel for trackrel in plength_map[lengths])
)
for lengths in plength_map
)
return template('error', errormsg="The provided tracks do not all have the same phase lengths. Please select tracks that share an experimental setup.<br>Phase lengths found:<pre>{}</pre>".format(lengths_string))
# Save all images as binary to be included in the page directly
# Base64-encoded. (Saves having to write temporary data to filesystem.)
images_data = []
# use phases from an arbitrary track
plengths = plength_map.popitem()[0]
dataframes = [proc.df for proc in processors]
phase_start = 0
for i, length in enumerate(plengths):
phase_end = phase_start + length
x, y = heatmaps.get_timeslice(dataframes, phase_start*60, phase_end*60)
title = "Phase {} ({}:00-{}:00)".format(i+1, phase_start, phase_end)
ax = heatmaps.make_heatmap(x, y, title)
plot.format_axis(ax)
image_data = io.BytesIO()
plot.savefig(image_data, format='png')
images_data.append(
base64.b64encode(image_data.getvalue()).decode()
)
phase_start = phase_end
return template('view', imgdatas=images_data)
| _analyze_selection | identifier_name |
controller_analyze.py | import base64
import csv
import io
import multiprocessing
import numpy as np
import sys
from collections import defaultdict
from io import StringIO
from pathlib import Path
# Import matplotlib ourselves and make it use agg (not any GUI anything)
# before the analyze module pulls it in.
import matplotlib
matplotlib.use('Agg')
from bottle import get, post, redirect, request, response, jinja2_template as template # noqa: E402
from analysis import heatmaps, process, plot # noqa: E402
from web.error_handlers import TrackParseError # noqa: E402
from common import mkdir # noqa: E402
import config # noqa: E402
def _make_stats_output(stats, all_keys, do_csv):
for i in range(len(stats)):
stat = stats[i]
for k in all_keys:
if k in stat:
val = stat[k]
if isinstance(val, (np.float32, np.float64)):
stat[k] = "%0.3f" % val
else:
stat[k] = ""
all_keys.remove('Track file') # will be added as first column
all_keys = sorted(list(all_keys))
all_keys[:0] = ['Track file'] # prepend 'Track file' header
if do_csv:
output = StringIO()
writer = csv.DictWriter(output, fieldnames=all_keys)
writer.writeheader()
for stat in stats:
writer.writerow(stat)
csvstring = output.getvalue()
output.close()
response.content_type = 'text/csv'
response.headers['Content-Disposition'] = 'attachment; filename=atles_stats.csv'
return csvstring
else:
return template('stats', keys=all_keys, stats=stats)
@get('/stats/')
def get_stats():
trackrels = request.query.tracks.split('|')
exp_type = request.query.exp_type
stats = []
all_keys = set()
for trackrel in trackrels:
curstats = {}
curstats['Track file'] = trackrel
try:
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
curstats.update(processor.get_setup(['experiment', 'phases', 'general']))
curstats.update(processor.get_stats_single_table(include_phases=True))
if exp_type:
curstats.update(processor.get_exp_stats(exp_type))
except (ValueError, IndexError):
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
all_keys.update(curstats.keys())
stats.append(curstats)
return _make_stats_output(stats, all_keys, do_csv=request.query.csv)
def _do_analyze(trackrel):
trackrel = Path(trackrel)
# ensure directories exist for plot creation
trackreldir = trackrel.parent
mkdir(config.PLOTDIR / trackreldir)
# look for debug frames to create links in the trace plot
trackname = trackrel.name.replace('-track.csv', '')
dbgframedir = config.DBGFRAMEDIR / trackreldir / trackname
dbgframes = list(dbgframedir.glob("subframe*.png")) # list so TrackPlotter can re-use (instead of exhausting the iterable)
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
plotter = plot.TrackPlotter(processor, dbgframes)
plotter.plot_heatmap()
def saveplot(filename):
plot.savefig(str(config.PLOTDIR / filename))
saveplot("{}.10.heat.png".format(trackrel))
plotter.plot_invalidheatmap()
saveplot("{}.12.heat.invalid.png".format(trackrel))
if processor.num_phases() > 1: | saveplot("{}.14.heat.perphase.png".format(trackrel))
plotter.plot_heatmap(plot_type='per-minute')
saveplot("{}.15.heat.perminute.png".format(trackrel))
plotter.plot_trace()
saveplot("{}.20.plot.svg".format(trackrel))
@post('/analyze/')
def post_analyze():
trackrel = request.query.trackrel
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
redirect("/view/{}".format(trackrel))
def _analyze_selection(trackrels):
for trackrel in trackrels:
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
pass # nothing to be done here; we're processing in the background
@post('/analyze_selection/')
def post_analyze_selection():
trackrels = request.query.trackrels.split('|')
p = multiprocessing.Process(target=_analyze_selection, args=(trackrels,))
p.start()
@get('/heatmaps/')
def get_heatmaps():
trackrels = request.query.tracks.split('|')
processors = []
# to verify all phases are equivalent
plength_map = defaultdict(list)
for trackrel in trackrels:
try:
p = process.TrackProcessor(str(config.TRACKDIR / trackrel), just_raw_data=True)
processors.append(p)
plength_map[tuple(phase.length for phase in p.phase_list)].append(trackrel)
except ValueError:
raise(TrackParseError(trackrel, sys.exc_info()))
if len(plength_map) > 1:
lengths_string = '\n'.join(
"{} in:\n {}\n".format(
str(lengths),
"\n ".join(trackrel for trackrel in plength_map[lengths])
)
for lengths in plength_map
)
return template('error', errormsg="The provided tracks do not all have the same phase lengths. Please select tracks that share an experimental setup.<br>Phase lengths found:<pre>{}</pre>".format(lengths_string))
# Save all images as binary to be included in the page directly
# Base64-encoded. (Saves having to write temporary data to filesystem.)
images_data = []
# use phases from an arbitrary track
plengths = plength_map.popitem()[0]
dataframes = [proc.df for proc in processors]
phase_start = 0
for i, length in enumerate(plengths):
phase_end = phase_start + length
x, y = heatmaps.get_timeslice(dataframes, phase_start*60, phase_end*60)
title = "Phase {} ({}:00-{}:00)".format(i+1, phase_start, phase_end)
ax = heatmaps.make_heatmap(x, y, title)
plot.format_axis(ax)
image_data = io.BytesIO()
plot.savefig(image_data, format='png')
images_data.append(
base64.b64encode(image_data.getvalue()).decode()
)
phase_start = phase_end
return template('view', imgdatas=images_data) | plotter.plot_heatmap(plot_type='per-phase') | random_line_split |
controller_analyze.py | import base64
import csv
import io
import multiprocessing
import numpy as np
import sys
from collections import defaultdict
from io import StringIO
from pathlib import Path
# Import matplotlib ourselves and make it use agg (not any GUI anything)
# before the analyze module pulls it in.
import matplotlib
matplotlib.use('Agg')
from bottle import get, post, redirect, request, response, jinja2_template as template # noqa: E402
from analysis import heatmaps, process, plot # noqa: E402
from web.error_handlers import TrackParseError # noqa: E402
from common import mkdir # noqa: E402
import config # noqa: E402
def _make_stats_output(stats, all_keys, do_csv):
for i in range(len(stats)):
stat = stats[i]
for k in all_keys:
|
all_keys.remove('Track file') # will be added as first column
all_keys = sorted(list(all_keys))
all_keys[:0] = ['Track file'] # prepend 'Track file' header
if do_csv:
output = StringIO()
writer = csv.DictWriter(output, fieldnames=all_keys)
writer.writeheader()
for stat in stats:
writer.writerow(stat)
csvstring = output.getvalue()
output.close()
response.content_type = 'text/csv'
response.headers['Content-Disposition'] = 'attachment; filename=atles_stats.csv'
return csvstring
else:
return template('stats', keys=all_keys, stats=stats)
@get('/stats/')
def get_stats():
trackrels = request.query.tracks.split('|')
exp_type = request.query.exp_type
stats = []
all_keys = set()
for trackrel in trackrels:
curstats = {}
curstats['Track file'] = trackrel
try:
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
curstats.update(processor.get_setup(['experiment', 'phases', 'general']))
curstats.update(processor.get_stats_single_table(include_phases=True))
if exp_type:
curstats.update(processor.get_exp_stats(exp_type))
except (ValueError, IndexError):
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
all_keys.update(curstats.keys())
stats.append(curstats)
return _make_stats_output(stats, all_keys, do_csv=request.query.csv)
def _do_analyze(trackrel):
trackrel = Path(trackrel)
# ensure directories exist for plot creation
trackreldir = trackrel.parent
mkdir(config.PLOTDIR / trackreldir)
# look for debug frames to create links in the trace plot
trackname = trackrel.name.replace('-track.csv', '')
dbgframedir = config.DBGFRAMEDIR / trackreldir / trackname
dbgframes = list(dbgframedir.glob("subframe*.png")) # list so TrackPlotter can re-use (instead of exhausting the iterable)
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
plotter = plot.TrackPlotter(processor, dbgframes)
plotter.plot_heatmap()
def saveplot(filename):
plot.savefig(str(config.PLOTDIR / filename))
saveplot("{}.10.heat.png".format(trackrel))
plotter.plot_invalidheatmap()
saveplot("{}.12.heat.invalid.png".format(trackrel))
if processor.num_phases() > 1:
plotter.plot_heatmap(plot_type='per-phase')
saveplot("{}.14.heat.perphase.png".format(trackrel))
plotter.plot_heatmap(plot_type='per-minute')
saveplot("{}.15.heat.perminute.png".format(trackrel))
plotter.plot_trace()
saveplot("{}.20.plot.svg".format(trackrel))
@post('/analyze/')
def post_analyze():
trackrel = request.query.trackrel
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
redirect("/view/{}".format(trackrel))
def _analyze_selection(trackrels):
for trackrel in trackrels:
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
pass # nothing to be done here; we're processing in the background
@post('/analyze_selection/')
def post_analyze_selection():
trackrels = request.query.trackrels.split('|')
p = multiprocessing.Process(target=_analyze_selection, args=(trackrels,))
p.start()
@get('/heatmaps/')
def get_heatmaps():
trackrels = request.query.tracks.split('|')
processors = []
# to verify all phases are equivalent
plength_map = defaultdict(list)
for trackrel in trackrels:
try:
p = process.TrackProcessor(str(config.TRACKDIR / trackrel), just_raw_data=True)
processors.append(p)
plength_map[tuple(phase.length for phase in p.phase_list)].append(trackrel)
except ValueError:
raise(TrackParseError(trackrel, sys.exc_info()))
if len(plength_map) > 1:
lengths_string = '\n'.join(
"{} in:\n {}\n".format(
str(lengths),
"\n ".join(trackrel for trackrel in plength_map[lengths])
)
for lengths in plength_map
)
return template('error', errormsg="The provided tracks do not all have the same phase lengths. Please select tracks that share an experimental setup.<br>Phase lengths found:<pre>{}</pre>".format(lengths_string))
# Save all images as binary to be included in the page directly
# Base64-encoded. (Saves having to write temporary data to filesystem.)
images_data = []
# use phases from an arbitrary track
plengths = plength_map.popitem()[0]
dataframes = [proc.df for proc in processors]
phase_start = 0
for i, length in enumerate(plengths):
phase_end = phase_start + length
x, y = heatmaps.get_timeslice(dataframes, phase_start*60, phase_end*60)
title = "Phase {} ({}:00-{}:00)".format(i+1, phase_start, phase_end)
ax = heatmaps.make_heatmap(x, y, title)
plot.format_axis(ax)
image_data = io.BytesIO()
plot.savefig(image_data, format='png')
images_data.append(
base64.b64encode(image_data.getvalue()).decode()
)
phase_start = phase_end
return template('view', imgdatas=images_data)
| if k in stat:
val = stat[k]
if isinstance(val, (np.float32, np.float64)):
stat[k] = "%0.3f" % val
else:
stat[k] = "" | conditional_block |
controller_analyze.py | import base64
import csv
import io
import multiprocessing
import numpy as np
import sys
from collections import defaultdict
from io import StringIO
from pathlib import Path
# Import matplotlib ourselves and make it use agg (not any GUI anything)
# before the analyze module pulls it in.
import matplotlib
matplotlib.use('Agg')
from bottle import get, post, redirect, request, response, jinja2_template as template # noqa: E402
from analysis import heatmaps, process, plot # noqa: E402
from web.error_handlers import TrackParseError # noqa: E402
from common import mkdir # noqa: E402
import config # noqa: E402
def _make_stats_output(stats, all_keys, do_csv):
for i in range(len(stats)):
stat = stats[i]
for k in all_keys:
if k in stat:
val = stat[k]
if isinstance(val, (np.float32, np.float64)):
stat[k] = "%0.3f" % val
else:
stat[k] = ""
all_keys.remove('Track file') # will be added as first column
all_keys = sorted(list(all_keys))
all_keys[:0] = ['Track file'] # prepend 'Track file' header
if do_csv:
output = StringIO()
writer = csv.DictWriter(output, fieldnames=all_keys)
writer.writeheader()
for stat in stats:
writer.writerow(stat)
csvstring = output.getvalue()
output.close()
response.content_type = 'text/csv'
response.headers['Content-Disposition'] = 'attachment; filename=atles_stats.csv'
return csvstring
else:
return template('stats', keys=all_keys, stats=stats)
@get('/stats/')
def get_stats():
|
def _do_analyze(trackrel):
trackrel = Path(trackrel)
# ensure directories exist for plot creation
trackreldir = trackrel.parent
mkdir(config.PLOTDIR / trackreldir)
# look for debug frames to create links in the trace plot
trackname = trackrel.name.replace('-track.csv', '')
dbgframedir = config.DBGFRAMEDIR / trackreldir / trackname
dbgframes = list(dbgframedir.glob("subframe*.png")) # list so TrackPlotter can re-use (instead of exhausting the iterable)
processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
plotter = plot.TrackPlotter(processor, dbgframes)
plotter.plot_heatmap()
def saveplot(filename):
plot.savefig(str(config.PLOTDIR / filename))
saveplot("{}.10.heat.png".format(trackrel))
plotter.plot_invalidheatmap()
saveplot("{}.12.heat.invalid.png".format(trackrel))
if processor.num_phases() > 1:
plotter.plot_heatmap(plot_type='per-phase')
saveplot("{}.14.heat.perphase.png".format(trackrel))
plotter.plot_heatmap(plot_type='per-minute')
saveplot("{}.15.heat.perminute.png".format(trackrel))
plotter.plot_trace()
saveplot("{}.20.plot.svg".format(trackrel))
@post('/analyze/')
def post_analyze():
trackrel = request.query.trackrel
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
raise(TrackParseError(trackrel, sys.exc_info()))
redirect("/view/{}".format(trackrel))
def _analyze_selection(trackrels):
for trackrel in trackrels:
try:
_do_analyze(trackrel)
except ValueError:
# often 'wrong number of columns' due to truncated file from killed experiment
pass # nothing to be done here; we're processing in the background
@post('/analyze_selection/')
def post_analyze_selection():
trackrels = request.query.trackrels.split('|')
p = multiprocessing.Process(target=_analyze_selection, args=(trackrels,))
p.start()
@get('/heatmaps/')
def get_heatmaps():
trackrels = request.query.tracks.split('|')
processors = []
# to verify all phases are equivalent
plength_map = defaultdict(list)
for trackrel in trackrels:
try:
p = process.TrackProcessor(str(config.TRACKDIR / trackrel), just_raw_data=True)
processors.append(p)
plength_map[tuple(phase.length for phase in p.phase_list)].append(trackrel)
except ValueError:
raise(TrackParseError(trackrel, sys.exc_info()))
if len(plength_map) > 1:
lengths_string = '\n'.join(
"{} in:\n {}\n".format(
str(lengths),
"\n ".join(trackrel for trackrel in plength_map[lengths])
)
for lengths in plength_map
)
return template('error', errormsg="The provided tracks do not all have the same phase lengths. Please select tracks that share an experimental setup.<br>Phase lengths found:<pre>{}</pre>".format(lengths_string))
# Save all images as binary to be included in the page directly
# Base64-encoded. (Saves having to write temporary data to filesystem.)
images_data = []
# use phases from an arbitrary track
plengths = plength_map.popitem()[0]
dataframes = [proc.df for proc in processors]
phase_start = 0
for i, length in enumerate(plengths):
phase_end = phase_start + length
x, y = heatmaps.get_timeslice(dataframes, phase_start*60, phase_end*60)
title = "Phase {} ({}:00-{}:00)".format(i+1, phase_start, phase_end)
ax = heatmaps.make_heatmap(x, y, title)
plot.format_axis(ax)
image_data = io.BytesIO()
plot.savefig(image_data, format='png')
images_data.append(
base64.b64encode(image_data.getvalue()).decode()
)
phase_start = phase_end
return template('view', imgdatas=images_data)
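# --- Illustrative sketch (not used by the routes above): the phase-window ---
# arithmetic from get_heatmaps in isolation. Lengths are in minutes;
# get_timeslice receives second offsets.
def _phase_windows(plengths):
    """Yield (index, start_s, end_s) for consecutive phases."""
    start = 0
    for i, length in enumerate(plengths):
        end = start + length
        yield i, start * 60, end * 60
        start = end
# e.g. list(_phase_windows((10, 20, 10)))
# -> [(0, 0, 600), (1, 600, 1800), (2, 1800, 2400)]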
sound.tag.js

/*
*
* @author Benoit Vinay
*
* [email protected]
* http://www.benoitvinay.com
*
*/
//////////////////////////////////////////////////////////////////////////////////////////
// Sound Object
//
// use in loader.sound.js
//////////////////////////////////////////////////////////////////////////////////////////
function SoundObject(url, tag) {
var _this = this;
var _tag = tag;
    var _valid = (_tag.canPlayType ? true : false); // check the tag is usable (e.g. Safari on Windows may lack HTML5 audio support)
var _playTimeout = undefined;
var _loopInterval = undefined;
var _duration = (_valid ? (_tag.duration * 1000) : -1);
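    // NOTE: tag.duration is only meaningful once the tag's metadata has
    // loaded; the loader (loader.sound.js) is assumed to construct
    // SoundObject after that point.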
var _playing = false;
this.url = url;
// play
var _play = function(time) {
_this.stop();
try {
_tag.currentTime = time || 0;
_tag.play();
}
catch(e) {
_error(e);
}
_playing = true;
}
this.play = function(time, complete) {
_play.apply(_this, [time]);
try {
_tag.loop = false;
}
catch(e) {
_error(e);
}
if(complete) {
_playTimeout = setTimeout(function() {
clearTimeout(_playTimeout);
complete.apply(_this);
}, _duration);
}
}
// loop
this.loop = function(time, complete) {
_play.apply(_this, [time]);
try {
_tag.loop = true;
}
catch(e) {
_error(e);
}
_loopInterval = setInterval(function() {
if(complete) {
complete.apply(_this);
}
}, _duration);
}
// stop
this.stop = function() {
_playing = false;
if(_playTimeout) {
clearTimeout(_playTimeout);
}
if(_loopInterval) {
clearInterval(_loopInterval);
}
try {
_tag.pause();
}
catch(e) {
_error(e);
}
}
// volume
this.mute = function(value) {
try {
_tag.muted = value;
}
catch(e) {
_error(e);
}
}
// error
var _error = function(e) {
        console.log("SoundObject.error:", e.message);
}
// valid
if(!_valid) {
_play = function(time) {};
this.play = function(time, complete) {};
this.loop = function(time, complete) {};
this.stop = function() {};
this.mute = function(value) {};
_error({ message: this.url + " is not valid." });
}
// if valid
// we add the tag to the holder
else {
if($("#audios-holder").length == 0) {
$("body").prepend("<div id='audios-holder'></div>");
}
$("#audios-holder").append(_tag);
_tag = document.getElementById(url);
}
}
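// --- Illustrative usage (assumes an <audio id="sfx-click"> tag already in the DOM): ---
// var sfx = new SoundObject("sfx-click", document.getElementById("sfx-click"));
// sfx.play(0, function () { console.log("finished:", this.url); });
// sfx.loop(0); sfx.mute(true); sfx.stop();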
events.rs

// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::Resizable(true));
let (window, events) = glfw.create_window(800, 600, "Hello, I am a window.", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.set_sticky_keys(true);
// Polling of events can be turned on and off by the specific event type
window.set_pos_polling(true);
window.set_all_polling(true);
window.set_size_polling(true);
window.set_close_polling(true);
window.set_refresh_polling(true);
window.set_focus_polling(true);
window.set_iconify_polling(true);
window.set_framebuffer_size_polling(true);
window.set_key_polling(true);
window.set_char_polling(true);
window.set_mouse_button_polling(true);
window.set_cursor_pos_polling(true);
window.set_cursor_enter_polling(true);
window.set_scroll_polling(true);
// Alternatively, all event types may be set to poll at once. Note that
// in this example, this call is redundant as all events have been set
// to poll in the above code.
window.set_all_polling(true);
window.make_current();
while !window.should_close() {
glfw.poll_events();
for event in glfw::flush_messages(&events) {
handle_window_event(&window, event);
}
}
}
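// Prints most events; the high-frequency ones (cursor, scroll, window pos)
// are folded into the window title instead so the console is not flooded.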
fn handle_window_event(window: &glfw::Window, (time, event): (f64, glfw::WindowEvent)) {
match event {
glfw::PosEvent(x, y) => window.set_title(format!("Time: {}, Window pos: ({}, {})", time, x, y).as_slice()),
glfw::SizeEvent(w, h) => window.set_title(format!("Time: {}, Window size: ({}, {})", time, w, h).as_slice()),
glfw::CloseEvent => println!("Time: {}, Window close requested.", time),
glfw::RefreshEvent => println!("Time: {}, Window refresh callback triggered.", time),
glfw::FocusEvent(true) => println!("Time: {}, Window focus gained.", time),
glfw::FocusEvent(false) => println!("Time: {}, Window focus lost.", time),
glfw::IconifyEvent(true) => println!("Time: {}, Window was minimised", time),
glfw::IconifyEvent(false) => println!("Time: {}, Window was maximised.", time),
glfw::FramebufferSizeEvent(w, h) => println!("Time: {}, Framebuffer size: ({}, {})", time, w, h),
glfw::CharEvent(character) => println!("Time: {}, Character: {}", time, character),
glfw::MouseButtonEvent(btn, action, mods) => println!("Time: {}, Button: {}, Action: {}, Modifiers: [{}]", time, glfw::ShowAliases(btn), action, mods),
glfw::CursorPosEvent(xpos, ypos) => window.set_title(format!("Time: {}, Cursor position: ({}, {})", time, xpos, ypos).as_slice()),
glfw::CursorEnterEvent(true) => println!("Time: {}, Cursor entered window.", time),
glfw::CursorEnterEvent(false) => println!("Time: {}, Cursor left window.", time),
glfw::ScrollEvent(x, y) => window.set_title(format!("Time: {}, Scroll offset: ({}, {})", time, x, y).as_slice()),
glfw::KeyEvent(key, scancode, action, mods) => {
println!("Time: {}, Key: {}, ScanCode: {}, Action: {}, Modifiers: [{}]", time, key, scancode, action, mods);
match (key, action) {
(glfw::KeyEscape, glfw::Press) => window.set_should_close(true),
(glfw::KeyR, glfw::Press) => {
// Resize should cause the window to "refresh"
let (window_width, window_height) = window.get_size();
window.set_size(window_width + 1, window_height);
window.set_size(window_width, window_height);
}
_ => {}
}
}
}
}
nb.py
import numpy as np
import util
from datetime import datetime
from scipy.stats import norm
import better_exceptions
from scipy.stats import multivariate_normal as mvn
class NaiveBayers(object):
def __init__(self):
# Gaussian deviation
self.gaussians = dict()
# Class priors
self.priors = dict()
def fit(self, X, Y, smoothing=10e-3):
N, D = X.shape
        # Labels are the digits 0-9
labels = set(Y)
for c in labels:
# get the current slice [0:number] where X in our class
current_x = X[Y == c]
# Compute mean and variance. Store in the dictionary by class key
self.gaussians[c] = {
'mean': current_x.mean(axis=0),
'var': np.var(current_x.T) + smoothing,
}
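            # np.var over the whole (transposed) slice yields one pooled scalar,
            # effectively an isotropic covariance per class; a per-feature
            # alternative would be current_x.var(axis=0) + smoothing.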
            # Prior probability: fraction of training samples in class c
self.priors[c] = float(len(Y[Y == c])) / len(Y)
def score(self, X, Y):
# Get the predictions
P = self.predict(X)
# Return mean of array
return np.mean(P == Y)
def predict(self, X):
        # N - samples, D - features
        N, D = X.shape
        # Hyperparameter (10)
        K = len(self.gaussians)
        # Fill by Zeros
        P = np.zeros((N, K))
        # for each class and mean/covariance
        for c, g in self.gaussians.items():
            mean, var = g['mean'], g['var']
            log = np.log(self.priors[c])
            # Calculate Log of the probability density function, all at once
            P[:, c] = mvn.logpdf(X, mean=mean, cov=var) + log
        return np.argmax(P, axis=1)
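# predict() scores each class with log P(x | c) + log P(c); the argmax over
# classes is therefore the maximum-a-posteriori (MAP) label.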
if __name__ == '__main__':
# Get train data
X, Y = util.get_data(40000)
Ntrain = len(Y) // 2
Xtest, Ytest = util.get_test_data(40000)
Xtrain, Ytrain = X[:Ntrain], Y[:Ntrain]
# Xtest, Ytest = X[Ntrain:], Y[Ntrain:]
model = NaiveBayers()
t0 = datetime.now()
model.fit(Xtrain, Ytrain)
print("Training time: ", (datetime.now() - t0))
t0 = datetime.now()
print("Training accuracy: ", model.score(Xtrain, Ytrain))
print("Time to compute train accuracy: ", (datetime.now() - t0), "Train size: ", len(Ytrain))
t0 = datetime.now()
print("Test accuracy: ", model.score(Xtest, Ytest))
print("Time to compute test accuracy: ", (datetime.now() - t0), "Test size: ", len(Ytest))
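# --- Illustrative smoke test on synthetic blobs (no util/ data needed). ---
# Names and sizes here are arbitrary; well-separated blobs should score ~1.0.
# rng = np.random.RandomState(0)
# Xs = np.vstack([rng.randn(100, 2) + 3, rng.randn(100, 2) - 3])
# Ys = np.array([0] * 100 + [1] * 100)
# m = NaiveBayers(); m.fit(Xs, Ys)
# print(m.score(Xs, Ys))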
amp-pinterest.js

/**
* Copyright 2015 The AMP HTML Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Shows a Pinterest widget.
* Examples:
* <code>
*
* <amp-pinterest height=20 width=40
* data-do="buttonPin"
* data-url="http://www.flickr.com/photos/kentbrew/6851755809/"
* data-media="http://farm8.staticflickr.com/7027/6851755809_df5b2051c9_z.jpg"
* data-description="Next stop: Pinterest">
* </amp-pinterest>
*
* <amp-pinterest width=245 height=330
* data-do="embedPin"
* data-url="https://www.pinterest.com/pin/99360735500167749/">
* </amp-pinterest>
*
* </code>
*/
import {CSS} from '../../../build/amp-pinterest-0.1.css';
import {FollowButton} from './follow-button';
import {PinWidget} from './pin-widget';
import {SaveButton} from './save-button';
import {Services} from '../../../src/services';
import {htmlFor} from '../../../src/static-template';
import {isLayoutSizeDefined} from '../../../src/layout';
import {user, userAssert} from '../../../src/log';
/**
* AMP Pinterest
* data-do
* - buttonPin: Save button
* - buttonFollow: User follow button
*/
class AmpPinterest extends AMP.BaseElement {
/** @param {!AmpElement} element */
constructor(element) {
super(element);
/** @private {string} */
this.type_ = '';
}
/**
* @param {boolean=} onLayout
* @override
*/
preconnectCallback(onLayout) {
    // preconnect to the widget API
Services.preconnectFor(this.win).url(
this.getAmpDoc(),
'https://widgets.pinterest.com',
onLayout
);
}
/** @override */
isLayoutSupported(layout) {
return isLayoutSizeDefined(layout);
}
/** @override */
buildCallback() {
this.type_ = userAssert(
this.element.getAttribute('data-do'),
'The data-do attribute is required for <amp-pinterest> %s',
this.element
);
}
/** @override */
layoutCallback() {
return this.render().then((node) => {
return this.element.appendChild(node);
});
}
/**
* Renders the component
* @return {*} TODO(#23582): Specify return type
*/
render() {
switch (this.type_) {
case 'embedPin':
return new PinWidget(this.element).render();
case 'buttonPin':
return new SaveButton(this.element).render();
case 'buttonFollow':
return new FollowButton(this.element).render();
}
return Promise.reject(user().createError('Invalid type: %s', this.type_));
}
/** @override */
createLoaderLogoCallback() {
    if (this.type_ != 'embedPin') {
      return {};
    }
const html = htmlFor(this.element);
return {
color: '#E60019',
content: html`
<svg viewBox="0 0 72 72">
<path
fill="currentColor"
d="M36,26c-5.52,0-9.99,4.47-9.99,9.99c0,4.24,2.63,7.85,6.35,9.31c-0.09-0.79-0.16-2.01,0.03-2.87
c0.18-0.78,1.17-4.97,1.17-4.97s-0.3-0.6-0.3-1.48c0-1.39,0.81-2.43,1.81-2.43c0.86,0,1.27,0.64,1.27,1.41
c0,0.86-0.54,2.14-0.83,3.33c-0.24,1,0.5,1.81,1.48,1.81c1.78,0,3.14-1.88,3.14-4.57c0-2.39-1.72-4.06-4.18-4.06
c-2.85,0-4.51,2.13-4.51,4.33c0,0.86,0.33,1.78,0.74,2.28c0.08,0.1,0.09,0.19,0.07,0.29c-0.07,0.31-0.25,1-0.28,1.13
c-0.04,0.18-0.15,0.22-0.34,0.13c-1.25-0.58-2.03-2.4-2.03-3.87c0-3.15,2.29-6.04,6.6-6.04c3.46,0,6.16,2.47,6.16,5.77
c0,3.45-2.17,6.22-5.18,6.22c-1.01,0-1.97-0.53-2.29-1.15c0,0-0.5,1.91-0.62,2.38c-0.22,0.87-0.83,1.96-1.24,2.62
c0.94,0.29,1.92,0.44,2.96,0.44c5.52,0,9.99-4.47,9.99-9.99C45.99,30.47,41.52,26,36,26z"
/>
</svg>
`,
};
}
}
AMP.extension('amp-pinterest', '0.1', (AMP) => {
AMP.registerElement('amp-pinterest', AmpPinterest, CSS);
});
levels.py

#-------------------------------------------------------------------------------
# Name: levels
# Purpose:
#
# Author: novirael
#
# Created: 17-04-2012
# Copyright: (c) novirael 2012
# Licence: <your licence>
#-------------------------------------------------------------------------------
#!/usr/bin/env python
# Import
import pygame

from sprites import Kafel
# Blocks
kafelek = [ "img/blue.png", "img/green.png", "img/red.png", "img/yellow.png",
"img/grey.png", "img/purple.png" ]
# Colors
black = (0,0,0)
white = (255,255,255)
blue = (0,100,200)
green = (0,200,0)
red = (255,0,0)
yellow = (235,235,0)
purple = (113,0,185)
# Variables
SW, SH = 900, 600
k_width, k_height = 45, 20
def draw_level(n):
    # Fresh sprite groups per call, so each level starts from a clean slate.
    blocks = pygame.sprite.Group()
    allsprites = pygame.sprite.Group()
    if n == 1:
        # The top of the block (y position)
        top = 80
        for i in range(15):
            block = Kafel(blue, kafelek[0], i*(k_width+2), top)
            blocks.add(block)
            allsprites.add(block)
    return allsprites, blocks
# --- Create blocks
"""
# Five rows of blocks
for row in range(2):
for column in range(0,20):
block = Kafel(blue, kafelek[0], column*(k_width+2), top)
blocks.add(block)
allsprites.add(block)
# Move the top of the next row down
top += k_height + 2
"""
app.module.ts

/**
* Copyright (c) 2017 Francois-Xavier Soubirou.
*
* This file is part of tam4.
*
* tam4 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* tam4 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with tam4. If not, see <http://www.gnu.org/licenses/>.
*/
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { NavbarComponent } from './navbar/navbar.component';
import { CoreModule } from './core/core.module';
import { MockModule } from './core/mock.module';
import { HomeModule } from './home/home.module';
import { PersonalModule } from './personal/personal.module';
import { SharedModule } from './shared/shared.module';
import { environment } from 'app/../environments/environment';
import { LoginComponent } from './login/login.component';
const importedModules: Array<any> = [
BrowserModule,
FormsModule,
HttpModule,
AppRoutingModule,
CoreModule,
HomeModule,
PersonalModule,
SharedModule,
];
if (environment.mock) {
  console.log('Enabling mocked services.');
  importedModules.push(MockModule);
}
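// NOTE: the push above mutates the same array reference that @NgModule's
// `imports` metadata captures below, and it runs before the decorator is
// evaluated, so the mock module is registered in time.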
@NgModule({
imports: importedModules,
declarations: [
AppComponent,
NavbarComponent,
LoginComponent,
],
exports: [
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
sales_order.py

# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import json
import frappe.utils
from frappe.utils import cstr, flt, getdate, comma_and, cint
from frappe import _
from frappe.model.utils import get_fetch_values
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.stock_balance import update_bin_qty, get_reserved_qty
from frappe.desk.notifications import clear_doctype_notifications
from frappe.contacts.doctype.address.address import get_company_address
from erpnext.controllers.selling_controller import SellingController
from erpnext.subscription.doctype.subscription.subscription import month_map, get_next_date
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class WarehouseRequired(frappe.ValidationError): pass
class SalesOrder(SellingController):
def __init__(self, arg1, arg2=None):
super(SalesOrder, self).__init__(arg1, arg2)
def validate(self):
super(SalesOrder, self).validate()
self.validate_order_type()
self.validate_delivery_date()
self.validate_proj_cust()
self.validate_po()
self.validate_uom_is_integer("stock_uom", "stock_qty")
self.validate_uom_is_integer("uom", "qty")
self.validate_for_items()
self.validate_warehouse()
self.validate_drop_ship()
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self)
self.validate_with_previous_doc()
self.set_status()
if not self.billing_status: self.billing_status = 'Not Billed'
if not self.delivery_status: self.delivery_status = 'Not Delivered'
def validate_po(self):
# validate p.o date v/s delivery date
if self.po_date:
for d in self.get("items"):
if d.delivery_date and getdate(self.po_date) > getdate(d.delivery_date):
frappe.throw(_("Row #{0}: Expected Delivery Date cannot be before Purchase Order Date")
.format(d.idx))
if self.po_no and self.customer:
so = frappe.db.sql("select name from `tabSales Order` \
where ifnull(po_no, '') = %s and name != %s and docstatus < 2\
and customer = %s", (self.po_no, self.name, self.customer))
if so and so[0][0] and not cint(frappe.db.get_single_value("Selling Settings",
"allow_against_multiple_purchase_orders")):
frappe.msgprint(_("Warning: Sales Order {0} already exists against Customer's Purchase Order {1}").format(so[0][0], self.po_no))
def validate_for_items(self):
check_list = []
for d in self.get('items'):
check_list.append(cstr(d.item_code))
# used for production plan
d.transaction_date = self.transaction_date
tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \
where item_code = %s and warehouse = %s", (d.item_code, d.warehouse))
d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0
# check for same entry multiple times
unique_chk_list = set(check_list)
if len(unique_chk_list) != len(check_list) and \
not cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")):
frappe.msgprint(_("Same item has been entered multiple times"),
title=_("Warning"), indicator='orange')
def product_bundle_has_stock_item(self, product_bundle):
"""Returns true if product bundle has stock item"""
ret = len(frappe.db.sql("""select i.name from tabItem i, `tabProduct Bundle Item` pbi
where pbi.parent = %s and pbi.item_code = i.name and i.is_stock_item = 1""", product_bundle))
return ret
def validate_sales_mntc_quotation(self):
for d in self.get('items'):
if d.prevdoc_docname:
res = frappe.db.sql("select name from `tabQuotation` where name=%s and order_type = %s",
(d.prevdoc_docname, self.order_type))
if not res:
frappe.msgprint(_("Quotation {0} not of type {1}")
.format(d.prevdoc_docname, self.order_type))
def validate_order_type(self):
super(SalesOrder, self).validate_order_type()
def validate_delivery_date(self):
if self.order_type == 'Sales':
if not self.delivery_date:
self.delivery_date = max([d.delivery_date for d in self.get("items")])
if self.delivery_date:
for d in self.get("items"):
if not d.delivery_date:
d.delivery_date = self.delivery_date
if getdate(self.transaction_date) > getdate(d.delivery_date):
frappe.msgprint(_("Expected Delivery Date should be after Sales Order Date"),
indicator='orange', title=_('Warning'))
else:
frappe.throw(_("Please enter Delivery Date"))
self.validate_sales_mntc_quotation()
def validate_proj_cust(self):
if self.project and self.customer_name:
res = frappe.db.sql("""select name from `tabProject` where name = %s
and (customer = %s or ifnull(customer,'')='')""",
(self.project, self.customer))
if not res:
frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project))
def validate_warehouse(self):
super(SalesOrder, self).validate_warehouse()
for d in self.get("items"):
if (frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 or
(self.has_product_bundle(d.item_code) and self.product_bundle_has_stock_item(d.item_code))) \
and not d.warehouse and not cint(d.delivered_by_supplier):
frappe.throw(_("Delivery warehouse required for stock item {0}").format(d.item_code),
WarehouseRequired)
def validate_with_previous_doc(self):
super(SalesOrder, self).validate_with_previous_doc({
"Quotation": {
"ref_dn_field": "prevdoc_docname",
"compare_fields": [["company", "="], ["currency", "="]]
}
})
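	# Push the given status to the Opportunity linked through the source Quotation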
def update_enquiry_status(self, prevdoc, flag):
enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc)
if enq:
			frappe.db.sql("update `tabOpportunity` set status = %s where name=%s", (flag, enq[0][0]))
def update_prevdoc_status(self, flag):
for quotation in list(set([d.prevdoc_docname for d in self.get("items")])):
if quotation:
doc = frappe.get_doc("Quotation", quotation)
if doc.docstatus==2:
frappe.throw(_("Quotation {0} is cancelled").format(quotation))
doc.set_status(update=True)
doc.update_opportunity()
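	# Every drop-ship row must name the supplier who will deliver it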
def validate_drop_ship(self):
for d in self.get('items'):
if d.delivered_by_supplier and not d.supplier:
frappe.throw(_("Row #{0}: Set Supplier for item {1}").format(d.idx, d.item_code))
def on_submit(self):
self.check_credit_limit()
self.update_reserved_qty()
frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.company, self.base_grand_total, self)
self.update_project()
self.update_prevdoc_status('submit')
def on_cancel(self):
# Cannot cancel closed SO
if self.status == 'Closed':
frappe.throw(_("Closed order cannot be cancelled. Unclose to cancel."))
self.check_nextdoc_docstatus()
self.update_reserved_qty()
self.update_project()
self.update_prevdoc_status('cancel')
frappe.db.set(self, 'status', 'Cancelled')
def update_project(self):
project_list = []
if self.project:
project = frappe.get_doc("Project", self.project)
project.flags.dont_sync_tasks = True
project.update_sales_costing()
project.save()
project_list.append(self.project)
def check_credit_limit(self):
from erpnext.selling.doctype.customer.customer import check_credit_limit
check_credit_limit(self.customer, self.company)
def check_nextdoc_docstatus(self):
# Checks Delivery Note
submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2
where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name)
if submit_dn:
frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn)))
# Checks Sales Invoice
submit_rv = frappe.db.sql_list("""select t1.name
from `tabSales Invoice` t1,`tabSales Invoice Item` t2
where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""",
self.name)
if submit_rv:
frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv)))
#check maintenance schedule
submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1,
`tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.sales_order = %s and t1.docstatus = 1""", self.name)
if submit_ms:
frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms)))
# check maintenance visit
submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""",self.name)
if submit_mv:
frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv)))
# check production order
pro_order = frappe.db.sql_list("""select name from `tabProduction Order`
where sales_order = %s and docstatus = 1""", self.name)
if pro_order:
frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order)))
def check_modified_date(self):
mod_db = frappe.db.get_value("Sales Order", self.name, "modified")
date_diff = frappe.db.sql("select TIMEDIFF('%s', '%s')" %
( mod_db, cstr(self.modified)))
if date_diff and date_diff[0][0]:
frappe.throw(_("{0} {1} has been modified. Please refresh.").format(self.doctype, self.name))
def update_status(self, status):
self.check_modified_date()
self.set_status(update=True, status=status)
self.update_reserved_qty()
self.notify_update()
clear_doctype_notifications(self)
def update_reserved_qty(self, so_item_rows=None):
"""update requested qty (before ordered_qty is updated)"""
item_wh_list = []
def _valid_for_reserve(item_code, warehouse):
if item_code and warehouse and [item_code, warehouse] not in item_wh_list \
and frappe.db.get_value("Item", item_code, "is_stock_item"):
item_wh_list.append([item_code, warehouse])
for d in self.get("items"):
if (not so_item_rows or d.name in so_item_rows) and not d.delivered_by_supplier:
if self.has_product_bundle(d.item_code):
for p in self.get("packed_items"):
if p.parent_detail_docname == d.name and p.parent_item == d.item_code:
_valid_for_reserve(p.item_code, p.warehouse)
else:
_valid_for_reserve(d.item_code, d.warehouse)
for item_code, warehouse in item_wh_list:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
def on_update(self):
pass
def before_update_after_submit(self):
self.validate_po()
self.validate_drop_ship()
self.validate_supplier_after_submit()
def validate_supplier_after_submit(self):
"""Check that supplier is the same after submit if PO is already made"""
exc_list = []
for item in self.items:
if item.supplier:
supplier = frappe.db.get_value("Sales Order Item", {"parent": self.name, "item_code": item.item_code},
"supplier")
if item.ordered_qty > 0.0 and item.supplier != supplier:
exc_list.append(_("Row #{0}: Not allowed to change Supplier as Purchase Order already exists").format(item.idx))
if exc_list:
frappe.throw('\n'.join(exc_list))
def update_delivery_status(self):
"""Update delivery status from Purchase Order for drop shipping"""
tot_qty, delivered_qty = 0.0, 0.0
for item in self.items:
if item.delivered_by_supplier:
item_delivered_qty = frappe.db.sql("""select sum(qty)
from `tabPurchase Order Item` poi, `tabPurchase Order` po
where poi.sales_order_item = %s
and poi.item_code = %s
and poi.parent = po.name
and po.docstatus = 1
and po.status = 'Delivered'""", (item.name, item.item_code))
item_delivered_qty = item_delivered_qty[0][0] if item_delivered_qty else 0
item.db_set("delivered_qty", flt(item_delivered_qty), update_modified=False)
delivered_qty += item.delivered_qty
tot_qty += item.qty
self.db_set("per_delivered", flt(delivered_qty/tot_qty) * 100,
update_modified=False)
def set_indicator(self):
"""Set indicator for portal"""
if self.per_billed < 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Not Paid and Not Delivered")
elif self.per_billed == 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Paid and Not Delivered")
else:
self.indicator_color = "green"
self.indicator_title = _("Paid")
def get_production_order_items(self):
		'''Return items with a BOM that do not already have a linked Production Order'''
items = []
for table in [self.items, self.packed_items]:
for i in table:
bom = get_default_bom_item(i.item_code)
if bom:
stock_qty = i.qty if i.doctype == 'Packed Item' else i.stock_qty
items.append(dict(
item_code= i.item_code,
bom = bom,
warehouse = i.warehouse,
pending_qty= stock_qty - flt(frappe.db.sql('''select sum(qty) from `tabProduction Order`
where production_item=%s and sales_order=%s''', (i.item_code, self.name))[0][0])
))
return items
def on_recurring(self, reference_doc, subscription_doc):
mcount = month_map[subscription_doc.frequency]
self.set("delivery_date", get_next_date(reference_doc.delivery_date, mcount,
cint(subscription_doc.repeat_on_day)))
for d in self.get("items"):
reference_delivery_date = frappe.db.get_value("Sales Order Item",
{"parent": reference_doc.name, "item_code": d.item_code, "idx": d.idx}, "delivery_date")
d.set("delivery_date",
get_next_date(reference_delivery_date, mcount, cint(subscription_doc.repeat_on_day)))
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context.update({
'show_sidebar': True,
'show_search': True,
'no_breadcrumbs': True,
'title': _('Orders'),
})
return list_context
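# Whitelisted: close or reopen submitted Sales Orders in bulk from the list view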
@frappe.whitelist()
def close_or_unclose_sales_orders(names, status):
if not frappe.has_permission("Sales Order", "write"):
frappe.throw(_("Not permitted"), frappe.PermissionError)
names = json.loads(names)
for name in names:
so = frappe.get_doc("Sales Order", name)
if so.docstatus == 1:
if status == "Closed":
if so.status not in ("Cancelled", "Closed") and (so.per_delivered < 100 or so.per_billed < 100):
so.update_status(status)
else:
if so.status == "Closed":
so.update_status('Draft')
frappe.local.message_log = []
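# Map a submitted Sales Order to a draft Material Request (type Purchase)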
@frappe.whitelist()
def make_material_request(source_name, target_doc=None):
def postprocess(source, doc):
doc.material_request_type = "Purchase"
def update_item(source, target, source_parent):
target.project = source_parent.project
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Material Request",
"validation": {
"docstatus": ["=", 1]
}
},
"Packed Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom"
},
"postprocess": update_item
},
"Sales Order Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom",
"stock_qty": "qty"
},
"condition": lambda doc: not frappe.db.exists('Product Bundle', doc.item_code),
"postprocess": update_item
}
}, target_doc, postprocess)
return doc
@frappe.whitelist()
def make_project(source_name, target_doc=None):
def postprocess(source, doc):
doc.project_type = "External"
doc.project_name = source.name
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Project",
"validation": {
"docstatus": ["=", 1]
},
"field_map":{
"name" : "sales_order",
"base_grand_total" : "estimated_costing",
}
},
"Sales Order Item": {
"doctype": "Project Task",
"field_map": {
"description": "title",
},
}
}, target_doc, postprocess)
return doc
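# Map a submitted Sales Order to a draft Delivery Note, carrying over undelivered qty per row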
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
def set_missing_values(source, target):
if source.po_no:
if target.po_no:
target_po_no = target.po_no.split(", ")
target_po_no.append(source.po_no)
target.po_no = ", ".join(list(set(target_po_no))) if len(target_po_no) > 1 else target_po_no[0]
else:
target.po_no = source.po_no
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Delivery Note", 'company_address', target.company_address))
def update_item(source, target, source_parent):
target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate)
target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate)
target.qty = flt(source.qty) - flt(source.delivered_qty)
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
target_doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Delivery Note",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Delivery Note Item",
"field_map": {
"rate": "rate",
"name": "so_detail",
"parent": "against_sales_order",
},
"postprocess": update_item,
"condition": lambda doc: abs(doc.delivered_qty) < abs(doc.qty) and doc.delivered_by_supplier!=1
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, set_missing_values)
return target_doc
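# Map a submitted Sales Order to a draft Sales Invoice for the unbilled balance and pull advance payments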
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None, ignore_permissions=False):
def postprocess(source, target):
set_missing_values(source, target)
		# Pull the advance-paid Journal Entries into the Sales Invoice Advances table
target.set_advances()
def set_missing_values(source, target):
target.is_pos = 0
target.ignore_pricing_rule = 1
target.flags.ignore_permissions = True
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Sales Invoice", 'company_address', target.company_address))
def update_item(source, target, source_parent):
target.amount = flt(source.amount) - flt(source.billed_amt)
target.base_amount = target.amount * flt(source_parent.conversion_rate)
target.qty = target.amount / flt(source.rate) if (source.rate and source.billed_amt) else source.qty
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Sales Invoice",
"field_map": {
"party_account_currency": "party_account_currency"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Sales Invoice Item",
"field_map": {
"name": "so_detail",
"parent": "sales_order",
},
"postprocess": update_item,
"condition": lambda doc: doc.qty and (doc.base_amount==0 or abs(doc.billed_amt) < abs(doc.amount))
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, postprocess, ignore_permissions=ignore_permissions)
return doclist
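# Create a Maintenance Schedule only if none is already submitted against this Sales Order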
@frappe.whitelist()
def make_maintenance_schedule(source_name, target_doc=None):
maint_schedule = frappe.db.sql("""select t1.name
from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.sales_order=%s and t1.docstatus=1""", source_name)
if not maint_schedule:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Schedule",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Schedule Item",
"field_map": {
"parent": "sales_order"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
visit = frappe.db.sql("""select t1.name
from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname=%s
and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)
if not visit:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Visit",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Visit Purpose",
"field_map": {
"parent": "prevdoc_docname",
"parenttype": "prevdoc_doctype"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def get_events(start, end, filters=None):
"""Returns events for Gantt / Calendar view rendering.
:param start: Start date-time.
:param end: End date-time.
:param filters: Filters (JSON).
"""
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions("Sales Order", filters)
data = frappe.db.sql("""
select
`tabSales Order`.name, `tabSales Order`.customer_name, `tabSales Order`.status,
`tabSales Order`.delivery_status, `tabSales Order`.billing_status,
`tabSales Order Item`.delivery_date
from
`tabSales Order`, `tabSales Order Item`
where `tabSales Order`.name = `tabSales Order Item`.parent
and (ifnull(`tabSales Order Item`.delivery_date, '0000-00-00')!= '0000-00-00') \
and (`tabSales Order Item`.delivery_date between %(start)s and %(end)s)
and `tabSales Order`.docstatus < 2
{conditions}
""".format(conditions=conditions), {
"start": start,
"end": end
}, as_dict=True, update={"allDay": 0})
return data
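# Build a Purchase Order for one supplier from the rows marked as delivered by that supplier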
@frappe.whitelist()
def make_purchase_order_for_drop_shipment(source_name, for_supplier, target_doc=None):
def set_missing_values(source, target):
target.supplier = for_supplier
target.apply_discount_on = ""
target.additional_discount_percentage = 0.0
target.discount_amount = 0.0
default_price_list = frappe.get_value("Supplier", for_supplier, "default_price_list")
if default_price_list:
target.buying_price_list = default_price_list
		if any(item.delivered_by_supplier == 1 for item in source.items):
if source.shipping_address_name:
target.shipping_address = source.shipping_address_name
target.shipping_address_display = source.shipping_address
else:
target.shipping_address = source.customer_address
target.shipping_address_display = source.address_display
target.customer_contact_person = source.contact_person
target.customer_contact_display = source.contact_display
target.customer_contact_mobile = source.contact_mobile
target.customer_contact_email = source.contact_email
else:
target.customer = ""
target.customer_name = ""
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source, target, source_parent):
target.schedule_date = source.delivery_date
target.qty = flt(source.qty) - flt(source.ordered_qty)
target.stock_qty = (flt(source.qty) - flt(source.ordered_qty)) * flt(source.conversion_factor)
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Purchase Order",
"field_no_map": [
"address_display",
"contact_display",
"contact_mobile",
"contact_email",
"contact_person"
],
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Purchase Order Item",
"field_map": [
["name", "sales_order_item"],
["parent", "sales_order"],
["stock_uom", "stock_uom"],
["uom", "uom"],
["conversion_factor", "conversion_factor"],
["delivery_date", "schedule_date"]
],
"field_no_map": [
"rate",
"price_list_rate"
],
"postprocess": update_item,
"condition": lambda doc: doc.ordered_qty < doc.qty and doc.supplier == for_supplier
}
}, target_doc, set_missing_values)
return doclist
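# Link-field search query: only suppliers referenced by this Sales Order's items are offered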
@frappe.whitelist()
def get_supplier(doctype, txt, searchfield, start, page_len, filters):
supp_master_name = frappe.defaults.get_user_default("supp_master_name")
if supp_master_name == "Supplier Name":
fields = ["name", "supplier_type"]
else:
fields = ["name", "supplier_name", "supplier_type"]
fields = ", ".join(fields)
return frappe.db.sql("""select {field} from `tabSupplier`
where docstatus < 2
and ({key} like %(txt)s
or supplier_name like %(txt)s)
and name in (select supplier from `tabSales Order Item` where parent = %(parent)s)
order by
if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999),
if(locate(%(_txt)s, supplier_name), locate(%(_txt)s, supplier_name), 99999),
name, supplier_name
limit %(start)s, %(page_len)s """.format(**{
'field': fields,
'key': frappe.db.escape(searchfield)
}), {
'txt': "%%%s%%" % txt,
'_txt': txt.replace("%", ""),
'start': start,
'page_len': page_len,
'parent': filters.get('parent')
})
@frappe.whitelist()
def make_production_orders(items, sales_order, company, project=None):
'''Make Production Orders against the given Sales Order for the given `items`'''
items = json.loads(items).get('items')
out = []
for i in items:
production_order = frappe.get_doc(dict(
doctype='Production Order',
production_item=i['item_code'],
bom_no=i['bom'],
qty=i['pending_qty'],
company=company,
sales_order=sales_order,
project=project,
fg_warehouse=i['warehouse']
)).insert()
production_order.set_production_order_operations()
production_order.save()
out.append(production_order)
return [p.name for p in out]
@frappe.whitelist()
def update_status(status, name):
so = frappe.get_doc("Sales Order", name)
so.update_status(status)
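# Return the name of the item's default active BOM, if one exists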
def get_default_bom_item(item_code):
bom = frappe.get_all('BOM', dict(item=item_code, is_active=True),
order_by='is_default desc')
bom = bom[0].name if bom else None
return bom
"""update requested qty (before ordered_qty is updated)"""
item_wh_list = []
def _valid_for_reserve(item_code, warehouse):
if item_code and warehouse and [item_code, warehouse] not in item_wh_list \
and frappe.db.get_value("Item", item_code, "is_stock_item"):
item_wh_list.append([item_code, warehouse])
for d in self.get("items"):
if (not so_item_rows or d.name in so_item_rows) and not d.delivered_by_supplier:
if self.has_product_bundle(d.item_code):
for p in self.get("packed_items"):
if p.parent_detail_docname == d.name and p.parent_item == d.item_code:
_valid_for_reserve(p.item_code, p.warehouse)
else:
_valid_for_reserve(d.item_code, d.warehouse)
for item_code, warehouse in item_wh_list:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
def on_update(self):
pass
def before_update_after_submit(self):
self.validate_po()
self.validate_drop_ship()
self.validate_supplier_after_submit()
def validate_supplier_after_submit(self):
"""Check that supplier is the same after submit if PO is already made"""
exc_list = []
for item in self.items:
if item.supplier:
supplier = frappe.db.get_value("Sales Order Item", {"parent": self.name, "item_code": item.item_code},
"supplier")
if item.ordered_qty > 0.0 and item.supplier != supplier:
exc_list.append(_("Row #{0}: Not allowed to change Supplier as Purchase Order already exists").format(item.idx))
if exc_list:
frappe.throw('\n'.join(exc_list))
def update_delivery_status(self):
"""Update delivery status from Purchase Order for drop shipping"""
tot_qty, delivered_qty = 0.0, 0.0
for item in self.items:
if item.delivered_by_supplier:
item_delivered_qty = frappe.db.sql("""select sum(qty)
from `tabPurchase Order Item` poi, `tabPurchase Order` po
where poi.sales_order_item = %s
and poi.item_code = %s
and poi.parent = po.name
and po.docstatus = 1
and po.status = 'Delivered'""", (item.name, item.item_code))
item_delivered_qty = item_delivered_qty[0][0] if item_delivered_qty else 0
item.db_set("delivered_qty", flt(item_delivered_qty), update_modified=False)
delivered_qty += item.delivered_qty
tot_qty += item.qty
self.db_set("per_delivered", flt(delivered_qty/tot_qty) * 100,
update_modified=False)
def set_indicator(self):
"""Set indicator for portal"""
if self.per_billed < 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Not Paid and Not Delivered")
elif self.per_billed == 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Paid and Not Delivered")
else:
self.indicator_color = "green"
self.indicator_title = _("Paid")
def get_production_order_items(self):
'''Returns items with BOM that already do not have a linked production order'''
items = []
for table in [self.items, self.packed_items]:
for i in table:
bom = get_default_bom_item(i.item_code)
if bom:
stock_qty = i.qty if i.doctype == 'Packed Item' else i.stock_qty
items.append(dict(
item_code= i.item_code,
bom = bom,
warehouse = i.warehouse,
pending_qty= stock_qty - flt(frappe.db.sql('''select sum(qty) from `tabProduction Order`
where production_item=%s and sales_order=%s''', (i.item_code, self.name))[0][0])
))
return items
def on_recurring(self, reference_doc, subscription_doc):
mcount = month_map[subscription_doc.frequency]
self.set("delivery_date", get_next_date(reference_doc.delivery_date, mcount,
cint(subscription_doc.repeat_on_day)))
for d in self.get("items"):
reference_delivery_date = frappe.db.get_value("Sales Order Item",
{"parent": reference_doc.name, "item_code": d.item_code, "idx": d.idx}, "delivery_date")
d.set("delivery_date",
get_next_date(reference_delivery_date, mcount, cint(subscription_doc.repeat_on_day)))
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context.update({
'show_sidebar': True,
'show_search': True,
'no_breadcrumbs': True,
'title': _('Orders'),
})
return list_context
@frappe.whitelist()
def close_or_unclose_sales_orders(names, status):
if not frappe.has_permission("Sales Order", "write"):
frappe.throw(_("Not permitted"), frappe.PermissionError)
names = json.loads(names)
for name in names:
so = frappe.get_doc("Sales Order", name)
if so.docstatus == 1:
if status == "Closed":
if so.status not in ("Cancelled", "Closed") and (so.per_delivered < 100 or so.per_billed < 100):
so.update_status(status)
else:
if so.status == "Closed":
so.update_status('Draft')
frappe.local.message_log = []
@frappe.whitelist()
def make_material_request(source_name, target_doc=None):
def postprocess(source, doc):
doc.material_request_type = "Purchase"
def update_item(source, target, source_parent):
target.project = source_parent.project
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Material Request",
"validation": {
"docstatus": ["=", 1]
}
},
"Packed Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom"
},
"postprocess": update_item
},
"Sales Order Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom",
"stock_qty": "qty"
},
"condition": lambda doc: not frappe.db.exists('Product Bundle', doc.item_code),
"postprocess": update_item
}
}, target_doc, postprocess)
return doc
@frappe.whitelist()
def make_project(source_name, target_doc=None):
def postprocess(source, doc):
doc.project_type = "External"
doc.project_name = source.name
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Project",
"validation": {
"docstatus": ["=", 1]
},
"field_map":{
"name" : "sales_order",
"base_grand_total" : "estimated_costing",
}
},
"Sales Order Item": {
"doctype": "Project Task",
"field_map": {
"description": "title",
},
}
}, target_doc, postprocess)
return doc
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
def set_missing_values(source, target):
if source.po_no:
if target.po_no:
target_po_no = target.po_no.split(", ")
target_po_no.append(source.po_no)
target.po_no = ", ".join(list(set(target_po_no))) if len(target_po_no) > 1 else target_po_no[0]
else:
target.po_no = source.po_no
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Delivery Note", 'company_address', target.company_address))
def update_item(source, target, source_parent):
target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate)
target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate)
target.qty = flt(source.qty) - flt(source.delivered_qty) |
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
target_doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Delivery Note",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Delivery Note Item",
"field_map": {
"rate": "rate",
"name": "so_detail",
"parent": "against_sales_order",
},
"postprocess": update_item,
"condition": lambda doc: abs(doc.delivered_qty) < abs(doc.qty) and doc.delivered_by_supplier!=1
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, set_missing_values)
return target_doc
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None, ignore_permissions=False):
def postprocess(source, target):
set_missing_values(source, target)
#Get the advance paid Journal Entries in Sales Invoice Advance
target.set_advances()
def set_missing_values(source, target):
target.is_pos = 0
target.ignore_pricing_rule = 1
target.flags.ignore_permissions = True
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Sales Invoice", 'company_address', target.company_address))
def update_item(source, target, source_parent):
target.amount = flt(source.amount) - flt(source.billed_amt)
target.base_amount = target.amount * flt(source_parent.conversion_rate)
target.qty = target.amount / flt(source.rate) if (source.rate and source.billed_amt) else source.qty
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Sales Invoice",
"field_map": {
"party_account_currency": "party_account_currency"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Sales Invoice Item",
"field_map": {
"name": "so_detail",
"parent": "sales_order",
},
"postprocess": update_item,
"condition": lambda doc: doc.qty and (doc.base_amount==0 or abs(doc.billed_amt) < abs(doc.amount))
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, postprocess, ignore_permissions=ignore_permissions)
return doclist
@frappe.whitelist()
def make_maintenance_schedule(source_name, target_doc=None):
maint_schedule = frappe.db.sql("""select t1.name
from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.sales_order=%s and t1.docstatus=1""", source_name)
if not maint_schedule:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Schedule",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Schedule Item",
"field_map": {
"parent": "sales_order"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
visit = frappe.db.sql("""select t1.name
from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname=%s
and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)
if not visit:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Visit",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Visit Purpose",
"field_map": {
"parent": "prevdoc_docname",
"parenttype": "prevdoc_doctype"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def get_events(start, end, filters=None):
"""Returns events for Gantt / Calendar view rendering.
:param start: Start date-time.
:param end: End date-time.
:param filters: Filters (JSON).
"""
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions("Sales Order", filters)
data = frappe.db.sql("""
select
`tabSales Order`.name, `tabSales Order`.customer_name, `tabSales Order`.status,
`tabSales Order`.delivery_status, `tabSales Order`.billing_status,
`tabSales Order Item`.delivery_date
from
`tabSales Order`, `tabSales Order Item`
where `tabSales Order`.name = `tabSales Order Item`.parent
and (ifnull(`tabSales Order Item`.delivery_date, '0000-00-00')!= '0000-00-00') \
and (`tabSales Order Item`.delivery_date between %(start)s and %(end)s)
and `tabSales Order`.docstatus < 2
{conditions}
""".format(conditions=conditions), {
"start": start,
"end": end
}, as_dict=True, update={"allDay": 0})
return data
@frappe.whitelist()
def make_purchase_order_for_drop_shipment(source_name, for_supplier, target_doc=None):
def set_missing_values(source, target):
target.supplier = for_supplier
target.apply_discount_on = ""
target.additional_discount_percentage = 0.0
target.discount_amount = 0.0
default_price_list = frappe.get_value("Supplier", for_supplier, "default_price_list")
if default_price_list:
target.buying_price_list = default_price_list
if any( item.delivered_by_supplier==1 for item in source.items):
if source.shipping_address_name:
target.shipping_address = source.shipping_address_name
target.shipping_address_display = source.shipping_address
else:
target.shipping_address = source.customer_address
target.shipping_address_display = source.address_display
target.customer_contact_person = source.contact_person
target.customer_contact_display = source.contact_display
target.customer_contact_mobile = source.contact_mobile
target.customer_contact_email = source.contact_email
else:
target.customer = ""
target.customer_name = ""
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source, target, source_parent):
target.schedule_date = source.delivery_date
target.qty = flt(source.qty) - flt(source.ordered_qty)
target.stock_qty = (flt(source.qty) - flt(source.ordered_qty)) * flt(source.conversion_factor)
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Purchase Order",
"field_no_map": [
"address_display",
"contact_display",
"contact_mobile",
"contact_email",
"contact_person"
],
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Purchase Order Item",
"field_map": [
["name", "sales_order_item"],
["parent", "sales_order"],
["stock_uom", "stock_uom"],
["uom", "uom"],
["conversion_factor", "conversion_factor"],
["delivery_date", "schedule_date"]
],
"field_no_map": [
"rate",
"price_list_rate"
],
"postprocess": update_item,
"condition": lambda doc: doc.ordered_qty < doc.qty and doc.supplier == for_supplier
}
}, target_doc, set_missing_values)
return doclist
@frappe.whitelist()
def get_supplier(doctype, txt, searchfield, start, page_len, filters):
supp_master_name = frappe.defaults.get_user_default("supp_master_name")
if supp_master_name == "Supplier Name":
fields = ["name", "supplier_type"]
else:
fields = ["name", "supplier_name", "supplier_type"]
fields = ", ".join(fields)
return frappe.db.sql("""select {field} from `tabSupplier`
where docstatus < 2
and ({key} like %(txt)s
or supplier_name like %(txt)s)
and name in (select supplier from `tabSales Order Item` where parent = %(parent)s)
order by
if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999),
if(locate(%(_txt)s, supplier_name), locate(%(_txt)s, supplier_name), 99999),
name, supplier_name
limit %(start)s, %(page_len)s """.format(**{
'field': fields,
'key': frappe.db.escape(searchfield)
}), {
'txt': "%%%s%%" % txt,
'_txt': txt.replace("%", ""),
'start': start,
'page_len': page_len,
'parent': filters.get('parent')
})
@frappe.whitelist()
def make_production_orders(items, sales_order, company, project=None):
'''Make Production Orders against the given Sales Order for the given `items`'''
items = json.loads(items).get('items')
out = []
for i in items:
production_order = frappe.get_doc(dict(
doctype='Production Order',
production_item=i['item_code'],
bom_no=i['bom'],
qty=i['pending_qty'],
company=company,
sales_order=sales_order,
project=project,
fg_warehouse=i['warehouse']
)).insert()
production_order.set_production_order_operations()
production_order.save()
out.append(production_order)
return [p.name for p in out]
@frappe.whitelist()
def update_status(status, name):
so = frappe.get_doc("Sales Order", name)
so.update_status(status)
def get_default_bom_item(item_code):
bom = frappe.get_all('BOM', dict(item=item_code, is_active=True),
order_by='is_default desc')
bom = bom[0].name if bom else None
return bom | random_line_split |
|
# sales_order.py
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import json
import frappe.utils
from frappe.utils import cstr, flt, getdate, comma_and, cint
from frappe import _
from frappe.model.utils import get_fetch_values
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.stock_balance import update_bin_qty, get_reserved_qty
from frappe.desk.notifications import clear_doctype_notifications
from frappe.contacts.doctype.address.address import get_company_address
from erpnext.controllers.selling_controller import SellingController
from erpnext.subscription.doctype.subscription.subscription import month_map, get_next_date
form_grid_templates = {
"items": "templates/form_grid/item_grid.html"
}
class WarehouseRequired(frappe.ValidationError): pass
class SalesOrder(SellingController):
def __init__(self, arg1, arg2=None):
super(SalesOrder, self).__init__(arg1, arg2)
def validate(self):
super(SalesOrder, self).validate()
self.validate_order_type()
self.validate_delivery_date()
self.validate_proj_cust()
self.validate_po()
self.validate_uom_is_integer("stock_uom", "stock_qty")
self.validate_uom_is_integer("uom", "qty")
self.validate_for_items()
self.validate_warehouse()
self.validate_drop_ship()
from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
make_packing_list(self)
self.validate_with_previous_doc()
self.set_status()
if not self.billing_status: self.billing_status = 'Not Billed'
if not self.delivery_status: self.delivery_status = 'Not Delivered'
def validate_po(self):
		# Validate the customer's PO date against item delivery dates, and warn on duplicate PO numbers
if self.po_date:
for d in self.get("items"):
if d.delivery_date and getdate(self.po_date) > getdate(d.delivery_date):
frappe.throw(_("Row #{0}: Expected Delivery Date cannot be before Purchase Order Date")
.format(d.idx))
if self.po_no and self.customer:
so = frappe.db.sql("select name from `tabSales Order` \
where ifnull(po_no, '') = %s and name != %s and docstatus < 2\
and customer = %s", (self.po_no, self.name, self.customer))
if so and so[0][0] and not cint(frappe.db.get_single_value("Selling Settings",
"allow_against_multiple_purchase_orders")):
frappe.msgprint(_("Warning: Sales Order {0} already exists against Customer's Purchase Order {1}").format(so[0][0], self.po_no))
def validate_for_items(self):
check_list = []
for d in self.get('items'):
check_list.append(cstr(d.item_code))
# used for production plan
d.transaction_date = self.transaction_date
tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \
where item_code = %s and warehouse = %s", (d.item_code, d.warehouse))
d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0
# check for same entry multiple times
unique_chk_list = set(check_list)
if len(unique_chk_list) != len(check_list) and \
not cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")):
frappe.msgprint(_("Same item has been entered multiple times"),
title=_("Warning"), indicator='orange')
def product_bundle_has_stock_item(self, product_bundle):
"""Returns true if product bundle has stock item"""
ret = len(frappe.db.sql("""select i.name from tabItem i, `tabProduct Bundle Item` pbi
where pbi.parent = %s and pbi.item_code = i.name and i.is_stock_item = 1""", product_bundle))
return ret
def validate_sales_mntc_quotation(self):
for d in self.get('items'):
if d.prevdoc_docname:
res = frappe.db.sql("select name from `tabQuotation` where name=%s and order_type = %s",
(d.prevdoc_docname, self.order_type))
if not res:
frappe.msgprint(_("Quotation {0} not of type {1}")
.format(d.prevdoc_docname, self.order_type))
def validate_order_type(self):
super(SalesOrder, self).validate_order_type()
def validate_delivery_date(self):
if self.order_type == 'Sales':
if not self.delivery_date:
self.delivery_date = max([d.delivery_date for d in self.get("items")])
if self.delivery_date:
for d in self.get("items"):
if not d.delivery_date:
d.delivery_date = self.delivery_date
if getdate(self.transaction_date) > getdate(d.delivery_date):
frappe.msgprint(_("Expected Delivery Date should be after Sales Order Date"),
indicator='orange', title=_('Warning'))
else:
frappe.throw(_("Please enter Delivery Date"))
self.validate_sales_mntc_quotation()
def validate_proj_cust(self):
if self.project and self.customer_name:
res = frappe.db.sql("""select name from `tabProject` where name = %s
and (customer = %s or ifnull(customer,'')='')""",
(self.project, self.customer))
if not res:
frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project))
def validate_warehouse(self):
super(SalesOrder, self).validate_warehouse()
for d in self.get("items"):
if (frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 or
(self.has_product_bundle(d.item_code) and self.product_bundle_has_stock_item(d.item_code))) \
and not d.warehouse and not cint(d.delivered_by_supplier):
frappe.throw(_("Delivery warehouse required for stock item {0}").format(d.item_code),
WarehouseRequired)
def validate_with_previous_doc(self):
super(SalesOrder, self).validate_with_previous_doc({
"Quotation": {
"ref_dn_field": "prevdoc_docname",
"compare_fields": [["company", "="], ["currency", "="]]
}
})
def update_enquiry_status(self, prevdoc, flag):
enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc)
if enq:
frappe.db.sql("update `tabOpportunity` set status = %s where name=%s",(flag,enq[0][0]))
	def update_prevdoc_status(self, flag):
		for quotation in list(set([d.prevdoc_docname for d in self.get("items")])):
			if quotation:
				doc = frappe.get_doc("Quotation", quotation)
				if doc.docstatus==2:
					frappe.throw(_("Quotation {0} is cancelled").format(quotation))
				doc.set_status(update=True)
				doc.update_opportunity()
def validate_drop_ship(self):
for d in self.get('items'):
if d.delivered_by_supplier and not d.supplier:
frappe.throw(_("Row #{0}: Set Supplier for item {1}").format(d.idx, d.item_code))
def on_submit(self):
self.check_credit_limit()
self.update_reserved_qty()
frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.company, self.base_grand_total, self)
self.update_project()
self.update_prevdoc_status('submit')
def on_cancel(self):
# Cannot cancel closed SO
if self.status == 'Closed':
frappe.throw(_("Closed order cannot be cancelled. Unclose to cancel."))
self.check_nextdoc_docstatus()
self.update_reserved_qty()
self.update_project()
self.update_prevdoc_status('cancel')
frappe.db.set(self, 'status', 'Cancelled')
def update_project(self):
project_list = []
if self.project:
project = frappe.get_doc("Project", self.project)
project.flags.dont_sync_tasks = True
project.update_sales_costing()
project.save()
project_list.append(self.project)
def check_credit_limit(self):
from erpnext.selling.doctype.customer.customer import check_credit_limit
check_credit_limit(self.customer, self.company)
def check_nextdoc_docstatus(self):
# Checks Delivery Note
submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2
where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name)
if submit_dn:
frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn)))
# Checks Sales Invoice
submit_rv = frappe.db.sql_list("""select t1.name
from `tabSales Invoice` t1,`tabSales Invoice Item` t2
where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""",
self.name)
if submit_rv:
frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv)))
#check maintenance schedule
submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1,
`tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.sales_order = %s and t1.docstatus = 1""", self.name)
if submit_ms:
frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms)))
# check maintenance visit
submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""",self.name)
if submit_mv:
frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv)))
# check production order
pro_order = frappe.db.sql_list("""select name from `tabProduction Order`
where sales_order = %s and docstatus = 1""", self.name)
if pro_order:
frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order)))
def check_modified_date(self):
		# A non-zero TIMEDIFF between the stored and in-memory timestamps means
		# the document changed since it was loaded.
		mod_db = frappe.db.get_value("Sales Order", self.name, "modified")
		date_diff = frappe.db.sql("select TIMEDIFF(%s, %s)",
			(mod_db, cstr(self.modified)))
if date_diff and date_diff[0][0]:
frappe.throw(_("{0} {1} has been modified. Please refresh.").format(self.doctype, self.name))
def update_status(self, status):
self.check_modified_date()
self.set_status(update=True, status=status)
self.update_reserved_qty()
self.notify_update()
clear_doctype_notifications(self)
def update_reserved_qty(self, so_item_rows=None):
"""update requested qty (before ordered_qty is updated)"""
item_wh_list = []
def _valid_for_reserve(item_code, warehouse):
if item_code and warehouse and [item_code, warehouse] not in item_wh_list \
and frappe.db.get_value("Item", item_code, "is_stock_item"):
item_wh_list.append([item_code, warehouse])
for d in self.get("items"):
if (not so_item_rows or d.name in so_item_rows) and not d.delivered_by_supplier:
if self.has_product_bundle(d.item_code):
for p in self.get("packed_items"):
if p.parent_detail_docname == d.name and p.parent_item == d.item_code:
_valid_for_reserve(p.item_code, p.warehouse)
else:
_valid_for_reserve(d.item_code, d.warehouse)
for item_code, warehouse in item_wh_list:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
def on_update(self):
pass
def before_update_after_submit(self):
self.validate_po()
self.validate_drop_ship()
self.validate_supplier_after_submit()
def validate_supplier_after_submit(self):
"""Check that supplier is the same after submit if PO is already made"""
exc_list = []
for item in self.items:
if item.supplier:
supplier = frappe.db.get_value("Sales Order Item", {"parent": self.name, "item_code": item.item_code},
"supplier")
if item.ordered_qty > 0.0 and item.supplier != supplier:
exc_list.append(_("Row #{0}: Not allowed to change Supplier as Purchase Order already exists").format(item.idx))
if exc_list:
frappe.throw('\n'.join(exc_list))
def update_delivery_status(self):
"""Update delivery status from Purchase Order for drop shipping"""
tot_qty, delivered_qty = 0.0, 0.0
for item in self.items:
if item.delivered_by_supplier:
item_delivered_qty = frappe.db.sql("""select sum(qty)
from `tabPurchase Order Item` poi, `tabPurchase Order` po
where poi.sales_order_item = %s
and poi.item_code = %s
and poi.parent = po.name
and po.docstatus = 1
and po.status = 'Delivered'""", (item.name, item.item_code))
item_delivered_qty = item_delivered_qty[0][0] if item_delivered_qty else 0
item.db_set("delivered_qty", flt(item_delivered_qty), update_modified=False)
delivered_qty += item.delivered_qty
tot_qty += item.qty
self.db_set("per_delivered", flt(delivered_qty/tot_qty) * 100,
update_modified=False)
def set_indicator(self):
"""Set indicator for portal"""
if self.per_billed < 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Not Paid and Not Delivered")
elif self.per_billed == 100 and self.per_delivered < 100:
self.indicator_color = "orange"
self.indicator_title = _("Paid and Not Delivered")
else:
self.indicator_color = "green"
self.indicator_title = _("Paid")
def get_production_order_items(self):
		'''Returns items with a default BOM that are not yet fully covered by a linked Production Order'''
items = []
for table in [self.items, self.packed_items]:
for i in table:
bom = get_default_bom_item(i.item_code)
if bom:
stock_qty = i.qty if i.doctype == 'Packed Item' else i.stock_qty
items.append(dict(
item_code= i.item_code,
bom = bom,
warehouse = i.warehouse,
pending_qty= stock_qty - flt(frappe.db.sql('''select sum(qty) from `tabProduction Order`
where production_item=%s and sales_order=%s''', (i.item_code, self.name))[0][0])
))
return items
def on_recurring(self, reference_doc, subscription_doc):
mcount = month_map[subscription_doc.frequency]
self.set("delivery_date", get_next_date(reference_doc.delivery_date, mcount,
cint(subscription_doc.repeat_on_day)))
for d in self.get("items"):
reference_delivery_date = frappe.db.get_value("Sales Order Item",
{"parent": reference_doc.name, "item_code": d.item_code, "idx": d.idx}, "delivery_date")
d.set("delivery_date",
get_next_date(reference_delivery_date, mcount, cint(subscription_doc.repeat_on_day)))
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context.update({
'show_sidebar': True,
'show_search': True,
'no_breadcrumbs': True,
'title': _('Orders'),
})
return list_context
@frappe.whitelist()
def close_or_unclose_sales_orders(names, status):
if not frappe.has_permission("Sales Order", "write"):
frappe.throw(_("Not permitted"), frappe.PermissionError)
names = json.loads(names)
for name in names:
so = frappe.get_doc("Sales Order", name)
if so.docstatus == 1:
if status == "Closed":
if so.status not in ("Cancelled", "Closed") and (so.per_delivered < 100 or so.per_billed < 100):
so.update_status(status)
else:
if so.status == "Closed":
so.update_status('Draft')
frappe.local.message_log = []
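# Illustrative call from another server-side context ("SO-00001" is a
# hypothetical document name, not one from this codebase):
#   close_or_unclose_sales_orders('["SO-00001"]', "Closed")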
@frappe.whitelist()
def make_material_request(source_name, target_doc=None):
def postprocess(source, doc):
doc.material_request_type = "Purchase"
def update_item(source, target, source_parent):
target.project = source_parent.project
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Material Request",
"validation": {
"docstatus": ["=", 1]
}
},
"Packed Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom"
},
"postprocess": update_item
},
"Sales Order Item": {
"doctype": "Material Request Item",
"field_map": {
"parent": "sales_order",
"stock_uom": "uom",
"stock_qty": "qty"
},
"condition": lambda doc: not frappe.db.exists('Product Bundle', doc.item_code),
"postprocess": update_item
}
}, target_doc, postprocess)
return doc
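# Hypothetical usage sketch: map a submitted Sales Order into a draft
# Material Request (the document name below is illustrative):
#   mr = make_material_request("SO-00001")
#   mr.insert()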
@frappe.whitelist()
def make_project(source_name, target_doc=None):
def postprocess(source, doc):
doc.project_type = "External"
doc.project_name = source.name
doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Project",
"validation": {
"docstatus": ["=", 1]
},
"field_map":{
"name" : "sales_order",
"base_grand_total" : "estimated_costing",
}
},
"Sales Order Item": {
"doctype": "Project Task",
"field_map": {
"description": "title",
},
}
}, target_doc, postprocess)
return doc
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
def set_missing_values(source, target):
if source.po_no:
if target.po_no:
target_po_no = target.po_no.split(", ")
target_po_no.append(source.po_no)
target.po_no = ", ".join(list(set(target_po_no))) if len(target_po_no) > 1 else target_po_no[0]
else:
target.po_no = source.po_no
target.ignore_pricing_rule = 1
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Delivery Note", 'company_address', target.company_address))
def update_item(source, target, source_parent):
target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate)
target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate)
target.qty = flt(source.qty) - flt(source.delivered_qty)
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
target_doc = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Delivery Note",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Delivery Note Item",
"field_map": {
"rate": "rate",
"name": "so_detail",
"parent": "against_sales_order",
},
"postprocess": update_item,
"condition": lambda doc: abs(doc.delivered_qty) < abs(doc.qty) and doc.delivered_by_supplier!=1
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, set_missing_values)
return target_doc
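# Hypothetical usage sketch ("SO-00001" is illustrative). Only rows with
# undelivered qty that are not drop-shipped are copied to the Delivery Note:
#   dn = make_delivery_note("SO-00001")
#   dn.insert()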
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None, ignore_permissions=False):
def postprocess(source, target):
set_missing_values(source, target)
#Get the advance paid Journal Entries in Sales Invoice Advance
target.set_advances()
def set_missing_values(source, target):
target.is_pos = 0
target.ignore_pricing_rule = 1
target.flags.ignore_permissions = True
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
# set company address
target.update(get_company_address(target.company))
if target.company_address:
target.update(get_fetch_values("Sales Invoice", 'company_address', target.company_address))
def update_item(source, target, source_parent):
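		# Invoice only the unbilled remainder: when the row was partially
		# billed at a known rate, derive qty from the outstanding amount.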
target.amount = flt(source.amount) - flt(source.billed_amt)
target.base_amount = target.amount * flt(source_parent.conversion_rate)
target.qty = target.amount / flt(source.rate) if (source.rate and source.billed_amt) else source.qty
item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1)
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \
or item.selling_cost_center \
or frappe.db.get_value("Item Group", item.item_group, "default_cost_center")
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Sales Invoice",
"field_map": {
"party_account_currency": "party_account_currency"
},
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Sales Invoice Item",
"field_map": {
"name": "so_detail",
"parent": "sales_order",
},
"postprocess": update_item,
"condition": lambda doc: doc.qty and (doc.base_amount==0 or abs(doc.billed_amt) < abs(doc.amount))
},
"Sales Taxes and Charges": {
"doctype": "Sales Taxes and Charges",
"add_if_empty": True
},
"Sales Team": {
"doctype": "Sales Team",
"add_if_empty": True
}
}, target_doc, postprocess, ignore_permissions=ignore_permissions)
return doclist
@frappe.whitelist()
def make_maintenance_schedule(source_name, target_doc=None):
maint_schedule = frappe.db.sql("""select t1.name
from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2
where t2.parent=t1.name and t2.sales_order=%s and t1.docstatus=1""", source_name)
if not maint_schedule:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Schedule",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Schedule Item",
"field_map": {
"parent": "sales_order"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
visit = frappe.db.sql("""select t1.name
from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
where t2.parent=t1.name and t2.prevdoc_docname=%s
and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)
if not visit:
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Maintenance Visit",
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Maintenance Visit Purpose",
"field_map": {
"parent": "prevdoc_docname",
"parenttype": "prevdoc_doctype"
},
"add_if_empty": True
}
}, target_doc)
return doclist
@frappe.whitelist()
def get_events(start, end, filters=None):
"""Returns events for Gantt / Calendar view rendering.
:param start: Start date-time.
:param end: End date-time.
:param filters: Filters (JSON).
"""
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions("Sales Order", filters)
data = frappe.db.sql("""
select
`tabSales Order`.name, `tabSales Order`.customer_name, `tabSales Order`.status,
`tabSales Order`.delivery_status, `tabSales Order`.billing_status,
`tabSales Order Item`.delivery_date
from
`tabSales Order`, `tabSales Order Item`
where `tabSales Order`.name = `tabSales Order Item`.parent
and (ifnull(`tabSales Order Item`.delivery_date, '0000-00-00')!= '0000-00-00') \
and (`tabSales Order Item`.delivery_date between %(start)s and %(end)s)
and `tabSales Order`.docstatus < 2
{conditions}
""".format(conditions=conditions), {
"start": start,
"end": end
}, as_dict=True, update={"allDay": 0})
return data
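# Hypothetical invocation (dates illustrative; the dotted path assumes this
# module lives at erpnext/selling/doctype/sales_order/sales_order.py):
#   frappe.call("erpnext.selling.doctype.sales_order.sales_order.get_events",
#       start="2017-01-01", end="2017-01-31")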
@frappe.whitelist()
def make_purchase_order_for_drop_shipment(source_name, for_supplier, target_doc=None):
def set_missing_values(source, target):
target.supplier = for_supplier
target.apply_discount_on = ""
target.additional_discount_percentage = 0.0
target.discount_amount = 0.0
default_price_list = frappe.get_value("Supplier", for_supplier, "default_price_list")
if default_price_list:
target.buying_price_list = default_price_list
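		# For drop shipping the supplier sends goods straight to the customer,
		# so the Purchase Order carries the customer's shipping address and
		# contact details instead of the company's.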
if any( item.delivered_by_supplier==1 for item in source.items):
if source.shipping_address_name:
target.shipping_address = source.shipping_address_name
target.shipping_address_display = source.shipping_address
else:
target.shipping_address = source.customer_address
target.shipping_address_display = source.address_display
target.customer_contact_person = source.contact_person
target.customer_contact_display = source.contact_display
target.customer_contact_mobile = source.contact_mobile
target.customer_contact_email = source.contact_email
else:
target.customer = ""
target.customer_name = ""
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
def update_item(source, target, source_parent):
target.schedule_date = source.delivery_date
target.qty = flt(source.qty) - flt(source.ordered_qty)
target.stock_qty = (flt(source.qty) - flt(source.ordered_qty)) * flt(source.conversion_factor)
doclist = get_mapped_doc("Sales Order", source_name, {
"Sales Order": {
"doctype": "Purchase Order",
"field_no_map": [
"address_display",
"contact_display",
"contact_mobile",
"contact_email",
"contact_person"
],
"validation": {
"docstatus": ["=", 1]
}
},
"Sales Order Item": {
"doctype": "Purchase Order Item",
"field_map": [
["name", "sales_order_item"],
["parent", "sales_order"],
["stock_uom", "stock_uom"],
["uom", "uom"],
["conversion_factor", "conversion_factor"],
["delivery_date", "schedule_date"]
],
"field_no_map": [
"rate",
"price_list_rate"
],
"postprocess": update_item,
"condition": lambda doc: doc.ordered_qty < doc.qty and doc.supplier == for_supplier
}
}, target_doc, set_missing_values)
return doclist
@frappe.whitelist()
def get_supplier(doctype, txt, searchfield, start, page_len, filters):
supp_master_name = frappe.defaults.get_user_default("supp_master_name")
if supp_master_name == "Supplier Name":
fields = ["name", "supplier_type"]
else:
fields = ["name", "supplier_name", "supplier_type"]
fields = ", ".join(fields)
return frappe.db.sql("""select {field} from `tabSupplier`
where docstatus < 2
and ({key} like %(txt)s
or supplier_name like %(txt)s)
and name in (select supplier from `tabSales Order Item` where parent = %(parent)s)
order by
if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999),
if(locate(%(_txt)s, supplier_name), locate(%(_txt)s, supplier_name), 99999),
name, supplier_name
limit %(start)s, %(page_len)s """.format(**{
'field': fields,
'key': frappe.db.escape(searchfield)
}), {
'txt': "%%%s%%" % txt,
'_txt': txt.replace("%", ""),
'start': start,
'page_len': page_len,
'parent': filters.get('parent')
})
@frappe.whitelist()
def make_production_orders(items, sales_order, company, project=None):
'''Make Production Orders against the given Sales Order for the given `items`'''
items = json.loads(items).get('items')
out = []
for i in items:
production_order = frappe.get_doc(dict(
doctype='Production Order',
production_item=i['item_code'],
bom_no=i['bom'],
qty=i['pending_qty'],
company=company,
sales_order=sales_order,
project=project,
fg_warehouse=i['warehouse']
)).insert()
production_order.set_production_order_operations()
production_order.save()
out.append(production_order)
return [p.name for p in out]
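# Hypothetical usage sketch, pairing this with SalesOrder.get_production_order_items
# ("SO-00001" is an illustrative name):
#   so = frappe.get_doc("Sales Order", "SO-00001")
#   make_production_orders(json.dumps({"items": so.get_production_order_items()}),
#       so.name, so.company, so.project)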
@frappe.whitelist()
def update_status(status, name):
so = frappe.get_doc("Sales Order", name)
so.update_status(status)
def get_default_bom_item(item_code):
bom = frappe.get_all('BOM', dict(item=item_code, is_active=True),
order_by='is_default desc')
bom = bom[0].name if bom else None
return bom
// second.rs
// Module containing functions for calculating second order greeks
use std::f64::consts::E;
use common::*;
/// Calculates the Gamma for an option
///
/// Gamma measures the rate of change in the delta with respect to the change in the underlying price.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a fraction of a year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
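///
/// A minimal usage sketch (values are illustrative, mirroring the test inputs
/// below; marked `ignore` since the crate path is not shown here):
///
/// ```ignore
/// let g = gamma(64.68, 65.0, 23.0 / 365.0, 0.015, 0.021, 0.5051);
/// assert!(g > 0.0);
/// ```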
pub fn gamma(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
let d1 = d1(s0, x, t, r, q, sigma);
return gamma_d1(s0, t, q, sigma, d1);
}
pub fn gamma_d1(s0: f64, t: f64, q: f64, sigma: f64, d1: f64) -> f64 {
let arg1 = E.powf(-(q * t)) / (s0 * sigma * (t.sqrt()));
let arg2 = one_over_sqrt_pi();
    // Standard normal pdf factor e^{-d1^2 / 2}; one_over_sqrt_pi() is assumed
    // to supply the 1/sqrt(2*pi) normalisation. The previous expression,
    // E.powf((-d1).powf(2.0)) / 2.0, evaluated e^{d1^2} / 2 instead, halving
    // the result for small d1.
    let arg3 = E.powf(-(d1 * d1) / 2.0);
return arg1 * arg2 * arg3;
}
#[cfg(test)]
mod tests {
use greeks::*;
const UNDERLYING: f64 = 64.68;
const STRIKE: f64 = 65.00;
const VOL: f64 = 0.5051;
const INTEREST_RATE: f64 = 0.0150;
const DIV_YIELD: f64 = 0.0210;
const DAYS_PER_YEAR: f64 = 365.0;
const TIME_TO_EXPIRY: f64 = 23.0 / DAYS_PER_YEAR;
    // Black-Scholes gamma for these inputs; the old expectation of 0.0243
    // matched the halved pdf factor produced by the previous formula.
    const E_GAMMA: f64 = 0.0486;
#[test]
fn test_gamma() {
let gamma = gamma(UNDERLYING,
STRIKE,
TIME_TO_EXPIRY,
INTEREST_RATE,
DIV_YIELD,
VOL);
let abs = (gamma - E_GAMMA).abs();
assert!(abs < 0.001);
}
}
#!/usr/bin/env python
# raster_symbolizer_test.py
from nose.tools import *
from utilities import execution_path, save_data, contains_word
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_dataraster_coloring():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
style = mapnik.Style()
rule = mapnik.Rule()
sym = mapnik.RasterSymbolizer()
    # Assigning a colorizer to the RasterSymbolizer tells the latter
# that it should use it to colorize the raw data raster
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent"))
for value, color in [
( 0, "#0044cc"),
( 10, "#00cc00"),
( 20, "#ffff00"),
( 30, "#ff7f00"),
( 40, "#ff0000"),
( 50, "#ff007f"),
( 60, "#ff00ff"),
( 70, "#cc00cc"),
( 80, "#990099"),
( 90, "#660066"),
( 200, "transparent"),
]:
sym.colorizer.add_stop(value, mapnik.Color(color))
rule.symbols.append(sym)
style.rules.append(rule)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(lyr.envelope())
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_dataraster_coloring.png', im.tostring('png'))
imdata = im.tostring()
# we have some values in the [20,30) interval so check that they're colored
assert contains_word('\xff\xff\x00\xff', imdata)
def test_dataraster_query_point():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
_map.layers.append(lyr)
# point inside raster extent with valid data
x, y = 427417, 4477517
features = _map.query_point(0,x,y).features
assert len(features) == 1
feat = features[0]
center = feat.envelope().center()
assert center.x==x and center.y==y, center
value = feat['value']
assert value == 21.0, value
# point outside raster extent
features = _map.query_point(0,-427417,4477517).features
assert len(features) == 0
# point inside raster extent with nodata
features = _map.query_point(0,126850,4596050).features
assert len(features) == 0
def test_load_save_map():
map = mapnik.Map(256,256)
in_map = "../data/good_maps/raster_symbolizer.xml"
mapnik.load_map(map, in_map)
out_map = mapnik.save_map_to_string(map)
assert 'RasterSymbolizer' in out_map
assert 'RasterColorizer' in out_map
assert 'stop' in out_map
def test_raster_with_alpha_blends_correctly_with_background():
WIDTH = 500
HEIGHT = 500
map = mapnik.Map(WIDTH, HEIGHT)
WHITE = mapnik.Color(255, 255, 255)
map.background = WHITE
style = mapnik.Style()
rule = mapnik.Rule()
symbolizer = mapnik.RasterSymbolizer()
#XXX: This fixes it, see http://trac.mapnik.org/ticket/759#comment:3
# (and remove comment when this test passes)
#symbolizer.scaling="bilinear_old"
rule.symbols.append(symbolizer)
style.rules.append(rule)
map.append_style('raster_style', style)
map_layer = mapnik.Layer('test_layer')
filepath = '../data/raster/white-alpha.png'
map_layer.datasource = mapnik.Gdal(file=filepath)
map_layer.styles.append('raster_style')
map.layers.append(map_layer)
map.zoom_all()
mim = mapnik.Image(WIDTH, HEIGHT)
mapnik.render(map, mim)
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
mim.tostring('png'))
imdata = mim.tostring()
# All white is expected
assert contains_word('\xff\xff\xff\xff', imdata)
def test_raster_warping():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
prj_trans = mapnik.ProjTransform(mapnik.Projection(mapSrs),
mapnik.Projection(lyrSrs))
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping.png', im.tostring('png'))
imdata = im.tostring()
assert contains_word('\xff\xff\x00\xff', imdata)
def test_raster_warping_does_not_overclip_source():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.background=mapnik.Color('white')
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(mapnik.Box2d(3,42,4,43))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping_does_not_overclip_source.png',
im.tostring('png'))
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
if __name__ == "__main__":
setup()
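    # Run every module-level function whose name contains 'test_' when this
    # file is executed directly; nose discovers the same functions otherwise.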
[eval(run)() for run in dir() if 'test_' in run]
raster_symbolizer_test.py | #!/usr/bin/env python
from nose.tools import *
from utilities import execution_path, save_data, contains_word
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_dataraster_coloring():
|
def test_dataraster_query_point():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
_map.layers.append(lyr)
# point inside raster extent with valid data
x, y = 427417, 4477517
features = _map.query_point(0,x,y).features
assert len(features) == 1
feat = features[0]
center = feat.envelope().center()
assert center.x==x and center.y==y, center
value = feat['value']
assert value == 21.0, value
# point outside raster extent
features = _map.query_point(0,-427417,4477517).features
assert len(features) == 0
# point inside raster extent with nodata
features = _map.query_point(0,126850,4596050).features
assert len(features) == 0
def test_load_save_map():
map = mapnik.Map(256,256)
in_map = "../data/good_maps/raster_symbolizer.xml"
mapnik.load_map(map, in_map)
out_map = mapnik.save_map_to_string(map)
assert 'RasterSymbolizer' in out_map
assert 'RasterColorizer' in out_map
assert 'stop' in out_map
def test_raster_with_alpha_blends_correctly_with_background():
WIDTH = 500
HEIGHT = 500
map = mapnik.Map(WIDTH, HEIGHT)
WHITE = mapnik.Color(255, 255, 255)
map.background = WHITE
style = mapnik.Style()
rule = mapnik.Rule()
symbolizer = mapnik.RasterSymbolizer()
#XXX: This fixes it, see http://trac.mapnik.org/ticket/759#comment:3
# (and remove comment when this test passes)
#symbolizer.scaling="bilinear_old"
rule.symbols.append(symbolizer)
style.rules.append(rule)
map.append_style('raster_style', style)
map_layer = mapnik.Layer('test_layer')
filepath = '../data/raster/white-alpha.png'
map_layer.datasource = mapnik.Gdal(file=filepath)
map_layer.styles.append('raster_style')
map.layers.append(map_layer)
map.zoom_all()
mim = mapnik.Image(WIDTH, HEIGHT)
mapnik.render(map, mim)
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
mim.tostring('png'))
imdata = mim.tostring()
# All white is expected
assert contains_word('\xff\xff\xff\xff', imdata)
def test_raster_warping():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
prj_trans = mapnik.ProjTransform(mapnik.Projection(mapSrs),
mapnik.Projection(lyrSrs))
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping.png', im.tostring('png'))
imdata = im.tostring()
assert contains_word('\xff\xff\x00\xff', imdata)
def test_raster_warping_does_not_overclip_source():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.background = mapnik.Color('white')
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(mapnik.Box2d(3,42,4,43))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping_does_not_overclip_source.png',
im.tostring('png'))
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
style = mapnik.Style()
rule = mapnik.Rule()
sym = mapnik.RasterSymbolizer()
# Assigning a colorizer to the RasterSymbolizer tells the latter
# that it should use it to colorize the raw data raster
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent"))
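# Sketch of the assumed COLORIZER_DISCRETE semantics (not part of the original
# test): a raster value v takes the color of the last stop whose value <= v,
# so e.g. v = 25.0 falls in the [20, 30) band and is painted "#ffff00", which
# is what the final assertion on '\xff\xff\x00\xff' checks for.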
for value, color in [
( 0, "#0044cc"),
( 10, "#00cc00"),
( 20, "#ffff00"),
( 30, "#ff7f00"),
( 40, "#ff0000"),
( 50, "#ff007f"),
( 60, "#ff00ff"),
( 70, "#cc00cc"),
( 80, "#990099"),
( 90, "#660066"),
( 200, "transparent"),
]:
sym.colorizer.add_stop(value, mapnik.Color(color))
rule.symbols.append(sym)
style.rules.append(rule)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(lyr.envelope())
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_dataraster_coloring.png', im.tostring('png'))
imdata = im.tostring()
# we have some values in the [20,30) interval so check that they're colored
assert contains_word('\xff\xff\x00\xff', imdata) | identifier_body |
raster_symbolizer_test.py | #!/usr/bin/env python
from nose.tools import *
from utilities import execution_path, save_data, contains_word
import os, mapnik
def setup():
# All of the paths used are relative; if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_dataraster_coloring():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs) | sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent"))
for value, color in [
( 0, "#0044cc"),
( 10, "#00cc00"),
( 20, "#ffff00"),
( 30, "#ff7f00"),
( 40, "#ff0000"),
( 50, "#ff007f"),
( 60, "#ff00ff"),
( 70, "#cc00cc"),
( 80, "#990099"),
( 90, "#660066"),
( 200, "transparent"),
]:
sym.colorizer.add_stop(value, mapnik.Color(color))
rule.symbols.append(sym)
style.rules.append(rule)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(lyr.envelope())
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_dataraster_coloring.png', im.tostring('png'))
imdata = im.tostring()
# we have some values in the [20,30) interval so check that they're colored
assert contains_word('\xff\xff\x00\xff', imdata)
def test_dataraster_query_point():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
_map.layers.append(lyr)
# point inside raster extent with valid data
x, y = 427417, 4477517
features = _map.query_point(0,x,y).features
assert len(features) == 1
feat = features[0]
center = feat.envelope().center()
assert center.x==x and center.y==y, center
value = feat['value']
assert value == 21.0, value
# point outside raster extent
features = _map.query_point(0,-427417,4477517).features
assert len(features) == 0
# point inside raster extent with nodata
features = _map.query_point(0,126850,4596050).features
assert len(features) == 0
def test_load_save_map():
map = mapnik.Map(256,256)
in_map = "../data/good_maps/raster_symbolizer.xml"
mapnik.load_map(map, in_map)
out_map = mapnik.save_map_to_string(map)
assert 'RasterSymbolizer' in out_map
assert 'RasterColorizer' in out_map
assert 'stop' in out_map
def test_raster_with_alpha_blends_correctly_with_background():
WIDTH = 500
HEIGHT = 500
map = mapnik.Map(WIDTH, HEIGHT)
WHITE = mapnik.Color(255, 255, 255)
map.background = WHITE
style = mapnik.Style()
rule = mapnik.Rule()
symbolizer = mapnik.RasterSymbolizer()
#XXX: This fixes it, see http://trac.mapnik.org/ticket/759#comment:3
# (and remove comment when this test passes)
#symbolizer.scaling="bilinear_old"
rule.symbols.append(symbolizer)
style.rules.append(rule)
map.append_style('raster_style', style)
map_layer = mapnik.Layer('test_layer')
filepath = '../data/raster/white-alpha.png'
map_layer.datasource = mapnik.Gdal(file=filepath)
map_layer.styles.append('raster_style')
map.layers.append(map_layer)
map.zoom_all()
mim = mapnik.Image(WIDTH, HEIGHT)
mapnik.render(map, mim)
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
mim.tostring('png'))
imdata = mim.tostring()
# All white is expected
assert contains_word('\xff\xff\xff\xff', imdata)
def test_raster_warping():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
prj_trans = mapnik.ProjTransform(mapnik.Projection(mapSrs),
mapnik.Projection(lyrSrs))
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping.png', im.tostring('png'))
imdata = im.tostring()
assert contains_word('\xff\xff\x00\xff', imdata)
def test_raster_warping_does_not_overclip_source():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.background = mapnik.Color('white')
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(mapnik.Box2d(3,42,4,43))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping_does_not_overclip_source.png',
im.tostring('png'))
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run] | style = mapnik.Style()
rule = mapnik.Rule()
sym = mapnik.RasterSymbolizer()
# Assigning a colorizer to the RasterSymbolizer tells the latter
# that it should use it to colorize the raw data raster | random_line_split |
raster_symbolizer_test.py | #!/usr/bin/env python
from nose.tools import *
from utilities import execution_path, save_data, contains_word
import os, mapnik
def setup():
# All of the paths used are relative; if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_dataraster_coloring():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
style = mapnik.Style()
rule = mapnik.Rule()
sym = mapnik.RasterSymbolizer()
# Assigning a colorizer to the RasterSymbolizer tells the latter
# that it should use it to colorize the raw data raster
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent"))
for value, color in [
( 0, "#0044cc"),
( 10, "#00cc00"),
( 20, "#ffff00"),
( 30, "#ff7f00"),
( 40, "#ff0000"),
( 50, "#ff007f"),
( 60, "#ff00ff"),
( 70, "#cc00cc"),
( 80, "#990099"),
( 90, "#660066"),
( 200, "transparent"),
]:
|
rule.symbols.append(sym)
style.rules.append(rule)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(lyr.envelope())
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_dataraster_coloring.png', im.tostring('png'))
imdata = im.tostring()
# we have some values in the [20,30) interval so check that they're colored
assert contains_word('\xff\xff\x00\xff', imdata)
def test_dataraster_query_point():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
lyr.srs = srs
_map = mapnik.Map(256,256, srs)
_map.layers.append(lyr)
# point inside raster extent with valid data
x, y = 427417, 4477517
features = _map.query_point(0,x,y).features
assert len(features) == 1
feat = features[0]
center = feat.envelope().center()
assert center.x==x and center.y==y, center
value = feat['value']
assert value == 21.0, value
# point outside raster extent
features = _map.query_point(0,-427417,4477517).features
assert len(features) == 0
# point inside raster extent with nodata
features = _map.query_point(0,126850,4596050).features
assert len(features) == 0
def test_load_save_map():
map = mapnik.Map(256,256)
in_map = "../data/good_maps/raster_symbolizer.xml"
mapnik.load_map(map, in_map)
out_map = mapnik.save_map_to_string(map)
assert 'RasterSymbolizer' in out_map
assert 'RasterColorizer' in out_map
assert 'stop' in out_map
def test_raster_with_alpha_blends_correctly_with_background():
WIDTH = 500
HEIGHT = 500
map = mapnik.Map(WIDTH, HEIGHT)
WHITE = mapnik.Color(255, 255, 255)
map.background = WHITE
style = mapnik.Style()
rule = mapnik.Rule()
symbolizer = mapnik.RasterSymbolizer()
#XXX: This fixes it, see http://trac.mapnik.org/ticket/759#comment:3
# (and remove comment when this test passes)
#symbolizer.scaling="bilinear_old"
rule.symbols.append(symbolizer)
style.rules.append(rule)
map.append_style('raster_style', style)
map_layer = mapnik.Layer('test_layer')
filepath = '../data/raster/white-alpha.png'
map_layer.datasource = mapnik.Gdal(file=filepath)
map_layer.styles.append('raster_style')
map.layers.append(map_layer)
map.zoom_all()
mim = mapnik.Image(WIDTH, HEIGHT)
mapnik.render(map, mim)
save_data('test_raster_with_alpha_blends_correctly_with_background.png',
mim.tostring('png'))
imdata = mim.tostring()
# All white is expected
assert contains_word('\xff\xff\xff\xff', imdata)
def test_raster_warping():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
prj_trans = mapnik.ProjTransform(mapnik.Projection(mapSrs),
mapnik.Projection(lyrSrs))
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping.png', im.tostring('png'))
imdata = im.tostring()
assert contains_word('\xff\xff\x00\xff', imdata)
def test_raster_warping_does_not_overclip_source():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
)
sym = mapnik.RasterSymbolizer()
sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
rule = mapnik.Rule()
rule.symbols.append(sym)
style = mapnik.Style()
style.rules.append(rule)
_map = mapnik.Map(256,256, mapSrs)
_map.background = mapnik.Color('white')
_map.append_style('foo', style)
lyr.styles.append('foo')
_map.layers.append(lyr)
_map.zoom_to_box(mapnik.Box2d(3,42,4,43))
im = mapnik.Image(_map.width,_map.height)
mapnik.render(_map, im)
# save a png somewhere so we can see it
save_data('test_raster_warping_does_not_overclip_source.png',
im.tostring('png'))
assert im.view(0,200,1,1).tostring()=='\xff\xff\x00\xff'
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
| sym.colorizer.add_stop(value, mapnik.Color(color)) | conditional_block |
app.routes.ts | import { PrivateComponent } from "./private.component";
import { LoginComponent } from "./login.component";
import { RegisterComponent } from "./register.component";
import { SettingsComponent } from "./settings.component";
import { HomeComponent } from "./home.component";
import { AssetComponent } from "./asset.component";
import { ViewerComponent } from "./viewer.component";
import { AnalyticsComponent } from "./analytics.component";
/* List of Alienate Routes */
export const AlienateAppRoutes = [
{
path: '',
redirectTo: 'me',
terminal: true | { path: '', component: HomeComponent },
{ path: 'asset/:tx', component: AssetComponent },
{ path: 'asset/:tx/view', component: ViewerComponent },
{ path: 'asset/:tx/analytics', component: AnalyticsComponent },
{ path: 'settings', component: SettingsComponent }
]
},
{ path: 'login', component: LoginComponent },
{ path: 'register', component: RegisterComponent }
]; | },
{
path: 'me',
component: PrivateComponent,
children: [ | random_line_split |
state.rs | use std::fmt::{Debug, Formatter, Result};
use std::clone::Clone;
pub enum State {
Unknown,
Unsupported,
Unauthorized,
PoweredOff,
PoweredOn,
}
impl State {
fn id(&self) -> usize {
match *self {
State::Unknown => 1,
State::Unsupported => 3,
State::Unauthorized => 4,
State::PoweredOff => 5,
State::PoweredOn => 6,
}
}
}
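// Illustrative behaviour of the id()-based comparison below (these asserts
// are a sketch, not part of the original file):
// assert_eq!(State::PoweredOn, State::PoweredOn.clone());
// assert!(State::PoweredOn != State::PoweredOff);
// Note: #[derive(Clone, Debug, PartialEq)] on the enum would make all three
// hand-written impls in this file unnecessary.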
impl PartialEq for State {
fn eq(&self, other: &State) -> bool |
}
impl Debug for State {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "State::{}", match *self {
State::Unknown => "Unknown",
State::Unsupported => "Unsupported",
State::Unauthorized => "Unauthorized",
State::PoweredOff => "PoweredOff",
State::PoweredOn => "PoweredOn",
})
}
}
impl Clone for State {
fn clone(&self) -> State {
match *self {
State::Unknown => State::Unknown,
State::Unsupported => State::Unsupported,
State::Unauthorized => State::Unauthorized,
State::PoweredOff => State::PoweredOff,
State::PoweredOn => State::PoweredOn,
}
}
}
| {
self.id() == other.id()
} | identifier_body |
state.rs | use std::fmt::{Debug, Formatter, Result};
use std::clone::Clone;
pub enum State {
Unknown,
Unsupported,
Unauthorized,
PoweredOff,
PoweredOn,
}
impl State {
fn id(&self) -> usize {
match *self {
State::Unknown => 1,
State::Unsupported => 3,
State::Unauthorized => 4,
State::PoweredOff => 5,
State::PoweredOn => 6,
}
}
}
impl PartialEq for State {
fn eq(&self, other: &State) -> bool {
self.id() == other.id()
}
}
impl Debug for State {
fn | (&self, f: &mut Formatter) -> Result {
write!(f, "State::{}", match *self {
State::Unknown => "Unknown",
State::Unsupported => "Unsupported",
State::Unauthorized => "Unauthorized",
State::PoweredOff => "PoweredOff",
State::PoweredOn => "PoweredOn",
})
}
}
impl Clone for State {
fn clone(&self) -> State {
match *self {
State::Unknown => State::Unknown,
State::Unsupported => State::Unsupported,
State::Unauthorized => State::Unauthorized,
State::PoweredOff => State::PoweredOff,
State::PoweredOn => State::PoweredOn,
}
}
}
| fmt | identifier_name |
state.rs | use std::fmt::{Debug, Formatter, Result};
use std::clone::Clone;
pub enum State {
Unknown,
Unsupported,
Unauthorized,
PoweredOff,
PoweredOn,
}
impl State {
fn id(&self) -> usize {
match *self {
State::Unknown => 1,
State::Unsupported => 3,
State::Unauthorized => 4,
State::PoweredOff => 5,
State::PoweredOn => 6,
} | }
impl PartialEq for State {
fn eq(&self, other: &State) -> bool {
self.id() == other.id()
}
}
impl Debug for State {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "State::{}", match *self {
State::Unknown => "Unknown",
State::Unsupported => "Unsupported",
State::Unauthorized => "Unauthorized",
State::PoweredOff => "PoweredOff",
State::PoweredOn => "PoweredOn",
})
}
}
impl Clone for State {
fn clone(&self) -> State {
match *self {
State::Unknown => State::Unknown,
State::Unsupported => State::Unsupported,
State::Unauthorized => State::Unauthorized,
State::PoweredOff => State::PoweredOff,
State::PoweredOn => State::PoweredOn,
}
}
} | } | random_line_split |
Northwind.RegionService.ts | namespace Enterprise.Northwind {
export namespace RegionService {
export const baseUrl = 'Northwind/Region';
export declare function Create(request: Serenity.SaveRequest<RegionRow>, onSuccess?: (response: Serenity.SaveResponse) => void, opt?: Q.ServiceOptions<any>): JQueryXHR;
export declare function Update(request: Serenity.SaveRequest<RegionRow>, onSuccess?: (response: Serenity.SaveResponse) => void, opt?: Q.ServiceOptions<any>): JQueryXHR;
export declare function Delete(request: Serenity.DeleteRequest, onSuccess?: (response: Serenity.DeleteResponse) => void, opt?: Q.ServiceOptions<any>): JQueryXHR;
export declare function Retrieve(request: Serenity.RetrieveRequest, onSuccess?: (response: Serenity.RetrieveResponse<RegionRow>) => void, opt?: Q.ServiceOptions<any>): JQueryXHR;
export declare function List(request: Serenity.ListRequest, onSuccess?: (response: Serenity.ListResponse<RegionRow>) => void, opt?: Q.ServiceOptions<any>): JQueryXHR;
export namespace Methods {
export declare const Create: string;
export declare const Update: string;
export declare const Delete: string;
export declare const Retrieve: string;
export declare const List: string;
}
| 'Update',
'Delete',
'Retrieve',
'List'
].forEach(x => {
(<any>RegionService)[x] = function (r, s, o) {
return Q.serviceRequest(baseUrl + '/' + x, r, s, o);
};
(<any>Methods)[x] = baseUrl + '/' + x;
});
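// The loop above fills in the declared stubs at runtime: once it has run,
// RegionService.List(request, onSuccess) posts to 'Northwind/Region/List' via
// Q.serviceRequest, and Methods.List holds that same route string (a sketch
// of the intent; Q.serviceRequest itself is supplied by the Serenity runtime).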
}
} | [
'Create', | random_line_split |
mat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile(r"\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
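# Illustrative inputs for the two id styles handled above (ids are
# hypothetical): _get_id_compartment("atp[c]") and _get_id_compartment("atp_c")
# both return "c", while _get_id_compartment("atp") returns None.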
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
"""Load a cobra model stored as a .mat file
Parameters
----------
infile_path: str
path to the file to read
variable_name: str, optional
The variable name of the model in the .mat file. If this is not
specified, then the first MATLAB variable which looks like a COBRA
model will be used
inf: value
The value to use for infinite bounds. Some solvers do not handle
infinite values so for using those, set this to a high numeric value.
Returns
-------
cobra.core.Model.Model:
The resulting cobra model
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
data = scipy_io.loadmat(infile_path)
possible_names = []
if variable_name is None:
# skip meta variables
meta_vars = {"__globals__", "__header__", "__version__"}
possible_names = sorted(i for i in data if i not in meta_vars)
if len(possible_names) == 1:
variable_name = possible_names[0]
if variable_name is not None:
return from_mat_struct(data[variable_name], model_id=variable_name,
inf=inf)
for possible_name in possible_names:
try:
return from_mat_struct(data[possible_name], model_id=possible_name,
inf=inf)
except ValueError:
pass
# If code here is executed, then no model was found.
raise IOError("no COBRA model found")
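# Usage sketch (file names are hypothetical): with a .mat file holding a
# single COBRA struct the variable is found automatically, otherwise name it:
# model = load_matlab_model("e_coli_core.mat")
# model = load_matlab_model("models.mat", variable_name="iJO1366")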
def save_matlab_model(model, file_name, varname=None):
"""Save the cobra model as a .mat file.
This .mat file can be used directly in the MATLAB version of COBRA.
Parameters
----------
model : cobra.core.Model.Model object
The model to save
file_name : str or file-like object
The file to save to
varname : string
The name of the variable within the workspace
"""
if not scipy_io:
raise ImportError('save_matlab_model requires scipy')
if varname is None:
varname = str(model.id) \
if model.id is not None and len(model.id) > 0 \
else "exported_model"
mat = create_mat_dict(model)
scipy_io.savemat(file_name, {varname: mat},
appendmat=True, oned_as="column")
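# Round-trip sketch (path is hypothetical): save_matlab_model(model, "out.mat")
# writes a struct named after model.id that load_matlab_model("out.mat") reads
# back.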
def create_mat_metabolite_id(model):
for met in model.metabolites:
if not _get_id_compartment(met.id) and met.compartment:
yield '{}[{}]'.format(met.id,
model.compartments[met.compartment].lower())
else:
yield met.id
def create_mat_dict(model):
"""create a dict mapping model attributes to arrays"""
rxns = model.reactions
mets = model.metabolites
mat = OrderedDict()
mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
mat["metNames"] = _cell(mets.list_attr("name"))
mat["metFormulas"] = _cell([str(m.formula) for m in mets])
try:
mat["metCharge"] = array(mets.list_attr("charge")) * 1.
except TypeError:
# can't have any None entries for charge, or this will fail
pass
mat["genes"] = _cell(model.genes.list_attr("id"))
# make a matrix for rxnGeneMat
# reactions are rows, genes are columns
rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
len(model.genes)))
if min(rxn_gene.shape) > 0:
for i, reaction in enumerate(model.reactions):
for gene in reaction.genes:
rxn_gene[i, model.genes.index(gene)] = 1
mat["rxnGeneMat"] = rxn_gene
mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
mat["rxns"] = _cell(rxns.list_attr("id"))
mat["rxnNames"] = _cell(rxns.list_attr("name"))
mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
mat["csense"] = "".join((
met._constraint_sense for met in model.metabolites))
stoich_mat = create_stoichiometric_matrix(model)
mat["S"] = stoich_mat if stoich_mat is not None else [[]]
# multiply by 1 to convert to float, working around scipy bug
# https://github.com/scipy/scipy/issues/4537
mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
mat["b"] = array(mets.list_attr("_bound")) * 1.
mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
mat["rev"] = array(rxns.list_attr("reversibility")) * 1
mat["description"] = str(model.id)
return mat
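# Shape of the returned dict, as built above: "rxns"/"mets"/"genes" are
# object-dtype cell arrays of ids, "S" is the stoichiometric matrix, and
# "lb"/"ub"/"b"/"c" are float vectors, matching what the MATLAB COBRA toolbox
# expects.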
def from_mat_struct(mat_struct, model_id=None, inf=inf):
"""create a model from the COBRA toolbox struct
The struct will be a dict read in by scipy.io.loadmat
"""
m = mat_struct
if m.dtype.names is None:
raise ValueError("not a valid mat struct")
if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
raise ValueError("not a valid mat struct")
if "c" in m.dtype.names:
c_vec = m["c"][0, 0]
else:
c_vec = None
warn("objective vector 'c' not found")
model = Model()
if model_id is not None:
model.id = model_id
elif "description" in m.dtype.names:
description = m["description"][0, 0][0]
if not isinstance(description, string_types) and len(description) > 1:
model.id = description[0]
warn("Several IDs detected, only using the first.")
else:
model.id = description
else:
model.id = "imported_model"
for i, name in enumerate(m["mets"][0, 0]):
new_metabolite = Metabolite()
new_metabolite.id = str(name[0][0])
if all(var in m.dtype.names for var in
['metComps', 'comps', 'compNames']):
comp_index = m["metComps"][0, 0][i][0] - 1
new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
if new_metabolite.compartment not in model.compartments:
comp_name = m['compNames'][0, 0][comp_index][0][0]
model.compartments[new_metabolite.compartment] = comp_name
else:
new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
if new_metabolite.compartment not in model.compartments:
model.compartments[
new_metabolite.compartment] = new_metabolite.compartment
try:
new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
int_charge = int(new_metabolite.charge)
if new_metabolite.charge == int_charge:
new_metabolite.charge = int_charge
except (IndexError, ValueError):
pass
model.add_metabolites([new_metabolite])
new_reactions = []
coefficients = {}
for i, name in enumerate(m["rxns"][0, 0]):
new_reaction = Reaction()
new_reaction.id = str(name[0][0])
new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
new_reaction.lower_bound = -inf
if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
new_reaction.upper_bound = inf
if c_vec is not None:
coefficients[new_reaction] = float(c_vec[i][0])
try:
new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
new_reactions.append(new_reaction)
model.add_reactions(new_reactions)
set_objective(model, coefficients)
coo = scipy_sparse.coo_matrix(m["S"][0, 0])
for i, j, v in zip(coo.row, coo.col, coo.data):
model.reactions[j].add_metabolites({model.metabolites[i]: v})
return model
def _check(result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
| """send the model to a MATLAB workspace through pymatbridge
This model can then be manipulated through the COBRA toolbox
Parameters
----------
model : cobra.core.Model.Model object
The model to send to the MATLAB workspace
variable_name : str
The variable name to which the model will be assigned in the
MATLAB workspace
matlab : None or pymatbridge.Matlab instance
The MATLAB workspace to which the variable will be sent. If
this is None, then this will be sent to the same environment
used in IPython magics.
"""
if scipy_sparse is None:
raise ImportError("`model_to_pymatbridge` requires scipy!")
if matlab is None: # assumed to be running an IPython magic
from IPython import get_ipython
matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
model_info = create_mat_dict(model)
S = model_info["S"].todok()
model_info["S"] = 0
temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
_check(matlab.set_variable(variable_name, model_info))
_check(matlab.set_variable(temp_S_name, S))
_check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
# all vectors need to be transposed
for i in model_info.keys():
if i == "S":
continue
_check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
_check(matlab.run_code("clear %s;" % temp_S_name)) | identifier_body |
|
mat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile(r"\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
"""Load a cobra model stored as a .mat file
Parameters
----------
infile_path: str
path to the file to read
variable_name: str, optional
The variable name of the model in the .mat file. If this is not
specified, then the first MATLAB variable which looks like a COBRA
model will be used
inf: value
The value to use for infinite bounds. Some solvers do not handle
infinite values so for using those, set this to a high numeric value.
Returns
-------
cobra.core.Model.Model:
The resulting cobra model
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
data = scipy_io.loadmat(infile_path)
possible_names = []
if variable_name is None:
# skip meta variables
meta_vars = {"__globals__", "__header__", "__version__"}
possible_names = sorted(i for i in data if i not in meta_vars) | for possible_name in possible_names:
try:
return from_mat_struct(data[possible_name], model_id=possible_name,
inf=inf)
except ValueError:
pass
# If code here is executed, then no model was found.
raise IOError("no COBRA model found")
def save_matlab_model(model, file_name, varname=None):
"""Save the cobra model as a .mat file.
This .mat file can be used directly in the MATLAB version of COBRA.
Parameters
----------
model : cobra.core.Model.Model object
The model to save
file_name : str or file-like object
The file to save to
varname : string
The name of the variable within the workspace
"""
if not scipy_io:
raise ImportError('save_matlab_model requires scipy')
if varname is None:
varname = str(model.id) \
if model.id is not None and len(model.id) > 0 \
else "exported_model"
mat = create_mat_dict(model)
scipy_io.savemat(file_name, {varname: mat},
appendmat=True, oned_as="column")
def create_mat_metabolite_id(model):
for met in model.metabolites:
if not _get_id_compartment(met.id) and met.compartment:
yield '{}[{}]'.format(met.id,
model.compartments[met.compartment].lower())
else:
yield met.id
def create_mat_dict(model):
"""create a dict mapping model attributes to arrays"""
rxns = model.reactions
mets = model.metabolites
mat = OrderedDict()
mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
mat["metNames"] = _cell(mets.list_attr("name"))
mat["metFormulas"] = _cell([str(m.formula) for m in mets])
try:
mat["metCharge"] = array(mets.list_attr("charge")) * 1.
except TypeError:
# can't have any None entries for charge, or this will fail
pass
mat["genes"] = _cell(model.genes.list_attr("id"))
# make a matrix for rxnGeneMat
# reactions are rows, genes are columns
rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
len(model.genes)))
if min(rxn_gene.shape) > 0:
for i, reaction in enumerate(model.reactions):
for gene in reaction.genes:
rxn_gene[i, model.genes.index(gene)] = 1
mat["rxnGeneMat"] = rxn_gene
mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
mat["rxns"] = _cell(rxns.list_attr("id"))
mat["rxnNames"] = _cell(rxns.list_attr("name"))
mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
mat["csense"] = "".join((
met._constraint_sense for met in model.metabolites))
stoich_mat = create_stoichiometric_matrix(model)
mat["S"] = stoich_mat if stoich_mat is not None else [[]]
# multiply by 1 to convert to float, working around scipy bug
# https://github.com/scipy/scipy/issues/4537
mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
mat["b"] = array(mets.list_attr("_bound")) * 1.
mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
mat["rev"] = array(rxns.list_attr("reversibility")) * 1
mat["description"] = str(model.id)
return mat
def from_mat_struct(mat_struct, model_id=None, inf=inf):
"""create a model from the COBRA toolbox struct
The struct will be a dict read in by scipy.io.loadmat
"""
m = mat_struct
if m.dtype.names is None:
raise ValueError("not a valid mat struct")
if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
raise ValueError("not a valid mat struct")
if "c" in m.dtype.names:
c_vec = m["c"][0, 0]
else:
c_vec = None
warn("objective vector 'c' not found")
model = Model()
if model_id is not None:
model.id = model_id
elif "description" in m.dtype.names:
description = m["description"][0, 0][0]
if not isinstance(description, string_types) and len(description) > 1:
model.id = description[0]
warn("Several IDs detected, only using the first.")
else:
model.id = description
else:
model.id = "imported_model"
for i, name in enumerate(m["mets"][0, 0]):
new_metabolite = Metabolite()
new_metabolite.id = str(name[0][0])
if all(var in m.dtype.names for var in
['metComps', 'comps', 'compNames']):
comp_index = m["metComps"][0, 0][i][0] - 1
new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
if new_metabolite.compartment not in model.compartments:
comp_name = m['compNames'][0, 0][comp_index][0][0]
model.compartments[new_metabolite.compartment] = comp_name
else:
new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
if new_metabolite.compartment not in model.compartments:
model.compartments[
new_metabolite.compartment] = new_metabolite.compartment
try:
new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
int_charge = int(new_metabolite.charge)
if new_metabolite.charge == int_charge:
new_metabolite.charge = int_charge
except (IndexError, ValueError):
pass
model.add_metabolites([new_metabolite])
new_reactions = []
coefficients = {}
for i, name in enumerate(m["rxns"][0, 0]):
new_reaction = Reaction()
new_reaction.id = str(name[0][0])
new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
new_reaction.lower_bound = -inf
if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
new_reaction.upper_bound = inf
if c_vec is not None:
coefficients[new_reaction] = float(c_vec[i][0])
try:
new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
new_reactions.append(new_reaction)
model.add_reactions(new_reactions)
set_objective(model, coefficients)
coo = scipy_sparse.coo_matrix(m["S"][0, 0])
for i, j, v in zip(coo.row, coo.col, coo.data):
model.reactions[j].add_metabolites({model.metabolites[i]: v})
return model
def _check(result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
"""send the model to a MATLAB workspace through pymatbridge
This model can then be manipulated through the COBRA toolbox
Parameters
----------
model : cobra.core.Model.Model object
The model to send to the MATLAB workspace
variable_name : str
The variable name to which the model will be assigned in the
MATLAB workspace
matlab : None or pymatbridge.Matlab instance
The MATLAB workspace to which the variable will be sent. If
this is None, then this will be sent to the same environment
used in IPython magics.
"""
if scipy_sparse is None:
raise ImportError("`model_to_pymatbridge` requires scipy!")
if matlab is None: # assumed to be running an IPython magic
from IPython import get_ipython
matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
model_info = create_mat_dict(model)
S = model_info["S"].todok()
model_info["S"] = 0
temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
_check(matlab.set_variable(variable_name, model_info))
_check(matlab.set_variable(temp_S_name, S))
_check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
# all vectors need to be transposed
for i in model_info.keys():
if i == "S":
continue
_check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
_check(matlab.run_code("clear %s;" % temp_S_name)) | if len(possible_names) == 1:
variable_name = possible_names[0]
if variable_name is not None:
return from_mat_struct(data[variable_name], model_id=variable_name,
inf=inf) | random_line_split |
mat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile(r"\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
"""Load a cobra model stored as a .mat file
Parameters
----------
infile_path: str
path to the file to read
variable_name: str, optional
The variable name of the model in the .mat file. If this is not
specified, then the first MATLAB variable which looks like a COBRA
model will be used
inf: value
The value to use for infinite bounds. Some solvers do not handle
infinite values so for using those, set this to a high numeric value.
Returns
-------
cobra.core.Model.Model:
The resulting cobra model
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
data = scipy_io.loadmat(infile_path)
possible_names = []
if variable_name is None:
# skip meta variables
meta_vars = {"__globals__", "__header__", "__version__"}
possible_names = sorted(i for i in data if i not in meta_vars)
if len(possible_names) == 1:
|
if variable_name is not None:
return from_mat_struct(data[variable_name], model_id=variable_name,
inf=inf)
for possible_name in possible_names:
try:
return from_mat_struct(data[possible_name], model_id=possible_name,
inf=inf)
except ValueError:
pass
# If code here is executed, then no model was found.
raise IOError("no COBRA model found")
def save_matlab_model(model, file_name, varname=None):
"""Save the cobra model as a .mat file.
This .mat file can be used directly in the MATLAB version of COBRA.
Parameters
----------
model : cobra.core.Model.Model object
The model to save
file_name : str or file-like object
The file to save to
varname : string
The name of the variable within the workspace
"""
if not scipy_io:
raise ImportError('save_matlab_model requires scipy')
if varname is None:
varname = str(model.id) \
if model.id is not None and len(model.id) > 0 \
else "exported_model"
mat = create_mat_dict(model)
scipy_io.savemat(file_name, {varname: mat},
appendmat=True, oned_as="column")
def create_mat_metabolite_id(model):
for met in model.metabolites:
if not _get_id_compartment(met.id) and met.compartment:
yield '{}[{}]'.format(met.id,
model.compartments[met.compartment].lower())
else:
yield met.id
def create_mat_dict(model):
"""create a dict mapping model attributes to arrays"""
rxns = model.reactions
mets = model.metabolites
mat = OrderedDict()
mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
mat["metNames"] = _cell(mets.list_attr("name"))
mat["metFormulas"] = _cell([str(m.formula) for m in mets])
try:
mat["metCharge"] = array(mets.list_attr("charge")) * 1.
except TypeError:
# can't have any None entries for charge, or this will fail
pass
mat["genes"] = _cell(model.genes.list_attr("id"))
# make a matrix for rxnGeneMat
# reactions are rows, genes are columns
rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
len(model.genes)))
if min(rxn_gene.shape) > 0:
for i, reaction in enumerate(model.reactions):
for gene in reaction.genes:
rxn_gene[i, model.genes.index(gene)] = 1
mat["rxnGeneMat"] = rxn_gene
mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
mat["rxns"] = _cell(rxns.list_attr("id"))
mat["rxnNames"] = _cell(rxns.list_attr("name"))
mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
mat["csense"] = "".join((
met._constraint_sense for met in model.metabolites))
stoich_mat = create_stoichiometric_matrix(model)
mat["S"] = stoich_mat if stoich_mat is not None else [[]]
# multiply by 1 to convert to float, working around scipy bug
# https://github.com/scipy/scipy/issues/4537
mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
mat["b"] = array(mets.list_attr("_bound")) * 1.
mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
mat["rev"] = array(rxns.list_attr("reversibility")) * 1
mat["description"] = str(model.id)
return mat
def from_mat_struct(mat_struct, model_id=None, inf=inf):
"""create a model from the COBRA toolbox struct
The struct will be a dict read in by scipy.io.loadmat
"""
m = mat_struct
if m.dtype.names is None:
raise ValueError("not a valid mat struct")
if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
raise ValueError("not a valid mat struct")
if "c" in m.dtype.names:
c_vec = m["c"][0, 0]
else:
c_vec = None
warn("objective vector 'c' not found")
model = Model()
if model_id is not None:
model.id = model_id
elif "description" in m.dtype.names:
description = m["description"][0, 0][0]
if not isinstance(description, string_types) and len(description) > 1:
model.id = description[0]
warn("Several IDs detected, only using the first.")
else:
model.id = description
else:
model.id = "imported_model"
for i, name in enumerate(m["mets"][0, 0]):
new_metabolite = Metabolite()
new_metabolite.id = str(name[0][0])
if all(var in m.dtype.names for var in
['metComps', 'comps', 'compNames']):
comp_index = m["metComps"][0, 0][i][0] - 1
new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
if new_metabolite.compartment not in model.compartments:
comp_name = m['compNames'][0, 0][comp_index][0][0]
model.compartments[new_metabolite.compartment] = comp_name
else:
new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
if new_metabolite.compartment not in model.compartments:
model.compartments[
new_metabolite.compartment] = new_metabolite.compartment
try:
new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
int_charge = int(new_metabolite.charge)
if new_metabolite.charge == int_charge:
new_metabolite.charge = int_charge
except (IndexError, ValueError):
pass
model.add_metabolites([new_metabolite])
new_reactions = []
coefficients = {}
for i, name in enumerate(m["rxns"][0, 0]):
new_reaction = Reaction()
new_reaction.id = str(name[0][0])
new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
new_reaction.lower_bound = -inf
if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
new_reaction.upper_bound = inf
if c_vec is not None:
coefficients[new_reaction] = float(c_vec[i][0])
try:
new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
new_reactions.append(new_reaction)
model.add_reactions(new_reactions)
set_objective(model, coefficients)
coo = scipy_sparse.coo_matrix(m["S"][0, 0])
for i, j, v in zip(coo.row, coo.col, coo.data):
model.reactions[j].add_metabolites({model.metabolites[i]: v})
return model
def _check(result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
"""send the model to a MATLAB workspace through pymatbridge
This model can then be manipulated through the COBRA toolbox
Parameters
----------
model : cobra.core.Model.Model object
The model to send to the MATLAB workspace
variable_name : str
The variable name to which the model will be assigned in the
MATLAB workspace
matlab : None or pymatbridge.Matlab instance
The MATLAB workspace to which the variable will be sent. If
this is None, then this will be sent to the same environment
used in IPython magics.
"""
if scipy_sparse is None:
raise ImportError("`model_to_pymatbridge` requires scipy!")
if matlab is None: # assumed to be running an IPython magic
from IPython import get_ipython
matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
model_info = create_mat_dict(model)
S = model_info["S"].todok()
model_info["S"] = 0
temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
_check(matlab.set_variable(variable_name, model_info))
_check(matlab.set_variable(temp_S_name, S))
_check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
# all vectors need to be transposed
for i in model_info.keys():
if i == "S":
continue
_check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
_check(matlab.run_code("clear %s;" % temp_S_name))
| variable_name = possible_names[0] | conditional_block |
mat.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile(r"\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
"""Load a cobra model stored as a .mat file
Parameters
----------
infile_path: str
path to the file to read
variable_name: str, optional
The variable name of the model in the .mat file. If this is not
specified, then the first MATLAB variable which looks like a COBRA
model will be used
inf: value
The value to use for infinite bounds. Some solvers do not handle
infinite values so for using those, set this to a high numeric value.
Returns
-------
cobra.core.Model.Model:
The resulting cobra model
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
data = scipy_io.loadmat(infile_path)
possible_names = []
if variable_name is None:
# skip meta variables
meta_vars = {"__globals__", "__header__", "__version__"}
possible_names = sorted(i for i in data if i not in meta_vars)
if len(possible_names) == 1:
variable_name = possible_names[0]
if variable_name is not None:
return from_mat_struct(data[variable_name], model_id=variable_name,
inf=inf)
for possible_name in possible_names:
try:
return from_mat_struct(data[possible_name], model_id=possible_name,
inf=inf)
except ValueError:
pass
# If code here is executed, then no model was found.
raise IOError("no COBRA model found")
def save_matlab_model(model, file_name, varname=None):
"""Save the cobra model as a .mat file.
This .mat file can be used directly in the MATLAB version of COBRA.
Parameters
----------
model : cobra.core.Model.Model object
The model to save
file_name : str or file-like object
The file to save to
varname : string
The name of the variable within the workspace
"""
if not scipy_io:
raise ImportError('save_matlab_model requires scipy')
if varname is None:
varname = str(model.id) \
if model.id is not None and len(model.id) > 0 \
else "exported_model"
mat = create_mat_dict(model)
scipy_io.savemat(file_name, {varname: mat},
appendmat=True, oned_as="column")
def create_mat_metabolite_id(model):
for met in model.metabolites:
if not _get_id_compartment(met.id) and met.compartment:
yield '{}[{}]'.format(met.id,
model.compartments[met.compartment].lower())
else:
yield met.id
def create_mat_dict(model):
"""create a dict mapping model attributes to arrays"""
rxns = model.reactions
mets = model.metabolites
mat = OrderedDict()
mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
mat["metNames"] = _cell(mets.list_attr("name"))
mat["metFormulas"] = _cell([str(m.formula) for m in mets])
try:
mat["metCharge"] = array(mets.list_attr("charge")) * 1.
except TypeError:
# can't have any None entries for charge, or this will fail
pass
mat["genes"] = _cell(model.genes.list_attr("id"))
# make a matrix for rxnGeneMat
# reactions are rows, genes are columns
rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
len(model.genes)))
if min(rxn_gene.shape) > 0:
for i, reaction in enumerate(model.reactions):
for gene in reaction.genes:
rxn_gene[i, model.genes.index(gene)] = 1
mat["rxnGeneMat"] = rxn_gene
mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
mat["rxns"] = _cell(rxns.list_attr("id"))
mat["rxnNames"] = _cell(rxns.list_attr("name"))
mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
mat["csense"] = "".join((
met._constraint_sense for met in model.metabolites))
stoich_mat = create_stoichiometric_matrix(model)
mat["S"] = stoich_mat if stoich_mat is not None else [[]]
# multiply by 1 to convert to float, working around scipy bug
# https://github.com/scipy/scipy/issues/4537
mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
mat["b"] = array(mets.list_attr("_bound")) * 1.
mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
mat["rev"] = array(rxns.list_attr("reversibility")) * 1
mat["description"] = str(model.id)
return mat
def from_mat_struct(mat_struct, model_id=None, inf=inf):
"""create a model from the COBRA toolbox struct
The struct will be a dict read in by scipy.io.loadmat
"""
m = mat_struct
if m.dtype.names is None:
raise ValueError("not a valid mat struct")
if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
raise ValueError("not a valid mat struct")
if "c" in m.dtype.names:
c_vec = m["c"][0, 0]
else:
c_vec = None
warn("objective vector 'c' not found")
model = Model()
if model_id is not None:
model.id = model_id
elif "description" in m.dtype.names:
description = m["description"][0, 0][0]
if not isinstance(description, string_types) and len(description) > 1:
model.id = description[0]
warn("Several IDs detected, only using the first.")
else:
model.id = description
else:
model.id = "imported_model"
for i, name in enumerate(m["mets"][0, 0]):
new_metabolite = Metabolite()
new_metabolite.id = str(name[0][0])
if all(var in m.dtype.names for var in
['metComps', 'comps', 'compNames']):
comp_index = m["metComps"][0, 0][i][0] - 1
new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
if new_metabolite.compartment not in model.compartments:
comp_name = m['compNames'][0, 0][comp_index][0][0]
model.compartments[new_metabolite.compartment] = comp_name
else:
new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
if new_metabolite.compartment not in model.compartments:
model.compartments[
new_metabolite.compartment] = new_metabolite.compartment
try:
new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
int_charge = int(new_metabolite.charge)
if new_metabolite.charge == int_charge:
new_metabolite.charge = int_charge
except (IndexError, ValueError):
pass
model.add_metabolites([new_metabolite])
new_reactions = []
coefficients = {}
for i, name in enumerate(m["rxns"][0, 0]):
new_reaction = Reaction()
new_reaction.id = str(name[0][0])
new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
new_reaction.lower_bound = -inf
if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
new_reaction.upper_bound = inf
if c_vec is not None:
coefficients[new_reaction] = float(c_vec[i][0])
try:
new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
new_reactions.append(new_reaction)
model.add_reactions(new_reactions)
set_objective(model, coefficients)
coo = scipy_sparse.coo_matrix(m["S"][0, 0])
for i, j, v in zip(coo.row, coo.col, coo.data):
model.reactions[j].add_metabolites({model.metabolites[i]: v})
return model
def | (result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
"""send the model to a MATLAB workspace through pymatbridge
This model can then be manipulated through the COBRA toolbox
Parameters
----------
model : cobra.core.Model.Model object
The model to send to the MATLAB workspace
variable_name : str
The variable name to which the model will be assigned in the
MATLAB workspace
matlab : None or pymatbridge.Matlab instance
The MATLAB workspace to which the variable will be sent. If
this is None, then this will be sent to the same environment
used in IPython magics.
"""
if scipy_sparse is None:
raise ImportError("`model_to_pymatbridge` requires scipy!")
if matlab is None: # assumed to be running an IPython magic
from IPython import get_ipython
matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
model_info = create_mat_dict(model)
S = model_info["S"].todok()
model_info["S"] = 0
temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
_check(matlab.set_variable(variable_name, model_info))
_check(matlab.set_variable(temp_S_name, S))
_check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
# all vectors need to be transposed
for i in model_info.keys():
if i == "S":
continue
_check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
_check(matlab.run_code("clear %s;" % temp_S_name))
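# Hedged usage sketch for model_to_pymatbridge (the workspace command below is
# an assumption; any COBRA Toolbox call would do, and `model` is hypothetical):
#
#   from pymatbridge import Matlab
#   mlab = Matlab()
#   mlab.start()
#   model_to_pymatbridge(model, variable_name="model", matlab=mlab)
#   _check(mlab.run_code("sol = optimizeCbModel(model);"))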
thesubdb.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import babelfish
import requests
from . import Provider
from .. import __version__
from ..exceptions import InvalidSubtitle, ProviderNotAvailable, ProviderError
from ..subtitle import Subtitle, is_valid_subtitle, detect
logger = logging.getLogger(__name__)
class TheSubDBSubtitle(Subtitle):
provider_name = 'thesubdb'
def __init__(self, language, hash): # @ReservedAssignment
super(TheSubDBSubtitle, self).__init__(language)
self.hash = hash
def compute_matches(self, video):
matches = set()
# hash
if 'thesubdb' in video.hashes and video.hashes['thesubdb'] == self.hash:
matches.add('hash')
return matches
class TheSubDBProvider(Provider):
languages = set([babelfish.Language.fromalpha2(l) for l in ['en', 'es', 'fr', 'it', 'nl', 'pl', 'pt', 'ro', 'sv', 'tr']])
required_hash = 'thesubdb'
def initialize(self):
self.session = requests.Session()
self.session.headers = {'User-Agent': 'SubDB/1.0 (subliminal/%s; https://github.com/Diaoul/subliminal)' %
__version__}
def terminate(self):
self.session.close()
def get(self, params):
"""Make a GET request on the server with the given parameters
:param params: params of the request
:return: the response
:rtype: :class:`requests.Response`
:raise: :class:`~subliminal.exceptions.ProviderNotAvailable`
"""
try:
r = self.session.get('http://api.thesubdb.com', params=params, timeout=10)
except requests.Timeout:
raise ProviderNotAvailable('Timeout after 10 seconds')
return r
def query(self, hash): # @ReservedAssignment
params = {'action': 'search', 'hash': hash}
logger.debug('Searching subtitles %r', params)
r = self.get(params)
if r.status_code == 404:
logger.debug('No subtitle found')
return []
elif r.status_code != 200:
raise ProviderError('Request failed with status code %d' % r.status_code)
return [TheSubDBSubtitle(language, hash) for language in
set([babelfish.Language.fromalpha2(l) for l in r.content.split(',')])]
def list_subtitles(self, video, languages):
return [s for s in self.query(video.hashes['thesubdb']) if s.language in languages]
def download_subtitle(self, subtitle):
params = {'action': 'download', 'hash': subtitle.hash, 'language': subtitle.language.alpha2}
r = self.get(params)
if r.status_code != 200:
raise ProviderError('Request failed with status code %d' % r.status_code)
logger.debug('Download URL: %s {hash=%s, lang=%s}' % (
'http://api.thesubdb.com', subtitle.hash, subtitle.language.alpha2,
))
subtitle_text = r.content.decode(
detect(r.content, subtitle.language.alpha2)['encoding'], 'replace')
if not is_valid_subtitle(subtitle_text):
raise InvalidSubtitle
return subtitle_text
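# Hedged usage sketch for the provider above (the hash value is made up):
#
#   provider = TheSubDBProvider()
#   provider.initialize()
#   subs = provider.query('ffd8d4aa68033dc03d1c8ef373b9028c')
#   text = provider.download_subtitle(subs[0]) if subs else None
#   provider.terminate()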
build.rs | use std::{
env,
fs::File,
io::{BufRead, BufReader, Write},
path::Path,
};
use quote::ToTokens;
use syn::{parse_quote, visit::Visit, visit_mut::VisitMut};
struct FilterSwigAttrs;
impl VisitMut for FilterSwigAttrs {
fn visit_attribute_mut(&mut self, i: &mut syn::Attribute) {
if i.path
.clone()
.into_token_stream()
.to_string()
.starts_with("swig_")
{
*i = parse_quote! { #[doc = "swig_ replace"] };
}
}
}
mod file_cache {
include!("src/file_cache.rs");
}
mod jni_find_cache {
include!("src/java_jni/find_cache.rs");
}
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
for include_path in &[
Path::new("src/java_jni/jni-include.rs"),
Path::new("src/cpp/cpp-include.rs"),
] {
let src_cnt_tail = std::fs::read_to_string(include_path)
.unwrap_or_else(|err| panic!("Error during read {}: {}", include_path.display(), err));
let mut src_cnt = r#"
macro_rules! foreign_typemap {
($($tree:tt)*) => {};
}
"#
.to_string();
src_cnt.push_str(&src_cnt_tail);
let mut file = syn::parse_file(&src_cnt)
.unwrap_or_else(|err| panic!("Error during parse {}: {}", include_path.display(), err));
let mut filter_swig_attrs = FilterSwigAttrs;
filter_swig_attrs.visit_file_mut(&mut file);
let mut jni_cache_macro_cache = jni_find_cache::JniCacheMacroCalls::default();
let mut visitor = jni_find_cache::JniCacheMacroCallsVisitor {
inner: &mut jni_cache_macro_cache,
errors: vec![],
};
visitor.visit_file(&file);
if !visitor.errors.is_empty() {
panic!("jni cache macros visiting failed: {}", visitor.errors[0]);
}
let mut jni_global_vars = jni_cache_macro_cache.global_vars();
file.items.append(&mut jni_global_vars);
let out_path = Path::new(&out_dir).join(include_path.file_name().expect("No file name"));
let mut cache =
file_cache::FileWriteCache::new(&out_path, &mut file_cache::NoNeedFsOpsRegistration);
let write_err_msg = format!("Error during write to file {}", out_path.display());
write!(&mut cache, "{}", file.into_token_stream().to_string()).expect(&write_err_msg);
cache.update_file_if_necessary().expect(&write_err_msg);
println!("cargo:rerun-if-changed={}", include_path.display());
}
println!("cargo:rerun-if-changed=tests/test_includes_syntax.rs");
let exp_tests_list_path = Path::new("tests").join("expectations").join("tests.list");
let expectation_tests = File::open(&exp_tests_list_path)
.unwrap_or_else(|err| panic!("Can not open {}: {}", exp_tests_list_path.display(), err));
let expectation_tests = BufReader::new(&expectation_tests);
let exp_code_path = Path::new(&out_dir).join("test_expectations.rs");
let mut exp_code =
file_cache::FileWriteCache::new(&exp_code_path, &mut file_cache::NoNeedFsOpsRegistration);
for name in expectation_tests.lines() {
let name = name.unwrap_or_else(|err| {
panic!("Can not read {}: {}", exp_tests_list_path.display(), err)
});
write!(
&mut exp_code,
r##"
#[test]
fn test_expectation_{test_name}() {{
let _ = env_logger::try_init();
let test_case = Path::new("tests").join("expectations").join("{test_name}.rs");
let base_name = test_case.file_stem().expect("name without extension");
let test_name = base_name.to_string_lossy();
let mut test_something = false;
for lang in &[ForeignLang::Cpp, ForeignLang::Java] {{
if check_expectation(&test_name, &test_case, *lang) {{
test_something = true;
}}
}}
assert!(test_something, "empty test");
}}
"##,
test_name = name,
)
.unwrap();
}
exp_code
.update_file_if_necessary()
.unwrap_or_else(|err| panic!("Can not write to {}: {}", exp_code_path.display(), err));
println!("cargo:rerun-if-changed={}", exp_tests_list_path.display());
}
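// For illustration only: a line `foo` in tests.list would make the loop above
// emit (roughly) this test into $OUT_DIR/test_expectations.rs; the name is
// hypothetical:
//
//   #[test]
//   fn test_expectation_foo() {
//       let _ = env_logger::try_init();
//       let test_case = Path::new("tests").join("expectations").join("foo.rs");
//       // ... per-language check_expectation calls as in the template ...
//   }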
backdrop.ts | import { Directive, ElementRef, Input } from '@angular/core';
import { DisableScroll, GestureController, GestureDelegate } from '../../gestures/gesture-controller';
import { isTrueProperty } from '../../util/util';
/**
* @private
*/
@Directive({
selector: 'ion-backdrop',
host: {
'role': 'presentation',
'tappable': '',
'disable-activated': ''
},
})
export class Backdrop {
private _gestureID: number = null;
@Input() disableScroll = true;
constructor(private _gestureCtrl: GestureController, private _elementRef: ElementRef) {}
ngOnInit() {
if (isTrueProperty(this.disableScroll)) {
this._gestureID = this._gestureCtrl.newID();
this._gestureCtrl.disableScroll(this._gestureID);
}
}
ngOnDestroy() {
if (this._gestureID) {
this._gestureCtrl.enableScroll(this._gestureID);
}
}
getNativeElement(): HTMLElement {
return this._elementRef.nativeElement;
}
}
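// Hypothetical template usage (the input name follows the @Input above; the
// click handler is an assumption):
//
//   <ion-backdrop [disableScroll]="false" (click)="dismiss()"></ion-backdrop>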
bin_test.js | /**
* Test for fur bin.
* Runs with mocha.
*/
'use strict'
const assert = require('assert')
const fs = require('fs')
const furBin = require.resolve('../bin/fur')
const execcli = require('execcli')
const mkdirp = require('mkdirp')
let tmpDir = __dirname + '/../tmp'
describe('bin', function () {
this.timeout(24000)
before(async () => {
await mkdirp(tmpDir)
})
after(async () => {
})
it('Generate favicon', async () => {
let filename = tmpDir + '/testing-bin-favicon.png'
await execcli(furBin, [ 'favicon', filename ])
assert.ok(fs.existsSync(filename))
})
it('Generate banner', async () => {
let filename = tmpDir + '/testing-bin-banner.png'
await execcli(furBin, [ 'banner', filename ])
assert.ok(fs.existsSync(filename))
})
})
/* global describe, before, after, it */
postsCtrl.js | angular.module('fishTank')
.controller('PostsCtrl', [
'$scope',
'postsFactory',
'post',
function($scope, postsFactory, post){
$("input.tags").tagsinput('items')
// $("input.form-control").show()
$scope.post = post;
$scope.incrementUpvotes = function(comment) {
postsFactory.upvoteComment(post, comment);
};
$scope.decrementUpvotes = function(comment) {
postsFactory.downvoteComment(post, comment);
};
$scope.addComment = function(){
errors();
if($scope.body === ''){return;}
postsFactory.addComment(post.id, {
body: $scope.body,
author: 'user'
}).success(function(comment) {
$scope.post.comments.push(comment)
});
$scope.body = '';
};
var errors = function() {
$scope.$on('devise:unauthorized', function(event, xhr, deferred) {
$scope.error = xhr.data.error
});
}
}]);
gulpfile.js | var gulp = require('gulp');
var paths = {
scripts: ['js/**/*.js']
};
/**
* Run test once and exit
*/
gulp.task('test', function (done) {
new (require('karma').Server)({
configFile: __dirname + '/karma.conf.js',
singleRun: true
}, done).start();
});
gulp.task('lint', function () {
var jshint = require('gulp-jshint');
return gulp.src(paths.scripts)
.pipe(jshint())
.pipe(jshint.reporter('jshint-stylish'))
.pipe(jshint.reporter('fail'));
});
gulp.task('scripts', function () {
var rename = require("gulp-rename");
return gulp.src("js/allpack.js")
.pipe(require('gulp-requirejs-optimize')({
baseUrl: "./js",
paths: {
circuits: ".",
dojo: "empty:",
dojox: "empty:",
dijit: "empty:"
},
name: "circuits/allpack"
}))
.pipe(rename("circuits-min.js"))
.pipe(gulp.dest('./'));
});
gulp.task('build', function (callback) {
require('run-sequence')('lint', 'test', 'scripts', callback);
});
// The default task (called when you run `gulp` from cli)
gulp.task('default', ['lint', 'test']);
// Versioning tasks
/**
* Increments a version value within the package json and bower json
*/
function inc(importance) {
var git = require('gulp-git'),
bump = require('gulp-bump'),
filter = require('gulp-filter'),
tag_version = require('gulp-tag-version');
// get all the files to bump version in
return gulp.src(['./package.json', './bower.json'])
// bump the version number in those files
.pipe(bump({type: importance}))
// save it back to filesystem
.pipe(gulp.dest('./'))
// commit the changed version number
.pipe(git.commit('bumps package version'))
// read only one file to get the version number
.pipe(filter('package.json'))
// **tag it in the repository**
.pipe(tag_version());
}
gulp.task('patch', function() { return inc('patch'); })
gulp.task('feature', function() { return inc('minor'); })
gulp.task('release', function() { return inc('major'); })
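// Hypothetical CLI usage of the versioning tasks above (version numbers are
// illustrative):
//
//   gulp patch    // e.g. 0.1.0 -> 0.1.1
//   gulp feature  // e.g. 0.1.1 -> 0.2.0
//   gulp release  // e.g. 0.2.0 -> 1.0.0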
o-list-item-text.component.ts | import {
AfterViewInit,
Component,
ElementRef,
forwardRef,
Inject,
Injector,
OnInit,
Optional,
Renderer2,
ViewEncapsulation,
} from '@angular/core';
import { OListItemComponent } from '../../list-item/o-list-item.component';
import {
DEFAULT_INPUTS_O_TEXT_RENDERER,
DEFAULT_OUTPUTS_O_TEXT_RENDERER,
OListItemTextRenderer,
} from '../o-list-item-text-renderer.class';
export const DEFAULT_INPUTS_O_LIST_ITEM_TEXT = [
...DEFAULT_INPUTS_O_TEXT_RENDERER,
'iconPosition : icon-position'
];
export const DEFAULT_OUTPUTS_O_LIST_ITEM_TEXT = [
...DEFAULT_OUTPUTS_O_TEXT_RENDERER
];
@Component({
selector: 'o-list-item-text',
templateUrl: './o-list-item-text.component.html',
styleUrls: ['./o-list-item-text.component.scss'],
inputs: DEFAULT_INPUTS_O_LIST_ITEM_TEXT,
outputs: DEFAULT_OUTPUTS_O_LIST_ITEM_TEXT,
encapsulation: ViewEncapsulation.None,
host: {
'[class.o-custom-list-item]': 'true'
}
})
export class OListItemTextComponent extends OListItemTextRenderer implements OnInit, AfterViewInit {
public ICON_POSITION_LEFT = 'left';
public ICON_POSITION_RIGHT = 'right';
public _iconPosition: string;
constructor(
elRef: ElementRef,
_renderer: Renderer2,
_injector: Injector,
@Optional() @Inject(forwardRef(() => OListItemComponent)) protected _listItem: OListItemComponent
) {
super(elRef, _renderer, _injector, _listItem);
this.elRef.nativeElement.classList.add('o-list-item-text');
}
ngOnInit(): void {
if (!this.iconPosition || [this.ICON_POSITION_LEFT, this.ICON_POSITION_RIGHT].indexOf(this.iconPosition.toLowerCase()) === -1) {
this.iconPosition = this.ICON_POSITION_RIGHT;
}
}
ngAfterViewInit() {
this.modifyMatListItemElement();
}
get iconPosition(): string {
return this._iconPosition;
}
set iconPosition(val: string) {
this._iconPosition = val;
}
}
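// Hypothetical template usage (attribute names follow the inputs declared
// above; the label value is made up):
//
//   <o-list-item-text label="Name" icon-position="left"></o-list-item-text>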
common.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use syntax::ast;
use syntax::codemap::{span};
use syntax::visit;
use core::hashmap::linear::LinearSet;
use core::str;
use std;
pub fn time<T>(do_it: bool, what: ~str, thunk: &fn() -> T) -> T {
if !do_it { return thunk(); }
let start = std::time::precise_time_s();
let rv = thunk();
let end = std::time::precise_time_s();
io::println(fmt!("time: %3.3f s\t%s", end - start, what));
rv
}
pub fn indent<R>(op: &fn() -> R) -> R {
// Use in conjunction with the log post-processor like `src/etc/indenter`
// to make debug output more readable.
debug!(">>");
let r = op();
debug!("<< (Result = %?)", r);
r
}
pub struct _indenter {
_i: (),
}
impl Drop for _indenter {
fn finalize(&self) { debug!("<<"); }
}
pub fn _indenter(_i: ()) -> _indenter {
_indenter {
_i: ()
}
}
pub fn indenter() -> _indenter {
debug!(">>");
_indenter(())
}
pub fn field_expr(f: ast::field) -> @ast::expr { return f.node.expr; }
pub fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] {
fields.map(|f| f.node.expr)
}
// Takes a predicate p, returns true iff p is true for any subexpressions
// of b -- skipping any inner loops (loop, while, loop_body)
pub fn loop_query(b: &ast::blk, p: @fn(ast::expr_) -> bool) -> bool {
let rs = @mut false;
let visit_expr: @fn(@ast::expr,
&&flag: @mut bool,
v: visit::vt<@mut bool>) = |e, &&flag, v| {
*flag |= p(e.node);
match e.node {
// Skip inner loops, since a break in the inner loop isn't a
// break inside the outer loop
ast::expr_loop(*) | ast::expr_while(*)
| ast::expr_loop_body(*) => {}
_ => visit::visit_expr(e, flag, v)
}
};
let v = visit::mk_vt(@visit::Visitor {
visit_expr: visit_expr,
.. *visit::default_visitor()});
visit::visit_block(b, rs, v);
return *rs;
}
// Takes a predicate p, returns true iff p is true for any subexpressions
// of b -- skipping any inner loops (loop, while, loop_body)
pub fn block_query(b: &ast::blk, p: @fn(@ast::expr) -> bool) -> bool {
let rs = @mut false;
let visit_expr: @fn(@ast::expr,
&&flag: @mut bool,
v: visit::vt<@mut bool>) = |e, &&flag, v| {
*flag |= p(e);
visit::visit_expr(e, flag, v)
};
let v = visit::mk_vt(@visit::Visitor{
visit_expr: visit_expr,
.. *visit::default_visitor()});
visit::visit_block(b, rs, v);
return *rs;
}
pub fn local_rhs_span(l: @ast::local, def: span) -> span {
match l.node.init {
Some(i) => return i.span,
_ => return def
}
}
pub fn pluralize(n: uint, +s: ~str) -> ~str {
if n == 1 { s }
else { str::concat([s, ~"s"]) }
}
// A set of node IDs (used to keep track of which node IDs are for statements)
pub type stmt_set = @mut LinearSet<ast::node_id>;
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//
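// Hypothetical call site for `time` above (the pass name and thunk are made
// up; syntax follows this 2013-era Rust dialect):
//
//   let ast = time(true, ~"parsing", || parse_crate(sess));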
vr_event.rs | use {VRDisplayData, VRGamepadData, VRGamepadState};
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VREvent {
Display(VRDisplayEvent),
Gamepad(VRGamepadEvent),
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRDisplayEventReason {
Navigation,
/// The VRDisplay has detected that the user has put it on.
Mounted,
/// The VRDisplay has detected that the user has taken it off.
Unmounted
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRDisplayEvent {
/// Indicates that a VRDisplay has been connected.
Connect(VRDisplayData),
/// Indicates that a VRDisplay has been disconnected.
/// param: display_id
Disconnect(u32),
/// Indicates that something has occured which suggests the VRDisplay should be presented to
Activate(VRDisplayData, VRDisplayEventReason),
/// Indicates that something has occured which suggests the VRDisplay should exit presentation
Deactivate(VRDisplayData, VRDisplayEventReason),
/// Indicates that some of the VRDisplay's data has changed (eye parameters, tracking data, chaperone, ipd, etc.)
Change(VRDisplayData),
/// Indicates that presentation to the display by the page is paused by the user agent, OS, or VR hardware
Blur(VRDisplayData),
/// Indicates that presentation to the display by the page has resumed after being blurred.
Focus(VRDisplayData),
/// Indicates that a VRDisplay has begun or ended VR presentation
PresentChange(VRDisplayData, bool),
/// Indicates that VRDisplay presentation loop must be paused (i.e Android app goes to background)
Pause(u32),
/// Indicates that VRDisplay presentation loop must be resumed (i.e Android app goes to foreground)
Resume(u32),
/// Indicates that user has exited VRDisplay presentation (i.e. User clicked back key on android)
Exit(u32)
}
impl Into<VREvent> for VRDisplayEvent {
fn into(self) -> VREvent {
VREvent::Display(self)
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRGamepadEvent {
/// Indicates that a VRGamepad has been connected.
/// params: name, displa_id, state
Connect(VRGamepadData, VRGamepadState),
/// Indicates that a VRGamepad has been disconnected.
/// param: gamepad_id
Disconnect(u32)
}
impl Into<VREvent> for VRGamepadEvent {
fn into(self) -> VREvent {
VREvent::Gamepad(self)
}
}
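// Hypothetical consumer of the event enums above (the println! bodies are
// placeholders; only variants shown in this file are used):
//
//   match event {
//       VREvent::Display(VRDisplayEvent::Disconnect(id)) => println!("display {} gone", id),
//       VREvent::Gamepad(VRGamepadEvent::Disconnect(id)) => println!("gamepad {} gone", id),
//       _ => {}
//   }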
vr_event.rs | use {VRDisplayData, VRGamepadData, VRGamepadState};
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VREvent {
Display(VRDisplayEvent),
Gamepad(VRGamepadEvent),
}
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRDisplayEventReason {
Navigation,
/// The VRDisplay has detected that the user has put it on.
Mounted,
/// The VRDisplay has detected that the user has taken it off.
Unmounted
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRDisplayEvent {
/// Indicates that a VRDisplay has been connected.
Connect(VRDisplayData),
/// Indicates that a VRDisplay has been disconnected.
/// param: display_id
Disconnect(u32),
/// Indicates that something has occurred which suggests the VRDisplay should be presented to
Activate(VRDisplayData, VRDisplayEventReason),
/// Indicates that something has occurred which suggests the VRDisplay should exit presentation
Deactivate(VRDisplayData, VRDisplayEventReason),
/// Indicates that some of the VRDisplay's data has changed (eye parameters, tracking data, chaperone, ipd, etc.)
Change(VRDisplayData),
/// Indicates that presentation to the display by the page is paused by the user agent, OS, or VR hardware
Blur(VRDisplayData),
/// Indicates that presentation to the display by the page has resumed after being blurred.
Focus(VRDisplayData),
/// Indicates that a VRDisplay has begun or ended VR presentation | Pause(u32),
/// Indicates that VRDisplay presentation loop must be resumed (i.e. Android app goes to foreground)
Resume(u32),
/// Indicates that the user has exited VRDisplay presentation (i.e. the user clicked the back key on Android)
Exit(u32)
}
impl Into<VREvent> for VRDisplayEvent {
fn into(self) -> VREvent {
VREvent::Display(self)
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde-serialization", derive(Deserialize, Serialize))]
pub enum VRGamepadEvent {
/// Indicates that a VRGamepad has been connected.
/// params: name, display_id, state
Connect(VRGamepadData, VRGamepadState),
/// Indicates that a VRGamepad has been disconnected.
/// param: gamepad_id
Disconnect(u32)
}
impl Into<VREvent> for VRGamepadEvent {
fn into(self) -> VREvent {
VREvent::Gamepad(self)
}
} | PresentChange(VRDisplayData, bool),
/// Indicates that VRDisplay presentation loop must be paused (i.e. Android app goes to background) | random_line_split
0004_auto_20141229_1211.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc
import datetime
class Migration(migrations.Migration):
| dependencies = [
('feed', '0003_auto_20141227_2343'),
]
operations = [
migrations.AddField(
model_name='newsarticle',
name='created',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 7, 540368, tzinfo=utc), auto_now_add=True),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='slug',
field=models.SlugField(default=datetime.datetime(2014, 12, 29, 11, 11, 29, 101175, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 42, 82623, tzinfo=utc), auto_now=True),
preserve_default=False,
),
] | identifier_body |
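In an identifier_body row like this one, the prefix ends at the `class Migration(migrations.Migration):` header, the middle is the whole indented class body, and the suffix is empty. Since this row happens to be Python, a reconstruction can be sanity-checked with the standard-library parser — a sketch with an abbreviated body:

import ast

prefix = (
    "import datetime\n"
    "class Migration(migrations.Migration):\n"
)
middle = (  # abbreviated; the real middle is the full body shown above
    "    dependencies = [\n"
    "        ('feed', '0003_auto_20141227_2343'),\n"
    "    ]\n"
)
suffix = ""  # nothing follows the class in this file

# ast.parse raises SyntaxError if indentation was mangled by the
# flattened pipe-separated view, so this catches bad splices cheaply.
ast.parse(prefix + middle + suffix)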
|
0004_auto_20141229_1211.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc
import datetime
class | (migrations.Migration):
dependencies = [
('feed', '0003_auto_20141227_2343'),
]
operations = [
migrations.AddField(
model_name='newsarticle',
name='created',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 7, 540368, tzinfo=utc), auto_now_add=True),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='slug',
field=models.SlugField(default=datetime.datetime(2014, 12, 29, 11, 11, 29, 101175, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 42, 82623, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
| Migration | identifier_name |
0004_auto_20141229_1211.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc |
dependencies = [
('feed', '0003_auto_20141227_2343'),
]
operations = [
migrations.AddField(
model_name='newsarticle',
name='created',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 7, 540368, tzinfo=utc), auto_now_add=True),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='slug',
field=models.SlugField(default=datetime.datetime(2014, 12, 29, 11, 11, 29, 101175, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='newsarticle',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2014, 12, 29, 11, 11, 42, 82623, tzinfo=utc), auto_now=True),
preserve_default=False,
),
] | import datetime
class Migration(migrations.Migration): | random_line_split |
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
Using the `finally` method is sometimes convenient, but the type rules
prohibit any shared, mutable state between the "try" case and the
"finally" case. For advanced cases, the `try_finally` function can
also be used. See that function for more details.
# Example
```
use std::unstable::finally::Finally;
(|| {
// ...
}).finally(|| {
// this code is always run
})
```
*/
#![experimental]
use ops::Drop;
/// A trait for executing a destructor unconditionally after a block of code,
/// regardless of whether the block fails.
pub trait Finally<T> {
/// Executes this object, unconditionally running `dtor` after this block of
/// code has run.
fn finally(&mut self, dtor: ||) -> T;
}
impl<'a,T> Finally<T> for ||: 'a -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), self,
|_, f| (*f)(),
|_| dtor())
}
}
impl<T> Finally<T> for fn() -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), (),
|_, _| (*self)(),
|_| dtor())
}
}
/**
* The most general form of the `finally` functions. The function
* `try_fn` will be invoked first; whether or not it fails, the
* function `finally_fn` will be invoked next. The two parameters
* `mutate` and `drop` are used to thread state through the two
* closures. `mutate` is used for any shared, mutable state that both
* closures require access to; `drop` is used for any state that the
* `try_fn` requires ownership of.
*
* **WARNING:** While shared, mutable state between the try and finally
* function is often necessary, one must be very careful; the `try`
* function could have failed at any point, so the values of the shared
* state may be inconsistent.
*
* # Example
*
* ```
* use std::unstable::finally::try_finally;
*
* struct State<'a> { buffer: &'a mut [u8], len: uint }
* # let mut buf = [];
* let mut state = State { buffer: buf, len: 0 };
* try_finally(
* &mut state, (),
* |state, ()| {
* // use state.buffer, state.len
* },
* |state| {
* // use state.buffer, state.len to cleanup
* })
* ```
*/
pub fn try_finally<T,U,R>(mutate: &mut T,
drop: U,
try_fn: |&mut T, U| -> R,
finally_fn: |&mut T|)
-> R {
let f = Finallyalizer {
mutate: mutate,
dtor: finally_fn,
};
try_fn(&mut *f.mutate, drop)
}
struct Finallyalizer<'a,A> {
mutate: &'a mut A,
dtor: |&mut A|: 'a
}
#[unsafe_destructor]
impl<'a,A> Drop for Finallyalizer<'a,A> {
#[inline]
fn drop(&mut self) |
}
#[cfg(test)]
mod test {
use super::{try_finally, Finally};
use realstd::task::failing;
#[test]
fn test_success() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
},
|i| {
assert!(!failing());
assert_eq!(*i, 10);
*i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn test_fail() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
fail!();
},
|i| {
assert!(failing());
assert_eq!(*i, 10);
})
}
#[test]
fn test_retval() {
let mut closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn do_some_fallible_work() {}
fn but_always_run_this_function() { }
let mut f = do_some_fallible_work;
f.finally(but_always_run_this_function);
}
}
| {
(self.dtor)(self.mutate);
} | identifier_body |
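The doc comment in this row says `finally` emulates Java-style try/finally, and the `test_success` case pins down the semantics: mutate shared state in the try body, then inspect and mutate it again in the cleanup. For comparison only, the same contract written with Python's native try/finally (not part of the original sources):

def try_finally(state, try_fn, finally_fn):
    # finally_fn always runs, whether or not try_fn raises --
    # the same guarantee the Finallyalizer's Drop impl provides above.
    try:
        return try_fn(state)
    finally:
        finally_fn(state)

state = {"i": 0}

def body(s):
    s["i"] = 10

def cleanup(s):
    assert s["i"] == 10
    s["i"] = 20

try_finally(state, body, cleanup)
assert state["i"] == 20   # mirrors test_success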
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! | "finally" case. For advanced cases, the `try_finally` function can
also be used. See that function for more details.
# Example
```
use std::unstable::finally::Finally;
(|| {
// ...
}).finally(|| {
// this code is always run
})
```
*/
#![experimental]
use ops::Drop;
/// A trait for executing a destructor unconditionally after a block of code,
/// regardless of whether the block fails.
pub trait Finally<T> {
/// Executes this object, unconditionally running `dtor` after this block of
/// code has run.
fn finally(&mut self, dtor: ||) -> T;
}
impl<'a,T> Finally<T> for ||: 'a -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), self,
|_, f| (*f)(),
|_| dtor())
}
}
impl<T> Finally<T> for fn() -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), (),
|_, _| (*self)(),
|_| dtor())
}
}
/**
* The most general form of the `finally` functions. The function
* `try_fn` will be invoked first; whether or not it fails, the
* function `finally_fn` will be invoked next. The two parameters
* `mutate` and `drop` are used to thread state through the two
* closures. `mutate` is used for any shared, mutable state that both
* closures require access to; `drop` is used for any state that the
* `try_fn` requires ownership of.
*
* **WARNING:** While shared, mutable state between the try and finally
* function is often necessary, one must be very careful; the `try`
* function could have failed at any point, so the values of the shared
* state may be inconsistent.
*
* # Example
*
* ```
* use std::unstable::finally::try_finally;
*
* struct State<'a> { buffer: &'a mut [u8], len: uint }
* # let mut buf = [];
* let mut state = State { buffer: buf, len: 0 };
* try_finally(
* &mut state, (),
* |state, ()| {
* // use state.buffer, state.len
* },
* |state| {
* // use state.buffer, state.len to cleanup
* })
* ```
*/
pub fn try_finally<T,U,R>(mutate: &mut T,
drop: U,
try_fn: |&mut T, U| -> R,
finally_fn: |&mut T|)
-> R {
let f = Finallyalizer {
mutate: mutate,
dtor: finally_fn,
};
try_fn(&mut *f.mutate, drop)
}
struct Finallyalizer<'a,A> {
mutate: &'a mut A,
dtor: |&mut A|: 'a
}
#[unsafe_destructor]
impl<'a,A> Drop for Finallyalizer<'a,A> {
#[inline]
fn drop(&mut self) {
(self.dtor)(self.mutate);
}
}
#[cfg(test)]
mod test {
use super::{try_finally, Finally};
use realstd::task::failing;
#[test]
fn test_success() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
},
|i| {
assert!(!failing());
assert_eq!(*i, 10);
*i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn test_fail() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
fail!();
},
|i| {
assert!(failing());
assert_eq!(*i, 10);
})
}
#[test]
fn test_retval() {
let mut closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn do_some_fallible_work() {}
fn but_always_run_this_function() { }
let mut f = do_some_fallible_work;
f.finally(but_always_run_this_function);
}
} | The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
Using the `finally` method is sometimes convenient, but the type rules
prohibit any shared, mutable state between the "try" case and the | random_line_split |
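This row's middle even ends mid-sentence inside a doc comment, which is the point of random_line_split: the cut ignores syntax entirely. A plausible generator for line-granularity splits — the sampling parameters are assumptions, and the rows here suggest real cuts may also land mid-line:

import random

def random_line_split(source, max_lines=4):
    # splitlines(keepends=True) preserves the exact newline bytes so that
    # prefix + middle + suffix reproduces the file verbatim.
    lines = source.splitlines(keepends=True)
    start = random.randrange(len(lines))
    end = min(len(lines), start + random.randint(1, max_lines))
    return {
        "prefix": "".join(lines[:start]),
        "middle": "".join(lines[start:end]),
        "suffix": "".join(lines[end:]),
        "fim_type": "random_line_split",
    }

src = "a = 1\nb = 2\nc = 3\nd = 4\n"
row = random_line_split(src)
assert row["prefix"] + row["middle"] + row["suffix"] == src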
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
Using the `finally` method is sometimes convenient, but the type rules
prohibit any shared, mutable state between the "try" case and the
"finally" case. For advanced cases, the `try_finally` function can
also be used. See that function for more details.
# Example
```
use std::unstable::finally::Finally;
(|| {
// ...
}).finally(|| {
// this code is always run
})
```
*/
#![experimental]
use ops::Drop;
/// A trait for executing a destructor unconditionally after a block of code,
/// regardless of whether the block fails.
pub trait Finally<T> {
/// Executes this object, unconditionally running `dtor` after this block of
/// code has run.
fn finally(&mut self, dtor: ||) -> T;
}
impl<'a,T> Finally<T> for ||: 'a -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), self,
|_, f| (*f)(),
|_| dtor())
}
}
impl<T> Finally<T> for fn() -> T {
fn finally(&mut self, dtor: ||) -> T {
try_finally(&mut (), (),
|_, _| (*self)(),
|_| dtor())
}
}
/**
* The most general form of the `finally` functions. The function
* `try_fn` will be invoked first; whether or not it fails, the
* function `finally_fn` will be invoked next. The two parameters
* `mutate` and `drop` are used to thread state through the two
* closures. `mutate` is used for any shared, mutable state that both
* closures require access to; `drop` is used for any state that the
* `try_fn` requires ownership of.
*
* **WARNING:** While shared, mutable state between the try and finally
* function is often necessary, one must be very careful; the `try`
* function could have failed at any point, so the values of the shared
* state may be inconsistent.
*
* # Example
*
* ```
* use std::unstable::finally::try_finally;
*
* struct State<'a> { buffer: &'a mut [u8], len: uint }
* # let mut buf = [];
* let mut state = State { buffer: buf, len: 0 };
* try_finally(
* &mut state, (),
* |state, ()| {
* // use state.buffer, state.len
* },
* |state| {
* // use state.buffer, state.len to cleanup
* })
* ```
*/
pub fn try_finally<T,U,R>(mutate: &mut T,
drop: U,
try_fn: |&mut T, U| -> R,
finally_fn: |&mut T|)
-> R {
let f = Finallyalizer {
mutate: mutate,
dtor: finally_fn,
};
try_fn(&mut *f.mutate, drop)
}
struct Finallyalizer<'a,A> {
mutate: &'a mut A,
dtor: |&mut A|: 'a
}
#[unsafe_destructor]
impl<'a,A> Drop for Finallyalizer<'a,A> {
#[inline]
fn drop(&mut self) {
(self.dtor)(self.mutate);
}
}
#[cfg(test)]
mod test {
use super::{try_finally, Finally};
use realstd::task::failing;
#[test]
fn test_success() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
},
|i| {
assert!(!failing());
assert_eq!(*i, 10);
*i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn test_fail() {
let mut i = 0;
try_finally(
&mut i, (),
|i, ()| {
*i = 10;
fail!();
},
|i| {
assert!(failing());
assert_eq!(*i, 10);
})
}
#[test]
fn test_retval() {
let mut closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn | () {}
fn but_always_run_this_function() { }
let mut f = do_some_fallible_work;
f.finally(but_always_run_this_function);
}
}
| do_some_fallible_work | identifier_name |
VirtualTimeScheduler.ts | import { AsyncAction } from './AsyncAction';
import { Subscription } from '../Subscription';
import { AsyncScheduler } from './AsyncScheduler';
export class VirtualTimeScheduler extends AsyncScheduler {
protected static frameTimeFactor: number = 10;
public frame: number = 0;
public index: number = -1;
constructor(SchedulerAction: typeof AsyncAction = VirtualAction,
public maxFrames: number = Number.POSITIVE_INFINITY) {
super(SchedulerAction, () => this.frame);
}
/**
* Prompt the Scheduler to execute all of its queued actions, thereby
* clearing its queue.
* @return {void}
*/
public flush(): void {
const {actions, maxFrames} = this;
let error: any, action: AsyncAction<any>;
while ((action = actions.shift()) && (this.frame = action.delay) <= maxFrames) {
if (error = action.execute(action.state, action.delay)) {
break;
}
}
if (error) {
while (action = actions.shift()) {
action.unsubscribe();
}
throw error;
}
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
export class VirtualAction<T> extends AsyncAction<T> {
constructor(protected scheduler: VirtualTimeScheduler,
protected work: (this: VirtualAction<T>, state?: T) => void,
protected index: number = scheduler.index += 1) {
super(scheduler, work);
this.index = scheduler.index = index;
}
public schedule(state?: T, delay: number = 0): Subscription {
return !this.id ?
super.schedule(state, delay) : (
// If an action is rescheduled, we save allocations by mutating its state,
// pushing it to the end of the scheduler queue, and recycling the action.
// But since the VirtualTimeScheduler is used for testing, VirtualActions | <VirtualAction<T>> this.add(
new VirtualAction<T>(this.scheduler, this.work))
).schedule(state, delay);
}
protected requestAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
this.delay = scheduler.frame + delay;
const {actions} = scheduler;
actions.push(this);
actions.sort(VirtualAction.sortActions);
return true;
}
protected recycleAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
return undefined;
}
public static sortActions<T>(a: VirtualAction<T>, b: VirtualAction<T>) {
if (a.delay === b.delay) {
if (a.index === b.index) {
return 0;
} else if (a.index > b.index) {
return 1;
} else {
return -1;
}
} else if (a.delay > b.delay) {
return 1;
} else {
return -1;
}
}
} | // must be immutable so they can be inspected later. | random_line_split |
VirtualTimeScheduler.ts | import { AsyncAction } from './AsyncAction';
import { Subscription } from '../Subscription';
import { AsyncScheduler } from './AsyncScheduler';
export class VirtualTimeScheduler extends AsyncScheduler {
protected static frameTimeFactor: number = 10;
public frame: number = 0;
public index: number = -1;
constructor(SchedulerAction: typeof AsyncAction = VirtualAction,
public maxFrames: number = Number.POSITIVE_INFINITY) {
super(SchedulerAction, () => this.frame);
}
/**
* Prompt the Scheduler to execute all of its queued actions, thereby
* clearing its queue.
* @return {void}
*/
public flush(): void {
const {actions, maxFrames} = this;
let error: any, action: AsyncAction<any>;
while ((action = actions.shift()) && (this.frame = action.delay) <= maxFrames) {
if (error = action.execute(action.state, action.delay)) |
}
if (error) {
while (action = actions.shift()) {
action.unsubscribe();
}
throw error;
}
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
export class VirtualAction<T> extends AsyncAction<T> {
constructor(protected scheduler: VirtualTimeScheduler,
protected work: (this: VirtualAction<T>, state?: T) => void,
protected index: number = scheduler.index += 1) {
super(scheduler, work);
this.index = scheduler.index = index;
}
public schedule(state?: T, delay: number = 0): Subscription {
return !this.id ?
super.schedule(state, delay) : (
// If an action is rescheduled, we save allocations by mutating its state,
// pushing it to the end of the scheduler queue, and recycling the action.
// But since the VirtualTimeScheduler is used for testing, VirtualActions
// must be immutable so they can be inspected later.
<VirtualAction<T>> this.add(
new VirtualAction<T>(this.scheduler, this.work))
).schedule(state, delay);
}
protected requestAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
this.delay = scheduler.frame + delay;
const {actions} = scheduler;
actions.push(this);
actions.sort(VirtualAction.sortActions);
return true;
}
protected recycleAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
return undefined;
}
public static sortActions<T>(a: VirtualAction<T>, b: VirtualAction<T>) {
if (a.delay === b.delay) {
if (a.index === b.index) {
return 0;
} else if (a.index > b.index) {
return 1;
} else {
return -1;
}
} else if (a.delay > b.delay) {
return 1;
} else {
return -1;
}
}
}
| {
break;
} | conditional_block |
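Here the middle is the `{ break; }` block hanging off the `if`, i.e. a conditional_block hole. Cutting such a block out needs balanced-brace scanning rather than a regex; a simplified sketch that deliberately ignores braces inside strings and comments:

def extract_block(source, open_idx):
    # open_idx must point at a '{'; scan forward to its matching '}'.
    depth = 0
    for i in range(open_idx, len(source)):
        if source[i] == "{":
            depth += 1
        elif source[i] == "}":
            depth -= 1
            if depth == 0:
                return source[open_idx : i + 1]
    raise ValueError("unbalanced braces")

src = "if (error = action.execute(action.state, action.delay)) {\n  break;\n}"
block = extract_block(src, src.index("{"))
assert block == "{\n  break;\n}"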
VirtualTimeScheduler.ts | import { AsyncAction } from './AsyncAction';
import { Subscription } from '../Subscription';
import { AsyncScheduler } from './AsyncScheduler';
export class VirtualTimeScheduler extends AsyncScheduler {
protected static frameTimeFactor: number = 10;
public frame: number = 0;
public index: number = -1;
constructor(SchedulerAction: typeof AsyncAction = VirtualAction,
public maxFrames: number = Number.POSITIVE_INFINITY) {
super(SchedulerAction, () => this.frame);
}
/**
* Prompt the Scheduler to execute all of its queued actions, thereby
* clearing its queue.
* @return {void}
*/
public flush(): void {
const {actions, maxFrames} = this;
let error: any, action: AsyncAction<any>;
while ((action = actions.shift()) && (this.frame = action.delay) <= maxFrames) {
if (error = action.execute(action.state, action.delay)) {
break;
}
}
if (error) {
while (action = actions.shift()) {
action.unsubscribe();
}
throw error;
}
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
export class VirtualAction<T> extends AsyncAction<T> {
constructor(protected scheduler: VirtualTimeScheduler,
protected work: (this: VirtualAction<T>, state?: T) => void,
protected index: number = scheduler.index += 1) {
super(scheduler, work);
this.index = scheduler.index = index;
}
public schedule(state?: T, delay: number = 0): Subscription |
protected requestAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
this.delay = scheduler.frame + delay;
const {actions} = scheduler;
actions.push(this);
actions.sort(VirtualAction.sortActions);
return true;
}
protected recycleAsyncId(scheduler: VirtualTimeScheduler, id?: any, delay: number = 0): any {
return undefined;
}
public static sortActions<T>(a: VirtualAction<T>, b: VirtualAction<T>) {
if (a.delay === b.delay) {
if (a.index === b.index) {
return 0;
} else if (a.index > b.index) {
return 1;
} else {
return -1;
}
} else if (a.delay > b.delay) {
return 1;
} else {
return -1;
}
}
}
| {
return !this.id ?
super.schedule(state, delay) : (
// If an action is rescheduled, we save allocations by mutating its state,
// pushing it to the end of the scheduler queue, and recycling the action.
// But since the VirtualTimeScheduler is used for testing, VirtualActions
// must be immutable so they can be inspected later.
<VirtualAction<T>> this.add(
new VirtualAction<T>(this.scheduler, this.work))
).schedule(state, delay);
} | identifier_body |
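Across this slice of the dump, the same source file recurs once per hole type — the four fim_type values seen above are random_line_split, identifier_name, identifier_body, and conditional_block. A trivial way to tally that distribution when iterating the rows (field name assumed as before):

from collections import Counter

def fim_type_counts(rows):
    return Counter(row["fim_type"] for row in rows)

rows = [
    {"fim_type": "random_line_split"},
    {"fim_type": "identifier_name"},
    {"fim_type": "identifier_body"},
    {"fim_type": "conditional_block"},
    {"fim_type": "random_line_split"},
]
assert fim_type_counts(rows)["random_line_split"] == 2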