| prompt | completion |
| --- | --- |
| large_string, lengths 70 to 991k | large_string, lengths 0 to 1.02k |
<|file_name|>edit.js<|end_file_name|><|fim▁begin|>'use strict';
var async = require('async');
var validator = require('validator');
var _ = require('underscore');
var db = require('../database');
var topics = require('../topics');
var user = require('../user');
var privileges = require('../privileges');
var plugins = require('../plugins');
var cache = require('./cache');
var pubsub = require('../pubsub');
var utils = require('../../public/src/utils');
module.exports = function (Posts) {
pubsub.on('post:edit', function (pid) {
cache.del(pid);
});
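// Note: the cache is cleared twice on purpose. Posts.edit below clears the
// local cache directly, while the 'post:edit' pubsub message (handled above)
// clears the cached copy held by any other NodeBB process in the cluster.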
Posts.edit = function (data, callback) {
var postData;
var results;
async.waterfall([
function (next) {
privileges.posts.canEdit(data.pid, data.uid, next);
},
function (canEdit, next) {
if (!canEdit.flag) {
return next(new Error(canEdit.message));
}
Posts.getPostData(data.pid, next);
},
function (_postData, next) {
if (!_postData) {
return next(new Error('[[error:no-post]]'));
}
postData = _postData;
postData.content = data.content;
postData.edited = Date.now();
postData.editor = data.uid;
if (data.handle) {
postData.handle = data.handle;
}
plugins.fireHook('filter:post.edit', {req: data.req, post: postData, data: data, uid: data.uid}, next);
},
function (result, next) {
postData = result.post;
Posts.setPostFields(data.pid, postData, next);
},
function (next) {
async.parallel({
editor: function (next) {
user.getUserFields(data.uid, ['username', 'userslug'], next);
},
topic: function (next) {
editMainPost(data, postData, next);
}
}, next);
},
function (_results, next) {
results = _results;
postData.cid = results.topic.cid;
postData.topic = results.topic;
plugins.fireHook('action:post.edit', _.clone(postData));
cache.del(String(postData.pid));
pubsub.publish('post:edit', String(postData.pid));
Posts.parsePost(postData, next);
},
function (postData, next) {
results.post = postData;
next(null, results);
}
], callback);
};
<|fim▁hole|> async.parallel({
topic: function (next) {
topics.getTopicFields(tid, ['cid', 'title'], next);
},
isMain: function (next) {
Posts.isMain(data.pid, next);
}
}, function (err, results) {
if (err) {
return callback(err);
}
if (!results.isMain) {
return callback(null, {
tid: tid,
cid: results.topic.cid,
isMainPost: false,
renamed: false
});
}
var topicData = {
tid: tid,
cid: results.topic.cid,
uid: postData.uid,
mainPid: data.pid
};
if (title) {
topicData.title = title;
topicData.slug = tid + '/' + (utils.slugify(title) || 'topic');
}
topicData.thumb = data.thumb || '';
data.tags = data.tags || [];
async.waterfall([
function (next) {
plugins.fireHook('filter:topic.edit', {req: data.req, topic: topicData, data: data}, next);
},
function (results, next) {
db.setObject('topic:' + tid, results.topic, next);
},
function (next) {
topics.updateTags(tid, data.tags, next);
},
function (next) {
topics.getTopicTagsObjects(tid, next);
},
function (tags, next) {
topicData.tags = data.tags;
topicData.oldTitle = results.topic.title;
plugins.fireHook('action:topic.edit', topicData);
next(null, {
tid: tid,
cid: results.topic.cid,
uid: postData.uid,
title: validator.escape(String(title)),
oldTitle: results.topic.title,
slug: topicData.slug,
isMainPost: true,
renamed: title !== results.topic.title,
tags: tags
});
}
], callback);
});
}
};<|fim▁end|> | function editMainPost(data, postData, callback) {
var tid = postData.tid;
var title = data.title ? data.title.trim() : '';
|
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>__all__ = [
'RedisError',
'ProtocolError',
'ReplyError',
'MaxClientsError',
'AuthError',
'PipelineError',
'MultiExecError',
'WatchVariableError',
'ChannelClosedError',
'ConnectionClosedError',
'ConnectionForcedCloseError',
'PoolClosedError',
'MasterNotFoundError',
'SlaveNotFoundError',
'ReadOnlyError',
]
class RedisError(Exception):
"""Base exception class for aioredis exceptions."""
class ProtocolError(RedisError):
"""Raised when protocol error occurs."""
class ReplyError(RedisError):
"""Raised for redis error replies (-ERR)."""
MATCH_REPLY = None
def __new__(cls, msg, *args):
for klass in cls.__subclasses__():
if msg and klass.MATCH_REPLY and msg.startswith(klass.MATCH_REPLY):
return klass(msg, *args)
return super().__new__(cls, msg, *args)
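# A sketch of the dispatch above (the reply text is illustrative):
#
#   err = ReplyError("ERR max number of clients reached")
#   assert type(err) is MaxClientsError
#
# __new__ scans the direct subclasses and instantiates the first one whose
# MATCH_REPLY prefix matches the reply, so callers can always raise
# ReplyError(msg) and still surface the most specific exception type.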
class MaxClientsError(ReplyError):
"""Raised for redis server when the maximum number of client has been
reached."""
MATCH_REPLY = "ERR max number of clients reached"
class AuthError(ReplyError):
"""Raised when authentication errors occurs."""
MATCH_REPLY = ("NOAUTH ", "ERR invalid password")
class PipelineError(RedisError):
"""Raised if command within pipeline raised error."""
def __init__(self, errors):
super().__init__('{} errors:'.format(self.__class__.__name__), errors)
class MultiExecError(PipelineError):
"""Raised if command within MULTI/EXEC block caused error."""
class WatchVariableError(MultiExecError):
"""Raised if watched variable changed (EXEC returns None)."""
class ChannelClosedError(RedisError):
"""Raised when Pub/Sub channel is unsubscribed and messages queue is empty.
"""
class ReadOnlyError(RedisError):
"""Raised from slave when read-only mode is enabled"""
class MasterNotFoundError(RedisError):
"""Raised for sentinel master not found error."""
class SlaveNotFoundError(RedisError):
"""Raised for sentinel slave not found error."""
class MasterReplyError(RedisError):
"""Raised by sentinel client for master error replies."""
class SlaveReplyError(RedisError):
"""Raised by sentinel client for slave error replies."""
class ConnectionClosedError(RedisError):
"""Raised if connection to server was closed."""
class ConnectionForcedCloseError(ConnectionClosedError):
"""Raised if connection was closed with .close() method."""
<|fim▁hole|>
class RedisClusterError(RedisError):
"""Cluster exception class for aioredis exceptions."""<|fim▁end|> |
class PoolClosedError(RedisError):
"""Raised if pool is closed.""" |
<|file_name|>sqlalchemy_utils.py<|end_file_name|><|fim▁begin|>from typing import Any, Optional
import sqlalchemy
from django.db import connection
from zerver.lib.db import TimeTrackingConnection
<|fim▁hole|># existing Django database connections.
class NonClosingPool(sqlalchemy.pool.NullPool):
def status(self) -> str:
return "NonClosingPool"
def _do_return_conn(self, conn: sqlalchemy.engine.base.Connection) -> None:
pass
def recreate(self) -> 'NonClosingPool':
return self.__class__(creator=self._creator,
recycle=self._recycle,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch)
sqlalchemy_engine: Optional[Any] = None
def get_sqlalchemy_connection() -> sqlalchemy.engine.base.Connection:
global sqlalchemy_engine
if sqlalchemy_engine is None:
def get_dj_conn() -> TimeTrackingConnection:
connection.ensure_connection()
return connection.connection
sqlalchemy_engine = sqlalchemy.create_engine('postgresql://',
creator=get_dj_conn,
poolclass=NonClosingPool,
pool_reset_on_return=False)
sa_connection = sqlalchemy_engine.connect()
sa_connection.execution_options(autocommit=False)
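# autocommit=False leaves transaction control to Django, whose connection
# this engine borrows through NonClosingPool, rather than letting
# SQLAlchemy issue its own COMMITs.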
return sa_connection<|fim▁end|> | # This is a Pool that doesn't close connections. Therefore it can be used with |
<|file_name|>0078_auto__chg_field_alarm_last_checked.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Alarm.last_checked'
db.alter_column(u'ddsc_core_alarm', 'last_checked', self.gf('django.db.models.fields.DateTimeField')(null=True))
def backwards(self, orm):
# Changing field 'Alarm.last_checked'
db.alter_column(u'ddsc_core_alarm', 'last_checked', self.gf('django.db.models.fields.DateTimeField')())
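# forwards() relaxes last_checked to NULLable; backwards() restores NOT NULL,
# which will fail on rollback while any Alarm row still has a NULL
# last_checked, so those rows need cleaning up first.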
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'ddsc_core.alarm': {
'Meta': {'object_name': 'Alarm'},
'active_status': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_cr': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True', 'blank': 'True'}),
'first_born': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'frequency': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'logical_check': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'message_type': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'previous_alarm': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Alarm']", 'null': 'True', 'blank': 'True'}),
'single_or_group': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['contenttypes.ContentType']"}),
'template': ('django.db.models.fields.TextField', [], {'default': "u'this is a alarm message template'"}),
'urgency': ('django.db.models.fields.IntegerField', [], {'default': '2'})
},
u'ddsc_core.alarm_active': {
'Meta': {'object_name': 'Alarm_Active'},<|fim▁hole|> 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {})
},
u'ddsc_core.alarm_item': {
'Meta': {'object_name': 'Alarm_Item'},
'alarm': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Alarm']"}),
'alarm_type': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['contenttypes.ContentType']"}),
'comparision': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_born': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logical_check': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value_bool': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'value_double': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_int': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'value_type': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'ddsc_core.compartment': {
'Meta': {'ordering': "[u'description']", 'object_name': 'Compartment'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'numeric_code': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.folder': {
'Meta': {'object_name': 'Folder'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
u'ddsc_core.idmapping': {
'Meta': {'object_name': 'IdMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'remote_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'timeseries': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Timeseries']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
u'ddsc_core.ipaddress': {
'Meta': {'object_name': 'IPAddress'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
u'ddsc_core.location': {
'Meta': {'object_name': 'Location'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 5, 14, 0, 0)'}),
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'geometry_precision': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'point_geometry': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': '4258', 'null': 'True', 'blank': 'True'}),
'real_geometry': ('django.contrib.gis.db.models.fields.GeometryField', [], {'srid': '4258', 'null': 'True', 'blank': 'True'}),
'relative_location': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'show_on_map': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36', 'blank': 'True'})
},
u'ddsc_core.locationtype': {
'Meta': {'object_name': 'LocationType'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'location_types'", 'blank': 'True', 'to': u"orm['ddsc_core.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'})
},
u'ddsc_core.logicalgroup': {
'Meta': {'ordering': "[u'owner', u'name']", 'unique_together': "((u'owner', u'name'),)", 'object_name': 'LogicalGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_security.DataOwner']"}),
'timeseries': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'logical_groups'", 'blank': 'True', 'to': u"orm['ddsc_core.Timeseries']"})
},
u'ddsc_core.logicalgroupedge': {
'Meta': {'unique_together': "((u'child', u'parent'),)", 'object_name': 'LogicalGroupEdge'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'parents'", 'to': u"orm['ddsc_core.LogicalGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'childs'", 'to': u"orm['ddsc_core.LogicalGroup']"})
},
u'ddsc_core.logrecord': {
'Meta': {'object_name': 'LogRecord'},
'host': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '8', 'db_index': 'True'}),
'line': ('django.db.models.fields.SmallIntegerField', [], {}),
'message': ('django.db.models.fields.TextField', [], {}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
u'ddsc_core.manufacturer': {
'Meta': {'object_name': 'Manufacturer'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
u'ddsc_core.measuringdevice': {
'Meta': {'ordering': "[u'description']", 'object_name': 'MeasuringDevice'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.measuringmethod': {
'Meta': {'ordering': "[u'description']", 'object_name': 'MeasuringMethod'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'titel': ('django.db.models.fields.CharField', [], {'max_length': '600', 'null': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.parameter': {
'Meta': {'ordering': "[u'description']", 'object_name': 'Parameter'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'cas_number': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sikb_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.processingmethod': {
'Meta': {'ordering': "[u'description']", 'object_name': 'ProcessingMethod'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.referenceframe': {
'Meta': {'ordering': "[u'description']", 'object_name': 'ReferenceFrame'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'ddsc_core.source': {
'Meta': {'object_name': 'Source'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 5, 14, 0, 0)'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manufacturer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Manufacturer']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'source_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'timeout': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36', 'blank': 'True'})
},
u'ddsc_core.sourcegroup': {
'Meta': {'object_name': 'SourceGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['ddsc_core.Source']", 'symmetrical': 'False'})
},
u'ddsc_core.statuscache': {
'Meta': {'object_name': 'StatusCache'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_val': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mean_val': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'min_val': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'modify_timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1900, 1, 1, 0, 0)'}),
'nr_of_measurements_doubtful': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'nr_of_measurements_reliable': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'nr_of_measurements_total': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'nr_of_measurements_unreliable': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status_date': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'std_val': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'timeseries': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Timeseries']"})
},
u'ddsc_core.timeseries': {
'Meta': {'object_name': 'Timeseries'},
'compartment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Compartment']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 5, 14, 0, 0)'}),
'data_set': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'timeseries'", 'blank': 'True', 'to': "orm['lizard_security.DataSet']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'first_value_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_value_number': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'latest_value_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'latest_value_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'timeseries'", 'null': 'True', 'to': u"orm['ddsc_core.Location']"}),
'measuring_device': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.MeasuringDevice']", 'null': 'True', 'blank': 'True'}),
'measuring_method': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.MeasuringMethod']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_security.DataOwner']", 'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Parameter']"}),
'processing_method': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.ProcessingMethod']", 'null': 'True', 'blank': 'True'}),
'reference_frame': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.ReferenceFrame']", 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Source']", 'null': 'True', 'blank': 'True'}),
'supplying_systems': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'through': u"orm['ddsc_core.IdMapping']", 'blank': 'True'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Unit']"}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36', 'blank': 'True'}),
'validate_diff_hard': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'validate_diff_soft': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'validate_max_hard': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'validate_max_soft': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'validate_min_hard': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'validate_min_soft': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
u'ddsc_core.timeseriesgroup': {
'Meta': {'object_name': 'TimeseriesGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'parameters': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['ddsc_core.Parameter']", 'symmetrical': 'False'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['ddsc_core.Source']", 'symmetrical': 'False'})
},
u'ddsc_core.timeseriesselectionrule': {
'Meta': {'ordering': "[u'pk']", 'object_name': 'TimeseriesSelectionRule'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'criterion': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'operator': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'})
},
u'ddsc_core.unit': {
'Meta': {'ordering': "[u'description']", 'object_name': 'Unit'},
'begin_date': ('django.db.models.fields.DateField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}),
'conversion_factor': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'dimension': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'lizard_security.dataowner': {
'Meta': {'ordering': "['name']", 'object_name': 'DataOwner'},
'data_managers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'remarks': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'lizard_security.dataset': {
'Meta': {'ordering': "['owner', 'name']", 'unique_together': "(('owner', 'name'),)", 'object_name': 'DataSet'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_security.DataOwner']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['ddsc_core']<|fim▁end|> | 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'alarm': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['ddsc_core.Alarm']"}),
'deactivated_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1900, 1, 1, 0, 0)'}),
'first_triggered_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1900, 1, 1, 0, 0)'}), |
<|file_name|>ReverseWords.py<|end_file_name|><|fim▁begin|>class Solution:
def reverseWords(self, s):
tks = s.split(' ')
tks = filter(None, tks)
tks.reverse()
return ' '.join(tks).strip()
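# Expected behaviour (illustrative): "the sky is blue" -> "blue is sky the",
# and "  a  b " -> "b a", since filter(None, ...) drops the empty tokens
# produced by repeated spaces.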
<|fim▁hole|>test = ["the sky is blue", " a b "]
sol = Solution();
for t in test :
print sol.reverseWords(t)<|fim▁end|> | |
<|file_name|>env.py<|end_file_name|><|fim▁begin|><|fim▁hole|>DVC_CHECKPOINT = "DVC_CHECKPOINT"
DVC_DAEMON = "DVC_DAEMON"
DVC_PAGER = "DVC_PAGER"
DVC_ROOT = "DVC_ROOT"
DVCLIVE_PATH = "DVCLIVE_PATH"
DVCLIVE_SUMMARY = "DVCLIVE_SUMMARY"
DVCLIVE_HTML = "DVCLIVE_HTML"
DVCLIVE_RESUME = "DVCLIVE_RESUME"
DVC_IGNORE_ISATTY = "DVC_IGNORE_ISATTY"
DVC_EXP_GIT_REMOTE = "DVC_EXP_GIT_REMOTE"
DVC_EXP_AUTO_PUSH = "DVC_EXP_AUTO_PUSH"
DVC_NO_ANALYTICS = "DVC_NO_ANALYTICS"<|fim▁end|> | |
<|file_name|>diagnostics.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from collections import namedtuple
import poppler
import cairo
from os.path import abspath
Point = namedtuple('Point', ['x', 'y'])
Line = namedtuple('Line', ['start', 'end'])
Polygon = namedtuple('Polygon', 'points')
Rectangle = namedtuple('Rectangle', ['top_left', 'bottom_right'])
AnnotationGroup = namedtuple('AnnotationGroup', ['name', 'color', 'shapes'])
Color = namedtuple('Color', ['red', 'green', 'blue'])
__all__ = [
'render_page',
'make_annotations',
]
def draw_line(context, line):
context.move_to(line.start.x, line.start.y)
context.line_to(line.end.x, line.end.y)
context.stroke()
def draw_polygon(context, polygon):
if len(polygon.points) == 0:
return
first_point = polygon.points[0]
context.move_to(first_point.x, first_point.y)
for line in polygon.points[1:]:
context.line_to(line.x, line.y)
context.stroke()
def draw_rectangle(context, rectangle):
width = abs(rectangle.bottom_right.x - rectangle.top_left.x)
height = abs(rectangle.bottom_right.y - rectangle.top_left.y)
context.rectangle(rectangle.top_left.x,
rectangle.top_left.y,
width,
height)
context.stroke()
RENDERERS = {}
RENDERERS[Line] = draw_line
RENDERERS[Rectangle] = draw_rectangle
RENDERERS[Polygon] = draw_polygon
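# Rendering is dispatched on the shape's namedtuple type, so a new shape only
# needs a draw function plus a RENDERERS entry. A hypothetical example:
#
#   Circle = namedtuple('Circle', ['center', 'radius'])
#
#   def draw_circle(context, circle):
#       context.arc(circle.center.x, circle.center.y, circle.radius,
#                   0, 6.283185307179586)  # full turn in radians
#       context.stroke()
#
#   RENDERERS[Circle] = draw_circle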
class CairoPdfPageRenderer(object):
def __init__(self, pdf_page, svg_filename, png_filename):
self._svg_filename = abspath(svg_filename)
self._png_filename = abspath(png_filename) if png_filename else None
self._context, self._surface = self._get_context(
svg_filename, *pdf_page.get_size())
white = poppler.Color()
white.red = white.green = white.blue = 65535
black = poppler.Color()
black.red = black.green = black.blue = 0
# red = poppler.Color()
# red.red = red.green = red.blue = 0
# red.red = 65535
width = pdf_page.get_size()[0]
# We render everything 3 times, moving
# one page-width to the right each time.
self._offset_colors = [
(0, white, white, True),
(width, black, white, True),
(2 * width, black, black, False)
]
for offset, fg_color, bg_color, render_graphics in self._offset_colors:
# Render into context, with a different offset
# each time.
self._context.save()
self._context.translate(offset, 0)
sel = poppler.Rectangle()
sel.x1, sel.y1 = (0, 0)
sel.x2, sel.y2 = pdf_page.get_size()
if render_graphics:
pdf_page.render(self._context)
pdf_page.render_selection(
self._context, sel, sel, poppler.SELECTION_GLYPH,
fg_color, bg_color)
self._context.restore()
@staticmethod
def _get_context(filename, width, height):
SCALE = 1
# left, middle, right
N_RENDERINGS = 3
surface = cairo.SVGSurface(
filename, N_RENDERINGS * width * SCALE, height * SCALE)
# srf = cairo.ImageSurface(
# cairo.FORMAT_RGB24, int(w*SCALE), int(h*SCALE))
context = cairo.Context(surface)
context.scale(SCALE, SCALE)
# Set background color to white
context.set_source_rgb(1, 1, 1)
context.paint()
return context, surface
def draw(self, shape, color):
self._context.save()
self._context.set_line_width(1)
self._context.set_source_rgba(color.red,
color.green,
color.blue,
0.5)
self._context.translate(self._offset_colors[1][0], 0)
RENDERERS[type(shape)](self._context, shape)
self._context.restore()
def flush(self):
if self._png_filename is not None:
self._surface.write_to_png(self._png_filename)
# NOTE! The flush is rather expensive, since it writes out the svg
# data. The profile will show a large amount of time spent inside it.
# Removing it won't help the execution time at all, it will just move
# it somewhere that the profiler can't see it
# (at garbage collection time)
self._surface.flush()
self._surface.finish()
def render_page(pdf_filename, page_number, annotations, svg_file=None,
png_file=None):
"""
Render a single page of a pdf with graphical annotations added.
"""
page = extract_pdf_page(pdf_filename, page_number)
renderer = CairoPdfPageRenderer(page, svg_file, png_file)
for annotation in annotations:
assert isinstance(annotation, AnnotationGroup), (
"annotations: {0}, annotation: {1}".format(
annotations, annotation))
for shape in annotation.shapes:
renderer.draw(shape, annotation.color)
renderer.flush()
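# A minimal usage sketch (file names are illustrative). Note that svg_file
# must be a real path: CairoPdfPageRenderer calls abspath() on it, so the
# svg_file=None default cannot actually be used.
#
#   annotations = make_annotations(table_container)
#   render_page('report.pdf', 0, annotations,
#               svg_file='page0.svg', png_file='page0.png')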
def extract_pdf_page(filename, page_number):
file_uri = "file://{0}".format(abspath(filename))
doc = poppler.document_new_from_file(file_uri, "")
page = doc.get_page(page_number)
return page
def make_annotations(table_container):
"""
Take the output of the table-finding algorithm (TableFinder) and create
AnnotationGroups. These can be drawn on top of the original PDF page to
visualise how the algorithm arrived at its output.
"""
annotations = []
annotations.append(
AnnotationGroup(
name='all_glyphs',
color=Color(0, 1, 0),
shapes=convert_rectangles(table_container.all_glyphs)))
annotations.append(
AnnotationGroup(
name='all_words',
color=Color(0, 0, 1),
shapes=convert_rectangles(table_container.all_words)))
annotations.append(
AnnotationGroup(
name='text_barycenters',
color=Color(0, 0, 1),
shapes=convert_barycenters(table_container.all_glyphs)))
annotations.append(
AnnotationGroup(
name='hat_graph_vertical',
color=Color(0, 1, 0),
shapes=make_hat_graph(
table_container._y_point_values,
table_container._center_lines,
direction="vertical")))
for table in table_container:
annotations.append(
AnnotationGroup(
name='row_edges',
color=Color(1, 0, 0),
shapes=convert_horizontal_lines(
table.row_edges, table.bounding_box)))
annotations.append(
AnnotationGroup(
name='column_edges',
color=Color(1, 0, 0),
shapes=convert_vertical_lines(
table.column_edges, table.bounding_box)))
annotations.append(
AnnotationGroup(
name='glyph_histogram_horizontal',
color=Color(1, 0, 0),
shapes=make_glyph_histogram(
table._x_glyph_histogram, table.bounding_box,
direction="horizontal")))
annotations.append(
AnnotationGroup(
name='glyph_histogram_vertical',
color=Color(1, 0, 0),
shapes=make_glyph_histogram(
table._y_glyph_histogram, table.bounding_box,
direction="vertical")))
annotations.append(
AnnotationGroup(
name='horizontal_glyph_above_threshold',
color=Color(0, 0, 0),
shapes=make_thresholds(
table._x_threshold_segs, table.bounding_box,
direction="horizontal")))
annotations.append(
AnnotationGroup(
name='vertical_glyph_above_threshold',
color=Color(0, 0, 0),
shapes=make_thresholds(
table._y_threshold_segs, table.bounding_box,
direction="vertical")))
# Draw bounding boxes last so that they appear on top
annotations.append(
AnnotationGroup(
name='table_bounding_boxes',
color=Color(0, 0, 1),
shapes=convert_rectangles(table_container.bounding_boxes)))
return annotations
def make_thresholds(segments, box, direction):
lines = []
for segment in segments:
if direction == "horizontal":
lines.append(Line(Point(segment.start, box.bottom + 10),
Point(segment.end, box.bottom + 10)))
else:
lines.append(Line(Point(10, segment.start),
Point(10, segment.end)))
return lines
def make_hat_graph(hats, center_lines, direction):
"""
Draw the hat graph and the estimated text barycenter lines.
"""
max_value = max(v for _, v in hats)
DISPLAY_WIDTH = 25
points = []
polygon = Polygon(points)
def point(x, y):
points.append(Point(x, y))
for position, value in hats:
point(((value / max_value - 1) * DISPLAY_WIDTH), position)
lines = []
for position in center_lines:
lines.append(Line(Point(-DISPLAY_WIDTH, position),
Point(0, position)))
return [polygon] + lines
def make_glyph_histogram(histogram, box, direction):
# if direction == "vertical":
# return []
bin_edges, bin_values = histogram
if not bin_edges:
# There are no glyphs, and nothing to render!
return []
points = []
polygon = Polygon(points)
def point(x, y):
points.append(Point(x, y))
# def line(*args):
# lines.append(Line(*args))
previous_value = 0 if direction == "horizontal" else box.bottom
x = zip(bin_edges, bin_values)
for edge, value in x:
if direction == "horizontal":
value *= 0.75
value = box.bottom - value
point(edge, previous_value)
point(edge, value)
else:
value *= 0.25
value += 7 # shift pixels to the right
point(previous_value, edge)
point(value, edge)
previous_value = value
# Final point is at 0
if direction == "horizontal":
point(edge, 0)
else:
point(box.bottom, edge)
# Draw edge density plot (not terribly interesting, should probably be
# deleted)
# lines = []
# if direction == "horizontal":
# for edge in bin_edges:
# lines.append(Line(Point(edge, box.bottom),
# Point(edge, box.bottom + 5)))
# else:
# for edge in bin_edges:
# lines.append(Line(Point(0, edge), Point(5, edge)))
return [polygon] # + lines
def convert_rectangles(boxes):
return [Rectangle(Point(b.left, b.top), Point(b.right, b.bottom))
for b in boxes]
def convert_barycenters(boxes):
return [Line(Point(b.left, b.barycenter.midpoint),
Point(b.right, b.barycenter.midpoint))
for b in boxes if b.barycenter is not None]
def convert_horizontal_lines(y_edges, bbox):
return [Line(Point(bbox.left, y), Point(bbox.right, y))<|fim▁hole|>def convert_vertical_lines(x_edges, bbox):
return [Line(Point(x, bbox.top), Point(x, bbox.bottom))
for x in x_edges]
if __name__ == '__main__':
annotations = [
AnnotationGroup(
name='',
color=Color(1, 0, 0),
shapes=[Rectangle(Point(100, 100), Point(200, 200))])
]
render_page(sys.argv[1], 0, annotations)<|fim▁end|> | for y in y_edges]
|
<|file_name|>MyComponentWiredTest.java<|end_file_name|><|fim▁begin|>// package it.at.oiml.bitbucket.lfs;<|fim▁hole|>// import com.atlassian.plugins.osgi.test.AtlassianPluginsTestRunner;
// import at.oiml.bitbucket.lfs.MyPluginComponent;
// import com.atlassian.sal.api.ApplicationProperties;
//
// import static org.junit.Assert.assertEquals;
//
// @RunWith(AtlassianPluginsTestRunner.class)
// public class MyComponentWiredTest
// {
// private final ApplicationProperties applicationProperties;
// private final MyPluginComponent myPluginComponent;
//
// public MyComponentWiredTest(ApplicationProperties applicationProperties,MyPluginComponent myPluginComponent)
// {
// this.applicationProperties = applicationProperties;
// this.myPluginComponent = myPluginComponent;
// }
//
// @Test
// public void testMyName()
// {
// assertEquals("names do not match!", "myComponent:" + applicationProperties.getDisplayName(),myPluginComponent.getName());
// }
// }<|fim▁end|> | //
// import org.junit.Test;
// import org.junit.runner.RunWith; |
<|file_name|>cad2xls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
## @copyright
# Software License Agreement (BSD License)
#
# Copyright (c) 2017, Jorge De La Cruz, Carmen Castano.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = 'Jorge De La Cruz, Carmen Castano'
__copyright__ = 'Copyright (c) 2017 Jorge De La Cruz, Carmen Castano'
__license__ = 'BSD'
__maintainer__ = 'Jorge De La Cruz'
__email__ = '[email protected]'
import sys
## Path to FreeCAD library
# change this by your FreeCAD library path
sys.path.append('/usr/lib/freecad/lib')
import FreeCAD as App
import Import
from datetime import datetime
class GetParameters:
def __init__(self):
self.filePath = '/home/jdelacruz/Downloads/KonzeptB_lang090715.stp'
def loadCAD(self):<|fim▁hole|> print('Starting to load the CAD file, please be patient!...')
Import.open(self.filePath)
self.handler = App.ActiveDocument
self.parts = self.handler.Objects
print('CAD model loaded!')
def writeTxt(self):
f = open('data.txt','a')
print >>f, 'Name \t Label'
self.i = 0
self.size = len(self.parts)
self.names = range(self.size)
self.labels = range(self.size)
for self.part in self.parts:
self.names[self.i] = self.part.Name
self.labels[self.i] = self.part.Label
print >>f, self.part.Name+"\t"+self.part.Label
self.i += 1
f.close()
print('The txt file has been created successfully!')
if __name__ == '__main__':
data = GetParameters()
data.loadCAD()
data.writeTxt()<|fim▁end|> | |
<|file_name|>check-kibana-settings.service.ts<|end_file_name|><|fim▁begin|>/*
* Wazuh app - Check Kibana settings service
*
* Copyright (C) 2015-2021 Wazuh, Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Find more information about this on the LICENSE file.
*
*/
import { CheckLogger } from '../types/check_logger';
import _ from 'lodash';
import { getUiSettings } from '../../../kibana-services';
export const checkKibanaSettings = (kibanaSettingName: string, defaultAppValue: any, callback?: (checkLogger: CheckLogger, options: {defaultAppValue: any}) => void) => (appConfig: any) => async (checkLogger: CheckLogger) => {
checkLogger.info('Getting settings...');
const valueKibanaSetting = getUiSettings().get(kibanaSettingName);
const settingsAreDifferent = !_.isEqual(
typeof defaultAppValue === 'string' ? stringifySetting(valueKibanaSetting) : valueKibanaSetting,
defaultAppValue
);
checkLogger.info(`Check Kibana setting [${kibanaSettingName}]: ${stringifySetting(valueKibanaSetting)}`);
checkLogger.info(`App setting [${kibanaSettingName}]: ${stringifySetting(defaultAppValue)}`);
checkLogger.info(`Settings mismatch [${kibanaSettingName}]: ${settingsAreDifferent ? 'yes' : 'no'}`);
if (!valueKibanaSetting || settingsAreDifferent) {
checkLogger.info(`Updating [${kibanaSettingName}] setting...`);<|fim▁hole|>}
async function updateSetting(kibanaSettingName, defaultAppValue, retries = 3) {
return await getUiSettings()
.set(kibanaSettingName, null)
.catch(async (error) => {
if (retries > 0) {
return await updateSetting(kibanaSettingName, defaultAppValue, --retries);
}
throw error;
});
}
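// A hypothetical wiring of the curried check above ('timepicker:timeDefaults'
// is just an example setting name and value):
//
//   const check = checkKibanaSettings(
//     'timepicker:timeDefaults',
//     JSON.stringify({ from: 'now-24h', to: 'now' }),
//   );
//   await check(appConfig)(checkLogger);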
function stringifySetting(setting: any){
try{
return JSON.stringify(setting);
}catch(error){
return setting;
};
};<|fim▁end|> | await updateSetting(kibanaSettingName, defaultAppValue);
checkLogger.action(`Updated [${kibanaSettingName}] setting to: ${stringifySetting(defaultAppValue)}`);
callback && callback(checkLogger,{ defaultAppValue });
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | DEPS = [
'recipe_engine/platform',
] |
<|file_name|>test_context.py<|end_file_name|><|fim▁begin|>from testkit.context import *
def test_context_user():
from contextlib import contextmanager
test_dict = dict(value='before')
@contextmanager
def test_context(test_dict):
test_dict['value'] = 'during'
yield 'test'
test_dict['value'] = 'after'
ctx = ContextUser(test_context(test_dict))
assert test_dict['value'] == 'before'
ctx.enter()
assert test_dict['value'] == 'during'
ctx.exit()
assert test_dict['value'] == 'after'
class my_context(ContextDecorator):
def before(self):
self.hello = 'hello'<|fim▁hole|> self.done = False
def after(self):
self.done = True
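# my_context exercises both faces of ContextDecorator: as a @decorator it
# calls before(), passes itself into the wrapped function, and runs after()
# even on exceptions; as a `with` block, whatever before() returns (or, if it
# returns None, the instance itself) becomes the `as` target.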
def test_context_decorator_as_decorator():
as_decorator = my_context()
@as_decorator
def hello(context):
assert context.hello == 'hello'
hello()
assert as_decorator.done == True
def test_context_decorator_as_decorator_exception():
as_decorator = my_context()
fake_message = 'A fake error!'
@as_decorator
def hello(context):
raise Exception(fake_message)
try:
hello()
except Exception, e:
assert e.message == fake_message
assert as_decorator.done == True
def test_context_decorator_as_context():
as_context = my_context()
with as_context as context:
assert context.hello == 'hello'
assert context.done == False
assert context.done == True
def test_context_decorator_as_context_exception():
as_context = my_context()
fake_message = 'error!'
try:
with as_context as context:
raise Exception(fake_message)
except Exception, e:
assert e.message == fake_message
assert context.done == True
class my_other_context(ContextDecorator):
def before(self):
self.hello = 'hello'
self.done = False
return self.hello
def after(self):
self.done = True
def test_context_decorator_before_returns_custom_context():
as_context = my_other_context()
with as_context as hello:
assert hello == 'hello'<|fim▁end|> | |
<|file_name|>rao.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# -*- mode: python; coding: utf-8; -*-
"""Module for generating lexicon using Rao and Ravichandran's method (2009).
"""
##################################################################
# Imports
from __future__ import unicode_literals, print_function
from blair_goldensohn import build_mtx, seeds2seedpos
from common import POSITIVE, NEGATIVE, NEUTRAL
from graph import Graph
from itertools import chain
from scipy import sparse
import numpy as np
import sys
##################################################################
# Constants
POS_IDX = 0
NEG_IDX = 1
NEUT_IDX = 2
POL_IDX = 1
SCORE_IDX = 2
MAX_I = 300
IDX2CLS = {POS_IDX: POSITIVE, NEG_IDX: NEGATIVE, NEUT_IDX: NEUTRAL}
##################################################################
# Methods
def _eq_sparse(a_M1, a_M2):
"""Compare two sparse matrices.
@param a_M1 - first sparse matrix to compare
@param a_M2 - second sparse matrix to compare
@return True if both matrices are equal, non-False otherwise
"""
if type(a_M1) != type(a_M2):
return False
if not np.allclose(a_M1.get_shape(), a_M2.get_shape()):
return False
X, Y = a_M1.nonzero()
IDX1 = set([(x, y) for x, y in zip(X, Y)])
X, Y = a_M2.nonzero()
IDX2 = [(x, y) for x, y in zip(X, Y) if (x, y) not in IDX1]
IDX = list(IDX1)
IDX.extend(IDX2)
IDX.sort()
for x_i, y_i in IDX:
# print("a_M1[{:d}, {:d}] = {:f}".format(x_i, y_i, a_M1[x_i, y_i]))
# print("a_M2[{:d}, {:d}] = {:f}".format(x_i, y_i, a_M2[x_i, y_i]))
# print("is_close", np.isclose(a_M1[x_i, y_i], a_M2[x_i, y_i]))
if not np.isclose(a_M1[x_i, y_i], a_M2[x_i, y_i]):
return False
return True
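# The bookkeeping above unions the nonzero cells of both matrices (IDX1 from
# a_M1, plus cells nonzero only in a_M2), so a value present in one matrix
# but zero in the other is still compared.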
def _mtx2tlist(a_Y, a_term2idx):
"""Convert matrix to a list of polar terms.
@param a_Y - matrix of polar terms
@param a_term2idx - mapping from terms to their matrix indices
@return list of 3-tuples (word, polarity, score)
"""
ret = []
iscore = 0.
irow = None
lex2lidx = {}
ipol = lidx = 0
for (iword, ipos), idx in a_term2idx.iteritems():
# obtain matrix row for that term
irow = a_Y.getrow(idx).toarray()
# print("irow =", repr(irow))
ipol = irow.argmax(axis=1)[0]
iscore = irow[0, ipol]
# print("ipol =", repr(ipol))
# print("iscore =", repr(iscore))
if ipol != NEUT_IDX:
ipol = IDX2CLS[ipol]
if iword in lex2lidx:
lidx = lex2lidx[iword]
if abs(iscore) > abs(ret[lidx][SCORE_IDX]):
ret[lidx][POL_IDX] = ipol
ret[lidx][SCORE_IDX] = iscore
else:<|fim▁hole|>
def _sign_normalize(a_Y, a_terms2idx, a_pos, a_neg, a_neut,
a_set_dflt=None):
"""Fix seed values and row-normalize the class matrix.
@param a_Y - class matrix to be changed
@param a_terms2idx - mapping from terms to their matrix indices
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
@param a_set_dflt - function to set the default value of an unkown term
@return void
@note modifies the input matrix in place
"""
seed_found = False
for iterm, i in a_terms2idx.iteritems():
if iterm in a_pos:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, POS_IDX] = 1.
elif iterm in a_neg:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, NEG_IDX] = 1.
elif iterm in a_neut:
seed_found = True
a_Y[i, :] = 0.
a_Y[i, NEUT_IDX] = 1.
elif a_set_dflt is not None:
a_set_dflt(a_Y, i)
assert seed_found, "No seed term found in matrix."
# normalize class scores
Z = a_Y.sum(1)
x, y = a_Y.nonzero()
for i, j in zip(x, y):
# print("a_Y[{:d}, {:d}] =".format(i, j), repr(a_Y[i, j]))
# print("Z[{:d}, 0] =".format(i), repr(Z[i, 0]))
a_Y[i, j] /= float(Z[i, 0]) or 1.
# print("*a_Y[{:d}, {:d}] =".format(i, j), repr(a_Y[i, j]))
def prune_normalize(a_M):
"""Make each of the adjacency matrix sum up to one.
Args:
a_M (scipy.sparse.csr): matrix to be normalized
Returns:
void:
Note:
modifies the input matrix in place
"""
# remove negative transitions
nonzero_xy = a_M.nonzero()
for i, j in zip(*nonzero_xy):
if a_M[i, j] < 0.:
a_M[i, j] = 0.
a_M.prune()
# normalize all outgoing transitions
Z = a_M.sum(0)
nonzero_xy = a_M.nonzero()
for i, j in zip(*nonzero_xy):
a_M[i, j] /= float(Z[0, j]) or 1.
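# After prune_normalize, M is column-stochastic (each nonzero column sums to
# one): M[i, j] is the transition weight from term j to term i, which is the
# orientation the propagation step Y <- M.dot(Y) in rao_lbl_prop assumes.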
def rao_min_cut(a_germanet, a_pos, a_neg, a_neut, a_seed_pos,
a_ext_syn_rels):
"""Extend sentiment lexicons using the min-cut method of Rao (2009).
@param a_germanet - GermaNet instance
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
@param a_seed_pos - part-of-speech class of seed synsets ("none" for no
restriction)
@param a_ext_syn_rels - use extended set of synonymous relations
@return list of polar terms, their polarities, and scores
"""
sgraph = Graph(a_germanet, a_ext_syn_rels)
# partition the graph into subjective and objective terms
mcs, cut_edges, _, _ = sgraph.min_cut(a_pos | a_neg, a_neut, a_seed_pos)
print("min_cut_score (subj. vs. obj.) = {:d}".format(mcs),
file=sys.stderr)
# remove edges belonging to the min cut (i.e., cut the graph)
for isrc, itrg in cut_edges:
if isrc in sgraph.nodes:
sgraph.nodes[isrc].pop(itrg, None)
# separate the graph into positive and negative terms
mcs, _, pos, neg = sgraph.min_cut(a_pos, a_neg, a_seed_pos)
print("min_cut_score (pos. vs. neg.) = {:d}".format(mcs),
file=sys.stderr)
ret = [(inode[0], POSITIVE, 1.) for inode in pos]
ret.extend((inode[0], NEGATIVE, -1.) for inode in neg)
return ret
def rao_lbl_prop(a_germanet, a_pos, a_neg, a_neut, a_seed_pos,
a_ext_syn_rels):
"""Extend sentiment lexicons using the lbl-prop method of Rao (2009).
@param a_germanet - GermaNet instance
@param a_pos - set of lexemes with positive polarity
@param a_neg - set of lexemes with negative polarity
@param a_neut - set of lexemes with neutral polarity
@param a_seed_pos - part-of-speech class of seed synsets ("none" for no
restriction)
@param a_ext_syn_rels - use extended set of synonymous relations
@return list of polar terms, their polarities, and scores
"""
if a_seed_pos is None:
a_seed_pos = ["adj", "nomen", "verben"]
else:
a_seed_pos = [a_seed_pos]
a_pos = seeds2seedpos(a_pos, a_seed_pos)
a_neg = seeds2seedpos(a_neg, a_seed_pos)
a_neut = seeds2seedpos(a_neut, a_seed_pos)
# obtain and row-normalize the adjacency matrix
terms = set((ilex, ipos)
for isynid, ipos in a_germanet.synid2pos.iteritems()
for ilexid in a_germanet.synid2lexids[isynid]
for ilex in a_germanet.lexid2lex[ilexid]
)
terms2idx = {iterm: i for i, iterm in enumerate(terms)}
M = build_mtx(a_germanet, terms2idx, set(),
a_ext_syn_rels, len(terms))
prune_normalize(M)
# no need to transpose M[i, j] is the link going from node j to the node i;
# and, in Y, the Y[j, k] cell is the polarity score of the class k for the
# term j
# M = M.transpose()
# check that the matrix is column normalized
assert np.all(i == 0 or np.isclose([i], [1.])
for i in M.sum(0)[0, :])
# initialize label matrix
Y = sparse.lil_matrix((len(terms), len(IDX2CLS)), dtype=np.float32)
def _set_neut_one(X, i):
X[i, NEUT_IDX] = 1.
_sign_normalize(Y, terms2idx, a_pos, a_neg, a_neut,
_set_neut_one)
# Y = Y.tocsr()
# output first M row and Y column
# for i in xrange(len(terms)):
# if M[0, i] != 0:
# print("M[0, {:d}] =".format(i), M[0, i], file=sys.stderr)
# if Y[i, 0] != 0:
# print("Y[i, 0] =", Y[i, 0], file=sys.stderr)
# B = M.dot(Y)
# print("B[0, 0] =", B[0, 0], file=sys.stderr)
# perform multiplication until convergence
i = 0
prev_Y = None
while not _eq_sparse(prev_Y, Y) and i < MAX_I:
prev_Y = Y.copy()
Y = Y.tocsc()
Y = M.dot(Y)
Y = Y.tolil()
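        # renormalize the scores and re-pin the seed terms after each
        # propagation step (presumably what _sign_normalize does, given its
        # seed-set arguments)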
_sign_normalize(Y, terms2idx, a_pos, a_neg, a_neut)
i += 1
ret = _mtx2tlist(Y, terms2idx)
ret.sort(key=lambda el: abs(el[-1]), reverse=True)
return ret<|fim▁end|> | lex2lidx[iword] = len(ret)
ret.append([iword, ipol, iscore])
return ret |
<|file_name|>PathsVerifier.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.patch.formove;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diff.impl.patch.FilePatch;
import com.intellij.openapi.diff.impl.patch.TextFilePatch;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchBase;
import com.intellij.openapi.diff.impl.patch.apply.ApplyFilePatchFactory;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.ex.FileTypeChooser;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.patch.RelativePathCalculator;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class PathsVerifier {
// in
private final Project myProject;
private final VirtualFile myBaseDirectory;
private final List<FilePatch> myPatches;
// temp
private final Map<VirtualFile, MovedFileData> myMovedFiles;
private final List<FilePath> myBeforePaths;
private final List<VirtualFile> myCreatedDirectories;
// out
private final List<PatchAndFile> myTextPatches;
private final List<PatchAndFile> myBinaryPatches;
@NotNull private final List<VirtualFile> myWritableFiles;
private final ProjectLevelVcsManager myVcsManager;
private final List<FilePatch> mySkipped;
private DelayedPrecheckContext myDelayedPrecheckContext;
private final List<FilePath> myAddedPaths;
private final List<FilePath> myDeletedPaths;
private boolean myIgnoreContentRootsCheck;
public PathsVerifier(@NotNull Project project,
@NotNull VirtualFile baseDirectory,
@NotNull List<FilePatch> patches) {
myProject = project;
myBaseDirectory = baseDirectory;
myPatches = patches;
myMovedFiles = new HashMap<>();
myBeforePaths = new ArrayList<>();
myCreatedDirectories = new ArrayList<>();
myTextPatches = new ArrayList<>();
myBinaryPatches = new ArrayList<>();
myWritableFiles = new ArrayList<>();
myVcsManager = ProjectLevelVcsManager.getInstance(myProject);
mySkipped = new ArrayList<>();
myAddedPaths = new ArrayList<>();
myDeletedPaths = new ArrayList<>();
}
// those to be moved to CL: target + created dirs
public List<FilePath> getDirectlyAffected() {
final List<FilePath> affected = new ArrayList<>();
addAllFilePath(myCreatedDirectories, affected);
addAllFilePath(myWritableFiles, affected);
affected.addAll(myBeforePaths);
return affected;
}
// old parents of moved files
public List<VirtualFile> getAllAffected() {
final List<VirtualFile> affected = new ArrayList<>();
affected.addAll(myCreatedDirectories);
affected.addAll(myWritableFiles);
// after files' parent
for (VirtualFile file : myMovedFiles.keySet()) {
final VirtualFile parent = file.getParent();
if (parent != null) {
affected.add(parent);
}
}
// before..
for (FilePath path : myBeforePaths) {
final FilePath parent = path.getParentPath();
if (parent != null) {
affected.add(parent.getVirtualFile());
}
}
return affected;
}
private static void addAllFilePath(final Collection<VirtualFile> files, final Collection<FilePath> paths) {
for (VirtualFile file : files) {
paths.add(VcsUtil.getFilePath(file));
}
}
@CalledInAwt
public List<FilePatch> nonWriteActionPreCheck() {
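    // Summary: precheck every patch without taking a write action; patches
    // that fail the precheck are reverted and collected, the delayed context
    // resolves skip/overwrite questions, and both groups are removed from
    // myPatches before the failures are returned.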
List<FilePatch> failedToApply = ContainerUtil.newArrayList();
myDelayedPrecheckContext = new DelayedPrecheckContext(myProject);
for (FilePatch patch : myPatches) {
final CheckPath checker = getChecker(patch);
if (!checker.canBeApplied(myDelayedPrecheckContext)) {
revert(checker.getErrorMessage());
failedToApply.add(patch);
}
}
final Collection<FilePatch> skipped = myDelayedPrecheckContext.doDelayed();
mySkipped.addAll(skipped);
myPatches.removeAll(skipped);
myPatches.removeAll(failedToApply);
return failedToApply;
}
public List<FilePatch> getSkipped() {
return mySkipped;
}
public List<FilePatch> execute() {
List<FilePatch> failedPatches = ContainerUtil.newArrayList();
try {
final List<CheckPath> checkers = new ArrayList<>(myPatches.size());
for (FilePatch patch : myPatches) {
final CheckPath checker = getChecker(patch);
checkers.add(checker);
}
for (CheckPath checker : checkers) {
if (!checker.check()) {
failedPatches.add(checker.getPatch());
revert(checker.getErrorMessage());
}
}
}
catch (IOException e) {
revert(e.getMessage());
}
myPatches.removeAll(failedPatches);
return failedPatches;
}
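  // Checker selection, summarized: added file -> CheckAdded, deleted file ->
  // CheckDeleted, rename/move -> CheckMoved, plain content edit -> CheckModified.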
private CheckPath getChecker(final FilePatch patch) {
final String beforeFileName = patch.getBeforeName();
final String afterFileName = patch.getAfterName();
if (beforeFileName == null || patch.isNewFile()) {
return new CheckAdded(patch);
}
else if (afterFileName == null || patch.isDeletedFile()) {<|fim▁hole|> }
else if (!beforeFileName.equals(afterFileName)) {
return new CheckMoved(patch);
}
else {
return new CheckModified(patch);
}
}
public Collection<FilePath> getToBeAdded() {
return myAddedPaths;
}
public Collection<FilePath> getToBeDeleted() {
return myDeletedPaths;
}
@NotNull
public Collection<FilePatch> filterBadFileTypePatches() {
List<PatchAndFile> failedTextPatches =
ContainerUtil.findAll(myTextPatches, textPatch -> !isFileTypeOk(textPatch.getFile()));
myTextPatches.removeAll(failedTextPatches);
return ContainerUtil.map(failedTextPatches, patchInfo -> patchInfo.getApplyPatch().getPatch());
}
private boolean isFileTypeOk(@NotNull VirtualFile file) {
if (file.isDirectory()) {
PatchApplier
.showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because it is directory.");
return false;
}
FileType fileType = file.getFileType();
if (fileType == FileTypes.UNKNOWN) {
fileType = FileTypeChooser.associateFileType(file.getName());
if (fileType == null) {
PatchApplier
.showError(myProject, "Cannot apply content for " + file.getPresentableName() + " file from patch because its type not defined.");
return false;
}
}
if (fileType.isBinary()) {
PatchApplier.showError(myProject, "Cannot apply file " + file.getPresentableName() + " from patch because it is binary.");
return false;
}
return true;
}
private class CheckModified extends CheckDeleted {
private CheckModified(final FilePatch path) {
super(path);
}
}
private class CheckDeleted extends CheckPath {
protected CheckDeleted(final FilePatch path) {
super(path);
}
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
if (beforeFile == null) {
context.addSkip(getMappedFilePath(myBeforeName), myPatch);
}
return true;
}
@Override
protected boolean check() {
final VirtualFile beforeFile = getMappedFile(myBeforeName);
if (! checkExistsAndValid(beforeFile, myBeforeName)) {
return false;
}
addPatch(myPatch, beforeFile);
FilePath filePath = VcsUtil.getFilePath(beforeFile.getParent(), beforeFile.getName(), beforeFile.isDirectory());
if (myPatch.isDeletedFile() || myPatch.getAfterName() == null) {
myDeletedPaths.add(filePath);
}
myBeforePaths.add(filePath);
return true;
}
}
private class CheckAdded extends CheckPath {
private CheckAdded(final FilePatch path) {
super(path);
}
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, DelayedPrecheckContext context) {
if (afterFile != null) {
context.addOverrideExisting(myPatch, VcsUtil.getFilePath(afterFile));
}
return true;
}
@Override
public boolean check() throws IOException {
final String[] pieces = RelativePathCalculator.split(myAfterName);
final VirtualFile parent = makeSureParentPathExists(pieces);
if (parent == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
String name = pieces[pieces.length - 1];
File afterFile = new File(parent.getPath(), name);
//if user already accepted overwriting, we shouldn't have created a new one
final VirtualFile file = myDelayedPrecheckContext.getOverridenPaths().contains(VcsUtil.getFilePath(afterFile))
? parent.findChild(name)
: createFile(parent, name);
if (file == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
myAddedPaths.add(VcsUtil.getFilePath(file));
if (! checkExistsAndValid(file, myAfterName)) {
return false;
}
addPatch(myPatch, file);
return true;
}
}
private class CheckMoved extends CheckPath {
private CheckMoved(final FilePatch path) {
super(path);
}
// before exists; after does not exist
@Override
protected boolean precheck(final VirtualFile beforeFile, final VirtualFile afterFile, final DelayedPrecheckContext context) {
if (beforeFile == null) {
setErrorMessage(fileNotFoundMessage(myBeforeName));
} else if (afterFile != null) {
setErrorMessage(fileAlreadyExists(afterFile.getPath()));
}
return beforeFile != null && afterFile == null;
}
@Override
public boolean check() throws IOException {
final String[] pieces = RelativePathCalculator.split(myAfterName);
final VirtualFile afterFileParent = makeSureParentPathExists(pieces);
if (afterFileParent == null) {
setErrorMessage(fileNotFoundMessage(myAfterName));
return false;
}
final VirtualFile beforeFile = getMappedFile(myBeforeName);
if (! checkExistsAndValid(beforeFile, myBeforeName)) {
return false;
}
      assert beforeFile != null; // if beforeFile were null, checkExistsAndValid would have returned false
myMovedFiles.put(beforeFile, new MovedFileData(afterFileParent, beforeFile, myPatch.getAfterFileName()));
addPatch(myPatch, beforeFile);
return true;
}
}
private abstract class CheckPath {
protected final String myBeforeName;
protected final String myAfterName;
protected final FilePatch myPatch;
private String myErrorMessage;
CheckPath(final FilePatch path) {
myPatch = path;
myBeforeName = path.getBeforeName();
myAfterName = path.getAfterName();
}
public String getErrorMessage() {
return myErrorMessage;
}
public void setErrorMessage(final String errorMessage) {
myErrorMessage = errorMessage;
}
public boolean canBeApplied(DelayedPrecheckContext context) {
final VirtualFile beforeFile = getMappedFile(myBeforeName);
final VirtualFile afterFile = getMappedFile(myAfterName);
return precheck(beforeFile, afterFile, context);
}
protected abstract boolean precheck(final VirtualFile beforeFile,
final VirtualFile afterFile,
DelayedPrecheckContext context);
protected abstract boolean check() throws IOException;
protected boolean checkExistsAndValid(final VirtualFile file, final String name) {
if (file == null) {
setErrorMessage(fileNotFoundMessage(name));
return false;
}
return checkModificationValid(file, name);
}
protected boolean checkModificationValid(final VirtualFile file, final String name) {
if (ApplicationManager.getApplication().isUnitTestMode() && myIgnoreContentRootsCheck) return true;
// security check to avoid overwriting system files with a patch
if (file == null || !inContent(file) || myVcsManager.getVcsRootFor(file) == null) {
setErrorMessage("File to patch found outside content root: " + name);
return false;
}
return true;
}
@Nullable
protected VirtualFile getMappedFile(String path) {
return PathMerger.getFile(myBaseDirectory, path);
}
protected FilePath getMappedFilePath(String path) {
return PathMerger.getFile(VcsUtil.getFilePath(myBaseDirectory), path);
}
private boolean inContent(VirtualFile file) {
return myVcsManager.isFileInContent(file);
}
public FilePatch getPatch() {
return myPatch;
}
}
private void addPatch(final FilePatch patch, final VirtualFile file) {
if (patch instanceof TextFilePatch) {
myTextPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.create((TextFilePatch)patch)));
}
else {
myBinaryPatches.add(new PatchAndFile(file, ApplyFilePatchFactory.createGeneral(patch)));
}
myWritableFiles.add(file);
}
private static String fileNotFoundMessage(final String path) {
return VcsBundle.message("cannot.find.file.to.patch", path);
}
private static String fileAlreadyExists(final String path) {
return VcsBundle.message("cannot.apply.file.already.exists", path);
}
private void revert(final String errorMessage) {
PatchApplier.showError(myProject, errorMessage);
// move back
/*for (MovedFileData movedFile : myMovedFiles) {
try {
final VirtualFile current = movedFile.getCurrent();
final VirtualFile newParent = current.getParent();
final VirtualFile file;
if (! Comparing.equal(newParent, movedFile.getOldParent())) {
file = moveFile(current, movedFile.getOldParent());
} else {
file = current;
}
if (! Comparing.equal(current.getName(), movedFile.getOldName())) {
file.rename(PatchApplier.class, movedFile.getOldName());
}
}
catch (IOException e) {
// ignore: revert as much as possible
}
}
// go back
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
for (int i = myCreatedDirectories.size() - 1; i >= 0; -- i) {
final VirtualFile file = myCreatedDirectories.get(i);
try {
file.delete(PatchApplier.class);
}
catch (IOException e) {
// ignore
}
}
}
});
myBinaryPatches.clear();
myTextPatches.clear();
myWritableFiles.clear();*/
}
private static VirtualFile createFile(final VirtualFile parent, final String name) throws IOException {
return parent.createChildData(PatchApplier.class, name);
/*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
final Ref<VirtualFile> result = new Ref<VirtualFile>();
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
try {
result.set(parent.createChildData(PatchApplier.class, name));
}
catch (IOException e) {
ioExceptionRef.set(e);
}
}
});
if (! ioExceptionRef.isNull()) {
throw ioExceptionRef.get();
}
return result.get();*/
}
private static VirtualFile moveFile(final VirtualFile file, final VirtualFile newParent) throws IOException {
file.move(FilePatch.class, newParent);
return file;
/*final Ref<IOException> ioExceptionRef = new Ref<IOException>();
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
try {
file.move(FilePatch.class, newParent);
}
catch (IOException e) {
ioExceptionRef.set(e);
}
}
});
if (! ioExceptionRef.isNull()) {
throw ioExceptionRef.get();
}
return file;*/
}
@Nullable
private VirtualFile makeSureParentPathExists(final String[] pieces) throws IOException {
VirtualFile child = myBaseDirectory;
final int size = pieces.length - 1;
for (int i = 0; i < size; i++) {
final String piece = pieces[i];
if (StringUtil.isEmptyOrSpaces(piece)) {
continue;
}
if ("..".equals(piece)) {
child = child.getParent();
continue;
}
VirtualFile nextChild = child.findChild(piece);
if (nextChild == null) {
nextChild = VfsUtil.createDirectories(child.getPath() + '/' + piece);
myCreatedDirectories.add(nextChild);
}
child = nextChild;
}
return child;
}
public List<PatchAndFile> getTextPatches() {
return myTextPatches;
}
public List<PatchAndFile> getBinaryPatches() {
return myBinaryPatches;
}
@NotNull
public List<VirtualFile> getWritableFiles() {
return myWritableFiles;
}
public void doMoveIfNeeded(final VirtualFile file) throws IOException {
final MovedFileData movedFile = myMovedFiles.get(file);
if (movedFile != null) {
myBeforePaths.add(VcsUtil.getFilePath(file));
ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<VirtualFile, IOException>() {
@Override
public VirtualFile compute() throws IOException {
return movedFile.doMove();
}
});
}
}
private static class MovedFileData {
private final VirtualFile myNewParent;
private final VirtualFile myCurrent;
private final String myNewName;
private MovedFileData(@NotNull final VirtualFile newParent, @NotNull final VirtualFile current, @NotNull final String newName) {
myNewParent = newParent;
myCurrent = current;
myNewName = newName;
}
public VirtualFile getCurrent() {
return myCurrent;
}
public VirtualFile getNewParent() {
return myNewParent;
}
public String getNewName() {
return myNewName;
}
public VirtualFile doMove() throws IOException {
final VirtualFile oldParent = myCurrent.getParent();
boolean needRename = !Comparing.equal(myCurrent.getName(), myNewName);
boolean needMove = !myNewParent.equals(oldParent);
if (needRename) {
if (needMove) {
File oldParentFile = VfsUtilCore.virtualToIoFile(oldParent);
File targetAfterRenameFile = new File(oldParentFile, myNewName);
if (targetAfterRenameFile.exists() && myCurrent.exists()) {
          // if the first rename would collide with an existing file, rename to a temporary third name, then move, then rename to the final target
performRenameWithConflicts(oldParentFile);
return myCurrent;
}
}
myCurrent.rename(PatchApplier.class, myNewName);
}
if (needMove) {
myCurrent.move(PatchApplier.class, myNewParent);
}
return myCurrent;
}
private void performRenameWithConflicts(@NotNull File oldParent) throws IOException {
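      // Strategy sketch: (1) generate a temporary name unique in the old
      // parent, (2) regenerate it until it is also free in the new parent,
      // (3) rename the file to it, (4) move it to the new parent, and
      // (5) rename it to the final myNewName.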
File tmpFileWithUniqueName = FileUtil.createTempFile(oldParent, "tempFileToMove", null, false);
File newParentFile = VfsUtilCore.virtualToIoFile(myNewParent);
File destFile = new File(newParentFile, tmpFileWithUniqueName.getName());
while (destFile.exists()) {
destFile = new File(newParentFile,
FileUtil.createTempFile(oldParent, FileUtil.getNameWithoutExtension(destFile.getName()), null, false)
.getName());
}
myCurrent.rename(PatchApplier.class, destFile.getName());
myCurrent.move(PatchApplier.class, myNewParent);
myCurrent.rename(PatchApplier.class, myNewName);
}
}
private static class DelayedPrecheckContext {
private final Map<FilePath, FilePatch> mySkipDeleted;
private final Map<FilePath, FilePatch> myOverrideExisting;
private final List<FilePath> myOverridenPaths;
private final Project myProject;
private DelayedPrecheckContext(final Project project) {
myProject = project;
myOverrideExisting = new HashMap<>();
mySkipDeleted = new HashMap<>();
myOverridenPaths = new LinkedList<>();
}
public void addSkip(final FilePath path, final FilePatch filePatch) {
mySkipDeleted.put(path, filePatch);
}
public void addOverrideExisting(final FilePatch patch, final FilePath filePath) {
if (! myOverrideExisting.containsKey(filePath)) {
myOverrideExisting.put(filePath, patch);
}
}
// returns those to be skipped
public Collection<FilePatch> doDelayed() {
final List<FilePatch> result = new LinkedList<>();
if (! myOverrideExisting.isEmpty()) {
final String title = "Overwrite Existing Files";
List<FilePath> files = new ArrayList<>(myOverrideExisting.keySet());
Collection<FilePath> selected = AbstractVcsHelper.getInstance(myProject).selectFilePathsToProcess(
files, title,
"\nThe following files should be created by patch, but they already exist.\nDo you want to overwrite them?\n", title,
"The following file should be created by patch, but it already exists.\nDo you want to overwrite it?\n{0}",
VcsShowConfirmationOption.STATIC_SHOW_CONFIRMATION,
"Overwrite", "Cancel");
if (selected != null) {
for (FilePath path : selected) {
myOverrideExisting.remove(path);
}
}
result.addAll(myOverrideExisting.values());
if (selected != null) {
myOverridenPaths.addAll(selected);
}
}
result.addAll(mySkipDeleted.values());
return result;
}
public List<FilePath> getOverridenPaths() {
return myOverridenPaths;
}
public Collection<FilePath> getAlreadyDeletedPaths() {
return mySkipDeleted.keySet();
}
}
public void setIgnoreContentRootsCheck(boolean ignoreContentRootsCheck) {
myIgnoreContentRootsCheck = ignoreContentRootsCheck;
}
public static class PatchAndFile {
private final VirtualFile myFile;
private final ApplyFilePatchBase<?> myPatch;
public PatchAndFile(VirtualFile file, ApplyFilePatchBase<?> patch) {
myFile = file;
myPatch = patch;
}
public VirtualFile getFile() {
return myFile;
}
public ApplyFilePatchBase<?> getApplyPatch() {
return myPatch;
}
}
}<|fim▁end|> | return new CheckDeleted(patch); |
<|file_name|>assign_process.py<|end_file_name|><|fim▁begin|># IfcOpenShell - IFC toolkit and geometry engine
# Copyright (C) 2021 Dion Moult <[email protected]>
#
# This file is part of IfcOpenShell.
#
# IfcOpenShell is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# IfcOpenShell is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with IfcOpenShell. If not, see <http://www.gnu.org/licenses/>.
import ifcopenshell
import ifcopenshell.api
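# Hypothetical usage sketch (the exact API module path is an assumption, not
# confirmed by this file):
#   ifcopenshell.api.run("sequence.assign_process", model,
#                        relating_process=task, related_object=product)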
class Usecase:
def __init__(self, file, **settings):
self.file = file
self.settings = {
"relating_process": None,<|fim▁hole|> for key, value in settings.items():
self.settings[key] = value
def execute(self):
if self.settings["related_object"].HasAssignments:
for assignment in self.settings["related_object"].HasAssignments:
if (
assignment.is_a("IfclRelAssignsToProcess")
and assignment.RelatingProcess == self.settings["relating_process"]
):
return
operates_on = None
if self.settings["relating_process"].OperatesOn:
operates_on = self.settings["relating_process"].OperatesOn[0]
if operates_on:
related_objects = list(operates_on.RelatedObjects)
related_objects.append(self.settings["related_object"])
operates_on.RelatedObjects = related_objects
ifcopenshell.api.run("owner.update_owner_history", self.file, **{"element": operates_on})
else:
operates_on = self.file.create_entity(
"IfcRelAssignsToProcess",
**{
"GlobalId": ifcopenshell.guid.new(),
"OwnerHistory": ifcopenshell.api.run("owner.create_owner_history", self.file),
"RelatedObjects": [self.settings["related_object"]],
"RelatingProcess": self.settings["relating_process"],
}
)
return operates_on<|fim▁end|> | "related_object": None,
}
|
<|file_name|>VerticalGlass.java<|end_file_name|><|fim▁begin|>/*
* VerticalGlass.java
* Copyright (C) 2010-2011 Jonas Eriksson
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.zkt.zmask.masks;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import org.zkt.zmask.Image;
import org.zkt.zmask.GeneralProperties;
import org.zkt.zmask.utils.PropertyDescription;
import org.zkt.zmask.utils.PropertyException;
import org.zkt.zmask.utils.PropertyHandler;
/**
* The vertical glass mask
*
* @author zqad
*/
public class VerticalGlass implements Mask {
public String getDescription() {
return "Vertical Glass";
}
public boolean needClone() {
return false;
}
public boolean needWhole() {
return false;
}
public boolean runBare() {
return false;
}
public BufferedImage runMask(BufferedImage image) {
int width = image.getWidth();
int height = image.getHeight();
BufferedImage bi = new BufferedImage(width, height,
image.getType());
Graphics2D g = (Graphics2D)bi.getGraphics();
g.scale(-1.0, 1.0);
int bs = GeneralProperties.getInstance().getBlockSize().width;
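		// With the x-axis mirrored by scale(-1, 1), drawing a block at
		// x' = -(i + bs) places its flipped image back over x = i..i+bs,
		// so each bs-wide vertical strip is mirrored in place.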
for (int i = 0; i <= width - bs; i += bs)
g.drawImage(image.getSubimage(i, 0, bs, height), -1 * i - bs, 0, null);
int remainder = width % bs;
if (remainder != 0)
g.drawImage(image.getSubimage(width - remainder, 0, remainder, height),
width - remainder, 0, null);
return bi;
}<|fim▁hole|> throw new UnsupportedOperationException("Not supported.");
}
public PropertyDescription[] getProperties() {
return null;
}
}<|fim▁end|> |
public void runMask(Image image) { |
<|file_name|>member.rs<|end_file_name|><|fim▁begin|>//! Tracks membership. Contains both the `Member` struct and the `MemberList`.
pub use crate::protocol::swim::Health;
use crate::{error::{Error,
Result},
protocol::{self,
newscast,
swim as proto,
FromProto},
rumor::{RumorKey,
RumorPayload,
RumorType}};
use habitat_common::sync::{Lock,
ReadGuard,
WriteGuard};
use habitat_core::util::ToI64;
use prometheus::IntGaugeVec;
use rand::{seq::{IteratorRandom,
SliceRandom},
thread_rng};
use serde::{de,
ser::{SerializeMap,
SerializeStruct},
Deserialize,
Deserializer,
Serialize,
Serializer};
use std::{collections::{hash_map,
HashMap},
fmt,
net::SocketAddr,
num::ParseIntError,
ops::Add,
result,<|fim▁hole|> Instant}};
use uuid::Uuid;
/// How many nodes do we target when we need to run PingReq.
const PINGREQ_TARGETS: usize = 5;
lazy_static! {
static ref PEER_HEALTH_COUNT: IntGaugeVec =
register_int_gauge_vec!("hab_butterfly_peer_health_total",
"Number of butterfly peers",
&["health"]).unwrap();
}
/// Wraps a `u64` to represent the "incarnation number" of a
/// `Member`. Incarnation numbers can only ever be incremented.
///
/// Note: we're intentionally deriving `Copy` to be able to treat this
/// like a "normal" numeric type.
#[derive(Clone, Debug, Ord, PartialEq, PartialOrd, Eq, Copy)]
pub struct Incarnation(u64);
impl Default for Incarnation {
fn default() -> Self { Incarnation(0) }
}
impl From<u64> for Incarnation {
fn from(num: u64) -> Self { Incarnation(num) }
}
impl Incarnation {
pub fn to_u64(self) -> u64 { self.0 }
pub fn to_i64(self) -> i64 { self.0.to_i64() }
}
impl fmt::Display for Incarnation {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) }
}
impl Add<u64> for Incarnation {
type Output = Incarnation;
fn add(self, other: u64) -> Incarnation { Incarnation(self.0 + other) }
}
impl Serialize for Incarnation {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
serializer.serialize_u64(self.0)
}
}
impl FromStr for Incarnation {
type Err = ParseIntError;
fn from_str(s: &str) -> result::Result<Self, Self::Err> {
let raw = s.parse::<u64>()?;
Ok(Incarnation(raw))
}
}
struct IncarnationVisitor;
impl<'de> de::Visitor<'de> for IncarnationVisitor {
type Value = Incarnation;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "a u64")
}
fn visit_u64<E>(self, v: u64) -> result::Result<Self::Value, E>
where E: de::Error
{
Ok(Incarnation::from(v))
}
}
impl<'de> Deserialize<'de> for Incarnation {
fn deserialize<D>(deserializer: D) -> result::Result<Incarnation, D::Error>
where D: Deserializer<'de>
{
deserializer.deserialize_u64(IncarnationVisitor)
}
}
// A Uuid rendered in its simple (hyphen-free) string form
pub type UuidSimple = String;
/// A member in the swim group. Passes most of its functionality along to the internal protobuf
/// representation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Member {
pub id: String,
pub incarnation: Incarnation,
pub address: String,
pub swim_port: u16,
pub gossip_port: u16,
pub persistent: bool,
pub departed: bool,
}
impl Member {
/// Returns the socket address of this member.
///
/// # Panics
///
/// This function panics if the address is un-parseable. In practice, it shouldn't be
/// un-parseable, since its set from the inbound socket directly.
pub fn swim_socket_address(&self) -> SocketAddr {
let address_str = format!("{}:{}", self.address, self.swim_port);
match address_str.parse() {
Ok(addr) => addr,
Err(e) => {
panic!("Cannot parse member {:?} address: {}", self, e);
}
}
}
}
impl Default for Member {
fn default() -> Self {
Member { id: Uuid::new_v4().to_simple_ref().to_string(),
incarnation: Incarnation::default(),
// TODO (CM): DANGER DANGER DANGER
// This is a lousy default, and suggests that the notion
// of a "default Member" doesn't make much sense.
//
// (Port numbers of 0 are also problematic.)
address: String::default(),
swim_port: 0,
gossip_port: 0,
persistent: false,
departed: false, }
}
}
impl From<Member> for RumorKey {
fn from(member: Member) -> RumorKey { RumorKey::new(RumorType::Member, &member.id, "") }
}
impl<'a> From<&'a Member> for RumorKey {
fn from(member: &'a Member) -> RumorKey { RumorKey::new(RumorType::Member, &member.id, "") }
}
impl<'a> From<&'a &'a Member> for RumorKey {
fn from(member: &'a &'a Member) -> RumorKey { RumorKey::new(RumorType::Member, &member.id, "") }
}
impl From<Member> for proto::Member {
fn from(value: Member) -> Self {
proto::Member { id: Some(value.id),
incarnation: Some(value.incarnation.to_u64()),
address: Some(value.address),
swim_port: Some(value.swim_port.into()),
gossip_port: Some(value.gossip_port.into()),
persistent: Some(value.persistent),
departed: Some(value.departed), }
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Membership {
pub member: Member,
pub health: Health,
}
impl fmt::Display for Membership {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f,
"Member i/{} m/{} ad/{} sp/{} gp/{} p/{} d/{} h/{:?}",
self.member.incarnation,
self.member.id,
self.member.address,
self.member.swim_port,
self.member.gossip_port,
self.member.persistent,
self.member.departed,
self.health)
}
}
impl Membership {
/// See MemberList::insert
fn newer_or_less_healthy_than(&self,
other_incarnation: Incarnation,
other_health: Health)
-> bool {
self.member.incarnation > other_incarnation
|| (self.member.incarnation == other_incarnation && self.health > other_health)
}
}
impl protocol::Message<proto::Membership> for Membership {
const MESSAGE_ID: &'static str = "Membership";
}
impl From<Membership> for proto::Membership {
fn from(value: Membership) -> Self {
proto::Membership { member: Some(value.member.into()),
health: Some(value.health as i32), }
}
}
/// Since protobuf doesn't have support for 16-bit ints, we need to check that
/// we haven't received something illegal
fn as_port(x: i32) -> Option<u16> {
const PORT_MIN: i32 = ::std::u16::MIN as i32;
const PORT_MAX: i32 = ::std::u16::MAX as i32;
match x {
PORT_MIN..=PORT_MAX => Some(x as u16),
_ => None,
}
}
impl FromProto<proto::Member> for Member {
fn from_proto(proto: proto::Member) -> Result<Self> {
Ok(Member { id: proto.id.ok_or(Error::ProtocolMismatch("id"))?,
incarnation: proto.incarnation
.map_or_else(Incarnation::default, Incarnation::from),
// This hurts so bad...
//
// Our "Member" protobuf is currently serving two
// purposes. One is here, serving as the return address of
// a Supervisor for a message. Another is serving as a
// record of a known member of the Supervisor ring; this
// data is piggy-backed on our core SWIM messages as a way
// of introducing new members to existing network members.
//
// The thing is, depending on which case the Member struct
// is being used for, it may or may not have an "address"
// field. If it's as the return address, it's actually
// getting the address from the networking layer; the
// sending Supervisor doesn't actually have that
// information.
//
// On the other hand, if it's an actual membership record,
// then it _will_ have an address, which will ultimately
// have been resolved at some point in the past by the
// aforementioned method of relying on the networking
// layer.
//
// The Prost migration introduced validation that wasn't
// taking this into account; it assumed that we would
// _always_ have a network address. This cause it to
// essentially reject any messages from 0.59.0 (and
// before) Supervisors, because they had no such
// validation, and never set any value for a return
// address.
//
// It was able to work with Supervisors _after_ the Prost
// migration because we default to setting an empty string
// for the address. This is arguably NOT the right thing
// to do, since a value of `Some("")` is more dangerous than
// a value of `None`. We ultimately need to either _not_
// generate meaningless default values, or tease apart the
// two uses of our Member protobuf, or both.
address: proto.address.unwrap_or_default(),
swim_port: proto.swim_port
.and_then(as_port)
.ok_or(Error::ProtocolMismatch("swim-port"))?,
gossip_port: proto.gossip_port
.and_then(as_port)
.ok_or(Error::ProtocolMismatch("gossip-port"))?,
persistent: proto.persistent.unwrap_or(false),
departed: proto.departed.unwrap_or(false), })
}
}
impl FromProto<proto::Membership> for Membership {
fn from_proto(proto: proto::Membership) -> Result<Self> {
Ok(Membership { member: proto.member
.ok_or(Error::ProtocolMismatch("member"))
.and_then(Member::from_proto)?,
health: proto.health
.and_then(Health::from_i32)
.unwrap_or(Health::Alive), })
}
}
impl FromProto<newscast::Rumor> for Membership {
fn from_proto(proto: newscast::Rumor) -> Result<Self> {
match proto.payload.ok_or(Error::ProtocolMismatch("payload"))? {
RumorPayload::Member(membership) => Membership::from_proto(membership),
_ => panic!("from-proto payload"),
}
}
}
mod member_list {
#[derive(Clone, Debug)]
pub struct Entry {
pub member: super::Member,
pub health: super::Health,
pub health_updated_at: std::time::Instant,
}
}
/// Tracks lists of members, their health, and how long they have been
/// suspect or confirmed.
#[derive(Debug)]
pub struct MemberList {
entries: Lock<HashMap<UuidSimple, member_list::Entry>>,
initial_members: Lock<Vec<Member>>,
update_counter: AtomicUsize,
}
impl Serialize for MemberList {
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let mut strukt = serializer.serialize_struct("member_list", 4)?;
// A hack to maintain backwards compatibility with the version
        // of MemberList where members was a HashMap<UuidSimple, Member>
// and health was a HashMap<UuidSimple, Health>
let mut member_struct = HashMap::new();
let mut health_struct = HashMap::new();
for (id, member_list::Entry { member, health, .. }) in self.read_entries().iter() {
member_struct.insert(id.clone(), member.clone());
health_struct.insert(id.clone(), *health);
}
strukt.serialize_field("members", &member_struct)?;
strukt.serialize_field("health", &health_struct)?;
// TODO (CM): why does this use a different ordering than
// get_update_counter (and why doesn't it just use get_update_counter?)
let update_number = self.update_counter.load(Ordering::SeqCst);
strukt.serialize_field("update_counter", &update_number)?;
strukt.end()
}
}
impl MemberList {
/// Creates a new, empty, MemberList.
pub fn new() -> MemberList {
MemberList { entries: Lock::new(HashMap::new()),
initial_members: Lock::new(Vec::new()),
update_counter: AtomicUsize::new(0), }
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
fn read_entries(&self) -> ReadGuard<'_, HashMap<UuidSimple, member_list::Entry>> {
self.entries.read()
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
fn write_entries(&self) -> WriteGuard<'_, HashMap<UuidSimple, member_list::Entry>> {
self.entries.write()
}
fn initial_members_read(&self) -> ReadGuard<'_, Vec<Member>> { self.initial_members.read() }
fn initial_members_write(&self) -> WriteGuard<'_, Vec<Member>> { self.initial_members.write() }
/// We don't care if this repeats - it just needs to be unique for any given two states, which
/// it will be.
fn increment_update_counter(&self) { self.update_counter.fetch_add(1, Ordering::Relaxed); }
pub fn get_update_counter(&self) -> usize { self.update_counter.load(Ordering::Relaxed) }
/// # Locking (see locking.md)
/// * `MemberList::initial_members` (read)
pub fn len_initial_members_imlr(&self) -> usize { self.initial_members_read().len() }
/// # Locking (see locking.md)
/// * `MemberList::initial_members` (write)
pub fn add_initial_member_imlw(&self, member: Member) {
self.initial_members_write().push(member);
}
/// # Locking (see locking.md)
/// * `MemberList::initial_members` (write)
pub fn set_initial_members_imlw(&self, members: Vec<Member>) {
*self.initial_members_write() = members;
}
/// # Locking (see locking.md)
/// * `MemberList::initial_members` (read)
pub fn with_initial_members_imlr(&self, with_closure: impl Fn(&Member)) {
for member in self.initial_members_read().iter() {
with_closure(member);
}
}
/// Inserts a member into the member list with the given health,
/// but only if the criteria for insertion are met. Returns `true`
/// if the rumor information was actually accepted, and `false`
/// otherwise.
///
/// There are a few rules governing how we choose to accept
/// Membership rumors.
///
/// First, if we have absolutely no record of having seen
/// `incoming_member` before, we'll accept the rumor without
/// question.
///
/// On the other hand, if we _have_ seen `incoming_member` we need
/// to compare the incoming information to what we currently have
/// before we decide whether to accept the new information.
///
/// If the incarnation number of the `incoming_member` is lower
/// than that of the rumor we already have, then we reject
/// it. Incarnation numbers for Members are only ever incremented
/// by that Member itself, so the fact that we already have one
/// that is higher means that we have more up-to-date information.
///
/// Similarly, if the incoming incarnation number is greater than
/// what we have, we'll accept it as more up-to-date information.
///
/// If the incarnation numbers match, we need to look at the
/// health to determine if we accept the rumor.
///
/// We only accept the incoming rumor if its health is strictly
/// "worse than" the health we currently have for the member.
///
/// Alternatively, you can think of "worse than" as "greater
/// than", given this ordering of Health states (this is governed
/// by the `PartialOrd` and `Ord` trait implementations on `Health`):
///
/// Alive < Suspect < Confirmed < Departed
///
/// For example, if we think that "Supervisor X (at incarnation 1)
/// is Alive", but the rumor is telling us that "Supervisor X (at
/// incarnation 1) is Suspect", that means that whoever we've
/// received this rumor from is having trouble contacting
/// Supervisor X. We should accept this rumor and propagate it to
/// ensure that a) information about degraded connectivity makes
/// it around the network, and b) the odds that Supervisor X will
/// receive this rumor increase, allowing it to refute this (if
/// indeed Supervisor X is still around.)
///
/// If we were to just accept the rumor regardless of what the
/// health was, we could basically start "arguing" across the
/// network; one Supervisor thinks X is Alive, another thinks it's
/// Suspect, and we just keep flip-flopping between the two
/// without any sort of resolution.
///
/// Below is the truth table that illustrates this. "Current Health"
/// is down the left side, while "Incoming Health" is across the
/// top. We only propagate when Incoming is "worse than" Current:
///
/// | | Alive | Suspect | Confirmed | Departed |
/// |-----------+-------+-----------+-----------+-----------|
/// | Alive | | propagate | propagate | propagate |
/// | Suspect | | | propagate | propagate |
/// | Confirmed | | | | propagate |
/// | Departed | | | | |
///
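    /// An illustrative sketch of these rules (hypothetical values, not a
    /// doctest):
    ///
    /// ```ignore
    /// let ml = MemberList::new();
    /// let mut m = Member::default();
    /// m.incarnation = Incarnation::from(1);
    /// assert!(ml.insert_mlw(m.clone(), Health::Suspect)); // first sighting: accepted
    /// assert!(!ml.insert_mlw(m.clone(), Health::Alive)); // same incarnation, better health: rejected
    /// assert!(ml.insert_mlw(m.clone(), Health::Confirmed)); // same incarnation, worse health: accepted
    /// m.incarnation = Incarnation::from(2);
    /// assert!(ml.insert_mlw(m, Health::Alive)); // higher incarnation: accepted
    /// ```
    ///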
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
// TODO (CM): why don't we just insert a membership record here?
pub fn insert_mlw(&self, incoming_member: Member, incoming_health: Health) -> bool {
self.insert_membership_mlw(Membership { member: incoming_member,
health: incoming_health, })
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
fn insert_membership_mlw(&self, incoming: Membership) -> bool {
// Is this clone necessary, or can a key be a reference to a field contained in the value?
// Maybe the members we store should not contain the ID to reduce the duplication?
let modified = match self.write_entries().entry(incoming.member.id.clone()) {
hash_map::Entry::Occupied(mut entry) => {
let val = entry.get_mut();
if incoming.newer_or_less_healthy_than(val.member.incarnation, val.health) {
*val = member_list::Entry { member: incoming.member,
health: incoming.health,
health_updated_at: Instant::now(), };
true
} else {
false
}
}
hash_map::Entry::Vacant(entry) => {
entry.insert(member_list::Entry { member: incoming.member,
health: incoming.health,
health_updated_at: Instant::now(), });
true
}
};
if modified {
self.increment_update_counter();
self.calculate_peer_health_metrics_mlr();
}
modified
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
pub fn set_departed_mlw(&self, member_id: &str) {
if let Some(member_list::Entry { member, health, .. }) =
self.write_entries().get_mut(member_id)
{
debug!("Setting health of {:?}, {} -> {}",
member,
health,
Health::Departed);
*health = Health::Departed;
} else {
trace!("set_departed called on unknown member {}", member_id);
}
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
fn calculate_peer_health_metrics_mlr(&self) {
let mut health_counts: HashMap<Health, i64> = HashMap::new();
for entry in self.read_entries().values() {
*health_counts.entry(entry.health).or_insert(0) += 1;
}
for health in [Health::Alive,
Health::Suspect,
Health::Confirmed,
Health::Departed].iter()
{
PEER_HEALTH_COUNT.with_label_values(&[&health.to_string()])
.set(*health_counts.get(health).unwrap_or(&0));
}
}
/// Returns the health of the member, if the member exists.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn health_of_mlr(&self, member: &Member) -> Option<Health> {
self.health_of_by_id_mlr(&member.id)
}
/// Returns the health of the member, if the member exists.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn health_of_by_id_mlr(&self, member_id: &str) -> Option<Health> {
self.read_entries()
.get(member_id)
.map(|member_list::Entry { health, .. }| *health)
}
/// Returns the health of the member, blocking for a limited timeout
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
///
/// # Errors:
/// * `Error::Timeout` if the health data can't be accessed within `timeout`
/// * `Error::UnknownMember` if the member does not exist
pub fn health_of_by_id_with_timeout_mlr(&self,
member_id: &str,
timeout: std::time::Duration)
-> Result<Health> {
let entries = self.entries.try_read_for(timeout);
if entries.is_none() {
debug!("try_lock_for timed out after {:?}", timeout);
return Err(Error::Timeout(format!("waiting on {} member health query", member_id)));
}
entries.unwrap()
.get(member_id)
.map(|member_list::Entry { health, .. }| *health)
.ok_or_else(|| Error::UnknownMember(member_id.to_string()))
}
/// Returns true if the member is alive, suspect, or persistent; used during the target
/// selection phase of the outbound thread.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn pingable_mlr(&self, member: &Member) -> bool {
if member.persistent {
return true;
}
match self.health_of_mlr(member) {
Some(Health::Alive) | Some(Health::Suspect) => true,
_ => false,
}
}
/// Returns true if we are pinging this member because they are persistent, but we think they
/// are gone.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn persistent_and_confirmed_mlr(&self, member: &Member) -> bool {
member.persistent && self.health_of_mlr(member) == Some(Health::Confirmed)
}
/// Returns a protobuf membership record for the given member id.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn membership_for_mlr(&self, member_id: &str) -> Option<Membership> {
self.read_entries()
.get(member_id)
.map(|member_list::Entry { member, health, .. }| {
Membership { member: member.clone(),
health: *health, }
})
}
/// Returns the number of entries.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn len_mlr(&self) -> usize { self.read_entries().len() }
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn is_empty_mlr(&self) -> bool { self.read_entries().is_empty() }
/// A randomized list of members to check.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn check_list_mlr(&self, exclude_id: &str) -> Vec<Member> {
let mut members: Vec<_> = self.read_entries()
.values()
.map(|member_list::Entry { member, .. }| member)
.filter(|member| member.id != exclude_id)
.cloned()
.collect();
members.shuffle(&mut thread_rng());
members
}
/// Takes a function whose first argument is a member, and calls it for every pingreq target.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
/// * Additionally `with_closure` is called with this lock held, so the closure must not call
/// any functions which take this lock.
pub fn with_pingreq_targets_mlr(&self,
sending_member_id: &str,
target_member_id: &str,
mut with_closure: impl FnMut(&Member)) {
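        // Randomly sample up to PINGREQ_TARGETS alive members, excluding the
        // sender and the original target, and hand each one to the closure.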
for member_list::Entry { member, .. } in
self.read_entries()
.values()
.filter(|member_list::Entry { member, health, .. }| {
member.id != sending_member_id
&& member.id != target_member_id
&& *health == Health::Alive
})
.choose_multiple(&mut thread_rng(), PINGREQ_TARGETS)
{
with_closure(member);
}
}
/// If an owned `Member` is required, use this. If a shared reference is
/// good enough, use `with_member`.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn get_cloned_mlr(&self, member_id: &str) -> Option<Member> {
self.read_entries()
.get(member_id)
.map(|member_list::Entry { member, .. }| member.clone())
}
/// Iterates over the memberships list, calling the function for each membership.
/// This could be return Result<T> instead, but there's only the one caller now.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
/// * Additionally `with_closure` is called with this lock held, so the closure must not call
/// any functions which take this lock.
pub fn with_memberships_mlr<T: Default>(&self,
mut with_closure: impl FnMut(Membership) -> Result<T>)
-> Result<T> {
let mut ok = Ok(T::default());
for membership in self.read_entries()
.values()
.map(|member_list::Entry { member, health, .. }| {
Membership { member: member.clone(),
health: *health, }
})
{
ok = Ok(with_closure(membership)?);
}
ok
}
/// Query the list of aging Suspect members to find those which
/// have now expired to Confirmed. Health is updated
/// appropriately, and a list of newly-Confirmed Member IDs is
/// returned.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
pub fn members_expired_to_confirmed_mlw(&self, timeout: Duration) -> Vec<String> {
self.members_expired_to_mlw(Health::Confirmed, timeout)
}
/// Query the list of aging Confirmed members to find those which
/// have now expired to Departed. Health is updated appropriately,
/// and a list of newly-Departed Member IDs is returned.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
pub fn members_expired_to_departed_mlw(&self, timeout: Duration) -> Vec<String> {
self.members_expired_to_mlw(Health::Departed, timeout)
}
/// Return the member IDs of all members that have "timed out" to
/// the `expiring_to` `Health`.
///
/// For instance,
///
/// members_expired_to(Health::Departed, timeout)
///
/// will return the IDs of those members that have been
/// `Confirmed` for longer than the given `timeout`.
///
/// The newly-updated health status is recorded properly.
///
/// # Locking (see locking.md)
/// * `MemberList::entries` (write)
// TODO (CM): Better return type than Vec<String>
fn members_expired_to_mlw(&self, expiring_to: Health, timeout: Duration) -> Vec<String> {
let now = Instant::now();
let precursor_health = match expiring_to {
Health::Confirmed => Health::Suspect,
Health::Departed => Health::Confirmed,
other => panic!("Expiring to {} is invalid", other),
};
let expired: Vec<_> =
self.write_entries()
.iter_mut()
.filter_map(|(id, v)| {
let member_list::Entry { health,
health_updated_at,
.. } = v;
if *health == precursor_health && now >= *health_updated_at + timeout {
*health = expiring_to;
*health_updated_at = now;
Some(id.clone())
} else {
None
}
})
.collect();
if !expired.is_empty() {
self.increment_update_counter();
}
expired
}
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
pub fn contains_member_mlr(&self, member_id: &str) -> bool {
self.read_entries().contains_key(member_id)
}
}
/// This proxy wraps a MemberList so that we can customize its serialization logic.
pub struct MemberListProxy<'a>(&'a MemberList);
impl<'a> MemberListProxy<'a> {
    pub fn new(m: &'a MemberList) -> Self { MemberListProxy(m) }
}
impl<'a> Serialize for MemberListProxy<'a> {
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let map = self.0.read_entries();
let mut m = serializer.serialize_map(Some(map.len()))?;
for (id, member_list::Entry { member, health, .. }) in map.iter() {
m.serialize_entry(id, &MemberProxy::new(member, health))?;
}
m.end()
}
}
/// This proxy wraps both a Member and Health, and presents them together, for use in the
/// supervisor's /butterfly HTTP API endpoint.
pub struct MemberProxy<'a>(&'a Member, &'a Health);
impl<'a> MemberProxy<'a> {
    pub fn new(m: &'a Member, h: &'a Health) -> Self { MemberProxy(m, h) }
}
impl<'a> Serialize for MemberProxy<'a> {
fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
where S: Serializer
{
let mut strukt = serializer.serialize_struct("member", 6)?;
strukt.serialize_field("address", &self.0.address)?;
strukt.serialize_field("gossip_port", &self.0.gossip_port)?;
strukt.serialize_field("incarnation", &self.0.incarnation)?;
strukt.serialize_field("persistent", &self.0.persistent)?;
strukt.serialize_field("swim_port", &self.0.swim_port)?;
strukt.serialize_field("health", &self.1)?;
strukt.end()
}
}
#[cfg(test)]
mod tests {
use super::*;
impl MemberList {
// This is a remnant of when the MemberList::members entries were
// simple Member structs. The tests that use this should be replaced,
// but until then, this keeps them working.
/// # Locking (see locking.md)
/// * `MemberList::entries` (read)
fn with_member_iter<T>(&self,
mut with_closure: impl FnMut(hash_map::Values<'_, String, Member>)
-> T)
-> T {
let mut member_map = HashMap::new();
for (id, super::member_list::Entry { member, .. }) in self.read_entries().iter() {
member_map.insert(id.clone(), member.clone());
}
with_closure(member_map.values())
}
}
mod member {
use crate::member::{Incarnation,
Member};
// Sets the uuid to simple, and the incarnation to the default.
#[test]
fn new() {
let member = Member::default();
assert_eq!(member.id.len(), 32);
assert_eq!(member.incarnation, Incarnation::default());
}
}
mod membership {
use crate::{member::{Health,
Member,
Membership},
protocol::Message};
#[test]
fn encode_decode_roundtrip() {
let member = Member::default();
let membership = Membership { member,
health: Health::Suspect };
let bytes = membership.clone()
.write_to_bytes()
.expect("Could not write membership to bytes!");
let from_bytes =
Membership::from_bytes(&bytes).expect("Could not decode membership from bytes!");
assert_eq!(&membership.member, &from_bytes.member);
assert_eq!(&membership.health, &from_bytes.health);
}
}
mod member_list {
use crate::member::{Health,
Member,
MemberList,
Membership,
PINGREQ_TARGETS};
fn populated_member_list(size: u64) -> MemberList {
let ml = MemberList::new();
for _x in 0..size {
let m = Member::default();
ml.insert_mlw(m, Health::Alive);
}
ml
}
#[test]
fn new() {
let ml = MemberList::new();
assert!(ml.is_empty_mlr());
}
#[test]
fn insert_several_members() {
let ml = populated_member_list(4);
assert_eq!(ml.len_mlr(), 4);
}
#[test]
fn check_list() {
let ml = populated_member_list(1000);
let list_a = ml.check_list_mlr("foo");
let list_b = ml.check_list_mlr("foo");
assert!(list_a != list_b);
}
#[test]
fn health_of() {
let ml = populated_member_list(1);
ml.with_memberships_mlr(|Membership { health, .. }| {
assert_eq!(health, Health::Alive);
Ok(())
})
.ok();
}
#[test]
fn health_of_with_memberships() {
let ml = populated_member_list(1);
ml.with_memberships_mlr(|Membership { health, .. }| {
assert_eq!(health, Health::Alive);
Ok(0)
})
.ok();
}
#[test]
fn pingreq_targets() {
let ml = populated_member_list(10);
ml.with_member_iter(|mut i| {
let from = i.next().unwrap();
let target = i.nth(1).unwrap();
let mut counter: usize = 0;
ml.with_pingreq_targets_mlr(&from.id, &target.id, |_m| counter += 1);
assert_eq!(counter, PINGREQ_TARGETS);
});
}
#[test]
fn pingreq_targets_excludes_pinging_member() {
let ml = populated_member_list(3);
ml.with_member_iter(|mut i| {
let from = i.next().unwrap();
let target = i.nth(1).unwrap();
let mut excluded_appears: bool = false;
ml.with_pingreq_targets_mlr(&from.id, &target.id, |m| {
if m.id == from.id {
excluded_appears = true
}
});
assert_eq!(excluded_appears, false);
});
}
#[test]
fn pingreq_targets_excludes_target_member() {
let ml = populated_member_list(3);
ml.with_member_iter(|mut i| {
let from = i.next().unwrap();
let target = i.nth(1).unwrap();
let mut excluded_appears: bool = false;
ml.with_pingreq_targets_mlr(&from.id, &target.id, |m| {
if m.id == target.id {
excluded_appears = true
}
});
assert_eq!(excluded_appears, false);
});
}
#[test]
fn pingreq_targets_minimum_viable_pingreq_size_is_three() {
let ml = populated_member_list(3);
ml.with_member_iter(|mut i| {
let from = i.next().unwrap();
let target = i.nth(1).unwrap();
let mut counter: isize = 0;
ml.with_pingreq_targets_mlr(&from.id, &target.id, |_m| counter += 1);
assert_eq!(counter, 1);
});
}
#[test]
fn insert_no_member() {
let ml = MemberList::new();
let member = Member::default();
let mcheck = member.clone();
assert_eq!(ml.insert_mlw(member, Health::Alive), true);
assert_eq!(ml.health_of_mlr(&mcheck), Some(Health::Alive));
}
/// Tests of MemberList::insert
mod insert {
use crate::member::{Health,
Incarnation,
Member,
MemberList};
use std::cmp::Ordering;
fn assert_cannot_insert_member_rumor_of_lower_incarnation(from_health: Health,
to_health: Health)
{
let ml = MemberList::new();
let initial_update_counter_value = ml.get_update_counter();
let initial_incarnation = Incarnation::from(10); // just to pick a number
let member = {
let mut m = Member::default();
m.incarnation = initial_incarnation;
m
};
assert!(ml.insert_mlw(member.clone(), from_health),
"Could not insert member into list initially");
let update_counter_value_checkpoint_1 = ml.get_update_counter();
assert_eq!(update_counter_value_checkpoint_1,
initial_update_counter_value + 1,
"Update counter should have incremented by one");
assert_eq!(ml.health_of_mlr(&member),
Some(from_health),
"Member should have had health {:?}, but didn't",
from_health);
let member_with_lower_incarnation = {
let mut m = member.clone();
m.incarnation = Incarnation::from(m.incarnation.to_u64() - 1);
m
};
assert!(!ml.insert_mlw(member_with_lower_incarnation, to_health),
"Inserting with {:?}->{:?} should be a no-op with a lower incarnation \
number",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_value_checkpoint_1,
"Update counter should not have been incremented after trying to \
insert a lower-incarnation-number rumor");
assert_eq!(ml.health_of_mlr(&member),
Some(from_health),
"Member should have still have health {:?} following attempt to \
insert lower-incarnation-number rumor, but didn't",
from_health);
}
macro_rules! lower_incarnation {
// Unfortunately, Rust macros currently can't be used to generate
// the name of a function, so we have to provide one :(
($fn_name:ident, $from:expr, $to:expr) => {
#[test]
fn $fn_name() {
assert_cannot_insert_member_rumor_of_lower_incarnation($from, $to);
}
};
}
lower_incarnation!(lower_a_to_a, Health::Alive, Health::Alive);
lower_incarnation!(lower_a_to_s, Health::Alive, Health::Suspect);
lower_incarnation!(lower_a_to_c, Health::Alive, Health::Confirmed);
lower_incarnation!(lower_a_to_d, Health::Alive, Health::Departed);
lower_incarnation!(lower_s_to_a, Health::Suspect, Health::Alive);
lower_incarnation!(lower_s_to_s, Health::Suspect, Health::Suspect);
lower_incarnation!(lower_s_to_c, Health::Suspect, Health::Confirmed);
lower_incarnation!(lower_s_to_d, Health::Suspect, Health::Departed);
lower_incarnation!(lower_c_to_a, Health::Confirmed, Health::Alive);
lower_incarnation!(lower_c_to_s, Health::Confirmed, Health::Suspect);
lower_incarnation!(lower_c_to_c, Health::Confirmed, Health::Confirmed);
lower_incarnation!(lower_c_to_d, Health::Confirmed, Health::Departed);
lower_incarnation!(lower_d_to_a, Health::Departed, Health::Alive);
lower_incarnation!(lower_d_to_s, Health::Departed, Health::Suspect);
lower_incarnation!(lower_d_to_c, Health::Departed, Health::Confirmed);
lower_incarnation!(lower_d_to_d, Health::Departed, Health::Departed);
fn assert_always_insert_member_rumor_of_higher_incarnation(from_health: Health,
to_health: Health)
{
let ml = MemberList::new();
let initial_update_counter_value = ml.get_update_counter();
let initial_incarnation = Incarnation::from(10); // just to pick a number
let member = {
let mut m = Member::default();
m.incarnation = initial_incarnation;
m
};
assert!(ml.insert_mlw(member.clone(), from_health),
"Could not insert member into list initially");
let update_counter_value_checkpoint_1 = ml.get_update_counter();
assert_eq!(update_counter_value_checkpoint_1,
initial_update_counter_value + 1,
"Update counter should have incremented by one");
assert_eq!(ml.health_of_mlr(&member),
Some(from_health),
"Member should have had health {:?}, but didn't",
from_health);
let member_with_higher_incarnation = {
let mut m = member.clone();
m.incarnation = m.incarnation + 1;
m
};
assert!(ml.insert_mlw(member_with_higher_incarnation, to_health),
"Inserting with {:?}->{:?} should be always work with a higher \
incarnation number",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_value_checkpoint_1 + 1,
"Update counter should increment by 1 when inserting a \
higher-incarnation-number rumor");
assert_eq!(ml.health_of_mlr(&member),
Some(to_health),
"Member should have health {:?} following insertion of \
higher-incarnation-number rumor",
to_health);
}
macro_rules! higher_incarnation {
// Unfortunately, Rust macros currently can't be used to generate
// the name of a function, so we have to provide one :(
($fn_name:ident, $from:expr, $to:expr) => {
#[test]
fn $fn_name() {
assert_always_insert_member_rumor_of_higher_incarnation($from, $to);
}
};
}
higher_incarnation!(higher_a_to_a, Health::Alive, Health::Alive);
higher_incarnation!(higher_a_to_s, Health::Alive, Health::Suspect);
higher_incarnation!(higher_a_to_c, Health::Alive, Health::Confirmed);
higher_incarnation!(higher_a_to_d, Health::Alive, Health::Departed);
higher_incarnation!(higher_s_to_a, Health::Suspect, Health::Alive);
higher_incarnation!(higher_s_to_s, Health::Suspect, Health::Suspect);
higher_incarnation!(higher_s_to_c, Health::Suspect, Health::Confirmed);
higher_incarnation!(higher_s_to_d, Health::Suspect, Health::Departed);
higher_incarnation!(higher_c_to_a, Health::Confirmed, Health::Alive);
higher_incarnation!(higher_c_to_s, Health::Confirmed, Health::Suspect);
higher_incarnation!(higher_c_to_c, Health::Confirmed, Health::Confirmed);
higher_incarnation!(higher_c_to_d, Health::Confirmed, Health::Departed);
higher_incarnation!(higher_d_to_a, Health::Departed, Health::Alive);
higher_incarnation!(higher_d_to_s, Health::Departed, Health::Suspect);
higher_incarnation!(higher_d_to_c, Health::Departed, Health::Confirmed);
higher_incarnation!(higher_d_to_d, Health::Departed, Health::Departed);
fn assert_only_insert_member_rumor_of_same_incarnation_if_health_is_worse(from_health: Health,
to_health: Health)
{
let ml = MemberList::new();
let initial_update_counter_value = ml.get_update_counter();
let initial_incarnation = Incarnation::from(10); // just to pick a number
let member = {
let mut m = Member::default();
m.incarnation = initial_incarnation;
m
};
assert!(ml.insert_mlw(member.clone(), from_health),
"Could not insert member into list initially");
let update_counter_value_checkpoint_1 = ml.get_update_counter();
assert_eq!(update_counter_value_checkpoint_1,
initial_update_counter_value + 1,
"Update counter should have incremented by one");
assert_eq!(ml.health_of_mlr(&member),
Some(from_health),
"Member should have had health {:?}, but didn't",
from_health);
let member_with_same_incarnation = member.clone();
if to_health > from_health {
assert!(ml.insert_mlw(member_with_same_incarnation, to_health),
"Inserting with {:?}->{:?} should work with an identical incarnation \
number",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_value_checkpoint_1 + 1,
"Update counter should increment by 1 when inserting a \
same-incarnation-number rumor with worse health");
assert_eq!(ml.health_of_mlr(&member),
Some(to_health),
"Member should have health {:?} following insertion of \
same-incarnation-number rumor with worse health",
to_health);
} else {
assert!(!ml.insert_mlw(member_with_same_incarnation, to_health),
"Inserting with {from:?}->{to:?} should never work with an identical \
incarnation number, because {to:?} is not \"worse than\" {from:?}",
from = from_health,
to = to_health);
assert_eq!(ml.get_update_counter(),
update_counter_value_checkpoint_1,
"Update counter should not increment when inserting a \
same-incarnation-number rumor without worse health");
assert_eq!(ml.health_of_mlr(&member),
Some(from_health),
"Member should still have health {:?} following insertion of \
same-incarnation-number rumor without worse health",
from_health);
}
}
macro_rules! same_incarnation {
// Unfortunately, Rust macros currently can't be used to generate
// the name of a function, so we have to provide one :(
($fn_name:ident, $from:expr, $to:expr) => {
#[test]
fn $fn_name() {
assert_only_insert_member_rumor_of_same_incarnation_if_health_is_worse(
$from, $to,
);
}
};
}
same_incarnation!(same_a_to_a, Health::Alive, Health::Alive);
same_incarnation!(same_a_to_s, Health::Alive, Health::Suspect);
same_incarnation!(same_a_to_c, Health::Alive, Health::Confirmed);
same_incarnation!(same_a_to_d, Health::Alive, Health::Departed);
same_incarnation!(same_s_to_a, Health::Suspect, Health::Alive);
same_incarnation!(same_s_to_s, Health::Suspect, Health::Suspect);
same_incarnation!(same_s_to_c, Health::Suspect, Health::Confirmed);
same_incarnation!(same_s_to_d, Health::Suspect, Health::Departed);
same_incarnation!(same_c_to_a, Health::Confirmed, Health::Alive);
same_incarnation!(same_c_to_s, Health::Confirmed, Health::Suspect);
same_incarnation!(same_c_to_c, Health::Confirmed, Health::Confirmed);
same_incarnation!(same_c_to_d, Health::Confirmed, Health::Departed);
same_incarnation!(same_d_to_a, Health::Departed, Health::Alive);
same_incarnation!(same_d_to_s, Health::Departed, Health::Suspect);
same_incarnation!(same_d_to_c, Health::Departed, Health::Confirmed);
same_incarnation!(same_d_to_d, Health::Departed, Health::Departed);
/// Tests that the transition from `from_health` to `to_health` for
/// `insert` works properly.
fn assert_insert_health_by_id_transition(from_health: Health, to_health: Health) {
let ml = MemberList::new();
let member_one = Member::default();
assert!(ml.insert_mlw(member_one.clone(), from_health),
"Should be able to insert initial health of {:?} into empty MemberList",
from_health);
assert_eq!(ml.health_of_mlr(&member_one)
.expect("Expected member to exist in health after initial insert, \
but it didn't"),
from_health,
"Member should have initial health {:?}",
from_health);
let update_counter_before = ml.get_update_counter();
match from_health.cmp(&to_health) {
Ordering::Greater => {
assert!(!ml.insert_mlw(member_one.clone(), to_health),
"Transitioning from {:?} to {:?} (i.e., no worse health) should \
be a no-op",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_before,
"Transitioning from {:?} to {:?} (i.e., no worse health) \
should not increment update counter",
from_health,
to_health);
assert_eq!(ml.health_of_mlr(&member_one)
.expect("Expected member to exist in health after update, \
but it didn't"),
from_health,
"Member should have retained old health {:?}",
from_health);
}
Ordering::Less => {
assert!(ml.insert_mlw(member_one.clone(), to_health),
"Transitioning from {:?} to {:?} (i.e., worse health) should NOT \
be a no-op",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_before + 1,
"Transitioning from {:?} to {:?} (i.e., different health) \
should increment update counter by one",
from_health,
to_health);
assert_eq!(ml.health_of_mlr(&member_one)
.expect("Expected member to exist in health after update, \
but it didn't"),
to_health,
"Member should have changed health from {:?} to {:?}",
from_health,
to_health);
}
Ordering::Equal => {
assert!(!ml.insert_mlw(member_one.clone(), to_health),
"Transitioning from {:?} to {:?} (i.e., no change) should be a \
no-op",
from_health,
to_health);
assert_eq!(ml.get_update_counter(),
update_counter_before,
"Transitioning from {:?} to {:?} (i.e., no change) should not \
increment update counter",
from_health,
to_health);
assert_eq!(ml.health_of_mlr(&member_one)
.expect("Expected member to exist in health after update, \
but it didn't"),
from_health,
"Member should have still have initial health {:?}",
from_health);
}
}
}
macro_rules! transition {
// Unfortunately, Rust macros currently can't be used to generate
// the name of a function, so we have to provide one :(
($fn_name:ident, $from:expr, $to:expr) => {
#[test]
fn $fn_name() { assert_insert_health_by_id_transition($from, $to); }
};
}
transition!(a_to_a, Health::Alive, Health::Alive);
transition!(a_to_s, Health::Alive, Health::Suspect);
transition!(a_to_c, Health::Alive, Health::Confirmed);
transition!(a_to_d, Health::Alive, Health::Departed);
transition!(s_to_a, Health::Suspect, Health::Alive);
transition!(s_to_s, Health::Suspect, Health::Suspect);
transition!(s_to_c, Health::Suspect, Health::Confirmed);
transition!(s_to_d, Health::Suspect, Health::Departed);
transition!(c_to_a, Health::Confirmed, Health::Alive);
transition!(c_to_s, Health::Confirmed, Health::Suspect);
transition!(c_to_c, Health::Confirmed, Health::Confirmed);
transition!(c_to_d, Health::Confirmed, Health::Departed);
transition!(d_to_a, Health::Departed, Health::Alive);
transition!(d_to_s, Health::Departed, Health::Suspect);
transition!(d_to_c, Health::Departed, Health::Confirmed);
transition!(d_to_d, Health::Departed, Health::Departed);
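        // A minimal sketch of the ordering assumption behind the transition
        // tests above (not an upstream test; it only restates the derived
        // `Ord` on `Health` that `insert` relies on): "worse" health is
        // "greater" health, so Alive < Suspect < Confirmed < Departed.
        #[test]
        fn health_ordering_matches_worseness() {
            assert!(Health::Alive < Health::Suspect);
            assert!(Health::Suspect < Health::Confirmed);
            assert!(Health::Confirmed < Health::Departed);
        }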
}
/// Testing of
///
/// - MemberList::members_expired_to_confirmed_mlw
/// - MemberList::members_expired_to_departed_mlw
mod timed_expiration {
use crate::member::{Health,
Member,
MemberList};
use std::{thread,
time::Duration};
#[test]
fn timing_out_from_suspect_to_confirmed() {
let ml = MemberList::new();
let member_one = Member::default();
let small_seconds = 1;
let large_seconds = 100_000;
let small_timeout = Duration::from_secs(small_seconds);
let large_timeout = Duration::from_secs(large_seconds);
assert!(ml.members_expired_to_confirmed_mlw(small_timeout)
.is_empty(),
"An empty MemberList shouldn't have anything that's timing out to being \
Confirmed");
assert!(ml.insert_mlw(member_one.clone(), Health::Alive));
assert!(ml.members_expired_to_confirmed_mlw(small_timeout)
.is_empty(),
"Should be no newly Confirmed members when they're all Alive");
assert!(ml.insert_mlw(member_one.clone(), Health::Suspect));
assert!(ml.members_expired_to_confirmed_mlw(large_timeout)
.is_empty(),
"Nothing should have timed out to Confirmed with a large timeout");
// Allow the Suspect to age
thread::sleep(Duration::from_secs(small_seconds));
let newly_confirmed = ml.members_expired_to_confirmed_mlw(small_timeout);
assert!(newly_confirmed.contains(&member_one.id),
"Member should be newly Confirmed after timing out");
assert_eq!(ml.health_of_mlr(&member_one),
Some(Health::Confirmed),
"Member should have a health of Confirmed after timing out");
}
#[test]
fn timing_out_from_confirmed_to_departed() {
let ml = MemberList::new();
let member_one = Member::default();
let small_seconds = 1;
let large_seconds = 100_000;
let small_timeout = Duration::from_secs(small_seconds);
let large_timeout = Duration::from_secs(large_seconds);
assert!(ml.members_expired_to_departed_mlw(small_timeout).is_empty(),
"An empty MemberList shouldn't have anything that's timing out to being \
Departed");
assert!(ml.insert_mlw(member_one.clone(), Health::Alive));
assert!(ml.members_expired_to_departed_mlw(small_timeout).is_empty(),
"Should be no newly Departed members when they're all Alive");
assert!(ml.insert_mlw(member_one.clone(), Health::Suspect));
assert!(ml.members_expired_to_departed_mlw(small_timeout).is_empty(),
"Should be no newly Departed members when they're all Confirmed");
assert!(ml.insert_mlw(member_one.clone(), Health::Confirmed));
assert!(ml.members_expired_to_departed_mlw(small_timeout).is_empty(),
"Should be no newly Departed members when they're all Confirmed");
assert!(ml.members_expired_to_departed_mlw(large_timeout).is_empty(),
"Nothing should have timed out to Departed with a large timeout");
// Allow the Confirmed to age
thread::sleep(Duration::from_secs(small_seconds));
let newly_departed = ml.members_expired_to_departed_mlw(small_timeout);
assert!(newly_departed.contains(&member_one.id),
"Member should be newly Departed after timing out");
assert_eq!(ml.health_of_mlr(&member_one),
Some(Health::Departed),
"Member should have a health of Departed after timing out");
}
#[test]
fn suspect_timeout_is_appropriately_selective() {
let ml = MemberList::new();
let member_1 = Member::default();
let member_2 = Member::default();
let member_3 = Member::default();
assert!(ml.insert_mlw(member_1.clone(), Health::Suspect));
thread::sleep(Duration::from_secs(1));
assert!(ml.insert_mlw(member_2.clone(), Health::Suspect));
thread::sleep(Duration::from_secs(2)); // Give us a bit of padding
assert!(ml.insert_mlw(member_3.clone(), Health::Suspect));
let timeout = Duration::from_secs(2);
let newly_confirmed = ml.members_expired_to_confirmed_mlw(timeout);
assert!(newly_confirmed.contains(&member_1.id),
"Member 1 should be newly Confirmed after timing out");
assert!(newly_confirmed.contains(&member_2.id),
"Member 2 should be newly Confirmed after timing out");
assert!(!newly_confirmed.contains(&member_3.id),
"Member 3 should NOT be newly Confirmed, because it hasn't timed out yet");
assert_eq!(ml.health_of_mlr(&member_1),
Some(Health::Confirmed),
"Member 1 should have a health of Confirmed after timing out");
assert_eq!(ml.health_of_mlr(&member_2),
Some(Health::Confirmed),
"Member 2 should have a health of Confirmed after timing out");
assert_eq!(ml.health_of_mlr(&member_3),
Some(Health::Suspect),
"Member 3 should still have a health of Suspect, because it hasn't \
timed out yet");
}
#[test]
fn confirmed_timeout_is_appropriately_selective() {
let ml = MemberList::new();
let member_1 = Member::default();
let member_2 = Member::default();
let member_3 = Member::default();
assert!(ml.insert_mlw(member_1.clone(), Health::Confirmed));
thread::sleep(Duration::from_secs(1));
assert!(ml.insert_mlw(member_2.clone(), Health::Confirmed));
thread::sleep(Duration::from_secs(2)); // Give us a bit of padding
assert!(ml.insert_mlw(member_3.clone(), Health::Confirmed));
let timeout = Duration::from_secs(2);
let newly_departed = ml.members_expired_to_departed_mlw(timeout);
assert!(newly_departed.contains(&member_1.id),
"Member 1 should be newly Departed after timing out");
assert!(newly_departed.contains(&member_2.id),
"Member 2 should be newly Departed after timing out");
assert!(!newly_departed.contains(&member_3.id),
"Member 3 should NOT be newly Departed, because it hasn't timed out yet");
assert_eq!(ml.health_of_mlr(&member_1),
Some(Health::Departed),
"Member 1 should have a health of Departed after timing out");
assert_eq!(ml.health_of_mlr(&member_2),
Some(Health::Departed),
"Member 2 should have a health of Departed after timing out");
assert_eq!(ml.health_of_mlr(&member_3),
Some(Health::Confirmed),
"Member 3 should still have a health of Confirmed, because it hasn't \
timed out yet");
}
}
}
}<|fim▁end|> | str::FromStr,
sync::atomic::{AtomicUsize,
Ordering},
time::{Duration, |
<|file_name|>mapclassify.py<|end_file_name|><|fim▁begin|>"""
A module of classification schemes for choropleth mapping.
"""
__author__ = "Sergio J. Rey"
__all__ = ['Map_Classifier', 'quantile', 'Box_Plot', 'Equal_Interval',
'Fisher_Jenks', 'Fisher_Jenks_Sampled', 'Jenks_Caspall',
'Jenks_Caspall_Forced', 'Jenks_Caspall_Sampled',
'Max_P_Classifier', 'Maximum_Breaks', 'Natural_Breaks',
'Quantiles', 'Percentiles', 'Std_Mean', 'User_Defined',
'gadf', 'K_classifiers']
from pysal.common import *
K = 5 # default number of classes in any map scheme with this as an argument
def quantile(y, k=4):
"""
Calculates the quantiles for an array
Parameters
----------
y : array
(n,1), values to classify
k : int
number of quantiles
Returns
-------
implicit : array
(n,1), quantile values
Examples
--------
>>> x = np.arange(1000)
>>> quantile(x)
array([ 249.75, 499.5 , 749.25, 999. ])
>>> quantile(x, k = 3)
array([ 333., 666., 999.])
>>>
    Note that if there are enough ties that the quantile values repeat, we
    collapse to pseudo quantiles, in which case the number of classes will be
    less than k.
>>> x = [1.0] * 100
>>> x.extend([3.0] * 40)
>>> len(x)
140
>>> y = np.array(x)
>>> quantile(y)
array([ 1., 3.])
"""
w = 100. / k
p = np.arange(w, 100 + w, w)
if p[-1] > 100.0:
p[-1] = 100.0
q = np.array([stats.scoreatpercentile(y, pct) for pct in p])
return np.unique(q)
def binC(y, bins):
"""
Bin categorical/qualitative data
Parameters
----------
y : array
(n,q), categorical values
bins : array
(k,1), unique values associated with each bin
    Returns
    -------
b : array
(n,q), bin membership, values between 0 and k-1
Examples
--------
>>> np.random.seed(1)
>>> x = np.random.randint(2, 8, (10, 3))
>>> bins = range(2, 8)
>>> x
array([[7, 5, 6],
[2, 3, 5],
[7, 2, 2],
[3, 6, 7],
[6, 3, 4],
[6, 7, 4],
[6, 5, 6],
[4, 6, 7],
[4, 6, 3],
[3, 2, 7]])
>>> y = binC(x, bins)
>>> y
array([[5, 3, 4],
[0, 1, 3],
[5, 0, 0],
[1, 4, 5],
[4, 1, 2],
[4, 5, 2],
[4, 3, 4],
[2, 4, 5],
[2, 4, 1],
[1, 0, 5]])
>>>
"""
if np.rank(y) == 1:
k = 1
n = np.shape(y)[0]
else:
n, k = np.shape(y)
b = np.zeros((n, k), dtype='int')
for i, bin in enumerate(bins):
b[np.nonzero(y == bin)] = i
# check for non-binned items and print a warning if needed
vals = set(y.flatten())
for val in vals:
if val not in bins:
print 'warning: value not in bin: ', val
print 'bins: ', bins
return b
def bin(y, bins):
"""
bin interval/ratio data
Parameters
----------
y : array
(n,q), values to bin
bins : array
(k,1), upper bounds of each bin (monotonic)
Returns
-------
b : array
        (n,q), bin membership, values between 0 and k-1
Examples
--------
>>> np.random.seed(1)
>>> x = np.random.randint(2, 20, (10, 3))
>>> bins = [10, 15, 20]
>>> b = bin(x, bins)
>>> x
array([[ 7, 13, 14],
[10, 11, 13],
[ 7, 17, 2],
[18, 3, 14],
[ 9, 15, 8],
[ 7, 13, 12],
[16, 6, 11],
[19, 2, 15],
[11, 11, 9],
[ 3, 2, 19]])
>>> b
array([[0, 1, 1],
[0, 1, 1],
[0, 2, 0],
[2, 0, 1],
[0, 1, 0],
[0, 1, 1],
[2, 0, 1],
[2, 0, 1],
[1, 1, 0],
[0, 0, 2]])
>>>
"""
if np.rank(y) == 1:
k = 1
n = np.shape(y)[0]
else:
n, k = np.shape(y)
b = np.zeros((n, k), dtype='int')
i = len(bins)
if type(bins) != list:
bins = bins.tolist()
binsc = copy.copy(bins)
while binsc:
i -= 1
c = binsc.pop(-1)
b[np.nonzero(y <= c)] = i
return b
def bin1d(x, bins):
"""
place values of a 1-d array into bins and determine counts of values in
each bin
Parameters
----------
x : array
(n, 1), values to bin
bins : array
(k,1), upper bounds of each bin (monotonic)
Returns
-------
binIds : array
1-d array of integer bin Ids
counts: int
number of elements of x falling in each bin
Examples
--------
>>> x = np.arange(100, dtype = 'float')
>>> bins = [25, 74, 100]
>>> binIds, counts = bin1d(x, bins)
>>> binIds
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2])
>>> counts
array([26, 49, 25])
"""
left = [-sys.maxint]
left.extend(bins[0:-1])
right = bins
cuts = zip(left, right)
k = len(bins)
binIds = np.zeros(x.shape, dtype='int')
while cuts:
k -= 1
l, r = cuts.pop(-1)
binIds += (x > l) * (x <= r) * k
counts = np.bincount(binIds)
return (binIds, counts)
def load_example():
"""
Helper function for doc tests
"""
import pysal
np.random.seed(10)
dat = pysal.open(pysal.examples.get_path('calempdensity.csv'))
cal = np.array([record[-1] for record in dat])
return cal
def natural_breaks(values, k=5, itmax=100):
"""
natural breaks helper function
"""
values = np.array(values)
n = len(values)
uv = np.unique(values)
uvk = len(uv)
if uvk < k:
print 'Warning: Not enough unique values in array to form k classes'
print "Warning: setting k to %d" % uvk
k = uvk
sids = np.random.permutation(range(len(uv)))[0:k]
seeds = uv[sids]
seeds.sort()
diffs = abs(np.matrix([values - seed for seed in seeds]))
c0 = diffs.argmin(axis=0)
c0 = np.array(c0)[0]
solving = True
solved = False
rk = range(k)
it = 0
while solving:
# get centroids of clusters
seeds = [np.median(values[c0 == c]) for c in rk]
seeds.sort()
# for each value find closest centroid
diffs = abs(np.matrix([values - seed for seed in seeds]))
# assign value to that centroid
c1 = diffs.argmin(axis=0)
c1 = np.array(c1)[0]
#compare new classids to previous
d = abs(c1 - c0)
if d.sum() == 0:
solving = False
solved = True
else:
c0 = c1
it += 1
if it == itmax:
solving = False
class_ids = c1
cuts = [max(values[c1 == c]) for c in rk]
return sids, seeds, diffs, class_ids, solved, it, cuts
def _fisher_jenks_means(values, classes=5, sort=True):
"""
Jenks Optimal (Natural Breaks) algorithm implemented in Python.
The original Python code comes from here:
http://danieljlewis.org/2010/06/07/jenks-natural-breaks-algorithm-in-python/
and is based on a JAVA and Fortran code available here:
https://stat.ethz.ch/pipermail/r-sig-geo/2006-March/000811.html
Returns class breaks such that classes are internally homogeneous while
assuring heterogeneity among classes.
"""
if sort:
values.sort()
mat1 = []
for i in range(0, len(values) + 1):
temp = []
for j in range(0, classes + 1):
temp.append(0)
mat1.append(temp)
mat2 = []
for i in range(0, len(values) + 1):
temp = []
for j in range(0, classes + 1):
temp.append(0)
mat2.append(temp)
for i in range(1, classes + 1):
mat1[1][i] = 1
mat2[1][i] = 0
for j in range(2, len(values) + 1):
mat2[j][i] = float('inf')
v = 0.0
for l in range(2, len(values) + 1):
s1 = 0.0
s2 = 0.0
w = 0.0
for m in range(1, l + 1):
i3 = l - m + 1
val = float(values[i3 - 1])
s2 += val * val
s1 += val
w += 1
v = s2 - (s1 * s1) / w
i4 = i3 - 1
if i4 != 0:
for j in range(2, classes + 1):
if mat2[l][j] >= (v + mat2[i4][j - 1]):
mat1[l][j] = i3
mat2[l][j] = v + mat2[i4][j - 1]
mat1[l][1] = 1
mat2[l][1] = v
k = len(values)
kclass = []
for i in range(0, classes + 1):
kclass.append(0)
kclass[classes] = float(values[len(values) - 1])
kclass[0] = float(values[0])
countNum = classes
while countNum >= 2:
pivot = mat1[k][countNum]
id = int(pivot - 2)
kclass[countNum - 1] = values[id]
k = int(pivot - 1)
countNum -= 1
return kclass
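# A minimal sketch of driving the helper above (hypothetical wrapper, not
# part of the module's public API): the returned list has classes + 1
# entries, the minimum of the data first and the k upper class bounds after
# it, which is why Fisher_Jenks._set_bins below drops element 0.
def _fisher_jenks_means_example():
    vals = [1.0, 2.0, 4.0, 5.0, 7.0, 9.0, 10.0, 20.0]
    kclass = _fisher_jenks_means(vals, classes=3)
    return kclass[1:]  # the 3 upper class bounds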
class Map_Classifier:
"""
Abstract class for all map classifications
For an array :math:`y` of :math:`n` values, a map classifier places each value
:math:`y_i` into one of :math:`k` mutually exclusive and exhaustive classes.
Each classifer defines the classes based on different criteria, but in all
cases the following hold for the classifiers in PySAL:
.. math::
        C_j^l < y_i \le C_j^u \quad \forall i \in C_j
where :math:`C_j` denotes class :math:`j` which has lower bound :math:`C_j^l` and upper bound :math:`C_j^u`.
Map Classifiers Supported
* :class:`~pysal.esda.mapclassify.Box_Plot`
* :class:`~pysal.esda.mapclassify.Equal_Interval`
* :class:`~pysal.esda.mapclassify.Fisher_Jenks`
* :class:`~pysal.esda.mapclassify.Fisher_Jenks_Sampled`
* :class:`~pysal.esda.mapclassify.Jenks_Caspall`
* :class:`~pysal.esda.mapclassify.Jenks_Caspall_Forced`
* :class:`~pysal.esda.mapclassify.Jenks_Caspall_Sampled`
* :class:`~pysal.esda.mapclassify.Max_P_Classifier`
* :class:`~pysal.esda.mapclassify.Maximum_Breaks`
* :class:`~pysal.esda.mapclassify.Natural_Breaks`
* :class:`~pysal.esda.mapclassify.Quantiles`
* :class:`~pysal.esda.mapclassify.Percentiles`
* :class:`~pysal.esda.mapclassify.Std_Mean`
* :class:`~pysal.esda.mapclassify.User_Defined`
Utilities:
In addition to the classifiers, there are several utility functions that can be used to evaluate the properties of a specific classifier for different parameter values, or for automatic selection of a classifier and number of classes.
* :func:`~pysal.esda.mapclassify.gadf`
* :class:`~pysal.esda.mapclassify.K_classifiers`
References
----------
Slocum, T.A., R.B. McMaster, F.C. Kessler and H.H. Howard (2009) *Thematic Cartography and Geovisualization*. Pearson Prentice Hall, Upper Saddle River.
"""
def __init__(self, y):
self.name = 'Map Classifier'
if hasattr(y, 'values'):
y = y.values # fix for pandas
self.y = y
self._classify()
self._summary()
def _summary(self):
yb = self.yb
self.classes = [np.nonzero(yb == c)[0].tolist() for c in range(self.k)]
self.tss = self.get_tss()
self.adcm = self.get_adcm()
self.gadf = self.get_gadf()
def _classify(self):
self._set_bins()
self.yb, self.counts = bin1d(self.y, self.bins)
def __str__(self):
st = self._table_string()
return st
def __repr__(self):
return self._table_string()
def get_tss(self):
"""
Total sum of squares around class means
Returns sum of squares over all class means
"""
tss = 0
for class_def in self.classes:
if len(class_def) > 0:
yc = self.y[class_def]
css = yc - yc.mean()
css *= css
tss += sum(css)
return tss
def _set_bins(self):
pass
def get_adcm(self):
"""
Absolute deviation around class median (ADCM).
Calculates the absolute deviations of each observation about its class
median as a measure of fit for the classification method.
Returns sum of ADCM over all classes
"""
adcm = 0
for class_def in self.classes:
if len(class_def) > 0:
yc = self.y[class_def]
yc_med = np.median(yc)
ycd = np.abs(yc - yc_med)
adcm += sum(ycd)
return adcm
def get_gadf(self):
"""
Goodness of absolute deviation of fit
"""
adam = (np.abs(self.y - np.median(self.y))).sum()
gadf = 1 - self.adcm / adam
return gadf
def _table_string(self, width=12, decimal=3):
fmt = ".%df" % decimal
fmt = "%" + fmt
largest = max([len(fmt % i) for i in self.bins])
width = largest
fmt = "%d.%df" % (width, decimal)
fmt = "%" + fmt
k1 = self.k - 1
h1 = "Lower"
h1 = h1.center(largest)
h2 = " "
h2 = h2.center(10)
h3 = "Upper"
h3 = h3.center(largest + 1)
largest = "%d" % max(self.counts)
largest = len(largest) + 15
h4 = "Count"
h4 = h4.rjust(largest)
table = []
header = h1 + h2 + h3 + h4
table.append(header)
table.append("=" * len(header))
rows = []
for i, up in enumerate(self.bins):
if i == 0:
left = " " * width
left += " x[i] <= "
else:
left = fmt % self.bins[i - 1]
left += " < x[i] <= "
right = fmt % self.bins[i]
row = left + right
cnt = "%d" % self.counts[i]
cnt = cnt.rjust(largest)
row += cnt
table.append(row)
name = self.name
top = name.center(len(row))
table.insert(0, top)
table.insert(1, " ")
table = "\n".join(table)
return table
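# A minimal sketch of the subclassing contract described above (hypothetical
# classifier, not part of PySAL): a concrete subclass sets self.k before
# delegating to Map_Classifier.__init__ and fills in self.bins inside
# _set_bins; the base class then derives yb, counts, classes, tss, adcm and
# gadf.
class _Midrange_Example(Map_Classifier):
    """Two classes split at the midrange (illustrative only)."""
    def __init__(self, y):
        self.k = 2
        Map_Classifier.__init__(self, y)
        self.name = 'Midrange Example'

    def _set_bins(self):
        y = self.y
        mid = (min(y) + max(y)) / 2.
        self.bins = np.array([mid, max(y)])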
class Equal_Interval(Map_Classifier):
"""
Equal Interval Classification
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
Attributes
----------
yb : array
(n,1), bin ids for observations,
each value is the id of the class the observation belongs to
yb[i] = j for j>=1 if bins[j-1] < y[i] <= bins[j], yb[i] = 0 otherwise
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> ei = Equal_Interval(cal, k = 5)
>>> ei.k
5
>>> ei.counts
array([57, 0, 0, 0, 1])
>>> ei.bins
array([ 822.394, 1644.658, 2466.922, 3289.186, 4111.45 ])
>>>
Notes
-----
Intervals defined to have equal width:
.. math::
bins_j = min(y)+w*(j+1)
    with :math:`w=\\frac{max(y)-min(y)}{k}`
"""
def __init__(self, y, k=K):
"""
see class docstring
"""
self.k = k
Map_Classifier.__init__(self, y)
self.name = 'Equal Interval'
def _set_bins(self):
y = self.y
k = self.k
max_y = max(y)
min_y = min(y)
rg = max_y - min_y
width = rg * 1. / k
cuts = np.arange(min_y + width, max_y + width, width)
if len(cuts) > self.k: # handle overshooting
cuts = cuts[0:k]
cuts[-1] = max_y
bins = cuts.copy()
self.bins = bins
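# Worked check of the width formula in the Notes above (hypothetical data,
# not an upstream doctest): with min(y) = 0, max(y) = 100 and k = 5,
# w = 100 / 5 = 20, so the upper bounds fall at 20, 40, 60, 80 and 100.
def _equal_interval_example():
    ei = Equal_Interval(np.arange(101), k=5)
    return ei.bins  # array([  20.,   40.,   60.,   80.,  100.])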
class Percentiles(Map_Classifier):
"""
Percentiles Map Classification
Parameters
----------
y : array
attribute to classify
pct : array
percentiles default=[1,10,50,90,99,100]
Attributes
----------
yb : array
bin ids for observations (numpy array n x 1)
bins : array
the upper bounds of each class (numpy array k x 1)
k : int
the number of classes
    counts : array
the number of observations falling in each class (numpy array k x 1)
Examples
--------
>>> cal = load_example()
>>> p = Percentiles(cal)
>>> p.bins
array([ 1.35700000e-01, 5.53000000e-01, 9.36500000e+00,
2.13914000e+02, 2.17994800e+03, 4.11145000e+03])
>>> p.counts
array([ 1, 5, 23, 23, 5, 1])
>>> p2 = Percentiles(cal, pct = [50, 100])
>>> p2.bins
array([ 9.365, 4111.45 ])
>>> p2.counts
array([29, 29])
>>> p2.k
2
"""
def __init__(self, y, pct=[1, 10, 50, 90, 99, 100]):
self.pct = pct
Map_Classifier.__init__(self, y)
self.name = 'Percentiles'
def _set_bins(self):
y = self.y
pct = self.pct
self.bins = np.array([stats.scoreatpercentile(y, p) for p in pct])
self.k = len(self.bins)
class Box_Plot(Map_Classifier):
"""
Box_Plot Map Classification
Parameters
----------
y : array
attribute to classify
hinge : float
multiplier for IQR
Attributes
----------
yb : array
(n,1), bin ids for observations
bins : array
(n,1), the upper bounds of each class (monotonic)
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
low_outlier_ids : array
indices of observations that are low outliers
high_outlier_ids : array
indices of observations that are high outliers
Notes
-----
The bins are set as follows::
bins[0] = q[0]-hinge*IQR
bins[1] = q[0]
bins[2] = q[1]
bins[3] = q[2]
bins[4] = q[2]+hinge*IQR
bins[5] = inf (see Notes)
where q is an array of the first three quartiles of y and
IQR=q[2]-q[0]
    If q[2]+hinge*IQR >= max(y) there will only be 5 classes and no high outliers,
otherwise, there will be 6 classes and at least one high outlier.
Examples
--------
>>> cal = load_example()
>>> bp = Box_Plot(cal)
>>> bp.bins
array([ -5.28762500e+01, 2.56750000e+00, 9.36500000e+00,
3.95300000e+01, 9.49737500e+01, 4.11145000e+03])
>>> bp.counts
array([ 0, 15, 14, 14, 6, 9])
>>> bp.high_outlier_ids
array([ 0, 6, 18, 29, 33, 36, 37, 40, 42])
>>> cal[bp.high_outlier_ids]
array([ 329.92, 181.27, 370.5 , 722.85, 192.05, 110.74,
4111.45, 317.11, 264.93])
>>> bx = Box_Plot(np.arange(100))
>>> bx.bins
array([ -49.5 , 24.75, 49.5 , 74.25, 148.5 ])
"""
def __init__(self, y, hinge=1.5):
"""
Parameters
----------
y : array (n,1)
attribute to classify
hinge : float
multiple of inter-quartile range (default=1.5)
"""
self.hinge = hinge
Map_Classifier.__init__(self, y)
self.name = 'Box Plot'
def _set_bins(self):
y = self.y
pct = [25, 50, 75, 100]
bins = [stats.scoreatpercentile(y, p) for p in pct]
iqr = bins[-2] - bins[0]
self.iqr = iqr
pivot = self.hinge * iqr
left_fence = bins[0] - pivot
right_fence = bins[-2] + pivot
if right_fence < bins[-1]:
bins.insert(-1, right_fence)
else:
bins[-1] = right_fence
bins.insert(0, left_fence)
self.bins = np.array(bins)
self.k = len(pct)
def _classify(self):
Map_Classifier._classify(self)
self.low_outlier_ids = np.nonzero(self.yb == 0)[0]
self.high_outlier_ids = np.nonzero(self.yb == 5)[0]
class Quantiles(Map_Classifier):
"""
Quantile Map Classification
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
Attributes
----------
yb : array
(n,1), bin ids for observations,
each value is the id of the class the observation belongs to
yb[i] = j for j>=1 if bins[j-1] < y[i] <= bins[j], yb[i] = 0 otherwise
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> q = Quantiles(cal, k = 5)
>>> q.bins
array([ 1.46400000e+00, 5.79800000e+00, 1.32780000e+01,
5.46160000e+01, 4.11145000e+03])
>>> q.counts
array([12, 11, 12, 11, 12])
>>>
"""
def __init__(self, y, k=K):
self.k = k
Map_Classifier.__init__(self, y)
self.name = 'Quantiles'
def _set_bins(self):
y = self.y
k = self.k
self.bins = quantile(y, k=k)
class Std_Mean(Map_Classifier):
"""
Standard Deviation and Mean Map Classification
Parameters
----------
y : array
(n,1), values to classify
multiples : array
the multiples of the standard deviation to add/subtract from
the sample mean to define the bins, default=[-2,-1,1,2]
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> st = Std_Mean(cal)
>>> st.k
5
>>> st.bins
array([ -967.36235382, -420.71712519, 672.57333208, 1219.21856072,
4111.45 ])
>>> st.counts
array([ 0, 0, 56, 1, 1])
>>>
>>> st3 = Std_Mean(cal, multiples = [-3, -1.5, 1.5, 3])
>>> st3.bins
array([-1514.00758246, -694.03973951, 945.8959464 , 1765.86378936,
4111.45 ])
>>> st3.counts
array([ 0, 0, 57, 0, 1])
>>>
"""
def __init__(self, y, multiples=[-2, -1, 1, 2]):
self.multiples = multiples
Map_Classifier.__init__(self, y)
self.name = 'Std_Mean'
def _set_bins(self):
y = self.y
s = y.std(ddof=1)
m = y.mean()
cuts = [m + s * w for w in self.multiples]
y_max = y.max()
if cuts[-1] < y_max:
cuts.append(y_max)
self.bins = np.array(cuts)
self.k = len(cuts)
class Maximum_Breaks(Map_Classifier):
"""
Maximum Breaks Map Classification
Parameters
----------
y : array
(n, 1), values to classify
k : int
number of classes required
mindiff : float
The minimum difference between class breaks
Attributes
----------
yb : array
(n, 1), bin ids for observations
bins : array
(k, 1), the upper bounds of each class
k : int
the number of classes
counts : array
        (k, 1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> mb = Maximum_Breaks(cal, k = 5)
>>> mb.k
5
>>> mb.bins
array([ 146.005, 228.49 , 546.675, 2417.15 , 4111.45 ])
>>> mb.counts
array([50, 2, 4, 1, 1])
>>>
"""
def __init__(self, y, k=5, mindiff=0):
self.k = k
self.mindiff = mindiff
Map_Classifier.__init__(self, y)
self.name = 'Maximum_Breaks'
def _set_bins(self):
xs = self.y.copy()
y = self.y.copy()
k = self.k
xs.sort()
min_diff = self.mindiff
d = xs[1:] - xs[:-1]
diffs = d[np.nonzero(d > min_diff)]
diffs = sp.unique(diffs)
k1 = k - 1
if len(diffs) > k1:
diffs = diffs[-k1:]
mp = []
self.cids = []
for diff in diffs:
ids = np.nonzero(d == diff)
for id in ids:
self.cids.append(id[0])
cp = ((xs[id] + xs[id + 1]) / 2.)
mp.append(cp[0])
mp.append(xs[-1])
mp.sort()
self.bins = np.array(mp)
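# A minimal sketch of the rule implemented by _set_bins above (hypothetical
# numbers, not an upstream doctest): the k-1 largest gaps in sorted y become
# the breaks, placed at the gap midpoints, and max(y) is appended as the
# final upper bound.
def _maximum_breaks_example():
    mb = Maximum_Breaks(np.array([1, 2, 3, 10, 11, 20]), k=3)
    return mb.bins  # gaps 3->10 and 11->20 give array([  6.5,  15.5,  20. ])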
class Natural_Breaks(Map_Classifier):
"""
Natural Breaks Map Classification
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
initial : int
number of initial solutions to generate, (default=100)
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> import numpy as np
>>> np.random.seed(10)
>>> cal = load_example()
>>> nb = Natural_Breaks(cal, k = 5)
>>> nb.k
5
>>> nb.counts
array([14, 13, 14, 10, 7])
>>> nb.bins
array([ 1.81000000e+00, 7.60000000e+00, 2.98200000e+01,
1.81270000e+02, 4.11145000e+03])
>>> x = np.array([1] * 50)
>>> x[-1] = 20
>>> nb = Natural_Breaks(x, k = 5, initial = 0)
Warning: Not enough unique values in array to form k classes
Warning: setting k to 2
>>> nb.bins
array([ 1, 20])
>>> nb.counts
array([49, 1])
Notes
-----
    There is a tradeoff here between speed and consistency of the
    classification.
    If you want more speed, set initial to a smaller value (0 gives the
    best speed); if you want more consistent classes over multiple runs
    of Natural_Breaks on the same data, set initial to a higher value.
"""
def __init__(self, y, k=K, initial=100):
self.k = k
self.initial = initial
Map_Classifier.__init__(self, y)
self.name = 'Natural_Breaks'
def _set_bins(self):
x = self.y.copy()
k = self.k
res0 = natural_breaks(x, k)
fit = res0[2].sum()
for i in xrange(self.initial):
res = natural_breaks(x, k)
fit_i = res[2].sum()
if fit_i < fit:
res0 = res
self.bins = np.array(res0[-1])
self.k = len(self.bins)
self.iterations = res0[-2]
class Fisher_Jenks(Map_Classifier):
"""
Fisher Jenks optimal classifier - mean based
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
Attributes
----------
yb : array
(n,1), bin ids for observations
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> fj = Fisher_Jenks(cal)
>>> fj.adcm
799.24000000000001
>>> fj.bins
array([ 75.29, 192.05, 370.5 , 722.85, 4111.45])
>>> fj.counts
array([49, 3, 4, 1, 1])
>>>
"""
def __init__(self, y, k=K):
nu = len(np.unique(y))
if nu < k:
raise ValueError("Fewer unique values than specified classes.")
self.k = k
Map_Classifier.__init__(self, y)
self.name = "Fisher_Jenks"
def _set_bins(self):
x = self.y.copy()
self.bins = np.array(_fisher_jenks_means(x, classes=self.k)[1:])
class Fisher_Jenks_Sampled(Map_Classifier):
"""
Fisher Jenks optimal classifier - mean based using random sample
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
pct : float
The percentage of n that should form the sample
If pct is specified such that n*pct > 1000, then
pct = 1000./n, unless truncate is False
truncate : boolean
truncate pct in cases where pct * n > 1000., (Default True)
Attributes
----------
yb : array
(n,1), bin ids for observations
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples<|fim▁hole|>
(Turned off due to timing being different across hardware)
"""
def __init__(self, y, k=K, pct=0.10, truncate=True):
self.k = k
n = y.size
if (pct * n > 1000) and truncate:
pct = 1000. / n
ids = np.random.random_integers(0, n - 1, n * pct)
yr = y[ids]
yr[-1] = max(y) # make sure we have the upper bound
yr[0] = min(y) # make sure we have the min
self.original_y = y
self.pct = pct
self.yr = yr
self.yr_n = yr.size
Map_Classifier.__init__(self, yr)
self.yb, self.counts = bin1d(y, self.bins)
self.name = "Fisher_Jenks_Sampled"
self.y = y
self._summary() # have to recalculate summary stats
def _set_bins(self):
fj = Fisher_Jenks(self.y, self.k)
self.bins = fj.bins
class Jenks_Caspall(Map_Classifier):
"""
Jenks Caspall Map Classification
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> jc = Jenks_Caspall(cal, k = 5)
>>> jc.bins
array([ 1.81000000e+00, 7.60000000e+00, 2.98200000e+01,
1.81270000e+02, 4.11145000e+03])
>>> jc.counts
array([14, 13, 14, 10, 7])
"""
def __init__(self, y, k=K):
self.k = k
Map_Classifier.__init__(self, y)
self.name = "Jenks_Caspall"
def _set_bins(self):
x = self.y.copy()
k = self.k
# start with quantiles
q = quantile(x, k)
solving = True
xb, cnts = bin1d(x, q)
#class means
if x.ndim == 1:
x.shape = (x.size, 1)
n, k = x.shape
xm = [np.median(x[xb == i]) for i in np.unique(xb)]
xb0 = xb.copy()
q = xm
it = 0
rk = range(self.k)
while solving:
xb = np.zeros(xb0.shape, int)
d = abs(x - q)
xb = d.argmin(axis=1)
if (xb0 == xb).all():
solving = False
else:
xb0 = xb
it += 1
q = np.array([np.median(x[xb == i]) for i in rk])
cuts = np.array([max(x[xb == i]) for i in sp.unique(xb)])
cuts.shape = (len(cuts),)
self.bins = cuts
self.iterations = it
class Jenks_Caspall_Sampled(Map_Classifier):
"""
Jenks Caspall Map Classification using a random sample
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
pct : float
The percentage of n that should form the sample
If pct is specified such that n*pct > 1000, then pct = 1000./n
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> x = np.random.random(100000)
>>> jc = Jenks_Caspall(x)
>>> jcs = Jenks_Caspall_Sampled(x)
>>> jc.bins
array([ 0.19770952, 0.39695769, 0.59588617, 0.79716865, 0.99999425])
>>> jcs.bins
array([ 0.18877882, 0.39341638, 0.6028286 , 0.80070925, 0.99999425])
>>> jc.counts
array([19804, 20005, 19925, 20178, 20088])
>>> jcs.counts
array([18922, 20521, 20980, 19826, 19751])
>>>
# not for testing since we get different times on different hardware
# just included for documentation of likely speed gains
#>>> t1 = time.time(); jc = Jenks_Caspall(x); t2 = time.time()
#>>> t1s = time.time(); jcs = Jenks_Caspall_Sampled(x); t2s = time.time()
#>>> t2 - t1; t2s - t1s
#1.8292930126190186
#0.061631917953491211
Notes
-----
This is intended for large n problems. The logic is to apply
Jenks_Caspall to a random subset of the y space and then bin the
complete vector y on the bins obtained from the subset. This would
trade off some "accuracy" for a gain in speed.
"""
def __init__(self, y, k=K, pct=0.10):
self.k = k
n = y.size
if pct * n > 1000:
pct = 1000. / n
ids = np.random.random_integers(0, n - 1, n * pct)
yr = y[ids]
yr[0] = max(y) # make sure we have the upper bound
self.original_y = y
self.pct = pct
self.yr = yr
self.yr_n = yr.size
Map_Classifier.__init__(self, yr)
self.yb, self.counts = bin1d(y, self.bins)
self.name = "Jenks_Caspall_Sampled"
self.y = y
self._summary() # have to recalculate summary stats
def _set_bins(self):
jc = Jenks_Caspall(self.y, self.k)
self.bins = jc.bins
self.iterations = jc.iterations
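# A minimal sketch of the sample-then-bin idea from the Notes above
# (hypothetical standalone helper, not part of the class API): classify a
# random subset, then bin the full vector on the subset's breaks.
def _sampled_bins_example(y, k=K, pct=0.10):
    ids = np.random.random_integers(0, y.size - 1, int(y.size * pct))
    yr = y[ids]  # fancy indexing copies, so y itself is untouched
    yr[-1] = max(y)  # keep the global upper bound in the sample
    breaks = Jenks_Caspall(yr, k).bins
    return bin1d(y, breaks)  # (bin ids, counts) for the full vector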
class Jenks_Caspall_Forced(Map_Classifier):
"""
Jenks Caspall Map Classification with forced movements
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
Attributes
----------
yb : array
(n,1), bin ids for observations
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> jcf = Jenks_Caspall_Forced(cal, k = 5)
>>> jcf.k
5
>>> jcf.bins
array([[ 1.34000000e+00],
[ 5.90000000e+00],
[ 1.67000000e+01],
[ 5.06500000e+01],
[ 4.11145000e+03]])
>>> jcf.counts
array([12, 12, 13, 9, 12])
>>> jcf4 = Jenks_Caspall_Forced(cal, k = 4)
>>> jcf4.k
4
>>> jcf4.bins
array([[ 2.51000000e+00],
[ 8.70000000e+00],
[ 3.66800000e+01],
[ 4.11145000e+03]])
>>> jcf4.counts
array([15, 14, 14, 15])
>>>
"""
def __init__(self, y, k=K):
self.k = k
Map_Classifier.__init__(self, y)
self.name = "Jenks_Caspall_Forced"
def _set_bins(self):
x = self.y.copy()
k = self.k
q = quantile(x, k)
solving = True
xb, cnt = bin1d(x, q)
#class means
if x.ndim == 1:
x.shape = (x.size, 1)
n, tmp = x.shape
xm = [x[xb == i].mean() for i in np.unique(xb)]
xb0 = xb.copy()
q = xm
xbar = np.array([xm[xbi] for xbi in xb])
xbar.shape = (n, 1)
ss = x - xbar
ss *= ss
ss = sum(ss)
maxk = k - 1
down_moves = up_moves = 0
solving = True
it = 0
while solving:
# try upward moves first
moving_up = True
while moving_up:
class_ids = sp.unique(xb)
nk = [sum(xb == j) for j in class_ids]
candidates = nk[:-1]
i = 0
up_moves = 0
while candidates:
nki = candidates.pop(0)
if nki > 1:
ids = np.nonzero(xb == class_ids[i])
mover = max(ids[0])
tmp = xb.copy()
tmp[mover] = xb[mover] + 1
tm = [x[tmp == j].mean() for j in sp.unique(tmp)]
txbar = np.array([tm[xbi] for xbi in tmp])
txbar.shape = (n, 1)
tss = x - txbar
tss *= tss
tss = sum(tss)
if tss < ss:
xb = tmp
ss = tss
candidates = []
up_moves += 1
i += 1
if not up_moves:
moving_up = False
moving_down = True
while moving_down:
class_ids = sp.unique(xb)
nk = [sum(xb == j) for j in class_ids]
candidates = nk[1:]
i = 1
down_moves = 0
while candidates:
nki = candidates.pop(0)
if nki > 1:
ids = np.nonzero(xb == class_ids[i])
mover = min(ids[0])
mover_class = xb[mover]
target_class = mover_class - 1
tmp = xb.copy()
tmp[mover] = target_class
tm = [x[tmp == j].mean() for j in sp.unique(tmp)]
txbar = np.array([tm[xbi] for xbi in tmp])
txbar.shape = (n, 1)
tss = x - txbar
tss *= tss
tss = sum(tss)
if tss < ss:
xb = tmp
ss = tss
candidates = []
down_moves += 1
i += 1
if not down_moves:
moving_down = False
if not up_moves and not down_moves:
solving = False
it += 1
cuts = [max(x[xb == i]) for i in sp.unique(xb)]
self.bins = np.array(cuts)
self.iterations = it
class User_Defined(Map_Classifier):
"""
User Specified Binning
Parameters
----------
y : array
(n,1), values to classify
bins : array
        (k,1), upper bounds of classes (have to be monotonically increasing)
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> cal = load_example()
>>> bins = [20, max(cal)]
>>> bins
[20, 4111.4499999999998]
>>> ud = User_Defined(cal, bins)
>>> ud.bins
array([ 20. , 4111.45])
>>> ud.counts
array([37, 21])
>>> bins = [20, 30]
>>> ud = User_Defined(cal, bins)
>>> ud.bins
array([ 20. , 30. , 4111.45])
>>> ud.counts
array([37, 4, 17])
>>>
Notes
-----
If upper bound of user bins does not exceed max(y) we append an
additional bin.
"""
def __init__(self, y, bins):
if bins[-1] < max(y):
bins.append(max(y))
self.k = len(bins)
self.bins = np.array(bins)
self.y = y
Map_Classifier.__init__(self, y)
self.name = 'User Defined'
def _set_bins(self):
pass
class Max_P_Classifier(Map_Classifier):
"""
Max_P Map Classification
Based on Max_p regionalization algorithm
Parameters
----------
y : array
(n,1), values to classify
k : int
number of classes required
initial : int
number of initial solutions to use prior to swapping
Attributes
----------
yb : array
(n,1), bin ids for observations,
bins : array
(k,1), the upper bounds of each class
k : int
the number of classes
counts : array
(k,1), the number of observations falling in each class
Examples
--------
>>> import pysal
>>> cal = pysal.esda.mapclassify.load_example()
>>> mp = pysal.Max_P_Classifier(cal)
>>> mp.bins
array([ 8.7 , 16.7 , 20.47, 66.26, 4111.45])
>>> mp.counts
array([29, 8, 1, 10, 10])
"""
def __init__(self, y, k=K, initial=1000):
self.k = k
self.initial = initial
Map_Classifier.__init__(self, y)
self.name = "Max_P"
def _set_bins(self):
x = self.y.copy()
k = self.k
q = quantile(x, k)
if x.ndim == 1:
x.shape = (x.size, 1)
n, tmp = x.shape
x.sort(axis=0)
# find best of initial solutions
solution = 0
best_tss = x.var() * x.shape[0]
tss_all = np.zeros((self.initial, 1))
while solution < self.initial:
remaining = range(n)
seeds = [np.nonzero(di == min(
di))[0][0] for di in [np.abs(x - qi) for qi in q]]
rseeds = np.random.permutation(range(k)).tolist()
tmp = [remaining.remove(seed) for seed in seeds]
self.classes = classes = []
tmp = [classes.append([seed]) for seed in seeds]
while rseeds:
seed_id = rseeds.pop()
current = classes[seed_id]
growing = True
while growing:
current = classes[seed_id]
low = current[0]
high = current[-1]
left = low - 1
right = high + 1
move_made = False
if left in remaining:
current.insert(0, left)
remaining.remove(left)
move_made = True
if right in remaining:
current.append(right)
remaining.remove(right)
move_made = True
if move_made:
classes[seed_id] = current
else:
growing = False
tss = _fit(self.y, classes)
tss_all[solution] = tss
if tss < best_tss:
best_solution = classes
best_it = solution
best_tss = tss
solution += 1
classes = best_solution
self.best_it = best_it
self.tss = best_tss
self.a2c = a2c = {}
self.tss_all = tss_all
for r, cl in enumerate(classes):
for a in cl:
a2c[a] = r
swapping = True
it = 0
while swapping:
rseeds = np.random.permutation(range(k)).tolist()
total_moves = 0
while rseeds:
id = rseeds.pop()
growing = True
total_moves = 0
while growing:
target = classes[id]
left = target[0] - 1
right = target[-1] + 1
n_moves = 0
if left in a2c:
left_class = classes[a2c[left]]
if len(left_class) > 1:
a = left_class[-1]
if self._swap(left_class, target, a):
target.insert(0, a)
left_class.remove(a)
a2c[a] = id
n_moves += 1
if right in a2c:
right_class = classes[a2c[right]]
if len(right_class) > 1:
a = right_class[0]
if self._swap(right_class, target, a):
target.append(a)
right_class.remove(a)
n_moves += 1
a2c[a] = id
if not n_moves:
growing = False
total_moves += n_moves
if not total_moves:
swapping = False
xs = self.y.copy()
xs.sort()
self.bins = np.array([xs[cl][-1] for cl in classes])
def _ss(self, class_def):
"""calculates sum of squares for a class"""
yc = self.y[class_def]
css = yc - yc.mean()
css *= css
return sum(css)
def _swap(self, class1, class2, a):
"""evaluate cost of moving a from class1 to class2"""
ss1 = self._ss(class1)
ss2 = self._ss(class2)
tss1 = ss1 + ss2
class1c = copy.copy(class1)
class2c = copy.copy(class2)
class1c.remove(a)
class2c.append(a)
ss1 = self._ss(class1c)
ss2 = self._ss(class2c)
tss2 = ss1 + ss2
if tss1 < tss2:
return False
else:
return True
def _fit(y, classes):
"""Calculate the total sum of squares for a vector y classified into
classes
Parameters
----------
y : array
(n,1), variable to be classified
classes : array
(k,1), integer values denoting class membership
"""
tss = 0
for class_def in classes:
yc = y[class_def]
css = yc - yc.mean()
css *= css
tss += sum(css)
return tss
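# Worked illustration of _fit (hypothetical numbers): two classes over
# y = [1, 2, 10] give tss = (1 - 1.5)**2 + (2 - 1.5)**2 + 0 = 0.5.
def _fit_example():
    return _fit(np.array([1., 2., 10.]), [[0, 1], [2]])  # 0.5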
kmethods = {}
kmethods["Quantiles"] = Quantiles
kmethods["Fisher_Jenks"] = Fisher_Jenks
kmethods['Natural_Breaks'] = Natural_Breaks
kmethods['Maximum_Breaks'] = Maximum_Breaks
def gadf(y, method="Quantiles", maxk=15, pct=0.8):
"""
Evaluate the Goodness of Absolute Deviation Fit of a Classifier
Finds the minimum value of k for which gadf>pct
Parameters
----------
y : array
(n, 1) values to be classified
    method : {'Quantiles', 'Fisher_Jenks', 'Maximum_Breaks', 'Natural_Breaks'}
maxk : int
maximum value of k to evaluate
pct : float
The percentage of GADF to exceed
Returns
-------
k : int
number of classes
cl : object
instance of the classifier at k
gadf : float
goodness of absolute deviation fit
Examples
--------
>>> cal = load_example()
>>> qgadf = gadf(cal)
>>> qgadf[0]
15
>>> qgadf[-1]
0.37402575909092828
Quantiles fail to exceed 0.80 before 15 classes. If we lower the bar to
0.2 we see quintiles as a result
>>> qgadf2 = gadf(cal, pct = 0.2)
>>> qgadf2[0]
5
>>> qgadf2[-1]
0.21710231966462412
>>>
Notes
-----
The GADF is defined as:
.. math::
GADF = 1 - \sum_c \sum_{i \in c} |y_i - y_{c,med}| / \sum_i |y_i - y_{med}|
where :math:`y_{med}` is the global median and :math:`y_{c,med}` is
the median for class :math:`c`.
See Also
--------
K_classifiers
"""
y = np.array(y)
adam = (np.abs(y - np.median(y))).sum()
for k in range(2, maxk + 1):
cl = kmethods[method](y, k)
gadf = 1 - cl.adcm / adam
if gadf > pct:
break
return (k, cl, gadf)
class K_classifiers:
"""
Evaluate all k-classifers and pick optimal based on k and GADF
Parameters
----------
y : array
(n,1), values to be classified
pct : float
The percentage of GADF to exceed
Attributes
----------
best : object
instance of the optimal Map_Classifier
results : dictionary
keys are classifier names, values are the Map_Classifier instances with the best pct for each classifer
Examples
--------
>>> cal = load_example()
>>> ks = K_classifiers(cal)
>>> ks.best.name
'Fisher_Jenks'
>>> ks.best.k
4
>>> ks.best.gadf
0.84810327199081048
>>>
Notes
-----
This can be used to suggest a classification scheme.
See Also
--------
gadf
"""
def __init__(self, y, pct=0.8):
results = {}
c = 0
best = gadf(y, "Fisher_Jenks", maxk=len(y) - 1, pct=pct)
        k0 = best[0]
        pct0 = best[-1]
keys = kmethods.keys()
keys.remove("Fisher_Jenks")
results["Fisher_Jenks"] = best
for method in keys:
results[method] = gadf(y, method, maxk=len(y) - 1, pct=pct)
k1 = results[method][0]
pct1 = results[method][-1]
if (k1 < k0) or (k1 == k0 and pct0 < pct1):
best = results[method]
k0 = k1
pct0 = pct1
self.results = results
self.best = best[1]
def fj(x, k=5):
y = x.copy()
y.sort()
d = {}
initial = opt_part(y)
# d has key = number of groups
# value: list of ids, list of group tss, group size
split_id = [initial[0]]
tss = initial[1:] # left and right within tss
    sizes = [initial[0], len(y) - initial[0]]
d[2] = [split_id, tss, sizes]
return d
def opt_part(x):
"""
Find optimal bi-partition of x values
Parameters
-----------
x : array
(n,1), Array of attribute values
Returns
-------
opt_i : int
partition index
tss : float
              total sum of squares
left_min : float
variance to the left of the break (including the break)
right_min : float
variance to the right of the break
"""
n = len(x)
tss = np.inf
opt_i = -999
for i in xrange(1, n):
left = x[:i].var() * i
right = x[i:].var() * (n - i)
tss_i = left + right
if tss_i < tss:
opt_i = i
tss = tss_i
left_min = left
right_min = right
return (opt_i, tss, left_min, right_min)<|fim▁end|> | -------- |
<|file_name|>FBXLoader2.js<|end_file_name|><|fim▁begin|>/**
* @author Kyle-Larson https://github.com/Kyle-Larson
*
* Loader loads FBX file and generates Group representing FBX scene.
* Requires FBX file to be >= 7.0 and in ASCII format.
*
* Supports:
* Mesh Generation (Positional Data)
* Normal Data (Per Vertex Drawing Instance)
* UV Data (Per Vertex Drawing Instance)
* Skinning
* Animation
* - Separated Animations based on stacks.
* - Skeletal & Non-Skeletal Animations
*
* Needs Support:
* Indexed Buffers
* PreRotation support.
*/
( function () {
/**
* Generates a loader for loading FBX files from URL and parsing into
* a THREE.Group.
* @param {THREE.LoadingManager} manager - Loading Manager for loader to use.
*/
THREE.FBXLoader = function ( manager ) {
THREE.Loader.call( this );
this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager;
this.fileLoader = new THREE.FileLoader( this.manager );
this.textureLoader = new THREE.TextureLoader( this.manager );
};
Object.assign( THREE.FBXLoader.prototype, THREE.Loader.prototype );
THREE.FBXLoader.prototype.constructor = THREE.FBXLoader;
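	// A minimal usage sketch (hypothetical helper and wiring, not part of the
	// loader API): shows the intended load() callback flow; the parsed
	// THREE.Group carries its AnimationClips on group.animations.
	function exampleLoadFBX( url, scene ) {

		var loader = new THREE.FBXLoader();
		loader.load( url, function ( group ) {

			scene.add( group );

		} );

	}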
Object.assign( THREE.FBXLoader.prototype, {
/**
* Loads an ASCII FBX file from URL and parses into a THREE.Group.
* THREE.Group will have an animations property of AnimationClips
* of the different animations exported with the FBX.
* @param {string} url - URL of the FBX file.
* @param {function(THREE.Group):void} onLoad - Callback for when FBX file is loaded and parsed.
* @param {function(ProgressEvent):void} onProgress - Callback fired periodically when file is being retrieved from server.
* @param {function(Event):void} onError - Callback fired when error occurs (Currently only with retrieving file, not with parsing errors).
*/
load: function ( url, onLoad, onProgress, onError ) {
var self = this;
var resourceDirectory = url.split( /[\\\/]/ );
resourceDirectory.pop();
resourceDirectory = resourceDirectory.join( '/' );
this.fileLoader.load( url, function ( text ) {
if ( ! isFbxFormatASCII( text ) ) {
console.error( 'FBXLoader: FBX Binary format not supported.' );
self.manager.itemError( url );
return;
}
if ( getFbxVersion( text ) < 7000 ) {
console.error( 'FBXLoader: FBX version not supported for file at ' + url + ', FileVersion: ' + getFbxVersion( text ) );
self.manager.itemError( url );
return;
}
var scene = self.parse( text, resourceDirectory );
onLoad( scene );
}, onProgress, onError );
},
/**
* Parses an ASCII FBX file and returns a THREE.Group.
* THREE.Group will have an animations property of AnimationClips
* of the different animations within the FBX file.
* @param {string} FBXText - Contents of FBX file to parse.
* @param {string} resourceDirectory - Directory to load external assets (e.g. textures ) from.
* @returns {THREE.Group}
*/
parse: function ( FBXText, resourceDirectory ) {
var loader = this;
var FBXTree = new TextParser().parse( FBXText );
var connections = parseConnections( FBXTree );
var textures = parseTextures( FBXTree );
var materials = parseMaterials( FBXTree, textures, connections );
var deformerMap = parseDeformers( FBXTree, connections );
var geometryMap = parseGeometries( FBXTree, connections, deformerMap );
var sceneGraph = parseScene( FBXTree, connections, deformerMap, geometryMap, materials );
return sceneGraph;
/**
* @typedef {{value: number}} FBXValue
*/
/**
* @typedef {{value: {x: string, y: string, z: string}}} FBXVector3
*/
/**
* @typedef {{properties: {a: string}}} FBXArrayNode
*/
/**
* @typedef {{properties: {MappingInformationType: string, ReferenceInformationType: string }, subNodes: Object<string, FBXArrayNode>}} FBXMappedArrayNode
*/
/**
* @typedef {{id: number, name: string, properties: {FileName: string}}} FBXTextureNode
*/
/**
* @typedef {{id: number, attrName: string, properties: {ShadingModel: string, Diffuse: FBXVector3, Specular: FBXVector3, Shininess: FBXValue, Emissive: FBXVector3, EmissiveFactor: FBXValue, Opacity: FBXValue}}} FBXMaterialNode
*/
/**
* @typedef {{subNodes: {Indexes: FBXArrayNode, Weights: FBXArrayNode, Transform: FBXArrayNode, TransformLink: FBXArrayNode}, properties: { Mode: string }}} FBXSubDeformerNode
*/
/**
* @typedef {{id: number, attrName: string, attrType: string, subNodes: {Vertices: FBXArrayNode, PolygonVertexIndex: FBXArrayNode, LayerElementNormal: FBXMappedArrayNode[], LayerElementMaterial: FBXMappedArrayNode[], LayerElementUV: FBXMappedArrayNode[]}}} FBXGeometryNode
*/
/**
* @typedef {{id: number, attrName: string, attrType: string, properties: {Lcl_Translation: FBXValue, Lcl_Rotation: FBXValue, Lcl_Scaling: FBXValue}}} FBXModelNode
*/
/**
* Parses map of relationships between objects.
* @param {{Connections: { properties: { connections: [number, number, string][]}}}} FBXTree
* @returns {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>}
*/
function parseConnections( FBXTree ) {
/**
* @type {Map<number, { parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>}
*/
var connectionMap = new Map();
if ( 'Connections' in FBXTree ) {
/**
* @type {[number, number, string][]}
*/
var connectionArray = FBXTree.Connections.properties.connections;
connectionArray.forEach( function ( connection ) {
if ( ! connectionMap.has( connection[ 0 ] ) ) {
connectionMap.set( connection[ 0 ], {
parents: [],
children: []
} );
}
var parentRelationship = { ID: connection[ 1 ], relationship: connection[ 2 ] };
connectionMap.get( connection[ 0 ] ).parents.push( parentRelationship );
if ( ! connectionMap.has( connection[ 1 ] ) ) {
connectionMap.set( connection[ 1 ], {
parents: [],
children: []
} );
}
var childRelationship = { ID: connection[ 0 ], relationship: connection[ 2 ] };
connectionMap.get( connection[ 1 ] ).children.push( childRelationship );
} );
}
return connectionMap;
}
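// For illustration, a raw connection triple [ 123, 456, 'OO' ] produces
// map entries roughly like (a sketch, not actual runtime output):
//   connectionMap.get( 123 ).parents  -> [ { ID: 456, relationship: 'OO' } ]
//   connectionMap.get( 456 ).children -> [ { ID: 123, relationship: 'OO' } ]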
/**
* Parses map of textures referenced in FBXTree.
* @param {{Objects: {subNodes: {Texture: Object.<string, FBXTextureNode>}}}} FBXTree
* @returns {Map<number, THREE.Texture>}
*/
function parseTextures( FBXTree ) {
/**
* @type {Map<number, THREE.Texture>}
*/
var textureMap = new Map();
if ( 'Texture' in FBXTree.Objects.subNodes ) {
var textureNodes = FBXTree.Objects.subNodes.Texture;
for ( var nodeID in textureNodes ) {
var texture = parseTexture( textureNodes[ nodeID ] );
textureMap.set( parseInt( nodeID ), texture );
}
}
return textureMap;
/**
* @param {FBXTextureNode} textureNode - Node to get texture information from.
* @returns {THREE.Texture}
*/
function parseTexture( textureNode ) {
var FBX_ID = textureNode.id;
var name = textureNode.name;
var filePath = textureNode.properties.FileName;
// split() always yields at least one element, so the last entry is the file name.
var split = filePath.split( /[\\\/]/ );
var fileName = split[ split.length - 1 ];
/**
* @type {THREE.Texture}
*/
var texture = loader.textureLoader.load( resourceDirectory + '/' + fileName );
texture.name = name;
texture.FBX_ID = FBX_ID;
return texture;
}
}
/**
* Parses map of Material information.
* @param {{Objects: {subNodes: {Material: Object.<number, FBXMaterialNode>}}}} FBXTree
* @param {Map<number, THREE.Texture>} textureMap
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @returns {Map<number, THREE.Material>}
*/
function parseMaterials( FBXTree, textureMap, connections ) {
var materialMap = new Map();
if ( 'Material' in FBXTree.Objects.subNodes ) {
var materialNodes = FBXTree.Objects.subNodes.Material;
for ( var nodeID in materialNodes ) {
var material = parseMaterial( materialNodes[ nodeID ], textureMap, connections );
materialMap.set( parseInt( nodeID ), material );
}
}
return materialMap;
/**
* Takes information from Material node and returns a generated THREE.Material
* @param {FBXMaterialNode} materialNode
* @param {Map<number, THREE.Texture>} textureMap
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @returns {THREE.Material}
*/
function parseMaterial( materialNode, textureMap, connections ) {
var FBX_ID = materialNode.id;
var name = materialNode.attrName;
var type = materialNode.properties.ShadingModel;
var children = connections.get( FBX_ID ).children;
var parameters = parseParameters( materialNode.properties, textureMap, children );
var material;
switch ( type ) {
case 'phong':
material = new THREE.MeshPhongMaterial();
break;
case 'lambert':
material = new THREE.MeshLambertMaterial();
break;
default:
console.warn( 'No implementation given for material type ' + type + ' in FBXLoader.js. Defaulting to basic material' );
material = new THREE.MeshBasicMaterial( { color: 0x3300ff } );
break;
}
material.setValues( parameters );
material.name = name;
return material;
/**
* @typedef {{Diffuse: FBXVector3, Specular: FBXVector3, Shininess: FBXValue, Emissive: FBXVector3, EmissiveFactor: FBXValue, Opacity: FBXValue}} FBXMaterialProperties
*/
/**
* @typedef {{color: THREE.Color=, specular: THREE.Color=, shininess: number=, emissive: THREE.Color=, emissiveIntensity: number=, opacity: number=, transparent: boolean=, map: THREE.Texture=}} THREEMaterialParameterPack
*/
/**
* @param {FBXMaterialProperties} properties
* @param {Map<number, THREE.Texture>} textureMap
* @param {{ID: number, relationship: string}[]} childrenRelationships
* @returns {THREEMaterialParameterPack}
*/
function parseParameters( properties, textureMap, childrenRelationships ) {
var parameters = {};
if ( properties.Diffuse ) {
parameters.color = parseColor( properties.Diffuse );
}
if ( properties.Specular ) {
parameters.specular = parseColor( properties.Specular );
}
if ( properties.Shininess ) {
parameters.shininess = properties.Shininess.value;
}
if ( properties.Emissive ) {
parameters.emissive = parseColor( properties.Emissive );
}
if ( properties.EmissiveFactor ) {
parameters.emissiveIntensity = properties.EmissiveFactor.value;
}
if ( properties.Opacity ) {
parameters.opacity = properties.Opacity.value;
}
if ( parameters.opacity < 1.0 ) {
parameters.transparent = true;
}
childrenRelationships.forEach( function ( relationship ) {
var type = relationship.relationship;
switch ( type ) {
case " \"AmbientColor":
//TODO: Support AmbientColor textures
break;
case " \"DiffuseColor":
parameters.map = textureMap.get( relationship.ID );
break;
default:
console.warn( 'Unknown texture application of type ' + type + ', skipping texture' );
break;
}
} );
return parameters;
}
}
}
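// Rough mapping applied above, for reference (a sketch of the FBX -> three.js
// property translation, assuming a phong or lambert material):
//   Diffuse   -> material.color         Specular    -> material.specular
//   Shininess -> material.shininess     Emissive    -> material.emissive
//   EmissiveFactor -> emissiveIntensity Opacity     -> opacity / transparent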
/**
* Generates map of Skeleton-like objects for use later when generating and binding skeletons.
* @param {{Objects: {subNodes: {Deformer: Object.<number, FBXSubDeformerNode>}}}} FBXTree
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @returns {Map<number, {map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[], skeleton: THREE.Skeleton|null}>}
*/
function parseDeformers( FBXTree, connections ) {
var skeletonMap = new Map();
if ( 'Deformer' in FBXTree.Objects.subNodes ) {
var DeformerNodes = FBXTree.Objects.subNodes.Deformer;
for ( var nodeID in DeformerNodes ) {
var deformerNode = DeformerNodes[ nodeID ];
if ( deformerNode.attrType === 'Skin' ) {
var conns = connections.get( parseInt( nodeID ) );
var skeleton = parseSkeleton( conns, DeformerNodes );
skeleton.FBX_ID = parseInt( nodeID );
skeletonMap.set( parseInt( nodeID ), skeleton );
}
}
}
return skeletonMap;
/**
* Generates a "Skeleton Representation" of FBX nodes based on an FBX Skin Deformer's connections and an object containing SubDeformer nodes.
* @param {{parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}} connections
* @param {Object.<number, FBXSubDeformerNode>} DeformerNodes
* @returns {{map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[], skeleton: THREE.Skeleton|null}}
*/
function parseSkeleton( connections, DeformerNodes ) {
var subDeformers = new Map();
var subDeformerArray = [];
connections.children.forEach( function ( child ) {
var subDeformerNode = DeformerNodes[ child.ID ];
var subDeformer = {
FBX_ID: child.ID,
indices: parseIntArray( subDeformerNode.subNodes.Indexes.properties.a ),
weights: parseFloatArray( subDeformerNode.subNodes.Weights.properties.a ),
transform: parseMatrixArray( subDeformerNode.subNodes.Transform.properties.a ),
transformLink: parseMatrixArray( subDeformerNode.subNodes.TransformLink.properties.a ),
linkMode: subDeformerNode.properties.Mode
};
subDeformers.set( child.ID, subDeformer );
subDeformerArray.push( subDeformer );
} );
return {
map: subDeformers,
array: subDeformerArray,
bones: []
};
}
}
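// Note on linkMode: FBX skin deformers commonly use the link modes
// 'Normalize', 'Additive' and 'Total1' (names per the FBX SDK; listed here
// as background, since this loader stores but does not branch on the mode).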
/**
* Generates Buffer geometries from geometry information in FBXTree, and generates map of THREE.BufferGeometries
* @param {{Objects: {subNodes: {Geometry: Object.<number, FBXGeometryNode}}}} FBXTree
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @param {Map<number, {map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[], skeleton: THREE.Skeleton|null}>} deformerMap
* @returns {Map<number, THREE.BufferGeometry>}
*/
function parseGeometries( FBXTree, connections, deformerMap ) {
var geometryMap = new Map();
if ( 'Geometry' in FBXTree.Objects.subNodes ) {
var geometryNodes = FBXTree.Objects.subNodes.Geometry;
for ( var nodeID in geometryNodes ) {
var relationships = connections.get( parseInt( nodeID ) );
var geo = parseGeometry( geometryNodes[ nodeID ], relationships, deformerMap );
geometryMap.set( parseInt( nodeID ), geo );
}
}
return geometryMap;
/**
* Generates BufferGeometry from FBXGeometryNode.
* @param {FBXGeometryNode} geometryNode
* @param {{parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}} relationships
* @param {Map<number, {map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[]}>} deformerMap
* @returns {THREE.BufferGeometry}
*/
function parseGeometry( geometryNode, relationships, deformerMap ) {
switch ( geometryNode.attrType ) {
case 'Mesh':
return parseMeshGeometry( geometryNode, relationships, deformerMap );
case 'NurbsCurve':
return parseNurbsGeometry( geometryNode, relationships );
}
/**
* Specialty function for parsing Mesh based Geometry Nodes.
* @param {FBXGeometryNode} geometryNode
* @param {{parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}} relationships - Object representing relationships between specific geometry node and other nodes.
* @param {Map<number, {map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[]}>} deformerMap - Map object of deformers and subDeformers by ID.
* @returns {THREE.BufferGeometry}
*/
function parseMeshGeometry( geometryNode, relationships, deformerMap ) {
var FBX_ID = geometryNode.id;
var name = geometryNode.attrName;
for ( var i = 0; i < relationships.children.length; ++ i ) {
if ( deformerMap.has( relationships.children[ i ].ID ) ) {
var deformer = deformerMap.get( relationships.children[ i ].ID );
break;
}
}
var geometry = genGeometry( geometryNode, deformer );
return geometry;
/**
* @param {{map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[]}} deformer - Skeleton representation for geometry instance.
* @returns {THREE.BufferGeometry}
*/
function genGeometry( geometryNode, deformer ) {
var geometry = new Geometry();
//First, each index is going to be its own vertex.
var vertexBuffer = parseFloatArray( geometryNode.subNodes.Vertices.properties.a );
var indexBuffer = parseIntArray( geometryNode.subNodes.PolygonVertexIndex.properties.a );
if ( 'LayerElementNormal' in geometryNode.subNodes ) {
var normalInfo = getNormals( geometryNode );
}
if ( 'LayerElementUV' in geometryNode.subNodes ) {
var uvInfo = getUVs( geometryNode );
}
if ( 'LayerElementMaterial' in geometryNode.subNodes ) {
var materialInfo = getMaterials( geometryNode );
}
var faceVertexBuffer = [];
var polygonIndex = 0;
for ( var polygonVertexIndex = 0; polygonVertexIndex < indexBuffer.length; ++ polygonVertexIndex ) {
var endOfFace;
var vertexIndex = indexBuffer[ polygonVertexIndex ];
if ( indexBuffer[ polygonVertexIndex ] < 0 ) {
vertexIndex = vertexIndex ^ - 1;
indexBuffer[ polygonVertexIndex ] = vertexIndex;
endOfFace = true;
}
var vertex = new Vertex();
var weightIndices = [];
var weights = [];
vertex.position.fromArray( vertexBuffer, vertexIndex * 3 );
// If we have a deformer for this geometry, get the skinIndex and skinWeights for this object.
// They are stored as vertex indices on each deformer, and we need them as deformer indices
// for each vertex.
if ( deformer ) {
for ( var j = 0; j < deformer.array.length; ++ j ) {
var index = deformer.array[ j ].indices.findIndex( function ( index ) {
return index === indexBuffer[ polygonVertexIndex ];
} );
if ( index !== - 1 ) {
weights.push( deformer.array[ j ].weights[ index ] );
weightIndices.push( j );
}
}
if ( weights.length > 4 ) {
console.warn( 'FBXLoader: Vertex has more than 4 skinning weights assigned; keeping the 4 largest.' );
var WIndex = [ 0, 0, 0, 0 ];
var Weight = [ 0, 0, 0, 0 ];
// Note: this loop must not reuse the outer polygonVertexIndex counter,
// otherwise the face iteration above is corrupted.
for ( var weightIndex = 0; weightIndex < weights.length; ++ weightIndex ) {
var currentWeight = weights[ weightIndex ];
var currentIndex = weightIndices[ weightIndex ];
for ( var j = 0; j < Weight.length; ++ j ) {
if ( currentWeight > Weight[ j ] ) {
var tmp = Weight[ j ];
Weight[ j ] = currentWeight;
currentWeight = tmp;
tmp = WIndex[ j ];
WIndex[ j ] = currentIndex;
currentIndex = tmp;
}
}
}
weightIndices = WIndex;
weights = Weight;
}
for ( var i = weights.length; i < 4; i ++ ) {
weights[ i ] = 0;
weightIndices[ i ] = 0;
}
vertex.skinWeights.fromArray( weights );
vertex.skinIndices.fromArray( weightIndices );
//vertex.skinWeights.normalize();
}
if ( normalInfo ) {
vertex.normal.fromArray( getData( polygonVertexIndex, polygonIndex, vertexIndex, normalInfo ) );
}
if ( uvInfo ) {
vertex.uv.fromArray( getData( polygonVertexIndex, polygonIndex, vertexIndex, uvInfo ) );
}
//Add vertex to face buffer.
faceVertexBuffer.push( vertex );
// If index was negative to start with, we have finished this individual face
// and can generate the face data to the geometry.
if ( endOfFace ) {
var face = new Face();
// Geometries without a material layer fall back to material index 0.
var materials = materialInfo ? getData( polygonVertexIndex, polygonIndex, vertexIndex, materialInfo ) : [ 0 ];
face.genTrianglesFromVertices( faceVertexBuffer );
face.materialIndex = materials[ 0 ];
geometry.faces.push( face );
faceVertexBuffer = [];
polygonIndex ++;
endOfFace = false;
}
}
/**
* @type {{vertexBuffer: number[], normalBuffer: number[], uvBuffer: number[], skinIndexBuffer: number[], skinWeightBuffer: number[], materialIndexBuffer: number[]}}
*/
var bufferInfo = geometry.flattenToBuffers();
var geo = new THREE.BufferGeometry();
geo.name = geometryNode.name;
geo.addAttribute( 'position', new THREE.BufferAttribute( new Float32Array( bufferInfo.vertexBuffer ), 3 ) );
if ( bufferInfo.normalBuffer.length > 0 ) {
geo.addAttribute( 'normal', new THREE.BufferAttribute( new Float32Array( bufferInfo.normalBuffer ), 3 ) );
}
if ( bufferInfo.uvBuffer.length > 0 ) {
geo.addAttribute( 'uv', new THREE.BufferAttribute( new Float32Array( bufferInfo.uvBuffer ), 2 ) );
}
if ( deformer ) {
geo.addAttribute( 'skinIndex', new THREE.BufferAttribute( new Float32Array( bufferInfo.skinIndexBuffer ), 4 ) );
geo.addAttribute( 'skinWeight', new THREE.BufferAttribute( new Float32Array( bufferInfo.skinWeightBuffer ), 4 ) );
geo.FBX_Deformer = deformer;
}
// Convert the material indices of each vertex into rendering groups on the geometry.
var prevMaterialIndex = bufferInfo.materialIndexBuffer[ 0 ];
var startIndex = 0;
var groupCount = 0;
for ( var materialBufferIndex = 0; materialBufferIndex < bufferInfo.materialIndexBuffer.length; ++ materialBufferIndex ) {
if ( bufferInfo.materialIndexBuffer[ materialBufferIndex ] !== prevMaterialIndex ) {
geo.addGroup( startIndex, materialBufferIndex - startIndex, prevMaterialIndex );
startIndex = materialBufferIndex;
prevMaterialIndex = bufferInfo.materialIndexBuffer[ materialBufferIndex ];
groupCount ++;
}
}
// Close the final group, otherwise the last run of faces has no group and
// is skipped when rendering with multiple materials.
if ( groupCount > 0 ) {
geo.addGroup( startIndex, bufferInfo.materialIndexBuffer.length - startIndex, prevMaterialIndex );
}
return geo;
/**
* Parses normal information for geometry.
* @param {FBXGeometryNode} geometryNode
* @returns {{dataSize: number, buffer: number[], indices: number[], mappingType: string, referenceType: string}}
*/
function getNormals( geometryNode ) {
var NormalNode = geometryNode.subNodes.LayerElementNormal[ 0 ];
var mappingType = NormalNode.properties.MappingInformationType;
var referenceType = NormalNode.properties.ReferenceInformationType;
var buffer = parseFloatArray( NormalNode.subNodes.Normals.properties.a );
var indexBuffer = [];
if ( referenceType === 'IndexToDirect' ) {
indexBuffer = parseIntArray( NormalNode.subNodes.NormalIndex.properties.a );
}
return {
dataSize: 3,
buffer: buffer,
indices: indexBuffer,
mappingType: mappingType,
referenceType: referenceType
};
}
/**
* Parses UV information for geometry.
* @param {FBXGeometryNode} geometryNode
* @returns {{dataSize: number, buffer: number[], indices: number[], mappingType: string, referenceType: string}}
*/
function getUVs( geometryNode ) {
var UVNode = geometryNode.subNodes.LayerElementUV[ 0 ];
var mappingType = UVNode.properties.MappingInformationType;
var referenceType = UVNode.properties.ReferenceInformationType;
var buffer = parseFloatArray( UVNode.subNodes.UV.properties.a );
var indexBuffer = [];
if ( referenceType === 'IndexToDirect' ) {
indexBuffer = parseIntArray( UVNode.subNodes.UVIndex.properties.a );
}
return {
dataSize: 2,
buffer: buffer,
indices: indexBuffer,
mappingType: mappingType,
referenceType: referenceType
};
}
/**
* Parses material application information for geometry.
* @param {FBXGeometryNode} geometryNode
* @returns {{dataSize: number, buffer: number[], indices: number[], mappingType: string, referenceType: string}}
*/
function getMaterials( geometryNode ) {
var MaterialNode = geometryNode.subNodes.LayerElementMaterial[ 0 ];
var mappingType = MaterialNode.properties.MappingInformationType;
var referenceType = MaterialNode.properties.ReferenceInformationType;
var materialIndexBuffer = parseIntArray( MaterialNode.subNodes.Materials.properties.a );
// Materials are stored as per-polygon indices rather than raw values, so
// build an identity index buffer that lets the material layer be read
// through the same getData() accessors as the other layer types.
var materialIndices = [];
materialIndexBuffer.forEach( function ( materialIndex, index ) {
materialIndices.push( index );
} );
return {
dataSize: 1,
buffer: materialIndexBuffer,
indices: materialIndices,
mappingType: mappingType,
referenceType: referenceType
};
}
/**
* Function uses the infoObject and given indices to return value array of object.
* @param {number} polygonVertexIndex - Index of vertex in draw order (which index of the index buffer refers to this vertex).
* @param {number} polygonIndex - Index of polygon in geometry.
* @param {number} vertexIndex - Index of vertex inside vertex buffer (used because some data refers to old index buffer that we don't use anymore).
* @param {{dataSize: number, buffer: number[], indices: number[], mappingType: string, referenceType: string}} infoObject - Object containing data and how to access data.
* @returns {number[]}
*/
function getData( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
var GetData = {
ByPolygonVertex: {
/** Direct lookup by polygon-vertex index. */
Direct: function ( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
return infoObject.buffer.slice( ( polygonVertexIndex * infoObject.dataSize ), ( polygonVertexIndex * infoObject.dataSize ) + infoObject.dataSize );
},
/** Indexed lookup by polygon-vertex index. */
IndexToDirect: function ( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
var index = infoObject.indices[ polygonVertexIndex ];
return infoObject.buffer.slice( ( index * infoObject.dataSize ), ( index * infoObject.dataSize ) + infoObject.dataSize );
}
},
ByPolygon: {
/** Direct lookup by polygon index. */
Direct: function ( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
return infoObject.buffer.slice( polygonIndex * infoObject.dataSize, polygonIndex * infoObject.dataSize + infoObject.dataSize );
},
/** Indexed lookup by polygon index. */
IndexToDirect: function ( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
var index = infoObject.indices[ polygonIndex ];
return infoObject.buffer.slice( index * infoObject.dataSize, index * infoObject.dataSize + infoObject.dataSize );
}
},
AllSame: {
/** Single shared value for the whole geometry. */
IndexToDirect: function ( polygonVertexIndex, polygonIndex, vertexIndex, infoObject ) {
return infoObject.buffer.slice( infoObject.indices[ 0 ] * infoObject.dataSize, infoObject.indices[ 0 ] * infoObject.dataSize + infoObject.dataSize );
}
}
};
return GetData[ infoObject.mappingType ][ infoObject.referenceType ]( polygonVertexIndex, polygonIndex, vertexIndex, infoObject );
}
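// Example: a normal layer with MappingInformationType 'ByPolygonVertex' and
// ReferenceInformationType 'Direct' resolves through GetData.ByPolygonVertex.Direct,
// i.e. buffer.slice( pvIndex * 3, pvIndex * 3 + 3 ) for a dataSize of 3.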
}
}
/**
* Specialty function for parsing NurbsCurve based Geometry Nodes.
* @param {FBXGeometryNode} geometryNode
* @param {{parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}} relationships
* @returns {THREE.BufferGeometry}
*/
function parseNurbsGeometry( geometryNode, relationships ) {
if ( THREE.NURBSCurve === undefined ) {
console.error( "THREE.FBXLoader relies on THREE.NURBSCurve for any nurbs present in the model. Nurbs will show up as empty geometry." );
return new THREE.BufferGeometry();
}
var order = parseInt( geometryNode.properties.Order );
if ( isNaN( order ) ) {
console.error( "FBXLoader: Invalid Order " + geometryNode.properties.Order + " given for geometry ID: " + geometryNode.id );
return new THREE.BufferGeometry();
}
var knots = parseFloatArray( geometryNode.subNodes.KnotVector.properties.a );
var controlPoints = [];
var pointsValues = parseFloatArray( geometryNode.subNodes.Points.properties.a );
for ( var i = 0; i < pointsValues.length; i += 4 ) {
controlPoints.push( new THREE.Vector4( pointsValues[ i ], pointsValues[ i + 1 ], pointsValues[ i + 2 ], pointsValues[ i + 3 ] ) );
}
if ( geometryNode.properties.Form === 'Closed' ) {
controlPoints.push( controlPoints[ 0 ] );
}
var curve = new THREE.NURBSCurve( order - 1, knots, controlPoints );
var vertices = curve.getPoints( controlPoints.length * 1.5 );
var vertexBuffer = [];
vertices.forEach( function ( position ) {
var array = position.toArray();
vertexBuffer = vertexBuffer.concat( array );
} );
var geometry = new THREE.BufferGeometry();
geometry.addAttribute( 'position', new THREE.BufferAttribute( new Float32Array( vertexBuffer ), 3 ) );
return geometry;
}
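// Note: control points are homogeneous ( x, y, z, w ); THREE.NURBSCurve
// consumes THREE.Vector4 directly, and the 1.5x oversampling above is just
// a heuristic density for the sampled line segments.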
}
}
/**
* Finally generates Scene graph and Scene graph Objects.
* @param {{Objects: {subNodes: {Model: Object.<number, FBXModelNode>}}}} FBXTree
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @param {Map<number, {map: Map<number, {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}>, array: {FBX_ID: number, indices: number[], weights: number[], transform: number[], transformLink: number[], linkMode: string}[], skeleton: THREE.Skeleton|null}>} deformerMap
* @param {Map<number, THREE.BufferGeometry>} geometryMap
* @param {Map<number, THREE.Material>} materialMap
* @returns {THREE.Group}
*/
function parseScene( FBXTree, connections, deformerMap, geometryMap, materialMap ) {
var sceneGraph = new THREE.Group();
var ModelNode = FBXTree.Objects.subNodes.Model;
/**
* @type {Array.<THREE.Object3D>}
*/
var modelArray = [];
/**
* @type {Map.<number, THREE.Object3D>}
*/
var modelMap = new Map();
for ( var nodeID in ModelNode ) {
var id = parseInt( nodeID );
var node = ModelNode[ nodeID ];
var conns = connections.get( id );
var model = null;
for ( var i = 0; i < conns.parents.length; ++ i ) {
deformerMap.forEach( function ( deformer ) {
if ( deformer.map.has( conns.parents[ i ].ID ) ) {
model = new THREE.Bone();
var index = deformer.array.findIndex( function ( subDeformer ) {
return subDeformer.FBX_ID === conns.parents[ i ].ID;
} );
deformer.bones[ index ] = model;
}
} );
}
if ( ! model ) {
switch ( node.attrType ) {
case "Mesh":
/**
* @type {?THREE.BufferGeometry}
*/
var geometry = null;
/**
* @type {THREE.MultiMaterial|THREE.Material}
*/
var material = null;
/**
* @type {Array.<THREE.Material>}
*/
var materials = [];
conns.children.forEach( function ( child ) {
if ( geometryMap.has( child.ID ) ) {
geometry = geometryMap.get( child.ID );
}
if ( materialMap.has( child.ID ) ) {
materials.push( materialMap.get( child.ID ) );
}
} );
if ( materials.length > 1 ) {
material = new THREE.MultiMaterial( materials );
} else if ( materials.length > 0 ) {
material = materials[ 0 ];
} else {
material = new THREE.MeshBasicMaterial( { color: 0x3300ff } );
}
if ( geometry.FBX_Deformer ) {
materials.forEach( function ( material ) {
material.skinning = true;
} );
material.skinning = true;
model = new THREE.SkinnedMesh( geometry, material );
} else {
model = new THREE.Mesh( geometry, material );
}
break;
case "NurbsCurve":
var geometry = null;
conns.children.forEach( function ( child ) {
if ( geometryMap.has( child.ID ) ) {
geometry = geometryMap.get( child.ID );
}
} );
// FBX does not list materials for Nurbs lines, so we'll just put our own in here.
// Note: linewidth values > 1 are ignored by most WebGL implementations.
material = new THREE.LineBasicMaterial( { color: 0x3300ff, linewidth: 5 } );
model = new THREE.Line( geometry, material );
break;
default:
model = new THREE.Object3D();
break;
}
}
model.name = node.attrName.replace( /:/, '' ).replace( /_/, '' ).replace( /-/, '' );
model.FBX_ID = id;
modelArray.push( model );
modelMap.set( id, model );
}
modelArray.forEach( function ( model ) {
var node = ModelNode[ model.FBX_ID ];
if ( 'Lcl_Translation' in node.properties ) {
model.position.fromArray( parseFloatArray( node.properties.Lcl_Translation.value ) );
}
if ( 'Lcl_Rotation' in node.properties ) {
var rotation = parseFloatArray( node.properties.Lcl_Rotation.value ).map( function ( value ) {
return value * Math.PI / 180;
} );
rotation.push( 'ZYX' );
model.rotation.fromArray( rotation );
}
if ( 'Lcl_Scaling' in node.properties ) {
model.scale.fromArray( parseFloatArray( node.properties.Lcl_Scaling.value ) );
}
var conns = connections.get( model.FBX_ID );
for ( var parentIndex = 0; parentIndex < conns.parents.length; parentIndex ++ ) {
var pIndex = modelArray.findIndex( function ( mod ) {
return mod.FBX_ID === conns.parents[ parentIndex ].ID;
} );
if ( pIndex > - 1 ) {
modelArray[ pIndex ].add( model );
break;
}
}
if ( model.parent === null ) {
sceneGraph.add( model );
}
} );
// Now with the bones created, we can update the skeletons and bind them to the skinned meshes.
sceneGraph.updateMatrixWorld( true );
// Put skeleton into bind pose.
var BindPoseNode = null;
var PoseNodes = FBXTree.Objects.subNodes.Pose;
for ( var nodeID in PoseNodes ) {
if ( PoseNodes[ nodeID ].attrType === 'BindPose' ) {
BindPoseNode = PoseNodes[ nodeID ];
break;
}
}
// Collect the world matrices first; an empty map simply leaves bones at
// their current transforms when no BindPose node is present.
var worldMatrices = new Map();
if ( BindPoseNode ) {
var PoseNode = BindPoseNode.subNodes.PoseNode;
PoseNode.forEach( function ( node ) {
var rawMatWrd = parseMatrixArray( node.subNodes.Matrix.properties.a );
worldMatrices.set( parseInt( node.id ), rawMatWrd );
} );
}
deformerMap.forEach( function ( deformer, FBX_ID ) {
deformer.array.forEach( function ( subDeformer, subDeformerIndex ) {
/**
* @type {THREE.Bone}
*/
var bone = deformer.bones[ subDeformerIndex ];
if ( ! worldMatrices.has( bone.FBX_ID ) ) {
return;
}
var mat = worldMatrices.get( bone.FBX_ID );
bone.matrixWorld.copy( mat );
} );
// Now that skeleton is in bind pose, bind to model.
deformer.skeleton = new THREE.Skeleton( deformer.bones );
var conns = connections.get( FBX_ID );
conns.parents.forEach( function ( parent ) {
if ( geometryMap.has( parent.ID ) ) {
var geoID = parent.ID;
var geoConns = connections.get( geoID );
for ( var i = 0; i < geoConns.parents.length; ++ i ) {
if ( modelMap.has( geoConns.parents[ i ].ID ) ) {
var model = modelMap.get( geoConns.parents[ i ].ID );
// model is expected to be a THREE.SkinnedMesh at this point.
model.bind( deformer.skeleton, model.matrixWorld );
break;
}
}
}
} );
} );
// Skeleton is now bound, we are now free to set up the
// scene graph.
modelArray.forEach( function ( model ) {
var node = ModelNode[ model.FBX_ID ];
if ( 'Lcl_Translation' in node.properties ) {
model.position.fromArray( parseFloatArray( node.properties.Lcl_Translation.value ) );
}
if ( 'Lcl_Rotation' in node.properties ) {
var rotation = parseFloatArray( node.properties.Lcl_Rotation.value ).map( function ( value ) {
return value * Math.PI / 180;
} );
rotation.push( 'ZYX' );
model.rotation.fromArray( rotation );
}
if ( 'Lcl_Scaling' in node.properties ) {
model.scale.fromArray( parseFloatArray( node.properties.Lcl_Scaling.value ) );
}
} );
// Silly hack with the animation parsing. We're gonna pretend the scene graph has a skeleton
// to attach animations to, since FBXs treat animations as animations for the entire scene,
// not just for individual objects.
sceneGraph.skeleton = {
bones: modelArray
};
var animations = parseAnimations( FBXTree, connections, sceneGraph );
addAnimations( sceneGraph, animations );
return sceneGraph;
}
/**
* Parses animation information from FBXTree and generates an AnimationInfoObject.
* @param {{Objects: {subNodes: {AnimationCurveNode: any, AnimationCurve: any, AnimationLayer: any, AnimationStack: any}}}} FBXTree
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
*/
function parseAnimations( FBXTree, connections, sceneGraph ) {
var rawNodes = FBXTree.Objects.subNodes.AnimationCurveNode;
var rawCurves = FBXTree.Objects.subNodes.AnimationCurve;
var rawLayers = FBXTree.Objects.subNodes.AnimationLayer;
var rawStacks = FBXTree.Objects.subNodes.AnimationStack;
/**
 * @typedef {{version: any, id: number, internalID: number, times: number[], values: number[], attrFlag: number[], attrData: number[]}} FBXAnimationCurve
 */
/**
 * @typedef {{id: number, attr: string, internalID: number, attrX: boolean, attrY: boolean, attrZ: boolean, containerBoneID: number, containerID: number, curves: {x: FBXAnimationCurve, y: FBXAnimationCurve, z: FBXAnimationCurve}}} FBXAnimationCurveNode
 */
/**
 * @typedef {{T: FBXAnimationCurveNode, R: FBXAnimationCurveNode, S: FBXAnimationCurveNode}} FBXAnimationLayerMember
 */
/**
 * @type {{
 curves: Map<number, FBXAnimationLayerMember>,
 layers: Map<number, FBXAnimationLayerMember[]>,
 stacks: Map<number, {name: string, layers: FBXAnimationLayerMember[][], length: number, frames: number}>,
 length: number,
 fps: number,
 frames: number
 }}
 */
var returnObject = {
curves: new Map(),
layers: new Map(),
stacks: new Map(),
length: 0,
fps: 30,
frames: 0
};
/**
 * @type {Array.<FBXAnimationCurveNode>}
 */
var animationCurveNodes = [];
for ( var nodeID in rawNodes ) {
if ( nodeID.match( /\d+/ ) ) {
var animationNode = parseAnimationNode( FBXTree, rawNodes[ nodeID ], connections, sceneGraph );
animationCurveNodes.push( animationNode );
}
}
/**
 * @type {Map.<number, FBXAnimationCurveNode>}
 */
var tmpMap = new Map();
for ( var animationCurveNodeIndex = 0; animationCurveNodeIndex < animationCurveNodes.length; ++ animationCurveNodeIndex ) {
if ( animationCurveNodes[ animationCurveNodeIndex ] === null ) {
continue;
}
tmpMap.set( animationCurveNodes[ animationCurveNodeIndex ].id, animationCurveNodes[ animationCurveNodeIndex ] );
}
/**
 * @type {FBXAnimationCurve[]}
 */
var animationCurves = [];
for ( nodeID in rawCurves ) {
if ( nodeID.match( /\d+/ ) ) {
var animationCurve = parseAnimationCurve( rawCurves[ nodeID ] );
animationCurves.push( animationCurve );
var firstParentConn = connections.get( animationCurve.id ).parents[ 0 ];
var firstParentID = firstParentConn.ID;
var firstParentRelationship = firstParentConn.relationship;
var axis = '';
if ( firstParentRelationship.match( /X/ ) ) {
axis = 'x';
} else if ( firstParentRelationship.match( /Y/ ) ) {
axis = 'y';
} else if ( firstParentRelationship.match( /Z/ ) ) {
axis = 'z';
} else {
continue;
}
// Guard against curves whose parent is not a known AnimationCurveNode.
if ( tmpMap.has( firstParentID ) ) {
tmpMap.get( firstParentID ).curves[ axis ] = animationCurve;
}
}
}
tmpMap.forEach( function ( curveNode ) {
var id = curveNode.containerBoneID;
if ( ! returnObject.curves.has( id ) ) {
returnObject.curves.set( id, { T: null, R: null, S: null } );
}
returnObject.curves.get( id )[ curveNode.attr ] = curveNode;
} );
for ( var nodeID in rawLayers ) {
/**
 * @type {FBXAnimationLayerMember[]}
 */
var layer = [];
var children = connections.get( parseInt( nodeID ) ).children;
for ( var childIndex = 0; childIndex < children.length; childIndex ++ ) {
// Skip lockInfluenceWeights
if ( tmpMap.has( children[ childIndex ].ID ) ) {
var curveNode = tmpMap.get( children[ childIndex ].ID );
var boneID = curveNode.containerBoneID;
if ( layer[ boneID ] === undefined ) {
layer[ boneID ] = {
T: null,
R: null,
S: null
};
}
layer[ boneID ][ curveNode.attr ] = curveNode;
}
}
returnObject.layers.set( parseInt( nodeID ), layer );
}
for ( var nodeID in rawStacks ) {
var layers = [];
var children = connections.get( parseInt( nodeID ) ).children;
var maxTimeStamp = 0;
var minTimeStamp = Number.MAX_VALUE;
for ( var childIndex = 0; childIndex < children.length; ++ childIndex ) {
if ( returnObject.layers.has( children[ childIndex ].ID ) ) {
var currentLayer = returnObject.layers.get( children[ childIndex ].ID );
layers.push( currentLayer );
currentLayer.forEach( function ( layer ) {
if ( layer ) {
getCurveNodeMaxMinTimeStamps( layer );
}
/**
 * Updates maxTimeStamp and minTimeStamp whenever the given layer member
 * contains timestamps outside the current [min, max] range.
 * @param {FBXAnimationLayerMember} layer
 */
function getCurveNodeMaxMinTimeStamps( layer ) {
/**
 * Updates maxTimeStamp and minTimeStamp if one of the curve's timestamps
 * exceeds the current maximum or minimum.
 * @param {{x: FBXAnimationCurve, y: FBXAnimationCurve, z: FBXAnimationCurve}} curve
 */
function getCurveMaxMinTimeStamp( curve ) {
/**
* Sets the maxTimeStamp and minTimeStamp if one of its timestamps exceeds the maximum or minimum.
* @param {{times: number[]}} axis
*/
function getCurveAxisMaxMinTimeStamps( axis ) {
maxTimeStamp = axis.times[ axis.times.length - 1 ] > maxTimeStamp ? axis.times[ axis.times.length - 1 ] : maxTimeStamp;
minTimeStamp = axis.times[ 0 ] < minTimeStamp ? axis.times[ 0 ] : minTimeStamp;
}
if ( curve.x ) {
getCurveAxisMaxMinTimeStamps( curve.x );
}
if ( curve.y ) {
getCurveAxisMaxMinTimeStamps( curve.y );
}
if ( curve.z ) {
getCurveAxisMaxMinTimeStamps( curve.z );
}
}
if ( layer.R ) {
getCurveMaxMinTimeStamp( layer.R.curves );
}
if ( layer.S ) {
getCurveMaxMinTimeStamp( layer.S.curves );
}
if ( layer.T ) {
getCurveMaxMinTimeStamp( layer.T.curves );
}
}
} );
}
}
// Do we have an animation clip with actual length?
if ( maxTimeStamp > minTimeStamp ) {
returnObject.stacks.set( parseInt( nodeID ), {
name: rawStacks[ nodeID ].attrName,
layers: layers,
length: maxTimeStamp - minTimeStamp,
frames: ( maxTimeStamp - minTimeStamp ) * 30
} );
}
}
return returnObject;
/**
* @param {Object} FBXTree
* @param {{id: number, attrName: string, properties: Object<string, any>}} animationCurveNode
* @param {Map<number, {parents: {ID: number, relationship: string}[], children: {ID: number, relationship: string}[]}>} connections
* @param {{skeleton: {bones: {FBX_ID: number}[]}}} sceneGraph
*/
function parseAnimationNode( FBXTree, animationCurveNode, connections, sceneGraph ) {
var returnObject = {
/**
* @type {number}
*/
id: animationCurveNode.id,
/**
* @type {string}
*/
attr: animationCurveNode.attrName,
/**
* @type {number}
*/
internalID: animationCurveNode.id,
/**
* @type {boolean}
*/
attrX: false,
/**
* @type {boolean}
*/
attrY: false,
/**
* @type {boolean}
*/
attrZ: false,
/**
* @type {number}
*/
containerBoneID: - 1,
/**
* @type {number}
*/
containerID: - 1,
curves: {
x: null,
y: null,
z: null
}
};
if ( returnObject.attr.match( /S|R|T/ ) ) {
for ( var attributeKey in animationCurveNode.properties ) {
if ( attributeKey.match( /X/ ) ) {
returnObject.attrX = true;
}
if ( attributeKey.match( /Y/ ) ) {<|fim▁hole|>
returnObject.attrY = true;
}
if ( attributeKey.match( /Z/ ) ) {
returnObject.attrZ = true;
}
}
} else {
return null;
}
var conns = connections.get( returnObject.id );
var containerIndices = conns.parents;
for ( var containerIndicesIndex = containerIndices.length - 1; containerIndicesIndex >= 0; -- containerIndicesIndex ) {
var boneID = sceneGraph.skeleton.bones.findIndex( function ( bone ) {
return bone.FBX_ID === containerIndices[ containerIndicesIndex ].ID;
} );
if ( boneID > - 1 ) {
returnObject.containerBoneID = boneID;
returnObject.containerID = containerIndices[ containerIndicesIndex ].ID;
break;
}
}
return returnObject;
}
/**
* @param {{id: number, subNodes: {KeyTime: {properties: {a: string}}, KeyValueFloat: {properties: {a: string}}, KeyAttrFlags: {properties: {a: string}}, KeyAttrDataFloat: {properties: {a: string}}}}} animationCurve
*/
function parseAnimationCurve( animationCurve ) {
return {
version: null,
id: animationCurve.id,
internalID: animationCurve.id,
times: parseFloatArray( animationCurve.subNodes.KeyTime.properties.a ).map( function ( time ) {
return ConvertFBXTimeToSeconds( time );
} ),
values: parseFloatArray( animationCurve.subNodes.KeyValueFloat.properties.a ),
attrFlag: parseIntArray( animationCurve.subNodes.KeyAttrFlags.properties.a ),
attrData: parseFloatArray( animationCurve.subNodes.KeyAttrDataFloat.properties.a )
};
}
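// Note: FBX key times are stored in KTime ticks; ConvertFBXTimeToSeconds
// divides by 46186158000 (ticks per second in the FBX SDK), which is why
// the times above are mapped before being used as clip timestamps.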
}
/**
* @param {{
 curves: Map<number, FBXAnimationLayerMember>,
 layers: Map<number, FBXAnimationLayerMember[]>,
 stacks: Map<number, {name: string, layers: FBXAnimationLayerMember[][], length: number, frames: number}>,
 length: number,
 fps: number,
 frames: number
 }} animations
* @param {{skeleton: { bones: THREE.Bone[]}}} group
*/
function addAnimations( group, animations ) {
if ( group.animations === undefined ) {
group.animations = [];
}
animations.stacks.forEach( function ( stack ) {
var animationData = {
name: stack.name,
fps: 30,
length: stack.length,
hierarchy: []
};
var bones = group.skeleton.bones;
bones.forEach( function ( bone ) {
var name = bone.name.replace( /.*:/, '' );
var parentIndex = bones.findIndex( function ( parentBone ) {
return bone.parent === parentBone;
} );
animationData.hierarchy.push( { parent: parentIndex, name: name, keys: [] } );
} );
for ( var frame = 0; frame < stack.frames; frame ++ ) {
bones.forEach( function ( bone, boneIndex ) {
var animationNode = stack.layers[ 0 ][ boneIndex ];
animationData.hierarchy.forEach( function ( node ) {
if ( node.name === bone.name ) {
node.keys.push( generateKey( animationNode, bone, frame ) );
}
} );
} );
}
group.animations.push( THREE.AnimationClip.parseAnimation( animationData, bones ) );
/**
* @param {THREE.Bone} bone
*/
function generateKey( animationNode, bone, frame ) {
var key = {
time: frame / animations.fps,
pos: bone.position.toArray(),
rot: bone.quaternion.toArray(),
scl: bone.scale.toArray()
};
if ( animationNode === undefined ) {
return key;
}
try {
if ( hasCurve( animationNode, 'T' ) && hasKeyOnFrame( animationNode.T, frame ) ) {
key.pos = [ animationNode.T.curves.x.values[ frame ], animationNode.T.curves.y.values[ frame ], animationNode.T.curves.z.values[ frame ] ];
}
if ( hasCurve( animationNode, 'R' ) && hasKeyOnFrame( animationNode.R, frame ) ) {
var rotationX = degreeToRadian( animationNode.R.curves.x.values[ frame ] );
var rotationY = degreeToRadian( animationNode.R.curves.y.values[ frame ] );
var rotationZ = degreeToRadian( animationNode.R.curves.z.values[ frame ] );
var euler = new THREE.Euler( rotationX, rotationY, rotationZ, 'ZYX' );
key.rot = new THREE.Quaternion().setFromEuler( euler ).toArray();
}
if ( hasCurve( animationNode, 'S' ) && hasKeyOnFrame( animationNode.S, frame ) ) {
key.scl = [ animationNode.S.curves.x.values[ frame ], animationNode.S.curves.y.values[ frame ], animationNode.S.curves.z.values[ frame ] ];
}
} catch ( error ) {
// Curve is not fully plotted.
console.log( bone );
console.log( error );
}
return key;
function hasCurve( animationNode, attribute ) {
if ( animationNode === undefined ) {
return false;
}
var attributeNode = animationNode[ attribute ];
if ( ! attributeNode ) {
return false;
}
return [ 'x', 'y', 'z' ].every( function ( key ) {
return attributeNode.curves[ key ] !== undefined;
} );
}
function hasKeyOnFrame( attributeNode, frame ) {
return [ 'x', 'y', 'z' ].every( function ( key ) {
return isKeyExistOnFrame( attributeNode.curves[ key ], frame );
function isKeyExistOnFrame( curve, frame ) {
return curve.values[ frame ] !== undefined;
}
} );
}
}
} );
}
// UTILS
/**
* Parses Vector3 property from FBXTree. Property is given as .value.x, .value.y, etc.
* @param {FBXVector3} property - Property to parse as Vector3.
* @returns {THREE.Vector3}
*/
function parseVector3( property ) {
return new THREE.Vector3( parseFloat( property.value.x ), parseFloat( property.value.y ), parseFloat( property.value.z ) );
}
/**
* Parses Color property from FBXTree. Property is given as .value.x, .value.y, etc.
* @param {FBXVector3} property - Property to parse as Color.
* @returns {THREE.Color}
*/
function parseColor( property ) {
return new THREE.Color().fromArray( parseVector3( property ).toArray() );
}
}
} );
/**
* An instance of a Vertex with data for drawing vertices to the screen.
* @constructor
*/
function Vertex() {
/**
* Position of the vertex.
* @type {THREE.Vector3}
*/
this.position = new THREE.Vector3( );
/**
* Normal of the vertex
* @type {THREE.Vector3}
*/
this.normal = new THREE.Vector3( );
/**
* UV coordinates of the vertex.
* @type {THREE.Vector2}
*/
this.uv = new THREE.Vector2( );
/**
* Indices of the bones vertex is influenced by.
* @type {THREE.Vector4}
*/
this.skinIndices = new THREE.Vector4( 0, 0, 0, 0 );
/**
* Weights that each bone influences the vertex.
* @type {THREE.Vector4}
*/
this.skinWeights = new THREE.Vector4( 0, 0, 0, 0 );
}
Object.assign( Vertex.prototype, {
copy: function ( target ) {
var returnVar = target || new Vertex();
returnVar.position.copy( this.position );
returnVar.normal.copy( this.normal );
returnVar.uv.copy( this.uv );
returnVar.skinIndices.copy( this.skinIndices );
returnVar.skinWeights.copy( this.skinWeights );
return returnVar;
},
flattenToBuffers: function () {
var vertexBuffer = this.position.toArray();
var normalBuffer = this.normal.toArray();
var uvBuffer = this.uv.toArray();
var skinIndexBuffer = this.skinIndices.toArray();
var skinWeightBuffer = this.skinWeights.toArray();
return {
vertexBuffer: vertexBuffer,
normalBuffer: normalBuffer,
uvBuffer: uvBuffer,
skinIndexBuffer: skinIndexBuffer,
skinWeightBuffer: skinWeightBuffer,
};
}
} );
/**
* @constructor
*/
function Triangle() {
/**
* @type {{position: THREE.Vector3, normal: THREE.Vector3, uv: THREE.Vector2, skinIndices: THREE.Vector4, skinWeights: THREE.Vector4}[]}
*/
this.vertices = [ ];
}
Object.assign( Triangle.prototype, {
copy: function ( target ) {
	var returnVar = target || new Triangle();
	for ( var i = 0; i < this.vertices.length; ++ i ) {
		// Vertex.copy returns its target (or a fresh Vertex), so keep the
		// result in case returnVar.vertices[ i ] was undefined.
		returnVar.vertices[ i ] = this.vertices[ i ].copy( returnVar.vertices[ i ] );
	}
	return returnVar;
},
flattenToBuffers: function () {
var vertexBuffer = [];
var normalBuffer = [];
var uvBuffer = [];
var skinIndexBuffer = [];
var skinWeightBuffer = [];
this.vertices.forEach( function ( vertex ) {
var flatVertex = vertex.flattenToBuffers();
vertexBuffer = vertexBuffer.concat( flatVertex.vertexBuffer );
normalBuffer = normalBuffer.concat( flatVertex.normalBuffer );
uvBuffer = uvBuffer.concat( flatVertex.uvBuffer );
skinIndexBuffer = skinIndexBuffer.concat( flatVertex.skinIndexBuffer );
skinWeightBuffer = skinWeightBuffer.concat( flatVertex.skinWeightBuffer );
} );
return {
vertexBuffer: vertexBuffer,
normalBuffer: normalBuffer,
uvBuffer: uvBuffer,
skinIndexBuffer: skinIndexBuffer,
skinWeightBuffer: skinWeightBuffer,
};
}
} );
/**
* @constructor
*/
function Face() {
/**
* @type {{vertices: {position: THREE.Vector3, normal: THREE.Vector3, uv: THREE.Vector2, skinIndices: THREE.Vector4, skinWeights: THREE.Vector4}[]}[]}
*/
this.triangles = [ ];
this.materialIndex = 0;
}
Object.assign( Face.prototype, {
copy: function ( target ) {
	var returnVar = target || new Face();
	for ( var i = 0; i < this.triangles.length; ++ i ) {
		// Triangle.copy returns its target (or a fresh Triangle), so keep
		// the result in case returnVar.triangles[ i ] was undefined.
		returnVar.triangles[ i ] = this.triangles[ i ].copy( returnVar.triangles[ i ] );
	}
	returnVar.materialIndex = this.materialIndex;
	return returnVar;
},
genTrianglesFromVertices: function ( vertexArray ) {
for ( var i = 2; i < vertexArray.length; ++ i ) {
var triangle = new Triangle();
triangle.vertices[ 0 ] = vertexArray[ 0 ];
triangle.vertices[ 1 ] = vertexArray[ i - 1 ];
triangle.vertices[ 2 ] = vertexArray[ i ];
this.triangles.push( triangle );
}
},
flattenToBuffers: function () {
var vertexBuffer = [];
var normalBuffer = [];
var uvBuffer = [];
var skinIndexBuffer = [];
var skinWeightBuffer = [];
var materialIndexBuffer = [];
var materialIndex = this.materialIndex;
this.triangles.forEach( function ( triangle ) {
var flatTriangle = triangle.flattenToBuffers();
vertexBuffer = vertexBuffer.concat( flatTriangle.vertexBuffer );
normalBuffer = normalBuffer.concat( flatTriangle.normalBuffer );
uvBuffer = uvBuffer.concat( flatTriangle.uvBuffer );
skinIndexBuffer = skinIndexBuffer.concat( flatTriangle.skinIndexBuffer );
skinWeightBuffer = skinWeightBuffer.concat( flatTriangle.skinWeightBuffer );
materialIndexBuffer = materialIndexBuffer.concat( [ materialIndex, materialIndex, materialIndex ] );
} );
return {
vertexBuffer: vertexBuffer,
normalBuffer: normalBuffer,
uvBuffer: uvBuffer,
skinIndexBuffer: skinIndexBuffer,
skinWeightBuffer: skinWeightBuffer,
materialIndexBuffer: materialIndexBuffer
};
}
} );
/**
* @constructor
*/
function Geometry() {
/**
* @type {{triangles: {vertices: {position: THREE.Vector3, normal: THREE.Vector3, uv: THREE.Vector2, skinIndices: THREE.Vector4, skinWeights: THREE.Vector4}[]}[], materialIndex: number}[]}
*/
this.faces = [ ];
/**
* @type {{}|THREE.Skeleton}
*/
this.skeleton = null;
}
Object.assign( Geometry.prototype, {
/**
* @returns {{vertexBuffer: number[], normalBuffer: number[], uvBuffer: number[], skinIndexBuffer: number[], skinWeightBuffer: number[], materialIndexBuffer: number[]}}
*/
flattenToBuffers: function () {
var vertexBuffer = [];
var normalBuffer = [];
var uvBuffer = [];
var skinIndexBuffer = [];
var skinWeightBuffer = [];
var materialIndexBuffer = [];
this.faces.forEach( function ( face ) {
var flatFace = face.flattenToBuffers();
vertexBuffer = vertexBuffer.concat( flatFace.vertexBuffer );
normalBuffer = normalBuffer.concat( flatFace.normalBuffer );
uvBuffer = uvBuffer.concat( flatFace.uvBuffer );
skinIndexBuffer = skinIndexBuffer.concat( flatFace.skinIndexBuffer );
skinWeightBuffer = skinWeightBuffer.concat( flatFace.skinWeightBuffer );
materialIndexBuffer = materialIndexBuffer.concat( flatFace.materialIndexBuffer );
} );
return {
vertexBuffer: vertexBuffer,
normalBuffer: normalBuffer,
uvBuffer: uvBuffer,
skinIndexBuffer: skinIndexBuffer,
skinWeightBuffer: skinWeightBuffer,
materialIndexBuffer: materialIndexBuffer
};
}
} );
function TextParser() {}
Object.assign( TextParser.prototype, {
getPrevNode: function () {
return this.nodeStack[ this.currentIndent - 2 ];
},
getCurrentNode: function () {
return this.nodeStack[ this.currentIndent - 1 ];
},
getCurrentProp: function () {
return this.currentProp;
},
pushStack: function ( node ) {
this.nodeStack.push( node );
this.currentIndent += 1;
},
popStack: function () {
this.nodeStack.pop();
this.currentIndent -= 1;
},
setCurrentProp: function ( val, name ) {
this.currentProp = val;
this.currentPropName = name;
},
// ----------parse ---------------------------------------------------
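// Parses ASCII FBX text line by line. Nesting depth is tracked by the
// leading tab count: a "Name: ... {" line at the current indent opens a
// node on the stack, and a "}" one indent level back closes it.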
parse: function ( text ) {
this.currentIndent = 0;
this.allNodes = new FBXTree();
this.nodeStack = [];
this.currentProp = [];
this.currentPropName = '';
var split = text.split( "\n" );
for ( var line in split ) {
var l = split[ line ];
// short cut
if ( l.match( /^[\s\t]*;/ ) ) {
continue;
} // skip comment line
if ( l.match( /^[\s\t]*$/ ) ) {
continue;
} // skip empty line
// beginning of node
var beginningOfNodeExp = new RegExp( "^\\t{" + this.currentIndent + "}(\\w+):(.*){", '' );
var match = l.match( beginningOfNodeExp );
if ( match ) {
var nodeName = match[ 1 ].trim().replace( /^"/, '' ).replace( /"$/, "" );
var nodeAttrs = match[ 2 ].split( ',' ).map( function ( element ) {
return element.trim().replace( /^"/, '' ).replace( /"$/, '' );
} );
this.parseNodeBegin( l, nodeName, nodeAttrs || null );
continue;
}
// node's property
var propExp = new RegExp( "^\\t{" + ( this.currentIndent ) + "}(\\w+):[\\s\\t\\r\\n](.*)" );
var match = l.match( propExp );
if ( match ) {
var propName = match[ 1 ].replace( /^"/, '' ).replace( /"$/, "" ).trim();
var propValue = match[ 2 ].replace( /^"/, '' ).replace( /"$/, "" ).trim();
this.parseNodeProperty( l, propName, propValue );
continue;
}
// end of node
var endOfNodeExp = new RegExp( "^\\t{" + ( this.currentIndent - 1 ) + "}}" );
if ( l.match( endOfNodeExp ) ) {
this.nodeEnd();
continue;
}
// for special case,
//
// Vertices: *8670 {
// a: 0.0356229953467846,13.9599733352661,-0.399196773.....(snip)
// -0.0612030513584614,13.960485458374,-0.409748703241348,-0.10.....
// 0.12490539252758,13.7450733184814,-0.454119384288788,0.09272.....
// 0.0836158767342567,13.5432004928589,-0.435397416353226,0.028.....
//
// in these cases the line must continue from the previous line
if ( l.match( /^[^\s\t}]/ ) ) {
this.parseNodePropertyContinued( l );
}
}
return this.allNodes;
},
parseNodeBegin: function ( line, nodeName, nodeAttrs ) {
// var nodeName = match[1];
var node = { 'name': nodeName, properties: {}, 'subNodes': {} };
var attrs = this.parseNodeAttr( nodeAttrs );
var currentNode = this.getCurrentNode();
// a top node
if ( this.currentIndent === 0 ) {
this.allNodes.add( nodeName, node );
} else {
// a subnode
// already exists subnode, then append it
if ( nodeName in currentNode.subNodes ) {
var tmp = currentNode.subNodes[ nodeName ];
// console.log( "duped entry found\nkey: " + nodeName + "\nvalue: " + propValue );
if ( this.isFlattenNode( currentNode.subNodes[ nodeName ] ) ) {
if ( attrs.id === '' ) {
currentNode.subNodes[ nodeName ] = [];
currentNode.subNodes[ nodeName ].push( tmp );
} else {
currentNode.subNodes[ nodeName ] = {};
currentNode.subNodes[ nodeName ][ tmp.id ] = tmp;
}
}
if ( attrs.id === '' ) {
currentNode.subNodes[ nodeName ].push( node );
} else {
currentNode.subNodes[ nodeName ][ attrs.id ] = node;
}
} else if ( typeof attrs.id === 'number' || attrs.id.match( /^\d+$/ ) ) {
currentNode.subNodes[ nodeName ] = {};
currentNode.subNodes[ nodeName ][ attrs.id ] = node;
} else {
currentNode.subNodes[ nodeName ] = node;
}
}
// for this ↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓
// NodeAttribute: 1001463072, "NodeAttribute::", "LimbNode" {
if ( nodeAttrs ) {
node.id = attrs.id;
node.attrName = attrs.name;
node.attrType = attrs.type;
}
this.pushStack( node );
},
parseNodeAttr: function ( attrs ) {
var id = attrs[ 0 ];
if ( attrs[ 0 ] !== "" ) {
id = parseInt( attrs[ 0 ] );
if ( isNaN( id ) ) {
// PolygonVertexIndex: *16380 {
id = attrs[ 0 ];
}
}
var name;
var type;
if ( attrs.length > 1 ) {
name = attrs[ 1 ].replace( /^(\w+)::/, '' );
type = attrs[ 2 ];
}
return { id: id, name: name || '', type: type || '' };
},
parseNodeProperty: function ( line, propName, propValue ) {
var currentNode = this.getCurrentNode();
var parentName = currentNode.name;
// special case: the parent node is something like "Properties70"
// these children nodes must be treated with care
if ( parentName !== undefined ) {
var propMatch = parentName.match( /Properties(\d)+/ );
if ( propMatch ) {
this.parseNodeSpecialProperty( line, propName, propValue );
return;
}
}
// special case Connections
if ( propName == 'C' ) {
var connProps = propValue.split( ',' ).slice( 1 );
var from = parseInt( connProps[ 0 ] );
var to = parseInt( connProps[ 1 ] );
var rest = propValue.split( ',' ).slice( 3 );
propName = 'connections';
propValue = [ from, to ];
propValue = propValue.concat( rest );
if ( currentNode.properties[ propName ] === undefined ) {
currentNode.properties[ propName ] = [];
}
}
// special case Node
if ( propName == 'Node' ) {
var id = parseInt( propValue );
currentNode.properties.id = id;
currentNode.id = id;
}
// already exists in properties, then append this
if ( propName in currentNode.properties ) {
// console.log( "duped entry found\nkey: " + propName + "\nvalue: " + propValue );
if ( Array.isArray( currentNode.properties[ propName ] ) ) {
currentNode.properties[ propName ].push( propValue );
} else {
currentNode.properties[ propName ] += propValue;
}
} else {
// console.log( propName + ": " + propValue );
if ( Array.isArray( currentNode.properties[ propName ] ) ) {
currentNode.properties[ propName ].push( propValue );
} else {
currentNode.properties[ propName ] = propValue;
}
}
this.setCurrentProp( currentNode.properties, propName );
},
// TODO:
parseNodePropertyContinued: function ( line ) {
this.currentProp[ this.currentPropName ] += line;
},
parseNodeSpecialProperty: function ( line, propName, propValue ) {
// split this
// P: "Lcl Scaling", "Lcl Scaling", "", "A",1,1,1
// into array like below
// ["Lcl Scaling", "Lcl Scaling", "", "A", "1,1,1" ]
var props = propValue.split( '",' ).map( function ( element ) {
return element.trim().replace( /^\"/, '' ).replace( /\s/, '_' );
} );
var innerPropName = props[ 0 ];
var innerPropType1 = props[ 1 ];
var innerPropType2 = props[ 2 ];
var innerPropFlag = props[ 3 ];
var innerPropValue = props[ 4 ];
/*
if ( innerPropValue === undefined ) {
innerPropValue = props[3];
}
*/
// cast value in its type
switch ( innerPropType1 ) {
case "int":
innerPropValue = parseInt( innerPropValue );
break;
case "double":
innerPropValue = parseFloat( innerPropValue );
break;
case "ColorRGB":
case "Vector3D":
var tmp = innerPropValue.split( ',' );
innerPropValue = new THREE.Vector3( tmp[ 0 ], tmp[ 1 ], tmp[ 2 ] );
break;
}
// CAUTION: these props must append to parent's parent
this.getPrevNode().properties[ innerPropName ] = {
'type': innerPropType1,
'type2': innerPropType2,
'flag': innerPropFlag,
'value': innerPropValue
};
this.setCurrentProp( this.getPrevNode().properties, innerPropName );
},
nodeEnd: function () {
this.popStack();
},
/* ---------------------------------------------------------------- */
/* util */
isFlattenNode: function ( node ) {
return ( 'subNodes' in node && 'properties' in node ) ? true : false;
}
} );
function FBXTree() {}
Object.assign( FBXTree.prototype, {
add: function ( key, val ) {
this[ key ] = val;
},
searchConnectionParent: function ( id ) {
if ( this.__cache_search_connection_parent === undefined ) {
this.__cache_search_connection_parent = [];
}
if ( this.__cache_search_connection_parent[ id ] !== undefined ) {
return this.__cache_search_connection_parent[ id ];
} else {
this.__cache_search_connection_parent[ id ] = [];
}
var conns = this.Connections.properties.connections;
var results = [];
for ( var i = 0; i < conns.length; ++ i ) {
if ( conns[ i ][ 0 ] == id ) {
// 0 means scene root
var res = conns[ i ][ 1 ] === 0 ? - 1 : conns[ i ][ 1 ];
results.push( res );
}
}
if ( results.length > 0 ) {
this.__cache_search_connection_parent[ id ] = this.__cache_search_connection_parent[ id ].concat( results );
return results;
} else {
this.__cache_search_connection_parent[ id ] = [ - 1 ];
return [ - 1 ];
}
},
searchConnectionChildren: function ( id ) {
if ( this.__cache_search_connection_children === undefined ) {
this.__cache_search_connection_children = [];
}
if ( this.__cache_search_connection_children[ id ] !== undefined ) {
return this.__cache_search_connection_children[ id ];
} else {
this.__cache_search_connection_children[ id ] = [];
}
var conns = this.Connections.properties.connections;
var res = [];
for ( var i = 0; i < conns.length; ++ i ) {
if ( conns[ i ][ 1 ] == id ) {
// 0 means scene root
res.push( conns[ i ][ 0 ] === 0 ? - 1 : conns[ i ][ 0 ] );
// there may be more than one child, so search to the end
}
}
if ( res.length > 0 ) {
this.__cache_search_connection_children[ id ] = this.__cache_search_connection_children[ id ].concat( res );
return res;
} else {
this.__cache_search_connection_children[ id ] = [ ];
return [ ];
}
},
searchConnectionType: function ( id, to ) {
var key = id + ',' + to; // TODO: to hash
if ( this.__cache_search_connection_type === undefined ) {
this.__cache_search_connection_type = {};
}
if ( this.__cache_search_connection_type[ key ] !== undefined ) {
return this.__cache_search_connection_type[ key ];
} else {
this.__cache_search_connection_type[ key ] = '';
}
var conns = this.Connections.properties.connections;
for ( var i = 0; i < conns.length; ++ i ) {
if ( conns[ i ][ 0 ] == id && conns[ i ][ 1 ] == to ) {
// 0 means scene root
this.__cache_search_connection_type[ key ] = conns[ i ][ 2 ];
return conns[ i ][ 2 ];
}
}
this.__cache_search_connection_type[ key ] = null;
return null;
}
} );
/**
* @returns {boolean}
*/
function isFbxFormatASCII( text ) {
var CORRECT = [ 'K', 'a', 'y', 'd', 'a', 'r', 'a', '\\', 'F', 'B', 'X', '\\', 'B', 'i', 'n', 'a', 'r', 'y', '\\', '\\' ];
var cursor = 0;
var read = function ( offset ) {
var result = text[ offset - 1 ];
text = text.slice( cursor + offset );
cursor ++;
return result;
};
for ( var i = 0; i < CORRECT.length; ++ i ) {
var num = read( 1 );
if ( num == CORRECT[ i ] ) {
return false;
}
}
return true;
}
/**
* @returns {number}
*/
function getFbxVersion( text ) {
var versionRegExp = /FBXVersion: (\d+)/;
var match = text.match( versionRegExp );
if ( match ) {
var version = parseInt( match[ 1 ] );
return version;
}
throw new Error( 'FBXLoader: Cannot find the version number for the file given.' );
}
/**
* Converts FBX ticks into real time seconds.
* @param {number} time - FBX tick timestamp to convert.
 * @returns {number} - Time in real-world seconds.
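 * @example
 * // 46186158000 ticks (the FBX ticks-per-second constant) equal one second:
 * // ConvertFBXTimeToSeconds( 46186158000 ) === 1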
*/
function ConvertFBXTimeToSeconds( time ) {
// Constant is FBX ticks per second.
return time / 46186158000;
}
/**
* Parses comma separated list of float numbers and returns them in an array.
* @example
* // Returns [ 5.6, 9.4, 2.5, 1.4 ]
* parseFloatArray( "5.6,9.4,2.5,1.4" )
* @returns {number[]}
*/
function parseFloatArray( floatString ) {
return floatString.split( ',' ).map( function ( stringValue ) {
return parseFloat( stringValue );
} );
}
/**
* Parses comma separated list of int numbers and returns them in an array.
* @example
* // Returns [ 5, 8, 2, 3 ]
 * parseIntArray( "5,8,2,3" )
* @returns {number[]}
*/
function parseIntArray( intString ) {
return intString.split( ',' ).map( function ( stringValue ) {
return parseInt( stringValue );
} );
}
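/**
 * Parses comma separated list of 16 float numbers into a THREE.Matrix4.
 * @returns {THREE.Matrix4}
 */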
function parseMatrixArray( floatString ) {
return new THREE.Matrix4().fromArray( parseFloatArray( floatString ) );
}
/**
* Converts number from degrees into radians.
* @param {number} value
* @returns {number}
*/
function degreeToRadian( value ) {
return value * Math.PI / 180;
}
} )();<|fim▁end|> | |
<|file_name|>eslint.js<|end_file_name|><|fim▁begin|>/**
 * DOCUMENTATION
* ----------------------------------------------------------------------------
* Each key in the .eslintrc corresponds to a page in the rules directory.
* [Rules on eslint.org]{@link http://eslint.org/docs/rules/}
*
 * The rest of the options can be found here:
 * [ESLint Options]{@link http://eslint.org/docs/user-guide/configuring}
*/
import project from './project';
// Read the eslintrc into JSON
let config = project.getJSONConfig('.eslintrc');
/**
* Add new rules to the linter
*/<|fim▁hole|> * Since this tool will be used on linting production ready code
 * let's make it an error to have console statements in the code.
*/
'no-console': 2
});
export default config;<|fim▁end|> | Object.assign(config.rules, {
/** |
<|file_name|>calibration.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import numpy as np
import scipy.stats, scipy.optimize
import acq4.pyqtgraph as pg
class StageCalibration(object):
def __init__(self, stage):
self.stage = stage
self.framedelay = None
def calibrate(self, camera):
import imreg_dft # FFT image registration by Chris Gohlke; available via pip
n = 300
dx = 10e-6
self.move = None
self.camera = camera
self.offsets = np.empty((n, 2))
self.frames = []
self.index = 0
# current stage position
pos = self.stage.getPosition()
# where to move on each update
self.positions = np.zeros((n, 2))
self.positions[:,0] = pos[0] + np.arange(n) * dx
self.positions[:,1] = pos[1]
camera.sigNewFrame.connect(self.newFrame)
def newFrame(self, frame):
try:
if self.move is not None and not self.move.isDone():
# stage is still moving; discard frame
return
if self.framedelay is None:
# stage has stopped; discard 2 more frames to be sure
# we get the right image.
self.framedelay = pg.ptime.time() + 1./frame.info()['fps']
elif self.framedelay < frame.info()['time']:
# now we are ready to keep this frame.
self.framedelay = None
self.processFrame(frame)
except Exception:
pg.disconnect(self.camera.sigNewFrame, self.newFrame)
raise
def processFrame(self, frame):
self.frames.append(frame)
index = self.index
# update index for next iteration
self.index += 1
# decide whether to move the stage
finished = self.index >= self.positions.shape[0]
if not finished:
self.move = self.stage.moveTo(self.positions[self.index], 'slow')
# calculate offset (while stage moves to next location)
if index == 0:
offset = (0, 0)
else:
compareIndex = max(0, index-10)
offset, _ = imreg_dft.translation(frame.getImage(), self.frames[compareIndex].getImage())
px = self.camera.getPixelSize()
offset = self.offsets[compareIndex] + offset.astype(float) * [px.x(), px.y()]
self.offsets[index] = offset
<|fim▁hole|> pg.disconnect(self.camera.sigNewFrame, self.newFrame)
self.analyze()
def analyze(self):
# frames = []
# for frame in self.frames:
# frames.append(frame.getImage()[np.newaxis, ...])
# self.frameArray = np.concatenate(frames, axis=0)
# self.imageView = pg.image(self.frameArray)
# linear regression to determine scale between stage steps and camera microns
x = ((self.positions - self.positions[0])**2).sum(axis=1)**0.5
y = (self.offsets**2).sum(axis=1)**0.5
slope, yint, r, p, stdev = scipy.stats.linregress(x, y)
# subtract linear approximation to get residual error
y1 = x * slope + yint
self.xvals = x
self.error = y - y1
self.errorPlot = pg.plot(x, self.error, title='X axis error (slope = %0.2f um/step)' % (slope*1e6), labels={'left': ('Error', 'm'), 'bottom': ('position', 'steps')})
# fit residual to combination of sine waves
def fn(p, x):
return (p[2] * np.sin((x + p[0]) * 1 * p[1]) +
p[3] * np.sin((x + p[0]) * 2 * p[1]) +
p[4] * np.sin((x + p[0]) * 3 * p[1]) +
p[5] * np.sin((x + p[0]) * 4 * p[1]))
def erf(p, x, y):
return fn(p, x) - y
f0 = 6 * np.pi / x.max() # guess there are 3 cycles in the data
amp = self.error.max()
self.fit = scipy.optimize.leastsq(erf, [0, f0, amp, amp, amp, amp], (x, self.error))[0]
self.errorPlot.plot(x, fn(self.fit, x), pen='g')<|fim▁end|> | # finish up if there are no more positions
if finished: |
<|file_name|>MerchantSideBar.tsx<|end_file_name|><|fim▁begin|>/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import CentralisedContainer from 'components/common/CentralisedContainer';
import MerchantProfile from 'components/common/MerchantProfile';
import RoundedButton from 'components/common/RoundedButton';
import Row from 'muicss/lib/react/row';
import {useHistory, useLocation} from 'react-router-dom';
import styled from 'styled-components';
const SideBarContainer = styled(CentralisedContainer)`
min-height: 100vh;
height: 100%;
width: 20%;
box-shadow: 5px 0 10px var(--light-gray);
margin: 0;
<|fim▁hole|>`;
const StyledRoundedButton = styled(RoundedButton)`
font-size: 0.85em;
`;
/**
* This is a side bar for merchant pages. It shows merchant's profile and buttons
* to view merchant's ongoing and past listings.
*/
const MerchantSideBar: React.FC = () => {
const history = useHistory();
const {pathname, hash} = useLocation();
return (
<SideBarContainer>
<MerchantProfile />
<CentralisedContainer>
<Row>
<StyledRoundedButton
onClick={() => history.push('/merchant/home')}
disabled={
pathname === '/merchant/home' && hash !== '#past-listings'
}
>
View Ongoing Listings
</StyledRoundedButton>
</Row>
<Row>
<StyledRoundedButton
onClick={() => history.push('/merchant/home#past-listings')}
disabled={
pathname === '/merchant/home' && hash === '#past-listings'
}
>
View Past Listings
</StyledRoundedButton>
</Row>
</CentralisedContainer>
</SideBarContainer>
);
};
export default MerchantSideBar;<|fim▁end|> | justify-content: space-between; |
<|file_name|>session_persistence.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import pyrax
pyrax.set_setting("identity_type", "rackspace")
creds_file = os.path.expanduser("~/.rackspace_cloud_credentials")
pyrax.set_credential_file(creds_file)
clb = pyrax.cloud_loadbalancers
try:
lb = clb.list()[0]
except IndexError:
print("You do not have any load balancers yet.")
print("Please create one and then re-run this script.")
sys.exit()
print("Load Balancer:", lb)
orig = lb.session_persistence
print("Current setting of session persistence:", orig or '""')
print()
if orig:
print("Clearing...")
lb.session_persistence = ""
else:
print("Setting persistence to HTTP_COOKIE...")
lb.session_persistence = "HTTP_COOKIE"
print("New setting of session persistence:", lb.session_persistence or '""')<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# |
<|file_name|>webrepl.py<|end_file_name|><|fim▁begin|># This module should be imported from REPL, not run from command line.
import socket
import uos
import network
import uwebsocket
import websocket_helper
import _webrepl
listen_s = None
client_s = None
def setup_conn(port, accept_handler):<|fim▁hole|>
ai = socket.getaddrinfo("0.0.0.0", port)
addr = ai[0][4]
listen_s.bind(addr)
listen_s.listen(1)
if accept_handler:
listen_s.setsockopt(socket.SOL_SOCKET, 20, accept_handler)
for i in (network.AP_IF, network.STA_IF):
iface = network.WLAN(i)
if iface.active():
print("WebREPL daemon started on ws://%s:%d" % (iface.ifconfig()[0], port))
return listen_s
def accept_conn(listen_sock):
global client_s
cl, remote_addr = listen_sock.accept()
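    # Probe whether a terminal is already attached: detach the current
    # dupterm stream and immediately restore it, keeping a reference to it.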
prev = uos.dupterm(None)
uos.dupterm(prev)
if prev:
print("\nConcurrent WebREPL connection from", remote_addr, "rejected")
cl.close()
return
print("\nWebREPL connection from:", remote_addr)
client_s = cl
websocket_helper.server_handshake(cl)
ws = uwebsocket.websocket(cl, True)
ws = _webrepl._webrepl(ws)
cl.setblocking(False)
# notify REPL on socket incoming data (ESP32/ESP8266-only)
if hasattr(uos, 'dupterm_notify'):
cl.setsockopt(socket.SOL_SOCKET, 20, uos.dupterm_notify)
uos.dupterm(ws)
def stop():
global listen_s, client_s
uos.dupterm(None)
if client_s:
client_s.close()
if listen_s:
listen_s.close()
def start(port=8266, password=None):
stop()
if password is None:
try:
import webrepl_cfg
_webrepl.password(webrepl_cfg.PASS)
setup_conn(port, accept_conn)
print("Started webrepl in normal mode")
except:
print("WebREPL is not configured, run 'import webrepl_setup'")
else:
_webrepl.password(password)
setup_conn(port, accept_conn)
print("Started webrepl in manual override mode")
def start_foreground(port=8266):
stop()
s = setup_conn(port, None)
accept_conn(s)<|fim▁end|> | global listen_s
listen_s = socket.socket()
listen_s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) |
<|file_name|>navigator.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::utils::{WrapperCache, BindingObject, CacheableWrapper};
use dom::bindings::utils::{DOMString, ErrorResult, str, null_string};
use dom::bindings::codegen::NavigatorBinding;
use script_task::{page_from_context};
use js::jsapi::{JSContext, JSObject};
use std::cast;
pub struct Navigator {
wrapper: WrapperCache
}
impl Navigator {
pub fn new() -> @mut Navigator {
@mut Navigator {
wrapper: WrapperCache::new()
}
}
pub fn DoNotTrack(&self) -> DOMString {
str(~"unspecified")
}
pub fn Vendor(&self) -> DOMString {
str(~"") // Like Gecko
}
pub fn VendorSub(&self) -> DOMString {
str(~"") // Like Gecko
}
pub fn Product(&self) -> DOMString {
str(~"Gecko") // This is supposed to be constant, see webidl.
}
pub fn ProductSub(&self) -> DOMString {
null_string
}
pub fn CookieEnabled(&self) -> bool {
false
}
pub fn GetBuildID(&self, _rv: &mut ErrorResult) -> DOMString {
null_string
}
pub fn JavaEnabled(&self, _rv: &mut ErrorResult) -> bool {
false
}
<|fim▁hole|>
pub fn AppName(&self) -> DOMString {
str(~"Netscape") // Like Gecko/Webkit
}
pub fn GetAppCodeName(&self, _rv: &mut ErrorResult) -> DOMString {
str(~"Mozilla") // Like Gecko/Webkit
}
pub fn GetAppVersion(&self, _rv: &mut ErrorResult) -> DOMString {
null_string
}
pub fn GetPlatform(&self, _rv: &mut ErrorResult) -> DOMString {
null_string
}
pub fn GetUserAgent(&self, _rv: &mut ErrorResult) -> DOMString {
null_string
}
pub fn GetLanguage(&self) -> DOMString {
null_string
}
pub fn OnLine(&self) -> bool {
true
}
}
impl CacheableWrapper for Navigator {
fn get_wrappercache(&mut self) -> &mut WrapperCache {
unsafe { cast::transmute(&self.wrapper) }
}
fn wrap_object_shared(@mut self, cx: *JSContext, scope: *JSObject) -> *JSObject {
let mut unused = false;
NavigatorBinding::Wrap(cx, scope, self, &mut unused)
}
}
impl BindingObject for Navigator {
fn GetParentObject(&self, cx: *JSContext) -> Option<@mut CacheableWrapper> {
let page = page_from_context(cx);
unsafe {
Some((*page).frame.get_ref().window as @mut CacheableWrapper)
}
}
}<|fim▁end|> | pub fn TaintEnabled(&self) -> bool {
false
} |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import * as PaperUtil from './PaperUtil';
export { PaperUtil };<|fim▁hole|><|fim▁end|> | export { PivotType } from './PivotType';
export { SnapUtil } from './snap'; |
<|file_name|>project_plugins.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from rest_framework.response import Response
from sentry.plugins.base import plugins
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize<|fim▁hole|>class ProjectPluginsEndpoint(ProjectEndpoint):
def get(self, request, project):
context = serialize(
[plugin for plugin in plugins.configurable_for_project(project, version=None)],
request.user,
PluginSerializer(project),
)
return Response(context)<|fim▁end|> | from sentry.api.serializers.models.plugin import PluginSerializer
|
<|file_name|>uart.rs<|end_file_name|><|fim▁begin|>use core::fmt;
use core::str::StrExt;
use core::result::Result;<|fim▁hole|>pub trait Uart<W : HW> {
fn put(&self, &mut W, ch : u8);
}
pub trait UartWriter : fmt::Write { }
pub struct DummyUartWriter;
impl UartWriter for DummyUartWriter { }
impl fmt::Write for DummyUartWriter {
fn write_str(&mut self, _: &str) -> fmt::Result {
Result::Ok(())
}
}
pub struct BlockingUartWriter<H : 'static+HW> {
uart : &'static Uart<H>,
hw : &'static mut H,
}
impl<H : HW> UartWriter for BlockingUartWriter<H> { }
impl<H> BlockingUartWriter<H>
where H : HW {
pub fn new(hw : &'static mut H, uart : &'static Uart<H>) -> BlockingUartWriter<H> {
BlockingUartWriter { uart: uart, hw: hw }
}
}
impl<H> fmt::Write for BlockingUartWriter<H>
where H : HW {
fn write_str(&mut self, s: &str) -> fmt::Result {
for ch in s.bytes() {
self.uart.put(self.hw, ch);
}
Result::Ok(())
}
}<|fim▁end|> |
use hw::HW;
|
<|file_name|>plshadow.rs<|end_file_name|><|fim▁begin|>use winapi::*;
use create_device;
use dxsafe::*;<|fim▁hole|>use dxsafe::structwrappers::*;
use dxsems::VertexFormat;
use std::marker::PhantomData;
// Point Light Shadow
// The name isn't quite correct: this module just fills a depth cubemap.
pub struct PLShadow<T: VertexFormat> {
pub pso: D3D12PipelineState,
pub root_sig: D3D12RootSignature,
_luid: Luid,
_phd: PhantomData<T>,
}
impl<T: VertexFormat> PLShadow<T> {
pub fn new(dev: &D3D12Device) -> HResult<PLShadow<T>> {
let mut vshader_bc = vec![];
let mut gshader_bc = vec![];
let mut pshader_bc = vec![];
let mut rsig_bc = vec![];
trace!("Compiling 'plshadow.hlsl'...");
match create_device::compile_shaders("plshadow.hlsl",&mut[
("VSMain", "vs_5_0", &mut vshader_bc),
("GSMain", "gs_5_0", &mut gshader_bc),
("PSMain", "ps_5_0", &mut pshader_bc),
("RSD", "rootsig_1_0", &mut rsig_bc),
], D3DCOMPILE_OPTIMIZATION_LEVEL3) {
Err(err) => {
error!("Error compiling 'plshadow.hlsl': {}", err);
return Err(E_FAIL);
},
Ok(_) => {},
};
trace!("Done");
trace!("Root signature creation");
let root_sig = try!(dev.create_root_signature(0, &rsig_bc[..]));
try!(root_sig.set_name("plshadow RSD"));
let input_elts_desc = T::generate(0);
// pso_desc contains pointers to local data, so I mustn't pass it around, but I can. Unsafe.
let pso_desc = D3D12_GRAPHICS_PIPELINE_STATE_DESC {
pRootSignature: root_sig.iptr() as *mut _,
VS: ShaderBytecode::from_vec(&vshader_bc).get(),
GS: ShaderBytecode::from_vec(&gshader_bc).get(),
PS: ShaderBytecode::from_vec(&pshader_bc).get(),
RasterizerState: D3D12_RASTERIZER_DESC {
CullMode: D3D12_CULL_MODE_NONE,
DepthBias: 550,
SlopeScaledDepthBias: 1.5,
..rasterizer_desc_default()
},
InputLayout: D3D12_INPUT_LAYOUT_DESC {
pInputElementDescs: input_elts_desc.as_ptr(),
NumElements: input_elts_desc.len() as u32,
},
PrimitiveTopologyType: D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,
NumRenderTargets: 0, // Pixel shader writes depth buffer only
DSVFormat: DXGI_FORMAT_D32_FLOAT,
Flags: D3D12_PIPELINE_STATE_FLAG_NONE,
// Take other fields from default gpsd
..graphics_pipeline_state_desc_default()
};
//pso_desc.RTVFormats[0] = DXGI_FORMAT_D32_FLOAT;
trace!("Graphics pipeline state creation");
let pso = try!(dev.create_graphics_pipeline_state(&pso_desc));
Ok(PLShadow::<T> {
pso: pso,
root_sig: root_sig,
_luid: Luid(dev.get_adapter_luid()),
_phd: PhantomData,
})
}
}<|fim▁end|> | |
<|file_name|>Schuler.java<|end_file_name|><|fim▁begin|>public class Schuler
{<|fim▁hole|> private int test2;
private int test3;
public Schuler()
{
name = "";
test1 = 0;
test2 = 0;
test3 = 0;
}
public void setName(String nm)
{
name = nm;
}
public String getName()
{
return name;
}
public void setPunkte(int i, int punkte)
{
if (i==1) test1=punkte;
if (i==2) test2=punkte;
if (i==3) test3=punkte;
}
public int getPunkte(int i)
{
if (i==1) return test1;
if (i==2) return test2;
if (i==3) return test3;
return -1;
}
public int getAverage()
{
return (int)Math.round((test1+test2+test3)/3.0);
}
public String toString()
{
return "Name: "+name+"\nTest 1: "+test1+"\nTest 2: "+test2+"\nTest 3: "+test3+"\nAverage: " +getAverage();
}
}<|fim▁end|> | private String name;
private int test1; |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>import json
import enum
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib import request
class APINonSingle:
def __init__(self, api_key, agent = "webnews-python", webnews_base = "https://webnews.csh.rit.edu/"):
self.agent = agent
self.api_key = api_key
self.webnews_base = webnews_base
class Actions(enum.Enum):
user = "user"
unread_counts = "unread_counts"
newsgroups = "newsgroups"<|fim▁hole|>
def POST(self, action, args={}):
if type(action) == API.Actions:
action = action.value
args['api_key'] = self.api_key
args['api_agent'] = self.agent
args = urlencode(args).encode('utf-8')
req = request.Request(self.webnews_base+ action)
req.add_header('Accept', 'application/json')
resp = urlopen(req, args).read().decode('utf-8')
return json.loads(resp)
def GET(self, action, args={}):
if type(action) == API.Actions:
action = action.value
args['api_key'] = self.api_key
args['api_agent'] = self.agent
args = urlencode(args)
req = request.Request(self.webnews_base + action + '?' + args)
req.add_header('Accept', 'application/json')
resp = urlopen(req).read().decode('utf-8')
return json.loads(resp)
def user(self):
return self.GET(API.Actions.user)
def unread_counts(self):
return self.GET(API.Actions.unread_counts)
def newsgroups(self):
return self.GET(API.Actions.newsgroups)
def newsgroups_search(self, newsgroup):
return self.GET("newsgroups/" + newsgroup)
def newsgroup_posts(self, newsgroup, params={}):
return self.GET(newsgroup + '/index', params)
def search(self, params = {}):
return self.GET(API.Actions.search, params)
def post_specifics(self, newsgroup, index, params={}):
return self.GET(str(newsgroup)+"/"+str(index), params)
def compose(self, newsgroup, subject, body, params={}):
params['subject'] = subject
params['body'] = body
params['newsgroup'] = newsgroup
return self.POST(API.Actions.compose, params)
"""
Wrap the APINonSingle object so that only a single object
will exist for each API key (a per-key singleton), avoiding
redundant instances.
"""
class API(APINonSingle):
_instance = {}
def __new__(cls, *args, **kwargs):
if not args[0] in cls._instance:
cls._instance[args[0]] = APINonSingle(*args, **kwargs)
return cls._instance[args[0]]<|fim▁end|> | search = "search"
compose = "compose" |
<|file_name|>Sorveteria Tropical,py.py<|end_file_name|><|fim▁begin|>sabor = input()
quantidade = int(input())
if sabor.lower() == "morango" or sabor.lower() == "cereja":
total = quantidade*4.50
elif sabor.lower() == "damasco" or sabor.lower() == "siriguela":<|fim▁hole|>
print("%.2f"%total)
if quantidade > 2:
print ("COM CALDA")
else:
print("SEM CALDA")<|fim▁end|> | total = quantidade*3.80
else:
total = quantidade*2.75 |
<|file_name|>win32_test.cpp<|end_file_name|><|fim▁begin|>// boost win32_test.cpp -----------------------------------------------------//
// Copyright 2010 Vicente J. Botet Escriba
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
// See http://www.boost.org/libs/chrono for documentation.
#include <boost/chrono/config.hpp>
#include <boost/detail/lightweight_test.hpp>
#if defined(BOOST_CHRONO_WINDOWS_API) || defined(__CYGWIN__)
#include <boost/chrono/detail/static_assert.hpp>
#if !defined(BOOST_NO_CXX11_STATIC_ASSERT)
#define NOTHING ""
#endif
#include <boost/type_traits.hpp>
#include <boost/typeof/typeof.hpp><|fim▁hole|>#include <boost/detail/win/time.hpp>
#include <windows.h>
void test() {
{
boost::detail::win32::LARGE_INTEGER_ a;
LARGE_INTEGER b;
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::LARGE_INTEGER_)==sizeof(LARGE_INTEGER)
), NOTHING, (boost::detail::win32::LARGE_INTEGER_, LARGE_INTEGER));
BOOST_TEST((
sizeof(a.QuadPart)==sizeof(b.QuadPart)
));
BOOST_CHRONO_STATIC_ASSERT((
offsetof(boost::detail::win32::LARGE_INTEGER_, QuadPart)==offsetof(LARGE_INTEGER, QuadPart)
), NOTHING, (boost::detail::win32::LARGE_INTEGER_, LARGE_INTEGER));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<
BOOST_TYPEOF(a.QuadPart),
BOOST_TYPEOF(b.QuadPart)
>::value
), NOTHING, (boost::detail::win32::LARGE_INTEGER_, LARGE_INTEGER));
}
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::BOOL_)==sizeof(BOOL)
), NOTHING, (boost::detail::win32::BOOL_, BOOL));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::BOOL_,BOOL>::value
), NOTHING, (boost::detail::win32::BOOL_, BOOL));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::DWORD_)==sizeof(DWORD)
), NOTHING, (boost::detail::win32::DWORD_, DWORD));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::DWORD_,DWORD>::value
), NOTHING, (boost::detail::win32::DWORD_, DWORD));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::HANDLE_)==sizeof(HANDLE)
), NOTHING, (boost::detail::win32::HANDLE_, HANDLE));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::HANDLE_,HANDLE>::value
), NOTHING, (boost::detail::win32::HANDLE_, HANDLE));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::LONG_)==sizeof(LONG)
), NOTHING, (boost::detail::win32::LONG_, LONG));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::LONG_,LONG>::value
), NOTHING, (boost::detail::win32::LONG_, LONG));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::LONGLONG_)==sizeof(LONGLONG)
), NOTHING, (boost::detail::win32::LONGLONG_, LONGLONG));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::LONGLONG_,LONGLONG>::value
), NOTHING, (boost::detail::win32::LONGLONG_, LONGLONG));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::ULONG_PTR_)==sizeof(ULONG_PTR)
), NOTHING, (boost::detail::win32::ULONG_PTR_, ULONG_PTR));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<boost::detail::win32::ULONG_PTR_,ULONG_PTR>::value
), NOTHING, (boost::detail::win32::ULONG_PTR_, ULONG_PTR));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::PLARGE_INTEGER_)==sizeof(PLARGE_INTEGER)
), NOTHING, (boost::detail::win32::PLARGE_INTEGER_, PLARGE_INTEGER));
//~ BOOST_CHRONO_STATIC_ASSERT((
//~ boost::is_same<boost::detail::win32::PLARGE_INTEGER_,PLARGE_INTEGER>::value
//~ ), NOTHING, (boost::detail::win32::PLARGE_INTEGER_, PLARGE_INTEGER));
{
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::FILETIME_)==sizeof(FILETIME)
), NOTHING, (boost::detail::win32::FILETIME_, FILETIME));
BOOST_CHRONO_STATIC_ASSERT((
sizeof(boost::detail::win32::PFILETIME_)==sizeof(PFILETIME)
), NOTHING, (boost::detail::win32::PFILETIME_, PFILETIME));
boost::detail::win32::FILETIME_ a;
FILETIME b;
BOOST_TEST((
sizeof(a.dwLowDateTime)==sizeof(b.dwLowDateTime)
));
BOOST_TEST((
sizeof(a.dwHighDateTime)==sizeof(b.dwHighDateTime)
));
BOOST_CHRONO_STATIC_ASSERT((
offsetof(boost::detail::win32::FILETIME_, dwLowDateTime)==offsetof(FILETIME, dwLowDateTime)
), NOTHING, (boost::detail::win32::FILETIME_, FILETIME));
BOOST_CHRONO_STATIC_ASSERT((
offsetof(boost::detail::win32::FILETIME_, dwHighDateTime)==offsetof(FILETIME, dwHighDateTime)
), NOTHING, (boost::detail::win32::FILETIME_, FILETIME));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<
BOOST_TYPEOF(a.dwLowDateTime),
BOOST_TYPEOF(b.dwLowDateTime)
>::value
), NOTHING, (boost::detail::win32::FILETIME_, FILETIME));
BOOST_CHRONO_STATIC_ASSERT((
boost::is_same<
BOOST_TYPEOF(a.dwHighDateTime),
BOOST_TYPEOF(b.dwHighDateTime)
>::value
), NOTHING, (boost::detail::win32::FILETIME_, FILETIME));
}
// BOOST_CHRONO_STATIC_ASSERT((
// GetLastError==boost::detail::win32::::GetLastError
// ), NOTHING, ());
}
#else
void test() {
}
#endif
int main( )
{
test();
return boost::report_errors();
}<|fim▁end|> | #undef BOOST_USE_WINDOWS_H
#include <boost/detail/win/basic_types.hpp> |
<|file_name|>17.py<|end_file_name|><|fim▁begin|># Created by PyCharm Pro Edition
# User: Kaushik Talukdar
# Date: 30-03-17
# Time: 11:35 PM
<|fim▁hole|>
# tuple can't be modified but the variable holding a tuple can be assigned new values
# basically changing the tuple
cars = ["bmw", "rollsroyce", "audi", "ferrari"]
print(cars)
cars = ["bmw", "koenigsegg", "audi", "ferrari"]
print(cars)<|fim▁end|> | |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|># topics.serializers
# Serializers for the topic and voting models.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Wed Sep 09 09:34:46 2015 -0400
#
# Copyright (C) 2015 District Data Labs
# For license information, see LICENSE.txt
#<|fim▁hole|>Serializers for the topic and voting models.
"""
##########################################################################
## Imports
##########################################################################
from topics.models import Topic, Vote
from rest_framework import serializers
##########################################################################
## Validators
##########################################################################
class InRange(object):
"""
Validator that specifies a value must be in a particular range
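
    Example (bounds are inclusive): ``InRange(-1, 1)(0)`` passes silently,
    while ``InRange(-1, 1)(2)`` raises a ``ValidationError``.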
"""
def __init__(self, low, high):
self.low = low
self.high = high
def __call__(self, value):
if value > self.high or value < self.low:
raise serializers.ValidationError(
"value must be between %d and %d (inclusive)" % (self.low, self.high)
)
##########################################################################
## Serializers
##########################################################################
class TopicSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializers topics and their weights.
"""
class Meta:
model = Topic
fields = ('url', 'title', 'vote_total',)
extra_kwargs = {
'url': {'view_name': 'api:topic-detail',},
}
class VotingSerializer(serializers.Serializer):
"""
Serializes incoming votes.
Note: There is no model associated with this serializer
"""
vote = serializers.IntegerField(validators=[InRange(-1,1)])
display = serializers.SerializerMethodField('get_vote_display')
def get_vote_display(self, obj):
displays = {
-1: "downvote",
0: "novote",
1: "upvote",
}
return displays[obj['vote']]<|fim▁end|> | # ID: serializers.py [] [email protected] $
""" |
<|file_name|>daclips.py<|end_file_name|><|fim▁begin|>"""
urlresolver XBMC Addon
Copyright (C) 2011 t0mm0<|fim▁hole|> This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
import urllib2, os, re
from urlresolver import common
#SET ERROR_LOGO# THANKS TO VOINAGE, BSTRDMKR, ELDORADO
error_logo = os.path.join(common.addon_path, 'resources', 'images', 'redx.png')
class DaclipsResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "daclips"
def __init__(self):
p = self.get_setting('priority') or 100
self.priority = int(p)
self.net = Net()
#e.g. http://daclips.com/vb80o1esx2eb
self.pattern = 'http://((?:www.)?daclips.(?:in|com))/([0-9a-zA-Z]+)'
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
""" Human Verification """
try:
resp = self.net.http_GET(web_url)
html = resp.content
r = re.findall(r'<span class="t" id="head_title">404 - File Not Found</span>',html)
if r:
raise Exception ('File Not Found or removed')
post_url = resp.get_url()
form_values = {}
for i in re.finditer('<input type="hidden" name="(.+?)" value="(.+?)">', html):
form_values[i.group(1)] = i.group(2)
html = self.net.http_POST(post_url, form_data=form_values).content
r = re.search('file: "http(.+?)"', html)
if r:
return "http" + r.group(1)
else:
raise Exception ('Unable to resolve Daclips link')
except urllib2.URLError, e:
common.addon.log_error('daclips: got http error %d fetching %s' %
(e.code, web_url))
common.addon.show_small_popup('Error','Http error: '+str(e), 5000, error_logo)
return self.unresolvable(code=3, msg=e)
except Exception, e:
common.addon.log_error('**** Daclips Error occured: %s' % e)
common.addon.show_small_popup(title='[B][COLOR white]DACLIPS[/COLOR][/B]', msg='[COLOR red]%s[/COLOR]' % e, delay=5000, image=error_logo)
return self.unresolvable(code=0, msg=e)
def get_url(self, host, media_id):
#return 'http://(daclips|daclips).(in|com)/%s' % (media_id)
return 'http://daclips.in/%s' % (media_id)
def get_host_and_id(self, url):
r = re.search(self.pattern, url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
if self.get_setting('enabled') == 'false': return False
return re.match(self.pattern, url) or self.name in host<|fim▁end|> | |
<|file_name|>DAOPerson.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import java.util.List;
import com.lms.jpa.Person;
public interface DAOPerson {
public boolean personExist(long personId);
public void insertPerson(Person person);
public void updatePerson(Person person);
public void deletePerson(long personId);
public Person fetchPersonInfo(long personId);
public List<Person> fetchAllPerson();
}<|fim▁end|> | package com.lms.dao;
|
<|file_name|>markers.py<|end_file_name|><|fim▁begin|>''' Display a variety of simple scatter marker shapes whose attributes
can be associated with data columns from ``ColumnDataSources``.
The full list of markers built into Bokeh is given below:
* :class:`~bokeh.models.markers.Asterisk`
* :class:`~bokeh.models.markers.Circle`
* :class:`~bokeh.models.markers.CircleCross`
* :class:`~bokeh.models.markers.CircleX`
* :class:`~bokeh.models.markers.Cross`
* :class:`~bokeh.models.markers.Diamond`
* :class:`~bokeh.models.markers.DiamondCross`
* :class:`~bokeh.models.markers.Hex`
* :class:`~bokeh.models.markers.InvertedTriangle`
* :class:`~bokeh.models.markers.Square`
* :class:`~bokeh.models.markers.SquareCross`
* :class:`~bokeh.models.markers.SquareX`
* :class:`~bokeh.models.markers.Triangle`
* :class:`~bokeh.models.markers.X`
Markers are all subclasses of ``Glyph``. Additionally, they all share the
same common interface for the fill and line properties provided by their
base class ``Marker``. Note that a few glyphs (``Asterisk``, ``Cross`` and
``X``) only draw lines. For these, the fill property values are ignored. Also note that
the ``Circle`` glyph has some additional properties such as ``radius`` that
other markers do not.
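
Example (a minimal sketch, assuming the standard ``bokeh.plotting`` API;
the column names are made up):

.. code-block:: python

    from bokeh.models import ColumnDataSource
    from bokeh.plotting import figure, show

    source = ColumnDataSource(data=dict(x=[1, 2, 3], y=[4, 6, 5]))

    p = figure()
    p.circle(x='x', y='y', size=10, fill_color='navy', source=source)
    show(p)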
.. autoclass:: Marker
:members:
'''
from __future__ import absolute_import
from ..core.enums import enumeration
from ..core.has_props import abstract
from ..core.properties import AngleSpec, DistanceSpec, Enum, Include, NumberSpec, ScreenDistanceSpec
from ..core.property_mixins import FillProps, LineProps
from .glyphs import XYGlyph
@abstract
class Marker(XYGlyph):
''' Base class for glyphs that are simple markers with line and
fill properties, located at an (x, y) location with a specified
size.
.. note::
For simplicity, all markers have both line and fill properties
declared, however some markers (`Asterisk`, `Cross`, `X`) only
draw lines. For these markers, the fill values are simply
ignored.
'''
# a canonical order for positional args that can be used for any
# functions derived from this class
_args = ('x', 'y', 'size', 'angle')
x = NumberSpec(help="""
The x-axis coordinates for the center of the markers.
""")
y = NumberSpec(help="""
The y-axis coordinates for the center of the markers.
""")
size = ScreenDistanceSpec(default=4, help="""
The size (diameter) values for the markers in screen space units.
""")
angle = AngleSpec(default=0.0, help="""
The angles to rotate the markers.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the markers.
""")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the markers.
""")
class Asterisk(Marker):
''' Render asterisk '*' markers. '''
__example__ = "examples/reference/models/Asterisk.py"
class Circle(Marker):
''' Render circle markers. '''
__example__ = "examples/reference/models/Circle.py"
# a canonical order for positional args that can be used for any<|fim▁hole|> radius = DistanceSpec(None, help="""
The radius values for circle markers (in "data space" units, by default).
.. note::
Circle markers are slightly unusual in that they support specifying
a radius in addition to a size. Only one of ``radius`` or ``size``
should be given.
.. warning::
Note that ``Circle`` glyphs are always drawn as circles on the screen,
even in cases where the data space aspect ratio is not 1-1. In all
cases where radius values are specified, the "distance" for the radius
is measured along the dimension specified by ``radius_dimension``. If
the aspect ratio is very large or small, the drawn circles may appear
much larger or smaller than expected. See :bokeh-issue:`626` for more
information.
""")
radius_dimension = Enum(enumeration('x', 'y'), help="""
What dimension to measure circle radii along.
When the data space aspect ratio is not 1-1, then the size of the drawn
circles depends on what direction is used to measure the "distance" of
the radius. This property allows that direction to be controlled.
""")
class CircleCross(Marker):
''' Render circle markers with a '+' cross through the center. '''
__example__ = "examples/reference/models/CircleCross.py"
class CircleX(Marker):
''' Render circle markers with an 'X' cross through the center. '''
__example__ = "examples/reference/models/CircleX.py"
class Cross(Marker):
''' Render '+' cross markers. '''
__example__ = "examples/reference/models/Cross.py"
class Diamond(Marker):
''' Render diamond markers. '''
__example__ = "examples/reference/models/Diamond.py"
class DiamondCross(Marker):
''' Render diamond markers with a '+' cross through the center. '''
__example__ = "examples/reference/models/DiamondCross.py"
class Hex(Marker):
''' Render hexagon markers. '''
__example__ = "examples/reference/models/Hex.py"
class InvertedTriangle(Marker):
''' Render upside-down triangle markers. '''
__example__ = "examples/reference/models/InvertedTriangle.py"
class Square(Marker):
''' Render a square marker, optionally rotated. '''
__example__ = "examples/reference/models/Square.py"
class SquareCross(Marker):
''' Render square markers with a '+' cross through the center. '''
__example__ = "examples/reference/models/SquareCross.py"
class SquareX(Marker):
''' Render square markers with an 'X' cross through the center. '''
__example__ = "examples/reference/models/SquareX.py"
class Triangle(Marker):
''' Render triangle markers. '''
__example__ = "examples/reference/models/Triangle.py"
class X(Marker):
''' Render 'X' cross markers. '''
__example__ = "examples/reference/models/X.py"<|fim▁end|> | # functions derived from this class
_args = ('x', 'y')
|
<|file_name|>row-harness.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ComponentHarness, HarnessPredicate} from '@angular/cdk/testing';
import {RowHarnessFilters, CellHarnessFilters} from './table-harness-filters';
import {MatCellHarness, MatHeaderCellHarness, MatFooterCellHarness} from './cell-harness';
/** Text extracted from a table row organized by columns. */
export interface MatRowHarnessColumnsText {
[columnName: string]: string;
}
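// For example (hypothetical column names), `getCellTextByColumnName()` might
// resolve to a value like: `{ name: 'Hydrogen', weight: '1.0079' }`.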
/** Harness for interacting with a standard Angular Material table row. */
export class MatRowHarness extends ComponentHarness {
/** The selector for the host element of a `MatRowHarness` instance. */
static hostSelector = '.mat-row';
/**
* Gets a `HarnessPredicate` that can be used to search for a table row with specific attributes.
* @param options Options for narrowing the search
* @return a `HarnessPredicate` configured with the given options.
*/
static with(options: RowHarnessFilters = {}): HarnessPredicate<MatRowHarness> {
return new HarnessPredicate(MatRowHarness, options);
}
/** Gets a list of `MatCellHarness` for all cells in the row. */
async getCells(filter: CellHarnessFilters = {}): Promise<MatCellHarness[]> {
return this.locatorForAll(MatCellHarness.with(filter))();
}
/** Gets the text of the cells in the row. */
async getCellTextByIndex(filter: CellHarnessFilters = {}): Promise<string[]> {
return getCellTextByIndex(this, filter);
}
/** Gets the text inside the row organized by columns. */
async getCellTextByColumnName(): Promise<MatRowHarnessColumnsText> {
return getCellTextByColumnName(this);
}
}
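// Illustrative usage sketch (assumes a `loader` obtained via
// TestbedHarnessEnvironment.loader(fixture) elsewhere in a spec):
//   const rows = await loader.getAllHarnesses(MatRowHarness);
//   const byColumn = await rows[0].getCellTextByColumnName();
//   // e.g. {name: 'Hydrogen', weight: '1.0079'}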
/** Harness for interacting with a standard Angular Material table header row. */
export class MatHeaderRowHarness extends ComponentHarness {
/** The selector for the host element of a `MatHeaderRowHarness` instance. */
static hostSelector = '.mat-header-row';
/**
* Gets a `HarnessPredicate` that can be used to search for
* a table header row with specific attributes.
* @param options Options for narrowing the search
* @return a `HarnessPredicate` configured with the given options.
*/
static with(options: RowHarnessFilters = {}): HarnessPredicate<MatHeaderRowHarness> {
return new HarnessPredicate(MatHeaderRowHarness, options);
}
/** Gets a list of `MatHeaderCellHarness` for all cells in the row. */
async getCells(filter: CellHarnessFilters = {}): Promise<MatHeaderCellHarness[]> {<|fim▁hole|> return this.locatorForAll(MatHeaderCellHarness.with(filter))();
}
/** Gets the text of the cells in the header row. */
async getCellTextByIndex(filter: CellHarnessFilters = {}): Promise<string[]> {
return getCellTextByIndex(this, filter);
}
/** Gets the text inside the header row organized by columns. */
async getCellTextByColumnName(): Promise<MatRowHarnessColumnsText> {
return getCellTextByColumnName(this);
}
}
/** Harness for interacting with a standard Angular Material table footer row. */
export class MatFooterRowHarness extends ComponentHarness {
/** The selector for the host element of a `MatFooterRowHarness` instance. */
static hostSelector = '.mat-footer-row';
/**
* Gets a `HarnessPredicate` that can be used to search for
* a table footer row with specific attributes.
* @param options Options for narrowing the search
* @return a `HarnessPredicate` configured with the given options.
*/
static with(options: RowHarnessFilters = {}): HarnessPredicate<MatFooterRowHarness> {
return new HarnessPredicate(MatFooterRowHarness, options);
}
/** Gets a list of `MatFooterCellHarness` for all cells in the row. */
async getCells(filter: CellHarnessFilters = {}): Promise<MatFooterCellHarness[]> {
return this.locatorForAll(MatFooterCellHarness.with(filter))();
}
/** Gets the text of the cells in the footer row. */
async getCellTextByIndex(filter: CellHarnessFilters = {}): Promise<string[]> {
return getCellTextByIndex(this, filter);
}
/** Gets the text inside the footer row organized by columns. */
async getCellTextByColumnName(): Promise<MatRowHarnessColumnsText> {
return getCellTextByColumnName(this);
}
}
async function getCellTextByIndex(harness: {
getCells: (filter?: CellHarnessFilters) => Promise<MatCellHarness[]>
}, filter: CellHarnessFilters): Promise<string[]> {
const cells = await harness.getCells(filter);
return Promise.all(cells.map(cell => cell.getText()));
}
async function getCellTextByColumnName(harness: {
getCells: () => Promise<MatCellHarness[]>
}): Promise<MatRowHarnessColumnsText> {
const output: MatRowHarnessColumnsText = {};
const cells = await harness.getCells();
const cellsData = await Promise.all(cells.map(cell => {
return Promise.all([cell.getColumnName(), cell.getText()]);
}));
cellsData.forEach(([columnName, text]) => output[columnName] = text);
return output;
}<|fim▁end|> | |
<|file_name|>NullMemento.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2001, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Jens Lukowski/Innoopract - initial renaming/restructuring
*
*******************************************************************************/
package org.eclipse.wst.xml.core.internal.contenttype;
import org.eclipse.wst.sse.core.internal.encoding.EncodingMemento;
import org.eclipse.wst.sse.core.internal.encoding.NonContentBasedEncodingRules;
/**
* This class can be used in place of an EncodingMemento (its super class),
* when there is not in fact ANY encoding information. For example, when a
* structuredDocument is created directly from a String
*/
public class NullMemento extends EncodingMemento {
/**
* Create a memento initialized with the non-content-based default
* charset, for use when no encoding information is available.
*/
public NullMemento() {
super();
String defaultCharset = NonContentBasedEncodingRules.useDefaultNameRules(null);
setJavaCharsetName(defaultCharset);
setAppropriateDefault(defaultCharset);
setDetectedCharsetName(null);
}
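// Illustrative usage sketch (the getter is assumed to be the counterpart
// of setJavaCharsetName above):
//   EncodingMemento memento = new NullMemento();
//   String charset = memento.getJavaCharsetName(); // workspace default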
<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>cap.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Sahana Eden Common Alerting Protocol (CAP) Model
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3CAPModel",
"cap_info_labels",
"cap_alert_is_template",
"cap_rheader",
"cap_alert_list_layout",
#"cap_gis_location_xml_post_parse",
#"cap_gis_location_xml_post_render",
)
import datetime
import urllib2 # Needed for quoting & error handling on fetch
try:
from cStringIO import StringIO # Faster, where available
except ImportError:
from StringIO import StringIO
from gluon import *
from gluon.storage import Storage
from gluon.tools import fetch
from ..s3 import *
# =============================================================================
class S3CAPModel(S3Model):
"""
CAP: Common Alerting Protocol
http://eden.sahanafoundation.org/wiki/BluePrint/Messaging#CAP
"""
names = ("cap_alert",
"cap_alert_represent",
"cap_warning_priority",
"cap_info",
"cap_info_represent",
"cap_resource",
"cap_area",
"cap_area_represent",
"cap_area_location",
"cap_area_tag",
"cap_info_category_opts",
)
def model(self):
T = current.T
db = current.db
settings = current.deployment_settings
add_components = self.add_components
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table<|fim▁hole|> # @ToDo: Switch to using event_incident_type
#
# The keys are based on the Canadian ems.incident hierarchy, with a
# few extra general versions added to 'other'
# The values are meant for end-users, so can be customised as-required
# NB It is important that the meaning of these entries is not changed
# as otherwise this hurts our ability to do synchronisation
# Entries can be hidden from user view in the controller.
# Additional sets of 'translations' can be added to the tuples.
cap_incident_type_opts = {
"animalHealth.animalDieOff": T("Animal Die Off"),
"animalHealth.animalFeed": T("Animal Feed"),
"aviation.aircraftCrash": T("Aircraft Crash"),
"aviation.aircraftHijacking": T("Aircraft Hijacking"),
"aviation.airportClosure": T("Airport Closure"),
"aviation.airspaceClosure": T("Airspace Closure"),
"aviation.noticeToAirmen": T("Notice to Airmen"),
"aviation.spaceDebris": T("Space Debris"),
"civil.demonstrations": T("Demonstrations"),
"civil.dignitaryVisit": T("Dignitary Visit"),
"civil.displacedPopulations": T("Displaced Populations"),
"civil.emergency": T("Civil Emergency"),
"civil.looting": T("Looting"),
"civil.publicEvent": T("Public Event"),
"civil.riot": T("Riot"),
"civil.volunteerRequest": T("Volunteer Request"),
"crime": T("Crime"),
"crime.bomb": T("Bomb"),
"crime.bombExplosion": T("Bomb Explosion"),
"crime.bombThreat": T("Bomb Threat"),
"crime.dangerousPerson": T("Dangerous Person"),
"crime.drugs": T("Drugs"),
"crime.homeCrime": T("Home Crime"),
"crime.illegalImmigrant": T("Illegal Immigrant"),
"crime.industrialCrime": T("Industrial Crime"),
"crime.poisoning": T("Poisoning"),
"crime.retailCrime": T("Retail Crime"),
"crime.shooting": T("Shooting"),
"crime.stowaway": T("Stowaway"),
"crime.terrorism": T("Terrorism"),
"crime.vehicleCrime": T("Vehicle Crime"),
"fire": T("Fire"),
"fire.forestFire": T("Forest Fire"),
"fire.hotSpot": T("Hot Spot"),
"fire.industryFire": T("Industry Fire"),
"fire.smoke": T("Smoke"),
"fire.urbanFire": T("Urban Fire"),
"fire.wildFire": T("Wild Fire"),
"flood": T("Flood"),
"flood.damOverflow": T("Dam Overflow"),
"flood.flashFlood": T("Flash Flood"),
"flood.highWater": T("High Water"),
"flood.overlandFlowFlood": T("Overland Flow Flood"),
"flood.tsunami": T("Tsunami"),
"geophysical.avalanche": T("Avalanche"),
"geophysical.earthquake": T("Earthquake"),
"geophysical.lahar": T("Lahar"),
"geophysical.landslide": T("Landslide"),
"geophysical.magneticStorm": T("Magnetic Storm"),
"geophysical.meteorite": T("Meteorite"),
"geophysical.pyroclasticFlow": T("Pyroclastic Flow"),
"geophysical.pyroclasticSurge": T("Pyroclastic Surge"),
"geophysical.volcanicAshCloud": T("Volcanic Ash Cloud"),
"geophysical.volcanicEvent": T("Volcanic Event"),
"hazardousMaterial": T("Hazardous Material"),
"hazardousMaterial.biologicalHazard": T("Biological Hazard"),
"hazardousMaterial.chemicalHazard": T("Chemical Hazard"),
"hazardousMaterial.explosiveHazard": T("Explosive Hazard"),
"hazardousMaterial.fallingObjectHazard": T("Falling Object Hazard"),
"hazardousMaterial.infectiousDisease": T("Infectious Disease (Hazardous Material)"),
"hazardousMaterial.poisonousGas": T("Poisonous Gas"),
"hazardousMaterial.radiologicalHazard": T("Radiological Hazard"),
"health.infectiousDisease": T("Infectious Disease"),
"health.infestation": T("Infestation"),
"ice.iceberg": T("Iceberg"),
"ice.icePressure": T("Ice Pressure"),
"ice.rapidCloseLead": T("Rapid Close Lead"),
"ice.specialIce": T("Special Ice"),
"marine.marineSecurity": T("Marine Security"),
"marine.nauticalAccident": T("Nautical Accident"),
"marine.nauticalHijacking": T("Nautical Hijacking"),
"marine.portClosure": T("Port Closure"),
"marine.specialMarine": T("Special Marine"),
"meteorological.blizzard": T("Blizzard"),
"meteorological.blowingSnow": T("Blowing Snow"),
"meteorological.drought": T("Drought"),
"meteorological.dustStorm": T("Dust Storm"),
"meteorological.fog": T("Fog"),
"meteorological.freezingDrizzle": T("Freezing Drizzle"),
"meteorological.freezingRain": T("Freezing Rain"),
"meteorological.freezingSpray": T("Freezing Spray"),
"meteorological.hail": T("Hail"),
"meteorological.hurricane": T("Hurricane"),
"meteorological.rainFall": T("Rain Fall"),
"meteorological.snowFall": T("Snow Fall"),
"meteorological.snowSquall": T("Snow Squall"),
"meteorological.squall": T("Squall"),
"meteorological.stormSurge": T("Storm Surge"),
"meteorological.thunderstorm": T("Thunderstorm"),
"meteorological.tornado": T("Tornado"),
"meteorological.tropicalStorm": T("Tropical Storm"),
"meteorological.waterspout": T("Waterspout"),
"meteorological.winterStorm": T("Winter Storm"),
"missingPerson": T("Missing Person"),
# http://en.wikipedia.org/wiki/Amber_Alert
"missingPerson.amberAlert": T("Child Abduction Emergency"),
"missingPerson.missingVulnerablePerson": T("Missing Vulnerable Person"),
# http://en.wikipedia.org/wiki/Silver_Alert
"missingPerson.silver": T("Missing Senior Citizen"),
"publicService.emergencySupportFacility": T("Emergency Support Facility"),
"publicService.emergencySupportService": T("Emergency Support Service"),
"publicService.schoolClosure": T("School Closure"),
"publicService.schoolLockdown": T("School Lockdown"),
"publicService.serviceOrFacility": T("Service or Facility"),
"publicService.transit": T("Transit"),
"railway.railwayAccident": T("Railway Accident"),
"railway.railwayHijacking": T("Railway Hijacking"),
"roadway.bridgeClosure": T("Bridge Closed"),
"roadway.hazardousRoadConditions": T("Hazardous Road Conditions"),
"roadway.roadwayAccident": T("Road Accident"),
"roadway.roadwayClosure": T("Road Closed"),
"roadway.roadwayDelay": T("Road Delay"),
"roadway.roadwayHijacking": T("Road Hijacking"),
"roadway.roadwayUsageCondition": T("Road Usage Condition"),
"roadway.trafficReport": T("Traffic Report"),
"temperature.arcticOutflow": T("Arctic Outflow"),
"temperature.coldWave": T("Cold Wave"),
"temperature.flashFreeze": T("Flash Freeze"),
"temperature.frost": T("Frost"),
"temperature.heatAndHumidity": T("Heat and Humidity"),
"temperature.heatWave": T("Heat Wave"),
"temperature.windChill": T("Wind Chill"),
"wind.galeWind": T("Gale Wind"),
"wind.hurricaneForceWind": T("Hurricane Force Wind"),
"wind.stormForceWind": T("Storm Force Wind"),
"wind.strongWind": T("Strong Wind"),
"other.buildingCollapsed": T("Building Collapsed"),
"other.peopleTrapped": T("People Trapped"),
"other.powerFailure": T("Power Failure"),
}
# ---------------------------------------------------------------------
# CAP alerts
#
# CAP alert Status Code (status)
cap_alert_status_code_opts = OrderedDict([
("Actual", T("Actual - actionable by all targeted recipients")),
("Exercise", T("Exercise - only for designated participants (decribed in note)")),
("System", T("System - for internal functions")),
("Test", T("Test - testing, all recipients disregard")),
("Draft", T("Draft - not actionable in its current form")),
])
# CAP alert message type (msgType)
cap_alert_msgType_code_opts = OrderedDict([
("Alert", T("Alert: Initial information requiring attention by targeted recipients")),
("Update", T("Update: Update and supercede earlier message(s)")),
("Cancel", T("Cancel: Cancel earlier message(s)")),
("Ack", T("Ack: Acknowledge receipt and acceptance of the message(s)")),
("Error", T("Error: Indicate rejection of the message(s)")),
])
# CAP alert scope
cap_alert_scope_code_opts = OrderedDict([
("Public", T("Public - unrestricted audiences")),
("Restricted", T("Restricted - to users with a known operational requirement (described in restriction)")),
("Private", T("Private - only to specified addresses (mentioned as recipients)"))
])
# CAP info categories
cap_info_category_opts = OrderedDict([
("Geo", T("Geophysical (inc. landslide)")),
("Met", T("Meteorological (inc. flood)")),
("Safety", T("General emergency and public safety")),
("Security", T("Law enforcement, military, homeland and local/private security")),
("Rescue", T("Rescue and recovery")),
("Fire", T("Fire suppression and rescue")),
("Health", T("Medical and public health")),
("Env", T("Pollution and other environmental")),
("Transport", T("Public and private transportation")),
("Infra", T("Utility, telecommunication, other non-transport infrastructure")),
("CBRNE", T("Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack")),
("Other", T("Other events")),
])
tablename = "cap_alert"
define_table(tablename,
Field("is_template", "boolean",
readable = False,
writable = True,
),
Field("template_id", "reference cap_alert",
label = T("Template"),
ondelete = "RESTRICT",
represent = self.template_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_alert.id",
self.template_represent,
filterby="is_template",
filter_opts=(True,)
)),
comment = T("Apply a template"),
),
Field("template_title",
label = T("Template Title"),
),
Field("template_settings", "text",
default = "{}",
readable = False,
),
Field("identifier", unique=True, length=128,
default = self.generate_identifier,
label = T("Identifier"),
),
Field("sender",
label = T("Sender"),
default = self.generate_sender,
# @todo: can not be empty in alerts (validator!)
),
s3_datetime("sent",
default = "now",
writable = False,
),
Field("status",
default = "Draft",
label = T("Status"),
requires = IS_IN_SET(cap_alert_status_code_opts),
),
Field("msg_type",
label = T("Message Type"),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_alert_msgType_code_opts)
),
),
Field("source",
label = T("Source"),
default = self.generate_source,
),
Field("scope",
label = T("Scope"),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_alert_scope_code_opts)
),
),
# Text describing the restriction for scope=restricted
Field("restriction", "text",
label = T("Restriction"),
),
Field("addresses", "list:string",
label = T("Recipients"),
represent = self.list_string_represent,
#@ToDo: provide a better way to add multiple addresses,
# do not ask the user to delimit it themselves
# this should eventually use the CAP contacts
#widget = S3CAPAddressesWidget,
),
Field("codes", "text",
default = settings.get_cap_codes(),
label = T("Codes"),
represent = S3KeyValueWidget.represent,
widget = S3KeyValueWidget(),
),
Field("note", "text",
label = T("Note"),
),
Field("reference", "list:reference cap_alert",
label = T("Reference"),
represent = S3Represent(lookup = tablename,
fields = ["msg_type", "sent", "sender"],
field_sep = " - ",
multiple = True,
),
# @ToDo: This should not be manually entered,
# needs a widget
#widget = S3ReferenceWidget(table,
# one_to_many=True,
# allow_create=False),
),
# @ToDo: Switch to using event_incident_type_id
Field("incidents", "list:string",
label = T("Incidents"),
represent = S3Represent(options = cap_incident_type_opts,
multiple = True),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_incident_type_opts,
multiple = True,
sort = True,
)),
widget = S3MultiSelectWidget(),
),
# approved_on field for recording when the alert was approved
s3_datetime("approved_on",
readable = False,
writable = False,
),
*s3_meta_fields())
filter_widgets = [
S3TextFilter(["identifier",
"sender",
"incidents",
"cap_info.headline",
"cap_info.event_type_id",
],
label = T("Search"),
comment = T("Search for an Alert by sender, incident, headline or event."),
),
S3OptionsFilter("info.category",
label = T("Category"),
options = cap_info_category_opts,
),
S3LocationFilter("location.location_id",
label = T("Location(s)"),
# options = gis.get_countries().keys(),
),
S3OptionsFilter("info.language",
label = T("Language"),
),
]
configure(tablename,
context = {"location": "location.location_id",
},
filter_widgets = filter_widgets,
list_layout = cap_alert_list_layout,
list_orderby = "cap_info.expires desc",
onvalidation = self.cap_alert_form_validation,
# update the approved_on field on approve of the alert
onapprove = self.cap_alert_approve,
)
# Components
add_components(tablename,
cap_area = "alert_id",
cap_area_location = {"name": "location",
"joinby": "alert_id",
},
cap_info = "alert_id",
cap_resource = "alert_id",
)
self.set_method("cap", "alert",
method = "import_feed",
action = CAPImportFeed())
if crud_strings["cap_template"]:
crud_strings[tablename] = crud_strings["cap_template"]
else:
ADD_ALERT = T("Create Alert")
crud_strings[tablename] = Storage(
label_create = ADD_ALERT,
title_display = T("Alert Details"),
title_list = T("Alerts"),
# If already-published, this should create a new "Update"
# alert instead of modifying the original
title_update = T("Edit Alert"),
title_upload = T("Import Alerts"),
label_list_button = T("List Alerts"),
label_delete_button = T("Delete Alert"),
msg_record_created = T("Alert created"),
msg_record_modified = T("Alert modified"),
msg_record_deleted = T("Alert deleted"),
msg_list_empty = T("No alerts to show"))
alert_represent = S3Represent(lookup = tablename,
fields = ["msg_type", "sent", "sender"],
field_sep = " - ")
alert_id = S3ReusableField("alert_id", "reference %s" % tablename,
comment = T("The alert message containing this information"),
label = T("Alert"),
ondelete = "CASCADE",
represent = alert_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_alert.id",
alert_represent)),
)
# ---------------------------------------------------------------------
# CAP info segments
#
cap_info_responseType_opts = OrderedDict([
("Shelter", T("Shelter - Take shelter in place or per instruction")),
("Evacuate", T("Evacuate - Relocate as instructed in the instruction")),
("Prepare", T("Prepare - Make preparations per the instruction")),
("Execute", T("Execute - Execute a pre-planned activity identified in instruction")),
("Avoid", T("Avoid - Avoid the subject event as per the instruction")),
("Monitor", T("Monitor - Attend to information sources as described in instruction")),
("Assess", T("Assess - Evaluate the information in this message.")),
("AllClear", T("AllClear - The subject event no longer poses a threat")),
("None", T("None - No action recommended")),
])
cap_info_urgency_opts = OrderedDict([
("Immediate", T("Response action should be taken immediately")),
("Expected", T("Response action should be taken soon (within next hour)")),
("Future", T("Responsive action should be taken in the near future")),
("Past", T("Responsive action is no longer required")),
("Unknown", T("Unknown")),
])
cap_info_severity_opts = OrderedDict([
("Extreme", T("Extraordinary threat to life or property")),
("Severe", T("Significant threat to life or property")),
("Moderate", T("Possible threat to life or property")),
("Minor", T("Minimal to no known threat to life or property")),
("Unknown", T("Severity unknown")),
])
cap_info_certainty_opts = OrderedDict([
("Observed", T("Observed: determined to have occurred or to be ongoing")),
("Likely", T("Likely (p > ~50%)")),
("Possible", T("Possible but not likely (p <= ~50%)")),
("Unlikely", T("Not expected to occur (p ~ 0)")),
("Unknown", T("Certainty unknown")),
])
# ---------------------------------------------------------------------
# Warning Priorities for CAP
tablename = "cap_warning_priority"
define_table(tablename,
Field("priority_rank", "integer",
label = T("Priority Rank"),
length = 2,
),
Field("event_code",
label = T("Event Code"),
),
Field("name", notnull = True, length = 64,
label = T("Name"),
),
Field("event_type",
label = T("Event Type"),
),
Field("urgency",
label = T("Urgency"),
requires = IS_IN_SET(cap_info_urgency_opts),
),
Field("severity",
label = T("Severity"),
requires = IS_IN_SET(cap_info_severity_opts),
),
Field("certainty",
label = T("Certainty"),
requires = IS_IN_SET(cap_info_certainty_opts),
),
Field("color_code",
label = T("Color Code"),
),
*s3_meta_fields())
priority_represent = S3Represent(lookup = tablename)
crud_strings[tablename] = Storage(
label_create = T("Create Warning Priority"),
title_display = T("Warning Priority Details"),
title_list = T("Warning Priorities"),
title_update = T("Edit Warning Priority"),
title_upload = T("Import Warning Priorities"),
label_list_button = T("List Warning Priorities"),
label_delete_button = T("Delete Warning Priority"),
msg_record_created = T("Warning Priority added"),
msg_record_modified = T("Warning Priority updated"),
msg_record_deleted = T("Warning Priority removed"),
msg_list_empty = T("No Warning Priorities currently registered")
)
# ---------------------------------------------------------------------
# CAP info priority
# @ToDo: i18n: Need label=T("")
tablename = "cap_info"
define_table(tablename,
alert_id(),
Field("is_template", "boolean",
default = False,
readable = False,
writable = False,
),
Field("template_info_id", "reference cap_info",
ondelete = "RESTRICT",
readable = False,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_info.id",
self.template_represent,
filterby="is_template",
filter_opts=(True,)
)),
widget = S3HiddenWidget(),
),
Field("template_settings", "text",
readable = False,
),
Field("language",
default = "en",
requires = IS_EMPTY_OR(
IS_IN_SET(settings.get_cap_languages())
),
),
Field("category", "list:string",
represent = S3Represent(options = cap_info_category_opts,
multiple = True,
),
required = True,
requires = IS_IN_SET(cap_info_category_opts,
multiple = True,
),
widget = S3MultiSelectWidget(),
), # 1 or more allowed
self.event_type_id(empty = False,
script = '''
$.filterOptionsS3({
'trigger':'event_type_id',
'target':'priority',
'lookupURL':S3.Ap.concat('/cap/priority_get/'),
'lookupResource':'event_type'
})'''
),
Field("response_type", "list:string",
represent = S3Represent(options = cap_info_responseType_opts,
multiple = True,
),
requires = IS_IN_SET(cap_info_responseType_opts,
multiple = True),
widget = S3MultiSelectWidget(),
), # 0 or more allowed
Field("priority",
represent = priority_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(
db, "cap_warning_priority.id",
priority_represent
),
),
),
Field("urgency",
required = True,
requires = IS_IN_SET(cap_info_urgency_opts),
),
Field("severity",
required = True,
requires = IS_IN_SET(cap_info_severity_opts),
),
Field("certainty",
required = True,
requires = IS_IN_SET(cap_info_certainty_opts),
),
Field("audience", "text"),
Field("event_code", "text",
default = settings.get_cap_event_codes(),
represent = S3KeyValueWidget.represent,
widget = S3KeyValueWidget(),
),
s3_datetime("effective",
default = "now",
),
s3_datetime("onset"),
s3_datetime("expires",
past = 0,
),
Field("sender_name"),
Field("headline"),
Field("description", "text"),
Field("instruction", "text"),
Field("contact", "text"),
Field("web",
requires = IS_EMPTY_OR(IS_URL()),
),
Field("parameter", "text",
default = settings.get_cap_parameters(),
label = T("Parameters"),
represent = S3KeyValueWidget.represent,
widget = S3KeyValueWidget(),
),
*s3_meta_fields())
# @ToDo: Move labels into main define_table (can then be lazy & performs better anyway)
info_labels = cap_info_labels()
for field in info_labels:
db.cap_info[field].label = info_labels[field]
if crud_strings["cap_template_info"]:
crud_strings[tablename] = crud_strings["cap_template_info"]
else:
ADD_INFO = T("Add alert information")
crud_strings[tablename] = Storage(
label_create = ADD_INFO,
title_display = T("Alert information"),
title_list = T("Information entries"),
title_update = T("Update alert information"), # this will create a new "Update" alert?
title_upload = T("Import alert information"),
subtitle_list = T("Listing of alert information items"),
label_list_button = T("List information entries"),
label_delete_button = T("Delete Information"),
msg_record_created = T("Alert information created"),
msg_record_modified = T("Alert information modified"),
msg_record_deleted = T("Alert information deleted"),
msg_list_empty = T("No alert information to show"))
info_represent = S3Represent(lookup = tablename,
fields = ["language", "headline"],
field_sep = " - ")
info_id = S3ReusableField("info_id", "reference %s" % tablename,
label = T("Information Segment"),
ondelete = "CASCADE",
represent = info_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_info.id",
info_represent)
),
sortby = "identifier",
)
configure(tablename,
#create_next = URL(f="info", args=["[id]", "area"]),
onaccept = self.info_onaccept,
)
# Components
add_components(tablename,
cap_resource = "info_id",
cap_area = "info_id",
)
# ---------------------------------------------------------------------
# CAP Resource segments
#
# Resource elements sit inside the Info segment of the export XML
# - however in most cases these would be common across all Infos, so in
# our internal UI we link these primarily to the Alert but still
# allow the option to differentiate by Info
#
tablename = "cap_resource"
define_table(tablename,
alert_id(writable = False,
),
info_id(),
self.super_link("doc_id", "doc_entity"),
Field("resource_desc",
requires = IS_NOT_EMPTY(),
),
Field("mime_type",
requires = IS_NOT_EMPTY(),
),
Field("size", "integer",
writable = False,
),
Field("uri",
# needs a special validation
writable = False,
),
#Field("file", "upload"),
Field("deref_uri", "text",
readable = False,
writable = False,
),
Field("digest",
writable = False,
),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Resource"),
title_display = T("Alert Resource"),
title_list = T("Resources"),
title_update = T("Edit Resource"),
subtitle_list = T("List Resources"),
label_list_button = T("List Resources"),
label_delete_button = T("Delete Resource"),
msg_record_created = T("Resource added"),
msg_record_modified = T("Resource updated"),
msg_record_deleted = T("Resource deleted"),
msg_list_empty = T("No resources currently defined for this alert"))
# @todo: complete custom form
crud_form = S3SQLCustomForm(#"name",
"info_id",
"resource_desc",
S3SQLInlineComponent("image",
label=T("Image"),
fields=["file",
],
),
S3SQLInlineComponent("document",
label=T("Document"),
fields=["file",
],
),
)
configure(tablename,
super_entity = "doc_entity",
crud_form = crud_form,
# Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
)
# ---------------------------------------------------------------------
# CAP Area segments
#
# Area elements sit inside the Info segment of the export XML
# - however in most cases these would be common across all Infos, so in
# our internal UI we link these primarily to the Alert but still
# allow the option to differentiate by Info
#
# Each <area> can have multiple elements which are one of <polygon>,
# <circle>, or <geocode>.
# <polygon> and <circle> are explicit geometry elements.
# <geocode> is a key-value pair in which the key is a standard
# geocoding system like SAME, FIPS, ZIP, and the value is a defined
# value in that system. The region described by the <area> is the
# union of the areas described by the individual elements, but the
# CAP spec advises that, if geocodes are included, the concrete
# geometry elements should outline the area specified by the geocodes,
# as not all recipients will have access to the meanings of the
# geocodes. However, since geocodes are a compact way to describe an
# area, it may be that they will be used without accompanying geometry,
# so we should not count on having <polygon> or <circle>.
#
# Geometry elements are each represented by a gis_location record, and
# linked to the cap_area record via the cap_area_location link table.
# For the moment, <circle> objects are stored with the center in the
# gis_location's lat, lon, and radius (in km) as a tag "radius" and
# value. ToDo: Later, we will add CIRCLESTRING WKT.
#
# Geocode elements are currently stored as key value pairs in the
# cap_area record.
#
# <area> can also specify a minimum altitude and maximum altitude
# ("ceiling"). These are stored in explicit fields for now, but could
# be replaced by key value pairs, if it is found that they are rarely
# used.
#
# (An alternative would be to have cap_area link to a gis_location_group
# record. In that case, the geocode tags could be stored in the
# gis_location_group's overall gis_location element's tags. The altitude
# could be stored in the overall gis_location's elevation, with ceiling
# stored in a tag. We could consider adding a maximum elevation field.)
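# Illustrative sketch of the storage described above (values are examples
# only): the CAP element <circle>6.9,79.9 25</circle> maps to
#   gis_location: lat=6.9, lon=79.9
#   gis_location_tag: tag="radius", value=25 (km)
#   cap_area_location: linking that location to this cap_area
# while a geocode (valueName "SAME", value "006109") becomes a
# cap_area_tag with tag="SAME", value="006109".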
tablename = "cap_area"
define_table(tablename,
alert_id(writable = False,
),
info_id(),
Field("name",
label = T("Area description"),
required = True,
),
Field("altitude", "integer"), # Feet above Sea-level in WGS84 (Specific or Minimum is using a range)
Field("ceiling", "integer"), # Feet above Sea-level in WGS84 (Maximum)
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Area"),
title_display = T("Alert Area"),
title_list = T("Areas"),
title_update = T("Edit Area"),
subtitle_list = T("List Areas"),
label_list_button = T("List Areas"),
label_delete_button = T("Delete Area"),
msg_record_created = T("Area added"),
msg_record_modified = T("Area updated"),
msg_record_deleted = T("Area deleted"),
msg_list_empty = T("No areas currently defined for this alert"))
crud_form = S3SQLCustomForm("name",
"info_id",
# Not yet working with default formstyle or multiple=True
#S3SQLInlineComponent("location",
# name = "location",
# label = "",
# multiple = False,
# fields = [("", "location_id")],
# ),
S3SQLInlineComponent("tag",
name = "tag",
label = "",
fields = ["tag",
"value",
],
),
"altitude",
"ceiling",
)
area_represent = S3Represent(lookup=tablename)
configure(tablename,
#create_next = URL(f="area", args=["[id]", "location"]),
# Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
crud_form = crud_form,
)
# Components
add_components(tablename,
cap_area_location = {"name": "location",
"joinby": "area_id",
},
cap_area_tag = {"name": "tag",
"joinby": "area_id",
},
)
area_id = S3ReusableField("area_id", "reference %s" % tablename,
label = T("Area"),
ondelete = "CASCADE",
represent = area_represent,
requires = IS_ONE_OF(db, "cap_area.id",
area_represent),
)
# ToDo: Use a widget tailored to entering <polygon> and <circle>.
# Want to be able to enter them by drawing on the map.
# Also want to allow selecting existing locations that have
# geometry, maybe with some filtering so the list isn't cluttered
# with irrelevant locations.
tablename = "cap_area_location"
define_table(tablename,
alert_id(readable = False,
writable = False,
),
area_id(),
self.gis_location_id(
widget = S3LocationSelector(points = False,
polygons = True,
show_map = True,
catalog_layers = True,
show_address = False,
show_postcode = False,
),
),
)
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Location"),
title_display = T("Alert Location"),
title_list = T("Locations"),
title_update = T("Edit Location"),
subtitle_list = T("List Locations"),
label_list_button = T("List Locations"),
label_delete_button = T("Delete Location"),
msg_record_created = T("Location added"),
msg_record_modified = T("Location updated"),
msg_record_deleted = T("Location deleted"),
msg_list_empty = T("No locations currently defined for this alert"))
configure(tablename,
# Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
)
# ---------------------------------------------------------------------
# Area Tags
# - Key-Value extensions
# - Used to hold for geocodes: key is the geocode system name, and
# value is the specific value for this area.
# - Could store other values here as well, to avoid dedicated fields
# in cap_area for rarely-used items like altitude and ceiling, but
# would have to distinguish those from geocodes.
#
# ToDo: Provide a mechanism for pre-loading geocodes that are not tied
# to individual areas.
# ToDo: Allow sharing the key-value pairs. Cf. Ruby on Rails tagging
# systems such as acts-as-taggable-on, which has a single table of tags
# used by all classes. Each tag record has the class and field that the
# tag belongs to, as well as the tag string. We'd want tag and value,
# but the idea is the same: There would be a table with tag / value
# pairs, and individual cap_area, event_event, org_whatever records
# would link to records in the tag table. So we actually would not have
# duplicate tag value records as we do now.
tablename = "cap_area_tag"
define_table(tablename,
area_id(),
# ToDo: Allow selecting from a dropdown list of pre-defined
# geocode system names.
Field("tag",
label = T("Geocode Name"),
),
# ToDo: Once the geocode system is selected, fetch a list
# of current values for that geocode system. Allow adding
# new values, e.g. with combo box menu.
Field("value",
label = T("Value"),
),
s3_comments(),
*s3_meta_fields())
#configure(tablename,
# deduplicate = self.cap_area_tag_deduplicate,
# )
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
return dict(cap_alert_id = alert_id,
cap_alert_represent = alert_represent,
cap_area_represent = area_represent,
cap_info_represent = info_represent,
cap_info_category_opts = cap_info_category_opts
)
# -------------------------------------------------------------------------
@staticmethod
def generate_identifier():
"""
Generate an identifier for a new form
"""
db = current.db
table = db.cap_alert
r = db().select(table.id,
limitby=(0, 1),
orderby=~table.id).first()
_time = datetime.datetime.utcnow().strftime("%Y%m%d")
if r:
next_id = int(r.id) + 1
else:
next_id = 1
# Format: prefix-oid-date-sequence(-suffix)
settings = current.deployment_settings
prefix = settings.get_cap_identifier_prefix() or current.xml.domain
oid = settings.get_cap_identifier_oid()
suffix = settings.get_cap_identifier_suffix()
return "%s-%s-%s-%03d%s%s" % \
(prefix, oid, _time, next_id, ["", "-"][bool(suffix)], suffix)
# -------------------------------------------------------------------------
@staticmethod
def generate_sender():
"""
Generate a sender for a new form
"""
try:
user_id = current.auth.user.id
except AttributeError:
return ""
return "%s/%d" % (current.xml.domain, user_id)
# -------------------------------------------------------------------------
@staticmethod
def generate_source():
"""
Generate a source for CAP alert
"""
return "%s@%s" % (current.xml.domain,
current.deployment_settings.get_base_public_url())
# -------------------------------------------------------------------------
@staticmethod
def template_represent(id, row=None):
"""
Represent an alert template concisely
"""
if row:
id = row.id
elif not id:
return current.messages["NONE"]
else:
db = current.db
table = db.cap_alert
row = db(table.id == id).select(table.is_template,
table.template_title,
# left = table.on(table.id == table.parent_item_category_id), Doesn't work
limitby=(0, 1)).first()
try:
# @ToDo: Should get headline from "info"?
if row.is_template:
return row.template_title
else:
return current.s3db.cap_alert_represent(id)
except:
return current.messages.UNKNOWN_OPT
# -------------------------------------------------------------------------
@staticmethod
def list_string_represent(string, fmt=lambda v: v):
try:
if isinstance(string, list):
return ", ".join([fmt(i) for i in string])
elif isinstance(string, basestring):
return ", ".join([fmt(i) for i in string[1:-1].split("|")])
except IndexError:
return current.messages.UNKNOWN_OPT
return ""
# -------------------------------------------------------------------------
@staticmethod
def cap_alert_form_validation(form):
"""
On Validation for CAP alert form
"""
form_vars = form.vars
if form_vars.get("scope") == "Private" and not form_vars.get("addresses"):
form.errors["addresses"] = \
current.T("'Recipients' field mandatory in case of 'Private' scope")
return
# -------------------------------------------------------------------------
@staticmethod
def info_onaccept(form):
"""
After DB I/O
"""
if "vars" in form:
form_vars = form.vars
elif "id" in form:
form_vars = form
elif hasattr(form, "vars"):
form_vars = form.vars
else:
form_vars = form
info_id = form_vars.id
if not info_id:
return
db = current.db
atable = db.cap_alert
itable = db.cap_info
info = db(itable.id == info_id).select(itable.alert_id,
limitby=(0, 1)).first()
if info:
alert_id = info.alert_id
if alert_id and cap_alert_is_template(alert_id):
db(itable.id == info_id).update(is_template = True)
return True
# -------------------------------------------------------------------------
@staticmethod
def cap_alert_approve(record=None):
"""
Update the approved_on field when alert gets approved
"""
if not record:
return
alert_id = record["id"]
# Update approved_on at the time the alert is approved
if alert_id:
db = current.db
approved_on = record["approved_on"]
db(db.cap_alert.id == alert_id).update(approved_on = current.request.utcnow)
# =============================================================================
def cap_info_labels():
"""
Labels for CAP info segments
"""
T = current.T
return dict(language=T("Language"),
category=T("Category"),
event_type_id=T("Event"),
response_type=T("Response type"),
urgency=T("Urgency"),
severity=T("Severity"),
certainty=T("Certainty"),
audience=T("Audience"),
event_code=T("Event code"),
effective=T("Effective"),
onset=T("Onset"),
expires=T("Expires at"),
sender_name=T("Sender's name"),
headline=T("Headline"),
description=T("Description"),
instruction=T("Instruction"),
web=T("URL"),
contact=T("Contact information"),
parameter=T("Parameters")
)
# =============================================================================
def cap_alert_is_template(alert_id):
"""
Tell whether an alert entry is a template
"""
if not alert_id:
return False
table = current.s3db.cap_alert
query = (table.id == alert_id)
r = current.db(query).select(table.is_template,
limitby=(0, 1)).first()
return r and r.is_template
# =============================================================================
def cap_rheader(r):
""" Resource Header for CAP module """
rheader = None
if r.representation == "html":
record = r.record
if record:
T = current.T
s3db = current.s3db
tablename = r.tablename
if tablename == "cap_alert":
record_id = record.id
table = s3db.cap_info
query = (table.alert_id == record_id)
row = current.db(query).select(table.id,
limitby=(0, 1)).first()
if record.is_template:
if not (row and row.id):
error = DIV(T("An alert needs to contain at least one info item."),
_class="error")
else:
error = ""
tabs = [(T("Template"), None),
(T("Information template"), "info"),
#(T("Area"), "area"),
#(T("Resource Files"), "resource"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Template")),
TD(A(S3CAPModel.template_represent(record_id, record),
_href=URL(c="cap", f="template",
args=[record_id, "update"]))),
),
),
rheader_tabs,
error
)
else:
if not (row and row.id):
error = DIV(T("You need to create at least one alert information item in order to be able to broadcast this alert!"),
_class="error")
export_btn = ""
else:
error = ""
export_btn = A(DIV(_class="export_cap_large"),
_href=URL(c="cap", f="alert", args=["%s.cap" % record_id]),
_target="_blank",
)
auth = current.auth
# Display 'Submit for Approval' based on permission
# and deployment settings
if not r.record.approved_by and \
current.deployment_settings.get_cap_authorisation() and \
auth.s3_has_permission("update", "cap_alert", record_id=r.id):
# Get the user ids for the role alert_approver
db = current.db
agtable = db.auth_group
group_rows = db(agtable.role == "Alert Approver").select(agtable.id)
if group_rows:
    # Collect the pe_ids of the members of all "Alert Approver" groups
    pe_ids = [] # List of pe_ids
    for group_row in group_rows:
        user_ids = auth.s3_group_members(group_row.id) # List of user_ids
        for user_id in user_ids:
            pe_ids.append(auth.s3_user_pe_id(int(user_id)))
submit_btn = A(T("Submit for Approval"),
_href = URL(f = "compose",
vars = {"cap_alert.id": record.id,
"pe_ids": pe_ids,
},
),
_class = "action-btn"
)
else:
submit_btn = None
else:
submit_btn = None
table = s3db.cap_area
query = (table.alert_id == record_id)
row = current.db(query).select(table.id,
limitby=(0, 1)).first()
if row:
# We have an Area, so we can add Locations
location_tab = (T("Location"), "location")
else:
location_tab = ""
tabs = [(T("Alert Details"), None),
(T("Information"), "info"),
(T("Area"), "area"),
location_tab,
(T("Resource Files"), "resource"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(record_id, record),
_href=URL(c="cap", f="alert",
args=[record_id, "update"]))),
),
TR(export_btn)
),
rheader_tabs,
error
)
if submit_btn:
rheader.insert(1, TR(submit_btn))
elif tablename == "cap_area":
# Shouldn't ever be called
tabs = [(T("Area"), None),
(T("Locations"), "location"),
#(T("Geocodes"), "tag"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(record.alert_id),
_href=URL(c="cap", f="alert",
args=[record.id, "update"])))
),
TR(TH("%s: " % T("Information")),
TD(A(s3db.cap_info_represent(record.info_id),
_href=URL(c="cap", f="info",
args=[record.info_id, "update"]))),
),
TR(TH("%s: " % T("Area")),
TD(A(s3db.cap_area_represent(record.id, record),
_href=URL(c="cap", f="area",
args=[record.id, "update"]))),
),
),
rheader_tabs
)
elif tablename == "cap_area_location":
# Shouldn't ever be called
# We need the rheader only for the link back to the area.
rheader = DIV(TABLE(TR(TH("%s: " % T("Area")),
TD(A(s3db.cap_area_represent(record.area_id),
_href=URL(c="cap", f="area",
args=[record.area_id, "update"]))),
),
))
elif tablename == "cap_info":
# Shouldn't ever be called
tabs = [(T("Information"), None),
(T("Resource Files"), "resource"),
]
if cap_alert_is_template(record.alert_id):
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % T("Template")),
TD(A(S3CAPModel.template_represent(record.alert_id),
_href=URL(c="cap", f="template",
args=[record.alert_id, "update"]))),
),
TR(TH("%s: " % T("Info template")),
TD(A(s3db.cap_info_represent(record.id, record),
_href=URL(c="cap", f="info",
args=[record.id, "update"]))),
)
),
rheader_tabs,
_class="cap_info_template_form"
)
current.response.s3.js_global.append('''i18n.cap_locked="%s"''' % T("Locked"))
else:
tabs.insert(1, (T("Areas"), "area"))
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(record.alert_id),
_href=URL(c="cap", f="alert",
args=[record.alert_id, "update"]))),
),
TR(TH("%s: " % T("Information")),
TD(A(s3db.cap_info_represent(record.id, record),
_href=URL(c="cap", f="info",
args=[record.id, "update"]))),
)
),
rheader_tabs
)
return rheader
# =============================================================================
def update_alert_id(tablename):
""" On-accept for area and resource records """
def func(form):
if "vars" in form:
form_vars = form.vars
elif "id" in form:
form_vars = form
elif hasattr(form, "vars"):
form_vars = form.vars
else:
form_vars = form
if form_vars.get("alert_id", None):
# Nothing to do
return
# Look up from the info/area
_id = form_vars.id
if not _id:
return
db = current.db
table = db[tablename]
if tablename == "cap_area_location":
area_id = form_vars.get("area_id", None)
if not area_id:
# Get the full record
item = db(table.id == _id).select(table.alert_id,
table.area_id,
limitby=(0, 1)).first()
try:
alert_id = item.alert_id
area_id = item.area_id
except:
# Nothing we can do
return
if alert_id:
# Nothing to do
return
atable = db.cap_area
area = db(atable.id == area_id).select(atable.alert_id,
limitby=(0, 1)).first()
try:
alert_id = area.alert_id
except:
# Nothing we can do
return
else:
info_id = form_vars.get("info_id", None)
if not info_id:
# Get the full record
item = db(table.id == _id).select(table.alert_id,
table.info_id,
limitby=(0, 1)).first()
try:
alert_id = item.alert_id
info_id = item.info_id
except:
# Nothing we can do
return
if alert_id:
# Nothing to do
return
itable = db.cap_info
info = db(itable.id == info_id).select(itable.alert_id,
limitby=(0, 1)).first()
try:
alert_id = info.alert_id
except:
# Nothing we can do
return
db(table.id == _id).update(alert_id = alert_id)
return func
# =============================================================================
def cap_gis_location_xml_post_parse(element, record):
"""
UNUSED - done in XSLT
Convert CAP polygon representation to WKT; extract circle lat lon.
Latitude and longitude in CAP are expressed as signed decimal values in
coordinate pairs:
latitude,longitude
The circle text consists of:
latitude,longitude radius
where the radius is in km.
Polygon text consists of a space separated sequence of at least 4
coordinate pairs where the first and last are the same.
lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
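For example (illustrative values), the CAP polygon
    38.0,-77.1 38.1,-77.0 38.0,-76.9 38.0,-77.1
is converted to the WKT
    POLYGON ((-77.1 38.0, -77.0 38.1, -76.9 38.0, -77.1 38.0))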
"""
# @ToDo: Extract altitude and ceiling from the enclosing <area>, and
# compute an elevation value to apply to all enclosed gis_locations.
cap_polygons = element.xpath("cap_polygon")
if cap_polygons:
cap_polygon_text = cap_polygons[0].text
# CAP polygons and WKT have opposite separator conventions:
# CAP has spaces between coordinate pairs and within pairs the
# coordinates are separated by comma, and vice versa for WKT.
# Unfortunately, CAP and WKT (as we use it) also have opposite
# orders of lat and lon. CAP has lat lon, WKT has lon lat.
# Both close the polygon by repeating the first point.
cap_points_text = cap_polygon_text.split()
cap_points = [cpoint.split(",") for cpoint in cap_points_text]
# @ToDo: Should we try interpreting all the points as decimal numbers,
# and failing validation if they're wrong?
wkt_points = ["%s %s" % (cpoint[1], cpoint[0]) for cpoint in cap_points]
wkt_polygon_text = "POLYGON ((%s))" % ", ".join(wkt_points)
record.wkt = wkt_polygon_text
return
cap_circle_values = element.xpath("resource[@name='gis_location_tag']/data[@field='tag' and text()='cap_circle']/../data[@field='value']")
if cap_circle_values:
cap_circle_text = cap_circle_values[0].text
coords, radius = cap_circle_text.split()
lat, lon = coords.split(",")
try:
# If any of these fail to interpret as numbers, the circle was
# badly formatted. For now, we don't try to fail validation,
# but just don't set the lat, lon.
lat = float(lat)
lon = float(lon)
radius = float(radius)
except ValueError:
return
record.lat = lat
record.lon = lon
# Add a bounding box for the given radius, if it is not zero.
if radius > 0.0:
bbox = current.gis.get_bounds_from_radius(lat, lon, radius)
record.lat_min = bbox["lat_min"]
record.lon_min = bbox["lon_min"]
record.lat_max = bbox["lat_max"]
record.lon_max = bbox["lon_max"]
# =============================================================================
def cap_gis_location_xml_post_render(element, record):
"""
UNUSED - done in XSLT
Convert Eden WKT polygon (and eventually circle) representation to
CAP format and provide them in the rendered s3xml.
Not all internal formats have a parallel in CAP, but an effort is made
to provide a reasonable substitute:
Polygons are supported.
Circles that were read in from CAP (and thus carry the original CAP
circle data) are supported.
Multipolygons are currently rendered as their bounding box.
Points are rendered as zero radius circles.
Latitude and longitude in CAP are expressed as signed decimal values in
coordinate pairs:
latitude,longitude
The circle text consists of:
latitude,longitude radius
where the radius is in km.
Polygon text consists of a space separated sequence of at least 4
coordinate pairs where the first and last are the same.
lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
"""
# @ToDo: Can we rely on gis_feature_type == 3 to tell if the location is a
# polygon, or is it better to look for POLYGON in the wkt? For now, check
# both.
# @ToDo: CAP does not support multipolygons. Do we want to extract their
# outer polygon if passed MULTIPOLYGON wkt? For now, these are exported
# with their bounding box as the polygon.
# @ToDo: What if a point (gis_feature_type == 1) that is not a CAP circle
# has a non-point bounding box? Should it be rendered as a polygon for
# the bounding box?
try:
from lxml import etree
except ImportError:
# This won't fail, since we're in the middle of processing xml.
return
SubElement = etree.SubElement
s3xml = current.xml
TAG = s3xml.TAG
RESOURCE = TAG["resource"]
DATA = TAG["data"]
ATTRIBUTE = s3xml.ATTRIBUTE
NAME = ATTRIBUTE["name"]
FIELD = ATTRIBUTE["field"]
VALUE = ATTRIBUTE["value"]
loc_tablename = "gis_location"
tag_tablename = "gis_location_tag"
tag_fieldname = "tag"
val_fieldname = "value"
polygon_tag = "cap_polygon"
circle_tag = "cap_circle"
fallback_polygon_tag = "cap_polygon_fallback"
fallback_circle_tag = "cap_circle_fallback"
def __cap_gis_location_add_polygon(element, cap_polygon_text, fallback=False):
"""
Helper for cap_gis_location_xml_post_render that adds the CAP polygon
data to the current element in a gis_location_tag element.
"""
# Make a gis_location_tag.
tag_resource = SubElement(element, RESOURCE)
tag_resource.set(NAME, tag_tablename)
tag_field = SubElement(tag_resource, DATA)
# Add tag and value children.
tag_field.set(FIELD, tag_fieldname)
if fallback:
tag_field.text = fallback_polygon_tag
else:
tag_field.text = polygon_tag
val_field = SubElement(tag_resource, DATA)
val_field.set(FIELD, val_fieldname)
val_field.text = cap_polygon_text
def __cap_gis_location_add_circle(element, lat, lon, radius, fallback=False):
"""
Helper for cap_gis_location_xml_post_render that adds CAP circle
data to the current element in a gis_location_tag element.
"""
# Make a gis_location_tag.
tag_resource = SubElement(element, RESOURCE)
tag_resource.set(NAME, tag_tablename)
tag_field = SubElement(tag_resource, DATA)
# Add tag and value children.
tag_field.set(FIELD, tag_fieldname)
if fallback:
tag_field.text = fallback_circle_tag
else:
tag_field.text = circle_tag
val_field = SubElement(tag_resource, DATA)
val_field.set(FIELD, val_fieldname)
# Construct a CAP circle string: latitude,longitude radius
cap_circle_text = "%s,%s %s" % (lat, lon, radius)
val_field.text = cap_circle_text
# Sort out the geometry case by wkt, CAP tags, gis_feature_type, bounds,...
# Check the two cases for CAP-specific locations first, as those will have
# definite export values. For others, we'll attempt to produce either a
# circle or polygon: Locations with a bounding box will get a box polygon,
# points will get a zero-radius circle.
# Currently wkt is stripped out of gis_location records right here:
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1332
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1426
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L3152
# Until we provide a way to configure that choice, this will not work for
# polygons.
wkt = record.get("wkt", None)
# WKT POLYGON: Although there is no WKT spec, according to every reference
# that deals with nested polygons, the outer, enclosing, polygon must be
# listed first. Hence, we extract only the first polygon, as CAP has no
# provision for nesting.
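# e.g. (illustrative) "POLYGON ((30 10, 40 40, 20 40, 30 10))" yields
# the CAP polygon text "10,30 40,40 40,20 10,30"
# (WKT pairs are "lon lat", CAP pairs are "lat,lon")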
if wkt and wkt.startswith("POLYGON"):
# ToDo: Is it sufficient to test for adjacent (( to find the start of
# the polygon, or might there be whitespace between them?
start = wkt.find("((")
end = wkt.find(")")
if start >=0 and end >=0:
polygon_text = wkt[start + 2 : end]
points_text = polygon_text.split(",")
points = [p.split() for p in points_text]
cap_points_text = ["%s,%s" % (point[1], point[0]) for point in points]
cap_polygon_text = " ".join(cap_points_text)
__cap_gis_location_add_polygon(element, cap_polygon_text)
return
# Fall through if the wkt string was mal-formed.
# CAP circle stored in a gis_location_tag with tag = cap_circle.
# If there is a cap_circle tag, we don't need to do anything further, as
# export.xsl will use it. However, we don't know if there is a cap_circle
# tag...
#
# @ToDo: The export calls xml_post_render after processing a resource's
# fields, but before its components are added as children in the xml tree.
# If this were delayed til after the components were added, we could look
# there for the cap_circle gis_location_tag record. Since xml_post_parse
# isn't in use yet (except for this), maybe we could look at moving it til
# after the components?
#
# For now, with the xml_post_render before components: We could do a db
# query to check for a real cap_circle tag record, and not bother with
# creating fallbacks from bounding box or point...but we don't have to.
# Instead, just go ahead and add the fallbacks under different tag names,
# and let the export.xsl sort them out. This only wastes a little time
# compared to a db query.
# ToDo: MULTIPOLYGON -- Can stitch together the outer polygons in the
# multipolygon, but would need to assure all were the same handedness.
# The remaining cases are for locations that don't have either polygon wkt
# or a cap_circle tag.
# Bounding box: Make a four-vertex polygon from the bounding box.
# This is a fallback, as if there is a circle tag, we'll use that.
lon_min = record.get("lon_min", None)
lon_max = record.get("lon_max", None)
lat_min = record.get("lat_min", None)
lat_max = record.get("lat_max", None)
if lon_min is not None and lon_max is not None and \
   lat_min is not None and lat_max is not None and \
   (lon_min != lon_max) and (lat_min != lat_max):
# Although there is no WKT requirement, arrange the points in
# counterclockwise order. Recall format is:
# lat1,lon1 lat2,lon2 ... latN,lonN, lat1,lon1
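        # Illustrative example: lat 10..20, lon 30..40 yields
        # "10,30 10,40 20,40 20,30 10,30" (a closed counterclockwise ring).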
cap_polygon_text = \
"%(lat_min)s,%(lon_min)s %(lat_min)s,%(lon_max)s %(lat_max)s,%(lon_max)s %(lat_max)s,%(lon_min)s %(lat_min)s,%(lon_min)s" \
% {"lon_min": lon_min,
"lon_max": lon_max,
"lat_min": lat_min,
"lat_max": lat_max}
__cap_gis_location_add_polygon(element, cap_polygon_text, fallback=True)
return
# WKT POINT or location with lat, lon: This can be rendered as a
# zero-radius circle.
# Q: Do we put bounding boxes around POINT locations, and are they
# meaningful?
lat = record.get("lat", None)
lon = record.get("lon", None)
    if lat is None or lon is None:
# Look for POINT.
if wkt and wkt.startswith("POINT"):
            start = wkt.find("(")
            end = wkt.find(")")
            if start >= 0 and end >= 0:
                point_text = wkt[start + 1 : end].strip()
                point = point_text.split()
                try:
                    lon = float(point[0])
                    lat = float(point[1])
                except (ValueError, IndexError):
                    pass
    if lat is not None and lon is not None:
# Add a (fallback) circle with zero radius.
__cap_gis_location_add_circle(element, lat, lon, 0, True)
return
# ToDo: Other WKT.
# Did not find anything to use. Presumably the area has a text description.
return
# =============================================================================
def cap_alert_list_layout(list_id, item_id, resource, rfields, record):
"""
Default dataList item renderer for CAP Alerts on the Home page.
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
@param rfields: the S3ResourceFields to render
@param record: the record as dict
"""
record_id = record["cap_alert.id"]
item_class = "thumbnail"
#raw = record._row
headline = record["cap_info.headline"]
location = record["cap_area.name"]
description = record["cap_info.description"]
sender = record["cap_info.sender_name"]
headline = A(headline,
# @ToDo: Link to nicely-formatted version of Display page
_href = URL(c="cap", f="alert", args=record_id),
)
headline = DIV(headline,
current.T("in %(location)s") % dict(location=location)
)
item = DIV(headline,
P(description),
               P(sender, _style="font-weight:bold"),
_class=item_class,
_id=item_id,
)
return item
# =============================================================================
class CAPImportFeed(S3Method):
"""
Import CAP alerts from a URL
"""
# -------------------------------------------------------------------------
@staticmethod
def apply_method(r, **attr):
"""
Apply method.
@param r: the S3Request
@param attr: controller options for this request
"""
if r.representation == "html":
T = current.T
request = current.request
response = current.response
title = T("Import from Feed URL")
# @ToDo: use Formstyle
form = FORM(
TABLE(
TR(TD(DIV(B("%s:" % T("URL")),
SPAN(" *", _class="req"))),
TD(INPUT(_type="text", _name="url",
_id="url", _value="")),
TD(),
),
TR(TD(B("%s: " % T("User"))),
TD(INPUT(_type="text", _name="user",
_id="user", _value="")),
TD(),
),
TR(TD(B("%s: " % T("Password"))),
TD(INPUT(_type="text", _name="password",
_id="password", _value="")),
TD(),
),
TR(TD(B("%s: " % T("Ignore Errors?"))),
TD(INPUT(_type="checkbox", _name="ignore_errors",
_id="ignore_errors")),
TD(),
),
TR(TD(),
TD(INPUT(_type="submit", _value=T("Import"))),
TD(),
)
)
)
response.view = "create.html"
output = dict(title=title,
form=form)
if form.accepts(request.vars, current.session):
form_vars = form.vars
url = form_vars.get("url", None)
if not url:
response.error = T("URL is required")
return output
# @ToDo:
                username = form_vars.get("user", None)
password = form_vars.get("password", None)
                try:
                    file = fetch(url)
                except (urllib2.HTTPError, urllib2.URLError):
                    response.error = str(sys.exc_info()[1])
                    return output
File = StringIO(file)
stylesheet = os.path.join(request.folder, "static", "formats",
"cap", "import.xsl")
xml = current.xml
tree = xml.parse(File)
resource = current.s3db.resource("cap_alert")
s3xml = xml.transform(tree, stylesheet_path=stylesheet,
name=resource.name)
try:
resource.import_xml(s3xml,
ignore_errors=form_vars.get("ignore_errors", None))
except:
response.error = str(sys.exc_info()[1])
else:
import_count = resource.import_count
if import_count:
response.confirmation = "%s %s" % \
(import_count,
T("Alerts successfully imported."))
else:
response.information = T("No Alerts available.")
return output
else:
raise HTTP(501, current.ERROR.BAD_METHOD)
# END =========================================================================<|fim▁end|> |
# ---------------------------------------------------------------------
# List of Incident Categories -- copied from irs module <-- |
<|file_name|>serialize.js<|end_file_name|><|fim▁begin|>// eslint-disable-next-line import/prefer-default-export
export const serializePlaylist = model => ({
_id: model.id,
name: model.name,<|fim▁hole|> author: model.author,
createdAt: model.createdAt,
description: model.description,
shared: model.shared,
nsfw: model.nsfw,
size: model.media.length,
});<|fim▁end|> | |
<|file_name|>template-fun-ty.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]<|fim▁hole|>#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Foo {
pub _address: u8,
}
pub type Foo_FunctionPtr<T> =
::std::option::Option<unsafe extern "C" fn() -> T>;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RefPtr {
pub _address: u8,
}
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RefPtr_Proxy {
pub _address: u8,
}
pub type RefPtr_Proxy_member_function<R, Args> =
::std::option::Option<unsafe extern "C" fn(arg1: Args) -> R>;
pub type Returner<T> = ::std::option::Option<unsafe extern "C" fn() -> T>;<|fim▁end|> | |
<|file_name|>conv.rs<|end_file_name|><|fim▁begin|>use prelude::*;
use kernels::activate::*;
use densearray::prelude::*;
use operator::prelude::*;
use std::cell::{RefCell};
use std::cmp::{max};
use std::rc::{Rc};
#[derive(Clone, Copy, Debug)]
pub struct Conv1dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize),
pub kernel: usize,
pub stride: usize,
pub dilation: usize,
pub pad: usize,
pub out_chan: usize,
pub bias: bool,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl Conv1dOperatorConfig {
pub fn out_dim(&self) -> (usize, usize) {
// FIXME(20161106): dilation.
let (in_u, _) = self.in_dim;
let out_u = max(0, (in_u + 2 * self.pad - self.kernel + self.stride) as isize) as usize / self.stride;
(out_u, self.out_chan)
}
}
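// Worked example (illustrative): in_u = 100, kernel = 3, stride = 2, pad = 1
// gives out_u = (100 + 2 - 3 + 2) / 2 = 50, i.e. the usual
// floor((in + 2*pad - kernel) / stride) + 1 convolution output length.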
#[derive(Clone, Copy, Debug)]
pub struct Conv2dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub kernel_w: usize,
pub kernel_h: usize,
pub stride_w: usize,
pub stride_h: usize,
pub pad_w: usize,
pub pad_h: usize,
pub out_chan: usize,
pub bias: bool,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
<|fim▁hole|>impl Conv2dOperatorConfig {
pub fn out_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
let out_w = max(0, (in_w + 2 * self.pad_w - self.kernel_w + self.stride_w) as isize) as usize / self.stride_w;
let out_h = max(0, (in_h + 2 * self.pad_h - self.kernel_h + self.stride_h) as isize) as usize / self.stride_h;
(out_w, out_h, self.out_chan)
}
pub fn prefer_gemm_conv(&self) -> bool {
//self.cfg.stride_w != 1 || self.cfg.stride_h != 1
true
}
}
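// Worked example (illustrative): a 32x32 input with kernel_w = kernel_h = 3,
// stride 2 and padding 1 gives (32 + 2 - 3 + 2) / 2 = 16 per axis, so
// out_dim() returns (16, 16, out_chan).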
#[derive(Clone, Copy, Debug)]
pub struct Conv2d1x1OperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub out_chan: usize,
pub bias: bool,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl Conv2d1x1OperatorConfig {
pub fn out_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
(in_w, in_h, self.out_chan)
}
}
#[derive(Clone, Copy, Debug)]
pub struct Conv3dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize, usize),
pub kernel: (usize, usize, usize),
pub stride: (usize, usize, usize),
pub dilation: (usize, usize, usize),
pub pad: (usize, usize, usize),
pub out_chan: usize,
pub bias: bool,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl Conv3dOperatorConfig {
pub fn out_dim(&self) -> (usize, usize, usize, usize) {
// FIXME(20161106): dilation.
let (in_u, in_v, in_w, _) = self.in_dim;
let (kernel_u, kernel_v, kernel_w) = self.kernel;
let (stride_u, stride_v, stride_w) = self.stride;
let (pad_u, pad_v, pad_w) = self.pad;
let out_u = max(0, (in_u + 2 * pad_u - kernel_u + stride_u) as isize) as usize / stride_u;
let out_v = max(0, (in_v + 2 * pad_v - kernel_v + stride_v) as isize) as usize / stride_v;
let out_w = max(0, (in_w + 2 * pad_w - kernel_w + stride_w) as isize) as usize / stride_w;
(out_u, out_v, out_w, self.out_chan)
}
}
#[derive(Clone, Copy)]
pub struct BatchNormConv2dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub kernel_w: usize,
pub kernel_h: usize,
pub stride_w: usize,
pub stride_h: usize,
pub pad_w: usize,
pub pad_h: usize,
pub out_chan: usize,
pub avg_rate: f32,
pub epsilon: f32,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl BatchNormConv2dOperatorConfig {
pub fn out_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
let out_w = max(0, (in_w + 2 * self.pad_w - self.kernel_w + self.stride_w) as isize) as usize / self.stride_w;
let out_h = max(0, (in_h + 2 * self.pad_h - self.kernel_h + self.stride_h) as isize) as usize / self.stride_h;
(out_w, out_h, self.out_chan)
}
pub fn prefer_gemm_conv(&self) -> bool {
//self.cfg.stride_w != 1 || self.cfg.stride_h != 1
true
}
}
#[derive(Clone, Copy, Debug)]
pub struct ResidualConv2dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub avg_rate: f32,
pub epsilon: f32,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
#[derive(Clone, Copy, Debug)]
pub struct ProjResidualConv2dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub stride_w: usize,
pub stride_h: usize,
pub out_chan: usize,
pub avg_rate: f32,
pub epsilon: f32,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl ProjResidualConv2dOperatorConfig {
pub fn out_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
let kernel_w = 3;
let kernel_h = 3;
let pad_w = 1;
let pad_h = 1;
let out_w = max(0, (in_w + 2 * pad_w - kernel_w + self.stride_w) as isize) as usize / self.stride_w;
let out_h = max(0, (in_h + 2 * pad_h - kernel_h + self.stride_h) as isize) as usize / self.stride_h;
(out_w, out_h, self.out_chan)
}
}
pub struct NewResidualConv2dOperator<S, IoBuf: ?Sized> {
cfg: ResidualConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<NewAddJoinOperator<S, IoBuf>>>,
out: CommonOutput,
act_k: ActivateKernel,
}
impl<S, IoBuf: ?Sized> NewResidualConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: ResidualConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<NewResidualConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let conv1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.in_dim.2,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv2_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.in_dim.2,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let join_cfg = JoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let split_op = NewCopySplitOperator::new(split_cfg, cap, prev_op, prev_arm);
let conv1_op = NewBatchNormConv2dOperator::new(conv1_cfg, cap, split_op.clone(), 0);
let conv2_op = NewBatchNormConv2dOperator::new(conv2_cfg, cap, conv1_op, 0);
let join_op = NewAddJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv2_op, 0);
join_op.borrow_mut().append_input(split_op, 1);
Rc::new(RefCell::new(NewResidualConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
out: CommonOutput::new(cfg.batch_sz, cfg.in_dim.flat_len(), cap),
act_k: ActivateKernel::new(cfg.batch_sz, cfg.in_dim.flat_len(), cfg.act_kind),
}))
}
}
impl<S, IoBuf: ?Sized> Operator for NewResidualConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for NewResidualConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
self.out.clone()
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for NewResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for NewResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for NewResidualConv2dOperator<S, IoBuf> {
//type IoBuf = [f32];
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
let join_out = self.join_op.borrow()._output(0);
let batch_size = join_out.batch_sz.get();
self.out.batch_sz.set(batch_size);
self.act_k.forward(batch_size, &*join_out.buf.borrow(), &mut *self.out.buf.borrow_mut());
}
fn _backward(&mut self) {
let join_out = self.join_op.borrow()._output(0);
if let Some(ref join_grad) = join_out.grad.as_ref() {
let batch_size = self.out.batch_sz.get();
self.act_k.backward(batch_size, &*join_out.buf.borrow(), &*self.out.grad.as_ref().unwrap().borrow(), &mut *join_grad.borrow_mut());
}
}
}
pub struct ParallelResidualConv2dOperator<S, IoBuf: ?Sized> {
cfg: ResidualConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<NewAddJoinOperator<S, IoBuf>>>,
out: CommonOutput,
act_k: ParallelActivateKernel,
}
impl<S, IoBuf: ?Sized> ParallelResidualConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: ResidualConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<ParallelResidualConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let conv1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.in_dim.2,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv2_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.in_dim.2,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let join_cfg = JoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let split_op = NewCopySplitOperator::new(split_cfg, cap, prev_op, prev_arm);
let conv1_op = ParallelBatchNormConv2dOperator::new(conv1_cfg, cap, split_op.clone(), 0);
let conv2_op = ParallelBatchNormConv2dOperator::new(conv2_cfg, cap, conv1_op, 0);
let join_op = NewAddJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv2_op, 0);
join_op.borrow_mut().append_input(split_op, 1);
Rc::new(RefCell::new(ParallelResidualConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
out: CommonOutput::new(cfg.batch_sz, cfg.in_dim.flat_len(), cap),
act_k: ParallelActivateKernel::new(cfg.batch_sz, cfg.in_dim.flat_len(), cfg.act_kind),
}))
}
}
impl<S, IoBuf: ?Sized> Operator for ParallelResidualConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for ParallelResidualConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
self.out.clone()
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for ParallelResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for ParallelResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for ParallelResidualConv2dOperator<S, IoBuf> {
//type IoBuf = [f32];
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
let join_out = self.join_op.borrow()._output(0);
let batch_size = join_out.batch_sz.get();
self.out.batch_sz.set(batch_size);
self.act_k.forward(batch_size, &*join_out.buf.borrow(), &mut *self.out.buf.borrow_mut());
}
fn _backward(&mut self) {
let join_out = self.join_op.borrow()._output(0);
if let Some(ref join_grad) = join_out.grad.as_ref() {
let batch_size = self.out.batch_sz.get();
self.act_k.backward(batch_size, &*join_out.buf.borrow(), &*self.out.grad.as_ref().unwrap().borrow(), &mut *join_grad.borrow_mut());
}
}
}
pub struct NewProjResidualConv2dOperator<S, IoBuf: ?Sized> {
cfg: ProjResidualConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<NewAddJoinOperator<S, IoBuf>>>,
out: CommonOutput,
act_k: ActivateKernel,
}
impl<S, IoBuf: ?Sized> NewProjResidualConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: ProjResidualConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<NewProjResidualConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let conv1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: cfg.stride_w,
stride_h: cfg.stride_h,
pad_w: 1,
pad_h: 1,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv2_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.out_dim(),
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let conv1x1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 1,
kernel_h: 1,
stride_w: cfg.stride_w,
stride_h: cfg.stride_h,
pad_w: 0,
pad_h: 0,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let join_cfg = JoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
dim: cfg.out_dim().flat_len(),
};
let split_op = NewCopySplitOperator::new(split_cfg, cap, prev_op, prev_arm);
let conv1_op = NewBatchNormConv2dOperator::new(conv1_cfg, cap, split_op.clone(), 0);
let conv2_op = NewBatchNormConv2dOperator::new(conv2_cfg, cap, conv1_op, 0);
let conv1x1_op = NewBatchNormConv2dOperator::new(conv1x1_cfg, cap, split_op, 1);
let join_op = NewAddJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv2_op, 0);
join_op.borrow_mut().append_input(conv1x1_op, 0);
Rc::new(RefCell::new(NewProjResidualConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
out: CommonOutput::new(cfg.batch_sz, cfg.out_dim().flat_len(), cap),
act_k: ActivateKernel::new(cfg.batch_sz, cfg.out_dim().flat_len(), cfg.act_kind),
}))
}
}
impl<S, IoBuf: ?Sized> Operator for NewProjResidualConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for NewProjResidualConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
self.out.clone()
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for NewProjResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for NewProjResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for NewProjResidualConv2dOperator<S, IoBuf> {
//type IoBuf = [f32];
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
let join_out = self.join_op.borrow()._output(0);
let batch_size = join_out.batch_sz.get();
self.out.batch_sz.set(batch_size);
self.act_k.forward(batch_size, &*join_out.buf.borrow(), &mut *self.out.buf.borrow_mut());
}
fn _backward(&mut self) {
let join_out = self.join_op.borrow()._output(0);
if let Some(ref join_grad) = join_out.grad.as_ref() {
let batch_size = self.out.batch_sz.get();
self.act_k.backward(batch_size, &*join_out.buf.borrow(), &*self.out.grad.as_ref().unwrap().borrow(), &mut *join_grad.borrow_mut());
}
}
}
pub struct ParallelProjResidualConv2dOperator<S, IoBuf: ?Sized> {
cfg: ProjResidualConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<NewAddJoinOperator<S, IoBuf>>>,
out: CommonOutput,
act_k: ParallelActivateKernel,
}
impl<S, IoBuf: ?Sized> ParallelProjResidualConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: ProjResidualConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<ParallelProjResidualConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: cfg.in_dim.flat_len(),
};
let conv1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 3,
kernel_h: 3,
stride_w: cfg.stride_w,
stride_h: cfg.stride_h,
pad_w: 1,
pad_h: 1,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv2_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.out_dim(),
kernel_w: 3,
kernel_h: 3,
stride_w: 1,
stride_h: 1,
pad_w: 1,
pad_h: 1,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let conv1x1_cfg = BatchNormConv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 1,
kernel_h: 1,
stride_w: cfg.stride_w,
stride_h: cfg.stride_h,
pad_w: 0,
pad_h: 0,
out_chan: cfg.out_chan,
avg_rate: cfg.avg_rate,
epsilon: cfg.epsilon,
act_kind: ActivationKind::Identity,
w_init: cfg.w_init,
};
let join_cfg = JoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
dim: cfg.out_dim().flat_len(),
};
let split_op = NewCopySplitOperator::new(split_cfg, cap, prev_op, prev_arm);
let conv1_op = ParallelBatchNormConv2dOperator::new(conv1_cfg, cap, split_op.clone(), 0);
let conv2_op = ParallelBatchNormConv2dOperator::new(conv2_cfg, cap, conv1_op, 0);
let conv1x1_op = ParallelBatchNormConv2dOperator::new(conv1x1_cfg, cap, split_op, 1);
let join_op = NewAddJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv2_op, 0);
join_op.borrow_mut().append_input(conv1x1_op, 0);
Rc::new(RefCell::new(ParallelProjResidualConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
out: CommonOutput::new(cfg.batch_sz, cfg.out_dim().flat_len(), cap),
act_k: ParallelActivateKernel::new(cfg.batch_sz, cfg.out_dim().flat_len(), cfg.act_kind),
}))
}
}
impl<S, IoBuf: ?Sized> Operator for ParallelProjResidualConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for ParallelProjResidualConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
self.out.clone()
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for ParallelProjResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for ParallelProjResidualConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for ParallelProjResidualConv2dOperator<S, IoBuf> {
//type IoBuf = [f32];
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
let join_out = self.join_op.borrow()._output(0);
let batch_size = join_out.batch_sz.get();
self.out.batch_sz.set(batch_size);
self.act_k.forward(batch_size, &*join_out.buf.borrow(), &mut *self.out.buf.borrow_mut());
}
fn _backward(&mut self) {
let join_out = self.join_op.borrow()._output(0);
if let Some(ref join_grad) = join_out.grad.as_ref() {
let batch_size = self.out.batch_sz.get();
self.act_k.backward(batch_size, &*join_out.buf.borrow(), &*self.out.grad.as_ref().unwrap().borrow(), &mut *join_grad.borrow_mut());
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct SqueezeConv2dOperatorConfig {
pub batch_sz: usize,
pub in_dim: (usize, usize, usize),
pub stride_w: usize,
pub stride_h: usize,
pub squeeze: usize,
pub out_chan: usize,
pub act_kind: ActivationKind,
pub w_init: ParamInitKind,
}
impl SqueezeConv2dOperatorConfig {
pub fn squeeze_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
let out_w = max(0, (in_w + self.stride_w - 1) as isize) as usize / self.stride_w;
let out_h = max(0, (in_h + self.stride_h - 1) as isize) as usize / self.stride_h;
(out_w, out_h, self.squeeze)
}
pub fn out_dim(&self) -> (usize, usize, usize) {
let (in_w, in_h, _) = self.in_dim;
let out_w = max(0, (in_w + self.stride_w - 1) as isize) as usize / self.stride_w;
let out_h = max(0, (in_h + self.stride_h - 1) as isize) as usize / self.stride_h;
(out_w, out_h, self.out_chan)
}
}
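// Note (illustrative): (in + stride - 1) / stride is ceiling division, so
// squeeze_dim() and out_dim() keep ceil(in/stride) spatial extent, e.g.
// in_w = 13 with stride_w = 2 gives out_w = 7.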
pub struct SqueezeConv2dOperator<S, IoBuf: ?Sized> {
cfg: SqueezeConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<ConcatJoinOperator<S, IoBuf>>>,
}
impl<S, IoBuf: ?Sized> SqueezeConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: SqueezeConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<SqueezeConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let squeeze_dim = cfg.squeeze_dim();
let expand_chan = cfg.out_chan / 2;
assert_eq!(0, cfg.out_chan % 2);
let conv1_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 1, kernel_h: 1,
stride_w: cfg.stride_w, stride_h: cfg.stride_h,
pad_w: 0, pad_h: 0,
out_chan: cfg.squeeze,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: squeeze_dim.flat_len(),
};
let conv1x1_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: squeeze_dim,
kernel_w: 1, kernel_h: 1,
stride_w: 1, stride_h: 1,
pad_w: 0, pad_h: 0,
out_chan: expand_chan,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv3x3_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: squeeze_dim,
kernel_w: 3, kernel_h: 3,
stride_w: 1, stride_h: 1,
pad_w: 1, pad_h: 1,
out_chan: expand_chan,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let join_cfg = ConcatJoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
in_dims: vec![conv1x1_cfg.out_dim().flat_len(), conv3x3_cfg.out_dim().flat_len()],
};
let conv1_op = NewConv2dOperator::new(conv1_cfg, cap, prev_op, prev_arm);
let split_op = NewCopySplitOperator::new(split_cfg, cap, conv1_op, 0);
let conv1x1_op = NewConv2dOperator::new(conv1x1_cfg, cap, split_op.clone(), 0);
let conv3x3_op = NewConv2dOperator::new(conv3x3_cfg, cap, split_op.clone(), 1);
let join_op = ConcatJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv1x1_op, 0);
join_op.borrow_mut().append_input(conv3x3_op, 0);
Rc::new(RefCell::new(SqueezeConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
}))
}
}
impl<S, IoBuf: ?Sized> Operator for SqueezeConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for SqueezeConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
let join_out = self.join_op.borrow()._output(0);
join_out
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for SqueezeConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for SqueezeConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for SqueezeConv2dOperator<S, IoBuf> {
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
}
fn _backward(&mut self) {
}
}
pub struct ParallelSqueezeConv2dOperator<S, IoBuf: ?Sized> {
cfg: SqueezeConv2dOperatorConfig,
node: OperatorNode,
join_op: Rc<RefCell<ConcatJoinOperator<S, IoBuf>>>,
}
impl<S, IoBuf: ?Sized> ParallelSqueezeConv2dOperator<S, IoBuf> where S: 'static, IoBuf: 'static {
pub fn new<InOp>(cfg: SqueezeConv2dOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<ParallelSqueezeConv2dOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let squeeze_dim = cfg.squeeze_dim();
let expand_chan = cfg.out_chan / 2;
assert_eq!(0, cfg.out_chan % 2);
let conv1_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: cfg.in_dim,
kernel_w: 1, kernel_h: 1,
stride_w: cfg.stride_w, stride_h: cfg.stride_h,
pad_w: 0, pad_h: 0,
out_chan: cfg.squeeze,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let split_cfg = SplitOperatorConfig{
batch_sz: cfg.batch_sz,
out_arms: 2,
dim: squeeze_dim.flat_len(),
};
let conv1x1_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: squeeze_dim,
kernel_w: 1, kernel_h: 1,
stride_w: 1, stride_h: 1,
pad_w: 0, pad_h: 0,
out_chan: expand_chan,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let conv3x3_cfg = Conv2dOperatorConfig{
batch_sz: cfg.batch_sz,
in_dim: squeeze_dim,
kernel_w: 3, kernel_h: 3,
stride_w: 1, stride_h: 1,
pad_w: 1, pad_h: 1,
out_chan: expand_chan,
bias: false,
act_kind: ActivationKind::Rect,
w_init: cfg.w_init,
};
let join_cfg = ConcatJoinOperatorConfig{
batch_sz: cfg.batch_sz,
in_arms: 2,
in_dims: vec![conv1x1_cfg.out_dim().flat_len(), conv3x3_cfg.out_dim().flat_len()],
};
let conv1_op = ParallelConv2dOperator::new(conv1_cfg, cap, prev_op, prev_arm);
let split_op = NewCopySplitOperator::new(split_cfg, cap, conv1_op, 0);
let conv1x1_op = ParallelConv2dOperator::new(conv1x1_cfg, cap, split_op.clone(), 0);
let conv3x3_op = ParallelConv2dOperator::new(conv3x3_cfg, cap, split_op.clone(), 1);
let join_op = ConcatJoinOperator::new(join_cfg, cap);
join_op.borrow_mut().append_input(conv1x1_op, 0);
join_op.borrow_mut().append_input(conv3x3_op, 0);
Rc::new(RefCell::new(ParallelSqueezeConv2dOperator{
cfg: cfg,
node: OperatorNode::default(),
join_op: join_op,
}))
}
}
impl<S, IoBuf: ?Sized> Operator for ParallelSqueezeConv2dOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for ParallelSqueezeConv2dOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
let join_out = self.join_op.borrow()._output(0);
join_out
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for ParallelSqueezeConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for ParallelSqueezeConv2dOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for ParallelSqueezeConv2dOperator<S, IoBuf> {
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.join_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.join_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
}
fn _backward(&mut self) {
}
}<|fim▁end|> | |
<|file_name|>test_validate_hash.py<|end_file_name|><|fim▁begin|>'''Tests for the ValidateHash object'''
from __future__ import absolute_import
import unittest
from nose.tools import assert_true, assert_false
from hashit.core.hash_data import HashData
from hashit.core.hash_type import HashType
from hashit.service.validate_hash import ValidateHash
from hashit.utils.data_encap import DataEncap
from hashit.utils.data_type import DataType
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
# pylint: disable=no-self-use
class TestHashIt(unittest.TestCase):
def setUp(self):
self.data = HashData(
DataEncap(DataType.FILE, "test/support/example.bin"))
def tearDown(self):
pass
def test_verify_hash_crc8_expected_result(self):
assert_true(ValidateHash(
result="14",
hash_type=HashType.CRC8,
data=self.data
).is_vaild())
def test_verify_hash_crc8_bad_result(self):
assert_false(ValidateHash(
result="FE",
hash_type=HashType.CRC8,
data=self.data
).is_vaild())
def test_verify_hash_crc16_expected_result(self):
assert_true(ValidateHash(
result="BAD3",
hash_type=HashType.CRC16,
data=self.data
).is_vaild())
def test_verify_hash_crc16_bad_result(self):
assert_false(ValidateHash(
result="78E7",
hash_type=HashType.CRC16,
data=self.data
).is_vaild())
def test_verify_hash_crc32_expected_result(self):
assert_true(ValidateHash(
result="29058C73",
hash_type=HashType.CRC32,
data=self.data
).is_vaild())
def test_verify_hash_crc32_bad_result(self):
assert_false(ValidateHash(
result="ACEF2345",
hash_type=HashType.CRC32,
data=self.data
).is_vaild())
def test_verify_hash_crc64_expected_result(self):
assert_true(ValidateHash(
result="6C27EAA78BA3F822",
hash_type=HashType.CRC64,
data=self.data
).is_vaild())
def test_verify_hash_crc64_bad_result(self):
assert_false(ValidateHash(
result="DEADBEEFF00DB00F",
hash_type=HashType.CRC64,
data=self.data
).is_vaild())
def test_verify_hash_md5_expected_result(self):
assert_true(ValidateHash(
result="E2C865DB4162BED963BFAA9EF6AC18F0",
hash_type=HashType.MD5,
data=self.data
).is_vaild())<|fim▁hole|> def test_verify_hash_md5_bad_result(self):
assert_false(ValidateHash(
result="11223344556677889900AECF431304065",
hash_type=HashType.MD5,
data=self.data
).is_vaild())
def test_verify_hash_sha1_expected_result(self):
assert_true(ValidateHash(
result="4916D6BDB7F78E6803698CAB32D1586EA457DFC8",
hash_type=HashType.SHA1,
data=self.data
).is_vaild())
def test_verify_hash_sha1_bad_result(self):
assert_false(ValidateHash(
result="987654321AC12345876543BCC34567862737FF20",
hash_type=HashType.SHA1,
data=self.data
).is_vaild())
def test_verify_hash_sha224_expected_result(self):
assert_true(ValidateHash(
result="88702E63237824C4EB0D0FCFE41469A462493E8BEB2A75BBE5981734",
hash_type=HashType.SHA224,
data=self.data
).is_vaild())
def test_verify_hash_sha224_bad_result(self):
assert_false(ValidateHash(
result="AACCEEDDFF928173647D0FBC09375847268EB88EEFF378592047583",
hash_type=HashType.SHA224,
data=self.data
).is_vaild())
def test_verify_hash_sha256_expected_result(self):
assert_true(ValidateHash(
result="40AFF2E9D2D8922E47AFD4648E6967497158785FBD1DA870E7110266BF944880",
hash_type=HashType.SHA256,
data=self.data
).is_vaild())
def test_verify_hash_sha256_bad_result(self):
assert_false(ValidateHash(
result="AF82E982D8922E47AFD4648E674ACE587BEEF85FBD1D0266BF944880123455FF",
hash_type=HashType.SHA256,
data=self.data
).is_vaild())<|fim▁end|> | |
<|file_name|>MpegDecoder.cpp<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------------
// MPEG Layer 1,2,3 decoder
// Component of the "Шквал" (Shkval) sound engine
// team      : AntiTank
// developer : Гилязетдинов Марат (Марыч)
//-----------------------------------------------------------------------------
// includes
#include <string.h>
#include <math.h>
#include "MpegDecoder.h"
void CDecompressMpeg::imdct_init()
{
int k, p, n;
double t, pi;
n = 18;
pi = 4.0 * atan(1.0);
t = pi / (4 * n);
for (p = 0; p < n; p++)
w[p] = (float) (2.0 * cos(t * (2 * p + 1)));
for (p = 0; p < 9; p++)
w2[p] = (float) (2.0 * cos(2 * t * (2 * p + 1)));
t = pi / (2 * n);
for (k = 0; k < 9; k++) {
for (p = 0; p < 4; p++)
coef[k][p] = (float) (cos(t * (2 * k) * (2 * p + 1)));
}
n = 6;
pi = 4.0 * atan(1.0);
t = pi / (4 * n);
for (p = 0; p < n; p++)
v[p] = (float) (2.0 * cos(t * (2 * p + 1)));
for (p = 0; p < 3; p++)
v2[p] = (float) (2.0 * cos(2 * t * (2 * p + 1)));
t = pi / (2 * n);
k = 1;
p = 0;
coef87 = (float) (cos(t * (2 * k) * (2 * p + 1)));
for (p = 0; p < 6; p++)
v[p] = v[p] / 2.0f;
coef87 = (float) (2.0 * coef87);
}
void CDecompressMpeg::imdct18(float f[18]) /* 18 point */
{
int p;
float a[9], b[9];
float ap, bp, a8p, b8p;
float g1, g2;
for (p = 0; p < 4; p++) {
g1 = w[p] * f[p];
g2 = w[17 - p] * f[17 - p];
ap = g1 + g2; // a[p]
bp = w2[p] * (g1 - g2); // b[p]
g1 = w[8 - p] * f[8 - p];
g2 = w[9 + p] * f[9 + p];
a8p = g1 + g2; // a[8-p]
b8p = w2[8 - p] * (g1 - g2); // b[8-p]
a[p] = ap + a8p;
a[5 + p] = ap - a8p;
b[p] = bp + b8p;
b[5 + p] = bp - b8p;
}
g1 = w[p] * f[p];
g2 = w[17 - p] * f[17 - p];
a[p] = g1 + g2;
b[p] = w2[p] * (g1 - g2);
f[0] = 0.5f * (a[0] + a[1] + a[2] + a[3] + a[4]);
f[1] = 0.5f * (b[0] + b[1] + b[2] + b[3] + b[4]);
f[2] = coef[1][0] * a[5] +
coef[1][1] * a[6] +
coef[1][2] * a[7] +
coef[1][3] * a[8];
f[3] = coef[1][0] * b[5] +
coef[1][1] * b[6] +
coef[1][2] * b[7] +
coef[1][3] * b[8] -
f[1];
f[1] = f[1] - f[0];
f[2] = f[2] - f[1];
f[4] = coef[2][0] * a[0] +
coef[2][1] * a[1] +
coef[2][2] * a[2] +
coef[2][3] * a[3] -
a[4];
f[5] = coef[2][0] * b[0] +
coef[2][1] * b[1] +
coef[2][2] * b[2] +
coef[2][3] * b[3] -
b[4] -
f[3];
f[3] = f[3] - f[2];
f[4] = f[4] - f[3];
f[6] = coef[3][0] * (a[5] - a[7] - a[8]);
f[7] = coef[3][0] * (b[5] - b[7] - b[8]) - f[5];
f[5] = f[5] - f[4];
f[6] = f[6] - f[5];
f[8] = coef[4][0] * a[0] +
coef[4][1] * a[1] +
coef[4][2] * a[2] +
coef[4][3] * a[3] +
a[4];
f[9] = coef[4][0] * b[0] +
coef[4][1] * b[1] +
coef[4][2] * b[2] +
coef[4][3] * b[3] +
b[4] -
f[7];
f[7] = f[7] - f[6];
f[8] = f[8] - f[7];
f[10] = coef[5][0] * a[5] +
coef[5][1] * a[6] +
coef[5][2] * a[7] +
coef[5][3] * a[8];
f[11] = coef[5][0] * b[5] +
coef[5][1] * b[6] +
coef[5][2] * b[7] +
coef[5][3] * b[8] -
f[9];
f[9] = f[9] - f[8];
f[10] = f[10] - f[9];
f[12] = 0.5f * (a[0] + a[2] + a[3]) - a[1] - a[4];
f[13] = 0.5f * (b[0] + b[2] + b[3]) - b[1] - b[4] - f[11];
f[11] = f[11] - f[10];
f[12] = f[12] - f[11];
f[14] = coef[7][0] * a[5] +
coef[7][1] * a[6] +
coef[7][2] * a[7] +
coef[7][3] * a[8];
f[15] = coef[7][0] * b[5] +
coef[7][1] * b[6] +
coef[7][2] * b[7] +
coef[7][3] * b[8] -
f[13];
f[13] = f[13] - f[12];
f[14] = f[14] - f[13];
f[16] = coef[8][0] * a[0] +
coef[8][1] * a[1] +
coef[8][2] * a[2] +
coef[8][3] * a[3] +
a[4];
f[17] = coef[8][0] * b[0] +
coef[8][1] * b[1] +
coef[8][2] * b[2] +
coef[8][3] * b[3] +
b[4] -
f[15];
f[15] = f[15] - f[14];
f[16] = f[16] - f[15];
f[17] = f[17] - f[16];
}
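/* Illustrative note: imdct18() is the 18-point IMDCT used per subband;
   the w[]/w2[] pre-twiddles split it into 9-point even (a[]) and odd (b[])
   parts, and the running f[k] -= f[k-1] differences fold the odd part
   back in without a separate output buffer. */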
/*--------------------------------------------------------------------*/
/* does three 6-pt IMDCTs; changes order from f[i][window] to c[window][i] */
void CDecompressMpeg::imdct6_3(float f[]) /* 6 point */
{
int w;
float buf[18];
float* a,* c; // b[i] = a[3+i]
float g1, g2;
float a02, b02;
c = f;
a = buf;
for (w = 0; w < 3; w++) {
g1 = v[0] * f[3 * 0];
g2 = v[5] * f[3 * 5];
a[0] = g1 + g2;
a[3 + 0] = v2[0] * (g1 - g2);
g1 = v[1] * f[3 * 1];
g2 = v[4] * f[3 * 4];
a[1] = g1 + g2;
a[3 + 1] = v2[1] * (g1 - g2);
g1 = v[2] * f[3 * 2];
g2 = v[3] * f[3 * 3];
a[2] = g1 + g2;
a[3 + 2] = v2[2] * (g1 - g2);
a += 6;
f++;
}
a = buf;
for (w = 0; w < 3; w++) {
a02 = (a[0] + a[2]);
b02 = (a[3 + 0] + a[3 + 2]);
c[0] = a02 + a[1];
c[1] = b02 + a[3 + 1];
c[2] = coef87 * (a[0] - a[2]);
c[3] = coef87 * (a[3 + 0] - a[3 + 2]) - c[1];
c[1] = c[1] - c[0];
c[2] = c[2] - c[1];
c[4] = a02 - a[1] - a[1];
c[5] = b02 - a[3 + 1] - a[3 + 1] - c[3];
c[3] = c[3] - c[2];
c[4] = c[4] - c[3];
c[5] = c[5] - c[4];
a += 6;
c += 6;
}
}
void CDecompressMpeg::fdct_init() /* gen coef for N=32 (31 coefs) */
{
int p, n, i, k;
double t, pi;
pi = 4.0 * atan(1.0);
n = 16;
k = 0;
for (i = 0; i < 5; i++, n = n / 2) {
for (p = 0; p < n; p++, k++) {
t = (pi / (4 * n)) * (2 * p + 1);
coef32[k] = (float) (0.50 / cos(t));
}
}
}
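/* Illustrative note: fdct_init() fills coef32[] with the 31 butterfly
   scale factors 0.5 / cos((2p+1)*pi/(4n)) for n = 16, 8, 4, 2, 1
   (16+8+4+2+1 = 31 entries, stored back to back). */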
void CDecompressMpeg::forward_bf(int m, int n, float x[], float f[],
float coef[])
{
int i, j, n2;
int p, q, p0, k;
p0 = 0;
n2 = n >> 1;
for (i = 0; i < m; i++, p0 += n) {
k = 0;
p = p0;
q = p + n - 1;
for (j = 0; j < n2; j++, p++, q--, k++) {
f[p] = x[p] + x[q];
f[n2 + p] = coef[k] * (x[p] - x[q]);
}
}
}
void CDecompressMpeg::back_bf(int m, int n, float x[], float f[])
{
int i, j, n2, n21;
int p, q, p0;
p0 = 0;
n2 = n >> 1;
n21 = n2 - 1;
for (i = 0; i < m; i++, p0 += n) {
p = p0;
q = p0;
for (j = 0; j < n2; j++, p += 2, q++)
f[p] = x[q];
p = p0 + 1;
for (j = 0; j < n21; j++, p += 2, q++)
f[p] = x[q] + x[q + 1];
f[p] = x[q];
}
}
void CDecompressMpeg::fdct32(float x[], float c[])
{
float a[32]; /* ping pong buffers */
float b[32];
int p, q;
	// if the equalizer is enabled, apply its gain values
/* if (m_enableEQ) {
for (p = 0; p < 32; p++)
x[p] *= m_equalizer[p];
}*/
/* special first stage */
for (p = 0, q = 31; p < 16; p++, q--) {
a[p] = x[p] + x[q];
a[16 + p] = coef32[p] * (x[p] - x[q]);
}
forward_bf(2, 16, a, b, coef32 + 16);
forward_bf(4, 8, b, a, coef32 + 16 + 8);
forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
back_bf(8, 4, a, b);
back_bf(4, 8, b, a);
back_bf(2, 16, a, b);
back_bf(1, 32, b, c);
}
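/* Illustrative note: fdct32() is a fast 32-point DCT -- the forward_bf()
   butterflies split each block into sums and coef-scaled differences
   (32->16->8->4->2), then back_bf() interleaves the partial results back
   into natural order. */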
void CDecompressMpeg::fdct32_dual(float x[], float c[])
{
float a[32]; /* ping pong buffers */
float b[32];
int p, pp, qq;
/* if (m_enableEQ) {
for (p = 0; p < 32; p++)
x[p] *= m_equalizer[p];
}*/
/* special first stage for dual chan (interleaved x) */
pp = 0;
qq = 2 * 31;
for (p = 0; p < 16; p++, pp += 2, qq -= 2) {
a[p] = x[pp] + x[qq];
a[16 + p] = coef32[p] * (x[pp] - x[qq]);
}
forward_bf(2, 16, a, b, coef32 + 16);
forward_bf(4, 8, b, a, coef32 + 16 + 8);
forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
back_bf(8, 4, a, b);
back_bf(4, 8, b, a);
back_bf(2, 16, a, b);
back_bf(1, 32, b, c);
}
void CDecompressMpeg::fdct32_dual_mono(float x[], float c[])
{
float a[32]; /* ping pong buffers */
float b[32];
float t1, t2;
int p, pp, qq;
/* special first stage */
pp = 0;
qq = 2 * 31;
for (p = 0; p < 16; p++, pp += 2, qq -= 2) {
t1 = 0.5F * (x[pp] + x[pp + 1]);
t2 = 0.5F * (x[qq] + x[qq + 1]);
a[p] = t1 + t2;
a[16 + p] = coef32[p] * (t1 - t2);
}
forward_bf(2, 16, a, b, coef32 + 16);
forward_bf(4, 8, b, a, coef32 + 16 + 8);
forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
back_bf(8, 4, a, b);
back_bf(4, 8, b, a);
back_bf(2, 16, a, b);
back_bf(1, 32, b, c);
}
void CDecompressMpeg::fdct16(float x[], float c[])
{
float a[16]; /* ping pong buffers */
float b[16];
int p, q;
/* special first stage (drop highest sb) */
a[0] = x[0];
a[8] = coef32[16] * x[0];
for (p = 1, q = 14; p < 8; p++, q--) {
a[p] = x[p] + x[q];
a[8 + p] = coef32[16 + p] * (x[p] - x[q]);
}
forward_bf(2, 8, a, b, coef32 + 16 + 8);
forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(4, 4, b, a);
back_bf(2, 8, a, b);
back_bf(1, 16, b, c);
}
void CDecompressMpeg::fdct16_dual(float x[], float c[])
{
float a[16]; /* ping pong buffers */
float b[16];
int p, pp, qq;
/* special first stage for interleaved input */
a[0] = x[0];
a[8] = coef32[16] * x[0];
pp = 2;
qq = 2 * 14;
for (p = 1; p < 8; p++, pp += 2, qq -= 2) {
a[p] = x[pp] + x[qq];
a[8 + p] = coef32[16 + p] * (x[pp] - x[qq]);
}
forward_bf(2, 8, a, b, coef32 + 16 + 8);
forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(4, 4, b, a);
back_bf(2, 8, a, b);
back_bf(1, 16, b, c);
}
void CDecompressMpeg::fdct16_dual_mono(float x[], float c[])
{
float a[16]; /* ping pong buffers */
float b[16];
float t1, t2;
int p, pp, qq;
/* special first stage */
a[0] = 0.5F * (x[0] + x[1]);
a[8] = coef32[16] * a[0];
pp = 2;
qq = 2 * 14;
for (p = 1; p < 8; p++, pp += 2, qq -= 2) {
t1 = 0.5F * (x[pp] + x[pp + 1]);
t2 = 0.5F * (x[qq] + x[qq + 1]);
a[p] = t1 + t2;
a[8 + p] = coef32[16 + p] * (t1 - t2);
}
forward_bf(2, 8, a, b, coef32 + 16 + 8);
forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(4, 4, b, a);
back_bf(2, 8, a, b);
back_bf(1, 16, b, c);
}
void CDecompressMpeg::fdct8(float x[], float c[])
{
float a[8]; /* ping pong buffers */
float b[8];
int p, q;
/* special first stage */
b[0] = x[0] + x[7];
b[4] = coef32[16 + 8] * (x[0] - x[7]);
for (p = 1, q = 6; p < 4; p++, q--) {
b[p] = x[p] + x[q];
b[4 + p] = coef32[16 + 8 + p] * (x[p] - x[q]);
}
forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(2, 4, b, a);
back_bf(1, 8, a, c);
}
void CDecompressMpeg::fdct8_dual(float x[], float c[])
{
float a[8]; /* ping pong buffers */
float b[8];
int p, pp, qq;
/* special first stage for interleaved input */
b[0] = x[0] + x[14];
b[4] = coef32[16 + 8] * (x[0] - x[14]);
pp = 2;
qq = 2 * 6;
for (p = 1; p < 4; p++, pp += 2, qq -= 2) {
b[p] = x[pp] + x[qq];
b[4 + p] = coef32[16 + 8 + p] * (x[pp] - x[qq]);
}
forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(2, 4, b, a);
back_bf(1, 8, a, c);
}
void CDecompressMpeg::fdct8_dual_mono(float x[], float c[])
{
float a[8]; /* ping pong buffers */
float b[8];
float t1, t2;
int p, pp, qq;
/* special first stage */
t1 = 0.5F * (x[0] + x[1]);
t2 = 0.5F * (x[14] + x[15]);
b[0] = t1 + t2;
b[4] = coef32[16 + 8] * (t1 - t2);
pp = 2;
qq = 2 * 6;
for (p = 1; p < 4; p++, pp += 2, qq -= 2) {
t1 = 0.5F * (x[pp] + x[pp + 1]);
t2 = 0.5F * (x[qq] + x[qq + 1]);
b[p] = t1 + t2;
b[4 + p] = coef32[16 + 8 + p] * (t1 - t2);
}
forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
back_bf(2, 4, b, a);
back_bf(1, 8, a, c);
}
void CDecompressMpeg::bitget_init(unsigned char* buf)
{
bs_ptr0 = bs_ptr = buf;
bits = 0;
bitbuf = 0;
}
int CDecompressMpeg::bitget(int n)
{
unsigned int x;
if (bits < n) {
/* refill bit buf if necessary */
while (bits <= 24) {
bitbuf = (bitbuf << 8) | *bs_ptr++;
bits += 8;
}
}
bits -= n;
x = bitbuf >> bits;
bitbuf -= x << bits;
return x;
}
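/* Illustrative example: bitget() serves bits MSB-first from 'bitbuf',
   which holds 'bits' valid low-order bits. With bits = 10 and
   bitbuf = 0x2AB, bitget(4) returns 0xA and leaves bits = 6,
   bitbuf = 0x2B. */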
void CDecompressMpeg::bitget_skip(int n)
{
unsigned int k;
if (bits < n) {
n -= bits;
k = n >> 3;
/*--- bytes = n/8 --*/
bs_ptr += k;
n -= k << 3;
bitbuf = *bs_ptr++;
bits = 8;
}
bits -= n;
bitbuf -= (bitbuf >> bits) << bits;
}
void CDecompressMpeg::bitget_init_end(unsigned char* buf_end)
{
bs_ptr_end = buf_end;
}
int CDecompressMpeg::bitget_overrun()
{
return bs_ptr > bs_ptr_end;
}
int CDecompressMpeg::bitget_bits_used()
{
unsigned int n;
n = ((bs_ptr - bs_ptr0) << 3) - bits;
return n;
}
void CDecompressMpeg::bitget_check(int n)
{
if (bits < n) {
while (bits <= 24) {
bitbuf = (bitbuf << 8) | *bs_ptr++;
bits += 8;
}
}
}
/* only huffman */
/*----- get n bits - checks for n+2 avail bits (linbits+sign) -----*/
int CDecompressMpeg::bitget_lb(int n)
{
unsigned int x;
if (bits < (n + 2)) {
/* refill bit buf if necessary */
while (bits <= 24) {
bitbuf = (bitbuf << 8) | *bs_ptr++;
bits += 8;
}
}
bits -= n;
x = bitbuf >> bits;
bitbuf -= x << bits;
return x;
}
/*------------- get n bits but DO NOT remove from bitstream --*/
int CDecompressMpeg::bitget2(int n)
{
unsigned int x;
if (bits < (MAXBITS + 2)) {
/* refill bit buf if necessary */
while (bits <= 24) {
bitbuf = (bitbuf << 8) | *bs_ptr++;
bits += 8;
}
}
x = bitbuf >> (bits - n);
return x;
}
/*------------- remove n bits from bitstream ---------*/
void CDecompressMpeg::bitget_purge(int n)
{
bits -= n;
bitbuf -= (bitbuf >> bits) << bits;
}
void CDecompressMpeg::mac_bitget_check(int n)
{
if (bits < n) {
while (bits <= 24) {
bitbuf = (bitbuf << 8) | *bs_ptr++;
bits += 8;
}
}
}
int CDecompressMpeg::mac_bitget(int n)
{
unsigned int code;
bits -= n;
code = bitbuf >> bits;
bitbuf -= code << bits;
return code;
}
int CDecompressMpeg::mac_bitget2(int n)
{
return (bitbuf >> (bits - n));
}
int CDecompressMpeg::mac_bitget_1bit()
{
unsigned int code;
bits--;
code = bitbuf >> bits;
bitbuf -= code << bits;
return code;
}
void CDecompressMpeg::mac_bitget_purge(int n)
{
bits -= n;
bitbuf -= (bitbuf >> bits) << bits;
}
void CDecompressMpeg::windowB(float* vbuf, int vb_ptr, unsigned char* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 16;
bx = (si + 32) & 511;
coef = wincoef;
/*-- first 16 --*/
for (i = 0; i < 16; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 64) & 511;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
si++;
bx--;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
/*-- last 15 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 15; i++) {
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 64) & 511;
sum += (*coef--) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
}
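/* Illustrative note: windowB() applies the polyphase synthesis window --
   each call turns one fdct32() output into 32 unsigned 8-bit PCM samples,
   dotting the 512-entry ring buffer 'vbuf' against the 256-tap 'wincoef'
   table (output loop split as 16 + 1 + 15 samples). */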
void CDecompressMpeg::windowB_dual(float* vbuf, int vb_ptr, unsigned char* pcm)
{
int i, j; /* dual window interleaves output */
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 16;
bx = (si + 32) & 511;
coef = wincoef;
/*-- first 16 --*/
for (i = 0; i < 16; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 64) & 511;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
si++;
bx--;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
/*-- last 15 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 15; i++) {
si--;<|fim▁hole|> sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 64) & 511;
sum += (*coef--) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
}
void CDecompressMpeg::windowB16(float* vbuf, int vb_ptr, unsigned char* pcm)
{
int i, j;
unsigned char si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 8;
bx = si + 16;
coef = wincoef;
/*-- first 8 --*/
for (i = 0; i < 8; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si += 32;
sum -= (*coef++) * vbuf[bx];
bx += 32;
}
si++;
bx--;
coef += 16;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
/*-- last 7 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 7; i++) {
coef -= 16;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si += 32;
sum += (*coef--) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
}
void CDecompressMpeg::windowB16_dual(float* vbuf, int vb_ptr,
unsigned char* pcm)
{
int i, j;
unsigned char si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 8;
bx = si + 16;
coef = wincoef;
/*-- first 8 --*/
for (i = 0; i < 8; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si += 32;
sum -= (*coef++) * vbuf[bx];
bx += 32;
}
si++;
bx--;
coef += 16;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
/*-- last 7 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 7; i++) {
coef -= 16;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si += 32;
sum += (*coef--) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
}
void CDecompressMpeg::windowB8(float* vbuf, int vb_ptr, unsigned char* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 4;
bx = (si + 8) & 127;
coef = wincoef;
/*-- first 4 --*/
for (i = 0; i < 4; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 16) & 127;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
si++;
bx--;
coef += 48;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
/*-- last 3 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 3; i++) {
coef -= 48;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 16) & 127;
sum += (*coef--) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
}
}
/*--------------- 8 pt dual window (interleaved output) -----------------*/
void CDecompressMpeg::windowB8_dual(float* vbuf, int vb_ptr,
unsigned char* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 4;
bx = (si + 8) & 127;
coef = wincoef;
/*-- first 4 --*/
for (i = 0; i < 4; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 16) & 127;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
si++;
bx--;
coef += 48;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
/*-- last 3 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 3; i++) {
coef -= 48;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 16) & 127;
sum += (*coef--) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
pcm += 2;
}
}
void CDecompressMpeg::sbtB_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct32(sample, vbuf + vb_ptr);
windowB(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbtB_dual(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
fdct32_dual(sample + 1, vbuf2 + vb_ptr);
windowB_dual(vbuf, vb_ptr, pcm);
windowB_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 64;
}
}
void CDecompressMpeg::sbtB_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual_mono(sample, vbuf + vb_ptr);
windowB(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbtB_dual_left(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
windowB(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbtB_dual_right(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
sample++; /* point to right chan */
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
windowB(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbtB16_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct16(sample, vbuf + vb_ptr);
windowB16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbtB16_dual(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
fdct16_dual(sample + 1, vbuf2 + vb_ptr);
windowB16_dual(vbuf, vb_ptr, pcm);
windowB16_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 32;
}
}
void CDecompressMpeg::sbtB16_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual_mono(sample, vbuf + vb_ptr);
windowB16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbtB16_dual_left(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
windowB16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbtB16_dual_right(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
sample++;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
windowB16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbtB8_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct8(sample, vbuf + vb_ptr);
windowB8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbtB8_dual(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
fdct8_dual(sample + 1, vbuf2 + vb_ptr);
windowB8_dual(vbuf, vb_ptr, pcm);
windowB8_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 16;
}
}
void CDecompressMpeg::sbtB8_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual_mono(sample, vbuf + vb_ptr);
windowB8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbtB8_dual_left(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
windowB8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbtB8_dual_right(float* sample, void* in_pcm, int n)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
sample++;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
windowB8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbtB_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf + vb_ptr);
windowB(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbtB_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
if (ch == 0)
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf + vb_ptr);
windowB_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 64;
}
else
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf2 + vb2_ptr);
windowB_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 32) & 511;
pcm += 64;
}
}
void CDecompressMpeg::sbtB16_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf + vb_ptr);
windowB16(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbtB16_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
if (ch == 0) {
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf + vb_ptr);
windowB16_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 32;
}
} else {
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf2 + vb2_ptr);
windowB16_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 16) & 255;
pcm += 32;
}
}
}
void CDecompressMpeg::sbtB8_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf + vb_ptr);
windowB8(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbtB8_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
unsigned char * pcm = (unsigned char *) in_pcm;
if (ch == 0) {
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf + vb_ptr);
windowB8_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 16;
}
} else {
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf2 + vb2_ptr);
windowB8_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 8) & 127;
pcm += 16;
}
}
}
// window coefs
float CDecompressMpeg::wincoef[264] = {
0.000000000f, 0.000442505f, -0.003250122f, 0.007003784f, -0.031082151f,
0.078628540f, -0.100311279f, 0.572036743f, -1.144989014f, -0.572036743f,
-0.100311279f, -0.078628540f, -0.031082151f, -0.007003784f, -0.003250122f,
-0.000442505f, 0.000015259f, 0.000473022f, -0.003326416f, 0.007919312f,
-0.030517576f, 0.084182739f, -0.090927124f, 0.600219727f, -1.144287109f,
-0.543823242f, -0.108856201f, -0.073059082f, -0.031478882f, -0.006118774f,
-0.003173828f, -0.000396729f, 0.000015259f, 0.000534058f, -0.003387451f,
0.008865356f, -0.029785154f, 0.089706421f, -0.080688477f, 0.628295898f,
-1.142211914f, -0.515609741f, -0.116577141f, -0.067520142f, -0.031738281f,
-0.005294800f, -0.003082275f, -0.000366211f, 0.000015259f, 0.000579834f,
-0.003433228f, 0.009841919f, -0.028884888f, 0.095169067f, -0.069595337f,
0.656219482f, -1.138763428f, -0.487472534f, -0.123474121f, -0.061996460f,
-0.031845093f, -0.004486084f, -0.002990723f, -0.000320435f, 0.000015259f,
0.000625610f, -0.003463745f, 0.010848999f, -0.027801514f, 0.100540161f,
-0.057617184f, 0.683914185f, -1.133926392f, -0.459472656f, -0.129577637f,
-0.056533810f, -0.031814575f, -0.003723145f, -0.002899170f, -0.000289917f,
0.000015259f, 0.000686646f, -0.003479004f, 0.011886597f, -0.026535034f,
0.105819702f, -0.044784546f, 0.711318970f, -1.127746582f, -0.431655884f,
-0.134887695f, -0.051132202f, -0.031661987f, -0.003005981f, -0.002792358f,
-0.000259399f, 0.000015259f, 0.000747681f, -0.003479004f, 0.012939452f,
-0.025085449f, 0.110946655f, -0.031082151f, 0.738372803f, -1.120223999f,
-0.404083252f, -0.139450073f, -0.045837402f, -0.031387329f, -0.002334595f,
-0.002685547f, -0.000244141f, 0.000030518f, 0.000808716f, -0.003463745f,
0.014022826f, -0.023422241f, 0.115921021f, -0.016510010f, 0.765029907f,
-1.111373901f, -0.376800537f, -0.143264771f, -0.040634155f, -0.031005858f,
-0.001693726f, -0.002578735f, -0.000213623f, 0.000030518f, 0.000885010f,
-0.003417969f, 0.015121460f, -0.021575928f, 0.120697014f, -0.001068115f,
0.791213989f, -1.101211548f, -0.349868774f, -0.146362305f, -0.035552979f,
-0.030532837f, -0.001098633f, -0.002456665f, -0.000198364f, 0.000030518f,
0.000961304f, -0.003372192f, 0.016235352f, -0.019531250f, 0.125259399f,
0.015228271f, 0.816864014f, -1.089782715f, -0.323318481f, -0.148773193f,
-0.030609131f, -0.029937742f, -0.000549316f, -0.002349854f, -0.000167847f,
0.000030518f, 0.001037598f, -0.003280640f, 0.017349243f, -0.017257690f,
0.129562378f, 0.032379150f, 0.841949463f, -1.077117920f, -0.297210693f,
-0.150497437f, -0.025817871f, -0.029281614f, -0.000030518f, -0.002243042f,
-0.000152588f, 0.000045776f, 0.001113892f, -0.003173828f, 0.018463135f,
-0.014801024f, 0.133590698f, 0.050354004f, 0.866363525f, -1.063217163f,
-0.271591187f, -0.151596069f, -0.021179199f, -0.028533936f, 0.000442505f,
-0.002120972f, -0.000137329f, 0.000045776f, 0.001205444f, -0.003051758f,
0.019577026f, -0.012115479f, 0.137298584f, 0.069168091f, 0.890090942f,
-1.048156738f, -0.246505737f, -0.152069092f, -0.016708374f, -0.027725220f,
0.000869751f, -0.002014160f, -0.000122070f, 0.000061035f, 0.001296997f,
-0.002883911f, 0.020690918f, -0.009231566f, 0.140670776f, 0.088775635f,
0.913055420f, -1.031936646f, -0.221984863f, -0.151962280f, -0.012420653f,
-0.026840210f, 0.001266479f, -0.001907349f, -0.000106812f, 0.000061035f,
0.001388550f, -0.002700806f, 0.021789551f, -0.006134033f, 0.143676758f,
0.109161377f, 0.935195923f, -1.014617920f, -0.198059082f, -0.151306152f,
-0.008316040f, -0.025909424f, 0.001617432f, -0.001785278f, -0.000106812f,
0.000076294f, 0.001480103f, -0.002487183f, 0.022857666f, -0.002822876f,
0.146255493f, 0.130310059f, 0.956481934f, -0.996246338f, -0.174789429f,
-0.150115967f, -0.004394531f, -0.024932859f, 0.001937866f, -0.001693726f,
-0.000091553f, -0.001586914f, -0.023910521f, -0.148422241f, -0.976852417f,
0.152206421f, 0.000686646f, -0.002227783f, 0.000076294f,
};
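/* The 264-entry wincoef table is one half of the 512-tap synthesis window:
   window() makes a forward pass through it for the first 16 output samples,
   handles the middle sample as a special case, then walks the same table
   backwards for the last 15, clamping every sum to signed 16-bit PCM. */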
void CDecompressMpeg::window(float* vbuf, int vb_ptr, short* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 16;
bx = (si + 32) & 511;
coef = wincoef;
/*-- first 16 --*/
for (i = 0; i < 16; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 64) & 511;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
si++;
bx--;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
/*-- last 15 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 15; i++) {
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 64) & 511;
sum += (*coef--) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
}
void CDecompressMpeg::window_dual(float* vbuf, int vb_ptr, short* pcm)
{
int i, j; /* dual window interleaves output */
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 16;
bx = (si + 32) & 511;
coef = wincoef;
/*-- first 16 --*/
for (i = 0; i < 16; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 64) & 511;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
si++;
bx--;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
/*-- last 15 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 15; i++) {
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 64) & 511;
sum += (*coef--) * vbuf[bx];
bx = (bx + 64) & 511;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
}
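/* 16-point variants: si and bx are deliberately declared unsigned char so
   the += 32 steps wrap modulo 256 without an explicit mask. */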
void CDecompressMpeg::window16(float* vbuf, int vb_ptr, short* pcm)
{
int i, j;
unsigned char si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 8;
bx = si + 16;
coef = wincoef;
/*-- first 8 --*/
for (i = 0; i < 8; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si += 32;
sum -= (*coef++) * vbuf[bx];
bx += 32;
}
si++;
bx--;
coef += 16;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
/*-- last 7 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 7; i++) {
coef -= 16;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si += 32;
sum += (*coef--) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
}
void CDecompressMpeg::window16_dual(float* vbuf, int vb_ptr, short* pcm)
{
int i, j;
unsigned char si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 8;
bx = si + 16;
coef = wincoef;
/*-- first 8 --*/
for (i = 0; i < 8; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si += 32;
sum -= (*coef++) * vbuf[bx];
bx += 32;
}
si++;
bx--;
coef += 16;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
/*-- last 7 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 7; i++) {
coef -= 16;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si += 32;
sum += (*coef--) * vbuf[bx];
bx += 32;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
}
void CDecompressMpeg::window8(float* vbuf, int vb_ptr, short* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 4;
bx = (si + 8) & 127;
coef = wincoef;
/*-- first 4 --*/
for (i = 0; i < 4; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 16) & 127;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
si++;
bx--;
coef += 48;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
/*-- last 3 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 3; i++) {
coef -= 48;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 16) & 127;
sum += (*coef--) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm++ = (short) tmp;
}
}
void CDecompressMpeg::window8_dual(float* vbuf, int vb_ptr, short* pcm)
{
int i, j;
int si, bx;
float* coef;
float sum;
long tmp;
si = vb_ptr + 4;
bx = (si + 8) & 127;
coef = wincoef;
/*-- first 4 --*/
for (i = 0; i < 4; i++) {
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[si];
si = (si + 16) & 127;
sum -= (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
si++;
bx--;
coef += 48;
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
/*-- special case --*/
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef++) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
/*-- last 3 --*/
coef = wincoef + 255; /* back pass through coefs */
for (i = 0; i < 3; i++) {
coef -= 48;
si--;
bx++;
sum = 0.0F;
for (j = 0; j < 8; j++) {
sum += (*coef--) * vbuf[si];
si = (si + 16) & 127;
sum += (*coef--) * vbuf[bx];
bx = (bx + 16) & 127;
}
tmp = (long) sum;
if (tmp > 32767)
tmp = 32767;
else if (tmp < -32768)
tmp = -32768;
*pcm = (short) tmp;
pcm += 2;
}
}
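/* Subband-transform drivers: each call below feeds one granule through the
   32/16/8-point DCT and windows the result into PCM. vb_ptr steps backwards
   through the circular buffer between granules so successive windows overlap
   the previous output correctly. */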
void CDecompressMpeg::sbt_init()
{
int i;
/* clear window vbuf */
for (i = 0; i < 512; i++) {
vbuf[i] = 0.0F;
vbuf2[i] = 0.0F;
}
vb2_ptr = vb_ptr = 0;
}
void CDecompressMpeg::sbt_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct32(sample, vbuf + vb_ptr);
window(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbt_dual(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
fdct32_dual(sample + 1, vbuf2 + vb_ptr);
window_dual(vbuf, vb_ptr, pcm);
window_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 64;
}
}
void CDecompressMpeg::sbt_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual_mono(sample, vbuf + vb_ptr);
window(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbt_dual_left(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
window(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbt_dual_right(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
sample++; /* point to right chan */
for (i = 0; i < n; i++) {
fdct32_dual(sample, vbuf + vb_ptr);
window(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbt16_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct16(sample, vbuf + vb_ptr);
window16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbt16_dual(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
fdct16_dual(sample + 1, vbuf2 + vb_ptr);
window16_dual(vbuf, vb_ptr, pcm);
window16_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 32;
}
}
void CDecompressMpeg::sbt16_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual_mono(sample, vbuf + vb_ptr);
window16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbt16_dual_left(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
window16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbt16_dual_right(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
sample++;
for (i = 0; i < n; i++) {
fdct16_dual(sample, vbuf + vb_ptr);
window16(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbt8_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct8(sample, vbuf + vb_ptr);
window8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbt8_dual(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
fdct8_dual(sample + 1, vbuf2 + vb_ptr);
window8_dual(vbuf, vb_ptr, pcm);
window8_dual(vbuf2, vb_ptr, pcm + 1);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 16;
}
}
void CDecompressMpeg::sbt8_dual_mono(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual_mono(sample, vbuf + vb_ptr);
window8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbt8_dual_left(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
window8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbt8_dual_right(float* sample, void* in_pcm, int n)
{
int i;
short* pcm = (short*) in_pcm;
sample++;
for (i = 0; i < n; i++) {
fdct8_dual(sample, vbuf + vb_ptr);
window8(vbuf, vb_ptr, pcm);
sample += 64;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbt_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf + vb_ptr);
window(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 32;
}
}
void CDecompressMpeg::sbt_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
if (ch == 0)
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf + vb_ptr);
window_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 32) & 511;
pcm += 64;
}
else
for (i = 0; i < 18; i++) {
fdct32(sample, vbuf2 + vb2_ptr);
window_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 32) & 511;
pcm += 64;
}
}
void CDecompressMpeg::sbt16_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf + vb_ptr);
window16(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 16;
}
}
void CDecompressMpeg::sbt16_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
if (ch == 0) {
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf + vb_ptr);
window16_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 16) & 255;
pcm += 32;
}
} else {
for (i = 0; i < 18; i++) {
fdct16(sample, vbuf2 + vb2_ptr);
window16_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 16) & 255;
pcm += 32;
}
}
}
void CDecompressMpeg::sbt8_mono_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
ch = 0;
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf + vb_ptr);
window8(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 8;
}
}
void CDecompressMpeg::sbt8_dual_L3(float* sample, void* in_pcm, int ch)
{
int i;
short* pcm = (short*) in_pcm;
if (ch == 0) {
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf + vb_ptr);
window8_dual(vbuf, vb_ptr, pcm);
sample += 32;
vb_ptr = (vb_ptr - 8) & 127;
pcm += 16;
}
} else {
for (i = 0; i < 18; i++) {
fdct8(sample, vbuf2 + vb2_ptr);
window8_dual(vbuf2, vb2_ptr, pcm + 1);
sample += 32;
vb2_ptr = (vb2_ptr - 8) & 127;
pcm += 16;
}
}
}
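// bitrate tables in kbit/s, indexed as [version-1][layer-1][br_index];
// indices 0 and 15 are free-format/forbidden and map to 0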
int CDecompressMpeg::br_tbl[3][3][16] = {
{// MPEG-1
// Layer1
{ 0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 0 },
// Layer2
{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 0 },
// Layer3
{ 0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 0 },
}, {// MPEG-2
// Layer1
{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 },
// Layer2
{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
// Layer3
{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
}, {// MPEG-2.5
// Layer1 (not available)
{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 },
// Layer2 (not available)
{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
// Layer3
{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
},
};
int CDecompressMpeg::fr_tbl[3][4] = {
{ 44100, 48000, 32000, 0 }, // MPEG-1
{ 22050, 24000, 16000, 0 }, // MPEG-2
{ 11025, 12000, 8000, 0 }, // MPEG-2.5
};
void CDecompressMpeg::mp3DecodeInit()
{
m_option.reduction = 0;
m_option.convert = 0;
m_option.freqLimit = 24000;
L1table_init();
L2table_init();
L3table_init();
}
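// Parse the 4-byte frame header: the sync byte must be 0xFF, followed by
// 0xFx (MPEG-1/2) or 0xEx (MPEG-2.5). On success layer is remapped so that
// 1..3 mean Layer I..III and error_prot becomes 1 when a CRC follows the
// header; on failure m_last_error is set and 0 is returned.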
int CDecompressMpeg::mp3GetHeader(BYTE* buf, MPEG_HEADER* h)
{
h->version = (buf[1] & 0x08) >> 3;
h->layer = (buf[1] & 0x06) >> 1;
h->error_prot = (buf[1] & 0x01);
h->br_index = (buf[2] & 0xf0) >> 4;
h->fr_index = (buf[2] & 0x0c) >> 2;
h->padding = (buf[2] & 0x02) >> 1;
h->extension = (buf[2] & 0x01);
h->mode = (buf[3] & 0xc0) >> 6;
h->mode_ext = (buf[3] & 0x30) >> 4;
h->copyright = (buf[3] & 0x08) >> 3;
h->original = (buf[3] & 0x04) >> 2;
h->emphasis = (buf[3] & 0x03);
if (buf[0] != 0xFF) {
//sync error
m_last_error = MP3_ERROR_INVALID_SYNC;
return 0;
}
if ((buf[1] & 0xF0) == 0xF0) //MPEG-1, MPEG-2
h->version = (h->version) ? 1 : 2;
else if ((buf[1] & 0xF0) == 0xE0) //MPEG-2.5
h->version = 3;
else {
m_last_error = MP3_ERROR_INVALID_SYNC;
return 0;
}
if (h->fr_index >= 3 ||
h->br_index == 0 ||
h->br_index >= 15 ||
h->layer == 0 ||
h->layer >= 4) {
m_last_error = MP3_ERROR_INVALID_HEADER;
return 0;
}
h->layer = 4 - h->layer;
h->error_prot = (h->error_prot) ? 0 : 1;
return 1;
}
bool CDecompressMpeg::mp3GetHeaderInfo(BYTE* buffer, MPEG_HEADER_INFO* info)
{
int ch, ver;
MPEG_HEADER* h =& info->header;
// parse the header fields
if (!mp3GetHeader(buffer, h))
return false;
// compute the values we need
info->curBitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
switch (h->layer) {
case 1:
//layer1
info->curFrameSize = (12 * info->curBitRate / m_frequency + h->padding) * 4;
break;
case 2:
//layer2
info->curFrameSize = 144 * info->curBitRate /
m_frequency +
h->padding;
break;
case 3:
//layer3
if (h->version == 1)
info->curFrameSize = 144 * info->curBitRate /
m_frequency +
h->padding;
else
info->curFrameSize = (144 * info->curBitRate / m_frequency) /
2 +
h->padding;
break;
}
ch = (h->mode == 3) ? 1 : 2;
ver = (h->version == 1) ? 1 : 2;
info->samplesInFrame = (1152 >> m_option.reduction) / ver;
info->outputSize = info->samplesInFrame * 2 * ch;
return true;
}
int CDecompressMpeg::mp3GetLastError()
{
return m_last_error;
}
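// Scan the buffer for a 0xFF sync byte whose decoded header matches the
// stream parameters captured when the file was opened (_layer, _version,
// _br_index, _fr_index, _mode); the offset is returned through *sync.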
int CDecompressMpeg::mp3FindSync(BYTE* buf, int size, int* sync)
{
int i;
MPEG_HEADER h;
*sync = 0;
size -= 3;
if (size <= 0) {
m_last_error = MP3_ERROR_OUT_OF_BUFFER;
return 0;
}
// scan for matching frame data
for (i = 0; i < size; i++) {
if (buf[i] == 0xFF) {
if (mp3GetHeader(buf + i, & h)) {
if ((h.layer == _layer) &&
(h.version == _version) &&
(h.br_index == _br_index) &&
(h.fr_index == _fr_index) &&
(h.mode == _mode))
break;
}
}
}
if (i == size) {
m_last_error = MP3_ERROR_OUT_OF_BUFFER;
return 0;
}
*sync = i;
return 1;
}
void CDecompressMpeg::mp3GetDecodeOption(MPEG_DECODE_OPTION* option)
{
*option = m_option;
}
int CDecompressMpeg::mp3SetDecodeOption(MPEG_DECODE_OPTION* option)
{
m_option = *option;
return 1;
}
/*
//-----------------------------------------------------------------------------
// Set up the equalizer
// value - pointer to the equalizer parameters
//-----------------------------------------------------------------------------
int CDecompressMpeg::mp3SetEqualizer(int* value)
{
int i;
if (value == (void*)0) {
m_enableEQ = 0;
return 1;
}
m_enableEQ = 1;
//60, 170, 310, 600, 1K, 3K
for (i = 0; i < 6; i ++) {
m_equalizer[i] = (float)pow(10,(double)value[i]/200);
}
//6K
m_equalizer[6] = (float)pow(10,(double)value[6]/200);
m_equalizer[7] = m_equalizer[6];
//12K
m_equalizer[8] = (float)pow(10,(double)value[7]/200);
m_equalizer[9] = m_equalizer[8];
m_equalizer[10] = m_equalizer[8];
m_equalizer[11] = m_equalizer[8];
//14K
m_equalizer[12] = (float)pow(10,(double)value[8]/200);
m_equalizer[13] = m_equalizer[12];
m_equalizer[14] = m_equalizer[12];
m_equalizer[15] = m_equalizer[12];
m_equalizer[16] = m_equalizer[12];
m_equalizer[17] = m_equalizer[12];
m_equalizer[18] = m_equalizer[12];
m_equalizer[19] = m_equalizer[12];
//16K
m_equalizer[20] = (float)pow(10,(double)value[9]/200);
m_equalizer[21] = m_equalizer[20];
m_equalizer[22] = m_equalizer[20];
m_equalizer[23] = m_equalizer[20];
m_equalizer[24] = m_equalizer[20];
m_equalizer[25] = m_equalizer[20];
m_equalizer[26] = m_equalizer[20];
m_equalizer[27] = m_equalizer[20];
m_equalizer[28] = m_equalizer[20];
m_equalizer[29] = m_equalizer[20];
m_equalizer[30] = m_equalizer[20];
m_equalizer[31] = m_equalizer[20];
return 1;
}
*/
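// flag bits of the Xing VBR header parsed in mp3GetDecodeInfo() below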
#define VBR_FRAMES_FLAG 0x0001
#define VBR_BYTES_FLAG 0x0002
#define VBR_TOC_FLAG 0x0004
#define VBR_SCALE_FLAG 0x0008
// big endian extract
int CDecompressMpeg::extractInt4(BYTE* buf)
{
return buf[3] | (buf[2] << 8) | (buf[1] << 16) | (buf[0] << 24);
}
//-----------------------------------------------------------------------------
// extract the header and the key stream data
// mpeg - pointer to the data buffer
// size - size of the data buffer
// info - pointer to the structure that receives the extended data
// decFlag - when 1, latch the file's frequency and PCM size into the decoder
//-----------------------------------------------------------------------------
int CDecompressMpeg::mp3GetDecodeInfo(BYTE* mpeg, int size,
MPEG_DECODE_INFO* info, int decFlag)
{
MPEG_HEADER* h =& info->header;
byte* p = mpeg;
int vbr;
DWORD minBitRate, maxBitRate;
DWORD i, j, flags;
//int bitRate;
//int frame_size;
// if (size < 156) {//max vbr header size
// m_last_error = MP3_ERROR_OUT_OF_BUFFER;
// return 0;
// }
if (!mp3GetHeader(p, h)) {
return 0;
}
//check VBR Header
p += 4;//skip mpeg header
if (h->error_prot)
p += 2;//skip crc
if (h->layer == 3) {
//skip side info
if (h->version == 1) {
//MPEG-1
if (h->mode != 3)
p += 32;
else
p += 17;
} else {
//MPEG-2, MPEG-2.5
if (h->mode != 3)
p += 17;
else
p += 9;
}
}
info->bitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
info->frequency = fr_tbl[h->version - 1][h->fr_index];
if (memcmp(p, "Xing", 4) == 0) {
//VBR
p += 4;
flags = extractInt4(p);
p += 4;
if (!(flags & (VBR_FRAMES_FLAG | VBR_BYTES_FLAG))) {
m_last_error = MP3_ERROR_INVALID_HEADER;
return 0;
}
info->frames = extractInt4(p);
p += 4;
info->dataSize = extractInt4(p);
p += 4;
if (flags & VBR_TOC_FLAG)
p += 100;
if (flags & VBR_SCALE_FLAG)
p += 4;
/*
//standard VBR handling
if ( p[0] == mpeg[0] && p[1] == mpeg[1] ) {
info->skipSize = (int)(p - mpeg);
} else {
bitRate = br_tbl[h->version-1][h->layer-1][h->br_index] * 1000;
switch (h->layer) {
case 1://layer1
frame_size = (12 * bitRate / fr_tbl[h->version-1][h->fr_index]) * 4;//one slot is 4 bytes long
if (h->padding) frame_size += 4;
break;
case 2://layer2
frame_size = 144 * bitRate / fr_tbl[h->version-1][h->fr_index];
if (h->padding) frame_size ++;
break;
case 3://layer3
frame_size = 144 * bitRate / fr_tbl[h->version-1][h->fr_index];
if (h->version != 1) //MPEG-2, MPEG-2.5
frame_size /= 2;
if (h->padding) frame_size ++;
break;
}
info->skipSize = (int)(frame_size);
}
info->bitRate = 0;
*/
vbr = 1;
minBitRate = 0xffffffff;
maxBitRate = 0;
for (i = 1; i < 15; i ++) {
j = br_tbl[h->version - 1][h->layer - 1][i] * 1000;
if (j < minBitRate)
minBitRate = j;
if (j > maxBitRate)
maxBitRate = j;
}
} else if (memcmp(p, "VBRI", 4) == 0) {
//VBRI
p += 10;
info->dataSize = extractInt4(p);
p += 4;
info->frames = extractInt4(p);
p += 4;
vbr = 1;
minBitRate = 0xffffffff;
maxBitRate = 0;
for (i = 1; i < 15; i ++) {
j = br_tbl[h->version - 1][h->layer - 1][i] * 1000;
if (j < minBitRate)
minBitRate = j;
if (j > maxBitRate)
maxBitRate = j;
}
} else {
//not VBR
vbr = 0;
info->frames = 0;
//info->skipSize = 0;
info->dataSize = 0;
//info->bitRate = br_tbl[h->version-1][h->layer-1][h->br_index] * 1000;
}
// info->frequency = fr_tbl[h->version-1][h->fr_index];
// info->msPerFrame = ms_p_f_table[h->layer-1][h->fr_index];
// if (h->version == 3) info->msPerFrame *= 2;
switch (h->layer) {
case 1:
//layer1
info->outputSize = 384 >> m_option.reduction;
//if (info->bitRate) {
if (!vbr) {
info->skipSize = 0;
info->minInputSize = (12 * info->bitRate / info->frequency) * 4;//one slot is 4 bytes long
info->maxInputSize = info->minInputSize + 4;
} else {
info->skipSize = (12 * info->bitRate /
info->frequency +
h->padding) * 4;
info->minInputSize = (12 * minBitRate / info->frequency) * 4;
info->maxInputSize = (12 * maxBitRate / info->frequency) * 4 + 4;
}
break;
case 2:
//layer2
info->outputSize = 1152 >> m_option.reduction;
//if (info->bitRate) {
if (!vbr) {
info->skipSize = 0;
info->minInputSize = 144 * info->bitRate / info->frequency;
info->maxInputSize = info->minInputSize + 1;
} else {
info->skipSize = 144 * info->bitRate /
info->frequency +
h->padding;
info->minInputSize = 144 * minBitRate / info->frequency;
info->maxInputSize = 144 * maxBitRate / info->frequency + 1;
}
break;
case 3:
//layer3
i = (h->version == 1) ? 1 : 2;
//info->outputSize = 1152 >> m_option.reduction;
info->outputSize = (1152 >> m_option.reduction) / i;
//if (info->bitRate) {
if (!vbr) {
info->skipSize = 0;
info->minInputSize = 144 * info->bitRate / info->frequency / i;
info->maxInputSize = info->minInputSize + 1;
} else {
info->skipSize = 144 * info->bitRate /
info->frequency /
i +
h->padding;
info->minInputSize = 144 * minBitRate / info->frequency / i;
info->maxInputSize = 144 * maxBitRate / info->frequency / i + 1;
}
break;
/*
if (h->version != 1) {
//MPEG-2, MPEG-2.5
info->outputSize /= 2;
info->minInputSize /= 2;
info->maxInputSize /= 2;
}
info->maxInputSize ++;
break;
*/
}
if ((h->mode == 3) || (m_option.convert & 3))
info->channels = 1;
else
info->channels = 2;
if (m_option.convert & 8) {
//not available
info->bitsPerSample = 8;
info->outputSize *= info->channels;
} else {
info->bitsPerSample = 16;
info->outputSize *= info->channels * 2;
}
if (decFlag == 1) {
m_frequency = info->frequency;
m_pcm_size = info->outputSize;
}
info->frequency >>= m_option.reduction;
info->HeadBitRate = info->bitRate;
if (vbr)
info->bitRate = 0;
return 1;
}
// start decoding
int CDecompressMpeg::mp3DecodeStart(BYTE* mpeg, int size)
{
MPEG_DECODE_INFO info;
MPEG_HEADER* h =& info.header;
// parse the header and precompute the key values
if (!mp3GetDecodeInfo(mpeg, size, & info, 1))
return 0;
// initialization
sbt_init();
// call the layer-specific initialization
switch (h->layer) {
case 1:
L1decode_start(h);
break;
case 2:
L2decode_start(h);
break;
case 3:
L3decode_start(h);
break;
}
return 1;
}
// decode a single frame
int CDecompressMpeg::mp3DecodeFrame(MPEG_DECODE_PARAM* param)
{
MPEG_HEADER* h =& param->header;
// check the input size
if (param->inputSize <= 4) {
m_last_error = MP3_ERROR_OUT_OF_BUFFER;
return 0;
}
// read the header
if (!mp3GetHeader((unsigned char *) param->inputBuf, h)) {
return 0;
}
// compute the amount of data in the frame
param->bitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
switch (h->layer) {
//layer1
case 1:
m_frame_size = (12 * param->bitRate / m_frequency + h->padding) * 4;
break;
//layer2
case 2:
m_frame_size = 144 * param->bitRate / m_frequency + h->padding;
break;
//layer3
case 3:
if (h->version == 1)
m_frame_size = 144 * param->bitRate / m_frequency + h->padding;
else
m_frame_size = (144 * param->bitRate / m_frequency) /
2 +
h->padding;
break;
}
// check the input size
if (param->inputSize < m_frame_size) {
m_last_error = MP3_ERROR_OUT_OF_BUFFER;
return 0;
}
// dispatch to the layer decoder
switch (h->layer) {
case 1:
L1decode_frame(h,
(unsigned char *) param->inputBuf,
(unsigned char *) param->outputBuf);
break;
case 2:
L2decode_frame(h,
(unsigned char *) param->inputBuf,
(unsigned char *) param->outputBuf);
break;
case 3:
L3decode_frame(h,
(unsigned char *) param->inputBuf,
(unsigned char *) param->outputBuf);
break;
}
//!!!todo m_frame_proc(h, (unsigned char*)param->inputBuf, (unsigned char *)param->outputBuf);
// report the actual input and output buffer sizes
param->inputSize = m_frame_size;
param->outputSize = m_pcm_size;
return 1;
}
void CDecompressMpeg::mp3Reset(void)
{
sbt_init();
L3decode_reset();
}
//-----------------------------------------------------------------------------
// Seek to a new position in the file
// in  : frame - new frame index
// out : *
//-----------------------------------------------------------------------------
int CDecompressMpeg::mp3seek(DWORD frame)
{
// initialize the locals
DWORD cur = 0;
DWORD back = 3;
int off = 0;
DWORD need_frame_offset = 0;
// position on the data
if (_curFrame != frame) {
if (_curFrame != (frame - 1)) {
// back up a few frames so the decoder can re-prime itself
if (frame > back)
frame -= back;
else {
back = frame;
frame = 0;
}
if (!_vbr) {
// approximate the byte offset of the frame
need_frame_offset = (DWORD)
floor(((double) frame * _bitPerFrame) /
8);
// search for the start of the frame
while (1) {
// set the read position
if (SourceData->seek(need_frame_offset, 0) !=
need_frame_offset)
return 0;
// check for end of file
if (SourceData->eof())
return 0;
// read data to search for the frame start
if (SourceData->peek(_frameBuffer, _minFrameSize) !=
_minFrameSize)
return 0;
// search for the frame sync
if (!mp3FindSync(_frameBuffer, _minFrameSize, & off)) {
need_frame_offset += (_minFrameSize - 3);
} else {
need_frame_offset += off;
break;
}
};
} else {
need_frame_offset = _vbrFrameOffTable[frame];
}
if (SourceData->seek(need_frame_offset, 0) != need_frame_offset)
return 0;
mp3Reset();
// clear the saved granule samples
for (int ch = 0; ch < 2; ch++) {
for (int gr = 0; gr < 2; gr++) {
for (int sam = 0; sam < 576; sam++) {
m_sample[ch][gr][sam].s = 0;
m_sample[ch][gr][sam].x = 0;
}
}
}
for (cur = 0; cur < back; cur++) {
SourceData->peek(_frameBuffer, 4);
if (!mp3GetHeaderInfo(_frameBuffer, & _mpegHI))
return 0;
_curFrameSize = _mpegHI.curFrameSize;
if (SourceData->read(_frameBuffer, _curFrameSize) !=
_curFrameSize)
return 0;
_mpegDP.header = _mpegHI.header;
_mpegDP.bitRate = _mpegHI.curBitRate;
_mpegDP.inputBuf = _frameBuffer;
_mpegDP.inputSize = _mpegHI.curFrameSize;
_mpegDP.outputBuf = _sampleBuffer;
_mpegDP.outputSize = _mpegHI.outputSize;
// decode one frame
if (!mp3DecodeFrame(&_mpegDP))
return 0;
}
}
}
return 1;
}
//-----------------------------------------------------------------------------
// Decoder constructor
// in  : a - pointer to the sound file data
// out : *
//-----------------------------------------------------------------------------
CDecompressMpeg::CDecompressMpeg(WAVEFORMATEX* pcm_format, bool& flag,
CAbstractSoundFile* a)
: CAbstractDecompressor(pcm_format, flag, a)
{
DWORD cur;
DWORD pos;
MPEG_HEADER_INFO info;
BYTE head[156];
// file not recognized yet
flag = false;
// initialize the decoder
mp3DecodeInit();
// initialize the decoder state
m_cs_factorL1 = m_cs_factor[0];
// m_enableEQ = 0;
memset(&m_side_info, 0, sizeof(SIDE_INFO));
memset(&m_scale_fac, 0, sizeof(SCALE_FACTOR) * 4);
memset(&m_cb_info, 0, sizeof(CB_INFO) * 4);
memset(&m_nsamp, 0, sizeof(int) * 4);
// clear the buffer pointers
_frameBuffer = 0;
_vbr = 0;
_vbrFrameOffTable = 0;
// gather information about the file
if (SourceData->peek(head, sizeof(head)) != sizeof(head))
return;
if (!mp3GetDecodeInfo(head, sizeof(head), & _mpegDI, 1))
return;
if (!mp3GetHeaderInfo(head, & _mpegHI))
return;
// pull out the fields we care about
_channels = _mpegDI.channels;
_frequency = _mpegDI.frequency;
_bitrate = _mpegDI.HeadBitRate;
_vbr = _mpegDI.bitRate ? false : true;
_minFrameSize = _mpegDI.minInputSize;
_maxFrameSize = _mpegDI.maxInputSize;
_samplesInFrame = _mpegHI.samplesInFrame;
_curFrameSize = _mpegHI.curFrameSize;
_version = _mpegDI.header.version;
_layer = _mpegDI.header.layer;
_br_index = _mpegDI.header.br_index;
_fr_index = _mpegDI.header.fr_index;
_mode = _mpegDI.header.mode;
_slotSize = (_mpegDI.header.layer == 1) ? 4 : 1;
_bitPerFrame = (_mpegDI.header.version == 1) ?
(double) (144 * 8 * _bitrate) /
(double) _frequency :
(double) (144 * 8 * _bitrate) /
(double) (_frequency * 2);
_frames = _vbr ?
_mpegDI.frames :
(DWORD) floor(((double) ((SourceData->size + _slotSize) * 8)) /
_bitPerFrame);
_samplesInFile = _frames * _samplesInFrame;
//*********************************************************************************
// debug: walk the whole file counting frames to verify the estimate
cur = 0;
pos = 0;
while (!SourceData->eof()) {
SourceData->seek(pos, 0);
if (SourceData->peek(head, 4) != 4)
break;
if (!mp3GetHeaderInfo(head, & info))
break;
pos += info.curFrameSize;
cur++;
}
SourceData->seek(0, 0);
if (cur != _frames)
_frames = cur;
_vbr = true;
//**********************************************************************************
// variable-bitrate file?
if (_vbr) {
// allocate the per-frame offset table
#if AGSS_USE_MALLOC
_vbrFrameOffTable = (DWORD *) malloc(_frames * sizeof(DWORD));
#else
_vbrFrameOffTable = (DWORD *) GlobalAlloc(GPTR,
_frames * sizeof(DWORD));
#endif
if (!_vbrFrameOffTable)
return;
cur = 0;
pos = 0;
// fill the offset table
while (cur != _frames) {
SourceData->seek(pos, 0);
SourceData->peek(head, 4);
if (!mp3GetHeaderInfo(head, & info))
break;
_vbrFrameOffTable[cur] = pos;
pos += info.curFrameSize;
cur++;
}
SourceData->seek(0, 0);
}
// allocate the frame buffer
#if AGSS_USE_MALLOC
_frameBuffer = (BYTE *) malloc(_mpegDI.maxInputSize);
#else
_frameBuffer = (BYTE *) GlobalAlloc(GPTR, _mpegDI.maxInputSize);
#endif
if (!_frameBuffer)
return;
// read one frame
if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize) {
#if AGSS_USE_MALLOC
free(_frameBuffer);
#else
GlobalFree(_frameBuffer);
#endif
_frameBuffer = 0;
return;
}
// start decoding
if (!mp3DecodeStart(_frameBuffer, _curFrameSize)) {
#if AGSS_USE_MALLOC
free(_frameBuffer);
#else
GlobalFree(_frameBuffer);
#endif
_frameBuffer = 0;
return;
}
// prepare to decode the first frame
_mpegDP.header = _mpegDI.header;
_mpegDP.bitRate = _mpegDI.bitRate;
_mpegDP.inputBuf = _frameBuffer;
_mpegDP.inputSize = _curFrameSize;
_mpegDP.outputBuf = _sampleBuffer;
_mpegDP.outputSize = _mpegDI.outputSize;
// decode the first frame
if (!mp3DecodeFrame(&_mpegDP)) {
#if AGSS_USE_MALLOC
free(_frameBuffer);
#else
GlobalFree(_frameBuffer);
#endif
_frameBuffer = 0;
return;
}
// set the remaining parameters
_curFrame = 0;
_curSampleOffset = 0;
// fill in a PCM format DirectX accepts (otherwise DirectX cannot create the buffer)
pcm_format->wFormatTag = 1;
pcm_format->wBitsPerSample = 16;
pcm_format->nSamplesPerSec = _frequency;
pcm_format->nChannels = _channels;
pcm_format->nBlockAlign = (pcm_format->nChannels * pcm_format->wBitsPerSample) >>
3;
pcm_format->nAvgBytesPerSec = pcm_format->nBlockAlign * pcm_format->nSamplesPerSec;
// file recognized
flag = true;
}
//-----------------------------------------------------------------------------
// Decoder destructor
// in  : *
// out : *
//-----------------------------------------------------------------------------
CDecompressMpeg::~CDecompressMpeg()
{
if (_vbrFrameOffTable) {
#if AGSS_USE_MALLOC
free(_vbrFrameOffTable);
#else
GlobalFree(_vbrFrameOffTable);
#endif
_vbrFrameOffTable = 0;
}
if (_frameBuffer) {
#if AGSS_USE_MALLOC
free(_frameBuffer);
#else
GlobalFree(_frameBuffer);
#endif
_frameBuffer = 0;
}
}
//-----------------------------------------------------------------------------
// Decompress MP3 data into mono samples
// in  : buffer - pointer to the destination buffer
//       start  - offset into the audio data, in samples
//       length - number of samples to decode
// out : number of bytes by which the destination buffer advanced
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetMonoSamples(void* buffer, DWORD start, DWORD length,
bool loop)
{
DWORD NeedFrame;
DWORD NeedOffset;
DWORD samples;
DWORD i;
BYTE head[4];
short* dst = (short*) buffer;
// bounds check
if (start > _samplesInFile)
return 0;
// clamp reads that run past the end
if ((start + length) > _samplesInFile)
length = _samplesInFile - start;
// compute the current read position
NeedFrame = start / _samplesInFrame;
NeedOffset = start % _samplesInFrame;
// seek to the data
if (!mp3seek(NeedFrame))
return 0;
DWORD remaining = length;
DWORD readsize = 0;
bool readframe = false;
while (remaining) {
if ((_channels == 1) &&
(NeedOffset == 0) &&
(remaining > _samplesInFrame))
readframe = true;
else
readframe = false;
if (_curFrame != NeedFrame) {
_curFrame = NeedFrame;
if (SourceData->peek(&head, 4) != 4)
break;
if (!mp3GetHeaderInfo(head, & _mpegHI))
return 0;
_curFrameSize = _mpegHI.curFrameSize;
if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize)
return 0;
_mpegDP.header = _mpegHI.header;
_mpegDP.bitRate = _mpegHI.curBitRate;
_mpegDP.inputBuf = _frameBuffer;
_mpegDP.inputSize = _mpegHI.curFrameSize;
_mpegDP.outputBuf = (readframe) ? dst : _sampleBuffer;
_mpegDP.outputSize = _mpegHI.outputSize;
// decode one frame
if (!mp3DecodeFrame(&_mpegDP))
return 0;
}
samples = _samplesInFrame - NeedOffset;
readsize = (remaining > samples) ? samples : remaining;
short* src = _sampleBuffer + (NeedOffset* _channels);
if (_channels == 1) {
if (!readframe)
memcpy(dst, src, readsize * 2);
dst += readsize;
} else {
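// downmix stereo to mono: average both channels and clamp to 16-bit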
for (i = 0; i < readsize; i++) {
int s = ((int) src[0] + (int) src[1]) >> 1;
s = (s < -32768) ? -32768 : (s > 32767) ? 32767 : s;
*dst++ = (short) s;
src += 2;
}
}
NeedOffset = 0;
remaining -= readsize;
if (remaining)
NeedFrame++;
}
return ((DWORD) dst - (DWORD) buffer);
}
//-----------------------------------------------------------------------------
// Decompress MP3 data into stereo samples
// in  : buffer - pointer to the destination buffer
//       start  - offset into the audio data, in samples
//       length - number of samples to decode
// out : number of bytes by which the destination buffer advanced
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetStereoSamples(void* buffer, DWORD start,
DWORD length, bool loop)
{
DWORD NeedFrame;
DWORD NeedOffset;
// DWORD NeedFrameOffset;
DWORD samples;
DWORD i;
BYTE head[4];
// int off;
short* dst = (short*) buffer;
// bounds check
if (start > _samplesInFile)
return 0;
// clamp reads that run past the end
if ((start + length) > _samplesInFile)
length = _samplesInFile - start;
// compute the current read position
NeedFrame = start / _samplesInFrame;
NeedOffset = start % _samplesInFrame;
// seek to the data
if (!mp3seek(NeedFrame))
return 0;
DWORD remaining = length;
DWORD readsize = 0;
bool readframe = false;
while (remaining) {
if ((_channels == 2) &&
(NeedOffset == 0) &&
(remaining > _samplesInFrame))
readframe = true;
else
readframe = false;
if (_curFrame != NeedFrame) {
_curFrame = NeedFrame;
SourceData->peek(&head, 4);
if (!mp3GetHeaderInfo(head, & _mpegHI))
return 0;
_curFrameSize = _mpegHI.curFrameSize;
if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize)
return 0;
_mpegDP.header = _mpegHI.header;
_mpegDP.bitRate = _mpegHI.curBitRate;
_mpegDP.inputBuf = _frameBuffer;
_mpegDP.inputSize = _mpegHI.curFrameSize;
_mpegDP.outputBuf = (readframe) ? dst : _sampleBuffer;
_mpegDP.outputSize = _mpegHI.outputSize;
// decode one frame
if (!mp3DecodeFrame(&_mpegDP))
return 0;
}
samples = _samplesInFrame - NeedOffset;
readsize = (remaining > samples) ? samples : remaining;
short* src = _sampleBuffer + (NeedOffset* _channels);
if (_channels == 1) {
for (i = 0; i < readsize; i++) {
*dst++ = *src;
*dst++ = *src;
src++;
}
} else {
if (!readframe)
memcpy(dst, src, readsize * 4);
dst += readsize * 2;
}
NeedOffset = 0;
remaining -= readsize;
if (remaining)
NeedFrame++;
}
return ((DWORD) dst - (DWORD) buffer);
}
//-----------------------------------------------------------------------------
// Fill a span of the buffer with silence, mono mode
// in  : buffer - pointer to the buffer
//       length - number of samples
// out : number of bytes by which the buffer advanced
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetMonoMute(void* buffer, DWORD length)
{
length <<= 1;
memset(buffer, 0, length);
return length;
}
//-----------------------------------------------------------------------------
// Fill a span of the buffer with silence, stereo mode
// in  : buffer - pointer to the buffer
//       length - number of samples
// out : number of bytes by which the buffer advanced
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetStereoMute(void* buffer, DWORD length)
{
length <<= 2;
memset(buffer, 0, length);
return length;
}
//-----------------------------------------------------------------------------
// Get the number of samples in the file
// in  : *
// out : number of samples in the file
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetSamplesInFile(void)
{
return _samplesInFile;
}
//-----------------------------------------------------------------------------
// Get the number of bytes in the track, mono mode
// in  : *
// out : number of bytes in the track
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetRealMonoDataSize(void)
{
return _samplesInFile * 2;
}
//-----------------------------------------------------------------------------
// Get the number of bytes in the track, stereo mode
// in  : *
// out : number of bytes in the track
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetRealStereoDataSize(void)
{
return _samplesInFile * 4;
}<|fim▁end|> | bx++; |
<|file_name|>threatfox.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import requests
import json
misperrors = {'error': 'Error'}
mispattributes = {'input': ['md5', 'sha1', 'sha256', 'domain', 'url', 'email-src', 'ip-dst|port', 'ip-src|port'], 'output': ['text']}
moduleinfo = {'version': '0.1', 'author': 'Corsin Camichel', 'description': 'Module to search for an IOC on ThreatFox by abuse.ch.', 'module-type': ['hover', 'expansion']}
moduleconfig = []
API_URL = "https://threatfox-api.abuse.ch/api/v1/"
# copied from
# https://github.com/marjatech/threatfox2misp/blob/main/threatfox2misp.py
def confidence_level_to_tag(level: int) -> str:
confidence_tagging = {
0: 'misp:confidence-level="unconfident"',
10: 'misp:confidence-level="rarely-confident"',
37: 'misp:confidence-level="fairly-confident"',
63: 'misp:confidence-level="usually-confident"',
90: 'misp:confidence-level="completely-confident"',
}
confidence_tag = ""
for tag_minvalue, tag in confidence_tagging.items():
if level >= tag_minvalue:
confidence_tag = tag
return confidence_tag
def handler(q=False):
if q is False:
return False
request = json.loads(q)
ret_val = ""
for input_type in mispattributes['input']:
if input_type in request:
to_query = request[input_type]
break
else:
misperrors['error'] = "Unsupported attribute type"
return misperrors
data = {"query": "search_ioc", "search_term": f"{to_query}"}<|fim▁hole|> response = requests.post(API_URL, data=json.dumps(data))
if response.status_code == 200:
result = json.loads(response.text)
if(result["query_status"] == "ok"):
confidence_tag = confidence_level_to_tag(result["data"][0]["confidence_level"])
ret_val = {'results': [{'types': mispattributes['output'], 'values': [result["data"][0]["threat_type_desc"]], 'tags': [result["data"][0]["malware"], result["data"][0]["malware_printable"], confidence_tag]}]}
return ret_val
def introspection():
return mispattributes
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo<|fim▁end|> | |
<|file_name|>FontsOverride.java<|end_file_name|><|fim▁begin|>package com.chandilsachin.diettracker.io;
import android.app.Activity;
import android.content.Context;
import android.graphics.Typeface;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
public final class FontsOverride
{
public static void populateFonts(Activity activity, HashMap<String, String> fontTable)
{
if (fontTable != null)
{
Iterator<String> fonts = fontTable.keySet().iterator();
while (fonts.hasNext())
{
String font = fonts.next();
setDefaultFont(activity, font, "fonts/" + fontTable.get(font));
}
}
}
public static void setDefaultFont(Context context,
String staticTypefaceFieldName, String fontAssetName)
{
final Typeface regular = Typeface.createFromAsset(context.getAssets(),
fontAssetName);
replaceFont(staticTypefaceFieldName, regular);
}
protected static void replaceFont(String staticTypefaceFieldName,
final Typeface newTypeface)
{
try
{
final Field staticField = Typeface.class
.getDeclaredField(staticTypefaceFieldName);
staticField.setAccessible(true);
staticField.set(null, newTypeface);
} catch (NoSuchFieldException e)
{
e.printStackTrace();
} catch (IllegalAccessException e)<|fim▁hole|> }
}<|fim▁end|> | {
e.printStackTrace();
} |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _assetloader = require('./assetloader');
var _assetloader2 = _interopRequireDefault(_assetloader);
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _loop = require('./loop');
var _loop2 = _interopRequireDefault(_loop);
var _log = require('./log');
var _log2 = _interopRequireDefault(_log);
var _timer = require('./timer');
var _timer2 = _interopRequireDefault(_timer);
var _math = require('./math');
var _math2 = _interopRequireDefault(_math);
var _types = require('./types');
var _types2 = _interopRequireDefault(_types);
'use strict';
<|fim▁hole|> Loop: _loop2['default'],
Log: _log2['default'],
Timer: _timer2['default'],
Math: _math2['default'],
Types: _types2['default']
};
module.exports = exports['default'];<|fim▁end|> | exports['default'] = {
AssetLoader: _assetloader2['default'],
Input: _input2['default'], |
<|file_name|>Order.java<|end_file_name|><|fim▁begin|>package seborama.demo2.kafka.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
<|fim▁hole|>@JsonIgnoreProperties(ignoreUnknown = true)
public class Order {
private String id;
private boolean fulfilled;
private boolean dispatched;
private boolean completed;
public void setId(String id) {
this.id = id;
}
public void setFulfilled(Boolean fulfilled) {
this.fulfilled = fulfilled;
}
public void setDispatched(Boolean dispatched) {
this.dispatched = dispatched;
}
public void setCompleted(Boolean completed) {
this.completed = completed;
}
public String getId() {
return id;
}
public Boolean getFulfilled() {
return fulfilled;
}
public Boolean getDispatched() {
return dispatched;
}
public Boolean getCompleted() {
return completed;
}
@Override
public String toString() {
return "Order{" +
"id='" + id + '\'' +
", fulfilled=" + fulfilled +
", dispatched=" + dispatched +
", completed=" + completed +
'}';
}
}<|fim▁end|> | @JsonInclude(JsonInclude.Include.NON_EMPTY) |
<|file_name|>macros4.rs<|end_file_name|><|fim▁begin|>// Make me compile! Scroll down for hints :)
macro_rules! my_macro {
() => {
println!("Check out my macro!");
}
($val:expr) => {
println!("Look at this other macro: {}", $val);
}
}
fn main() {
my_macro!();
my_macro!(7777);
}
<|fim▁hole|>
// You only need to add a single character to make this compile.
// The way macros are written, it wants to see something between each
// "macro arm", so it can separate them.<|fim▁end|> | |
<|file_name|>378.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from heapq import heapify, heapreplace
class Solution(object):
def kthSmallest(self, matrix, k):
"""
:type matrix: List[List[int]]
:type k: int
:rtype: int
"""
        if len(matrix) == 1:
return matrix[0][0]
z = zip(*matrix[1:])
h = [(matrix[0][i], z[i]) for i in xrange(len(matrix))]
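        # heap of (column head, remaining column); popping/replacing the
        # minimum k-1 times leaves the k-th smallest value at the root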
heapify(h)
i = 0<|fim▁hole|> else:
heappop(h)
i += 1
return h[0][0]
a = [[1,5,10], [4,5,11], [7,8,12]]
s = Solution()
print s.kthSmallest(a, 3)<|fim▁end|> | while i < k - 1:
val, nextval = h[0]
if nextval:
heapreplace(h, (nextval[0], nextval[1:])) |
<|file_name|>EmptyIterator.java<|end_file_name|><|fim▁begin|>package com.github.saulis.enumerables;
import java.util.Iterator;
import java.util.NoSuchElementException;
public class EmptyIterator<T> implements Iterator<T> {
@Override
public boolean hasNext() {
return false;<|fim▁hole|> public T next() {
throw new NoSuchElementException();
}
}<|fim▁end|> | }
@Override |
<|file_name|>UnsignedIntDecodingState.java<|end_file_name|><|fim▁begin|>package org.lastbamboo.common.util.mina.decode.binary;
import org.littleshoot.mina.common.ByteBuffer;
import org.littleshoot.mina.filter.codec.ProtocolDecoderOutput;
import org.littleshoot.util.mina.DecodingState;
/**
* Decoding state for reading a single unsigned int.
*/
public abstract class UnsignedIntDecodingState implements DecodingState
{
public DecodingState decode(final ByteBuffer in,
final ProtocolDecoderOutput out) throws Exception
{
if (in.remaining() > 3)
{
final long decoded = in.getUnsignedInt();
return finishDecode(decoded, out);
}
else
{<|fim▁hole|> }
/**
* Called on the subclass when the unsigned int has been successfully
* decoded.
*
* @param decodedShort The decoded unsigned int.
* @param out The decoder output.
* @return The next state.
* @throws Exception If any unexpected error occurs.
*/
protected abstract DecodingState finishDecode(final long decodedShort,
final ProtocolDecoderOutput out) throws Exception;
}<|fim▁end|> | return this;
} |
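For comparison, the same four-byte guard and big-endian unsigned read can be sketched in Python with the struct module (this mirrors the idea behind the MINA getUnsignedInt call above, not its API):

import struct

buf = bytes([0x00, 0x00, 0x01, 0x2C])    # 300 encoded as a big-endian uint32

if len(buf) > 3:                         # same guard as in.remaining() > 3
    (value,) = struct.unpack('>I', buf)  # '>I' = big-endian unsigned 32-bit
    assert value == 300
else:
    pass                                 # not enough bytes yet: keep waiting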
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function, unicode_literals
from django.contrib.auth.decorators import user_passes_test
from django_otp import user_has_device
from django_otp.conf import settings
def otp_required(view=None, redirect_field_name='next', login_url=None, if_configured=False):
"""
Similar to :func:`~django.contrib.auth.decorators.login_required`, but
requires the user to be :term:`verified`. By default, this redirects users
to :setting:`OTP_LOGIN_URL`.
<|fim▁hole|> """
if login_url is None:
login_url = settings.OTP_LOGIN_URL
def test(user):
return user.is_verified() or (if_configured and user.is_authenticated() and not user_has_device(user))
decorator = user_passes_test(test, login_url=login_url, redirect_field_name=redirect_field_name)
return decorator if (view is None) else decorator(view)<|fim▁end|> | :param if_configured: If ``True``, an authenticated user with no confirmed
OTP devices will be allowed. Default is ``False``.
:type if_configured: bool |
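A hedged usage sketch of the decorator above; the view functions are hypothetical and this assumes a standard django-otp project setup:

# Hypothetical views module, for illustration only.
from django.http import HttpResponse
from django_otp.decorators import otp_required

@otp_required
def secret_report(request):
    # Reachable only by users verified with a second factor.
    return HttpResponse("top secret")

@otp_required(if_configured=True)
def soft_gated(request):
    # Authenticated users with no confirmed OTP device are also allowed.
    return HttpResponse("soft gated")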
<|file_name|>zigbeeoutputstep_test.py<|end_file_name|><|fim▁begin|>'''
Created on Mar 8, 2013
@author: Gary
'''
import unittest
from housemonitor.outputs.zigbee.zigbeecontrol import ZigBeeControl
from housemonitor.outputs.zigbee.zigbeeoutputstep import ZigBeeOutputStep
from housemonitor.outputs.zigbee.zigbeeoutputthread import ZigBeeOutputThread
from housemonitor.lib.hmqueue import HMQueue
from housemonitor.lib.constants import Constants
from mock import Mock, MagicMock, patch
from housemonitor.lib.common import Common
import logging.config
class Test( unittest.TestCase ):
logger = logging.getLogger( 'UnitTest' )
def setUp( self ):
logging.config.fileConfig( "unittest_logging.conf" )
def tearDown( self ):
pass
def test_logger_name( self ):
queue = HMQueue()
zig = ZigBeeOutputStep( queue )
self.assertEqual( Constants.LogKeys.outputsZigBee, zig.logger_name )
def test_topic_name( self ):
queue = HMQueue()
zig = ZigBeeOutputStep( queue )<|fim▁hole|> value = 5
data = {Constants.DataPacket.device: 'device',
Constants.DataPacket.port: 'port',
Constants.DataPacket.arrival_time: 'arrival_time'}
listeners = ['a', 'b', 'c']
package = {'data': data, 'value': value}
queue = MagicMock( spec=HMQueue )
zig = ZigBeeOutputStep( queue )
v, d, l = zig.step( value, data, listeners )
queue.transmit.assert_called_once()
self.assertEqual( value, v )
self.assertEqual( data, d )
self.assertEqual( listeners, l )
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()<|fim▁end|> | self.assertEqual( Constants.TopicNames.ZigBeeOutput, zig.topic_name )
def test_step( self ): |
<|file_name|>wireless_headset.py<|end_file_name|><|fim▁begin|>import time
from headset import Headset
from stream import Stream
from common import Version, BytesStatus
class WirelessHeadset(Headset):
"""This class represents the wireless version of the mindwave
Args:
dev: device link
headset: the id of mindwave wireless version
It has the basic functionality to connect, autoconnect and disconnect
"""
def __init__(self, dev=None, headset_id=None, rate=None):
<|fim▁hole|>
self.device = dev
self.bauderate = rate
self.stream = Stream(device=self.device, bauderate=rate, version=Version.MINDWAVE)
time.sleep(2)
self.connect()
self.run(self.stream)
# def open(self):
# if not self.stream or not self.stream.IsOpen():
# #self.stream = stream.stream(self.device, baudrate=115200, parity=stream.PARITY_NONE, stopbits=stream.STOPBITS_ONE,
# # bytesize=stream.EIGHTBITS, writeTimeout=0, timeout=3, rtscts=True, xonxoff=False)
# self.stream = serial.Serial(self.device, self.baudrate, timeout=0.001, rtscts=True)
def autoconnect(self):
"""This method autoconnects to the mindwave every."""
self.stream.getStream().write(BytesStatus.AUTOCONNECT)
#the dongle switch to autoconnect mode it must wait 10 second to connect any headset
time.sleep(10)
def connect(self):
"""This method connects to the mindwave with the id."""
if self.id is not None:
# we send a byte to CONNECTED and other byte in hex of headset id
self.stream.getStream().write(''.join([BytesStatus.CONNECT, self.id.decode('hex')]))
else:
self.autoconnect()
def disconnect(self):
"""This method disconnects the mindwave."""
self.stream.getStream().write(BytesStatus.DISCONNECT)
def echo_raw(self):
"""This method prints the raw data from mindwave."""
while 1:
#time.sleep()
data = self.stream.read(1)
for b in data:
print '0x%s, ' % b.encode('hex'),
print ""<|fim▁end|> | Headset.__init__(self, headset_id) |
<|file_name|>cubic.py<|end_file_name|><|fim▁begin|>"""
Function-like objects that create cubic clusters.
"""
import numpy as np
from ase.data import reference_states as _refstate
from ase.cluster.factory import ClusterFactory
class SimpleCubicFactory(ClusterFactory):
spacegroup = 221
xtal_name = 'sc'
def get_lattice_constant(self):
"Get the lattice constant of an element with cubic crystal structure."
symmetry = _refstate[self.atomic_numbers[0]]['symmetry']
if symmetry != self.xtal_name:
raise ValueError, ("Cannot guess the %s " % (self.xtal_name,) +
"lattice constant of an element with crystal " +
"structure %s." % (symmetry,))
return _refstate[self.atomic_numbers[0]]['a']
def set_basis(self):
a = self.lattice_constant
if not isinstance(a, (int, float)):
raise ValueError("Improper lattice constant for %s crystal." % (self.xtal_name,))
self.lattice_basis = np.array([[a, 0., 0.],
[0., a, 0.],
[0., 0., a]])
self.resiproc_basis = self.get_resiproc_basis(self.lattice_basis)
SimpleCubic = SimpleCubicFactory()
class BodyCenteredCubicFactory(SimpleCubicFactory):
xtal_name = 'bcc'
atomic_basis = np.array([[0., 0., 0.],
[.5, .5, .5]])
BodyCenteredCubic = BodyCenteredCubicFactory()
class FaceCenteredCubicFactory(SimpleCubicFactory):
xtal_name = 'fcc'
atomic_basis = np.array([[0., 0., 0.],
[0., .5, .5],
[.5, 0., .5],
[.5, .5, 0.]])
<|fim▁hole|>FaceCenteredCubic = FaceCenteredCubicFactory()<|fim▁end|> | |
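The factories above are normally called with an element symbol, a set of surface normals, and layer counts; a sketch of the usual ASE cluster call pattern (the surface and layer values here are arbitrary examples):

# Assumes the module above is importable as ase.cluster.cubic.
from ase.cluster.cubic import FaceCenteredCubic

surfaces = [(1, 0, 0), (1, 1, 0), (1, 1, 1)]  # surface normal directions
layers = [6, 9, 5]                            # layers kept along each normal
atoms = FaceCenteredCubic('Cu', surfaces, layers)
print(len(atoms))                             # number of atoms in the cluster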
<|file_name|>dashboard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""<|fim▁hole|> ⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
"""
import os
import logging
from typing import List, Tuple
from ..http.parser import HttpParser
from ..http.server import HttpWebServerBasePlugin, httpProtocolTypes
from ..http.responses import permanentRedirectResponse
logger = logging.getLogger(__name__)
class ProxyDashboard(HttpWebServerBasePlugin):
"""Proxy Dashboard."""
# Redirects to /dashboard/
REDIRECT_ROUTES = [
(httpProtocolTypes.HTTP, r'/dashboard$'),
(httpProtocolTypes.HTTPS, r'/dashboard$'),
(httpProtocolTypes.HTTP, r'/dashboard/proxy.html$'),
(httpProtocolTypes.HTTPS, r'/dashboard/proxy.html$'),
]
# Index html route
INDEX_ROUTES = [
(httpProtocolTypes.HTTP, r'/dashboard/$'),
(httpProtocolTypes.HTTPS, r'/dashboard/$'),
]
def routes(self) -> List[Tuple[int, str]]:
return ProxyDashboard.REDIRECT_ROUTES + \
ProxyDashboard.INDEX_ROUTES
def handle_request(self, request: HttpParser) -> None:
if request.path == b'/dashboard/':
self.client.queue(
self.serve_static_file(
os.path.join(
self.flags.static_server_dir,
'dashboard', 'proxy.html',
),
self.flags.min_compression_length,
),
)
elif request.path in (
b'/dashboard',
b'/dashboard/proxy.html',
):
self.client.queue(permanentRedirectResponse(b'/dashboard/'))<|fim▁end|> | proxy.py
~~~~~~~~ |
<|file_name|>users-rooms-list.js<|end_file_name|><|fim▁begin|>define('view/rooms/users-rooms-list', [<|fim▁hole|>], function (
View
) {
function UsersRoomsListView() {
View.apply(this, arguments);
}
View.extend({
constructor: UsersRoomsListView,
template: {
'root': {
each: {
view: UserRoomView,
el: '> *'
}
}
}
});
function UserRoomView() {
View.apply(this, arguments);
}
View.extend({
constructor: UserRoomView,
template: {
'root': {
'class': {
'hidden': '@hidden'
}
},
'[data-title]': {
text: '@name',
attr: {
'href': function () {
return '#user-room/' + this.model.get('id')
}
}
}
}
});
return UsersRoomsListView;
});<|fim▁end|> | 'view' |
<|file_name|>Timex.java<|end_file_name|><|fim▁begin|>package edu.stanford.nlp.time;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Map;
import java.util.regex.Pattern;
import edu.stanford.nlp.util.Pair;
import org.w3c.dom.Element;
/**
* Stores one TIMEX3 expression. This class is used for both TimeAnnotator and
* GUTimeAnnotator for storing information for TIMEX3 tags.
*
* <p>
* Example text with TIMEX3 annotation:<br>
* <code>In Washington <TIMEX3 tid="t1" TYPE="DATE" VAL="PRESENT_REF"
* temporalFunction="true" valueFromFunction="tf1"
* anchorTimeID="t0">today</TIMEX3>, the Federal Aviation Administration
* released air traffic control tapes from the night the TWA Flight eight
* hundred went down.
* </code>
* <p>
* <br>
* TIMEX3 specification:
* <br>
* <pre><code>
* attributes ::= tid type [functionInDocument] [beginPoint] [endPoint]
* [quant] [freq] [temporalFunction] (value | valueFromFunction)
* [mod] [anchorTimeID] [comment]
*<|fim▁hole|> * beginPoint ::= IDREF
* {beginPoint ::= TimeID}
* endPoint ::= IDREF
* {endPoint ::= TimeID}
* quant ::= CDATA
* freq ::= Duration
* functionInDocument ::= 'CREATION_TIME' | 'EXPIRATION_TIME' | 'MODIFICATION_TIME' |
* 'PUBLICATION_TIME' | 'RELEASE_TIME'| 'RECEPTION_TIME' |
* 'NONE' {default, if absent, is 'NONE'}
* temporalFunction ::= 'true' | 'false' {default, if absent, is 'false'}
* {temporalFunction ::= boolean}
* value ::= Duration | Date | Time | WeekDate | WeekTime | Season | PartOfYear | PaPrFu
* valueFromFunction ::= IDREF
* {valueFromFunction ::= TemporalFunctionID
* TemporalFunctionID ::= tf<integer>}
* mod ::= 'BEFORE' | 'AFTER' | 'ON_OR_BEFORE' | 'ON_OR_AFTER' |'LESS_THAN' | 'MORE_THAN' |
* 'EQUAL_OR_LESS' | 'EQUAL_OR_MORE' | 'START' | 'MID' | 'END' | 'APPROX'
* anchorTimeID ::= IDREF
* {anchorTimeID ::= TimeID}
* comment ::= CDATA
* </code></pre>
*
* <p>
* References
* <br>
* Guidelines: <a href="http://www.timeml.org/tempeval2/tempeval2-trial/guidelines/timex3guidelines-072009.pdf">
* http://www.timeml.org/tempeval2/tempeval2-trial/guidelines/timex3guidelines-072009.pdf</a>
* <br>
* Specifications: <a href="http://www.timeml.org/site/publications/timeMLdocs/timeml_1.2.1.html#timex3">
* http://www.timeml.org/site/publications/timeMLdocs/timeml_1.2.1.html#timex3</a>
* <br>
* XSD: <a href="http://www.timeml.org/timeMLdocs/TimeML.xsd">http://www.timeml.org/timeMLdocs/TimeML.xsd</a>
**/
public class Timex implements Serializable {
private static final long serialVersionUID = 385847729549981302L;
/**
* XML representation of the TIMEX tag
*/
private String xml;
/**
* TIMEX3 value attribute - Time value (given in extended ISO 8601 format).
*/
private String val;
/**
* Alternate representation for time value (not part of TIMEX3 standard).
* used when value of the time expression cannot be expressed as a standard TIMEX3 value.
*/
private String altVal;
/**
* Actual text that make up the time expression
*/
private String text;
/**
* TIMEX3 type attribute - Type of the time expression (DATE, TIME, DURATION, or SET)
*/
private String type;
/**
* TIMEX3 tid attribute - TimeID. ID to identify this time expression.
* Should have the format of {@code t<integer>}
*/
private String tid;
// TODO: maybe its easier if these are just strings...
/**
* TIMEX3 beginPoint attribute - integer indicating the TimeID of the begin time
* that anchors this duration/range (-1 is not present).
*/
private int beginPoint;
/**
* TIMEX3 beginPoint attribute - integer indicating the TimeID of the end time
* that anchors this duration/range (-1 is not present).
*/
private int endPoint;
/**
* Range begin/end/duration
* (this is not part of the timex standard and is typically null, available if sutime.includeRange is true)
*/
private Range range;
public static class Range implements Serializable {
private static final long serialVersionUID = 1L;
public String begin;
public String end;
public String duration;
public Range(String begin, String end, String duration) {
this.begin = begin;
this.end = end;
this.duration = duration;
}
}
public String value() {
return val;
}
public String altVal() {
return altVal;
}
public String text() {
return text;
}
public String timexType() {
return type;
}
public String tid() {
return tid;
}
public Range range() {
return range;
}
public Timex() {
}
public Timex(Element element) {
this.val = null;
this.beginPoint = -1;
this.endPoint = -1;
/*
* ByteArrayOutputStream os = new ByteArrayOutputStream(); Serializer ser =
* new Serializer(os, "UTF-8"); ser.setIndent(2); // this is the default in
* JDOM so let's keep the same ser.setMaxLength(0); // no line wrapping for
* content ser.write(new Document(element));
*/
init(element);
}
public Timex(String val) {
this(null, val);
}
public Timex(String type, String val) {
this.val = val;
this.type = type;
this.beginPoint = -1;
this.endPoint = -1;
this.xml = (val == null ? "<TIMEX3/>" : String.format("<TIMEX3 VAL=\"%s\" TYPE=\"%s\"/>", this.val, this.type));
}
public Timex(String type, String val, String altVal, String tid, String text, int beginPoint, int endPoint) {
this.type = type;
this.val = val;
this.altVal = altVal;
this.tid = tid;
this.text = text;
this.beginPoint = beginPoint;
this.endPoint = endPoint;
}
private void init(Element element) {
init(XMLUtils.nodeToString(element, false), element);
}
private void init(String xml, Element element) {
this.xml = xml;
this.text = element.getTextContent();
// Mandatory attributes
this.tid = XMLUtils.getAttribute(element, "tid");
this.val = XMLUtils.getAttribute(element, "VAL");
if (this.val == null) {
this.val = XMLUtils.getAttribute(element, "value");
}
this.altVal = XMLUtils.getAttribute(element, "alt_value");
this.type = XMLUtils.getAttribute(element, "type");
if (type == null) {
this.type = XMLUtils.getAttribute(element, "TYPE");
}
// if (this.type != null) {
// this.type = this.type.intern();
// }
// Optional attributes
String beginPoint = XMLUtils.getAttribute(element, "beginPoint");
this.beginPoint = (beginPoint == null || beginPoint.length() == 0)? -1 : Integer.parseInt(beginPoint.substring(1));
String endPoint = XMLUtils.getAttribute(element, "endPoint");
this.endPoint = (endPoint == null || endPoint.length() == 0)? -1 : Integer.parseInt(endPoint.substring(1));
// Optional range
String rangeStr = XMLUtils.getAttribute(element, "range");
if (rangeStr != null) {
if (rangeStr.startsWith("(") && rangeStr.endsWith(")")) {
rangeStr = rangeStr.substring(1, rangeStr.length()-1);
}
String[] parts = rangeStr.split(",");
this.range = new Range(parts.length > 0? parts[0]:"", parts.length > 1? parts[1]:"", parts.length > 2? parts[2]:"");
}
}
public int beginPoint() { return beginPoint; }
public int endPoint() { return endPoint; }
public String toString() {
return (this.xml != null) ? this.xml : this.val;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Timex timex = (Timex) o;
if (beginPoint != timex.beginPoint) {
return false;
}
if (endPoint != timex.endPoint) {
return false;
}
if (type != null ? !type.equals(timex.type) : timex.type != null) {
return false;
}
if (val != null ? !val.equals(timex.val) : timex.val != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = val != null ? val.hashCode() : 0;
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + beginPoint;
result = 31 * result + endPoint;
return result;
}
public Element toXmlElement() {
Element element = XMLUtils.createElement("TIMEX3");
if (tid != null) {
element.setAttribute("tid", tid);
}
if (value() != null) {
element.setAttribute("value", val);
}
if (altVal != null) {
element.setAttribute("altVal", altVal);
}
if (type != null) {
element.setAttribute("type", type);
}
if (beginPoint != -1) {
element.setAttribute("beginPoint", "t" + String.valueOf(beginPoint));
}
if (endPoint != -1) {
element.setAttribute("endPoint", "t" + String.valueOf(endPoint));
}
if (text != null) {
element.setTextContent(text);
}
return element;
}
// Used to create timex from XML (mainly for testing)
public static Timex fromXml(String xml) {
Element element = XMLUtils.parseElement(xml);
if ("TIMEX3".equals(element.getNodeName())) {
Timex t = new Timex();
// t.init(xml, element);
// Doesn't preserve original input xml
// Will reorder attributes of xml so can match xml of test timex and actual timex
// (for which we can't control the order of the attributes now we don't use nu.xom...)
t.init(element);
return t;
} else {
throw new IllegalArgumentException("Invalid timex xml: " + xml);
}
}
public static Timex fromMap(String text, Map<String, String> map) {
try {
Element element = XMLUtils.createElement("TIMEX3");
for (Map.Entry<String, String> entry : map.entrySet()) {
if (entry.getValue() != null) {
element.setAttribute(entry.getKey(), entry.getValue());
}
}
element.setTextContent(text);
return new Timex(element);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Gets the Calendar matching the year, month and day of this Timex.
*
* @return The matching Calendar.
*/
public Calendar getDate() {
if (Pattern.matches("\\d\\d\\d\\d-\\d\\d-\\d\\d", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(5, 7));
int day = Integer.parseInt(this.val.substring(8, 10));
return makeCalendar(year, month, day);
} else if (Pattern.matches("\\d\\d\\d\\d\\d\\d\\d\\d", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(4, 6));
int day = Integer.parseInt(this.val.substring(6, 8));
return makeCalendar(year, month, day);
}
throw new UnsupportedOperationException(String.format("%s is not a fully specified date", this));
}
/**
* Gets two Calendars, marking the beginning and ending of this Timex's range.
*
* @return The begin point and end point Calendars.
*/
public Pair<Calendar, Calendar> getRange() {
return this.getRange(null);
}
/**
* Gets two Calendars, marking the beginning and ending of this Timex's range.
*
* @param documentTime
* The time the document containing this Timex was written. (Not
* necessary for resolving all Timex expressions. This may be
* {@code null}, but then relative time expressions cannot be
* resolved.)
* @return The begin point and end point Calendars.
*/
public Pair<Calendar, Calendar> getRange(Timex documentTime) {
if (this.val == null) {
throw new UnsupportedOperationException("no value specified for " + this);
}
// YYYYMMDD or YYYYMMDDT... where the time is concatenated directly with the
// date
else if (val.length() >= 8 && Pattern.matches("\\d\\d\\d\\d\\d\\d\\d\\d", this.val.substring(0, 8))) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(4, 6));
int day = Integer.parseInt(this.val.substring(6, 8));
return new Pair<>(makeCalendar(year, month, day), makeCalendar(year, month, day));
}
// YYYY-MM-DD or YYYY-MM-DDT...
else if (val.length() >= 10 && Pattern.matches("\\d\\d\\d\\d-\\d\\d-\\d\\d", this.val.substring(0, 10))) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(5, 7));
int day = Integer.parseInt(this.val.substring(8, 10));
return new Pair<>(makeCalendar(year, month, day), makeCalendar(year, month, day));
}
// YYYYMMDDL+
else if (Pattern.matches("\\d\\d\\d\\d\\d\\d\\d\\d[A-Z]+", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(4, 6));
int day = Integer.parseInt(this.val.substring(6, 8));
return new Pair<>(makeCalendar(year, month, day), makeCalendar(year, month, day));
}
// YYYYMM or YYYYMMT...
else if (val.length() >= 6 && Pattern.matches("\\d\\d\\d\\d\\d\\d", this.val.substring(0, 6))) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(4, 6));
Calendar begin = makeCalendar(year, month, 1);
int lastDay = begin.getActualMaximum(Calendar.DATE);
Calendar end = makeCalendar(year, month, lastDay);
return new Pair<>(begin, end);
}
// YYYY-MM or YYYY-MMT...
else if (val.length() >= 7 && Pattern.matches("\\d\\d\\d\\d-\\d\\d", this.val.substring(0, 7))) {
int year = Integer.parseInt(this.val.substring(0, 4));
int month = Integer.parseInt(this.val.substring(5, 7));
Calendar begin = makeCalendar(year, month, 1);
int lastDay = begin.getActualMaximum(Calendar.DATE);
Calendar end = makeCalendar(year, month, lastDay);
return new Pair<>(begin, end);
}
// YYYY or YYYYT...
else if (val.length() >= 4 && Pattern.matches("\\d\\d\\d\\d", this.val.substring(0, 4))) {
int year = Integer.parseInt(this.val.substring(0, 4));
return new Pair<>(makeCalendar(year, 1, 1), makeCalendar(year, 12, 31));
}
// PDDY
if (Pattern.matches("P\\d+Y", this.val) && documentTime != null) {
Calendar rc = documentTime.getDate();
int yearRange = Integer.parseInt(this.val.substring(1, this.val.length() - 1));
// in the future
if (this.beginPoint < this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
end.add(Calendar.YEAR, yearRange);
return new Pair<>(start, end);
}
// in the past
else if (this.beginPoint > this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
start.add(Calendar.YEAR, 0 - yearRange);
return new Pair<>(start, end);
}
throw new RuntimeException("begin and end are equal " + this);
}
// PDDM
if (Pattern.matches("P\\d+M", this.val) && documentTime != null) {
Calendar rc = documentTime.getDate();
int monthRange = Integer.parseInt(this.val.substring(1, this.val.length() - 1));
// in the future
if (this.beginPoint < this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
end.add(Calendar.MONTH, monthRange);
return new Pair<>(start, end);
}
// in the past
if (this.beginPoint > this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
start.add(Calendar.MONTH, 0 - monthRange);
return new Pair<>(start, end);
}
throw new RuntimeException("begin and end are equal " + this);
}
// PDDD
if (Pattern.matches("P\\d+D", this.val) && documentTime != null) {
Calendar rc = documentTime.getDate();
int dayRange = Integer.parseInt(this.val.substring(1, this.val.length() - 1));
// in the future
if (this.beginPoint < this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
end.add(Calendar.DAY_OF_MONTH, dayRange);
return new Pair<>(start, end);
}
// in the past
if (this.beginPoint > this.endPoint) {
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
start.add(Calendar.DAY_OF_MONTH, 0 - dayRange);
return new Pair<>(start, end);
}
throw new RuntimeException("begin and end are equal " + this);
}
// YYYYSP
if (Pattern.matches("\\d+SP", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
Calendar start = makeCalendar(year, 2, 1);
Calendar end = makeCalendar(year, 4, 31);
return new Pair<>(start, end);
}
// YYYYSU
if (Pattern.matches("\\d+SU", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
Calendar start = makeCalendar(year, 5, 1);
Calendar end = makeCalendar(year, 7, 31);
return new Pair<>(start, end);
}
// YYYYFA
if (Pattern.matches("\\d+FA", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
Calendar start = makeCalendar(year, 8, 1);
Calendar end = makeCalendar(year, 10, 31);
return new Pair<>(start, end);
}
// YYYYWI
if (Pattern.matches("\\d+WI", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
Calendar start = makeCalendar(year, 11, 1);
Calendar end = makeCalendar(year + 1, 1, 29);
return new Pair<>(start, end);
}
// YYYYWDD
if (Pattern.matches("\\d\\d\\d\\dW\\d+", this.val)) {
int year = Integer.parseInt(this.val.substring(0, 4));
int week = Integer.parseInt(this.val.substring(5));
int startDay = (week - 1) * 7;
int endDay = startDay + 6;
Calendar start = makeCalendar(year, startDay);
Calendar end = makeCalendar(year, endDay);
return new Pair<>(start, end);
}
// PRESENT_REF
if (this.val.equals("PRESENT_REF")) {
Calendar rc = documentTime.getDate(); // todo: This case doesn't check for documentTime being null and will NPE
Calendar start = copyCalendar(rc);
Calendar end = copyCalendar(rc);
return new Pair<>(start, end);
}
throw new RuntimeException(String.format("unknown value \"%s\" in %s", this.val, this));
}
private static Calendar makeCalendar(int year, int month, int day) {
Calendar date = Calendar.getInstance();
date.clear();
date.set(year, month - 1, day, 0, 0, 0);
return date;
}
private static Calendar makeCalendar(int year, int dayOfYear) {
Calendar date = Calendar.getInstance();
date.clear();
date.set(Calendar.YEAR, year);
date.set(Calendar.DAY_OF_YEAR, dayOfYear);
return date;
}
private static Calendar copyCalendar(Calendar c) {
Calendar date = Calendar.getInstance();
date.clear();
date.set(c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH), c.get(Calendar.HOUR_OF_DAY), c
.get(Calendar.MINUTE), c.get(Calendar.SECOND));
return date;
}
}<|fim▁end|> | * tid ::= ID
* {tid ::= TimeID
* TimeID ::= t<integer>}
* type ::= 'DATE' | 'TIME' | 'DURATION' | 'SET' |
<|file_name|>style_properties.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use StateFlags;
use StyleProvider;
use SymbolicColor;
use ffi;
use glib;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct StyleProperties(Object<ffi::GtkStyleProperties>): StyleProvider;
match fn {
get_type => || ffi::gtk_style_properties_get_type(),
}
}
impl StyleProperties {
pub fn new() -> StyleProperties {
assert_initialized_main_thread!();
unsafe {
from_glib_full(ffi::gtk_style_properties_new())
}
}
//pub fn lookup_property(property_name: &str, parse_func: /*Unknown conversion*//*Unimplemented*/StylePropertyParser, pspec: /*Ignored*/glib::ParamSpec) -> bool {
// unsafe { TODO: call ffi::gtk_style_properties_lookup_property() }
//}
//pub fn register_property<'a, P: Into<Option<&'a /*Unimplemented*/StylePropertyParser>>, Q: IsA</*Ignored*/glib::ParamSpec>>(parse_func: P, pspec: &Q) {
// unsafe { TODO: call ffi::gtk_style_properties_register_property() }
//}
}
pub trait StylePropertiesExt {
fn clear(&self);
//fn get(&self, state: StateFlags, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);
fn get_property(&self, property: &str, state: StateFlags) -> Option<glib::Value>;
//fn get_valist(&self, state: StateFlags, args: /*Unknown conversion*//*Unimplemented*/Unsupported);
fn lookup_color(&self, name: &str) -> Option<SymbolicColor>;
fn map_color(&self, name: &str, color: &SymbolicColor);
fn merge(&self, props_to_merge: &StyleProperties, replace: bool);
//fn set(&self, state: StateFlags, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);
fn set_property(&self, property: &str, state: StateFlags, value: &glib::Value);
//fn set_valist(&self, state: StateFlags, args: /*Unknown conversion*//*Unimplemented*/Unsupported);
fn unset_property(&self, property: &str, state: StateFlags);
}
impl<O: IsA<StyleProperties>> StylePropertiesExt for O {
fn clear(&self) {
unsafe {
ffi::gtk_style_properties_clear(self.to_glib_none().0);
}
}
//fn get(&self, state: StateFlags, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {
// unsafe { TODO: call ffi::gtk_style_properties_get() }
//}
fn get_property(&self, property: &str, state: StateFlags) -> Option<glib::Value> {
unsafe {
let mut value = glib::Value::uninitialized();
let ret = from_glib(ffi::gtk_style_properties_get_property(self.to_glib_none().0, property.to_glib_none().0, state.to_glib(), value.to_glib_none_mut().0));
if ret { Some(value) } else { None }
}
}
//fn get_valist(&self, state: StateFlags, args: /*Unknown conversion*//*Unimplemented*/Unsupported) {
// unsafe { TODO: call ffi::gtk_style_properties_get_valist() }
//}
fn lookup_color(&self, name: &str) -> Option<SymbolicColor> {
unsafe {
from_glib_none(ffi::gtk_style_properties_lookup_color(self.to_glib_none().0, name.to_glib_none().0))
}
}
fn map_color(&self, name: &str, color: &SymbolicColor) {
unsafe {
ffi::gtk_style_properties_map_color(self.to_glib_none().0, name.to_glib_none().0, color.to_glib_none().0);
}
}
fn merge(&self, props_to_merge: &StyleProperties, replace: bool) {
unsafe {
ffi::gtk_style_properties_merge(self.to_glib_none().0, props_to_merge.to_glib_none().0, replace.to_glib());
}
}
//fn set(&self, state: StateFlags, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {
// unsafe { TODO: call ffi::gtk_style_properties_set() }
//}
fn set_property(&self, property: &str, state: StateFlags, value: &glib::Value) {
unsafe {
ffi::gtk_style_properties_set_property(self.to_glib_none().0, property.to_glib_none().0, state.to_glib(), value.to_glib_none().0);
}
}
//fn set_valist(&self, state: StateFlags, args: /*Unknown conversion*//*Unimplemented*/Unsupported) {
// unsafe { TODO: call ffi::gtk_style_properties_set_valist() }
//}
fn unset_property(&self, property: &str, state: StateFlags) {
unsafe {
ffi::gtk_style_properties_unset_property(self.to_glib_none().0, property.to_glib_none().0, state.to_glib());<|fim▁hole|> }
}
}<|fim▁end|> | |
<|file_name|>basic.py<|end_file_name|><|fim▁begin|>#encoding: utf8
class MemoryStorage(object):
"""
Simple(dummy) in-memory storage
In production better to use some database. Written to describe the api and testing
"""
def __init__(self, config):
# configuration of channels and services provided by application
self.config = config
# Storage of undelivered messages
self.storage = dict()
# Dict with services and subscriptions
self.clients = dict()
self.load_clients()
# The list of keys to interact with the application
self.api_keys = list()
self.load_api_keys()
def load_clients(self):
"""
Loading services and key data from config
"""
channel_dict = self.config.get('channels', {})
for channel in channel_dict:
self.add_channel(channel)
for user_data in channel_dict.get(channel, []):
self.subscribe(user_data['url'], user_data['key'], channel)
def load_api_keys(self):
"""
Loading keys to store messages
"""
self.api_keys = self.config.get('keys', [])
def push(self, client, channel, data, status_code):
"""
Adding the undeliverable message in the storage
"""
if client not in self.storage:
self.storage[client] = {}
if channel not in self.storage[client]:
self.storage[client][channel] = []
self.storage[client][channel].append((data, status_code))
def waiting_clients(self):
"""
Returns the services that have undelivered messages
"""
return self.storage.iterkeys()
def waiting_messages(self, client):
"""
Returns a dict with the channels in which there are unsent messages
for a given service
"""
return self.storage.get(client, None)
def get_client_key(self, client, channel):
"""
Returns the key to send the data to the client in a given channel
"""
result = None
if channel in self.clients:
for service in self.clients[channel]:
if service[0] != client:
continue
result = service[1]
return result
def get_clients(self, channel):
"""
Returns a list of services subscribed to the channel
"""
if channel not in self.clients.keys():
return []
return self.clients[channel]
def drop_message(self, client, channel, i):
"""
Deletes the message from storage
"""
del self.storage[client][channel][i]
def messages_in_channel(self, client, channel):
"""
Returns the number of unsent messages for service in a given channel
"""
result = None
if client not in self.storage:
return result
if channel not in self.storage[client]:
return result
result = len(self.storage[client][channel])
return result
def __repr__(self):
"""
In print we trust
"""
return repr(self.storage)
def add_channel(self, channel):
"""
Try to create a channel in the storage
"""
if channel in self.clients:
return False
self.clients[channel] = []
return True
def subscribe(self, client, api_key, channel):
"""
Subscribe client to the channel
"""
if channel not in self.clients:
return False
pair = (client, api_key)
if pair in self.clients[channel]:
return False
self.clients[channel].append(pair)
return True
def unsubscribe(self, client, channel):
"""
Unsubscribe client from the channel
N.B. ideally all queued messages for this client should be dropped too (not implemented here)
"""
clients = self.clients.get(channel)
if clients is None:
return False
index = None
for i, pair in enumerate(clients):
if pair[0] != client:
continue
index = i
break
if index is not None:
del self.clients[channel][index]
return True
def drop_channel(self, channel):
"""
Drop channel with all messages and clients
"""<|fim▁hole|><|fim▁end|> | return self.clients.pop(channel, None) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>}<|fim▁end|> | fn main() {
let lucky_number = 7; // I’m feeling lucky today |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># coding=utf-8
from django_sae.contrib.tasks.cron import OperationView
from django_sae.contrib.tasks.operations import TaskOperationMixin
class OperationViewMock(OperationView):
def get_operation(self, request):<|fim▁hole|><|fim▁end|> | return [TaskOperationMixin() for _ in range(0, 3)] |
<|file_name|>application.py<|end_file_name|><|fim▁begin|>'''
Created on 2013-7-21
@author: hujin
'''<|fim▁hole|>import sys
from PySide.QtGui import QApplication
from mdeditor.ui.window import MainWindow
class Application(QApplication):
def __init__(self):
'''
Constructor
'''
super(Application, self).__init__(sys.argv)
def run(self):
'''
Run the application.
'''
frame = MainWindow()
frame.show()
self.exec_()
sys.exit()<|fim▁end|> | |
<|file_name|>plan.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from ..resource import Resource
from collections import namedtuple
class Plan(Resource):
"""
https://dev.chartmogul.com/v1.0/reference#plans
"""
_path = "/plans{/uuid}"
_root_key = 'plans'
_many = namedtuple('Plans', [_root_key, "current_page", "total_pages"])
class _Schema(Schema):
uuid = fields.String()
data_source_uuid = fields.String()
name = fields.String()
interval_count = fields.Int()
interval_unit = fields.String()
external_id = fields.String()
@post_load
def make(self, data, **kwargs):
return Plan(**data)
_schema = _Schema(unknown=EXCLUDE)<|fim▁end|> | from marshmallow import Schema, fields, post_load, EXCLUDE |
<|file_name|>db.py<|end_file_name|><|fim▁begin|>import sqlite3
class Database:
def __init__(self, dbfile, page_rows=100):<|fim▁hole|> self.dbfile = dbfile
self.page_rows = page_rows
self.conn = sqlite3.connect(self.dbfile)
self.conn.row_factory = sqlite3.Row
cursor = self.conn.cursor()
cursor.execute(
"CREATE TABLE IF NOT EXISTS messages "
"(timestamp TEXT, message TEXT);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS messages_timestamp_idx "
"ON messages (timestamp);"
)
self.conn.commit()
def __del__(self):
if self.conn:
self.conn.close()
self.conn = None
def count(self):
cursor = self.conn.cursor()
n = cursor.execute("SELECT COUNT(*) FROM messages").fetchone()[0]
return n
def messages(self, offset=0):
cursor = self.conn.cursor()
rows = cursor.execute(
"SELECT * FROM messages "
"ORDER BY timestamp DESC "
"LIMIT ? "
"OFFSET ?",
[self.page_rows, offset]
).fetchall()
return [ dict(row) for row in rows ]
def save(self, item):
saved = False
if item.item_type == 'message':
timestamp = item.content['timestamp']
message = item.asJson()
cursor = self.conn.cursor()
cursor.execute(
"INSERT INTO messages VALUES (?,?)",
[timestamp, message]
)
self.conn.commit()
saved = True
return saved<|fim▁end|> | |
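A usage sketch for the Database class above; the item class is a hypothetical stand-in for whatever produces messages, shaped to match what save() expects (item_type, content, asJson):

import json

class FakeItem(object):
    # Minimal stand-in matching the interface save() relies on.
    item_type = 'message'
    content = {'timestamp': '2016-01-01T00:00:00Z', 'text': 'hello'}
    def asJson(self):
        return json.dumps(self.content)

db = Database(':memory:')   # throwaway in-memory SQLite database
db.save(FakeItem())
print(db.count())           # 1
print(db.messages())        # newest first, up to page_rows rows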
<|file_name|>ForTestFakeSpout.java<|end_file_name|><|fim▁begin|>package com.feiyu.storm.streamingdatacollection.spout;
/**
* from https://github.com/apache/incubator-storm/blob/master/examples/storm-starter/src/jvm/storm/starter/spout/RandomSentenceSpout.java
* modified by feiyu
*/
import java.util.Map;
import java.util.Random;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import backtype.storm.utils.Utils;
@SuppressWarnings("serial")
public class ForTestFakeSpout extends BaseRichSpout {
private SpoutOutputCollector _collector;
private Random _rand;
@SuppressWarnings("rawtypes")
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
_collector = collector;
_rand = new Random();
}
@Override
public void nextTuple() {
Utils.sleep(5000);
String[] tweets = new String[]{
"I rated X-Men: Days of Future Past 8/10 #IMDb http://www.imdb.com/title/tt1877832",
"I rated Game of Thrones 10/10 #IMDb http://www.imdb.com/title/tt0944947",
"I rated Snowpiercer 7/10 #IMDb really enjoyed this. Beautifully shot & choreographed. Great performance from Swinton http://www.imdb.com/title/tt1706620",
"Back on form. That ending = awesome! - I rated X-Men: Days of Future Past 7/10 #IMDb http://www.imdb.com/title/tt1877832",
"A great movie especially for those trying to learn German ~> I rated Run Lola Run 8/10 #IMDb http://www.imdb.com/title/tt0130827",
"I rated Breaking Bad 8/10 #IMDb :: I would say 7 but last season made it worth it... Matter of taste @mmelgarejoc http://www.imdb.com/title/tt0903747",
"I rated White House Down 7/10 #IMDb bunch of explosions and one liners, fun for all http://www.imdb.com/title/tt2334879"
};
String tweet = tweets[_rand.nextInt(tweets.length)];<|fim▁hole|> @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("tweet"));
}
}<|fim▁end|> | _collector.emit(new Values(tweet));
}
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse
from django.views.generic import TemplateView, DetailView
from django.views.generic.edit import FormView
from .forms import DonationForm
from .models import CardType
import json
class PagoView(FormView):
form_class = DonationForm
template_name = 'pago.html'
success_url = 'http://funsepa.org/cms/es/gracias/'
def get_context_data(self, **kwargs):
context = super(PagoView, self).get_context_data(**kwargs)
david = self.request.GET.get('david', None)
navidad = self.request.GET.get('navidad', None)
if david:
context['david'] = True
if navidad:
context['navidad'] = True
return context
def form_valid(self, form):
instance = super(PagoView, self).form_valid(form)
return instance
class PagoDone(TemplateView):<|fim▁hole|>
class CardTypeView(DetailView):
model = CardType
slug_field = 'card_type'
def get(self, *args, **kwargs):
card_type = CardType.objects.filter(card_type=kwargs.pop('slug')).first()
if card_type:
response = {
'id': card_type.id,
'card_type': card_type.card_type,
'name': card_type.alias}
else:
response = None
return HttpResponse(json.dumps(response))<|fim▁end|> | template_name = 'done.html'
|
<|file_name|>benchmark.rs<|end_file_name|><|fim▁begin|>use std::mem;
use mio::net::{AddressFamily, Inet, Inet6, SockAddr, InetAddr, IPv4Addr, SocketType, Dgram, Stream};
use std::io::net::ip::IpAddr;
use native::NativeTaskBuilder;
use std::task::TaskBuilder;
use mio::os::{from_sockaddr};
use time::Instant;
use std::vec::*;
use std::io::timer;
mod nix {
pub use nix::c_int;
pub use nix::fcntl::{Fd, O_NONBLOCK, O_CLOEXEC};
pub use nix::errno::{EWOULDBLOCK, EINPROGRESS};
pub use nix::sys::socket::*;
pub use nix::unistd::*;<|fim▁hole|>}
fn timed(label: &str, f: ||) {
let start = Instant::now();
f();
let elapsed = start.elapsed();
println!(" {}: {}", label, elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 / 1_000_000_000.0);
}
fn init(saddr: &str) -> (nix::Fd, nix::Fd) {
let optval = 1i;
let addr = SockAddr::parse(saddr.as_slice()).expect("could not parse InetAddr");
let srvfd = nix::socket(nix::AF_INET, nix::SOCK_STREAM, nix::SOCK_CLOEXEC).unwrap();
nix::setsockopt(srvfd, nix::SOL_SOCKET, nix::SO_REUSEADDR, &optval).unwrap();
nix::bind(srvfd, &from_sockaddr(&addr)).unwrap();
nix::listen(srvfd, 256u).unwrap();
let fd = nix::socket(nix::AF_INET, nix::SOCK_STREAM, nix::SOCK_CLOEXEC | nix::SOCK_NONBLOCK).unwrap();
let res = nix::connect(fd, &from_sockaddr(&addr));
let start = Instant::now();
println!("connecting : {}", res);
let clifd = nix::accept4(srvfd, nix::SOCK_CLOEXEC | nix::SOCK_NONBLOCK).unwrap();
let elapsed = start.elapsed();
println!("accepted : {} - {}", clifd, elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 / 1_000_000_000.0);
(clifd, srvfd)
}
#[test]
fn read_bench() {
let (clifd, srvfd) = init("10.10.1.5:11111");
let mut buf = Vec::with_capacity(1600);
unsafe { buf.set_len(1600); }
timed("read", || {
let mut i = 0u;
while i < 10000000 {
let res = nix::read(clifd, buf.as_mut_slice());
assert_eq!(res.unwrap_err().kind, nix::EWOULDBLOCK);
i = i + 1;
}
});
}
#[test]
fn epollctl_bench() {
let (clifd, srvfd) = init("10.10.1.5:22222");
let epfd = nix::epoll_create().unwrap();
let info = nix::EpollEvent { events: nix::EPOLLIN | nix::EPOLLONESHOT | nix::EPOLLET,
data: 0u64 };
nix::epoll_ctl(epfd, nix::EpollCtlAdd, clifd, &info);
timed("epoll_ctl", || {
let mut i = 0u;
while i < 10000000 {
nix::epoll_ctl(epfd, nix::EpollCtlMod, clifd, &info);
i = i + 1;
}
});
}<|fim▁end|> | pub use nix::sys::epoll::*; |
<|file_name|>definitions.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012-2020 Ben Kurtovic <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Contains data about certain markup, like HTML tags and external links.
When updating this file, please also update the the C tokenizer version:
- mwparserfromhell/parser/ctokenizer/definitions.c
- mwparserfromhell/parser/ctokenizer/definitions.h
"""
__all__ = [
"get_html_tag",
"is_parsable",
"is_visible",
"is_single",
"is_single_only",
"is_scheme",
]
URI_SCHEMES = {
# [wikimedia/mediawiki.git]/includes/DefaultSettings.php @ 5c660de5d0
"bitcoin": False,
"ftp": True,
"ftps": True,
"geo": False,
"git": True,
"gopher": True,
"http": True,
"https": True,
"irc": True,
"ircs": True,
"magnet": False,
"mailto": False,
"mms": True,
"news": False,
"nntp": True,
"redis": True,
"sftp": True,
"sip": False,
"sips": False,
"sms": False,
"ssh": True,
"svn": True,
"tel": False,
"telnet": True,
"urn": False,
"worldwind": True,
"xmpp": False,
}
PARSER_BLACKLIST = [
# https://www.mediawiki.org/wiki/Parser_extension_tags @ 2020-12-21
"categorytree",
"ce",
"chem",
"gallery",
"graph",
"hiero",
"imagemap",
"inputbox",
"math",
"nowiki",
"pre",
"score",
"section",
"source",
"syntaxhighlight",
"templatedata",<|fim▁hole|>INVISIBLE_TAGS = [
# https://www.mediawiki.org/wiki/Parser_extension_tags @ 2020-12-21
"categorytree",
"gallery",
"graph",
"imagemap",
"inputbox",
"math",
"score",
"section",
"templatedata",
"timeline",
]
# [wikimedia/mediawiki.git]/includes/parser/Sanitizer.php @ 95e17ee645
SINGLE_ONLY = ["br", "wbr", "hr", "meta", "link", "img"]
SINGLE = SINGLE_ONLY + ["li", "dt", "dd", "th", "td", "tr"]
MARKUP_TO_HTML = {
"#": "li",
"*": "li",
";": "dt",
":": "dd",
}
def get_html_tag(markup):
"""Return the HTML tag associated with the given wiki-markup."""
return MARKUP_TO_HTML[markup]
def is_parsable(tag):
"""Return if the given *tag*'s contents should be passed to the parser."""
return tag.lower() not in PARSER_BLACKLIST
def is_visible(tag):
"""Return whether or not the given *tag* contains visible text."""
return tag.lower() not in INVISIBLE_TAGS
def is_single(tag):
"""Return whether or not the given *tag* can exist without a close tag."""
return tag.lower() in SINGLE
def is_single_only(tag):
"""Return whether or not the given *tag* must exist without a close tag."""
return tag.lower() in SINGLE_ONLY
def is_scheme(scheme, slashes=True):
"""Return whether *scheme* is valid for external links."""
scheme = scheme.lower()
if slashes:
return scheme in URI_SCHEMES
return scheme in URI_SCHEMES and not URI_SCHEMES[scheme]<|fim▁end|> | "timeline",
]
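The helpers above are plain table lookups; for example (return values follow the URI_SCHEMES and SINGLE_ONLY tables in this module):

print(is_scheme('https'))                   # True
print(is_scheme('https', slashes=False))    # False: https requires slashes
print(is_scheme('bitcoin', slashes=False))  # True: bitcoin takes no slashes
print(is_single_only('br'))                 # True: <br> never has a close tag
print(get_html_tag('*'))                    # 'li'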
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(
html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png"
)]
//! <p>Amazon Route 53 is a highly available and scalable Domain Name System (DNS) web service.</p>
//!
//! If you're using the service, you're probably looking for [Route53Client](struct.Route53Client.html) and [Route53](trait.Route53.html).
mod custom;
mod generated;<|fim▁hole|><|fim▁end|> | pub use custom::*;
pub use generated::*; |
<|file_name|>2480.cc<|end_file_name|><|fim▁begin|>#include <cstdio>
#include <vector>
using namespace std;
long long phi(long long n)
{
long long ans = n;
for (long long i = 2LL; i*i <= n; i++) {
if (n % i == 0LL) {
ans -= ans / i;
while (n % i == 0LL) {
n /= i;
}
}
}
if (n > 1) {
ans -= ans / n;
}
return ans;
}
vector<long long> factorize(long long n)
{
vector<long long> fs;
for (long long i = 1LL; i*i <= n; i++) {
if (n % i == 0LL) {
fs.push_back(i);
if (i*i != n) {
fs.push_back(n/i);
}
}
}
return fs;
}
int main()
{
long long N;
while (scanf("%lld", &N) != EOF) {
long long ans = 0;
const vector<long long> fs = factorize(N);
ans = 0;
for (vector<long long>::const_iterator it = fs.begin(); it != fs.end(); ++it) {
ans += *it * phi(N / *it);<|fim▁hole|> printf("%lld\n", ans);
}
return 0;
}<|fim▁end|> | } |
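The program above appears to compute the sum over the divisors d of N of d*phi(N/d), i.e. Pillai's arithmetical function, which equals sum_{k=1..N} gcd(k, N); a brute-force Python check of that identity on small inputs:

from math import gcd

def phi(n):
    # Same trial-division totient as the C++ phi() above.
    ans, m = n, n
    p = 2
    while p * p <= m:
        if m % p == 0:
            ans -= ans // p
            while m % p == 0:
                m //= p
        p += 1
    if m > 1:
        ans -= ans // m
    return ans

def pillai(n):
    return sum(d * phi(n // d) for d in range(1, n + 1) if n % d == 0)

assert all(pillai(n) == sum(gcd(k, n) for k in range(1, n + 1))
           for n in range(1, 200))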
<|file_name|>hex2dump.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2008,2010,2011,2012,2013 Alexander Belchenko
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain
# the above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce
# the above copyright notice, this list of conditions
# and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the author nor the names
# of its contributors may be used to endorse
# or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Show content of hex file as hexdump."""
VERSION = '1.5.1'
USAGE = '''hex2dump: show content of hex file as hexdump.
Usage:
python hex2dump.py [options] HEXFILE
Options:
-h, --help this help message.
-v, --version version info.
-r, --range=START:END specify address range for dumping
(ascii hex value).
Range can be in form 'START:' or ':END'.
Arguments:
HEXFILE name of hex file for processing (use '-' to read
from stdin)
'''
import sys
def hex2dump(hexfile, start=None, end=None):
import intelhex
if hexfile == '-':
hexfile = sys.stdin
try:
ih = intelhex.IntelHex(hexfile)
except (IOError, intelhex.IntelHexError), e:
sys.stderr.write('Error reading file: %s\n' % e)
return 1
if not (start is None and end is None):
ih = ih[slice(start,end)]
ih.dump()
return 0
def main(argv=None):
import getopt
if argv is None:
argv = sys.argv[1:]
start = None<|fim▁hole|> end = None
try:
opts, args = getopt.getopt(argv, "hvp:r:",
["help", "version", "range="])
for o, a in opts:
if o in ("-h", "--help"):
print(USAGE)
return 0
elif o in ("-v", "--version"):
print(VERSION)
return 0
elif o in ("-r", "--range"):
try:
l = a.split(":")
if l[0] != '':
start = int(l[0], 16)
if l[1] != '':
end = int(l[1], 16)
except:
raise getopt.GetoptError('Bad range value(s)')
if not args:
raise getopt.GetoptError('Hex file is not specified')
if len(args) > 1:
raise getopt.GetoptError('Too many arguments')
except getopt.GetoptError, msg:
txt = 'ERROR: '+str(msg) # that's required to get not-so-dumb result from 2to3 tool
print(txt)
print(USAGE)
return 2
try:
return hex2dump(args[0], start, end)
except IOError, e:
import errno
if e.errno not in (0, errno.EPIPE):
raise
if __name__ == '__main__':
import sys
sys.exit(main())<|fim▁end|> | |
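For an interactive equivalent of this script (assuming the intelhex package is installed), the same dump can be produced directly; the file name is hypothetical:

from intelhex import IntelHex

ih = IntelHex('firmware.hex')   # any Intel HEX file
ih[0x0000:0x0100].dump()        # like running: hex2dump.py -r 0:100 firmware.hex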
<|file_name|>sample_recognize_business_cards.py<|end_file_name|><|fim▁begin|># coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_recognize_business_cards.py
DESCRIPTION:
This sample demonstrates how to recognize fields on business cards.
See fields found on a business card here:
https://aka.ms/formrecognizer/businesscardfields
USAGE:
python sample_recognize_business_cards.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
"""
import os
class RecognizeBusinessCardSample(object):
def recognize_business_card(self):
path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__),
"..", "..", "./sample_forms/business_cards/business-card-english.jpg"))
# [START recognize_business_cards]
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import FormRecognizerClient
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
form_recognizer_client = FormRecognizerClient(
endpoint=endpoint, credential=AzureKeyCredential(key)
)
with open(path_to_sample_forms, "rb") as f:
poller = form_recognizer_client.begin_recognize_business_cards(business_card=f, locale="en-US")
business_cards = poller.result()
for idx, business_card in enumerate(business_cards):
print("--------Recognizing business card #{}--------".format(idx+1))
contact_names = business_card.fields.get("ContactNames")
if contact_names:
for contact_name in contact_names.value:
print("Contact First Name: {} has confidence: {}".format(
contact_name.value["FirstName"].value, contact_name.value["FirstName"].confidence
))
print("Contact Last Name: {} has confidence: {}".format(
contact_name.value["LastName"].value, contact_name.value["LastName"].confidence
))
company_names = business_card.fields.get("CompanyNames")
if company_names:
for company_name in company_names.value:
print("Company Name: {} has confidence: {}".format(company_name.value, company_name.confidence))
departments = business_card.fields.get("Departments")
if departments:
for department in departments.value:
print("Department: {} has confidence: {}".format(department.value, department.confidence))
job_titles = business_card.fields.get("JobTitles")
if job_titles:
for job_title in job_titles.value:
print("Job Title: {} has confidence: {}".format(job_title.value, job_title.confidence))
emails = business_card.fields.get("Emails")
if emails:
for email in emails.value:
print("Email: {} has confidence: {}".format(email.value, email.confidence))
websites = business_card.fields.get("Websites")<|fim▁hole|> if addresses:
for address in addresses.value:
print("Address: {} has confidence: {}".format(address.value, address.confidence))
mobile_phones = business_card.fields.get("MobilePhones")
if mobile_phones:
for phone in mobile_phones.value:
print("Mobile phone number: {} has confidence: {}".format(phone.value, phone.confidence))
faxes = business_card.fields.get("Faxes")
if faxes:
for fax in faxes.value:
print("Fax number: {} has confidence: {}".format(fax.value, fax.confidence))
work_phones = business_card.fields.get("WorkPhones")
if work_phones:
for work_phone in work_phones.value:
print("Work phone number: {} has confidence: {}".format(work_phone.value, work_phone.confidence))
other_phones = business_card.fields.get("OtherPhones")
if other_phones:
for other_phone in other_phones.value:
print("Other phone number: {} has confidence: {}".format(other_phone.value, other_phone.confidence))
# [END recognize_business_cards]
if __name__ == '__main__':
sample = RecognizeBusinessCardSample()
sample.recognize_business_card()<|fim▁end|> | if websites:
for website in websites.value:
print("Website: {} has confidence: {}".format(website.value, website.confidence))
addresses = business_card.fields.get("Addresses") |
<|file_name|>LanScan.py<|end_file_name|><|fim▁begin|>import subprocess
import time
import sys
import re
class checkIfUp:
__shellPings = []
__shell2Nbst = []
__ipsToCheck = []
checkedIps = 0
onlineIps = 0
unreachable = 0
timedOut = 0
upIpsAddress = []
computerName = []
completeMacAddress = []
executionTime = 0
def __init__(self,fromIp,toIp):
startTime = time.time()
self.fromIp = fromIp # from 192.168.1.x
self.toIp = toIp # to 192.168.x.x
self.__checkIfIpIsValid(fromIp)
self.__checkIfIpIsValid(toIp)
self.__getRange(fromIp,toIp)
self.__shellToQueue()
#self.__checkIfUp() # run by the shellToQueue queue organizer
self.__computerInfoInQueue()
endTime = time.time()
self.executionTime = round(endTime - startTime,3)
def __checkIfIpIsValid(self,ip):
def validateRange(val):
# valid range => 0 <-> 255
try:
val = int(val)
if val < 0 or val > 255:
print "Invalid IP Range ("+str(val)+")"
sys.exit(0)
except:
print "Invalid IP"
sys.exit(0)
ip = ip.split(".")
firstVal = validateRange(ip[0])
secondVal = validateRange(ip[1])
thirdVal = validateRange(ip[2])
fourthVal = validateRange(ip[3])
return True
def __getRange(self,fromIp,toIp):
fromIp = fromIp.split(".")
toIp = toIp.split(".")
# toIp must be > fromIp
def ip3chars(ipBlock):
# input 1; output 001
ipBlock = str(ipBlock)
while len(ipBlock) != 3:
ipBlock = "0"+ipBlock
return ipBlock
fromIpRaw = ip3chars(fromIp[0])+ip3chars(fromIp[1])+ip3chars(fromIp[2])+ip3chars(fromIp[3])
toIpRaw = ip3chars(toIp[0])+ip3chars(toIp[1])+ip3chars(toIp[2])+ip3chars(toIp[3])
if fromIpRaw > toIpRaw:
# if from is bigger switch the order
temp = fromIp
fromIp = toIp
toIp = temp
currentIp = [0,0,0,0]
# all to integers
currentIp0 = int(fromIp[0])
currentIp1 = int(fromIp[1])
currentIp2 = int(fromIp[2])
currentIp3 = int(fromIp[3])
toIp0 = int(toIp[0])
toIp1 = int(toIp[1])
toIp2 = int(toIp[2])
toIp3 = int(toIp[3])
firstIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck = [firstIp]
while currentIp3 != toIp3 or currentIp2 != toIp2 or currentIp1 != toIp1 or currentIp0 != toIp0:
currentIp3 += 1
if currentIp3 > 255:
currentIp3 = 0
currentIp2 += 1
if currentIp2 > 255:
currentIp2 = 0
currentIp1 += 1
if currentIp1 > 255:
currentIp1 = 0
currentIp0 += 1
addIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck.append(addIp)
def __shellToQueue(self):
# write them in the shell queue
maxPingsAtOnce = 200
currentQueuedPings = 0
for pingIp in self.__ipsToCheck:
proc = subprocess.Popen(['ping','-n','1',pingIp],stdout=subprocess.PIPE,shell=True)
self.__shellPings.append(proc)
currentQueuedPings += 1
if currentQueuedPings >= maxPingsAtOnce:
#execute shells
self.__checkIfUp()
currentQueuedPings = 0
self.__shellPings = []
self.__checkIfUp() # execute last queue
def __checkIfUp(self):
# execute the shells & determine whether the host is up or not
for shellInQueue in self.__shellPings:
pingResult = ""
shellInQueue.wait()
while True:
line = shellInQueue.stdout.readline()
if line != "":
pingResult += line
else:
break
self.checkedIps += 1
if 'unreachable' in pingResult:
self.unreachable += 1
elif 'timed out' in pingResult:
self.timedOut += 1
else:
self.onlineIps += 1
currentIp = self.__ipsToCheck[self.checkedIps-1]
self.upIpsAddress.append(currentIp)
def __computerInfoInQueue(self):
# shell queue for online hosts
maxShellsAtOnce = 255
currentQueuedNbst = 0
for onlineIp in self.upIpsAddress:
proc = subprocess.Popen(['\\Windows\\sysnative\\nbtstat.exe','-a',onlineIp],stdout=subprocess.PIPE,shell=True)
self.__shell2Nbst.append(proc)
currentQueuedNbst += 1
if currentQueuedNbst >= maxShellsAtOnce:
# execute shells
self.__gatherComputerInfo()
currentQueuedNbst = 0
self.__shell2Nbst = []
self.__gatherComputerInfo() # execute last queue
def __gatherComputerInfo(self):
# execute the shells and find host Name and MAC
for shellInQueue in self.__shell2Nbst:
nbstResult = ""
shellInQueue.wait()
computerNameLine = ""
macAddressLine = ""
computerName = ""
macAddress = ""
while True:
line = shellInQueue.stdout.readline()
if line != "":
if '<00>' in line and 'UNIQUE' in line:
computerNameLine = line
if 'MAC Address' in line:
macAddressLine = line
else:
break
computerName = re.findall('([ ]+)(.*?)([ ]+)<00>', computerNameLine)
macAddress = re.findall('([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)',macAddressLine)
try:
self.computerName.append(computerName[0][1])
except:
self.computerName.append("")
completeMacAddress = ""
firstMacElement = 0
try:
for macEach in macAddress[0]:
if firstMacElement == 0:
firstMacElement += 1
else:
completeMacAddress += ":"
completeMacAddress += macEach
firstMacElement = 0
except:
completeMacAddress = ""
self.completeMacAddress.append(completeMacAddress)
def readValue(self):
# debugging use only
ips = []
<|fim▁hole|>print "\t\t---LANScanner v1.0---\n"
# brief tutorial
print "Sample input data:"
print "FromIP: 192.168.1.50"
print "ToIP: 192.168.1.20"
print "---"
# input
fromIp = raw_input("From: ")
toIp = raw_input("To: ")
# enter values to class
userRange = checkIfUp(fromIp,toIp)
# read class values
print ""
#print userRange.readValue() # debugging use only
print "Checked",userRange.checkedIps,"IPs"
print ""
print "Online:",str(userRange.onlineIps)+"/"+str(userRange.checkedIps)
print "Unreachable:",userRange.unreachable,"Timed out:",userRange.timedOut
print "" # newline
print "Online IPs:"
print "IP\t\tNAME\t\tMAC"
counter = 0
for onlineIp in userRange.upIpsAddress:
print onlineIp+"\t"+userRange.computerName[counter]+"\t"+userRange.completeMacAddress[counter]
counter += 1
print ""
print "Took",userRange.executionTime,"seconds"<|fim▁end|> | for ip in self.completeMacAddress:
ips.append(ip)
return ips
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from pysimplesoap.client import SoapClient
def inspect(wsdl_url):
client = SoapClient(wsdl=wsdl_url,trace=False)
print("Target Namespace", client.namespace)
for service in list(client.services.values()):<|fim▁hole|> for port in list(service['ports'].values()):
print(port['location'])
for op in list(port['operations'].values()):
print('Name:', op['name'])
print('Docs:', op['documentation'].strip())
print('SOAPAction:', op['action'])
print('Input', op['input']) # args type declaration
print('Output', op['output']) # returns type declaration
print('\n')<|fim▁end|> | |
<|file_name|>centos5.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# centos5.py - A webKickstart module to handle changes needed from
# RHEL 5 to CentOS 5 Kickstart generation.
#
# Copyright 2007 NC State University
# Written by Jack Neely <[email protected]><|fim▁hole|># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from baseRealmLinuxKickstart import baseRealmLinuxKickstart
class Kickstart(baseRealmLinuxKickstart):
def __init__(self, url, cfg, sc=None):
baseRealmLinuxKickstart.__init__(self, url, cfg, sc)
self.buildOrder.remove(self.installationNumber)
self.buildOrder.remove(self.RHN)<|fim▁end|> | #
# SDG
# |
<|file_name|>simpleForLoop.py<|end_file_name|><|fim▁begin|>for x in undefined(): # ty<caret>pe: int<|fim▁hole|><|fim▁end|> | pass |
<|file_name|>service_worker_micro_benchmark.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page
from telemetry import story
class ServiceWorkerBenchmarkPage(page.Page):
"""Page for workload to measure some specific functions in JS"""
def RunNavigateSteps(self, action_runner):
super(ServiceWorkerBenchmarkPage, self).RunNavigateSteps(action_runner)
action_runner.WaitForJavaScriptCondition('window.done')
class ServiceWorkerMicroBenchmarkPageSet(story.StorySet):
"""Page set for micro benchmarking of each functions with ServiceWorker"""
def __init__(self):
super(ServiceWorkerMicroBenchmarkPageSet, self).__init__(
archive_data_file='data/service_worker_micro_benchmark.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
# pylint: disable=line-too-long
# The latest code of localhost:8091 is from:
# https://github.com/horo-t/Service-Worker-Performance/tree/fix-flakyness
# (rev: 0cc35c2398526665399ca99fe53147ff81101408)
# TODO(falken): House the code in GoogleChrome's GitHub repository.
# pylint: enable=line-too-long
# Why: to measure performance of many concurrent fetches
self.AddStory(ServiceWorkerBenchmarkPage(
'http://localhost:8091/index.html', self,<|fim▁hole|><|fim▁end|> | make_javascript_deterministic=False)) |
<|file_name|>base_function.py<|end_file_name|><|fim▁begin|>import numpy as np
def data_concat(result_a):
return np.concatenate(result_a, axis=0)
def data_mean(result_a):<|fim▁hole|> return result_a
def data_stack(result_a):
return np.stack(result_a)
def data_single(result_a):
return result_a[0]
def data_stack_mean(result_a):
return np.mean(data_stack(result_a), axis=0)<|fim▁end|> | return np.mean(result_a)
def data_identity(result_a): |
<|file_name|>FirstFragment.java<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*<|fim▁hole|>*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.seamusdawkins.tablayout.fragments;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.seamusdawkins.tablayout.R;
public class FirstFragment extends Fragment {
TextView tv;
RelativeLayout rl;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_one, container, false);
tv = (TextView) rootView.findViewById(R.id.action);
tv.setText(R.string.str_first);
return rootView;
}
}<|fim▁end|> | * Copyright (c) 2015 Cleidimar Viana |
<|file_name|>go90.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
ExtractorError,
int_or_none,
parse_age_limit,
parse_iso8601,<|fim▁hole|>
class Go90IE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?go90\.com/videos/(?P<id>[0-9a-zA-Z]+)'
_TEST = {
'url': 'https://www.go90.com/videos/84BUqjLpf9D',
'md5': 'efa7670dbbbf21a7b07b360652b24a32',
'info_dict': {
'id': '84BUqjLpf9D',
'ext': 'mp4',
'title': 'Daily VICE - Inside The Utah Coalition Against Pornography Convention',
'description': 'VICE\'s Karley Sciortino meets with activists who discuss the state\'s strong anti-porn stance. Then, VICE Sports explains NFL contracts.',
'timestamp': 1491868800,
'upload_date': '20170411',
'age_limit': 14,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
video_data = self._download_json(
'https://www.go90.com/api/view/items/' + video_id,
video_id, headers={
'Content-Type': 'application/json; charset=utf-8',
}, data=b'{"client":"web","device_type":"pc"}')
if video_data.get('requires_drm'):
raise ExtractorError('This video is DRM protected.', expected=True)
main_video_asset = video_data['main_video_asset']
episode_number = int_or_none(video_data.get('episode_number'))
series = None
season = None
season_id = None
season_number = None
for metadata in video_data.get('__children', {}).get('Item', {}).values():
if metadata.get('type') == 'show':
series = metadata.get('title')
elif metadata.get('type') == 'season':
season = metadata.get('title')
season_id = metadata.get('id')
season_number = int_or_none(metadata.get('season_number'))
title = episode = video_data.get('title') or series
if series and series != title:
title = '%s - %s' % (series, title)
thumbnails = []
formats = []
subtitles = {}
for asset in video_data.get('assets'):
if asset.get('id') == main_video_asset:
for source in asset.get('sources', []):
source_location = source.get('location')
if not source_location:
continue
source_type = source.get('type')
if source_type == 'hls':
m3u8_formats = self._extract_m3u8_formats(
source_location, video_id, 'mp4',
'm3u8_native', m3u8_id='hls', fatal=False)
for f in m3u8_formats:
mobj = re.search(r'/hls-(\d+)-(\d+)K', f['url'])
if mobj:
height, tbr = mobj.groups()
height = int_or_none(height)
f.update({
'height': f.get('height') or height,
'width': f.get('width') or int_or_none(height / 9.0 * 16.0 if height else None),
'tbr': f.get('tbr') or int_or_none(tbr),
})
formats.extend(m3u8_formats)
elif source_type == 'dash':
formats.extend(self._extract_mpd_formats(
source_location, video_id, mpd_id='dash', fatal=False))
else:
formats.append({
'format_id': source.get('name'),
'url': source_location,
'width': int_or_none(source.get('width')),
'height': int_or_none(source.get('height')),
'tbr': int_or_none(source.get('bitrate')),
})
for caption in asset.get('caption_metadata', []):
caption_url = caption.get('source_url')
if not caption_url:
continue
subtitles.setdefault(caption.get('language', 'en'), []).append({
'url': caption_url,
'ext': determine_ext(caption_url, 'vtt'),
})
elif asset.get('type') == 'image':
asset_location = asset.get('location')
if not asset_location:
continue
thumbnails.append({
'url': asset_location,
'width': int_or_none(asset.get('width')),
'height': int_or_none(asset.get('height')),
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnails': thumbnails,
'description': video_data.get('short_description'),
'like_count': int_or_none(video_data.get('like_count')),
'timestamp': parse_iso8601(video_data.get('released_at')),
'series': series,
'episode': episode,
'season': season,
'season_id': season_id,
'season_number': season_number,
'episode_number': episode_number,
'subtitles': subtitles,
'age_limit': parse_age_limit(video_data.get('rating')),
}<|fim▁end|> | ) |
<|file_name|>retrieve.py<|end_file_name|><|fim▁begin|>"""Retrieves the paths to the required schedule files."""
from datetime import datetime
import logging
import pytz
from unipath import Path
# Setup Logging
LOG = logging.getLogger(__name__)
def get_date(tz_string):
"""Generates todays date as string (in format yyyy-mm-dd)"""
schedule_tz = pytz.timezone(tz_string)
today = datetime.now(schedule_tz)
return today.strftime('%Y-%m-%d')
def retrieve_schedule_file_paths(config):
"""Creates the path to the schedules from supplied config file."""
schedule_loc = config['excel']['schedule_loc']
date = get_date(config['timezone'])
# Assemble the details for the assistant schedule
file_name_a = '{0}_{1}.{2}'.format(
date, 'assistant', config['excel']['ext_a']
)
# Assemble the details for the pharmacist schedule
file_name_p = '{0}_{1}.{2}'.format(
date, 'pharmacist', config['excel']['ext_p']
)
<|fim▁hole|> # Assemble the details for the technician schedule
file_name_t = '{0}_{1}.{2}'.format(
date, 'technician', config['excel']['ext_t']
)
# Return the final details
return {
'a': Path(schedule_loc, file_name_a),
'p': Path(schedule_loc, file_name_p),
't': Path(schedule_loc, file_name_t),
}<|fim▁end|> | |
<|file_name|>holme_kim.go<|end_file_name|><|fim▁begin|>// Copyright ©2015 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gen
import (
"errors"
"fmt"
"math/rand"
"github.com/gonum/graph"
"github.com/gonum/graph/simple"
"github.com/gonum/stat/sampleuv"
)
// TunableClusteringScaleFree constructs a graph in the destination, dst, of order n.
// The graph is constructed successively starting from an m order graph with one node
// having degree m-1. At each iteration of graph addition, one node is added with m
// additional edges joining existing nodes with probability proportional to the nodes'
// degrees. The edges are formed as a triad with probability, p.
// If src is not nil it is used as the random source, otherwise rand.Float64 and
// rand.Intn are used.
//
// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452.
func TunableClusteringScaleFree(dst graph.UndirectedBuilder, n, m int, p float64, src *rand.Rand) error {
if p < 0 || p > 1 {
return fmt.Errorf("gen: bad probability: p=%v", p)
}
if n <= m {<|fim▁hole|> }
var (
rnd func() float64
rndN func(int) int
)
if src == nil {
rnd = rand.Float64
rndN = rand.Intn
} else {
rnd = src.Float64
rndN = src.Intn
}
// Initial condition.
wt := make([]float64, n)
for u := 0; u < m; u++ {
if !dst.Has(simple.Node(u)) {
dst.AddNode(simple.Node(u))
}
// We need to give equal probability for
// adding the first generation of edges.
wt[u] = 1
}
ws := sampleuv.NewWeighted(wt, src)
for i := range wt {
// These weights will organically grow
// after the first growth iteration.
wt[i] = 0
}
// Growth.
for v := m; v < n; v++ {
var u int
pa:
for i := 0; i < m; i++ {
// Triad formation.
if i != 0 && rnd() < p {
for _, w := range permute(dst.From(simple.Node(u)), rndN) {
wid := w.ID()
if wid == v || dst.HasEdgeBetween(w, simple.Node(v)) {
continue
}
dst.SetEdge(simple.Edge{F: w, T: simple.Node(v), W: 1})
wt[wid]++
wt[v]++
continue pa
}
}
// Preferential attachment.
for {
var ok bool
u, ok = ws.Take()
if !ok {
return errors.New("gen: depleted distribution")
}
if u == v || dst.HasEdgeBetween(simple.Node(u), simple.Node(v)) {
continue
}
dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
wt[u]++
wt[v]++
break
}
}
ws.ReweightAll(wt)
}
return nil
}
func permute(n []graph.Node, rnd func(int) int) []graph.Node {
for i := range n[:len(n)-1] {
j := rnd(len(n)-i) + i
n[i], n[j] = n[j], n[i]
}
return n
}
// PreferentialAttachment constructs a graph in the destination, dst, of order n.
// The graph is constructed successively starting from an m order graph with one
// node having degree m-1. At each iteration of graph addition, one node is added
// with m additional edges joining existing nodes with probability proportional
// to the nodes' degrees. If src is not nil it is used as the random source,
// otherwise rand.Float64 is used.
//
// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452
// after 10.1126/science.286.5439.509.
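//
// A minimal usage sketch (illustrative only; same assumptions as the sketch
// above, with a nil source falling back to math/rand's global functions):
//
//	dst := simple.NewUndirectedGraph(0, math.Inf(1))
//	if err := PreferentialAttachment(dst, 100, 3, nil); err != nil {
//		log.Fatal(err)
//	}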
func PreferentialAttachment(dst graph.UndirectedBuilder, n, m int, src *rand.Rand) error {
if n <= m {
return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m)
}
// Initial condition.
wt := make([]float64, n)
for u := 0; u < m; u++ {
if !dst.Has(simple.Node(u)) {
dst.AddNode(simple.Node(u))
}
// We need to give equal probability for
// adding the first generation of edges.
wt[u] = 1
}
ws := sampleuv.NewWeighted(wt, src)
for i := range wt {
// These weights will organically grow
// after the first growth iteration.
wt[i] = 0
}
// Growth.
for v := m; v < n; v++ {
for i := 0; i < m; i++ {
// Preferential attachment.
u, ok := ws.Take()
if !ok {
return errors.New("gen: depleted distribution")
}
dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
wt[u]++
wt[v]++
}
ws.ReweightAll(wt)
}
return nil
}<|fim▁end|> | return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) |
<|file_name|>return_codes.py<|end_file_name|><|fim▁begin|># return_codes.py
#
# Copyright (C) 2018 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2
#
# Return codes of binaries used throughout this project.
class RC(object):
"""Return codes of binaries used throughout this project. See ``source``
for more details."""
SUCCESS = 0
<|fim▁hole|> CANNOT_CREATE_FLAG = 4 # read-only fs?
# kano-feedback-cli specific.
ERROR_SEND_DATA = 10
ERROR_COPY_ARCHIVE = 11
ERROR_CREATE_FLAG = 12<|fim▁end|> | INCORRECT_ARGS = 1
NO_INTERNET = 2
NO_KANO_WORLD_ACC = 3 |
<|file_name|>patrol_smach_iterator.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" patrol_smach_iterator.py - Version 1.0 2013-10-23
Control a robot using SMACH to patrol a square area a specified number of times
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2013 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy
import smach
from smach import StateMachine, Iterator
from smach_ros import SimpleActionState, IntrospectionServer
import actionlib
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from geometry_msgs.msg import Pose, PoseWithCovarianceStamped, Point, Quaternion, Twist
from tf.transformations import quaternion_from_euler
from visualization_msgs.msg import Marker
from math import radians, pi
class main():
def __init__(self):
rospy.init_node('patrol_smach', anonymous=False)
# Set the shutdown function (stop the robot)
rospy.on_shutdown(self.shutdown)
# Initialize a number of parameters and variables
self.init()
# Subscribe to the move_base action server
self.move_base = actionlib.SimpleActionClient("move_base", MoveBaseAction)
rospy.loginfo("Waiting for move_base action server...")
# Wait up to 60 seconds for the action server to become available
self.move_base.wait_for_server(rospy.Duration(60))
rospy.loginfo("Connected to move_base action server")
# Track success rate of getting to the goal locations
self.n_succeeded = 0
self.n_aborted = 0
self.n_preempted = 0
self.n_patrols = 2
# Turn the waypoints into SMACH states
nav_states = list()
for waypoint in self.waypoints:
nav_goal = MoveBaseGoal()
nav_goal.target_pose.header.frame_id = 'map'
nav_goal.target_pose.pose = waypoint
move_base_state = SimpleActionState('move_base', MoveBaseAction, goal=nav_goal, result_cb=self.move_base_result_cb,
exec_timeout=rospy.Duration(10.0),
server_wait_timeout=rospy.Duration(10.0))
nav_states.append(move_base_state)
<|fim▁hole|> self.sm = StateMachine(outcomes=['succeeded','aborted','preempted'])
with self.sm:
# Initialize the iterator
self.sm_patrol_iterator = Iterator(outcomes = ['succeeded','preempted','aborted'],
input_keys = [],
it = lambda: range(0, self.n_patrols),
output_keys = [],
it_label = 'index',
exhausted_outcome = 'succeeded')
with self.sm_patrol_iterator:
# Initialize the patrol state machine
self.sm_patrol = StateMachine(outcomes=['succeeded','aborted','preempted','continue'])
# Add the states to the state machine with the appropriate transitions
with self.sm_patrol:
StateMachine.add('NAV_STATE_0', nav_states[0], transitions={'succeeded':'NAV_STATE_1','aborted':'NAV_STATE_1','preempted':'NAV_STATE_1'})
StateMachine.add('NAV_STATE_1', nav_states[1], transitions={'succeeded':'NAV_STATE_2','aborted':'NAV_STATE_2','preempted':'NAV_STATE_2'})
StateMachine.add('NAV_STATE_2', nav_states[2], transitions={'succeeded':'NAV_STATE_3','aborted':'NAV_STATE_3','preempted':'NAV_STATE_3'})
StateMachine.add('NAV_STATE_3', nav_states[3], transitions={'succeeded':'NAV_STATE_4','aborted':'NAV_STATE_4','preempted':'NAV_STATE_4'})
StateMachine.add('NAV_STATE_4', nav_states[0], transitions={'succeeded':'continue','aborted':'continue','preempted':'continue'})
# Close the sm_patrol machine and add it to the iterator
Iterator.set_contained_state('PATROL_STATE', self.sm_patrol, loop_outcomes=['continue'])
# Close the top level state machine
StateMachine.add('PATROL_ITERATOR', self.sm_patrol_iterator, {'succeeded':'succeeded', 'aborted':'aborted'})
# Create and start the SMACH introspection server
intro_server = IntrospectionServer('patrol', self.sm, '/SM_ROOT')
intro_server.start()
# Execute the state machine
sm_outcome = self.sm.execute()
rospy.loginfo('State Machine Outcome: ' + str(sm_outcome))
intro_server.stop()
def move_base_result_cb(self, userdata, status, result):
if status == actionlib.GoalStatus.SUCCEEDED:
self.n_succeeded += 1
elif status == actionlib.GoalStatus.ABORTED:
self.n_aborted += 1
elif status == actionlib.GoalStatus.PREEMPTED:
self.n_preempted += 1
try:
rospy.loginfo("Success rate: " + str(100.0 * self.n_succeeded / (self.n_succeeded + self.n_aborted + self.n_preempted)))
except:
pass
def init(self):
# How big is the square we want the robot to patrol?
self.square_size = rospy.get_param("~square_size", 1.0) # meters
# How many times should we execute the patrol loop
self.n_patrols = rospy.get_param("~n_patrols", 3)
# Create a list to hold the target quaternions (orientations)
quaternions = list()
# First define the corner orientations as Euler angles
euler_angles = (pi/2, pi, 3*pi/2, 0)
# Then convert the angles to quaternions
for angle in euler_angles:
q_angle = quaternion_from_euler(0, 0, angle, axes='sxyz')
q = Quaternion(*q_angle)
quaternions.append(q)
# Create a list to hold the waypoint poses
self.waypoints = list()
# Append each of the four waypoints to the list. Each waypoint
# is a pose consisting of a position and orientation in the map frame.
self.waypoints.append(Pose(Point(0.0, 0.0, 0.0), quaternions[3]))
self.waypoints.append(Pose(Point(self.square_size, 0.0, 0.0), quaternions[0]))
self.waypoints.append(Pose(Point(self.square_size, self.square_size, 0.0), quaternions[1]))
self.waypoints.append(Pose(Point(0.0, self.square_size, 0.0), quaternions[2]))
# Initialize the waypoint visualization markers for RViz
self.init_waypoint_markers()
# Set a visualization marker at each waypoint
for waypoint in self.waypoints:
p = Point()
p = waypoint.position
self.waypoint_markers.points.append(p)
# Publisher to manually control the robot (e.g. to stop it)
self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist)
rospy.loginfo("Starting SMACH test")
# Publish the waypoint markers
self.marker_pub.publish(self.waypoint_markers)
rospy.sleep(1)
self.marker_pub.publish(self.waypoint_markers)
def init_waypoint_markers(self):
# Set up our waypoint markers
marker_scale = 0.2
marker_lifetime = 0 # 0 is forever
marker_ns = 'waypoints'
marker_id = 0
marker_color = {'r': 1.0, 'g': 0.7, 'b': 1.0, 'a': 1.0}
# Define a marker publisher.
self.marker_pub = rospy.Publisher('waypoint_markers', Marker)
# Initialize the marker points list.
self.waypoint_markers = Marker()
self.waypoint_markers.ns = marker_ns
self.waypoint_markers.id = marker_id
self.waypoint_markers.type = Marker.CUBE_LIST
self.waypoint_markers.action = Marker.ADD
self.waypoint_markers.lifetime = rospy.Duration(marker_lifetime)
self.waypoint_markers.scale.x = marker_scale
self.waypoint_markers.scale.y = marker_scale
self.waypoint_markers.color.r = marker_color['r']
self.waypoint_markers.color.g = marker_color['g']
self.waypoint_markers.color.b = marker_color['b']
self.waypoint_markers.color.a = marker_color['a']
self.waypoint_markers.header.frame_id = 'odom'
self.waypoint_markers.header.stamp = rospy.Time.now()
self.waypoint_markers.points = list()
def shutdown(self):
rospy.loginfo("Stopping the robot...")
self.sm_patrol.request_preempt()
self.cmd_vel_pub.publish(Twist())
rospy.sleep(1)
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
rospy.loginfo("SMACH test finished.")<|fim▁end|> | # Initialize the top level state machine |
<|file_name|>training_dataset_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
import six
from tensorflow.python import keras
from tensorflow.python.data.experimental.ops import cardinality
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import ops
from tensorflow.python.keras import callbacks
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
class BatchCounterCallback(callbacks.Callback):
def __init__(self):
self.batch_begin_count = 0
self.batch_end_count = 0
def on_batch_begin(self, *args, **kwargs):
self.batch_begin_count += 1
def on_batch_end(self, *args, **kwargs):
self.batch_end_count += 1
class TestTrainingWithDataset(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_calling_model_on_same_dataset(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae']
model.compile(
optimizer,
loss,
metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
# Call fit with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_dataset(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(
optimizer,
loss,
metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat() # Infinite dataset.
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset, steps=2, verbose=1)
model.predict(dataset, steps=2)
# Test with validation data
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=0,
validation_data=dataset, validation_steps=2)
# Test with validation split
with self.assertRaises(ValueError):
model.fit(dataset,
epochs=1, steps_per_epoch=2, verbose=0,
validation_split=0.5, validation_steps=2)
# Test with sample weight.
sample_weight = np.random.random((10,))
with self.assertRaisesRegexp(
ValueError, r'`sample_weight` argument is not supported .+dataset'):
model.fit(
dataset,
epochs=1,
steps_per_epoch=2,
verbose=0,
sample_weight=sample_weight)
with self.assertRaisesRegexp(
ValueError, '(you should not specify a target)|'
'(`y` argument is not supported when using dataset as input.)'):
model.fit(dataset, dataset,
epochs=1, steps_per_epoch=2, verbose=0)
# With an infinite dataset, `steps_per_epoch`/`steps` argument is required.
with self.assertRaises(ValueError):
model.fit(dataset, epochs=1, verbose=0)
with self.assertRaises(ValueError):
model.evaluate(dataset, verbose=0)
with self.assertRaises(ValueError):
model.predict(dataset, verbose=0)
@keras_parameterized.run_with_all_model_types(exclude_models='sequential')
@keras_parameterized.run_all_keras_modes
def test_training_and_eval_methods_on_multi_input_output_dataset(self):
input_a = keras.layers.Input(shape=(3,), name='input_1')
input_b = keras.layers.Input(shape=(3,), name='input_2')
dense = keras.layers.Dense(4, name='dense')
dropout = keras.layers.Dropout(0.5, name='dropout')
branch_a = [input_a, dense]
branch_b = [input_b, dense, dropout]
model = testing_utils.get_multi_io_model(branch_a, branch_b)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
input_a_np = np.random.random((10, 3)).astype(dtype=np.float32)
input_b_np = np.random.random((10, 3)).astype(dtype=np.float32)
output_d_np = np.random.random((10, 4)).astype(dtype=np.float32)
output_e_np = np.random.random((10, 4)).astype(dtype=np.float32)
# Test with tuples
dataset_tuple = dataset_ops.Dataset.from_tensor_slices((
(input_a_np, input_b_np), (output_d_np, output_e_np)))
dataset_tuple = dataset_tuple.repeat(100)
dataset_tuple = dataset_tuple.batch(10)
model.fit(dataset_tuple, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset_tuple, steps=2, verbose=1)
# Test with dict
input_dict = {'input_1': input_a_np, 'input_2': input_b_np}
if testing_utils.get_model_type() == 'subclass':
output_dict = {'output_1': output_d_np, 'output_2': output_e_np}
else:
output_dict = {'dense': output_d_np, 'dropout': output_e_np}
dataset_dict = dataset_ops.Dataset.from_tensor_slices((
input_dict, output_dict))
dataset_dict = dataset_dict.repeat(100)
dataset_dict = dataset_dict.batch(10)
model.fit(dataset_dict, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset_dict, steps=2, verbose=1)
predict_dataset_dict = dataset_ops.Dataset.from_tensor_slices(
input_dict)
predict_dataset_dict = predict_dataset_dict.repeat(100)
predict_dataset_dict = predict_dataset_dict.batch(10)
model.predict(predict_dataset_dict, steps=1)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_dataset_with_sample_weights(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
loss = 'mse'
metrics = ['mae', metrics_module.CategoricalAccuracy()]
model.compile(
optimizer,
loss,
metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), np.float32)
targets = np.zeros((10, 4), np.float32)
sample_weights = np.ones((10), np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets,
sample_weights))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
model.evaluate(dataset, steps=2, verbose=1)
model.predict(dataset, steps=2)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_dataset_with_sample_weights_correctness(self):
x = keras.layers.Input(shape=(1,), name='input')
y = keras.layers.Dense(
1, kernel_initializer='ones', bias_initializer='zeros', name='dense')(x)
model = keras.Model(x, y)
optimizer = 'rmsprop'
loss = 'mse'
model.compile(optimizer, loss)
inputs = np.array([[0], [1], [2], [3]], np.float32)
targets = np.array([[2], [4], [6], [8]], np.float32)
sample_weights = np.array([0.25, 0.5, 0.75, 1], np.float32)
ds = dataset_ops.Dataset.from_tensor_slices((inputs, targets,
sample_weights)).batch(2)
result = model.evaluate(ds, verbose=1)
# The per sample loss is multiplied by the corresponding sample weight. The
# average of these weighted losses is the return value of the `evaluate`
# call. For example, in the test above the average weighted loss is
# calculated in the following manner:
# ((2-0)^2) * 0.25 + ((4-1)^2) * 0.5 + ((6-2)^2 * 0.75) + ((8-3)^2 * 1)
# equals 42.5 / 4 = 10.625
self.assertEqual(result, 10.625)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_dataset_with_sparse_labels(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = 'rmsprop'
model.compile(
optimizer,
loss='sparse_categorical_crossentropy',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=10, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))<|fim▁hole|>
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
@keras_parameterized.run_all_keras_modes
def test_dataset_fit_correctness(self):
class SumLayer(keras.layers.Layer):
def build(self, _):
self.w = self.add_weight('w', ())
def call(self, inputs):
return keras.backend.sum(inputs, axis=1, keepdims=True) + self.w * 0
model = keras.Sequential([SumLayer(input_shape=(2,))])
model.compile(
'rmsprop',
loss='mae',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((40, 2), dtype=np.float32)
inputs[10:20, :] = 2
inputs[20:30, :] = 1
inputs[30:, :] = 4
targets = np.zeros((40, 1), dtype=np.float32)
# Test correctness with `steps_per_epoch`.
train_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
val_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
history = model.fit(train_dataset,
epochs=2, steps_per_epoch=2, verbose=1,
validation_data=val_dataset, validation_steps=2)
self.assertAllClose(history.history['loss'],
[inputs[:20].sum() / 20, inputs[20:].sum() / 20])
# The validation dataset will be reset at the end of each validation run.
self.assertAllClose(history.history['val_loss'],
[inputs[:20].sum() / 20, inputs[:20].sum() / 20])
# Test correctness with dataset reset.
train_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
val_dataset = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).batch(10)
history = model.fit(train_dataset,
epochs=2, verbose=1, validation_data=val_dataset)
self.assertAllClose(
history.history['loss'],
[inputs.sum() / 40, inputs.sum() / 40])
self.assertAllClose(
history.history['val_loss'],
[inputs.sum() / 40, inputs.sum() / 40])
def test_dataset_input_shape_validation(self):
with ops.get_default_graph().as_default(), self.cached_session():
model = testing_utils.get_small_functional_mlp(1, 4, input_dim=3)
model.compile(optimizer='rmsprop', loss='mse')
# User forgets to batch the dataset
inputs = np.zeros((10, 3))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
with self.assertRaisesRegexp(
ValueError,
r'expected (.*?) to have shape \(3,\) but got array with shape \(1,\)'
):
model.train_on_batch(dataset)
# Wrong input shape
inputs = np.zeros((10, 5))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
with self.assertRaisesRegexp(ValueError,
r'expected (.*?) to have shape \(3,\)'):
model.train_on_batch(dataset)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_known_cardinality_no_steps_arg(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.batch(10)
batch_counter = BatchCounterCallback()
history = model.fit(dataset, epochs=2, verbose=1, callbacks=[batch_counter])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_end_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_unknown_cardinality_no_steps_arg(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
self.assertEqual(keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
history = model.fit(dataset, epochs=2, verbose=1, callbacks=[batch_counter])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_end_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_finite_dataset_unknown_cardinality_no_step_with_train_and_val(self):
class CaptureStdout(object):
def __enter__(self):
self._stdout = sys.stdout
string_io = six.StringIO()
sys.stdout = string_io
self._stringio = string_io
return self
def __exit__(self, *args):
self.output = self._stringio.getvalue()
sys.stdout = self._stdout
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
self.assertEqual(
keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
with CaptureStdout() as capture:
history = model.fit(
dataset,
epochs=2,
callbacks=[batch_counter],
validation_data=dataset.take(3))
lines = capture.output.splitlines()
self.assertIn('10/10', lines[-1])
self.assertLen(history.history['loss'], 2)
self.assertEqual(batch_counter.batch_begin_count, 21)
self.assertEqual(batch_counter.batch_end_count, 20)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_finite_dataset_unknown_cardinality_out_of_data(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 3), dtype=np.float32)
targets = np.random.randint(0, 4, size=100, dtype=np.int32)
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.filter(lambda x, y: True).batch(10)
self.assertEqual(
keras.backend.get_value(cardinality.cardinality(dataset)),
cardinality.UNKNOWN)
batch_counter = BatchCounterCallback()
with test.mock.patch.object(logging, 'warning') as mock_log:
# steps_per_epoch (200) is greater than the dataset size (100). As this is
# unexpected, training will stop and not make it to the second epoch.
history = model.fit(
dataset,
epochs=2,
verbose=1,
callbacks=[batch_counter],
steps_per_epoch=200)
self.assertIn(
'ran out of data; interrupting training.', str(mock_log.call_args))
self.assertIn(
'can generate at least '
'`steps_per_epoch * epochs` batches (in this case, 400 batches). '
'You may need to use the repeat() function when '
'building your dataset.', str(mock_log.call_args))
self.assertLen(history.history['loss'], 1)
self.assertEqual(batch_counter.batch_end_count, 10)
model.evaluate(dataset)
out = model.predict(dataset)
self.assertEqual(out.shape[0], 100)
@keras_parameterized.run_all_keras_modes
def test_with_external_loss(self):
inp = keras.Input(shape=(4,), name='inp1')
out = keras.layers.Dense(2)(inp)
model = keras.Model(inp, out)
model.add_loss(math_ops.reduce_mean(out))
model.compile('rmsprop')
x = np.ones((10, 4))
# dataset contains only features, no labels.
dataset = dataset_ops.Dataset.from_tensor_slices(x).repeat(10).batch(10)
model.fit(dataset)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_train_eval_with_steps(self):
# See b/142880049 for more details.
inp = keras.Input(shape=(4,), name='inp1')
out = keras.layers.Dense(2)(inp)
model = keras.Model(inp, out)
model.compile(
'rmsprop', loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((100, 4), dtype=np.float32)
targets = np.random.randint(0, 2, size=100, dtype=np.int32)
training_ds = dataset_ops.Dataset.from_tensor_slices(
(inputs, targets)).repeat().batch(10)
# Create eval dataset with generator, so that dataset won't contain the
# overall size metadata. Without eval_steps, we expect to run through all
# the data in this dataset every epoch.
def gen():
for _ in range(100):
yield (np.zeros(4, dtype=np.float32),
np.random.randint(0, 2, size=1, dtype=np.int32))
eval_ds = dataset_ops.Dataset.from_generator(
generator=gen,
output_types=('float64', 'int32'),
output_shapes=([4], [1])).batch(100)
batch_counter = BatchCounterCallback()
model.fit(
training_ds,
steps_per_epoch=10,
epochs=10,
validation_data=eval_ds,
callbacks=[batch_counter]
)
# Expect 10 batch from training per epoch.
self.assertEqual(batch_counter.batch_end_count, 100)
class TestMetricsWithDatasets(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
def test_metrics_correctness_with_dataset(self):
layers = [
keras.layers.Dense(8, activation='relu', input_dim=4,
kernel_initializer='ones'),
keras.layers.Dense(1, activation='sigmoid', kernel_initializer='ones')
]
model = testing_utils.get_model_from_layers(layers, (4,))
model.compile(
loss='binary_crossentropy',
metrics=['accuracy', metrics_module.BinaryAccuracy()],
optimizer='rmsprop',
run_eagerly=testing_utils.should_run_eagerly())
np.random.seed(123)
x = np.random.randint(10, size=(100, 4)).astype(np.float32)
y = np.random.randint(2, size=(100, 1)).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.batch(10)
outs = model.evaluate(dataset, steps=10)
self.assertEqual(np.around(outs[1], decimals=1), 0.5)
self.assertEqual(np.around(outs[2], decimals=1), 0.5)
y = np.zeros((100, 1), dtype=np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
outs = model.evaluate(dataset, steps=10)
self.assertEqual(outs[1], 0.)
self.assertEqual(outs[2], 0.)
if __name__ == '__main__':
test.main()<|fim▁end|> | dataset = dataset.repeat(100)
dataset = dataset.batch(10) |
<|file_name|>ResponsiveFontSizesChart.js<|end_file_name|><|fim▁begin|>import * as React from 'react';
// import of a small, pure module in a private demo
// bundle size and module duplication is negligible
/* eslint-disable-next-line no-restricted-imports */
import { convertLength } from '@mui/material/styles/cssUtils';
import { createTheme, responsiveFontSizes } from '@mui/material/styles';
import Box from '@mui/material/Box';
import {
Legend,
Tooltip,
LineChart,
Line,
XAxis,
YAxis,
Label,
ResponsiveContainer,
} from 'recharts';
let theme = createTheme();
theme = responsiveFontSizes(theme);
const colors = [
'#443dc2',
'#2060df',
'#277e91',
'#378153',
'#4d811d',
'#63780d',
'#996600',
];
const variants = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'subtitle1'];
export default function ResponsiveFontSizes() {
const convert = convertLength(theme.typography.htmlFontSize);
const toPx = (rem) => parseFloat(convert(rem, 'px'));
const series = variants.map((variantName) => {
const variant = theme.typography[variantName];
const data = [];
data.push({
viewport: 0,
fontSize: toPx(variant.fontSize),
});
theme.breakpoints.keys.forEach((key) => {
const viewport = theme.breakpoints.values[key];
const value = theme.breakpoints.up(key);
if (variant[value]) {
data.push({
viewport: viewport - 1,
fontSize: data[data.length - 1].fontSize,
});
data.push({
viewport,
fontSize: toPx(variant[value].fontSize),
});
} else if (key === 'xl') {
data.push({
viewport,
fontSize: data[data.length - 1].fontSize,
});
}
});
return {
name: variantName,
data,
};
});
return (<|fim▁hole|> top: 50,
right: 140,
bottom: 0,
left: 30,
}}
>
<XAxis dataKey="viewport" type="number">
<Label position="right" offset={30}>
viewport (px)
</Label>
</XAxis>
<YAxis dataKey="fontSize" type="number">
<Label position="top" offset={20}>
font-size (rem)
</Label>
</YAxis>
<Tooltip />
<Legend />
{series.map((serie, index) => (
<Line
dataKey="fontSize"
stroke={colors[index % colors.length]}
data={serie.data}
name={serie.name}
key={serie.name}
/>
))}
</LineChart>
</ResponsiveContainer>
</Box>
);
}<|fim▁end|> | <Box sx={{ height: 380, width: '100%', color: 'black' }}>
<ResponsiveContainer>
<LineChart
margin={{ |
<|file_name|>geocode.js<|end_file_name|><|fim▁begin|>var config = require('./config');
var express = require('express');
var superagent = require('superagent');
/**
* Auth Token
*/
var authToken = null;
var expires = 0;
var expires_in = 20160; // 14 days in minutes
/**
* Urls
*/
var OAUTH = 'https://www.arcgis.com/sharing/oauth2/token';
var GEOCODE =
'http://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer';
/**
* ESRI Query Parameter Defaults
*/
var CATEGORY = 'Address';
var CENTER = config.geocode.center;
var DISTANCE = 160 * 1000; // meters
/**
* Expose `router`
*/
var router = module.exports = express.Router();
/**
* Expose `encode` & `reverse`
*/
module.exports.encode = encode;
module.exports.reverse = reverse;
module.exports.suggest = suggest;
/**
* Geocode
*/
router.get('/:address', function(req, res) {
encode(req.params.address, function(err, addresses) {
if (err) {
console.error(err);
res.status(400).send(err);
} else {
var ll = addresses[0].feature.geometry;
res.status(200).send({
lng: ll.x,
lat: ll.y
});
}
});
});
/**
* Reverse
*/
router.get('/reverse/:coordinate', function(req, res) {
reverse(req.params.coordinate, function(err, address) {
if (err) {
console.error(err);
res.status(400).send(err);
} else {
res.status(200).send(address);
}
});
});
/**
* Suggest
*/
router.get('/suggest/:text', function(req, res) {
suggest(req.params.text, function(err, suggestions) {
if (err) {
console.error(err);
res.status(400).send(err);
} else {
res.status(200).send(suggestions);
}
});
});
/**
* Geocode
*/
function encode(address, callback) {
var text = '';
if (address.address) {
text = address.address + ', ' + address.city + ', ' + address.state + ' ' +
address.zip;
} else {
text = address;
}
auth(callback, function(token) {
superagent
.get(GEOCODE + '/find')
.query({
category: CATEGORY,
f: 'json',
text: text,
token: token
})
.end(function(err, res) {
if (err) {
callback(err, res);
} else {
var body = parseResponse(res, callback);
if (!body || !body.locations || body.locations.length === 0) {
callback(new Error('Location not found.'));
} else {
callback(null, body.locations);
}
}
});
});
}
/**
* Reverse geocode
*/
function reverse(ll, callback) {
var location = ll;
if (ll.lng) {
location = ll.lng + ',' + ll.lat;
} else if (ll.x) {
location = ll.x + ',' + ll.y;
} else if (ll[0]) {
location = ll[0] + ',' + ll[1];
}
auth(callback, function(token) {
superagent
.get(GEOCODE + '/reverseGeocode')
.query({
f: 'json',
location: location,
token: token
})
.end(function(err, res) {
if (err) {
callback(err, res);
} else {
var body = parseResponse(res, callback);
if (!body || !body.address) {
callback(new Error('Location not found.'));
} else {
var addr = body.address;
callback(null, {
address: addr.Address,<|fim▁hole|> city: addr.City,
county: addr.Subregion,
state: addr.Region,
zip: parseInt(addr.Postal, 10),
country: addr.CountryCode
});
}
}
});
});
}
/**
* Auto suggest
*/
function suggest(text, callback) {
auth(callback, function(token) {
superagent
.get(GEOCODE + '/suggest')
.query({
category: CATEGORY,
distance: DISTANCE,
f: 'json',
location: CENTER,
text: text,
token: token
})
.end(function(err, res) {
if (err) {
callback(err, res);
} else {
var body = parseResponse(res, callback);
callback(null, body.suggestions);
}
});
});
}
/**
* Auth?
*/
function auth(callback, next) {
generateAuthToken(function(err, token) {
if (err) {
callback(err);
} else {
next(token);
}
});
}
/**
* Parse
*/
function parseResponse(res, callback) {
try {
return JSON.parse(res.text);
} catch (e) {
callback(e);
}
}
/**
* Generate an auth token
*/
function generateAuthToken(callback) {
// If we're within 7 days of auth token expiration, generate a new one
if ((expires - expires_in / 2) < Date.now().valueOf()) {
superagent
.get(OAUTH)
.query({
client_id: config.arcgis_id,
client_secret: config.arcgis_secret,
expiration: expires_in,
grant_type: 'client_credentials'
})
.end(function(err, res) {
if (err || res.error || !res.ok) {
callback(err || res.error || res.text);
} else {
authToken = res.body.access_token;
// Set the expires time
expires = new Date();
expires.setSeconds(expires.getSeconds() + res.body.expires_in);
expires = expires.valueOf();
callback(null, authToken);
}
});
} else {
callback(null, authToken);
}
}<|fim▁end|> | neighborhood: addr.Neighborhood, |
<|file_name|>enhancers.js<|end_file_name|><|fim▁begin|>import { applyMiddleware, compose } from 'redux';
import history from './history';
import { routerMiddleware } from 'react-router-redux';
import thunk from 'redux-thunk';
export default compose(
applyMiddleware(thunk, routerMiddleware(history))<|fim▁hole|><|fim▁end|> | ); |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, url<|fim▁hole|>from .views import FeedList, ImportView, AddView
from .ajax import mark_as_read
urlpatterns = patterns(
'',
url(r'^$', FeedList.as_view(), name="feedme-feed-list"),
url(r'^by_category/(?P<category>[-\w]+)/$', FeedList.as_view(),
name='feedme-feed-list-by-category'),
url(r'^by_feed/(?P<feed_id>[-\w]+)/$', FeedList.as_view(),
name='feedme-feed-list-by-feed'),
url(r'^import/$', ImportView.as_view(),
name='feedme-import-google-takeout'),
url(r'^ajax/mark_as_read/$', mark_as_read,
name='feedme-mark-as-read-ajax'),
url(r'^ajax/add/$', AddView.as_view(), name='feedme-add-ajax'),
)<|fim▁end|> | |
<|file_name|>WebChangeCustomerPassword.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package elektra;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.BufferedOutputStream;
import java.io.FileWriter;
import java.util.ResourceBundle;
import java.util.Properties;
import java.util.Date;
import java.net.*;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletContext;
import javax.servlet.ServletConfig;
import javax.servlet.http.HttpSession;
import javax.servlet.RequestDispatcher;
import javax.mail.*;
import javax.activation.*;
import javax.mail.internet.*;
/**
*
* @author Rolfs
*/
public class WebChangeCustomerPassword extends HttpServlet
{
protected String szElektraConfig = "";
protected String szElektraServer = "";
protected String szElektraServerPort = "";
protected String szRequest = "27"; // Change customer password
protected String szVersion = "270";
protected Integer nDebug;
protected String strJSP = "";
protected String strJSPError = "";
protected String strJSPReload = "";
protected ServletContext context;
ResourceBundle resource = ResourceBundle.getBundle("elektra.Resource");
/** Address/name of the mail server, defined in elektra.properties */
protected String szMailServer = "";
/** Desired From address, defined in elektra.properties */
protected String szMailFrom = "";
/** List of CC addresses, defined in elektra.properties */
protected String szMailCC = "";
/** List of BCC addresses, defined in elektra.properties */
protected String szMailBCC = "";
/** List of Reply addresses, defined in elektra.properties */
protected String szMailReply = "";
/** User name on the mail server: optional, defined in elektra.properties */
protected String szMailUserName = "";
/** Password on the mail server: optional, defined in elektra.properties */
protected String szMailUserPassword = "";
public void init(ServletConfig config) throws ServletException
{
super.init(config);
// Remember context
context = config.getServletContext();
try { szElektraConfig = context.getInitParameter("ElektraConfig"); } catch (Exception e) { e.printStackTrace(); }
if (null == szElektraConfig)
{
throw new ServletException("Servlet configuration property \"ElektraConfig\" is not defined!");
}
String szDebug = "";
try { szDebug = context.getInitParameter("Debug"); } catch (Exception e) { e.printStackTrace(); }
if (null == szDebug)
nDebug = new Integer(0);
else
nDebug = new Integer(szDebug);
// Load properties
Properties props = new Properties();
try
{
props.load(new FileInputStream(szElektraConfig));
strJSP = props.getProperty("ChangeCustomerPasswordPage");
if (null == strJSP)
strJSP = "/jsp/webchangecustomerpassword.jsp";
strJSPError = props.getProperty("ChangeCustomerPasswordErrorPage");
if (null == strJSPError)
strJSPError = "/jsp/webchangecustomerpassworderr.jsp";
strJSPReload = props.getProperty("ChangeCustomerPasswordReloadPage");
if (null == strJSPReload)
strJSPReload = "/jsp/webchangecustomerpasswordreload.jsp";
}
catch (IOException ex)
{
ex.printStackTrace();
}
// Remember context
context = config.getServletContext();
}
/**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
Date d = new Date(); // Getting current date
ChangeCustomerPasswordInData changecustomerpasswordData = new ChangeCustomerPasswordInData();
boolean bHeaderPrinted = false;
int nCnt = 1;
WriteDebug wd = new WriteDebug();
String szTemp = "";
HttpSession session = request.getSession();
session.getLastAccessedTime();
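        // note: the returned timestamp is discarded, so this call has no effect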
//
try { szTemp = request.getParameter(resource.getString("SysID")); } catch (Exception e) { wd.write(response, "Feld "+resource.getString("SysID")+" fehlt", e, nDebug); }
changecustomerpasswordData.setSysID(szTemp);
//
try { szTemp = request.getParameter(resource.getString("ClientID")); } catch (Exception e) { wd.write(response, "Feld "+resource.getString("ClientID")+" fehlt", e, nDebug); }
changecustomerpasswordData.setClientID(szTemp);
//
try { szTemp = request.getParameter("CustomerNumber"); } catch (Exception e) { wd.write(response, "Feld CustomerNumber fehlt", e, nDebug); }
if (null == szTemp)
changecustomerpasswordData.setCustomerNumber("0");
else
changecustomerpasswordData.setCustomerNumber(szTemp);
try { szTemp = request.getParameter("Password"); } catch (Exception e) { wd.write(response, "Feld Password fehlt", e, nDebug); }
changecustomerpasswordData.setCustomerPassword(szTemp);
try { szTemp = request.getParameter("NewPassword"); } catch (Exception e) { wd.write(response, "Feld NewPassword fehlt", e, nDebug); }
changecustomerpasswordData.setNewCustomerPassword(szTemp);
try { szTemp = request.getParameter("EMail"); } catch (Exception e) { wd.write(response, "Feld EMail fehlt", e, nDebug); }
changecustomerpasswordData.setEMail(szTemp);
changecustomerpasswordData.setSession(session.getId());
        // Load configuration properties
Configuration Conf = new Configuration(context, szElektraConfig, "ElektraServerChangeCustomerPassword", changecustomerpasswordData.getSysID(), changecustomerpasswordData.getClientID(), nDebug.intValue());
szElektraServer = Conf.getServerName();
szElektraServerPort = Conf.getServerPort();
szMailServer = Conf.getMailServer();
szMailFrom = Conf.getMailFrom();
szMailCC = Conf.getMailCC();
szMailBCC = Conf.getMailBCC();
szMailReply = Conf.getMailReply();
if (null == szElektraServer)
{
wd.write(response, "Internal error!<br />System error: Elektra-Server not defined<br />", nDebug);
context.log("Elektra-Server not defined!");
}
else
{
            // Send the data to the DB server
Socket socket = null;
ChangeCustomerPasswordOutData changecustomerpasswordOut = new ChangeCustomerPasswordOutData();
if (nDebug.intValue() > 0)
context.log(" starting ChangeCustomerpassword.");
try
{
socket = new Socket(szElektraServer, Integer.parseInt(szElektraServerPort));
if (nDebug.intValue() > 1)
context.log(" socket created.");
//BufferedInputStream is = new BufferedInputStream(socket.getInputStream());
BufferedReader in = new BufferedReader( new InputStreamReader(socket.getInputStream()));
BufferedOutputStream os = new BufferedOutputStream(socket.getOutputStream());
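                // Wire format: each field is written followed by a tab (0x09)
                // separator; the Elektra server reads the fields positionally.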
                // Send data
try { os.write(szVersion.getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(szRequest.getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
// Encryption
try { os.write("NONE".getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
// compression
try { os.write("NONE".getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getSysID().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getClientID().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getCustomerNumber().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getCustomerPassword().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getSession().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getNewCustomerPassword().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
try { os.write(changecustomerpasswordData.getEMail().getBytes() ); } catch (IOException e) { e.printStackTrace(); }
os.write( 9 );
if (nDebug.intValue() > 1)
context.log(" flushing: "+os.toString());
os.flush();
                // Data sent; now wait for the booking response
changecustomerpasswordOut.readSock(in, szVersion);
if (nDebug.intValue() > 4)
{
wd.write(response, "<br><h2>Getting:</h2>Error: "+changecustomerpasswordOut.getErrorCode(), nDebug );
}
if (nDebug.intValue() > 0)
context.log(" changecustomerpasswordData performed.");
request.setAttribute("changecustomerpasswordData", changecustomerpasswordData);
request.setAttribute("changecustomerpasswordOut", changecustomerpasswordOut);
                // Error case
if (0 != changecustomerpasswordOut.getErrorCode())
{
context.log("Systemerror: "+changecustomerpasswordOut.getErrorCode());
                    // get the request dispatcher for the error JSP
                    RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(strJSPError);
                    // forward to the JSP that renders the page
dispatcher.forward(request, response);
}
else
{
                    // get the request dispatcher for the JSP file
                    // everything OK: send the password by mail
if (null != szMailServer)
{
String szMailText = "";
String szKopfText = "";
szKopfText = szKopfText.concat("Ihr neues Password");
/*
szMailText = szMailText.concat("Seher geehrte(r) ");
szMailText = szMailText.concat(changecustomerpasswordData.getSalution());
szMailText = szMailText.concat(" ");
szMailText = szMailText.concat(changecustomerpasswordOut.getName1());
szMailText = szMailText.concat(" ");
szMailText = szMailText.concat(changecustomerpasswordOut.getName2());
szMailText = szMailText.concat(" ");
szMailText = szMailText.concat(changecustomerpasswordOut.getName3());
szMailText = szMailText.concat(",\r\n\r\n");
szMailText = szMailText.concat("anbei das geaenderte Password für Ihren Kundenzugriff auf den Server von Frankfurt Ticket.\r\n\r\n");
szMailText = szMailText.concat("Ihre Kundennummer : ");
szMailText = szMailText.concat(changecustomerpasswordOut.getCustId());
szMailText = szMailText.concat("\r\n");
szMailText = szMailText.concat("Ihr geaendertes Password: ");
szMailText = szMailText.concat(changecustomerpasswordOut.getNewPassword());
szMailText = szMailText.concat("\r\n");
Mail m = new Mail(szMailServer, szMailFrom , changecustomerpasswordOut.getEMail(), szMailBCC, szMailCC, szMailReply, szKopfText, szMailText);
MailSender.getInstance().sendMail(m);
*/
}
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(strJSP);
                    // forward to the JSP that renders the page
dispatcher.forward(request, response);
}
}
catch (IOException ioex)
{
changecustomerpasswordOut.setErrorCode( -999);
changecustomerpasswordOut.setErrorMessage(ioex.getLocalizedMessage());
if ( (null != strJSPError) && !strJSPError.equals(""))
{
request.setAttribute("changecustomerpasswordData", changecustomerpasswordData);
request.setAttribute("changecustomerpasswordOut", changecustomerpasswordOut);
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(strJSPError);<|fim▁hole|> }
}
catch (Exception ex)
{
ex.printStackTrace();
}
finally
{
if (socket != null)
{
try
{
socket.close();
}
catch (IOException ioex)
{
ioex.printStackTrace();
}
}
}
}
}
// <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
/**
* Handles the HTTP <code>GET</code> method.
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Handles the HTTP <code>POST</code> method.
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Returns a short description of the servlet.
* @return a String containing servlet description
*/
@Override
public String getServletInfo() {
return "Short description";
}// </editor-fold>
}<|fim▁end|> | //leite auf die JSP Datei zum Anzeigen der Liste weiter
dispatcher.forward(request, response); |
<|file_name|>test_artifact.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import collections
import os
import tempfile
import unittest
import uuid
import qiime2.core.type
from qiime2.sdk import Artifact
from qiime2.sdk.result import ResultMetadata
import qiime2.core.archive as archive
from qiime2.core.testing.type import IntSequence1, FourInts, Mapping
from qiime2.core.testing.util import get_dummy_plugin, ArchiveTestingMixin
class TestArtifact(unittest.TestCase, ArchiveTestingMixin):
def setUp(self):
# Ignore the returned dummy plugin object, just run this to verify the
# plugin exists as the tests rely on it being loaded.
get_dummy_plugin()
# TODO standardize temporary directories created by QIIME 2
self.test_dir = tempfile.TemporaryDirectory(prefix='qiime2-test-temp-')
self.provenance_capture = archive.ImportProvenanceCapture()
def tearDown(self):
self.test_dir.cleanup()
def test_private_constructor(self):
with self.assertRaisesRegex(
NotImplementedError,
'Artifact constructor.*private.*Artifact.load'):
Artifact()
# Note on testing strategy below: many of the tests for `_from_view` and
# `load` are similar, with the exception that when `load`ing, the
# artifact's UUID is known so more specific assertions can be performed.
# While these tests appear somewhat redundant, they are important because
# they exercise the same operations on Artifact objects constructed from
# different sources, whose codepaths have very different internal behavior.
# This internal behavior could be tested explicitly but it is safer to test
# the public API behavior (e.g. as a user would interact with the object)
# in case the internals change.
def test_from_view(self):
artifact = Artifact._from_view(FourInts, [-1, 42, 0, 43], list,
self.provenance_capture)
self.assertEqual(artifact.type, FourInts)
# We don't know what the UUID is because it's generated within
# Artifact._from_view.
self.assertIsInstance(artifact.uuid, uuid.UUID)
self.assertEqual(artifact.view(list), [-1, 42, 0, 43])
# Can produce same view if called again.
self.assertEqual(artifact.view(list), [-1, 42, 0, 43])
def test_from_view_different_type_with_multiple_view_types(self):
artifact = Artifact._from_view(IntSequence1, [42, 42, 43, -999, 42],
list, self.provenance_capture)
self.assertEqual(artifact.type, IntSequence1)
self.assertIsInstance(artifact.uuid, uuid.UUID)
self.assertEqual(artifact.view(list),
[42, 42, 43, -999, 42])
self.assertEqual(artifact.view(list),
[42, 42, 43, -999, 42])
self.assertEqual(artifact.view(collections.Counter),
collections.Counter({42: 3, 43: 1, -999: 1}))
self.assertEqual(artifact.view(collections.Counter),
collections.Counter({42: 3, 43: 1, -999: 1}))
def test_from_view_and_save(self):
fp = os.path.join(self.test_dir.name, 'artifact.qza')
# Using four-ints data layout because it has multiple files, some of
# which are in a nested directory.
artifact = Artifact._from_view(FourInts, [-1, 42, 0, 43], list,
self.provenance_capture)
artifact.save(fp)
root_dir = str(artifact.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/file1.txt',
'data/file2.txt',
'data/nested/file3.txt',
'data/nested/file4.txt',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertArchiveMembers(fp, root_dir, expected)
def test_load(self):
saved_artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
fp = os.path.join(self.test_dir.name, 'artifact.qza')
saved_artifact.save(fp)
artifact = Artifact.load(fp)
self.assertEqual(artifact.type, FourInts)
self.assertEqual(artifact.uuid, saved_artifact.uuid)
self.assertEqual(artifact.view(list), [-1, 42, 0, 43])
self.assertEqual(artifact.view(list), [-1, 42, 0, 43])
def test_load_different_type_with_multiple_view_types(self):
saved_artifact = Artifact.import_data(IntSequence1,
[42, 42, 43, -999, 42])
fp = os.path.join(self.test_dir.name, 'artifact.qza')
saved_artifact.save(fp)
artifact = Artifact.load(fp)
self.assertEqual(artifact.type, IntSequence1)
self.assertEqual(artifact.uuid, saved_artifact.uuid)
self.assertEqual(artifact.view(list),
[42, 42, 43, -999, 42])
self.assertEqual(artifact.view(list),
[42, 42, 43, -999, 42])
self.assertEqual(artifact.view(collections.Counter),
collections.Counter({42: 3, 43: 1, -999: 1}))
self.assertEqual(artifact.view(collections.Counter),
collections.Counter({42: 3, 43: 1, -999: 1}))
def test_load_and_save(self):
fp1 = os.path.join(self.test_dir.name, 'artifact1.qza')
fp2 = os.path.join(self.test_dir.name, 'artifact2.qza')
artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact.save(fp1)
artifact = Artifact.load(fp1)
# Overwriting its source file works.
artifact.save(fp1)
# Saving to a new file works.
artifact.save(fp2)
root_dir = str(artifact.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/file1.txt',
'data/file2.txt',
'data/nested/file3.txt',
'data/nested/file4.txt',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertArchiveMembers(fp1, root_dir, expected)
root_dir = str(artifact.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/file1.txt',
'data/file2.txt',
'data/nested/file3.txt',
'data/nested/file4.txt',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertArchiveMembers(fp2, root_dir, expected)
def test_roundtrip(self):
fp1 = os.path.join(self.test_dir.name, 'artifact1.qza')
fp2 = os.path.join(self.test_dir.name, 'artifact2.qza')
artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact.save(fp1)
artifact1 = Artifact.load(fp1)
artifact1.save(fp2)
artifact2 = Artifact.load(fp2)
self.assertEqual(artifact1.type, artifact2.type)
self.assertEqual(artifact1.format, artifact2.format)
self.assertEqual(artifact1.uuid, artifact2.uuid)
self.assertEqual(artifact1.view(list),
artifact2.view(list))
# double view to make sure multiple views can be taken
self.assertEqual(artifact1.view(list),
artifact2.view(list))
def test_load_with_archive_filepath_modified(self):
# Save an artifact for use in the following test case.
fp = os.path.join(self.test_dir.name, 'artifact.qza')
Artifact.import_data(FourInts, [-1, 42, 0, 43]).save(fp)
# Load the artifact from a filepath then save a different artifact to
# the same filepath. Assert that both artifacts produce the correct
# views of their data.
#
# `load` used to be lazy, only extracting data when it needed to (e.g.
# when `save` or `view` was called). This was buggy as the filepath
# could have been deleted, or worse, modified to contain a different
# .qza file. Thus, the wrong archive could be extracted on demand, or
# the archive could be missing altogether. There isn't an easy
# cross-platform compatible way to solve this problem, so Artifact.load
# is no longer lazy and always extracts its data immediately. The real
# motivation for lazy loading was for quick inspection of archives
# without extracting/copying data, so that API is now provided through
# Artifact.peek.
artifact1 = Artifact.load(fp)
Artifact.import_data(FourInts, [10, 11, 12, 13]).save(fp)
artifact2 = Artifact.load(fp)
self.assertEqual(artifact1.view(list), [-1, 42, 0, 43])
self.assertEqual(artifact2.view(list), [10, 11, 12, 13])
def test_extract(self):
fp = os.path.join(self.test_dir.name, 'artifact.qza')
artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact.save(fp)
root_dir = str(artifact.uuid)
output_dir = os.path.join(self.test_dir.name, 'artifact-extract-test')
result_dir = Artifact.extract(fp, output_dir=output_dir)
self.assertEqual(result_dir, os.path.join(output_dir, root_dir))
expected = {
'VERSION',
'metadata.yaml',
'data/file1.txt',
'data/file2.txt',
'data/nested/file3.txt',
'data/nested/file4.txt',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertExtractedArchiveMembers(output_dir, root_dir, expected)
def test_peek(self):
artifact = Artifact.import_data(FourInts, [0, 0, 42, 1000])
fp = os.path.join(self.test_dir.name, 'artifact.qza')
artifact.save(fp)
metadata = Artifact.peek(fp)
self.assertIsInstance(metadata, ResultMetadata)
self.assertEqual(metadata.type, 'FourInts')
self.assertEqual(metadata.uuid, str(artifact.uuid))
self.assertEqual(metadata.format, 'FourIntsDirectoryFormat')
def test_import_data_invalid_type(self):
with self.assertRaisesRegex(TypeError,
'concrete semantic type.*Visualization'):
Artifact.import_data(qiime2.core.type.Visualization, self.test_dir)
with self.assertRaisesRegex(TypeError,
'concrete semantic type.*Visualization'):
Artifact.import_data('Visualization', self.test_dir)
def test_import_data_with_filepath_multi_file_data_layout(self):
fp = os.path.join(self.test_dir.name, 'test.txt')
with open(fp, 'w') as fh:
fh.write('42\n')
with self.assertRaisesRegex(ValueError,
"FourIntsDirectoryFormat.*directory"):
Artifact.import_data(FourInts, fp)
def test_import_data_with_wrong_number_of_files(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
error_regex = ("Missing.*MappingDirectoryFormat.*mapping.tsv")
with self.assertRaisesRegex(ValueError, error_regex):
Artifact.import_data(Mapping, data_dir)
def test_import_data_with_unrecognized_files(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
with open(os.path.join(data_dir, 'file1.txt'), 'w') as fh:
fh.write('42\n')
with open(os.path.join(data_dir, 'file2.txt'), 'w') as fh:
fh.write('43\n')
nested = os.path.join(data_dir, 'nested')
os.mkdir(nested)
with open(os.path.join(nested, 'file3.txt'), 'w') as fh:
fh.write('44\n')
with open(os.path.join(nested, 'foo.txt'), 'w') as fh:
fh.write('45\n')
<|fim▁hole|> Artifact.import_data(FourInts, data_dir)
def test_import_data_with_unreachable_path(self):
with self.assertRaisesRegex(ValueError, "does not exist"):
Artifact.import_data(IntSequence1,
os.path.join(self.test_dir.name, 'foo.txt'))
with self.assertRaisesRegex(ValueError, "does not exist"):
Artifact.import_data(FourInts,
os.path.join(self.test_dir.name, 'bar', ''))
def test_import_data_with_invalid_format_single_file(self):
fp = os.path.join(self.test_dir.name, 'foo.txt')
with open(fp, 'w') as fh:
fh.write('42\n')
fh.write('43\n')
fh.write('abc\n')
fh.write('123\n')
error_regex = "foo.txt.*IntSequenceFormat"
with self.assertRaisesRegex(ValueError, error_regex):
Artifact.import_data(IntSequence1, fp)
def test_import_data_with_invalid_format_multi_file(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
with open(os.path.join(data_dir, 'file1.txt'), 'w') as fh:
fh.write('42\n')
with open(os.path.join(data_dir, 'file2.txt'), 'w') as fh:
fh.write('43\n')
nested = os.path.join(data_dir, 'nested')
os.mkdir(nested)
with open(os.path.join(nested, 'file3.txt'), 'w') as fh:
fh.write('44\n')
with open(os.path.join(nested, 'file4.txt'), 'w') as fh:
fh.write('foo\n')
error_regex = "file4.txt.*SingleIntFormat"
with self.assertRaisesRegex(ValueError, error_regex):
Artifact.import_data(FourInts, data_dir)
def test_import_data_with_filepath(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
# Filename shouldn't matter for single-file case.
fp = os.path.join(data_dir, 'foo.txt')
with open(fp, 'w') as fh:
fh.write('42\n')
fh.write('43\n')
fh.write('42\n')
fh.write('0\n')
artifact = Artifact.import_data(IntSequence1, fp)
self.assertEqual(artifact.type, IntSequence1)
self.assertIsInstance(artifact.uuid, uuid.UUID)
self.assertEqual(artifact.view(list), [42, 43, 42, 0])
def test_import_data_with_directory_single_file(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
fp = os.path.join(data_dir, 'ints.txt')
with open(fp, 'w') as fh:
fh.write('-1\n')
fh.write('-2\n')
fh.write('10\n')
fh.write('100\n')
artifact = Artifact.import_data(IntSequence1, data_dir)
self.assertEqual(artifact.type, IntSequence1)
self.assertIsInstance(artifact.uuid, uuid.UUID)
self.assertEqual(artifact.view(list), [-1, -2, 10, 100])
def test_import_data_with_directory_multi_file(self):
data_dir = os.path.join(self.test_dir.name, 'test')
os.mkdir(data_dir)
with open(os.path.join(data_dir, 'file1.txt'), 'w') as fh:
fh.write('42\n')
with open(os.path.join(data_dir, 'file2.txt'), 'w') as fh:
fh.write('41\n')
nested = os.path.join(data_dir, 'nested')
os.mkdir(nested)
with open(os.path.join(nested, 'file3.txt'), 'w') as fh:
fh.write('43\n')
with open(os.path.join(nested, 'file4.txt'), 'w') as fh:
fh.write('40\n')
artifact = Artifact.import_data(FourInts, data_dir)
self.assertEqual(artifact.type, FourInts)
self.assertIsInstance(artifact.uuid, uuid.UUID)
self.assertEqual(artifact.view(list), [42, 41, 43, 40])
def test_eq_identity(self):
artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
self.assertEqual(artifact, artifact)
def test_eq_same_uuid(self):
fp = os.path.join(self.test_dir.name, 'artifact.qza')
artifact1 = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact1.save(fp)
artifact2 = Artifact.load(fp)
self.assertEqual(artifact1, artifact2)
def test_ne_same_data_different_uuid(self):
artifact1 = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact2 = Artifact.import_data(FourInts, [-1, 42, 0, 43])
self.assertNotEqual(artifact1, artifact2)
def test_ne_different_data_different_uuid(self):
artifact1 = Artifact.import_data(FourInts, [-1, 42, 0, 43])
artifact2 = Artifact.import_data(FourInts, [1, 2, 3, 4])
self.assertNotEqual(artifact1, artifact2)
def test_ne_subclass_same_uuid(self):
class ArtifactSubclass(Artifact):
pass
fp = os.path.join(self.test_dir.name, 'artifact.qza')
artifact1 = ArtifactSubclass.import_data(FourInts, [-1, 42, 0, 43])
artifact1.save(fp)
artifact2 = Artifact.load(fp)
self.assertNotEqual(artifact1, artifact2)
self.assertNotEqual(artifact2, artifact1)
def test_ne_different_type_same_uuid(self):
artifact = Artifact.import_data(FourInts, [-1, 42, 0, 43])
class Faker:
@property
def uuid(self):
return artifact.uuid
faker = Faker()
self.assertNotEqual(artifact, faker)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | error_regex = ("Unrecognized.*foo.txt.*FourIntsDirectoryFormat")
with self.assertRaisesRegex(ValueError, error_regex): |
<|file_name|>MI_example.py<|end_file_name|><|fim▁begin|># Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
import math
import m5
from m5.objects import *
from m5.defines import buildEnv
#
# Note: the cache latency is only used by the sequencer on fast path hits
#
class Cache(RubyCache):
latency = 3
def define_options(parser):
return
def create_system(options, system, piobus, dma_ports, ruby_system):
if buildEnv['PROTOCOL'] != 'MI_example':
panic("This script requires the MI_example protocol to be built.")
cpu_sequencers = []
#
# The ruby network creation expects the list of nodes in the system to be
# consistent with the NetDest list. Therefore the l1 controller nodes must be
# listed before the directory nodes and directory nodes before dma nodes, etc.
#
l1_cntrl_nodes = []
dir_cntrl_nodes = []
dma_cntrl_nodes = []
#
# Must create the individual controllers before the network to ensure the
# controller constructors are called before the network constructor
#
block_size_bits = int(math.log(options.cacheline_size, 2))
cntrl_count = 0
for i in xrange(options.num_cpus):
#
# First create the Ruby objects associated with this cpu
# Only one cache exists for this protocol, so by default use the L1D
# config parameters.
#
cache = Cache(size = options.l1d_size,
assoc = options.l1d_assoc,
start_index_bit = block_size_bits)
#
# Only one unified L1 cache exists. Can cache instructions and data.
#
l1_cntrl = L1Cache_Controller(version = i,
cntrl_id = cntrl_count,
cacheMemory = cache,
send_evictions = (
options.cpu_type == "detailed"),
ruby_system = ruby_system)
cpu_seq = RubySequencer(version = i,
icache = cache,
dcache = cache,
ruby_system = ruby_system)
l1_cntrl.sequencer = cpu_seq
if piobus != None:
cpu_seq.pio_port = piobus.slave
exec("system.l1_cntrl%d = l1_cntrl" % i)
#
# Add controllers and sequencers to the appropriate lists
#
cpu_sequencers.append(cpu_seq)
l1_cntrl_nodes.append(l1_cntrl)
cntrl_count += 1
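    # Sum up all physical memory so it can be split evenly across the
    # directory controllers below.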
phys_mem_size = 0
for mem in system.memories.unproxy(system):
phys_mem_size += long(mem.range.second) - long(mem.range.first) + 1
mem_module_size = phys_mem_size / options.num_dirs
for i in xrange(options.num_dirs):
#
# Create the Ruby objects associated with the directory controller
#
mem_cntrl = RubyMemoryControl(version = i)
dir_size = MemorySize('0B')
dir_size.value = mem_module_size
dir_cntrl = Directory_Controller(version = i,
cntrl_id = cntrl_count,
directory = \
RubyDirectoryMemory( \
version = i,
size = dir_size,
use_map = options.use_map,
map_levels = \
options.map_levels),
memBuffer = mem_cntrl,
ruby_system = ruby_system)
exec("system.dir_cntrl%d = dir_cntrl" % i)
dir_cntrl_nodes.append(dir_cntrl)
cntrl_count += 1
for i, dma_port in enumerate(dma_ports):
#
# Create the Ruby objects associated with the dma controller<|fim▁hole|> dma_seq = DMASequencer(version = i,
ruby_system = ruby_system)
dma_cntrl = DMA_Controller(version = i,
cntrl_id = cntrl_count,
dma_sequencer = dma_seq,
ruby_system = ruby_system)
exec("system.dma_cntrl%d = dma_cntrl" % i)
exec("system.dma_cntrl%d.dma_sequencer.slave = dma_port" % i)
dma_cntrl_nodes.append(dma_cntrl)
cntrl_count += 1
all_cntrls = l1_cntrl_nodes + dir_cntrl_nodes + dma_cntrl_nodes
return (cpu_sequencers, dir_cntrl_nodes, all_cntrls)<|fim▁end|> | # |
<|file_name|>App.js<|end_file_name|><|fim▁begin|>Kanboard.App = function() {
this.controllers = {};
};
Kanboard.App.prototype.get = function(controller) {
return this.controllers[controller];
};
Kanboard.App.prototype.execute = function() {
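    // Instantiate every controller class attached to the Kanboard namespace
    // and run its optional lifecycle hooks: execute(), listen(), focus().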
for (var className in Kanboard) {
if (className !== "App") {
var controller = new Kanboard[className](this);
this.controllers[className] = controller;
if (typeof controller.execute === "function") {
controller.execute();
}
if (typeof controller.listen === "function") {
controller.listen();
}
if (typeof controller.focus === "function") {
controller.focus();
}<|fim▁hole|>
this.focus();
this.datePicker();
this.autoComplete();
this.tagAutoComplete();
};
Kanboard.App.prototype.focus = function() {
// Auto-select input fields
$(document).on('focus', '.auto-select', function() {
$(this).select();
});
// Workaround for chrome
$(document).on('mouseup', '.auto-select', function(e) {
e.preventDefault();
});
};
Kanboard.App.prototype.datePicker = function() {
var bodyElement = $("body");
var dateFormat = bodyElement.data("js-date-format");
var timeFormat = bodyElement.data("js-time-format");
var lang = bodyElement.data("js-lang");
$.datepicker.setDefaults($.datepicker.regional[lang]);
$.timepicker.setDefaults($.timepicker.regional[lang]);
// Datepicker
$(".form-date").datepicker({
showOtherMonths: true,
selectOtherMonths: true,
dateFormat: dateFormat,
constrainInput: false
});
// Datetime picker
$(".form-datetime").datetimepicker({
dateFormat: dateFormat,
timeFormat: timeFormat,
constrainInput: false
});
};
Kanboard.App.prototype.tagAutoComplete = function() {
$(".tag-autocomplete").select2({
tags: true
});
};
Kanboard.App.prototype.autoComplete = function() {
$(".autocomplete").each(function() {
var input = $(this);
var field = input.data("dst-field");
var extraFields = input.data("dst-extra-fields");
if ($('#form-' + field).val() === '') {
input.parent().find("button[type=submit]").attr('disabled','disabled');
}
input.autocomplete({
source: input.data("search-url"),
minLength: 1,
select: function(event, ui) {
$("input[name=" + field + "]").val(ui.item.id);
if (extraFields) {
var fields = extraFields.split(',');
for (var i = 0; i < fields.length; i++) {
var fieldName = fields[i].trim();
$("input[name=" + fieldName + "]").val(ui.item[fieldName]);
}
}
input.parent().find("button[type=submit]").removeAttr('disabled');
}
});
});
};
Kanboard.App.prototype.hasId = function(id) {
return !!document.getElementById(id);
};
Kanboard.App.prototype.showLoadingIcon = function() {
$("body").append('<span id="app-loading-icon"> <i class="fa fa-spinner fa-spin"></i></span>');
};
Kanboard.App.prototype.hideLoadingIcon = function() {
$("#app-loading-icon").remove();
};<|fim▁end|> | }
} |
<|file_name|>HciCloudHwrHelper.java<|end_file_name|><|fim▁begin|>package com.sinovoice.pathfinder.hcicloud.hwr;
import android.content.Context;
import android.util.Log;
<|fim▁hole|>import com.sinovoice.pathfinder.hcicloud.sys.SysConfig;
public class HciCloudHwrHelper {
private static final String TAG = HciCloudHwrHelper.class.getSimpleName();
private static HciCloudHwrHelper mInstance;
private HciCloudHwrHelper() {
}
public static HciCloudHwrHelper getInstance() {
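        // Lazily create the shared instance (note: not synchronized).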
if (mInstance == null) {
mInstance = new HciCloudHwrHelper();
}
return mInstance;
}
/**
     * Initializes the HWR handwriting-recognition capability. The returned
     * error codes can be looked up under HciErrorCode in the API docs.
     *
     * @param context
     * @return error code; 0 means success
*/
public int init(Context context) {
int initResult = 0;
        // Construct an instance of the HWR init parameter class
HwrInitParam hwrInitParam = new HwrInitParam();
        // Get the app's lib directory path; when using resource files under
        // /data/data/pkgName/lib, the "android_so" file flag must be added
String hwrDirPath = context.getFilesDir().getAbsolutePath()
.replace("files", "lib");
hwrInitParam.addParam(HwrInitParam.PARAM_KEY_DATA_PATH, hwrDirPath);
hwrInitParam.addParam(HwrInitParam.PARAM_KEY_FILE_FLAG, "android_so");
hwrInitParam.addParam(HwrInitParam.PARAM_KEY_INIT_CAP_KEYS,
SysConfig.CAPKEY_HWR);
Log.d(TAG, "hwr init config: " + hwrInitParam.getStringConfig());
        // HWR initialization
initResult = HciCloudHwr.hciHwrInit(hwrInitParam.getStringConfig());
return initResult;
}
/**
     * Releases (de-initializes) the HWR handwriting-recognition capability. The
     * returned error codes can be looked up under HciErrorCode in the API docs.
     *
     * @return error code; 0 means success
*/
public int release() {
int result = HciCloudHwr.hciHwrRelease();
return result;
}
}<|fim▁end|> | import com.sinovoice.hcicloudsdk.api.hwr.HciCloudHwr;
import com.sinovoice.hcicloudsdk.common.hwr.HwrInitParam; |
<|file_name|>studio_locale-bfd0c94141f74417a2ff3e69569591ae.js<|end_file_name|><|fim▁begin|>var studio_locale = {lc:{"ar":function(n){
if (n === 0) {
return 'zero';
}
if (n == 1) {
return 'one';
}
if (n == 2) {
return 'two';
}
if ((n % 100) >= 3 && (n % 100) <= 10 && n == Math.floor(n)) {
return 'few';
}
if ((n % 100) >= 11 && (n % 100) <= 99 && n == Math.floor(n)) {
return 'many';
}
return 'other';
},"en":function(n){return n===1?"one":"other"},"bg":function(n){return n===1?"one":"other"},"bn":function(n){return n===1?"one":"other"},"ca":function(n){return n===1?"one":"other"},"cs":function(n){
if (n == 1) {
return 'one';
}
if (n == 2 || n == 3 || n == 4) {
return 'few';
}
return 'other';
},"da":function(n){return n===1?"one":"other"},"de":function(n){return n===1?"one":"other"},"el":function(n){return n===1?"one":"other"},"es":function(n){return n===1?"one":"other"},"et":function(n){return n===1?"one":"other"},"eu":function(n){return n===1?"one":"other"},"fa":function(n){return "other"},"fi":function(n){return n===1?"one":"other"},"fil":function(n){return n===0||n==1?"one":"other"},"fr":function(n){return Math.floor(n)===0||Math.floor(n)==1?"one":"other"},"ga":function(n){return n==1?"one":(n==2?"two":"other")},"gl":function(n){return n===1?"one":"other"},"he":function(n){return n===1?"one":"other"},"hi":function(n){return n===0||n==1?"one":"other"},"hr":function(n){
if ((n % 10) == 1 && (n % 100) != 11) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 4 &&
((n % 100) < 12 || (n % 100) > 14) && n == Math.floor(n)) {
return 'few';
}
if ((n % 10) === 0 || ((n % 10) >= 5 && (n % 10) <= 9) ||
((n % 100) >= 11 && (n % 100) <= 14) && n == Math.floor(n)) {
return 'many';
}
return 'other';
},"hu":function(n){return "other"},"id":function(n){return "other"},"is":function(n){
return ((n%10) === 1 && (n%100) !== 11) ? 'one' : 'other';
},"it":function(n){return n===1?"one":"other"},"ja":function(n){return "other"},"ko":function(n){return "other"},"lt":function(n){
if ((n % 10) == 1 && ((n % 100) < 11 || (n % 100) > 19)) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 9 &&
((n % 100) < 11 || (n % 100) > 19) && n == Math.floor(n)) {
return 'few';
}
return 'other';
},"lv":function(n){
if (n === 0) {
return 'zero';
}
if ((n % 10) == 1 && (n % 100) != 11) {
return 'one';
}
return 'other';
},"mk":function(n){return (n%10)==1&&n!=11?"one":"other"},"mr":function(n){return n===1?"one":"other"},"ms":function(n){return "other"},"mt":function(n){
if (n == 1) {
return 'one';
}
if (n === 0 || ((n % 100) >= 2 && (n % 100) <= 4 && n == Math.floor(n))) {
return 'few';
}
if ((n % 100) >= 11 && (n % 100) <= 19 && n == Math.floor(n)) {
return 'many';
}
return 'other';
},"nl":function(n){return n===1?"one":"other"},"no":function(n){return n===1?"one":"other"},"pl":function(n){
if (n == 1) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 4 &&
((n % 100) < 12 || (n % 100) > 14) && n == Math.floor(n)) {
return 'few';
}
if ((n % 10) === 0 || n != 1 && (n % 10) == 1 ||
((n % 10) >= 5 && (n % 10) <= 9 || (n % 100) >= 12 && (n % 100) <= 14) &&
n == Math.floor(n)) {
return 'many';
}
return 'other';
},"pt":function(n){return n===1?"one":"other"},"ro":function(n){
if (n == 1) {
return 'one';
}
if (n === 0 || n != 1 && (n % 100) >= 1 &&
(n % 100) <= 19 && n == Math.floor(n)) {
return 'few';
}
return 'other';
},"ru":function(n){
if ((n % 10) == 1 && (n % 100) != 11) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 4 &&
((n % 100) < 12 || (n % 100) > 14) && n == Math.floor(n)) {
return 'few';
}
if ((n % 10) === 0 || ((n % 10) >= 5 && (n % 10) <= 9) ||
((n % 100) >= 11 && (n % 100) <= 14) && n == Math.floor(n)) {
return 'many';
}
return 'other';
},"sk":function(n){
if (n == 1) {
return 'one';
}
if (n == 2 || n == 3 || n == 4) {
return 'few';
}
return 'other';
},"sl":function(n){
if ((n % 100) == 1) {
return 'one';
}
if ((n % 100) == 2) {
return 'two';
}
if ((n % 100) == 3 || (n % 100) == 4) {
return 'few';
}
return 'other';
},"sq":function(n){return n===1?"one":"other"},"sr":function(n){
if ((n % 10) == 1 && (n % 100) != 11) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 4 &&
((n % 100) < 12 || (n % 100) > 14) && n == Math.floor(n)) {
return 'few';
}<|fim▁hole|> return 'other';
},"sv":function(n){return n===1?"one":"other"},"ta":function(n){return n===1?"one":"other"},"th":function(n){return "other"},"tr":function(n){return n===1?"one":"other"},"uk":function(n){
if ((n % 10) == 1 && (n % 100) != 11) {
return 'one';
}
if ((n % 10) >= 2 && (n % 10) <= 4 &&
((n % 100) < 12 || (n % 100) > 14) && n == Math.floor(n)) {
return 'few';
}
if ((n % 10) === 0 || ((n % 10) >= 5 && (n % 10) <= 9) ||
((n % 100) >= 11 && (n % 100) <= 14) && n == Math.floor(n)) {
return 'many';
}
return 'other';
},"ur":function(n){return n===1?"one":"other"},"vi":function(n){return "other"},"zh":function(n){return "other"}},
c:function(d,k){if(!d)throw new Error("MessageFormat: Data required for '"+k+"'.")},
n:function(d,k,o){if(isNaN(d[k]))throw new Error("MessageFormat: '"+k+"' isn't a number.");return d[k]-(o||0)},
v:function(d,k){studio_locale.c(d,k);return d[k]},
p:function(d,k,o,l,p){studio_locale.c(d,k);return d[k] in p?p[d[k]]:(k=studio_locale.lc[l](d[k]-o),k in p?p[k]:p.other)},
s:function(d,k,p){studio_locale.c(d,k);return d[k] in p?p[d[k]]:p.other}};
(window.blockly = window.blockly || {}).studio_locale = {
"actor":function(d){return "herec"},
"addItems1":function(d){return "přidat 1 položku typu"},
"addItems2":function(d){return "přidat 2 položky typu"},
"addItems3":function(d){return "přidat 3 položky typu"},
"addItems5":function(d){return "přidat 5 položek typu"},
"addItems10":function(d){return "přidat 10 položek typu"},
"addItemsRandom":function(d){return "přidat náhodné položky typu"},
"addItemsTooltip":function(d){return "Přidat položky na scénu."},
"alienInvasion":function(d){return "Mimozemská invaze!"},
"backgroundBlack":function(d){return "černý"},
"backgroundCave":function(d){return "jeskyně"},
"backgroundCloudy":function(d){return "zataženo"},
"backgroundHardcourt":function(d){return "tvrdé hřiště"},
"backgroundNight":function(d){return "noc"},
"backgroundUnderwater":function(d){return "pod vodou"},
"backgroundCity":function(d){return "město"},
"backgroundDesert":function(d){return "poušť"},
"backgroundRainbow":function(d){return "duha"},
"backgroundSoccer":function(d){return "fotbal"},
"backgroundSpace":function(d){return "vesmír"},
"backgroundTennis":function(d){return "tenis"},
"backgroundWinter":function(d){return "zima"},
"catActions":function(d){return "Akce"},
"catControl":function(d){return "Smyčky"},
"catEvents":function(d){return "Události"},
"catLogic":function(d){return "Logika"},
"catMath":function(d){return "Matematika"},
"catProcedures":function(d){return "Funkce"},
"catText":function(d){return "text"},
"catVariables":function(d){return "Proměnné"},
"changeScoreTooltip":function(d){return "Přidat nebo odebrat bod ze skóre."},
"changeScoreTooltipK1":function(d){return "Přidat bod."},
"continue":function(d){return "Pokračovat"},
"decrementPlayerScore":function(d){return "Odebrat bod"},
"defaultSayText":function(d){return "Piš zde"},
"dropletBlock_changeScore_description":function(d){return "Přidat nebo odebrat bod ze skóre."},
"dropletBlock_penColour_description":function(d){return "Sets the color of the line drawn behind the turtle as it moves"},
"dropletBlock_penColour_param0":function(d){return "color"},
"dropletBlock_setBackground_description":function(d){return "Nastavit obrázek pozadí"},
"dropletBlock_setSpriteEmotion_description":function(d){return "Nastaví náladu herce"},
"dropletBlock_setSpritePosition_description":function(d){return "Okamžitě přesune herce na zadané místo."},
"dropletBlock_setSpriteSpeed_description":function(d){return "Nastaví rychlost herce"},
"dropletBlock_setSprite_description":function(d){return "Nastaví obrázek herce"},
"dropletBlock_throw_description":function(d){return "Hodí střelu od zadaného herce."},
"dropletBlock_vanish_description":function(d){return "Herec zmizí."},
"emotion":function(d){return "nálada"},
"finalLevel":function(d){return "Dobrá práce! Vyřešil si poslední hádanku."},
"for":function(d){return "pro"},
"hello":function(d){return "ahoj"},
"helloWorld":function(d){return "Ahoj světe!"},
"incrementPlayerScore":function(d){return "Bod"},
"itemBlueFireball":function(d){return "modrá ohnivá koule"},
"itemPurpleFireball":function(d){return "fialová ohnivá koule"},
"itemRedFireball":function(d){return "červená ohnivá koule"},
"itemYellowHearts":function(d){return "žlutá srdce"},
"itemPurpleHearts":function(d){return "fialová srdce"},
"itemRedHearts":function(d){return "červená srdce"},
"itemRandom":function(d){return "náhodně"},
"itemAnna":function(d){return "hák"},
"itemElsa":function(d){return "jiskra"},
"itemHiro":function(d){return "mikroboti"},
"itemBaymax":function(d){return "raketa"},
"itemRapunzel":function(d){return "pánev"},
"itemCherry":function(d){return "třešeň"},
"itemIce":function(d){return "led"},
"itemDuck":function(d){return "kachna"},
"makeProjectileDisappear":function(d){return "zmizet"},
"makeProjectileBounce":function(d){return "odrazit"},
"makeProjectileBlueFireball":function(d){return "udělej modrou ohnivou kouli"},
"makeProjectilePurpleFireball":function(d){return "udělej fialovou ohnivou kouli"},
"makeProjectileRedFireball":function(d){return "udělej červenou ohnivou kouli"},
"makeProjectileYellowHearts":function(d){return "udělej žlutá srdce"},
"makeProjectilePurpleHearts":function(d){return "udělej fialová srdce"},
"makeProjectileRedHearts":function(d){return "udělej červená srdce"},
"makeProjectileTooltip":function(d){return "Nechat střelu narazit a zmizet nebo odrazit."},
"makeYourOwn":function(d){return "Vytvořit si vlastní hru v Hravé laboratoři"},
"moveDirectionDown":function(d){return "dolů"},
"moveDirectionLeft":function(d){return "vlevo"},
"moveDirectionRight":function(d){return "vpravo"},
"moveDirectionUp":function(d){return "nahoru"},
"moveDirectionRandom":function(d){return "náhodně"},
"moveDistance25":function(d){return "25 pixelů"},
"moveDistance50":function(d){return "50 pixelů"},
"moveDistance100":function(d){return "100 pixelů"},
"moveDistance200":function(d){return "200 pixelů"},
"moveDistance400":function(d){return "400 pixelů"},
"moveDistancePixels":function(d){return "pixely"},
"moveDistanceRandom":function(d){return "náhodné pixely"},
"moveDistanceTooltip":function(d){return "Přemístit herce určenou vzdálenost ve specifickém směru."},
"moveSprite":function(d){return "pohyb"},
"moveSpriteN":function(d){return "pohnout hercem "+studio_locale.v(d,"spriteIndex")},
"toXY":function(d){return "do x,y"},
"moveDown":function(d){return "pohyb dolů"},
"moveDownTooltip":function(d){return "pohnout hercem dolů."},
"moveLeft":function(d){return "pohnout vlevo"},
"moveLeftTooltip":function(d){return "pohnout hercem vlevo."},
"moveRight":function(d){return "pohnout vpravo"},
"moveRightTooltip":function(d){return "pohnout hercem vpravo."},
"moveUp":function(d){return "pohnout nahoru"},
"moveUpTooltip":function(d){return "pohnout hercem nahoru."},
"moveTooltip":function(d){return "pohnout hercem."},
"nextLevel":function(d){return "Dobrá práce! Dokončil jsi tuto hádanku."},
"no":function(d){return "Ne"},
"numBlocksNeeded":function(d){return "Tato hádanka může být vyřešena pomocí %1 bloků."},
"onEventTooltip":function(d){return "Spustit kód v reakci na konkrétní událost."},
"ouchExclamation":function(d){return "Au!"},
"playSoundCrunch":function(d){return "přehrát zvuk křupání"},
"playSoundGoal1":function(d){return "přehrát zvuk cíl 1"},
"playSoundGoal2":function(d){return "přehrát zvuk cíl 2"},
"playSoundHit":function(d){return "přehrát zvuk zásah"},
"playSoundLosePoint":function(d){return "přehrát zvuk ztráta bodu"},
"playSoundLosePoint2":function(d){return "přehrát zvuk ztráta bodu 2"},
"playSoundRetro":function(d){return "přehrát zvuk \"retro\""},
"playSoundRubber":function(d){return "přehrát zvuk guma"},
"playSoundSlap":function(d){return "přehrát zvuk plácnutí"},
"playSoundTooltip":function(d){return "Přehraj vybraný zvuk."},
"playSoundWinPoint":function(d){return "přehrát zvuk získaný bod"},
"playSoundWinPoint2":function(d){return "přehrát zvuk získaný bod 2"},
"playSoundWood":function(d){return "přehrát zvuk dřevo"},
"positionOutTopLeft":function(d){return "na pozici nad horní levou pozicí"},
"positionOutTopRight":function(d){return "na pozici nad horní pravou pozicí"},
"positionTopOutLeft":function(d){return "na horní vnější levou pozici"},
"positionTopLeft":function(d){return "na horní levou pozici"},
"positionTopCenter":function(d){return "na horní středovou pozici"},
"positionTopRight":function(d){return "na horní pravou pozici"},
"positionTopOutRight":function(d){return "na horní vnější pravou pozici"},
"positionMiddleLeft":function(d){return "na střední levou pozici"},
"positionMiddleCenter":function(d){return "na prostřední středovou pozici"},
"positionMiddleRight":function(d){return "na prostřední pravou pozici"},
"positionBottomOutLeft":function(d){return "na spodní vnější levou pozici"},
"positionBottomLeft":function(d){return "na spodní levou pozici"},
"positionBottomCenter":function(d){return "na spodní středovou pozici"},
"positionBottomRight":function(d){return "na spodní pravou pozici"},
"positionBottomOutRight":function(d){return "na spodní vnější pravou pozici"},
"positionOutBottomLeft":function(d){return "na pozici pod spodní levou pozicí"},
"positionOutBottomRight":function(d){return "na pozici pod spodní pravou pozicí"},
"positionRandom":function(d){return "na náhodnou pozici"},
"projectileBlueFireball":function(d){return "modrá ohnivá koule"},
"projectilePurpleFireball":function(d){return "fialová ohnivá koule"},
"projectileRedFireball":function(d){return "červená ohnivá koule"},
"projectileYellowHearts":function(d){return "žlutá srdce"},
"projectilePurpleHearts":function(d){return "fialová srdce"},
"projectileRedHearts":function(d){return "červená srdce"},
"projectileRandom":function(d){return "náhodně"},
"projectileAnna":function(d){return "hák"},
"projectileElsa":function(d){return "jiskra"},
"projectileHiro":function(d){return "mikroboti"},
"projectileBaymax":function(d){return "raketa"},
"projectileRapunzel":function(d){return "pánev"},
"projectileCherry":function(d){return "třešeň"},
"projectileIce":function(d){return "led"},
"projectileDuck":function(d){return "kachna"},
"reinfFeedbackMsg":function(d){return "Můžeš stisknout tlačítko \"Pokračovat v hraní\" pro návrat do hraní tvé historie."},
"repeatForever":function(d){return "opakujte navždy"},
"repeatDo":function(d){return "dělej"},
"repeatForeverTooltip":function(d){return "Provést akce v tomto bloku opakovaně dokud je spuštěn příběh."},
"saySprite":function(d){return "řekni"},
"saySpriteN":function(d){return "herec "+studio_locale.v(d,"spriteIndex")+" říká"},
"saySpriteTooltip":function(d){return "Zobrazit komiksovou bublinu s přidruženým textem od zvoleného herece."},
"saySpriteChoices_0":function(d){return "Ahoj."},
"saySpriteChoices_1":function(d){return "Ahoj všichni."},
"saySpriteChoices_2":function(d){return "Jak se máš?"},
"saySpriteChoices_3":function(d){return "Dobré ráno"},
"saySpriteChoices_4":function(d){return "Dobré odpoledne"},
"saySpriteChoices_5":function(d){return "Dobrou noc"},
"saySpriteChoices_6":function(d){return "Dobrý večer"},
"saySpriteChoices_7":function(d){return "Co je nového?"},
"saySpriteChoices_8":function(d){return "Co?"},
"saySpriteChoices_9":function(d){return "Kde?"},
"saySpriteChoices_10":function(d){return "Kdy?"},
"saySpriteChoices_11":function(d){return "Dobře."},
"saySpriteChoices_12":function(d){return "Skvělé!"},
"saySpriteChoices_13":function(d){return "Dobře."},
"saySpriteChoices_14":function(d){return "Není to zlé."},
"saySpriteChoices_15":function(d){return "Hodně štěstí."},
"saySpriteChoices_16":function(d){return "Ano"},
"saySpriteChoices_17":function(d){return "Ne"},
"saySpriteChoices_18":function(d){return "Dobře"},
"saySpriteChoices_19":function(d){return "Pěkný hod!"},
"saySpriteChoices_20":function(d){return "Hezký den."},
"saySpriteChoices_21":function(d){return "Ahoj."},
"saySpriteChoices_22":function(d){return "Hned jsem zpátky."},
"saySpriteChoices_23":function(d){return "Zítra ahoj!"},
"saySpriteChoices_24":function(d){return "Zatím se měj!"},
"saySpriteChoices_25":function(d){return "Opatruj se!"},
"saySpriteChoices_26":function(d){return "Užijte si!"},
"saySpriteChoices_27":function(d){return "Musím jít."},
"saySpriteChoices_28":function(d){return "Chcete být přátelé?"},
"saySpriteChoices_29":function(d){return "Skvělá práce!"},
"saySpriteChoices_30":function(d){return "Pane jo!"},
"saySpriteChoices_31":function(d){return "Jaj!"},
"saySpriteChoices_32":function(d){return "Těší mě."},
"saySpriteChoices_33":function(d){return "Dobře!"},
"saySpriteChoices_34":function(d){return "Děkuji"},
"saySpriteChoices_35":function(d){return "Ne, děkuji"},
"saySpriteChoices_36":function(d){return "Aaaaaah!"},
"saySpriteChoices_37":function(d){return "Nevadí"},
"saySpriteChoices_38":function(d){return "Dnes"},
"saySpriteChoices_39":function(d){return "Zítra"},
"saySpriteChoices_40":function(d){return "Včera"},
"saySpriteChoices_41":function(d){return "Našel jsem tě!"},
"saySpriteChoices_42":function(d){return "Našel si mě!"},
"saySpriteChoices_43":function(d){return "10, 9, 8, 7, 6, 5, 4, 3, 2, 1!"},
"saySpriteChoices_44":function(d){return "Jsi skvělý!"},
"saySpriteChoices_45":function(d){return "Jsi vtipný!"},
"saySpriteChoices_46":function(d){return "Jsi pošetilý! "},
"saySpriteChoices_47":function(d){return "Jsi dobrý přítel!"},
"saySpriteChoices_48":function(d){return "Dávej pozor!"},
"saySpriteChoices_49":function(d){return "Kachna!"},
"saySpriteChoices_50":function(d){return "Mám tě!"},
"saySpriteChoices_51":function(d){return "Au!"},
"saySpriteChoices_52":function(d){return "Promiň!"},
"saySpriteChoices_53":function(d){return "Opatrně!"},
"saySpriteChoices_54":function(d){return "Uau!"},
"saySpriteChoices_55":function(d){return "Ups!"},
"saySpriteChoices_56":function(d){return "Skoro si mě dostal!"},
"saySpriteChoices_57":function(d){return "Dobrý pokus!"},
"saySpriteChoices_58":function(d){return "Nemůžeš mě chytit!"},
"scoreText":function(d){return "Body: "+studio_locale.v(d,"playerScore")},
"setBackground":function(d){return "nastavit pozadí"},
"setBackgroundRandom":function(d){return "nastavit náhodné pozadí"},
"setBackgroundBlack":function(d){return "nastavit černé pozadí"},
"setBackgroundCave":function(d){return "nastavit pozadí jeskyně"},
"setBackgroundCloudy":function(d){return "nastavit pozadí mraky"},
"setBackgroundHardcourt":function(d){return "nastavit pozadí tvrdé hřiště"},
"setBackgroundNight":function(d){return "nastavit pozadí noc"},
"setBackgroundUnderwater":function(d){return "nastavit pozadí pod vodou"},
"setBackgroundCity":function(d){return "nastavit pozadí město"},
"setBackgroundDesert":function(d){return "nastavit pozadí poušť"},
"setBackgroundRainbow":function(d){return "nastavit pozadí duha"},
"setBackgroundSoccer":function(d){return "nastavit pozadí fotbal"},
"setBackgroundSpace":function(d){return "nastavit pozadí vesmír"},
"setBackgroundTennis":function(d){return "nastavit pozadí tenis"},
"setBackgroundWinter":function(d){return "nastavit pozadí zima"},
"setBackgroundLeafy":function(d){return "nastavit listnaté pozadí"},
"setBackgroundGrassy":function(d){return "nastavit travnaté pozadí"},
"setBackgroundFlower":function(d){return "nastavit květinové pozadí"},
"setBackgroundTile":function(d){return "nastavit dlaždicové pozadí"},
"setBackgroundIcy":function(d){return "nastavit ledové pozadí"},
"setBackgroundSnowy":function(d){return "nastavit zasněžené pozadí"},
"setBackgroundTooltip":function(d){return "Nastavit obrázek pozadí"},
"setEnemySpeed":function(d){return "nastavit rychlost protivníka"},
"setPlayerSpeed":function(d){return "nastavit rychlost hráče"},
"setScoreText":function(d){return "nastavit body"},
"setScoreTextTooltip":function(d){return "Nastaví text, který se má zobrazit v oblasti pro výsledek."},
"setSpriteEmotionAngry":function(d){return "na nahněvanou náladu"},
"setSpriteEmotionHappy":function(d){return "na šťastnou náladu"},
"setSpriteEmotionNormal":function(d){return "na normální náladu"},
"setSpriteEmotionRandom":function(d){return "na náhodnou náladu"},
"setSpriteEmotionSad":function(d){return "na smutnou náladu"},
"setSpriteEmotionTooltip":function(d){return "Nastaví náladu herce"},
"setSpriteAlien":function(d){return "na obrázek mimozemšťana"},
"setSpriteBat":function(d){return "na obrázek netopýra"},
"setSpriteBird":function(d){return "na obrázek ptáka"},
"setSpriteCat":function(d){return "na obrázek kočky"},
"setSpriteCaveBoy":function(d){return "na obrázek jeskynního chlapece"},
"setSpriteCaveGirl":function(d){return "na obrázek jeskynní dívky"},
"setSpriteDinosaur":function(d){return "na obrázek dinosaura"},
"setSpriteDog":function(d){return "na obrázek psa"},
"setSpriteDragon":function(d){return "na obrázek draka"},
"setSpriteGhost":function(d){return "na obrázek ducha"},
"setSpriteHidden":function(d){return "na skrytý obrázek"},
"setSpriteHideK1":function(d){return "skrýt"},
"setSpriteAnna":function(d){return "na Anin obrázek"},
"setSpriteElsa":function(d){return "na Elsin obrázek"},
"setSpriteHiro":function(d){return "na obrázek Hira"},
"setSpriteBaymax":function(d){return "na obrázek Baymaxe"},
"setSpriteRapunzel":function(d){return "na obrázek Rapunzela"},
"setSpriteKnight":function(d){return "na obrázek rytíře"},
"setSpriteMonster":function(d){return "na obrázek příšery"},
"setSpriteNinja":function(d){return "na obrázek maskovaného ninjy"},
"setSpriteOctopus":function(d){return "na obrázek chobotnice"},
"setSpritePenguin":function(d){return "na obrázek tučňáka"},
"setSpritePirate":function(d){return "na obrázek piráta"},
"setSpritePrincess":function(d){return "na obrázek princezny"},
"setSpriteRandom":function(d){return "na náhodný obrázek"},
"setSpriteRobot":function(d){return "na obrázek robota"},
"setSpriteShowK1":function(d){return "zobrazit"},
"setSpriteSpacebot":function(d){return "na obrázek vesmírného robota"},
"setSpriteSoccerGirl":function(d){return "na obrázek fotbalistky"},
"setSpriteSoccerBoy":function(d){return "na obrázek fotbalisty"},
"setSpriteSquirrel":function(d){return "na obrázek veverky"},
"setSpriteTennisGirl":function(d){return "na obrázek tenistky"},
"setSpriteTennisBoy":function(d){return "na obrázek tenisty"},
"setSpriteUnicorn":function(d){return "na obrázek jednorožce"},
"setSpriteWitch":function(d){return "na obrázek čarodějnice"},
"setSpriteWizard":function(d){return "na obrázek čaroděje"},
"setSpritePositionTooltip":function(d){return "Okamžitě přesune herce na zadané místo."},
"setSpriteK1Tooltip":function(d){return "Zobrazí nebo skryje zadaného herce."},
"setSpriteTooltip":function(d){return "Nastaví obrázek herce"},
"setSpriteSizeRandom":function(d){return "na náhodnou velikost"},
"setSpriteSizeVerySmall":function(d){return "na velmi malou velikost"},
"setSpriteSizeSmall":function(d){return "na malou velikost"},
"setSpriteSizeNormal":function(d){return "na normální velikost"},
"setSpriteSizeLarge":function(d){return "na velkou velikost"},
"setSpriteSizeVeryLarge":function(d){return "na velmi velkou velikost"},
"setSpriteSizeTooltip":function(d){return "Nastaví velikost herce"},
"setSpriteSpeedRandom":function(d){return "na náhodnou rychlost"},
"setSpriteSpeedVerySlow":function(d){return "na velmi pomalou rychlost"},
"setSpriteSpeedSlow":function(d){return "na pomalou rychlost"},
"setSpriteSpeedNormal":function(d){return "na normální rychlost"},
"setSpriteSpeedFast":function(d){return "na rychlou rychlost"},
"setSpriteSpeedVeryFast":function(d){return "na velmi rychlou rychlost"},
"setSpriteSpeedTooltip":function(d){return "Nastaví rychlost herce"},
"setSpriteZombie":function(d){return "na obrázek zombie"},
"shareStudioTwitter":function(d){return "Podívejte se na aplikaci, kterou jsem udělal. Napsal jsem to sám s @codeorg"},
"shareGame":function(d){return "Sdílej svůj příběh:"},
"showCoordinates":function(d){return "Zobrazit souřadnice"},
"showCoordinatesTooltip":function(d){return "zobrazí souřadnice hlavní postavy na obrazovce"},
"showTitleScreen":function(d){return "zobrazit úvodní obrazovku"},
"showTitleScreenTitle":function(d){return "nadpis"},
"showTitleScreenText":function(d){return "text"},
"showTSDefTitle":function(d){return "zde napiš nadpis"},
"showTSDefText":function(d){return "zde napiš text"},
"showTitleScreenTooltip":function(d){return "Zobrazit úvodní obrazovka k přidruženému názvu a textu."},
"size":function(d){return "velikost"},
"setSprite":function(d){return "nastavit"},
"setSpriteN":function(d){return "nastavit herece "+studio_locale.v(d,"spriteIndex")},
"soundCrunch":function(d){return "křupnutí"},
"soundGoal1":function(d){return "cíl 1"},
"soundGoal2":function(d){return "cíl 2"},
"soundHit":function(d){return "zásah"},
"soundLosePoint":function(d){return "ztracený bod"},
"soundLosePoint2":function(d){return "ztracený bod 2"},
"soundRetro":function(d){return "retro"},
"soundRubber":function(d){return "guma"},
"soundSlap":function(d){return "facka"},
"soundWinPoint":function(d){return "vyhraný bod"},
"soundWinPoint2":function(d){return "vyhraný bod 2"},
"soundWood":function(d){return "dřevo"},
"speed":function(d){return "rychlost"},
"startSetValue":function(d){return "start (funkce)"},
"startSetVars":function(d){return "game_vars (nadpis, podnadpis, pozadí, cíl, nebezpečí, hráč)"},
"startSetFuncs":function(d){return "game_funcs (aktualizuj-cíl, aktualizuj-nebezpečí, aktualizuj-hráče, kolize?, na-obrazovce?)"},
"stopSprite":function(d){return "zastavit"},
"stopSpriteN":function(d){return "zastavit herce "+studio_locale.v(d,"spriteIndex")},
"stopTooltip":function(d){return "Zastaví pohyb herce."},
"throwSprite":function(d){return "hoď"},
"throwSpriteN":function(d){return "herec "+studio_locale.v(d,"spriteIndex")+" hodí"},
"throwTooltip":function(d){return "Hodí střelu od zadaného herce."},
"vanish":function(d){return "zmiz"},
"vanishActorN":function(d){return "zmiz herec "+studio_locale.v(d,"spriteIndex")},
"vanishTooltip":function(d){return "Herec zmizí."},
"waitFor":function(d){return "čekat na"},
"waitSeconds":function(d){return "sekund"},
"waitForClick":function(d){return "čekat na kliknutí"},
"waitForRandom":function(d){return "čekat na náhodně"},
"waitForHalfSecond":function(d){return "čekat půl sekundy"},
"waitFor1Second":function(d){return "čekat 1 sekundu"},
"waitFor2Seconds":function(d){return "čekat 2 sekundy"},
"waitFor5Seconds":function(d){return "čekat 5 sekund"},
"waitFor10Seconds":function(d){return "čekat 10 sekund"},
"waitParamsTooltip":function(d){return "Čeká zadaný počet sekund. Použijte nulu pro čekání na kliknutí."},
"waitTooltip":function(d){return "Čeká zadané množství času nebo dokud nedojde ke kliknutí."},
"whenArrowDown":function(d){return "šipka dolů"},
"whenArrowLeft":function(d){return "šipka vlevo"},
"whenArrowRight":function(d){return "šipka vpravo"},
"whenArrowUp":function(d){return "šipka nahoru"},
"whenArrowTooltip":function(d){return "Provést zadané akce po stisknutí klávesy se šipkou."},
"whenDown":function(d){return "když šipka dolů"},
"whenDownTooltip":function(d){return "Spusť uvedené akce když je stisknutá klávesa \"dolů\"."},
"whenGameStarts":function(d){return "když se příběh začne"},
"whenGameStartsTooltip":function(d){return "Provést uvedené akce, když příběh začne."},
"whenLeft":function(d){return "když šipka vlevo"},
"whenLeftTooltip":function(d){return "Spusť uvedené akce když je stisknutá klávesa \"vlevo\"."},
"whenRight":function(d){return "když šipka vpravo"},
"whenRightTooltip":function(d){return "Spusť uvedené akce když je stisknutá klávesa \"vpravo\"."},
"whenSpriteClicked":function(d){return "po kliknutí na herce"},
"whenSpriteClickedN":function(d){return "po kliknutí na herce "+studio_locale.v(d,"spriteIndex")},
"whenSpriteClickedTooltip":function(d){return "Provést uvedené akce po kliknutí na herce."},
"whenSpriteCollidedN":function(d){return "když se herec "+studio_locale.v(d,"spriteIndex")},
"whenSpriteCollidedTooltip":function(d){return "Provést uvedené akce když se herec dotkne jiného herce."},
"whenSpriteCollidedWith":function(d){return "dotkne"},
"whenSpriteCollidedWithAnyActor":function(d){return "dotkne jiného herce"},
"whenSpriteCollidedWithAnyEdge":function(d){return "dotkne okraje"},
"whenSpriteCollidedWithAnyProjectile":function(d){return "dotkne střely"},
"whenSpriteCollidedWithAnything":function(d){return "dotkne čehokoliv"},
"whenSpriteCollidedWithN":function(d){return "dotkne herce "+studio_locale.v(d,"spriteIndex")},
"whenSpriteCollidedWithBlueFireball":function(d){return "dotkne modré ohnivé koule"},
"whenSpriteCollidedWithPurpleFireball":function(d){return "dotkne fialové ohnivé koule"},
"whenSpriteCollidedWithRedFireball":function(d){return "dotkne červené ohnivé koule"},
"whenSpriteCollidedWithYellowHearts":function(d){return "dotkne žlutých srdcí"},
"whenSpriteCollidedWithPurpleHearts":function(d){return "dotkne fialových srdcí"},
"whenSpriteCollidedWithRedHearts":function(d){return "dotkne červených srdcí"},
"whenSpriteCollidedWithBottomEdge":function(d){return "dotkne dolního okraje"},
"whenSpriteCollidedWithLeftEdge":function(d){return "dotkne levého okraje"},
"whenSpriteCollidedWithRightEdge":function(d){return "dotkne pravého okraje"},
"whenSpriteCollidedWithTopEdge":function(d){return "dotkne horního okraje"},
"whenUp":function(d){return "když šipka nahoru"},
"whenUpTooltip":function(d){return "Spusť uvedené akce když je stisknutá klávesa \"nahoru\"."},
"yes":function(d){return "Ano"}};<|fim▁end|> | if ((n % 10) === 0 || ((n % 10) >= 5 && (n % 10) <= 9) ||
    ((n % 100) >= 11 && (n % 100) <= 14) && n == Math.floor(n)) {
  return 'many';
} |
<|file_name|>sunat_1_1_print.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 NUMA Extreme Systems (www.numaes.com) for Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import time
from openerp.report import report_sxw
class sunat_1_1_report(report_sxw.rml_parse):
    def __init__(self, cr, uid, name, context=None):
        super(sunat_1_1_report, self).__init__(cr, uid, name, context)
        self.localcontext.update( {
            'time': time,<|fim▁hole|>
        self.context = context
report_sxw.report_sxw('report.l10n_pe.sunat_1_1', 'l10n_pe.ple_1_1',
'addons/l10n_pe_ple01/report/sunat_1_1.rml', parser=sunat_1_1_report, header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | }) |