prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>cast.py<|end_file_name|><|fim▁begin|>"""
homeassistant.components.media_player.chromecast
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to interact with Cast devices on the network.
WARNING: This platform is currently not working due to a changed Cast API
"""
import logging
from homeassistant.const import (
STATE_PLAYING, STATE_PAUSED, STATE_IDLE, STATE_OFF,
STATE_UNKNOWN, CONF_HOST)
from homeassistant.components.media_player import (
MediaPlayerDevice,
SUPPORT_PAUSE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_MUTE,
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_YOUTUBE,
SUPPORT_PREVIOUS_TRACK, SUPPORT_NEXT_TRACK,
MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO)
REQUIREMENTS = ['pychromecast==0.6.10']
CONF_IGNORE_CEC = 'ignore_cec'
CAST_SPLASH = 'https://home-assistant.io/images/cast/splash.png'
SUPPORT_CAST = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_YOUTUBE
KNOWN_HOSTS = []
# pylint: disable=invalid-name
cast = None
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Sets up the cast platform.

    Discovers Chromecast devices from the discovery info handed in by Home
    Assistant, from an explicitly configured host, or by scanning the
    network, and adds a CastDevice entity for each new host.
    """
    global cast
    import pychromecast
    cast = pychromecast

    logger = logging.getLogger(__name__)

    # Import CEC ignore attributes so pychromecast skips those inputs.
    ignore_cec = config.get(CONF_IGNORE_CEC, [])
    if isinstance(ignore_cec, list):
        cast.IGNORE_CEC += ignore_cec
    else:
        # Fixed typo in the error message: "conig" -> "config".
        logger.error('Chromecast config, %s must be a list.', CONF_IGNORE_CEC)

    if discovery_info and discovery_info[0] not in KNOWN_HOSTS:
        hosts = [discovery_info[0]]
    elif CONF_HOST in config:
        hosts = [config[CONF_HOST]]
    else:
        hosts = (host_port[0] for host_port
                 in cast.discover_chromecasts()
                 if host_port[0] not in KNOWN_HOSTS)

    casts = []
    for host in hosts:
        try:
            casts.append(CastDevice(host))
        except cast.ChromecastConnectionError:
            # Device disappeared between discovery and connect; skip it.
            pass
        else:
            KNOWN_HOSTS.append(host)

    add_devices(casts)
class CastDevice(MediaPlayerDevice):
""" Represents a Cast device on the network. """
# pylint: disable=too-many-public-methods
def __init__(self, host):
    """ Connects to the Chromecast at *host* and registers listeners. """
    import pychromecast.controllers.youtube as youtube
    # `cast` is the pychromecast module, stored globally by setup_platform().
    self.cast = cast.Chromecast(host)
    # Register a YouTube controller so play_youtube() can drive the app.
    self.youtube = youtube.YouTubeController()
    self.cast.register_handler(self.youtube)
    # Listen for receiver (volume/app) and media (playback) status pushes;
    # pychromecast will invoke new_cast_status()/new_media_status() on us.
    self.cast.socket_client.receiver_controller.register_status_listener(
        self)
    self.cast.socket_client.media_controller.register_status_listener(self)
    # Seed the cached status so properties work before the first push.
    self.cast_status = self.cast.status
    self.media_status = self.cast.media_controller.status
# Entity properties and methods
@property
def should_poll(self):
    # Status updates are pushed through the registered listeners, so Home
    # Assistant does not need to poll this entity.
    return False
@property
def name(self):
    """ Friendly name the Chromecast reports for itself. """
    device_info = self.cast.device
    return device_info.friendly_name
# MediaPlayerDevice properties and methods
@property
def state(self):
    """ Playback state of the player. """
    status = self.media_status
    if status is None:
        # No media status yet -- cannot tell what the player is doing.
        return STATE_UNKNOWN
    if status.player_is_playing:
        return STATE_PLAYING
    if status.player_is_paused:
        return STATE_PAUSED
    if status.player_is_idle:
        return STATE_IDLE
    if self.cast.is_idle:
        return STATE_OFF
    return STATE_UNKNOWN
@property
def volume_level(self):
    """ Volume of the media player as a float in the range 0..1. """
    status = self.cast_status
    if not status:
        return None
    return status.volume_level
@property
def is_volume_muted(self):
    """ True when the device volume is currently muted, if known. """
    status = self.cast_status
    if not status:
        return None
    return status.volume_muted
@property
def media_content_id(self):
    """ Identifier of the media that is currently loaded, if any. """
    status = self.media_status
    if not status:
        return None
    return status.content_id
@property
def media_content_type(self):
    """ Kind of media currently playing (TV show, video or music). """
    status = self.media_status
    if status is None:
        return None
    if status.media_is_tvshow:
        return MEDIA_TYPE_TVSHOW
    if status.media_is_movie:
        return MEDIA_TYPE_VIDEO
    if status.media_is_musictrack:
        return MEDIA_TYPE_MUSIC
    # Unrecognized media class.
    return None
@property
def media_duration(self):
    """ Duration of the current media in seconds, if known. """
    status = self.media_status
    if not status:
        return None
    return status.duration
@property
def media_image_url(self):
    """ URL of an artwork image for the current media, if available. """
    status = self.media_status
    if status is None:
        return None
    images = status.images
    if not images:
        return None
    # Use the first image the receiver advertised.
    return images[0].url
@property
def media_title(self):
    """ Title of the media that is currently playing, if known. """
    status = self.media_status
    if not status:
        return None
    return status.title
@property
def media_artist(self):
    """ Artist of the currently playing track. (Music track only) """
    status = self.media_status
    if not status:
        return None
    return status.artist
@property
def media_album(self):
    """ Album name of the current track. (Music track only) """
    status = self.media_status
    if not status:
        return None
    return status.album_name
@property
def media_album_artist(self):
    """ Album artist of current playing media. (Music track only) """
    return self.media_status.album_artist if self.media_status else None
@property
def media_track(self):
    """ Track number of the current track. (Music track only) """
    status = self.media_status
    if not status:
        return None
    return status.track
@property
def media_series_title(self):
""" Series title of current playing media. (TV Show only)"""
return self.media_status.series_title if self.media_status else None<|fim▁hole|> """ Season of current playing media. (TV Show only) """
return self.media_status.season if self.media_status else None
@property
def media_episode(self):
    """ Episode number of the current media. (TV Show only) """
    status = self.media_status
    if not status:
        return None
    return status.episode
@property
def app_id(self):
    """ ID of the app currently running on the Chromecast, if any. """
    return self.cast.app_id
@property
def app_name(self):
    """ Display name of the app currently running on the Chromecast. """
    return self.cast.app_display_name
@property
def supported_media_commands(self):
    """ Flags of media commands that are supported. """
    # The capability set is static; see SUPPORT_CAST at module level.
    return SUPPORT_CAST
def turn_on(self):
    """ Turns on the ChromeCast. """
    # The only way we can turn the Chromecast on is by launching an app.
    if not self.cast.status or not self.cast.status.is_active_input:
        if self.cast.app_id:
            self.cast.quit_app()
        # Show the Home Assistant splash image as a visible "on" state.
        self.cast.play_media(
            CAST_SPLASH, cast.STREAM_TYPE_BUFFERED)
def turn_off(self):
    """ Turns Chromecast off by quitting whatever app is running. """
    self.cast.quit_app()
def mute_volume(self, mute):
    """ Mutes (True) or unmutes (False) the volume. """
    self.cast.set_volume_muted(mute)
def set_volume_level(self, volume):
    """ Sets the volume level; *volume* is in the range 0..1. """
    self.cast.set_volume(volume)
def media_play(self):
    """ Send play command. """
    self.cast.media_controller.play()
def media_pause(self):
    """ Send pause command to the active media session. """
    self.cast.media_controller.pause()
def media_previous_track(self):
    """ Send previous track command. """
    # pychromecast exposes "previous" as rewind on the media controller.
    self.cast.media_controller.rewind()
def media_next_track(self):
    """ Send next track command. """
    # pychromecast exposes "next" as skip on the media controller.
    self.cast.media_controller.skip()
def media_seek(self, position):
    """ Seek the media to a specific location. """
    # `position` is presumably seconds from the start -- confirm against
    # pychromecast's MediaController.seek documentation.
    self.cast.media_controller.seek(position)
def play_youtube(self, media_id):
    """ Plays a YouTube media. """
    # media_id is handed straight to the registered YouTube controller
    # (expected to be a YouTube video id).
    self.youtube.play_video(media_id)
# implementation of chromecast status_listener methods
def new_cast_status(self, status):
    """ Called by pychromecast when a new receiver status is pushed. """
    self.cast_status = status
    # Tell Home Assistant to refresh this entity's state.
    self.update_ha_state()
def new_media_status(self, status):
""" Called when a new media status is received. """
self.media_status = status
self.update_ha_state()<|fim▁end|> |
@property
def media_season(self): |
<|file_name|>event.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by injection-gen. DO NOT EDIT.
package event
import (
context "context"
apieventsv1beta1 "k8s.io/api/events/v1beta1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
v1beta1 "k8s.io/client-go/informers/events/v1beta1"
kubernetes "k8s.io/client-go/kubernetes"
eventsv1beta1 "k8s.io/client-go/listers/events/v1beta1"
cache "k8s.io/client-go/tools/cache"
client "knative.dev/pkg/client/injection/kube/client"
factory "knative.dev/pkg/client/injection/kube/informers/factory"
controller "knative.dev/pkg/controller"
injection "knative.dev/pkg/injection"
logging "knative.dev/pkg/logging"
)
func init() {
injection.Default.RegisterInformer(withInformer)
injection.Dynamic.RegisterDynamicInformer(withDynamicInformer)
}
// Key is used for associating the Informer inside the context.Context.
type Key struct{}
func withInformer(ctx context.Context) (context.Context, controller.Informer) {
f := factory.Get(ctx)
inf := f.Events().V1beta1().Events()
return context.WithValue(ctx, Key{}, inf), inf.Informer()
}
func withDynamicInformer(ctx context.Context) context.Context {
inf := &wrapper{client: client.Get(ctx), resourceVersion: injection.GetResourceVersion(ctx)}
return context.WithValue(ctx, Key{}, inf)
}<|fim▁hole|>func Get(ctx context.Context) v1beta1.EventInformer {
untyped := ctx.Value(Key{})
if untyped == nil {
logging.FromContext(ctx).Panic(
"Unable to fetch k8s.io/client-go/informers/events/v1beta1.EventInformer from context.")
}
return untyped.(v1beta1.EventInformer)
}
type wrapper struct {
client kubernetes.Interface
namespace string
resourceVersion string
}
var _ v1beta1.EventInformer = (*wrapper)(nil)
var _ eventsv1beta1.EventLister = (*wrapper)(nil)
func (w *wrapper) Informer() cache.SharedIndexInformer {
return cache.NewSharedIndexInformer(nil, &apieventsv1beta1.Event{}, 0, nil)
}
func (w *wrapper) Lister() eventsv1beta1.EventLister {
return w
}
func (w *wrapper) Events(namespace string) eventsv1beta1.EventNamespaceLister {
return &wrapper{client: w.client, namespace: namespace, resourceVersion: w.resourceVersion}
}
// SetResourceVersion allows consumers to adjust the minimum resourceVersion
// used by the underlying client. It is not accessible via the standard
// lister interface, but can be accessed through a user-defined interface and
// an implementation check e.g. rvs, ok := foo.(ResourceVersionSetter)
func (w *wrapper) SetResourceVersion(resourceVersion string) {
w.resourceVersion = resourceVersion
}
func (w *wrapper) List(selector labels.Selector) (ret []*apieventsv1beta1.Event, err error) {
lo, err := w.client.EventsV1beta1().Events(w.namespace).List(context.TODO(), v1.ListOptions{
LabelSelector: selector.String(),
ResourceVersion: w.resourceVersion,
})
if err != nil {
return nil, err
}
for idx := range lo.Items {
ret = append(ret, &lo.Items[idx])
}
return ret, nil
}
func (w *wrapper) Get(name string) (*apieventsv1beta1.Event, error) {
return w.client.EventsV1beta1().Events(w.namespace).Get(context.TODO(), name, v1.GetOptions{
ResourceVersion: w.resourceVersion,
})
}<|fim▁end|> |
// Get extracts the typed informer from the context. |
<|file_name|>script.js<|end_file_name|><|fim▁begin|>'use strict';
$(function() {
$.material.init();
$('#libraries').btsListFilter('#searcher', {
itemChild: 'h3',
resetOnBlur: false
});<|fim▁hole|><|fim▁end|> | $('#searcher').focus();
}); |
<|file_name|>sanity_resources.go<|end_file_name|><|fim▁begin|>package hookexecutor
/**
* The file is used to define the resources that will be created during a sanity
* test. All the reosurces are in yaml format, but stored in the file as separate
* strings. In this way, we can package these resources at compile-time, and use<|fim▁hole|>
var sanityPod = `
kind: Pod
apiVersion: v1
metadata:
name: sanity-pod
namespace: default
spec:
containers:
- name: container1
image: alpine:3.8
command: [ "/bin/sh", "-c", "--" ]
args: [ "while true; do sleep 30; done;" ]
volumeMounts:
- name: vol1
mountPath: "/data"
restartPolicy: "Never"
volumes:
- name: vol1
persistentVolumeClaim:
claimName: sanity-pvc
`
var sanityPvc = `
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: sanity-pvc
namespace: default
labels:
pv-name: sanity-pv
annotations:
volume.beta.kubernetes.io/storage-class: ""
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
`<|fim▁end|> | * them at any time without touching local files in the container.
*/ |
<|file_name|>3d0bf4ee67d1_geonature_samples.py<|end_file_name|><|fim▁begin|>"""geonature samples
Revision ID: 3d0bf4ee67d1
Create Date: 2021-09-27 18:00:45.818766
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3d0bf4ee67d1'<|fim▁hole|>)
def upgrade():
    # Insert a sample "hors protocole" (out of protocol) row into the SINP
    # protocols table; the fixed UUID lets downgrade() remove exactly it.
    op.execute("""
INSERT INTO gn_meta.sinp_datatype_protocols (
unique_protocol_id,
protocol_name,
protocol_desc,
id_nomenclature_protocol_type,
protocol_url)
VALUES (
'9ed37cb1-803b-4eec-9ecd-31880475bbe9',
'hors protocole',
'observation réalisées hors protocole',
ref_nomenclatures.get_id_nomenclature('TYPE_PROTOCOLE','1'),
null)
""")
def downgrade():
op.execute("""
DELETE FROM gn_meta.sinp_datatype_protocols
WHERE unique_protocol_id = '9ed37cb1-803b-4eec-9ecd-31880475bbe9'
""")<|fim▁end|> | down_revision = None
branch_labels = ('geonature-samples',)
depends_on = (
'geonature', |
<|file_name|>test_prefetch_related_objects.py<|end_file_name|><|fim▁begin|>from django.db.models import Prefetch, prefetch_related_objects
from django.test import TestCase
from .models import Author, Book, Reader
class PrefetchRelatedObjectsTests(TestCase):
"""
Since prefetch_related_objects() is just the inner part of
prefetch_related(), only do basic tests to ensure its API hasn't changed.
"""
@classmethod
def setUpTestData(cls):
    # Fixture graph: book1 has three authors; authors 1-3 share book1 as
    # their first book, author4's first book is book4.
    cls.book1 = Book.objects.create(title='Poems')
    cls.book2 = Book.objects.create(title='Jane Eyre')
    cls.book3 = Book.objects.create(title='Wuthering Heights')
    cls.book4 = Book.objects.create(title='Sense and Sensibility')

    cls.author1 = Author.objects.create(name='Charlotte', first_book=cls.book1)
    cls.author2 = Author.objects.create(name='Anne', first_book=cls.book1)
    cls.author3 = Author.objects.create(name='Emily', first_book=cls.book1)
    cls.author4 = Author.objects.create(name='Jane', first_book=cls.book4)

    cls.book1.authors.add(cls.author1, cls.author2, cls.author3)
    cls.book2.authors.add(cls.author1)
    cls.book3.authors.add(cls.author3)
    cls.book4.authors.add(cls.author4)

    # Two readers with overlapping reading lists (book4 is shared).
    cls.reader1 = Reader.objects.create(name='Amy')
    cls.reader2 = Reader.objects.create(name='Belinda')

    cls.reader1.books_read.add(cls.book1, cls.book4)
    cls.reader2.books_read.add(cls.book2, cls.book4)
def test_unknown(self):
    # A lookup name that is not a relation on Book must raise.
    book1 = Book.objects.get(id=self.book1.id)
    with self.assertRaises(AttributeError):
        prefetch_related_objects([book1], 'unknown_attribute')
def test_m2m_forward(self):
    # One query to prefetch the m2m; later access hits the cache (0 queries).
    book1 = Book.objects.get(id=self.book1.id)
    with self.assertNumQueries(1):
        prefetch_related_objects([book1], 'authors')
    with self.assertNumQueries(0):
        self.assertEqual(set(book1.authors.all()), {self.author1, self.author2, self.author3})
def test_m2m_reverse(self):
    # Same as the forward case, but through the reverse m2m accessor.
    author1 = Author.objects.get(id=self.author1.id)
    with self.assertNumQueries(1):
        prefetch_related_objects([author1], 'books')
    with self.assertNumQueries(0):
        self.assertEqual(set(author1.books.all()), {self.book1, self.book2})
def test_foreignkey_forward(self):
    # FK prefetch is a single query regardless of how many authors there are.
    authors = list(Author.objects.all())
    with self.assertNumQueries(1):
        prefetch_related_objects(authors, 'first_book')
    with self.assertNumQueries(0):
        [author.first_book for author in authors]
def test_foreignkey_reverse(self):
    # Reverse FK (related name first_time_authors) also prefetches in one query.
    books = list(Book.objects.all())
    with self.assertNumQueries(1):
        prefetch_related_objects(books, 'first_time_authors')
    with self.assertNumQueries(0):
        [list(book.first_time_authors.all()) for book in books]
def test_m2m_then_m2m(self):
    """
    We can follow a m2m and another m2m.
    """
    authors = list(Author.objects.all())
    # One query per relation level: authors->books, then books->read_by.
    with self.assertNumQueries(2):
        prefetch_related_objects(authors, 'books__read_by')
    with self.assertNumQueries(0):
        self.assertEqual(
            [
                [[str(r) for r in b.read_by.all()] for b in a.books.all()]
                for a in authors
            ],
            [
                [['Amy'], ['Belinda']],  # Charlotte - Poems, Jane Eyre
                [['Amy']],  # Anne - Poems
                [['Amy'], []],  # Emily - Poems, Wuthering Heights
                [['Amy', 'Belinda']],  # Jane - Sense and Sense
            ]
        )
<|fim▁hole|> book1 = Book.objects.get(id=self.book1.id)
with self.assertNumQueries(1):
prefetch_related_objects([book1], Prefetch('authors'))
with self.assertNumQueries(0):
self.assertEqual(set(book1.authors.all()), {self.author1, self.author2, self.author3})
def test_prefetch_object_to_attr(self):
    # to_attr stores the prefetched rows in a plain list attribute.
    book1 = Book.objects.get(id=self.book1.id)
    with self.assertNumQueries(1):
        prefetch_related_objects([book1], Prefetch('authors', to_attr='the_authors'))
    with self.assertNumQueries(0):
        self.assertEqual(set(book1.the_authors), {self.author1, self.author2, self.author3})
def test_prefetch_queryset(self):
    # A custom queryset restricts which related rows are prefetched.
    book1 = Book.objects.get(id=self.book1.id)
    with self.assertNumQueries(1):
        prefetch_related_objects(
            [book1],
            Prefetch('authors', queryset=Author.objects.filter(id__in=[self.author1.id, self.author2.id]))
        )
    with self.assertNumQueries(0):
        self.assertEqual(set(book1.authors.all()), {self.author1, self.author2})
def test_prefetch_object(self):
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>extern crate build;
fn main() {
    // Emit cargo link instructions for the native `dnsperf` library via the
    // `build` helper crate. NOTE(review): the second argument presumably
    // selects static vs. dynamic linking -- confirm against the crate docs.
    build::link("dnsperf", true)
}
// Licensed under the MIT License <LICENSE.md> |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
<|fim▁hole|>
class Post(SmartModel):
    """ A blog post managed through smartmin's CRUD/import machinery. """
    # help_text strings surface in the admin and import UIs.
    title = models.CharField(max_length=128,
                             help_text="The title of this blog post, keep it relevant")
    body = models.TextField(help_text="The body of the post, go crazy")
    order = models.IntegerField(help_text="The order for this post, posts with smaller orders come first")
    tags = models.CharField(max_length=128,
                            help_text="Any tags for this post")

    # Default manager plus an "active rows only" manager from smartmin.
    objects = models.Manager()
    active = ActiveManager()

    @classmethod
    def pre_create_instance(cls, field_dict):
        # Import hook called before each instance is created; prefixes the
        # body so imported posts are distinguishable.
        field_dict['body'] = "Body: %s" % field_dict['body']
        return field_dict

    @classmethod
    def prepare_fields(cls, field_dict, import_params=None, user=None):
        # CSV cells arrive as strings like "1.0"; normalize to an int.
        field_dict['order'] = int(float(field_dict['order']))
        return field_dict

    @classmethod
    def validate_import_header(cls, header):
        # Reject import files that lack the mandatory "title" column.
        if 'title' not in header:
            raise Exception('missing "title" header')

    def __unicode__(self):
        return self.title
class Category(SmartModel):
name = models.SlugField(max_length=64, unique=True,
help_text="The name of this category")<|fim▁end|> | from django.db import models
from smartmin.models import SmartModel, ActiveManager |
<|file_name|>nonlinear_elliptic_problem.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015-2022 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
from rbnics.problems.base import NonlinearProblem
from rbnics.problems.elliptic import EllipticProblem
from rbnics.backends import product, sum, transpose
NonlinearEllipticProblem_Base = NonlinearProblem(EllipticProblem)
class NonlinearEllipticProblem(NonlinearEllipticProblem_Base):
<|fim▁hole|> # Call to parent
NonlinearEllipticProblem_Base.__init__(self, V, **kwargs)
# Form names for nonlinear problems
self.terms = ["a", "c", "dc", "f", "s"]
self.terms_order = {"a": 2, "c": 1, "dc": 2, "f": 1, "s": 1}
class ProblemSolver(NonlinearEllipticProblem_Base.ProblemSolver):
def residual_eval(self, solution):
    """ Assembles the nonlinear residual r(u) = A(mu) u + c(u) - f(mu). """
    problem = self.problem
    assembled_operator = dict()
    # Each affine term is a theta-weighted sum of precomputed operators.
    assembled_operator["a"] = sum(product(problem.compute_theta("a"), problem.operator["a"]))
    assembled_operator["c"] = sum(product(problem.compute_theta("c"), problem.operator["c"]))
    assembled_operator["f"] = sum(product(problem.compute_theta("f"), problem.operator["f"]))
    return assembled_operator["a"] * solution + assembled_operator["c"] - assembled_operator["f"]
def jacobian_eval(self, solution):
    """ Assembles the Jacobian J(u) = A(mu) + dc(u) of the residual. """
    problem = self.problem
    assembled_operator = dict()
    # "dc" is the derivative of the nonlinear term c with respect to u.
    assembled_operator["a"] = sum(product(problem.compute_theta("a"), problem.operator["a"]))
    assembled_operator["dc"] = sum(product(problem.compute_theta("dc"), problem.operator["dc"]))
    return assembled_operator["a"] + assembled_operator["dc"]
# Perform a truth evaluation of the output
def _compute_output(self):
    # Output functional s(u): theta-weighted "s" operators applied to the
    # current truth solution.
    self._output = transpose(self._solution) * sum(product(self.compute_theta("s"), self.operator["s"]))
def __init__(self, V, **kwargs): |
<|file_name|>UserRole.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2013 Alfresco Software Limited.
* This file is part of Alfresco
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.po.share.enums;
import java.util.NoSuchElementException;
/**
* This enums used to describe the user roles.
*
* @author cbairaajoni
* @since v1.0
*/
public enum UserRole
{
ALL("All"),
MANAGER("Manager"),
EDITOR("Editor"),
CONSUMER("Consumer"),
COLLABORATOR("Collaborator"),
COORDINATOR("Coordinator"),
CONTRIBUTOR("Contributor"),
SITECONSUMER("Site Consumer"),
SITECONTRIBUTOR("Site Contributor"),
SITEMANAGER("Site Manager"),
SITECOLLABORATOR("Site Collaborator");
<|fim▁hole|> {
roleName = role;
}
/**
 * @return the human-readable display name of this role.
 */
public String getRoleName()
{
    return roleName;
}
/**
 * Resolves a role from its display name, ignoring case.
 *
 * @param name display name, e.g. "Site Manager"
 * @return the matching {@link UserRole}
 * @throws NoSuchElementException if no role has the given name
 */
public static UserRole getUserRoleforName(String name)
{
    for (UserRole role : UserRole.values())
    {
        if (role.getRoleName().equalsIgnoreCase(name))
        {
            return role;
        }
    }
    throw new NoSuchElementException("No Role for value - " + name);
}
}<|fim▁end|> |
private String roleName;
private UserRole(String role)
|
<|file_name|>regression_slope_validator.ts<|end_file_name|><|fim▁begin|>import {ListWrapper} from '@angular/facade';
import {OpaqueToken} from '@angular/core/src/di';
import {Validator} from '../validator';
import {Statistic} from '../statistic';
import {MeasureValues} from '../measure_values';
/**
* A validator that checks the regression slope of a specific metric.
* Waits for the regression slope to be >=0.
*/
export class RegressionSlopeValidator extends Validator {
// TODO(tbosch): use static values when our transpiler supports them
static get SAMPLE_SIZE(): OpaqueToken { return _SAMPLE_SIZE; }
// TODO(tbosch): use static values when our transpiler supports them
static get METRIC(): OpaqueToken { return _METRIC; }
// TODO(tbosch): use static values when our transpiler supports them
static get PROVIDERS(): any[] { return _PROVIDERS; }
_sampleSize: number;
_metric: string;
constructor(sampleSize, metric) {
super();
this._sampleSize = sampleSize;
this._metric = metric;
}
describe(): {[key: string]: any} {
return {'sampleSize': this._sampleSize, 'regressionSlopeMetric': this._metric};
}
validate(completeSample: MeasureValues[]): MeasureValues[] {
  // Once at least `_sampleSize` measurements exist, fit a least-squares
  // line through the last `_sampleSize` values of the watched metric
  // (x = sample index). Accept the sample only when the fitted slope is
  // >= 0, i.e. the metric has stopped regressing; otherwise return null
  // so the benchmark keeps sampling.
  if (completeSample.length >= this._sampleSize) {
    var latestSample = ListWrapper.slice(completeSample, completeSample.length - this._sampleSize,
                                         completeSample.length);
    var xValues = [];
    var yValues = [];
    for (var i = 0; i < latestSample.length; i++) {
      // For now, we only use the array index as x value.
      // TODO(tbosch): think about whether we should use time here instead
      xValues.push(i);
      yValues.push(latestSample[i].values[this._metric]);
    }
    var regressionSlope = Statistic.calculateRegressionSlope(
        xValues, Statistic.calculateMean(xValues), yValues, Statistic.calculateMean(yValues));
    return regressionSlope >= 0 ? latestSample : null;
  } else {
    return null;
  }
}
}
var _SAMPLE_SIZE = new OpaqueToken('RegressionSlopeValidator.sampleSize');
var _METRIC = new OpaqueToken('RegressionSlopeValidator.metric');
var _PROVIDERS = [
{
provide: RegressionSlopeValidator
useFactory: (sampleSize, metric) => new RegressionSlopeValidator(sampleSize, metric),
deps: [_SAMPLE_SIZE, _METRIC]
},
{provide: _SAMPLE_SIZE, useValue: 10},
{provide: _METRIC, useValue: 'scriptTime'}<|fim▁hole|>];<|fim▁end|> | |
<|file_name|>use-get-thumbnail.js<|end_file_name|><|fim▁begin|>// @flow
import React from 'react';
import { generateStreamUrl } from 'util/web';
export default function useGetThumbnail(
uri: string,
claim: ?Claim,
streamingUrl: ?string,
getFile: string => void,
shouldHide: boolean
) {
let thumbnailToUse;
// $FlowFixMe
const isImage = claim && claim.value && claim.value.stream_type === 'image';
// $FlowFixMe
const isFree = claim && claim.value && (!claim.value.fee || Number(claim.value.fee.amount) <= 0);
const thumbnailInClaim = claim && claim.value && claim.value.thumbnail && claim.value.thumbnail.url;
// @if TARGET='web'
if (thumbnailInClaim) {
thumbnailToUse = thumbnailInClaim;
} else if (claim && isImage && isFree) {
thumbnailToUse = generateStreamUrl(claim.name, claim.claim_id);
}
// @endif
// @if TARGET='app'
thumbnailToUse = thumbnailInClaim;
//
// Temporarily disabled until we can call get with "save_blobs: off"
//
// React.useEffect(() => {<|fim▁hole|> // } else if (!shouldHide) {
// getFile(uri);
// }
// }
// }, [hasClaim, isFree, isImage, streamingUrl, uri, shouldHide]);
// @endif
const [thumbnail, setThumbnail] = React.useState(thumbnailToUse);
React.useEffect(() => {
setThumbnail(thumbnailToUse);
}, [thumbnailToUse]);
return thumbnail;
}<|fim▁end|> | // if (hasClaim && isImage && isFree) {
// if (streamingUrl) {
// setThumbnail(streamingUrl); |
<|file_name|>ChunkCache.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Jiffy, licensed under the MIT License (MIT).
*
* Copyright (c) OreCruncher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.blockartistry.world;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.EnumSkyBlock;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.chunk.Chunk;
import net.minecraftforge.common.util.ForgeDirection;
/**
* Used by the client renderer as well as path finding routines. Changes:
*
* + Chunk array vs. matrix
*
* + Removed unnecessary checks
*/
public class ChunkCache implements IBlockAccess {
private final int chunkX;
private final int chunkZ;
private final int dimX;
private final int dimZ;
private final Chunk[] chunkArray;
private final boolean isEmpty;
private final World worldObj;
/**
 * Caches every chunk overlapping the (buffered) bounding box in a flat
 * array indexed by [x + z * dimX], replacing vanilla's 2-D matrix.
 */
public ChunkCache(World world, int x1, int y1, int z1, int x2, int y2, int z2, int buffer) {
    this.worldObj = world;
    this.chunkX = x1 - buffer >> 4;
    this.chunkZ = z1 - buffer >> 4;
    int l1 = x2 + buffer >> 4;
    int i2 = z2 + buffer >> 4;
    this.dimX = l1 - this.chunkX + 1;
    this.dimZ = i2 - this.chunkZ + 1;
    this.chunkArray = new Chunk[this.dimX * this.dimZ];
    boolean emptyFlag = true;
    for (int j2 = this.chunkX; j2 <= l1; ++j2) {
        for (int k2 = this.chunkZ; k2 <= i2; ++k2) {
            final int idx = j2 - this.chunkX + (k2 - this.chunkZ) * this.dimX;
            final Chunk chunk = this.chunkArray[idx] = world.getChunkFromChunkCoords(j2, k2);
            assert chunk != null;
            // Record whether any cached chunk has blocks in [y1, y2].
            if (emptyFlag && !chunk.getAreLevelsEmpty(y1, y2))
                emptyFlag = false;
        }
    }
    this.isEmpty = emptyFlag;
}
/**
* set by !chunk.getAreLevelsEmpty
*/
@SideOnly(Side.CLIENT)
public boolean extendedLevelsInChunkCache() {
return this.isEmpty;
}
/** Returns the block at the given world coordinates, or air for out-of-range Y. */
public Block getBlock(final int x, final int y, final int z) {
    // Seen out of range Ys come in. Haven't seen out of range
    // X or Z. Relaxing range checks as not needed.
    if (y < 0 || y > 255)
        return Blocks.air;
    final int arrayX = (x >> 4) - this.chunkX;
    final int arrayZ = (z >> 4) - this.chunkZ;
    assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
    // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
    return this.chunkArray[arrayX + arrayZ * this.dimX].getBlock(x & 15, y, z & 15);
}
/** Returns the tile entity at the given coordinates, or null for out-of-range Y. */
public TileEntity getTileEntity(final int x, final int y, final int z) {
    // Seen out of range Ys come in. Haven't seen out of range
    // X or Z. Relaxing range checks as not needed.
    if (y < 0 || y > 255)
        return null;
    final int arrayX = (x >> 4) - this.chunkX;
    final int arrayZ = (z >> 4) - this.chunkZ;
    assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
    // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
    return this.chunkArray[arrayX + arrayZ * this.dimX].func_150806_e(x & 15, y, z & 15);
}
/** Returns the block metadata at the given coordinates, or 0 for out-of-range Y. */
public int getBlockMetadata(final int x, final int y, final int z) {
    // Seen out of range Ys come in. Haven't seen out of range
    // X or Z. Relaxing range checks as not needed.
    if (y < 0 || y > 255)
        return 0;
    final int arrayX = (x >> 4) - this.chunkX;
    final int arrayZ = (z >> 4) - this.chunkZ;
    assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
    // if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
    return this.chunkArray[arrayX + arrayZ * this.dimX].getBlockMetadata(x & 15, y, z & 15);
}
public boolean isAirBlock(final int x, final int y, final int z) {
    // A position counts as air when its block's material is air.
    final Material material = getBlock(x, y, z).getMaterial();
    return material == Material.air;
}
public int isBlockProvidingPowerTo(final int x, final int y, final int z, final int dir) {
return getBlock(x, y, z).isProvidingStrongPower(this, x, y, z, dir);
}
/**
* Any Light rendered on a 1.8 Block goes through here
*/
@SideOnly(Side.CLIENT)
public int getLightBrightnessForSkyBlocks(final int x, final int y, final int z, int p_72802_4_) {
int i1 = this.getSkyBlockTypeBrightness(EnumSkyBlock.Sky, x, y, z);
int j1 = this.getSkyBlockTypeBrightness(EnumSkyBlock.Block, x, y, z);
if (j1 < p_72802_4_) {
j1 = p_72802_4_;
}
return i1 << 20 | j1 << 4;
}
/**
* Gets the biome for a given set of x/z coordinates
*/
@SideOnly(Side.CLIENT)
public BiomeGenBase getBiomeGenForCoords(final int x, final int z) {
return this.worldObj.getBiomeGenForCoords(x, z);
}
/**
* Brightness for SkyBlock.Sky is clear white and (through color computing
* it is assumed) DEPENDENT ON DAYTIME. Brightness for SkyBlock.Block is
* yellowish and independent.
*/
@SideOnly(Side.CLIENT)
public int getSkyBlockTypeBrightness(final EnumSkyBlock skyBlock, final int x, int y, final int z) {
if (x >= -30000000 && z >= -30000000 && x < 30000000 && z <= 30000000) {
if (skyBlock == EnumSkyBlock.Sky && this.worldObj.provider.hasNoSky)
return 0;
if (y < 0)
y = 0;
else if (y > 255)
y = 255;
final int arrayX = (x >> 4) - this.chunkX;
final int arrayZ = (z >> 4) - this.chunkZ;
assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
// if (l >= 0 && l < this.dimX && i1 >= 0 && i1 < this.dimZ)
final Chunk chunk = this.chunkArray[arrayX + arrayZ * this.dimX];
if (chunk.getBlock(x & 15, y, z & 15).getUseNeighborBrightness()) {
int l = this.getSpecialBlockBrightness(skyBlock, x, y + 1, z);
int i1 = this.getSpecialBlockBrightness(skyBlock, x + 1, y, z);
int j1 = this.getSpecialBlockBrightness(skyBlock, x - 1, y, z);
int k1 = this.getSpecialBlockBrightness(skyBlock, x, y, z + 1);
int l1 = this.getSpecialBlockBrightness(skyBlock, x, y, z - 1);
if (i1 > l) {
l = i1;
}
if (j1 > l) {
l = j1;
}
<|fim▁hole|>
if (l1 > l) {
l = l1;
}
return l;
} else {
return chunk.getSavedLightValue(skyBlock, x & 15, y, z & 15);
}
} else {
return skyBlock.defaultLightValue;
}
}
/**
 * Saved light value lookup; is only used on stairs and tilled fields
 * (blocks that defer to neighbor brightness).
 */
@SideOnly(Side.CLIENT)
public int getSpecialBlockBrightness(final EnumSkyBlock skyBlock, final int x, int y, final int z) {
    // NOTE(review): the bound check uses "z <= 30000000" while x uses "<";
    // preserved as-is to keep behavior identical.
    if (x < -30000000 || z < -30000000 || x >= 30000000 || z > 30000000) {
        return skyBlock.defaultLightValue;
    }
    y = Math.max(0, Math.min(255, y));
    final int arrayX = (x >> 4) - this.chunkX;
    final int arrayZ = (z >> 4) - this.chunkZ;
    assert (arrayX >= 0 && arrayX < this.dimX && arrayZ >= 0 && arrayZ < this.dimZ);
    return this.chunkArray[arrayX + arrayZ * this.dimX].getSavedLightValue(skyBlock, x & 15, y, z & 15);
}
/**
 * Returns current world height; constant 256 for this chunk cache.
 */
@SideOnly(Side.CLIENT)
public int getHeight() {
    return 256;
}
/**
 * Whether the given side of the block at (x, y, z) is solid; outside the
 * world bounds the caller-supplied default is returned.
 */
@Override
public boolean isSideSolid(final int x, final int y, final int z, final ForgeDirection side,
        final boolean _default) {
    final boolean inBounds = x >= -30000000 && z >= -30000000 && x < 30000000 && z < 30000000;
    return inBounds ? getBlock(x, y, z).isSideSolid(this, x, y, z, side) : _default;
}
}
<|file_name|>KafkaRequester.java<|end_file_name|><|fim▁begin|>/*
*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
*
* contributor license agreements. See the NOTICE file distributed with
*
* this work for additional information regarding copyright ownership.
*
* The ASF licenses this file to You under the Apache License, Version 2.0
*
* (the "License"); you may not use this file except in compliance with
*
* the License. You may obtain a copy of the License at
*
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
*
* Unless required by applicable law or agreed to in writing, software
*
* distributed under the License is distributed on an "AS IS" BASIS,
*
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
*
* limitations under the License.
*
* /
*/
package org.apache.kylin.source.kafka.util;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.Nullable;
import kafka.api.FetchRequestBuilder;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.cluster.Broker;
import kafka.common.TopicAndPartition;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.OffsetRequest;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.TopicMetadataResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import org.apache.kylin.source.kafka.TopicMeta;
import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
*/
public final class KafkaRequester {
private static final Logger logger = LoggerFactory.getLogger(KafkaRequester.class);
private static ConcurrentMap<String, SimpleConsumer> consumerCache = Maps.newConcurrentMap();
static {
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
KafkaRequester.shutdown();
}
}));
}
private static SimpleConsumer getSimpleConsumer(Broker broker, int timeout, int bufferSize, String clientId) {
String key = createKey(broker, timeout, bufferSize, clientId);
if (consumerCache.containsKey(key)) {
return consumerCache.get(key);
} else {
consumerCache.putIfAbsent(key, new SimpleConsumer(broker.host(), broker.port(), timeout, bufferSize, clientId));
return consumerCache.get(key);
}
}
private static String createKey(Broker broker, int timeout, int bufferSize, String clientId) {
return broker.getConnectionString() + "_" + timeout + "_" + bufferSize + "_" + clientId;
}
public static TopicMeta getKafkaTopicMeta(KafkaClusterConfig kafkaClusterConfig) {
SimpleConsumer consumer;
for (Broker broker : kafkaClusterConfig.getBrokers()) {
consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), "topic_meta_lookup");
List<String> topics = Collections.singletonList(kafkaClusterConfig.getTopic());
TopicMetadataRequest req = new TopicMetadataRequest(topics);
TopicMetadataResponse resp = consumer.send(req);
final List<TopicMetadata> topicMetadatas = resp.topicsMetadata();
if (topicMetadatas.size() != 1) {
break;
}
final TopicMetadata topicMetadata = topicMetadatas.get(0);
if (topicMetadata.errorCode() != 0) {
break;
}
List<Integer> partitionIds = Lists.transform(topicMetadata.partitionsMetadata(), new Function<PartitionMetadata, Integer>() {
@Nullable
@Override
public Integer apply(PartitionMetadata partitionMetadata) {
return partitionMetadata.partitionId();
}
});
return new TopicMeta(kafkaClusterConfig.getTopic(), partitionIds);
}
logger.debug("cannot find topic:" + kafkaClusterConfig.getTopic());
return null;
}
public static PartitionMetadata getPartitionMetadata(String topic, int partitionId, List<Broker> brokers, KafkaClusterConfig kafkaClusterConfig) {
SimpleConsumer consumer;
for (Broker broker : brokers) {
consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), "topic_meta_lookup");
List<String> topics = Collections.singletonList(topic);
TopicMetadataRequest req = new TopicMetadataRequest(topics);
TopicMetadataResponse resp = consumer.send(req);
final List<TopicMetadata> topicMetadatas = resp.topicsMetadata();
if (topicMetadatas.size() != 1) {
logger.warn("invalid topicMetadata size:" + topicMetadatas.size());
break;
}
final TopicMetadata topicMetadata = topicMetadatas.get(0);
if (topicMetadata.errorCode() != 0) {
logger.warn("fetching topicMetadata with errorCode:" + topicMetadata.errorCode());
break;
}
for (PartitionMetadata partitionMetadata : topicMetadata.partitionsMetadata()) {
if (partitionMetadata.partitionId() == partitionId) {
return partitionMetadata;
}
}
}<|fim▁hole|>
public static FetchResponse fetchResponse(String topic, int partitionId, long offset, Broker broker, KafkaClusterConfig kafkaClusterConfig) {
final String clientName = "client_" + topic + "_" + partitionId;
SimpleConsumer consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), clientName);
kafka.api.FetchRequest req = new FetchRequestBuilder().clientId(clientName).addFetch(topic, partitionId, offset, 1048576) // Note: this fetchSize of 100000 might need to be increased if large batches are written to Kafka, 1048576 is the default value on shell
.build();
return consumer.fetch(req);
}
public static long getLastOffset(String topic, int partitionId, long whichTime, Broker broker, KafkaClusterConfig kafkaClusterConfig) {
String clientName = "client_" + topic + "_" + partitionId;
SimpleConsumer consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), clientName);
TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partitionId);
Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
OffsetRequest request = new OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
OffsetResponse response = consumer.getOffsetsBefore(request);
if (response.hasError()) {
logger.error("Error fetching data Offset Data the Broker. Reason: " + response.errorCode(topic, partitionId));
return 0;
}
long[] offsets = response.offsets(topic, partitionId);
return offsets[0];
}
public static void shutdown() {
for (SimpleConsumer simpleConsumer : consumerCache.values()) {
simpleConsumer.close();
}
}
}<|fim▁end|> | logger.debug("cannot find PartitionMetadata, topic:" + topic + " partitionId:" + partitionId);
return null;
} |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for ahaha project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also<|fim▁hole|>might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ahaha.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)<|fim▁end|> | |
<|file_name|>get.js<|end_file_name|><|fim▁begin|>var editMode = portal.request.mode == 'edit';
var content = portal.content;
var component = portal.component;
var layoutRegions = portal.layoutRegions;
var body = system.thymeleaf.render('view/layout-70-30.html', {
title: content.displayName,
path: content.path,
name: content.name,
editable: editMode,
resourcesPath: portal.url.createResourceUrl(''),
component: component,
leftRegion: layoutRegions.getRegion("left"),<|fim▁hole|>
portal.response.body = body;
portal.response.contentType = 'text/html';
portal.response.status = 200;<|fim▁end|> | rightRegion: layoutRegions.getRegion("right")
}); |
<|file_name|>log.js<|end_file_name|><|fim▁begin|>/**
* Logger configuration
*
* Configure the log level for your app, as well as the transport
* (Underneath the covers, Sails uses Winston for logging, which
* allows for some pretty neat custom transports/adapters for log messages)
*
* For more information on the Sails logger, check out:
* http://sailsjs.org/#documentation
*/
module.exports = {
// Valid `level` configs:
// i.e. the minimum log level to capture with sails.log.*()
//
// 'error' : Display calls to `.error()`
// 'warn' : Display calls from `.error()` to `.warn()`
// 'debug' : Display calls from `.error()`, `.warn()` to `.debug()`
// 'info' : Display calls from `.error()`, `.warn()`, `.debug()` to `.info()`
// 'verbose': Display calls from `.error()`, `.warn()`, `.debug()`, `.info()` to `.verbose()`<|fim▁hole|> level: 'info'
}
};<|fim▁end|> | //
log: { |
<|file_name|>control_area.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANDABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" A <b>control area </b>is a grouping of <b>generating units</b> and/or loads and a cutset of tie lines (as <b>terminals</b>) which may be used for a variety of purposes including automatic generation control, powerflow solution area interchange control specification, and input to load forecasting. Note that any number of overlapping control area specifications can be superimposed on the physical model.
"""
# <<< imports
# @generated
from ucte.core.identified_object import IdentifiedObject
from ucte.domain import ActivePower
from google.appengine.ext import db
# >>> imports
class ControlArea(IdentifiedObject):
    """ A <b>control area </b>is a grouping of <b>generating units</b> and/or loads and a cutset of tie lines (as <b>terminals</b>) which may be used for a variety of purposes including automatic generation control, powerflow solution area interchange control specification, and input to load forecasting. Note that any number of overlapping control area specifications can be superimposed on the physical model.
    """
    # NOTE: generated CIM profile code; the attribute/reference marker
    # comments below follow the generator's "<<< ... >>>" convention. A
    # corrupted marker token that made this module unparseable was removed.

    # <<< control_area.attributes
    # @generated
    # Active power net interchange tolerance
    p_tolerance = ActivePower

    # The specified positive net interchange into the control area.
    net_interchange = ActivePower

    # >>> control_area.attributes

    # <<< control_area.references
    # @generated
    # Virtual property. The topological nodes included in the control area.
    pass # topological_node

    # Virtual property. The tie flows associated with the control area.
    pass # tie_flow

    # Virtual property. The generating unit specifications for the control area.
    pass # control_area_generating_unit

    # >>> control_area.references

    # <<< control_area.operations
    # @generated
    # >>> control_area.operations


# EOF -------------------------------------------------------------------------
<|file_name|>connection.rs<|end_file_name|><|fim▁begin|>use std::io::{BufRead, BufReader, Write};
use std::net::{self, TcpStream};
use std::path::PathBuf;
use std::str::from_utf8;
use std::time::Duration;
use url;
use cmd::{cmd, pipe, Pipeline};
use parser::Parser;
use types::{
from_redis_value, ErrorKind, FromRedisValue, RedisError, RedisResult, ToRedisArgs, Value,
};
#[cfg(unix)]
use std::os::unix::net::UnixStream;
static DEFAULT_PORT: u16 = 6379;
/// This function takes a redis URL string and parses it into a URL
/// as used by rust-url. This is necessary as the default parser does
/// not understand how redis URLs function.
pub fn parse_redis_url(input: &str) -> Result<url::Url, ()> {
    let parsed = match url::Url::parse(input) {
        Ok(parsed) => parsed,
        Err(_) => return Err(()),
    };
    // Only redis-style schemes are accepted.
    match parsed.scheme() {
        "redis" | "redis+unix" | "unix" => Ok(parsed),
        _ => Err(()),
    }
}
/// Defines the connection address.
///
/// Not all connection addresses are supported on all platforms. For instance
/// to connect to a unix socket you need to run this on an operating system
/// that supports them.
#[derive(Clone, Debug, PartialEq)]
pub enum ConnectionAddr {
    /// A TCP address. Format for this is `(host, port)`.
    Tcp(String, u16),
    /// A unix-domain socket. Format for this is the path to the unix socket.
    Unix(PathBuf),
}
impl ConnectionAddr {
    /// Not every platform supports every kind of connection address, so this
    /// is a quick way to check whether a given address can be used here:
    /// TCP is always available, unix sockets only on unix platforms (on
    /// older rust versions they also required an explicit feature).
    pub fn is_supported(&self) -> bool {
        if let ConnectionAddr::Unix(_) = *self {
            cfg!(unix)
        } else {
            true
        }
    }
}
/// Holds the connection information that redis should use for connecting.
#[derive(Clone, Debug)]
pub struct ConnectionInfo {
    /// A boxed connection address for where to connect to.
    pub addr: Box<ConnectionAddr>,
    /// The database number to use. This is usually `0`.
    pub db: i64,
    /// Optionally a password that should be used for connection
    /// (sent via `AUTH` before any other command).
    pub passwd: Option<String>,
}
/// Converts an object into a connection info struct. This allows the
/// constructor of the client to accept connection information in a
/// range of different formats.
pub trait IntoConnectionInfo {
    /// Performs the conversion; implementations typically fail with
    /// `InvalidClientConfig` when the input cannot be interpreted.
    fn into_connection_info(self) -> RedisResult<ConnectionInfo>;
}
impl IntoConnectionInfo for ConnectionInfo {
    // Identity conversion: an already-built ConnectionInfo passes through.
    fn into_connection_info(self) -> RedisResult<ConnectionInfo> {
        Ok(self)
    }
}
impl<'a> IntoConnectionInfo for &'a str {
    /// Parses the string as a redis URL first, then defers to the URL
    /// implementation of this trait.
    fn into_connection_info(self) -> RedisResult<ConnectionInfo> {
        if let Ok(parsed) = parse_redis_url(self) {
            parsed.into_connection_info()
        } else {
            fail!((ErrorKind::InvalidClientConfig, "Redis URL did not parse"))
        }
    }
}
// Builds a `ConnectionInfo` from a `redis://` URL: host/port from the
// authority, database number from the path, password (percent-decoded)
// from the userinfo section.
fn url_to_tcp_connection_info(url: url::Url) -> RedisResult<ConnectionInfo> {
    Ok(ConnectionInfo {
        addr: Box::new(ConnectionAddr::Tcp(
            match url.host() {
                Some(host) => host.to_string(),
                None => fail!((ErrorKind::InvalidClientConfig, "Missing hostname")),
            },
            url.port().unwrap_or(DEFAULT_PORT),
        )),
        // An empty path means the default database 0; otherwise the path
        // (without surrounding slashes) must parse as an integer.
        db: match url.path().trim_matches('/') {
            "" => 0,
            path => unwrap_or!(
                path.parse::<i64>().ok(),
                fail!((ErrorKind::InvalidClientConfig, "Invalid database number"))
            ),
        },
        // Passwords may contain percent-escapes and must decode to UTF-8.
        passwd: match url.password() {
            Some(pw) => match url::percent_encoding::percent_decode(pw.as_bytes()).decode_utf8() {
                Ok(decoded) => Some(decoded.into_owned()),
                Err(_) => fail!((
                    ErrorKind::InvalidClientConfig,
                    "Password is not valid UTF-8 string"
                )),
            },
            None => None,
        },
    })
}
#[cfg(unix)]
fn url_to_unix_connection_info(url: url::Url) -> RedisResult<ConnectionInfo> {
Ok(ConnectionInfo {<|fim▁hole|> addr: Box::new(ConnectionAddr::Unix(unwrap_or!(
url.to_file_path().ok(),
fail!((ErrorKind::InvalidClientConfig, "Missing path"))
))),
db: match url
.query_pairs()
.into_iter()
.filter(|&(ref key, _)| key == "db")
.next()
{
Some((_, db)) => unwrap_or!(
db.parse::<i64>().ok(),
fail!((ErrorKind::InvalidClientConfig, "Invalid database number"))
),
None => 0,
},
passwd: url.password().and_then(|pw| Some(pw.to_string())),
})
}
// Fallback for platforms without unix sockets: always an error.
#[cfg(not(unix))]
fn url_to_unix_connection_info(_: url::Url) -> RedisResult<ConnectionInfo> {
    fail!((
        ErrorKind::InvalidClientConfig,
        "Unix sockets are not available on this platform."
    ));
}
impl IntoConnectionInfo for url::Url {
    /// Dispatches on the URL scheme: TCP for `redis`, unix sockets for
    /// `unix`/`redis+unix`, otherwise an invalid-config error.
    fn into_connection_info(self) -> RedisResult<ConnectionInfo> {
        let is_tcp = self.scheme() == "redis";
        let is_unix = self.scheme() == "unix" || self.scheme() == "redis+unix";
        if is_tcp {
            url_to_tcp_connection_info(self)
        } else if is_unix {
            url_to_unix_connection_info(self)
        } else {
            fail!((
                ErrorKind::InvalidClientConfig,
                "URL provided is not a redis URL"
            ));
        }
    }
}
// A buffered TCP stream plus a flag tracking whether the connection is
// still considered usable.
struct TcpConnection {
    reader: BufReader<TcpStream>,
    open: bool,
}

// Unix-socket counterpart of `TcpConnection`.
#[cfg(unix)]
struct UnixConnection {
    sock: BufReader<UnixStream>,
    open: bool,
}

// Transport-level connection: either TCP or (on unix) a unix socket.
enum ActualConnection {
    Tcp(TcpConnection),
    #[cfg(unix)]
    Unix(UnixConnection),
}
/// Represents a stateful redis TCP connection.
pub struct Connection {
    // Underlying transport (TCP or unix socket).
    con: ActualConnection,
    // Database number this connection was opened against.
    db: i64,
    /// Flag indicating whether the connection was left in the PubSub state after dropping `PubSub`.
    ///
    /// This flag is checked when attempting to send a command, and if it's raised, we attempt to
    /// exit the pubsub state before executing the new request.
    pubsub: bool,
}
/// Represents a pubsub connection.
pub struct PubSub<'a> {
    // Borrowed connection; restored to a normal state when the PubSub drops.
    con: &'a mut Connection,
}

/// Represents a pubsub message.
pub struct Msg {
    // Raw payload value as received from the server.
    payload: Value,
    // Channel the message was published on.
    channel: Value,
    // Pattern that matched; set for pattern subscriptions only.
    pattern: Option<Value>,
}
impl ActualConnection {
    /// Opens the transport described by `addr`. Both TCP and (on unix)
    /// unix-socket streams are wrapped in a `BufReader` and start out
    /// marked open.
    pub fn new(addr: &ConnectionAddr) -> RedisResult<ActualConnection> {
        Ok(match *addr {
            ConnectionAddr::Tcp(ref host, ref port) => {
                let host: &str = &*host;
                let tcp = TcpStream::connect((host, *port))?;
                let buffered = BufReader::new(tcp);
                ActualConnection::Tcp(TcpConnection {
                    reader: buffered,
                    open: true,
                })
            }
            #[cfg(unix)]
            ConnectionAddr::Unix(ref path) => ActualConnection::Unix(UnixConnection {
                sock: BufReader::new(UnixStream::connect(path)?),
                open: true,
            }),
            #[cfg(not(unix))]
            ConnectionAddr::Unix(ref path) => {
                fail!((
                    ErrorKind::InvalidClientConfig,
                    "Cannot connect to unix sockets \
                     on this platform"
                ));
            }
        })
    }
    /// Writes raw bytes to the socket. On a dropped-connection error the
    /// transport is marked closed so `is_open` reports it.
    pub fn send_bytes(&mut self, bytes: &[u8]) -> RedisResult<Value> {
        match *self {
            ActualConnection::Tcp(ref mut connection) => {
                let res = connection
                    .reader
                    .get_mut()
                    .write_all(bytes)
                    .map_err(|e| RedisError::from(e));
                match res {
                    Err(e) => {
                        if e.is_connection_dropped() {
                            connection.open = false;
                        }
                        Err(e)
                    }
                    Ok(_) => Ok(Value::Okay),
                }
            }
            #[cfg(unix)]
            ActualConnection::Unix(ref mut connection) => {
                let result = connection
                    .sock
                    .get_mut()
                    .write_all(bytes)
                    .map_err(|e| RedisError::from(e));
                match result {
                    Err(e) => {
                        if e.is_connection_dropped() {
                            connection.open = false;
                        }
                        Err(e)
                    }
                    Ok(_) => Ok(Value::Okay),
                }
            }
        }
    }
    /// Reads and parses a single protocol value from the socket.
    pub fn read_response(&mut self) -> RedisResult<Value> {
        let result = Parser::new(match *self {
            ActualConnection::Tcp(TcpConnection { ref mut reader, .. }) => reader as &mut BufRead,
            #[cfg(unix)]
            ActualConnection::Unix(UnixConnection { ref mut sock, .. }) => sock as &mut BufRead,
        })
        .parse_value();
        // shutdown connection on protocol error: the stream position is no
        // longer trustworthy, so the socket is closed and flagged.
        match result {
            Err(ref e) if e.kind() == ErrorKind::ResponseError => match *self {
                ActualConnection::Tcp(ref mut connection) => {
                    let _ = connection.reader.get_mut().shutdown(net::Shutdown::Both);
                    connection.open = false;
                }
                #[cfg(unix)]
                ActualConnection::Unix(ref mut connection) => {
                    let _ = connection.sock.get_mut().shutdown(net::Shutdown::Both);
                    connection.open = false;
                }
            },
            _ => (),
        }
        result
    }
    /// Forwards the write timeout to the underlying socket.
    pub fn set_write_timeout(&self, dur: Option<Duration>) -> RedisResult<()> {
        match *self {
            ActualConnection::Tcp(TcpConnection { ref reader, .. }) => {
                reader.get_ref().set_write_timeout(dur)?;
            }
            #[cfg(unix)]
            ActualConnection::Unix(UnixConnection { ref sock, .. }) => {
                sock.get_ref().set_write_timeout(dur)?;
            }
        }
        Ok(())
    }
    /// Forwards the read timeout to the underlying socket.
    pub fn set_read_timeout(&self, dur: Option<Duration>) -> RedisResult<()> {
        match *self {
            ActualConnection::Tcp(TcpConnection { ref reader, .. }) => {
                reader.get_ref().set_read_timeout(dur)?;
            }
            #[cfg(unix)]
            ActualConnection::Unix(UnixConnection { ref sock, .. }) => {
                sock.get_ref().set_read_timeout(dur)?;
            }
        }
        Ok(())
    }
    /// Whether the transport is still considered usable.
    pub fn is_open(&self) -> bool {
        match *self {
            ActualConnection::Tcp(TcpConnection { open, .. }) => open,
            #[cfg(unix)]
            ActualConnection::Unix(UnixConnection { open, .. }) => open,
        }
    }
}
/// Establishes a connection as described by `connection_info`: opens the
/// transport, authenticates if a password is configured, and selects the
/// requested database when it is not the default 0.
pub fn connect(connection_info: &ConnectionInfo) -> RedisResult<Connection> {
    let mut rv = Connection {
        con: ActualConnection::new(&connection_info.addr)?,
        db: connection_info.db,
        pubsub: false,
    };
    // Authenticate first if a password was supplied.
    if let Some(ref passwd) = connection_info.passwd {
        match cmd("AUTH").arg(&**passwd).query::<Value>(&mut rv) {
            Ok(Value::Okay) => {}
            _ => {
                fail!((
                    ErrorKind::AuthenticationFailed,
                    "Password authentication failed"
                ));
            }
        }
    }
    // Database 0 is the server default; anything else needs an explicit SELECT.
    if connection_info.db != 0 {
        match cmd("SELECT")
            .arg(connection_info.db)
            .query::<Value>(&mut rv)
        {
            Ok(Value::Okay) => {}
            _ => fail!((
                ErrorKind::ResponseError,
                "Redis server refused to switch database"
            )),
        }
    }
    Ok(rv)
}
/// Implements the "stateless" part of the connection interface that is used by the
/// different objects in redis-rs. Primarily it obviously applies to `Connection`
/// object but also some other objects implement the interface (for instance
/// whole clients or certain redis results).
///
/// Generally clients and connections (as well as redis results of those) implement
/// this trait. Actual connections provide more functionality which can be used
/// to implement things like `PubSub` but they also can modify the intrinsic
/// state of the TCP connection. This is not possible with `ConnectionLike`
/// implementors because that functionality is not exposed.
pub trait ConnectionLike {
    /// Sends an already encoded (packed) command into the TCP socket and
    /// reads the single response from it.
    fn req_packed_command(&mut self, cmd: &[u8]) -> RedisResult<Value>;
    /// Sends multiple already encoded (packed) command into the TCP socket
    /// and reads `count` responses from it. This is used to implement
    /// pipelining. The first `offset` responses are read and discarded
    /// before the `count` returned ones.
    fn req_packed_commands(
        &mut self,
        cmd: &[u8],
        offset: usize,
        count: usize,
    ) -> RedisResult<Vec<Value>>;
    /// Returns the database this connection is bound to. Note that this
    /// information might be unreliable because it's initially cached and
    /// also might be incorrect if the connection like object is not
    /// actually connected.
    fn get_db(&self) -> i64;
}
/// A connection is an object that represents a single redis connection. It
/// provides basic support for sending encoded commands into a redis connection
/// and to read a response from it. It's bound to a single database and can
/// only be created from the client.
///
/// You generally do not much with this object other than passing it to
/// `Cmd` objects.
impl Connection {
    /// Sends an already encoded (packed) command into the TCP socket and
    /// does not read a response. This is useful for commands like
    /// `MONITOR` which yield multiple items. This needs to be used with
    /// care because it changes the state of the connection.
    pub fn send_packed_command(&mut self, cmd: &[u8]) -> RedisResult<()> {
        self.con.send_bytes(cmd)?;
        Ok(())
    }
    /// Fetches a single response from the connection. This is useful
    /// if used in combination with `send_packed_command`.
    pub fn recv_response(&mut self) -> RedisResult<Value> {
        self.con.read_response()
    }
    /// Sets the write timeout for the connection.
    ///
    /// If the provided value is `None`, then `send_packed_command` call will
    /// block indefinitely. It is an error to pass the zero `Duration` to this
    /// method.
    pub fn set_write_timeout(&self, dur: Option<Duration>) -> RedisResult<()> {
        self.con.set_write_timeout(dur)
    }
    /// Sets the read timeout for the connection.
    ///
    /// If the provided value is `None`, then `recv_response` call will
    /// block indefinitely. It is an error to pass the zero `Duration` to this
    /// method.
    pub fn set_read_timeout(&self, dur: Option<Duration>) -> RedisResult<()> {
        self.con.set_read_timeout(dur)
    }
    /// Creates a `PubSub` view over this connection.
    pub fn as_pubsub<'a>(&'a mut self) -> PubSub<'a> {
        // NOTE: The pubsub flag is intentionally not raised at this time since running commands
        // within the pubsub state should not try and exit from the pubsub state.
        PubSub::new(self)
    }
    // Leaves the pubsub state; on failure the `pubsub` flag stays raised so
    // the next regular command retries the exit.
    fn exit_pubsub(&mut self) -> RedisResult<()> {
        let res = self.clear_active_subscriptions();
        if res.is_ok() {
            self.pubsub = false;
        } else {
            // Raise the pubsub flag to indicate the connection is "stuck" in that state.
            self.pubsub = true;
        }
        res
    }
    /// Get the inner connection out of a PubSub
    ///
    /// Any active subscriptions are unsubscribed. In the event of an error, the connection is
    /// dropped.
    fn clear_active_subscriptions(&mut self) -> RedisResult<()> {
        // Responses to unsubscribe commands return in a 3-tuple with values
        // ("unsubscribe" or "punsubscribe", name of subscription removed, count of remaining subs).
        // The "count of remaining subs" includes both pattern subscriptions and non pattern
        // subscriptions. Thus, to accurately drain all unsubscribe messages received from the
        // server, both commands need to be executed at once.
        {
            // Prepare both unsubscribe commands
            let unsubscribe = cmd("UNSUBSCRIBE").get_packed_command();
            let punsubscribe = cmd("PUNSUBSCRIBE").get_packed_command();
            // Grab a reference to the underlying connection so that we may send
            // the commands without immediately blocking for a response.
            let con = &mut self.con;
            // Execute commands
            con.send_bytes(&unsubscribe)?;
            con.send_bytes(&punsubscribe)?;
        }
        // Receive responses
        //
        // There will be at minimum two responses - 1 for each of punsubscribe and unsubscribe
        // commands. There may be more responses if there are active subscriptions. In this case,
        // messages are received until the _subscription count_ in the responses reach zero.
        let mut received_unsub = false;
        let mut received_punsub = false;
        loop {
            let res: (Vec<u8>, (), isize) = from_redis_value(&self.recv_response()?)?;
            // The first byte distinguishes "unsubscribe" from "punsubscribe".
            match res.0.first().map(|v| *v) {
                Some(b'u') => received_unsub = true,
                Some(b'p') => received_punsub = true,
                _ => (),
            }
            if received_unsub && received_punsub && res.2 == 0 {
                break;
            }
        }
        // Finally, the connection is back in its normal state since all subscriptions were
        // cancelled *and* all unsubscribe messages were received.
        Ok(())
    }
    /// Returns the connection status.
    ///
    /// The connection is open until any `read_response` call recieved an
    /// invalid response from the server (most likely a closed or dropped
    /// connection, otherwise a Redis protocol error). When using unix
    /// sockets the connection is open until writing a command failed with a
    /// `BrokenPipe` error.
    pub fn is_open(&self) -> bool {
        self.con.is_open()
    }
}
impl ConnectionLike for Connection {
    /// Sends one packed command and reads its single reply, first leaving a
    /// lingering pubsub state if necessary.
    fn req_packed_command(&mut self, cmd: &[u8]) -> RedisResult<Value> {
        if self.pubsub {
            self.exit_pubsub()?;
        }
        let con = &mut self.con;
        con.send_bytes(cmd)?;
        con.read_response()
    }

    /// Sends a pipeline of packed commands, discarding the first `offset`
    /// replies and returning the following `count`.
    fn req_packed_commands(
        &mut self,
        cmd: &[u8],
        offset: usize,
        count: usize,
    ) -> RedisResult<Vec<Value>> {
        if self.pubsub {
            self.exit_pubsub()?;
        }
        let con = &mut self.con;
        con.send_bytes(cmd)?;
        // Pre-allocate for the replies that are actually kept.
        let mut rv = Vec::with_capacity(count);
        for idx in 0..(offset + count) {
            let item = con.read_response()?;
            if idx >= offset {
                rv.push(item);
            }
        }
        Ok(rv)
    }

    /// Database number this connection was opened against (cached).
    fn get_db(&self) -> i64 {
        self.db
    }
}
/// The pubsub object provides convenient access to the redis pubsub
/// system. Once created you can subscribe and unsubscribe from channels
/// and listen in on messages.
///
/// Example:
///
/// ```rust,no_run
/// # fn do_something() -> redis::RedisResult<()> {
/// let client = redis::Client::open("redis://127.0.0.1/")?;
/// let mut con = client.get_connection()?;
/// let mut pubsub = con.as_pubsub();
/// pubsub.subscribe("channel_1")?;
/// pubsub.subscribe("channel_2")?;
///
/// loop {
///     let msg = pubsub.get_message()?;
///     let payload : String = msg.get_payload()?;
///     println!("channel '{}': {}", msg.get_channel_name(), payload);
/// }
/// # }
/// ```
impl<'a> PubSub<'a> {
    // Internal constructor; only reachable through `Connection::as_pubsub`.
    fn new(con: &'a mut Connection) -> Self {
        Self { con }
    }
    /// Subscribes to a new channel.
    pub fn subscribe<T: ToRedisArgs>(&mut self, channel: T) -> RedisResult<()> {
        let _: () = cmd("SUBSCRIBE").arg(channel).query(self.con)?;
        Ok(())
    }
    /// Subscribes to a new channel with a pattern.
    pub fn psubscribe<T: ToRedisArgs>(&mut self, pchannel: T) -> RedisResult<()> {
        let _: () = cmd("PSUBSCRIBE").arg(pchannel).query(self.con)?;
        Ok(())
    }
    /// Unsubscribes from a channel.
    pub fn unsubscribe<T: ToRedisArgs>(&mut self, channel: T) -> RedisResult<()> {
        let _: () = cmd("UNSUBSCRIBE").arg(channel).query(self.con)?;
        Ok(())
    }
    /// Unsubscribes from a channel with a pattern.
    pub fn punsubscribe<T: ToRedisArgs>(&mut self, pchannel: T) -> RedisResult<()> {
        let _: () = cmd("PUNSUBSCRIBE").arg(pchannel).query(self.con)?;
        Ok(())
    }
    /// Fetches the next message from the pubsub connection. Blocks until
    /// a message becomes available. This currently does not provide a
    /// wait not to block :(
    ///
    /// The message itself is still generic and can be converted into an
    /// appropriate type through the helper methods on it.
    pub fn get_message(&mut self) -> RedisResult<Msg> {
        loop {
            let raw_msg: Vec<Value> = from_redis_value(&self.con.recv_response()?)?;
            let mut iter = raw_msg.into_iter();
            let msg_type: String = from_redis_value(&unwrap_or!(iter.next(), continue))?;
            let mut pattern = None;
            let payload;
            let channel;
            // Frames other than "message"/"pmessage" (e.g. subscription
            // confirmations) are skipped by continuing the loop.
            if msg_type == "message" {
                channel = unwrap_or!(iter.next(), continue);
                payload = unwrap_or!(iter.next(), continue);
            } else if msg_type == "pmessage" {
                pattern = Some(unwrap_or!(iter.next(), continue));
                channel = unwrap_or!(iter.next(), continue);
                payload = unwrap_or!(iter.next(), continue);
            } else {
                continue;
            }
            return Ok(Msg {
                payload: payload,
                channel: channel,
                pattern: pattern,
            });
        }
    }
    /// Sets the read timeout for the connection.
    ///
    /// If the provided value is `None`, then `get_message` call will
    /// block indefinitely. It is an error to pass the zero `Duration` to this
    /// method.
    pub fn set_read_timeout(&self, dur: Option<Duration>) -> RedisResult<()> {
        self.con.set_read_timeout(dur)
    }
}
impl<'a> Drop for PubSub<'a> {
    fn drop(&mut self) {
        // Best effort: unsubscribe and drain so the connection can be
        // reused; on failure the connection's pubsub flag stays raised.
        let _ = self.con.exit_pubsub();
    }
}
/// This holds the data that comes from listening to a pubsub
/// connection. It only contains actual message data.
impl Msg {
    /// Returns the channel this message came on.
    pub fn get_channel<T: FromRedisValue>(&self) -> RedisResult<T> {
        from_redis_value(&self.channel)
    }
    /// Convenience method to get a string version of the channel. Unless
    /// your channel contains non utf-8 bytes you can always use this
    /// method. If the channel is not a valid string (which really should
    /// not happen) then the return value is `"?"`.
    pub fn get_channel_name(&self) -> &str {
        match self.channel {
            Value::Data(ref bytes) => from_utf8(bytes).unwrap_or("?"),
            _ => "?",
        }
    }
    /// Returns the message's payload in a specific format.
    pub fn get_payload<T: FromRedisValue>(&self) -> RedisResult<T> {
        from_redis_value(&self.payload)
    }
    /// Returns the bytes that are the message's payload. This can be used
    /// as an alternative to the `get_payload` function if you are interested
    /// in the raw bytes in it. Non-data payloads yield an empty slice.
    pub fn get_payload_bytes(&self) -> &[u8] {
        match self.payload {
            Value::Data(ref bytes) => bytes,
            _ => b"",
        }
    }
    /// Returns true if the message was constructed from a pattern
    /// subscription.
    pub fn from_pattern(&self) -> bool {
        self.pattern.is_some()
    }
    /// If the message was constructed from a message pattern this can be
    /// used to find out which one. It's recommended to match against
    /// an `Option<String>` so that you do not need to use `from_pattern`
    /// to figure out if a pattern was set.
    pub fn get_pattern<T: FromRedisValue>(&self) -> RedisResult<T> {
        match self.pattern {
            None => from_redis_value(&Value::Nil),
            Some(ref x) => from_redis_value(x),
        }
    }
}
/// This function simplifies transaction management slightly. What it
/// does is automatically watching keys and then going into a transaction
/// loop util it succeeds. Once it goes through the results are
/// returned.
///
/// To use the transaction two pieces of information are needed: a list
/// of all the keys that need to be watched for modifications and a
/// closure with the code that should be execute in the context of the
/// transaction. The closure is invoked with a fresh pipeline in atomic
/// mode. To use the transaction the function needs to return the result
/// from querying the pipeline with the connection.
///
/// The end result of the transaction is then available as the return
/// value from the function call.
///
/// Example:
///
/// ```rust,no_run
/// use redis::{Commands, PipelineCommands};
/// # fn do_something() -> redis::RedisResult<()> {
/// # let client = redis::Client::open("redis://127.0.0.1/").unwrap();
/// # let mut con = client.get_connection().unwrap();
/// let key = "the_key";
/// let (new_val,) : (isize,) = redis::transaction(&mut con, &[key], |con, pipe| {
/// let old_val : isize = con.get(key)?;
/// pipe
/// .set(key, old_val + 1).ignore()
/// .get(key).query(con)
/// })?;
/// println!("The incremented number is: {}", new_val);
/// # Ok(()) }
/// ```
pub fn transaction<
C: ConnectionLike,
K: ToRedisArgs,
T: FromRedisValue,
F: FnMut(&mut C, &mut Pipeline) -> RedisResult<Option<T>>,
>(
con: &mut C,
keys: &[K],
func: F,
) -> RedisResult<T> {
let mut func = func;
loop {
let _: () = cmd("WATCH").arg(keys).query(con)?;
let mut p = pipe();
let response: Option<T> = func(con, p.atomic())?;
match response {
None => {
continue;
}
Some(response) => {
// make sure no watch is left in the connection, even if
// someone forgot to use the pipeline.
let _: () = cmd("UNWATCH").query(con)?;
return Ok(response);
}
}
}
}<|fim▁end|> | |
<|file_name|>ledger.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Utility functions to make it easier to work with Ledger in Rust
// TODO merge with equivalent module in fuchsia/rust_ledger_example into a library?
use apps_ledger_services_public::*;
use fuchsia::read_entire_vmo;
use fidl::Error;
use magenta::{Vmo, self};
use sha2::{Sha256, Digest};
// Rust emits a warning if matched-on constants aren't all-caps
// (ALL_CAPS aliases for the FIDL-generated status/result constants).
pub const OK: Status = Status_Ok;
pub const KEY_NOT_FOUND: Status = Status_KeyNotFound;
pub const NEEDS_FETCH: Status = Status_NeedsFetch;
pub const RESULT_COMPLETED: ResultState = ResultState_Completed;
/// Callback for ledger calls whose only acceptable outcome is success:
/// panics if the call failed to respond or responded with a non-OK status.
pub fn ledger_crash_callback(res: Result<Status, Error>) {
    let status = res.expect("ledger call failed to respond with a status");
    assert_eq!(status, Status_Ok, "ledger call failed");
}
/// Errors that can occur when reading a value from the ledger.
#[derive(Debug)]
pub enum ValueError {
    /// The value exists but must be fetched (Status_NeedsFetch) first.
    NeedsFetch,
    /// The ledger returned an unexpected failure status.
    LedgerFail(Status),
    /// Reading the value out of the VMO failed.
    Vmo(magenta::Status),
}
/// Convert the low level result of getting a key from the ledger into a
/// higher level Rust representation.
///
/// `Ok(Some(bytes))` for a present value, `Ok(None)` for a missing key,
/// `Err` for needs-fetch, VMO read failures, or any other status.
pub fn value_result(res: (Status, Option<Vmo>)) -> Result<Option<Vec<u8>>, ValueError> {
    match res {
        (OK, Some(vmo)) => {
            // Copy the whole VMO contents into an owned byte vector.
            let buffer = read_entire_vmo(&vmo).map_err(ValueError::Vmo)?;
            Ok(Some(buffer))
        },
        (KEY_NOT_FOUND, _) => Ok(None),
        (NEEDS_FETCH, _) => Err(ValueError::NeedsFetch),
        (status, _) => Err(ValueError::LedgerFail(status)),
    }
}
/// Ledger page ids are exactly 16 bytes, so we need a way of determining
/// a unique 16 byte ID that won't collide based on some data we have
pub fn gen_page_id(input_data: &[u8]) -> [u8; 16] {
let mut hasher = Sha256::default();
hasher.input(input_data);
let full_hash = hasher.result();
let full_slice = full_hash.as_slice();
<|fim▁hole|> arr
}<|fim▁end|> | // magic incantation to get the first 16 bytes of the hash
let mut arr: [u8; 16] = Default::default();
arr.as_mut().clone_from_slice(&full_slice[0..16]); |
<|file_name|>CommandObjectStats.cpp<|end_file_name|><|fim▁begin|>//===-- CommandObjectStats.cpp ----------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "CommandObjectStats.h"
#include "lldb/Host/Host.h"<|fim▁hole|>using namespace lldb;
using namespace lldb_private;
// Implements `statistics enable`: turns on statistics collection for the
// currently selected (or dummy) target.
class CommandObjectStatsEnable : public CommandObjectParsed {
public:
  CommandObjectStatsEnable(CommandInterpreter &interpreter)
      : CommandObjectParsed(interpreter, "enable",
                            "Enable statistics collection", nullptr,
                            eCommandProcessMustBePaused) {}

  ~CommandObjectStatsEnable() override = default;

protected:
  bool DoExecute(Args &command, CommandReturnObject &result) override {
    Target &target = GetSelectedOrDummyTarget();
    if (!target.GetCollectingStats()) {
      // Not collecting yet: flip it on and report success.
      target.SetCollectingStats(true);
      result.SetStatus(eReturnStatusSuccessFinishResult);
      return true;
    }
    result.AppendError("statistics already enabled");
    result.SetStatus(eReturnStatusFailed);
    return false;
  }
};
// Implements `statistics disable`: turns statistics collection back off;
// it is an error to disable when collection was never enabled.
class CommandObjectStatsDisable : public CommandObjectParsed {
public:
  CommandObjectStatsDisable(CommandInterpreter &interpreter)
      : CommandObjectParsed(interpreter, "disable",
                            "Disable statistics collection", nullptr,
                            eCommandProcessMustBePaused) {}

  ~CommandObjectStatsDisable() override = default;

protected:
  bool DoExecute(Args &command, CommandReturnObject &result) override {
    Target &target = GetSelectedOrDummyTarget();
    if (target.GetCollectingStats()) {
      target.SetCollectingStats(false);
      result.SetStatus(eReturnStatusSuccessFinishResult);
      return true;
    }
    result.AppendError("need to enable statistics before disabling them");
    result.SetStatus(eReturnStatusFailed);
    return false;
  }
};
// Implements `statistics dump`: prints every collected counter together
// with its human-readable description.
class CommandObjectStatsDump : public CommandObjectParsed {
public:
  CommandObjectStatsDump(CommandInterpreter &interpreter)
      : CommandObjectParsed(interpreter, "dump", "Dump statistics results",
                            nullptr, eCommandProcessMustBePaused) {}

  ~CommandObjectStatsDump() override = default;

protected:
  bool DoExecute(Args &command, CommandReturnObject &result) override {
    Target &target = GetSelectedOrDummyTarget();
    uint32_t idx = 0;
    for (auto &counter : target.GetStatistics()) {
      // Counters are stored positionally; the index doubles as the
      // StatisticKind used to look up the description. Note the
      // temporary std::string must stay inline so .c_str() is valid
      // for the duration of the call.
      result.AppendMessageWithFormat(
          "%s : %u\n",
          lldb_private::GetStatDescription(
              static_cast<lldb_private::StatisticKind>(idx))
              .c_str(),
          counter);
      ++idx;
    }
    result.SetStatus(eReturnStatusSuccessFinishResult);
    return true;
  }
};
// Multiword parent for the `statistics` command; wires up the
// enable/disable/dump subcommands.
CommandObjectStats::CommandObjectStats(CommandInterpreter &interpreter)
    : CommandObjectMultiword(interpreter, "statistics",
                             "Print statistics about a debugging session",
                             "statistics <subcommand> [<subcommand-options>]") {
  LoadSubCommand("enable",
                 CommandObjectSP(new CommandObjectStatsEnable(interpreter)));
  LoadSubCommand("disable",
                 CommandObjectSP(new CommandObjectStatsDisable(interpreter)));
  LoadSubCommand("dump",
                 CommandObjectSP(new CommandObjectStatsDump(interpreter)));
}
CommandObjectStats::~CommandObjectStats() = default;<|fim▁end|> | #include "lldb/Interpreter/CommandInterpreter.h"
#include "lldb/Interpreter/CommandReturnObject.h"
#include "lldb/Target/Target.h"
|
<|file_name|>NettyContext.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.message.netty;
import backtype.storm.Config;
import backtype.storm.messaging.IConnection;
import backtype.storm.messaging.IContext;
import backtype.storm.utils.DisruptorQueue;
import backtype.storm.utils.Utils;
import com.alibaba.jstorm.callback.AsyncLoopThread;
import com.alibaba.jstorm.metric.MetricDef;
import com.alibaba.jstorm.utils.JStormUtils;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class NettyContext implements IContext {
private final static Logger LOG = LoggerFactory.getLogger(NettyContext.class);
@SuppressWarnings("rawtypes")
private Map stormConf;
private NioClientSocketChannelFactory clientChannelFactory;
private ReconnectRunnable reconnector;
<|fim▁hole|> @SuppressWarnings("unused")
    /** Default constructor; configuration is supplied later via {@code prepare}. */
    public NettyContext() {
    }
    /**
     * Initialization per Storm configuration: builds the shared Netty NIO
     * client channel factory and starts the async reconnect loop.
     */
    @SuppressWarnings("rawtypes")
    public void prepare(Map stormConf) {
        this.stormConf = stormConf;

        // Worker-thread cap for the client side; a non-positive value falls
        // back to Netty's default worker count.
        int maxWorkers = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS));
        ThreadFactory bossFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "boss");
        ThreadFactory workerFactory = new NettyRenameThreadFactory(MetricDef.NETTY_CLI + "worker");
        if (maxWorkers > 0) {
            clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                    Executors.newCachedThreadPool(workerFactory), maxWorkers);
        } else {
            clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                    Executors.newCachedThreadPool(workerFactory));
        }

        reconnector = new ReconnectRunnable();
        new AsyncLoopThread(reconnector, true, Thread.MIN_PRIORITY, true);
    }
    /**
     * Creates the server-side connection listening on {@code port}.
     * A worker that cannot bind its port is useless, so on any failure the
     * whole process is halted and left for the supervisor to restart.
     */
    @Override
    public IConnection bind(String topologyId, int port, ConcurrentHashMap<Integer, DisruptorQueue> deserializedQueue,
                            DisruptorQueue recvControlQueue, boolean bstartRec, Set<Integer> workerTasks) {
        IConnection retConnection = null;
        try {
            retConnection = new NettyServer(stormConf, port, deserializedQueue, recvControlQueue, bstartRec, workerTasks);
        } catch (Throwable e) {
            LOG.error("Failed to create NettyServer", e);
            JStormUtils.halt_process(-1, "Failed to bind " + port);
        }
        return retConnection;
    }
    /**
     * Creates an async Netty client to {@code host:port} with empty
     * source/target task sets.
     */
    @Override
    public IConnection connect(String topologyId, String host, int port) {
        return new NettyClientAsync(stormConf, clientChannelFactory, host, port, reconnector, new HashSet<Integer>(), new HashSet<Integer>());
    }
    /**
     * Creates an async Netty client to {@code host:port} carrying the given
     * source and target task sets.
     */
    @Override
    public IConnection connect(String topologyId, String host, int port, Set<Integer> sourceTasks, Set<Integer> targetTasks) {
        return new NettyClientAsync(stormConf, clientChannelFactory, host, port, reconnector, sourceTasks, targetTasks);
    }
    /**
     * Terminate this context: release Netty's selector/worker threads and
     * stop the reconnect loop.
     */
    public void term() {
        /* clientScheduleService.shutdown();
        try {
            clientScheduleService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            LOG.error("Error when shutting down client scheduler", e);
        }*/
        clientChannelFactory.releaseExternalResources();
        reconnector.shutdown();
    }
}<|fim▁end|> | |
<|file_name|>transform.py<|end_file_name|><|fim▁begin|># This file is part of the MapProxy project.
# Copyright (C) 2010 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from mapproxy.compat.image import Image, transform_uses_center
from mapproxy.image import ImageSource, image_filter
from mapproxy.srs import make_lin_transf, bbox_equals
class ImageTransformer(object):
    """
    Transform images between different bbox and spatial reference systems.

    :note: The transformation doesn't make a real transformation for each pixel,
        but a mesh transformation (see `PIL Image.transform`_).
        It will divide the target image into rectangles (a mesh). The
        source coordinates for each rectangle vertex will be calculated.
        The quadrilateral will then be transformed with the source coordinates
        into the destination quad (affine).

        The number of quads is calculated dynamically to keep the deviation in
        the image transformation below one pixel.

        .. _PIL Image.transform:
            http://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.transform

        ::

            src quad                       dst quad
             .----.    <- coord-           .----.
            /    /      transformation     |    |
           /    /                          |    |
           .----.   img-transformation ->  .----.----
                                           |        |
                                           ---------------.
           large src image                 large dst image
    """

    def __init__(self, src_srs, dst_srs, max_px_err=1):
        """
        :param src_srs: the srs of the source image
        :param dst_srs: the srs of the target image
        :param max_px_err: maximum deviation (in pixels) tolerated by the
            mesh approximation of the coordinate transformation
        """
        self.src_srs = src_srs
        self.dst_srs = dst_srs
        self.dst_bbox = self.dst_size = None
        self.max_px_err = max_px_err

    def transform(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
        """
        Transforms the `src_img` between the source and destination SRS
        of this ``ImageTransformer`` instance.

        When the ``src_srs`` and ``dst_srs`` are equal the image will be cropped
        and not transformed. If the `src_bbox` and `dst_bbox` are equal,
        the `src_img` itself will be returned.

        :param src_img: the source image for the transformation
        :param src_bbox: the bbox of the src_img
        :param dst_size: the size of the result image (in pixel)
        :type dst_size: ``(int(width), int(height))``
        :param dst_bbox: the bbox of the result image
        :return: the transformed image
        :rtype: `ImageSource`
        """
        if self._no_transformation_needed(src_img.size, src_bbox, dst_size, dst_bbox):
            return src_img

        if self.src_srs == self.dst_srs:
            # same SRS: a crop/extent transformation is sufficient
            result = self._transform_simple(src_img, src_bbox, dst_size, dst_bbox,
                                            image_opts)
        else:
            result = self._transform(src_img, src_bbox, dst_size, dst_bbox, image_opts)

        # keep the cacheability flag of the source image
        result.cacheable = src_img.cacheable
        return result

    def _transform_simple(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
        """
        Do a simple crop/extent transformation.
        """
        src_quad = (0, 0, src_img.size[0], src_img.size[1])
        to_src_px = make_lin_transf(src_bbox, src_quad)
        minx, miny = to_src_px((dst_bbox[0], dst_bbox[3]))
        maxx, maxy = to_src_px((dst_bbox[2], dst_bbox[1]))

        src_res = ((src_bbox[0]-src_bbox[2])/src_img.size[0],
                   (src_bbox[1]-src_bbox[3])/src_img.size[1])
        dst_res = ((dst_bbox[0]-dst_bbox[2])/dst_size[0],
                   (dst_bbox[1]-dst_bbox[3])/dst_size[1])

        # a tenth of a pixel tolerance for "same resolution" detection
        tenth_px_res = (abs(dst_res[0]/(dst_size[0]*10)),
                        abs(dst_res[1]/(dst_size[1]*10)))
        if (abs(src_res[0]-dst_res[0]) < tenth_px_res[0] and
                abs(src_res[1]-dst_res[1]) < tenth_px_res[1]):
            # rounding might result in subpixel inaccuracy
            # this exact resolution match should only happen in clients with
            # fixed resolutions like OpenLayers
            minx = int(round(minx))
            miny = int(round(miny))
            result = src_img.as_image().crop((minx, miny,
                                              minx+dst_size[0], miny+dst_size[1]))
        else:
            img = img_for_resampling(src_img.as_image(), image_opts.resampling)
            result = img.transform(dst_size, Image.EXTENT,
                                   (minx, miny, maxx, maxy),
                                   image_filter[image_opts.resampling])
        return ImageSource(result, size=dst_size, image_opts=image_opts)

    def _transform(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
        """
        Do a 'real' transformation with a transformed mesh (see above).
        """
        meshes = transform_meshes(
            src_size=src_img.size,
            src_bbox=src_bbox,
            src_srs=self.src_srs,
            dst_size=dst_size,
            dst_bbox=dst_bbox,
            dst_srs=self.dst_srs,
            max_px_err=self.max_px_err,
        )
        img = img_for_resampling(src_img.as_image(), image_opts.resampling)
        result = img.transform(dst_size, Image.MESH, meshes,
                               image_filter[image_opts.resampling])

        if False:
            # draw mesh for debugging
            from PIL import ImageDraw
            draw = ImageDraw.Draw(result)
            for g, _ in meshes:
                draw.rectangle(g, fill=None, outline=(255, 0, 0))

        return ImageSource(result, size=dst_size, image_opts=image_opts)

    def _no_transformation_needed(self, src_size, src_bbox, dst_size, dst_bbox):
        """
        >>> src_bbox = (-2504688.5428486541, 1252344.271424327,
        ...             -1252344.271424327, 2504688.5428486541)
        >>> dst_bbox = (-2504688.5431999983, 1252344.2704,
        ...             -1252344.2719999983, 2504688.5416000001)
        >>> from mapproxy.srs import SRS
        >>> t = ImageTransformer(SRS(900913), SRS(900913))
        >>> t._no_transformation_needed((256, 256), src_bbox, (256, 256), dst_bbox)
        True
        """
        # bboxes are considered equal when they differ by less than a
        # tenth of a pixel
        xres = (dst_bbox[2]-dst_bbox[0])/dst_size[0]
        yres = (dst_bbox[3]-dst_bbox[1])/dst_size[1]
        return (src_size == dst_size and
                self.src_srs == self.dst_srs and
                bbox_equals(src_bbox, dst_bbox, xres/10, yres/10))
def transform_meshes(src_size, src_bbox, src_srs, dst_size, dst_bbox, dst_srs, max_px_err=1):
    """
    transform_meshes creates a list of QUAD transformation parameters for PIL's
    MESH image transformation.

    Each QUAD is a rectangle in the destination image, like ``(0, 0, 100, 100)`` and
    a list of four pixel coordinates in the source image that match the destination rectangle.
    The four points form a quadrilateral (i.e. not a rectangle).
    PIL's image transform uses affine transformation to fill each rectangle in the destination
    image with data from the source quadrilateral.

    The number of QUADs is calculated dynamically to keep the deviation in the image
    transformation below one pixel. Image transformations for large map scales can be transformed with
    1-4 QUADs most of the time. For low scales, transform_meshes can generate a few hundred QUADs.
    It generates a maximum of one QUAD per 50 pixel.
    """
    src_bbox = src_srs.align_bbox(src_bbox)
    dst_bbox = dst_srs.align_bbox(dst_bbox)
    src_rect = (0, 0, src_size[0], src_size[1])
    dst_rect = (0, 0, dst_size[0], dst_size[1])
    to_src_px = make_lin_transf(src_bbox, src_rect)
    to_src_w = make_lin_transf(src_rect, src_bbox)
    to_dst_w = make_lin_transf(dst_rect, dst_bbox)

    meshes = []

    # more recent versions of Pillow use center coordinates for
    # transformations, we manually need to add half a pixel otherwise
    if transform_uses_center():
        px_offset = 0.0
    else:
        px_offset = 0.5

    def dst_quad_to_src(quad):
        # map the four corners of the dst rectangle to src pixel coords
        src_quad = []
        for dst_px in [(quad[0], quad[1]), (quad[0], quad[3]),
                       (quad[2], quad[3]), (quad[2], quad[1])]:
            dst_w = to_dst_w(
                (dst_px[0] + px_offset, dst_px[1] + px_offset))
            src_w = dst_srs.transform_to(src_srs, dst_w)
            src_px = to_src_px(src_w)
            src_quad.extend(src_px)
        return quad, src_quad

    # tolerated error in map units (destination resolution * max_px_err)
    res = (dst_bbox[2] - dst_bbox[0]) / dst_size[0]
    max_err = max_px_err * res

    def is_good(quad, src_quad):
        # check if the quad is small enough, or if the affine approximation
        # at the quad center deviates less than max_err from the true
        # coordinate transformation
        w = quad[2] - quad[0]
        h = quad[3] - quad[1]
        if w < 50 or h < 50:
            return True

        xc = quad[0] + w / 2.0 - 0.5
        yc = quad[1] + h / 2.0 - 0.5

        # coordinate for the center of the quad
        dst_w = to_dst_w((xc, yc))
        # actual coordinate for the center of the quad
        src_px = center_quad_transform(quad, src_quad)
        real_dst_w = src_srs.transform_to(dst_srs, to_src_w(src_px))

        err = max(abs(dst_w[0] - real_dst_w[0]), abs(dst_w[1] - real_dst_w[1]))
        return err < max_err

    # recursively add meshes. divide each quad into four sub quad till
    # accuracy is good enough.
    def add_meshes(quads):
        for quad in quads:
            quad, src_quad = dst_quad_to_src(quad)
            if is_good(quad, src_quad):
                meshes.append((quad, src_quad))
            else:
                add_meshes(divide_quad(quad))

    add_meshes([(0, 0, dst_size[0], dst_size[1])])
    return meshes
def center_quad_transform(quad, src_quad):
    """
    Map the center pixel coordinates of ``quad`` into ``src_quad`` using
    the same bilinear interpolation that PIL.Image.transform applies for
    QUAD/MESH transformations.
    """
    width = quad[2] - quad[0]
    height = quad[3] - quad[1]

    # the four corners of the source quadrilateral
    nw_x, nw_y = src_quad[0:2]
    sw_x, sw_y = src_quad[2:4]
    se_x, se_y = src_quad[4:6]
    ne_x, ne_y = src_quad[6:8]

    inv_w = 1.0 / width
    inv_h = 1.0 / height

    # center of the destination rectangle (PIL uses pixel centers)
    cx = width / 2.0 - 0.5
    cy = height / 2.0 - 0.5

    return (
        nw_x + (ne_x - nw_x) * inv_w * cx + (sw_x - nw_x) * inv_h * cy
        + (se_x - sw_x - ne_x + nw_x) * inv_w * inv_h * cx * cy,
        nw_y + (ne_y - nw_y) * inv_w * cx + (sw_y - nw_y) * inv_h * cy
        + (se_y - sw_y - ne_y + nw_y) * inv_w * inv_h * cx * cy
    )
def img_for_resampling(img, resampling):
    """
    Convert P images to RGB(A) for non-NEAREST resamplings.

    Palette (mode ``P``) images cannot be interpolated meaningfully, so
    they are converted to the palette's own mode (or RGBA as a fallback)
    before any bilinear/bicubic transform.
    """
    resampling = image_filter[resampling]
    if img.mode == 'P' and resampling != Image.NEAREST:
        img.load()  # load to get actual palette mode
        if img.palette is not None:
            # palette can still be None for cropped images
            img = img.convert(img.palette.mode)
        else:
            img = img.convert('RGBA')
    return img
def divide_quad(quad):
    """
    divide_quad in up to four sub quads. Only divide horizontal if quad is twice as wide then high,
    and vertical vice versa.
    PIL.Image.transform expects that the lower-right corner
    of a quad overlaps by one pixel.

    >>> divide_quad((0, 0, 500, 500))
    [(0, 0, 250, 250), (250, 0, 500, 250), (0, 250, 250, 500), (250, 250, 500, 500)]
    >>> divide_quad((0, 0, 2000, 500))
    [(0, 0, 1000, 500), (1000, 0, 2000, 500)]
    >>> divide_quad((100, 200, 200, 500))
    [(100, 200, 200, 350), (100, 350, 200, 500)]
    """
    left, top, right, bottom = quad
    width = right - left
    height = bottom - top
    mid_x = int(left + width / 2)
    mid_y = int(top + height / 2)

    if width > 2 * height:
        # wide quad: split only horizontally
        return [
            (left, top, mid_x, bottom),
            (mid_x, top, right, bottom),
        ]
    if height > 2 * width:
        # tall quad: split only vertically
        return [
            (left, top, right, mid_y),
            (left, mid_y, right, bottom),
        ]
    # otherwise split into four sub quads
    return [
        (left, top, mid_x, mid_y),
        (mid_x, top, right, mid_y),
        (left, mid_y, mid_x, bottom),
        (mid_x, mid_y, right, bottom),
    ]
<|file_name|>Dias_Da_Semana.java<|end_file_name|><|fim▁begin|>package br.com.fuelclub.entity;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.Table;<|fim▁hole|>
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long diasDaSemana_id;
private String diasDaSemana_descricao;
@ManyToMany
private List<PostoCombustivel> postos;
	/** @return the generated primary key. */
	public Long getDiasDaSemana_id() {
		return diasDaSemana_id;
	}
	public void setDiasDaSemana_id(Long diasDaSemana_id) {
		this.diasDaSemana_id = diasDaSemana_id;
	}
	/** @return the weekday description text. */
	public String getDiasDaSemana_descricao() {
		return diasDaSemana_descricao;
	}
	public void setDiasDaSemana_descricao(String diasDaSemana_descricao) {
		this.diasDaSemana_descricao = diasDaSemana_descricao;
	}
	/** @return the gas stations associated with this weekday (many-to-many). */
	public List<PostoCombustivel> getPostos() {
		return postos;
	}
	public void setPostos(List<PostoCombustivel> postos) {
		this.postos = postos;
	}
	/** Hash code over all fields, consistent with {@link #equals(Object)}. */
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((diasDaSemana_descricao == null) ? 0 : diasDaSemana_descricao.hashCode());
		result = prime * result + ((diasDaSemana_id == null) ? 0 : diasDaSemana_id.hashCode());
		result = prime * result + ((postos == null) ? 0 : postos.hashCode());
		return result;
	}
	/**
	 * Field-by-field equality (id, description and associated stations),
	 * with strict runtime-class comparison.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		Dias_Da_Semana other = (Dias_Da_Semana) obj;
		if (diasDaSemana_descricao == null) {
			if (other.diasDaSemana_descricao != null)
				return false;
		} else if (!diasDaSemana_descricao.equals(other.diasDaSemana_descricao))
			return false;
		if (diasDaSemana_id == null) {
			if (other.diasDaSemana_id != null)
				return false;
		} else if (!diasDaSemana_id.equals(other.diasDaSemana_id))
			return false;
		if (postos == null) {
			if (other.postos != null)
				return false;
		} else if (!postos.equals(other.postos))
			return false;
		return true;
	}
	/** Only the description is included, keeping log output short. */
	@Override
	public String toString() {
		return "Dias_Da_Semana [diasDaSemana_descricao=" + diasDaSemana_descricao + "]";
	}
	/** Full constructor used when all fields are known up front. */
	public Dias_Da_Semana(Long diasDaSemana_id, String diasDaSemana_descricao, List<PostoCombustivel> postos) {
		super();
		this.diasDaSemana_id = diasDaSemana_id;
		this.diasDaSemana_descricao = diasDaSemana_descricao;
		this.postos = postos;
	}
	/** No-arg constructor required by JPA. */
	public Dias_Da_Semana() {
		super();
		// TODO Auto-generated constructor stub
	}
}<|fim▁end|> |
@Entity
@Table (name = "Dias_da_Semana")
public class Dias_Da_Semana { |
<|file_name|>muffincms.js<|end_file_name|><|fim▁begin|>$(document).ready(function(){
var toggleMuffEditor = function(stat=false){
$("#muff-opt").remove();
// bind event
if(stat){
$(".muff").mouseover(function() {
$("#muff-opt").remove();
muffShowOptions($(this));
$(window).scroll(function(){
$("#muff-opt").remove();
})
});
}else{// unbind event
$(".muff").unbind("mouseover");
}
};
  // Build and show the floating edit/delete toolbar for a `.muff` element.
  // The element type (div/text/link/image) is derived from its CSS class and
  // used to build the admin edit/delete URLs.
  function muffShowOptions( e ){
    var t = "";
    var id = e.attr("data-muff-id");
    var title = e.attr("data-muff-title");
    var p = e.offset();
    var opttop = p.top + 15;
    var optleft = p.left + 5;
    if(e.hasClass("muff-div")){ t="div";
    }else if(e.hasClass("muff-text")){ t="text";
    }else if(e.hasClass("muff-a")){ t="link";
    }else if(e.hasClass("muff-img")){ t="image";
    }
    if(!title){ title = t;}
    // check position is beyond document
    if((p.left + 25 + 75) > $(window).width()){
      optleft -= 75;
    }
    var opt = "<div id='muff-opt' style='position:absolute;top:"+opttop+"px;left:"+optleft+"px;z-index:99998;display:none;'>";
    opt += "<a href='admin/"+t+"/"+id+"/edit' class='mbtn edit'></a>";
    opt += "<a href='admin/"+t+"/delete/' class='mbtn delete' data-mod='"+t+"' data-id='"+id+"'></a>";
    opt += "<span>"+title+"</span>";
    opt += "</div>";
    $("body").prepend(opt);
    $("#muff-opt").slideDown(300);
    // ask for confirmation, then delegate the actual delete to proceedDelete()
    $("body").find("#muff-opt > a.delete").click(function(e){
      var path = $(this).attr('href');
      var mod = $(this).attr('data-mod');
      // e.preventDefault();
      swal({
        title: "Are you sure?",
        text: "You are about to delete this "+mod,
        type: "warning",
        showCancelButton: true,
        confirmButtonColor: "#DD6B55",
        confirmButtonText: "Yes, delete it!",
        cancelButtonText: "Cancel",
        closeOnConfirm: true,
        closeOnCancel: true
      },
      function(isConfirm){
        if (isConfirm) {
          // window.location.href = path;
          proceedDelete(path, id);
        }
      });
      return false;
    });
  }
toggleMuffEditor(false);
// set checkbox editor event
$("input[name=cb-muff-editor]").click(function(){
if($(this).is(':checked')){ toggleMuffEditor(true); }
else{ toggleMuffEditor(false) }
});
function proceedDelete(path, id){
var newForm = jQuery('<form>', {
'action': path,
'method': 'POST',
'target': '_top'
}).append(jQuery('<input>', {
'name': '_token',
'value': $("meta[name=csrf-token]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'id',
'value': id,
'type': 'hidden'
}));
newForm.hide().appendTo("body").submit();
}
// $(".opt-div a.delete, .w-conf a.delete, .w-conf-hvr a.delete").click(function(e){
// var path = $(this).attr('href');
// var mod = $(this).attr('data-mod');
// // e.preventDefault();
// swal({
// title: "Are you sure?",
// text: "You are about to delete this "+mod,
// type: "warning",
// showCancelButton: true,
// confirmButtonColor: "#DD6B55",
// confirmButtonText: "Yes, delete it!",
// cancelButtonText: "Cancel",
// closeOnConfirm: true,
// closeOnCancel: true
// },
// function(isConfirm){
// if (isConfirm) {
// window.location.href = path;
// }
// });
// return false;
// });
// top nav click
$(".top-nav>li").click(function(){
var i = $(this).find('.dropdown-menu');
toggleClassExcept('.top-nav .dropdown-menu', 'rmv', 'active', i);
i.toggleClass("active");
});
/** toggle a certain class except the given object
* works with li and lists
* @param id identifier
* @param a action
* @param c class
* @param ex object
*/
function toggleClassExcept(id, a, c, ex){
$(id).each(function(){
switch(a){
case 'remove':
case 'rmv':
if(!$(this).is(ex)) $(this).removeClass(c);
break;
case 'add':
if(!$(this).is(ex)) $(this).addClass(c);
break;
default:
break;
}
});
}
$(".w-add .muff-add").click(function(event){
event.preventDefault();
var b = $(this);
var newForm = jQuery('<form>', {
'action': b.data('href'),
'method': 'GET',
'target': '_top'
}).append(jQuery('<input>', {
'name': '_token',
'value': $("meta[name=csrf-token]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'url',
'value': $("meta[name=muffin-url]").attr("content"),
'type': 'hidden'
})).append(jQuery('<input>', {
'name': 'location',
'value': b.data("loc"),
'type': 'hidden'
}));
// console.log(newForm);
newForm.hide().appendTo("body").submit();
})
// TAGs
//var tagArea = '.tag-area';
// TAG editor: turns the hidden .tagarea field into an interactive tag box.
// Tags are space-separated in the textarea; underscores encode spaces inside
// a single tag. Each committed tag is mirrored as a hidden input name="tags[]".
if($('.tagarea')[0]){
    var backSpace;
    var close = '<a class="close"></a>';
    var PreTags = $('.tagarea').val().trim().split(" ");
    $('.tagarea').after('<ul class="tag-box"></ul>');
    // FIX: the loop index was assigned without `var`, leaking an implicit
    // global `i` that could collide with other scripts on the page.
    for (var i=0 ; i < PreTags.length; i++ ){
        var pretag = PreTags[i].split("_").join(" ");
        // NOTE(review): tag text is interpolated into HTML unescaped
        // (potential XSS if tags come from untrusted users) — confirm
        // upstream sanitising before changing the markup here.
        if($('.tagarea').val().trim() != "" )
            $('.tag-box').append('<li class="tags"><input type="hidden" name="tags[]" value="'+pretag+'">'+pretag+close+'</li>');
    }
    // Trailing editable cell where new tags are typed.
    $('.tag-box').append('<li class="new-tag"><input class="input-tag" type="text"></li>');
    // unbind submit form when pressing enter
    $('.input-tag').on('keyup keypress', function(e) {
        var keyCode = e.keyCode || e.which;
        if (keyCode === 13) {
            e.preventDefault();
            return false;
        }
    });
    // Taging: Enter (13) or Tab (9) commits the typed text as a new tag.
    $('.input-tag').bind("keydown", function (kp) {
        var tag = $('.input-tag').val().trim();
        if(tag.length > 0){
            $(".tags").removeClass("danger");
            if(kp.keyCode == 13 || kp.keyCode == 9){
                $(".new-tag").before('<li class="tags"><input type="hidden" name="tags[]" value="'+tag+'">'+tag+close+'</li>');
                $(this).val('');
            }}
        // Backspace (8) on an empty input: first press marks the last tag
        // as "danger", a second press actually deletes it.
        else {if(kp.keyCode == 8 ){
            if($(".new-tag").prev().hasClass("danger")){
                $(".new-tag").prev().remove();
            }else{
                $(".new-tag").prev().addClass("danger");
            }
        }
        }
    });
    //Delete tag
    $(".tag-box").on("click", ".close", function() {
        $(this).parent().remove();
    });
    // Clicking anywhere in the box focuses the free-text input.
    $(".tag-box").click(function(){
        $('.input-tag').focus();
    });
    // Edit: double-click a tag to edit it in place; Enter commits, empty
    // text removes the tag entirely.
    $('.tag-box').on("dblclick" , ".tags", function(cl){
        var tags = $(this);
        var tag = tags.text().trim();
        $('.tags').removeClass('edit');
        tags.addClass('edit');
        tags.html('<input class="input-tag" value="'+tag+'" type="text">')
        $(".new-tag").hide();
        tags.find('.input-tag').focus();
        tag = $(this).find('.input-tag').val() ;
        // A second double-click anywhere cancels the edit and restores the tag.
        $('.tags').dblclick(function(){
            tags.html(tag + close);
            $('.tags').removeClass('edit');
            $(".new-tag").show();
        });
        tags.find('.input-tag').bind("keydown", function (edit) {
            tag = $(this).val() ;
            if(edit.keyCode == 13){
                $(".new-tag").show();
                $('.input-tag').focus();
                $('.tags').removeClass('edit');
                if(tag.length > 0){
                    tags.html('<input type="hidden" name="tags[]" value="'+tag+'">'+tag + close);
                }
                else{
                    tags.remove();
                }
            }
        });
    });
}
// sorting
// $(function() {
// $( ".tag-box" ).sortable({
// items: "li:not(.new-tag)",
// containment: "parent",
// scrollSpeed: 100
// });
// $( ".tag-box" ).disableSelection();
// });
});<|fim▁end|> | |
<|file_name|>suspend_and_re_activate.py<|end_file_name|><|fim▁begin|>from paypalrestsdk import BillingAgreement
import logging
BILLING_AGREEMENT_ID = "I-HT38K76XPMGJ"
try:
billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
suspend_note = {
"note": "Suspending the agreement"
}
if billing_agreement.suspend(suspend_note):
# Would expect state has changed to Suspended
billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
reactivate_note = {
"note": "Reactivating the agreement"
}
<|fim▁hole|> billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
else:
print(billing_agreement.error)
else:
print(billing_agreement.error)
except ResourceNotFound as error:
print("Billing Agreement Not Found")<|fim▁end|> | if billing_agreement.reactivate(reactivate_note):
# Would expect state has changed to Active |
<|file_name|>ChatLikeCMD.py<|end_file_name|><|fim▁begin|>#coding=utf8
import thread, time, sys, os, platform
# Platform detection + raw single-character input.
# Linux (or any platform with a working termios): read one raw byte from stdin.
# Windows: use msvcrt.getwch. Anything else is unsupported.
try:
    import termios, tty
    # Probe the attributes so a stub termios module falls through to Windows.
    termios.tcgetattr, termios.tcsetattr
    import threading
    OS = 'Linux'
except (ImportError, AttributeError):
    try:
        import msvcrt
        OS = 'Windows'
    except ImportError:
        raise Exception('Mac is currently not supported')
        OS = 'Mac'  # unreachable: kept from the original control flow
    else:
        getch = msvcrt.getwch
    else:
        def fn():
            # Put the terminal in raw mode, read exactly one character, and
            # always restore the previous settings afterwards.
            try:
                fd = sys.stdin.fileno()
                old_settings = termios.tcgetattr(fd)
                tty.setraw(fd)
                ch = sys.stdin.read(1)
            except:
                termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
                raise Exception
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
            return ch
        getch = fn
CMD_HISTORY = 30
class ChatLikeCMD():
    def __init__(self, header = 'LittleCoder', symbol = '>', inPip = None, inputMaintain = False):
        """Interactive chat-like command prompt.

        header/symbol form the prompt text; inPip is an optional list used as a
        queue of incoming lines to print above the prompt; inputMaintain keeps
        the typed line on screen after Enter instead of clearing it.
        """
        self.strBuff = []     # characters of the line currently being typed
        self.cmdBuff = []     # command history (at most CMD_HISTORY entries)
        self.historyCmd = -1  # index into cmdBuff while browsing history
        self.cursor = 0       # cursor position within strBuff
        self.inPip = [] if inPip == None else inPip  # lines waiting to be printed
        self.outPip = []      # finished commands for the consumer to poll
        self.isLaunch = False # True while the prompt threads are running
        self.isPause = False  # True while waiting on the Ctrl+C exit prompt
        self.header = header
        self.symbol = symbol
        self.inputMaintain = inputMaintain
    def reprint_input(self):
        """Redraw the prompt and the partially typed line on the current row."""
        sys.stdout.write(self.header + self.symbol)
        if self.strBuff:
            for i in self.strBuff: sys.stdout.write(i)
        sys.stdout.flush()
    def getch(self):
        """Read one raw character (via the module-level getch), mapping CR to LF."""
        c = getch()
        return c if c != '\r' else '\n'
    def get_history_command(self, direction):
        """Step through the stored command history.

        direction == 'UP' moves one entry deeper (bounded by CMD_HISTORY and
        the buffer length); anything else moves back. Returns '' when stepping
        past the most recent position, None when the index is out of range.
        """
        if direction == 'UP':
            if self.historyCmd < CMD_HISTORY - 1 and self.historyCmd < len(self.cmdBuff) - 1: self.historyCmd += 1
        else:
            if self.historyCmd == 0: return ''
            if self.historyCmd > 0: self.historyCmd -= 1
        # NOTE(review): history is indexed from the start of cmdBuff (oldest
        # first), so UP walks forward in time — confirm this is intended.
        if -1 < self.historyCmd < len(self.cmdBuff): return self.cmdBuff[self.historyCmd]
def output_command(self, s):
self.outPip.append(s if isinstance(s, unicode) else s.decode(sys.stdin.encoding))
if len(self.cmdBuff) >= CMD_HISTORY: self.cmdBuff = self.cmdBuff[::-1].pop()[::-1]
self.cmdBuff.append(s)
def print_thread(self):
while self.isLaunch:
if self.inPip:
sys.stdout.write('\r' + ' ' * 50 + '\r')
sys.stdout.flush()
<|fim▁hole|> sys.stdout.write('\r')
sys.stdout.flush()
self.reprint_input()
time.sleep(0.01)
    def fast_input_test(self):
        """Non-blocking getch: return the next char if one arrives within ~1ms.

        A Timer raises KeyboardInterrupt in the main thread (via
        thread.interrupt_main) to break out of the blocking getch; returns
        None when no character was pending.
        """
        timer = threading.Timer(0.001, thread.interrupt_main)
        c = None
        try:
            timer.start()
            c = getch()
        except:
            pass
        timer.cancel()
        return c
    def process_direction_char(self, c):
        """Handle an arrow key: UP/DOWN browse history, LEFT acts as backspace,
        RIGHT is a no-op.

        Windows scan codes (72/80/77/75) are first normalised to the Linux
        escape-sequence letters A/B/C/D.
        """
        if OS == 'Windows':
            if ord(c) == 72:
                c = 'A'
            elif ord(c) == 80:
                c = 'B'
            elif ord(c) == 77:
                c = 'C'
            elif ord(c) == 75:
                c = 'D'
        if ord(c) == 68: # LEFT
            # LEFT currently behaves like backspace; the real cursor movement
            # below is intentionally unreachable (see "cursor bugs").
            self.process_char('\b')
            return
            # cursor bugs
            if self.cursor > 0:
                if OS == 'Windows':
                    sys.stdout.write(chr(224) + chr(75))
                else:
                    sys.stdout.write(chr(27) + '[C')
                self.cursor -= 1
        elif ord(c) == 67: # RIGHT
            # RIGHT is disabled for the same reason; dead code kept below.
            return
            # cursor bugs
            if self.cursor < len(self.strBuff):
                if OS == 'Windows':
                    sys.stdout.write(chr(224) + chr(77))
                else:
                    sys.stdout.write(chr(27) + '[D')
                self.cursor += 1
        elif ord(c) == 65: # UP
            hc = self.get_history_command('UP')
            if not hc is None:
                # Replace the edit buffer with the recalled command and redraw.
                self.strBuff = [i for i in hc]
                self.cursor = len(hc)
                sys.stdout.write('\r' + ' ' * 50 + '\r')
                self.reprint_input()
        elif ord(c) == 66: # DOWN
            hc = self.get_history_command('DOWN')
            if not hc is None:
                self.strBuff = [i for i in hc]
                self.cursor = len(hc)
                sys.stdout.write('\r' + ' ' * 50 + '\r')
                self.reprint_input()
        else:
            raise Exception(c)
    def process_char(self, c):
        """Dispatch one raw input character: escape sequences, Ctrl+C,
        backspace, newline (commit the line), the Windows arrow-key prefix,
        or a plain printable character.
        """
        if ord(c) == 27: # Esc
            # On Linux an arrow key arrives as ESC [ A..D; peek ahead with the
            # non-blocking reader to distinguish it from a lone ESC press.
            if OS == 'Linux':
                fitc1 = self.fast_input_test()
                if ord(fitc1) == 91:
                    fitc2 = self.fast_input_test()
                    if 65 <= ord(fitc2) <= 68:
                        self.process_direction_char(fitc2)
                        return
            sys.stdout.write('\r' + ' ' * 50 + '\r')
            sys.stdout.flush()
            self.reprint_input()
            self.outPip.append(c)
            time.sleep(0.02)
            # Re-inject any look-ahead characters that were not an arrow key.
            if 'fitc1' in dir():
                self.process_char(fitc1)
                self.cursor += 1
            if 'fitc2' in dir():
                self.process_char(fitc2)
                self.cursor += 1
        elif ord(c) == 3: # Ctrl+C
            self.stop()
            self.isPause = True
            if raw_input('Exit?(y) ') == 'y':
                sys.stdout.write('Command Line Exit')
            else:
                self.start()
                self.isPause = False
        elif ord(c) in (8, 127): # Backspace
            if self.strBuff:
                if ord(self.strBuff[-1]) < 128:
                    sys.stdout.write('\b \b')
                else:
                    # Wide (multi-byte) character: erase two terminal columns
                    # and, on Linux, the extra buffered bytes of the encoding.
                    sys.stdout.write('\b\b \b')
                    if OS == 'Linux':
                        self.strBuff.pop()
                        self.strBuff.pop()
                self.strBuff.pop()
                self.cursor -= 1
        elif c == '\n':
            if self.strBuff:
                if self.inputMaintain:
                    sys.stdout.write(c)
                else:
                    sys.stdout.write('\r' + ' ' * 50 + '\r')
                sys.stdout.flush()
                self.reprint_input()
                # Hand the finished line to consumers and reset editing state.
                self.output_command(''.join(self.strBuff))
                self.strBuff = []
                self.historyCmd = -1
        elif ord(c) == 224: # Windows direction
            if OS == 'Windows':
                direction = self.getch()
                self.process_direction_char(direction)
        else:
            sys.stdout.write(c)
            sys.stdout.flush()
            self.strBuff.append(c)
            self.cursor += 1
    def command_thread(self):
        """Keyboard loop: read raw characters and feed them to process_char
        until stop() clears isLaunch."""
        c = None
        while self.isLaunch:
            c = self.getch()
            self.process_char(c)
            time.sleep(0.01)
    def start(self):
        """Launch the printer and keyboard threads and draw the prompt."""
        self.isLaunch = True
        thread.start_new_thread(self.print_thread, ())
        self.reprint_input()
        thread.start_new_thread(self.command_thread, ())
    def stop(self):
        """Clear the prompt line and signal both worker threads to exit."""
        sys.stdout.write('\r' + ' ' * 50 + '\r')
        sys.stdout.flush()
        self.isLaunch = False
    def print_line(self, msg = None):
        # Queue a line for the printer thread to display above the prompt.
        self.inPip.append(msg)
    def clear(self):
        # Clear the terminal (cls on Windows, clear elsewhere) and redraw.
        os.system('cls' if platform.system() == 'Windows' else 'clear')
        self.reprint_input()
    def get_command_pip(self):
        # Consumers poll this list for commands the user has entered.
        return self.outPip
    def set_header(self, header):
        # Change the prompt header shown before the symbol.
        self.header = header
if __name__ == '__main__':
c = ChatLikeCMD()
s = c.get_command_pip()
c.start()
def loopinput(c):
while True:
c.print_line('LOOP INPUT......')
time.sleep(3)
thread.start_new_thread(loopinput, (c,))
while c.isLaunch or c.isPause:
if s:
c.print_line(s.pop())
time.sleep(0.01)<|fim▁end|> | print self.inPip.pop()
# linux special
|
<|file_name|>PlatformServerlessTest.ts<|end_file_name|><|fim▁begin|>import {DITest} from "@tsed/di";
import {APIGatewayEventDefaultAuthorizerContext, APIGatewayProxyEventBase, APIGatewayProxyHandler} from "aws-lambda";
import {APIGatewayProxyResult} from "aws-lambda/trigger/api-gateway-proxy";
import {createFakeEvent} from "./createFakeEvent";
import {createFakeContext} from "./createFakeContext";
import {nameOf, Type} from "@tsed/core";
import {PlatformBuilder, PlatformBuilderSettings} from "@tsed/common";
export interface LambdaPromiseResult extends Promise<APIGatewayProxyResult> {}
export class LambdaClientRequest extends Promise<APIGatewayProxyResult> {
event = createFakeEvent();
context = createFakeContext();
static call(lambdaName: string) {
const resolvers: any = {};
const promise = new LambdaClientRequest((resolve, reject) => {
resolvers.resolve = resolve;
resolvers.reject = reject;
});
promise.init(lambdaName, resolvers.resolve, resolvers.reject);
return promise;
}
static get(path: string, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
return this.call("handler").get(path, options);
}
static post(path: string, body?: any, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
return this.call("handler").post(path, body, options);
}
static put(path: string, body?: any, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
return this.call("handler").put(path, body, options);<|fim▁hole|> }
static patch(path: string, body?: any, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
return this.call("handler").patch(path, body, options);
}
static delete(path: string, body?: any, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
return this.call("handler").delete(path, body, options);
}
get(path: string, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
Object.assign(this.event, options);
this.event.path = path;
this.event.httpMethod = "GET";
return this;
}
post(path: string, body: any = {}, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
Object.assign(this.event, options);
this.event.path = path;
this.event.httpMethod = "POST";
this.body(body);
return this;
}
patch(path: string, body: any = {}, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
Object.assign(this.event, options);
this.event.path = path;
this.event.httpMethod = "PATCH";
this.body(body);
return this;
}
put(path: string, body: any = {}, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
Object.assign(this.event, options);
this.event.path = path;
this.event.httpMethod = "PUT";
this.body(body);
return this;
}
delete(path: string, body: any = {}, options: Partial<APIGatewayProxyEventBase<APIGatewayEventDefaultAuthorizerContext>> = {}) {
Object.assign(this.event, options);
this.event.path = path;
this.event.httpMethod = "DELETE";
this.body(body);
return this;
}
query(query: any) {
this.event.queryStringParameters = {
...this.event.queryStringParameters,
...JSON.parse(JSON.stringify(query))
};
return this;
}
params(pathParameters: any) {
this.event.pathParameters = {
...this.event.pathParameters,
...JSON.parse(JSON.stringify(pathParameters))
};
return this;
}
headers(headers: Record<string, any>) {
this.event.headers = {
...this.event.headers,
...JSON.parse(JSON.stringify(headers))
};
return this;
}
body(body: any) {
if (body !== undefined) {
this.event.headers["content-type"] = "application/json";
this.event.body = JSON.stringify(body);
}
return this;
}
protected init(lambda: string, resolve: Function, reject: Function) {
setTimeout(async () => {
try {
const result = await PlatformServerlessTest.callbacks[lambda](this.event, this.context, resolve as any);
resolve(result as any);
} catch (er) {
reject(er);
}
});
}
}
/**
 * DITest specialisation that boots a Ts.ED serverless platform (or a
 * serverless-http wrapper), exposes its lambda handlers as callable
 * callbacks and wires the platform's injector into DITest.
 */
export class PlatformServerlessTest extends DITest {
  // Lambda callbacks by name; "handler" is always set to the main entry point.
  static callbacks: Record<string, APIGatewayProxyHandler> = {};
  // The booted platform instance (kept so reset() can stop it).
  static instance: any;
  // Request builder used by tests to invoke the registered handlers.
  static request = LambdaClientRequest;

  // Overload: serverless-http style bootstrap(server, settings).
  static bootstrap(
    serverless: {bootstrap: (server: Type<any>, settings: PlatformBuilderSettings) => PlatformBuilder},
    {server, ...settings}: PlatformBuilderSettings & {server: Type<any>}
  ): () => Promise<any>;
  // Overload: plain serverless bootstrap(settings) with lambda classes.
  static bootstrap(
    serverless: {bootstrap: (settings: Partial<TsED.Configuration> & {lambda?: Type[]}) => any},
    {server, ...settings}: PlatformBuilderSettings
  ): () => Promise<any>;
  static bootstrap(serverless: any, {server, ...settings}: PlatformBuilderSettings) {
    // Returns an async "before" hook suitable for mocha/jest setup.
    return async function before(): Promise<void> {
      settings = DITest.configure(settings);
      // Heuristic: the serverless-http flavour is detected by its class name.
      const isServerlessHttp = nameOf(serverless).includes("Http");
      // @ts-ignore
      const instance = isServerlessHttp ? serverless.bootstrap(server, settings) : serverless.bootstrap(settings);
      PlatformServerlessTest.instance = instance;
      PlatformServerlessTest.callbacks = {};
      if (!isServerlessHttp) {
        PlatformServerlessTest.callbacks = instance.callbacks();
      }
      PlatformServerlessTest.callbacks.handler = instance.handler();
      // used by inject method
      DITest.injector = instance.injector;
      return instance.promise;
    };
  }
  /**
   * Resets the test injector of the test context, so it won't pollute your next test. Call this in your `tearDown` logic.
   */
  static async reset() {
    if (PlatformServerlessTest.instance) {
      await PlatformServerlessTest.instance.stop();
    }
    if (DITest.hasInjector()) {
      await DITest.injector.destroy();
      DITest._injector = null;
    }
  }
}
<|file_name|>json.java<|end_file_name|><|fim▁begin|>package com.xhuihui.app.json;<|fim▁hole|> */
public class json {
}<|fim▁end|> |
/**
* Created by lihuiguang on 2017/7/13. |
<|file_name|>Shader.color.js<|end_file_name|><|fim▁begin|>PP.lib.shader.shaders.color = {
info: {
name: 'color adjustement',
author: 'Evan Wallace',
link: 'https://github.com/evanw/glfx.js'
},
uniforms: { textureIn: { type: "t", value: 0, texture: null },
brightness: { type: "f", value: 0.0 },
contrast: { type: "f", value: 0.0 },
hue: { type: "f", value: 0.0 },
saturation: { type: "f", value: 0.0 },
exposure: { type: "f", value: 0.0 },
negative: { type: "i", value: 0 }
},
controls: {
brightness: {min:-1, max: 1, step:.05},
contrast: {min:-1, max: 1, step:.05},
hue: {min:-1, max: 1, step:.05},
saturation: {min:-1, max: 1, step:.05},
exposure: {min:0, max: 1, step:.05},
negative: {}
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"varying vec2 vUv;",
"uniform sampler2D textureIn;",
"uniform float brightness;",
"uniform float contrast;",
"uniform float hue;",
"uniform float saturation;",
"uniform float exposure;",
"uniform int negative;",
"const float sqrtoftwo = 1.41421356237;",
"void main() {",
"vec4 color = texture2D(textureIn, vUv);",
"color.rgb += brightness;",
"if (contrast > 0.0) {",
"color.rgb = (color.rgb - 0.5) / (1.0 - contrast) + 0.5;",
"} else {",
"color.rgb = (color.rgb - 0.5) * (1.0 + contrast) + 0.5;",
"}",
"/* hue adjustment, wolfram alpha: RotationTransform[angle, {1, 1, 1}][{x, y, z}] */",
"float angle = hue * 3.14159265;",
"float s = sin(angle), c = cos(angle);",
"vec3 weights = (vec3(2.0 * c, -sqrt(3.0) * s - c, sqrt(3.0) * s - c) + 1.0) / 3.0;",
"float len = length(color.rgb);",
"color.rgb = vec3(",
"dot(color.rgb, weights.xyz),",
"dot(color.rgb, weights.zxy),",
"dot(color.rgb, weights.yzx)",
");",
"/* saturation adjustment */",
"float average = (color.r + color.g + color.b) / 3.0;",
"if (saturation > 0.0) {",
"color.rgb += (average - color.rgb) * (1.0 - 1.0 / (1.0 - saturation));",
"} else {",
"color.rgb += (average - color.rgb) * (-saturation);",
"}",
"if(negative == 1){",
" color.rgb = 1.0 - color.rgb;",
"}",
"if(exposure > 0.0){",
" color = log2(vec4(pow(exposure + sqrtoftwo, 2.0))) * color;",
"}",
"gl_FragColor = color;",
"}",
].join("\n")
};
PP.lib.shader.shaders.bleach = {
info: {
name: 'Bleach',
author: 'Brian Chirls @bchirls',
link: 'https://github.com/brianchirls/Seriously.js'
},
uniforms: { textureIn: { type: "t", value: 0, texture: null },
amount: { type: "f", value: 1.0 }
},
controls: {
amount: {min:0, max: 1, step:.1}
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
'varying vec2 vUv;',
'uniform sampler2D textureIn;',
'uniform float amount;',
'const vec4 one = vec4(1.0);',
'const vec4 two = vec4(2.0);',
'const vec4 lumcoeff = vec4(0.2125,0.7154,0.0721,0.0);',
'vec4 overlay(vec4 myInput, vec4 previousmix, vec4 amount) {',
' float luminance = dot(previousmix,lumcoeff);',
' float mixamount = clamp((luminance - 0.45) * 10.0, 0.0, 1.0);',
' vec4 branch1 = two * previousmix * myInput;',
' vec4 branch2 = one - (two * (one - previousmix) * (one - myInput));',
' vec4 result = mix(branch1, branch2, vec4(mixamount) );',
' return mix(previousmix, result, amount);',
'}',
'void main (void) {',
' vec4 pixel = texture2D(textureIn, vUv);',
' vec4 luma = vec4(vec3(dot(pixel,lumcoeff)), pixel.a);',
' gl_FragColor = overlay(luma, pixel, vec4(amount));',
'}'
].join("\n")
};
PP.lib.shader.shaders.plasma = {
info: {
name: 'plasma',
author: 'iq',
link: 'http://www.iquilezles.org'
},
uniforms: { resolution: { type: "v2", value: new THREE.Vector2( PP.config.dimension.width, PP.config.dimension.height )},<|fim▁hole|> saturation: { type: "f", value: 1.0 },
waves: { type: "f", value: .2 },
wiggle: { type: "f", value: 1000.0 },
scale: { type: "f", value: 1.0 }
},
controls: {
speed: {min:0, max: .1, step:.001},
saturation: {min:0, max: 10, step:.01},
waves: {min:0, max: .4, step:.0001},
wiggle: {min:0, max: 10000, step:1},
scale: {min:0, max: 10, step:.01}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"varying vec2 vUv;",
"uniform float time;",
"uniform float saturation;",
"uniform vec2 resolution;",
"uniform float waves;",
"uniform float wiggle;",
"uniform float scale;",
"void main() {",
"float x = gl_FragCoord.x*scale;",
"float y = gl_FragCoord.y*scale;",
"float mov0 = x+y+cos(sin(time)*2.)*100.+sin(x/100.)*wiggle;",
"float mov1 = y / resolution.y / waves + time;",
"float mov2 = x / resolution.x / waves;",
"float r = abs(sin(mov1+time)/2.+mov2/2.-mov1-mov2+time);",
"float g = abs(sin(r+sin(mov0/1000.+time)+sin(y/40.+time)+sin((x+y)/100.)*3.));",
"float b = abs(sin(g+cos(mov1+mov2+g)+cos(mov2)+sin(x/1000.)));",
"vec3 plasma = vec3(r,g,b) * saturation;",
"gl_FragColor = vec4( plasma ,1.0);",
"}"
].join("\n")
};
PP.lib.shader.shaders.plasma2 = {
info: {
name: 'plasma2',
author: 'mrDoob',
link: 'http://mrdoob.com'
},
uniforms: { resolution: { type: "v2", value: new THREE.Vector2( PP.config.dimension.width, PP.config.dimension.height )},
time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.01 },
qteX: { type: "f", value: 80.0 },
qteY: { type: "f", value: 10.0 },
intensity: { type: "f", value: 10.0 },
hue: { type: "f", value: .25 }
},
controls: {
speed: {min:0, max: 1, step:.001},
qteX: {min:0, max: 200, step:1},
qteY: {min:0, max: 200, step:1},
intensity: {min:0, max: 50, step:.1},
hue: {min:0, max: 2, step:.001}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"uniform float time;",
"uniform vec2 resolution;",
"uniform float qteX;",
"uniform float qteY;",
"uniform float intensity;",
"uniform float hue;",
"void main() {",
"vec2 position = gl_FragCoord.xy / resolution.xy;",
"float color = 0.0;",
"color += sin( position.x * cos( time / 15.0 ) * qteX ) + cos( position.y * cos( time / 15.0 ) * qteY );",
"color += sin( position.y * sin( time / 10.0 ) * 40.0 ) + cos( position.x * sin( time / 25.0 ) * 40.0 );",
"color += sin( position.x * sin( time / 5.0 ) * 10.0 ) + sin( position.y * sin( time / 35.0 ) * 80.0 );",
"color *= sin( time / intensity ) * 0.5;",
"gl_FragColor = vec4( vec3( color, color * (hue*2.0), sin( color + time / (hue*12.0) ) * (hue*3.0) ), 1.0 );",
"}"
].join("\n")
};
PP.lib.shader.shaders.plasma3 = {
info: {
name: 'plasma 3',
author: 'Hakim El Hattab',
link: 'http://hakim.se'
},
uniforms: { color: { type: "c", value: new THREE.Color( 0x8CC6DA ) },
resolution: { type: "v2", value: new THREE.Vector2( PP.config.dimension.width, PP.config.dimension.height )},
time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.05 },
scale: { type: "f", value: 10.0 },
quantity: { type: "f", value: 5.0 },
lens: { type: "f", value: 2.0 },
intensity: { type: "f", value: .5 }
},
controls: {
speed: {min:0, max: 1, step:.001},
scale: {min:0, max: 100, step:.1},
quantity: {min:0, max: 100, step:1},
lens: {min:0, max: 100, step:1},
intensity: {min:0, max: 5, step:.01}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"uniform float time;",
"uniform vec2 resolution;",
"uniform vec3 color;",
"uniform float scale;",
"uniform float quantity;",
"uniform float lens;",
"uniform float intensity;",
"void main() {",
"vec2 p = -1.0 + 2.0 * gl_FragCoord.xy / resolution.xy;",
"p = p * scale;",
"vec2 uv;",
"float a = atan(p.y,p.x);",
"float r = sqrt(dot(p,p));",
"uv.x = 2.0*a/3.1416;",
"uv.y = -time+ sin(7.0*r+time) + .7*cos(time+7.0*a);",
"float w = intensity+1.0*(sin(time+lens*r)+ 1.0*cos(time+(quantity * 2.0)*a));",
"gl_FragColor = vec4(color*w,1.0);",
"}"
].join("\n")
};
PP.lib.shader.shaders.plasma4 = {
info: {
name: 'plasma 4 (vortex)',
author: 'Hakim El Hattab',
link: 'http://hakim.se'
},
uniforms: { color: { type: "c", value: new THREE.Color( 0xff5200 ) }, // 0x8CC6DA
resolution: { type: "v2", value: new THREE.Vector2( PP.config.dimension.width, PP.config.dimension.height )},
time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.05 },
scale: { type: "f", value: 20.0 },
wobble: { type: "f", value: 1.0 },
ripple: { type: "f", value: 5.0 },
light: { type: "f", value: 2.0 }
},
controls: {
speed: {min:0, max: 1, step:.001},
scale: {min:0, max: 100, step:.1},
wobble: {min:0, max: 50, step:1},
ripple: {min:0, max: 50, step:.1},
light: {min:1, max: 50, step:1}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"uniform float time;",
"uniform vec2 resolution;",
"uniform vec3 color;",
"uniform float scale;",
"uniform float wobble;",
"uniform float ripple;",
"uniform float light;",
"void main() {",
"vec2 p = -1.0 + 2.0 * gl_FragCoord.xy / resolution.xy;",
"vec2 uv;",
"float a = atan(p.y,p.x);",
"float r = sqrt(dot(p,p));",
"float u = cos(a*(wobble * 2.0) + ripple * sin(-time + scale * r));",
"float intensity = sqrt(pow(abs(p.x),light) + pow(abs(p.y),light));",
"vec3 result = u*intensity*color;",
"gl_FragColor = vec4(result,1.0);",
"}"
].join("\n")
};
PP.lib.shader.shaders.plasma5 = {
info: {
name: 'plasma 5',
author: 'Silexars',
link: 'http://www.silexars.com'
},
uniforms: { resolution: { type: "v2", value: new THREE.Vector2( PP.config.dimension.width, PP.config.dimension.height )},
time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.01 }
},
controls: {
speed: {min:0, max: .2, step:.001}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"uniform vec2 resolution;",
"uniform float time;",
"void main() {",
"vec3 col;",
"float l,z=time;",
"for(int i=0;i<3;i++){",
"vec2 uv;",
"vec2 p=gl_FragCoord.xy/resolution.xy;",
"uv=p;",
"p-=.5;",
"p.x*=resolution.x/resolution.y;",
"z+=.07;",
"l=length(p);",
"uv+=p/l*(sin(z)+1.)*abs(sin(l*9.-z*2.));",
"col[i]=.01/length(abs(mod(uv,1.)-.5));",
"}",
"gl_FragColor=vec4(col/l,1.0);",
"}"
].join("\n")
};
PP.lib.shader.shaders.plasmaByTexture = {
info: {
name: 'plasma by texture',
author: 'J3D',
link: 'http://www.everyday3d.com/j3d/demo/011_Plasma.html'
},
uniforms: { textureIn: { type: "t", value: 0, texture: null },
time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.01 }
},
controls: {
speed: {min:0, max: .1, step:.001}
},
update: function(e){
e.material.uniforms.time.value += e.material.uniforms.speed.value;
},
vertexShader: PP.lib.vextexShaderBase.join("\n"),
fragmentShader: [
"varying vec2 vUv;",
"uniform sampler2D textureIn;",
"uniform float time;",
"void main() {",
"vec2 ca = vec2(0.1, 0.2);",
"vec2 cb = vec2(0.7, 0.9);",
"float da = distance(vUv, ca);",
"float db = distance(vUv, cb);",
"float t = time * 0.5;",
"float c1 = sin(da * cos(t) * 16.0 + t * 4.0);",
"float c2 = cos(vUv.y * 8.0 + t);",
"float c3 = cos(db * 14.0) + sin(t);",
"float p = (c1 + c2 + c3) / 3.0;",
"gl_FragColor = texture2D(textureIn, vec2(p, p));",
"}"
].join("\n")
};<|fim▁end|> | time: { type: "f", value: 0.0},
speed: { type: "f", value: 0.01 }, |
<|file_name|>ganeti.tools.node_daemon_setup_unittest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.<|fim▁hole|>#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for testing ganeti.tools.node_daemon_setup"""
import unittest
from ganeti import errors
from ganeti import constants
from ganeti.tools import node_daemon_setup
import testutils
_SetupError = node_daemon_setup.SetupError
class TestVerifySsconf(unittest.TestCase):
  """Tests for node_daemon_setup.VerifySsconf."""
  def testNoSsconf(self):
    """A missing, None or empty NDS_SSCONF entry must raise SetupError."""
    self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf,
                      {}, NotImplemented, _verify_fn=NotImplemented)
    for items in [None, {}]:
      self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, {
        constants.NDS_SSCONF: items,
        }, NotImplemented, _verify_fn=NotImplemented)
  def _Check(self, names):
    # Verification callback: the ssconf dict must contain exactly these keys.
    self.assertEqual(frozenset(names), frozenset([
      constants.SS_CLUSTER_NAME,
      constants.SS_INSTANCE_LIST,
      ]))
  def testSuccess(self):
    """Valid ssconf data is returned unchanged; a wrong cluster name raises."""
    ssdata = {
      constants.SS_CLUSTER_NAME: "cluster.example.com",
      constants.SS_INSTANCE_LIST: [],
      }
    result = node_daemon_setup.VerifySsconf({
      constants.NDS_SSCONF: ssdata,
      }, "cluster.example.com", _verify_fn=self._Check)
    self.assertEqual(result, ssdata)
    self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, {
      constants.NDS_SSCONF: ssdata,
      }, "wrong.example.com", _verify_fn=self._Check)
  def testInvalidKey(self):
    """Unknown ssconf keys must raise GenericError."""
    self.assertRaises(errors.GenericError, node_daemon_setup.VerifySsconf, {
      constants.NDS_SSCONF: {
        "no-valid-ssconf-key": "value",
        },
      }, NotImplemented)
if __name__ == "__main__":
testutils.GanetiTestProgram()<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rand;
use std::io;
use rand::Rng;
use std::cmp::Ordering;
fn main() {
println!("Guess the number!");
let secret_number = rand::thread_rng().gen_range(1, 101);
print!("secret_number is {}\n", secret_number);
<|fim▁hole|> // 为什么必须在loop里面做new
let mut guess = String::new();
println!("Please input your number:");
io::stdin()
.read_line(&mut guess)
.expect("Failed to read line");
let guess: u32 = match guess.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
println!("your guess : {}", guess);
match guess.cmp(&secret_number) {
Ordering::Less => println!("less"),
Ordering::Equal => {
println!("you win");
break;
}
Ordering::Greater => println!("big"),
}
}
}<|fim▁end|> | loop { |
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyLocalcider(PythonPackage):
"""Tools for calculating sequence properties of disordered proteins"""<|fim▁hole|> url = "https://pypi.io/packages/source/l/localcider/localcider-0.1.14.tar.gz"
version('0.1.14', sha256='54ff29e8a011947cca5df79e96f3c69a76c49c4db41dcf1608663992be3e3f5f')
depends_on('py-setuptools', type='build')
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-matplotlib', type=('build', 'run'))
depends_on('py-scipy', type=('build', 'run'))<|fim▁end|> |
homepage = "http://pappulab.github.io/localCIDER" |
<|file_name|>GetRepositoryInfo.java<|end_file_name|><|fim▁begin|>package org.alfresco.repo.cmis.ws;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="repositoryId" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="extension" type="{http://docs.oasis-open.org/ns/cmis/messaging/200908/}cmisExtensionType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"repositoryId",
"extension"
})
@XmlRootElement(name = "getRepositoryInfo")
public class GetRepositoryInfo {
@XmlElement(required = true)
protected String repositoryId;
@XmlElementRef(name = "extension", namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", type = JAXBElement.class)
protected JAXBElement<CmisExtensionType> extension;
<|fim▁hole|> *
* @return
* possible object is
* {@link String }
*
*/
public String getRepositoryId() {
return repositoryId;
}
/**
* Sets the value of the repositoryId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setRepositoryId(String value) {
this.repositoryId = value;
}
/**
* Gets the value of the extension property.
*
* @return
* possible object is
* {@link JAXBElement }{@code <}{@link CmisExtensionType }{@code >}
*
*/
public JAXBElement<CmisExtensionType> getExtension() {
return extension;
}
/**
* Sets the value of the extension property.
*
* @param value
* allowed object is
* {@link JAXBElement }{@code <}{@link CmisExtensionType }{@code >}
*
*/
public void setExtension(JAXBElement<CmisExtensionType> value) {
this.extension = ((JAXBElement<CmisExtensionType> ) value);
}
}<|fim▁end|> | /**
* Gets the value of the repositoryId property.
|
<|file_name|>blob.spec.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
<|fim▁hole|>
// Integration test: reading a BLOB column through the websql driver.
describe('entitype-integration-tests > query > websql > blob', async () => {
  // Re-seed the Northwind fixture database before every test (10s budget).
  beforeEach(async function () {
    this.timeout(10000);
    await seedNorthwindDatabase();
  });
  // NOTE(review): the title says Uint8Array but the assertion checks for a
  // Node Buffer (a Uint8Array subclass) — confirm which contract is intended.
  it('should return Uint8Array for blob type', async () => {
    let ctx = new NorthwindContext();
    let photo = await ctx.employees.select(x => x.photo).first();
    expect(photo).to.be.instanceof(Buffer);
  });
});
import { seedNorthwindDatabase } from './helper';
|
<|file_name|>bezier_traj.py<|end_file_name|><|fim▁begin|>from gen_data_from_rbprm import *
from hpp.corbaserver.rbprm.tools.com_constraints import get_com_constraint
from hpp.gepetto import PathPlayer
from hpp.corbaserver.rbprm.state_alg import computeIntermediateState, isContactCreated
from numpy import matrix, asarray
from numpy.linalg import norm
from spline import bezier
def __curveToWps(curve):
    """Return the curve's waypoints as a nested Python list (one waypoint per row)."""
    return asarray(curve.waypoints().transpose()).tolist()
def __Bezier(wps, init_acc = [0.,0.,0.], end_acc = [0.,0.,0.], init_vel = [0.,0.,0.], end_vel = [0.,0.,0.]):
    # Fit a bezier curve through the waypoints *wps* subject to end-point
    # velocity/acceleration constraints; returns (waypoint list, curve object).
    # NOTE(review): `curve_constraints` is not imported explicitly in this file
    # -- presumably it comes in via the `gen_data_from_rbprm` star import, or
    # should be imported from `spline` alongside `bezier`; confirm.
    # NOTE(review): the list defaults are shared mutable objects -- safe only
    # because they are never mutated here.
    c = curve_constraints();
    c.init_vel = matrix(init_vel);
    c.end_vel = matrix(end_vel);
    c.init_acc = matrix(init_acc);
    c.end_acc = matrix(end_acc);
    matrix_bezier = matrix(wps).transpose()
    curve = bezier(matrix_bezier, c)
    return __curveToWps(curve), curve
    #~ return __curveToWps(bezier(matrix_bezier))
# Path ids accumulated by the generation helpers below.
allpaths = []
def play_all_paths():
    """Replay every stored path id through the path player."""
    # The index from enumerate() was unused; iterate the ids directly.
    for pid in allpaths:
        ppl(pid)
def play_all_paths_smooth():
    """Replay only the odd-indexed path ids (the smoothed segments)."""
    for pid in allpaths[1::2]:
        ppl(pid)
def play_all_paths_qs():
    """Replay only the even-indexed path ids (the quasi-static segments)."""
    for pid in allpaths[0::2]:
        ppl(pid)
def test(s1,s2, path = False, use_rand = False, just_one_curve = False, num_optim = 0, effector = False, mu=0.5, use_Kin = True) :
q1 = s1.q()
q2 = s2.q()
stateid = s1.sId
stateid1 = s2.sId
sInt = computeIntermediateState(s1,s2)
com_1 = s1.getCenterOfMass()
com_2 = s2.getCenterOfMass()
createPtBox(viewer.client.gui, 0, com_1, 0.01, [0,1,1,1.])
createPtBox(viewer.client.gui, 0, com_2, 0.01, [0,1,1,1.])
#~ isContactCreated_= isContactCreated(s1,s2)
isContactCreated_ = True
data = gen_sequence_data_from_state_objects(s1,s2,sInt,mu = mu, isContactCreated = isContactCreated_)
c_bounds_1 = s1.getComConstraint(limbsCOMConstraints)
c_bounds_mid = sInt.getComConstraint(limbsCOMConstraints)
c_bounds_2 = s2.getComConstraint(limbsCOMConstraints)
success, c_mid_1, c_mid_2 = solve_quasi_static(data, c_bounds = [c_bounds_1, c_bounds_2, c_bounds_mid], use_rand = use_rand, mu = mu, use_Kin = use_Kin)
print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ calling effector", effector
paths_ids = []
if path and success:
#~ fullBody.straightPath([c_mid_1[0].tolist(),c_mid_2[0].tolist()])
#~ fullBody.straightPath([c_mid_2[0].tolist(),com_2])
if just_one_curve:
bezier_0, curve = __Bezier([com_1,c_mid_1[0].tolist(),c_mid_2[0].tolist(),com_2])
createPtBox(viewer.client.gui, 0, c_mid_1[0].tolist(), 0.01, [0,1,0,1.])
createPtBox(viewer.client.gui, 0, c_mid_2[0].tolist(), 0.01, [0,1,0,1.])
#testing intermediary configurations
partions = [0.,0.3,0.8,1.]
#~ if(not isContactCreated_):
#~ partions = [0.,0.6,0.8,1.]
print 'paritions:', partions[1], " "
com_interm2 = curve(partions[2])
#~ print "com_1", com_1
#~ print "com_1", curve(partions[0])
#~ print "com_interm2", com_interm2
#~ print "com_2", com_2
#~ print "com_2", curve(partions[-1])
success_proj1 = False;
success_proj2 = False
for _ in range(7):
print "WRTFF", partions[1]
com_interm1 = curve(partions[1])
print "com_interm1", com_interm1
success_proj1 = project_com_colfree(fullBody, stateid , asarray((com_interm1).transpose()).tolist()[0])
if success_proj1:
break
else:
print "decreasing com"
partions[1] -= 0.04
for _ in range(7):
print "WRTFF", partions[-2]
com_interm2 = curve(partions[-2])
print "com_interm2", com_interm2
success_proj2 = project_com_colfree(fullBody, stateid1 , asarray((com_interm2).transpose()).tolist()[0])
if success_proj2:
break
else:
print "decreasing com"
partions[-2] += 0.039
#~ success_proj2 = project_com_colfree(fullBody, stateid1 , asarray((com_interm2).transpose()).tolist()[0])
#~ if success_proj1:
#~ q_1 = fullBody.projectToCom(stateid, asarray((com_interm1).transpose()).tolist()[0])
#~ viewer(q_1)
if not success_proj1:
print "proj 1 failed"
return False, c_mid_1, c_mid_2, paths_ids
if not success_proj2:
print "proj 2 failed"
return False, c_mid_1, c_mid_2, paths_ids
p0 = fullBody.generateCurveTrajParts(bezier_0,partions)
#~ pp.displayPath(p0+1)
#~ pp.displayPath(p0+2)
ppl.displayPath(p0)
#~ ppl.displayPath(p0+1)
#~ ppl.displayPath(p0+2)
#~ ppl.displayPath(p0+3)
if(effector):
#~ assert False, "Cant deal with effectors right now"
paths_ids = [int(el) for el in fullBody.effectorRRT(stateid,p0+1,p0+2,p0+3,num_optim)]
else:
paths_ids = [int(el) for el in fullBody.comRRTFromPosBetweenState(stateid,stateid1,p0+1,p0+2,p0+3,num_optim)]
else:
success_proj1 = project_com_colfree(fullBody, stateid , c_mid_1[0].tolist())
success_proj2 = project_com_colfree(fullBody, stateid1 , c_mid_2[0].tolist())
if not success_proj1:
print "proj 1 failed"
return False, c_mid_1, c_mid_2, paths_ids
if not success_proj2:
print "proj 2 failed"
return False, c_mid_1, c_mid_2, paths_ids
bezier_0, curve = __Bezier([com_1,c_mid_1[0].tolist()] , end_acc = c_mid_1[1].tolist() , end_vel = [0.,0.,0.])
bezier_1, curve = __Bezier([c_mid_1[0].tolist(),c_mid_2[0].tolist()], end_acc = c_mid_2[1].tolist(), init_acc = c_mid_1[1].tolist(), init_vel = [0.,0.,0.], end_vel = [0.,0.,0.])
bezier_2, curve = __Bezier([c_mid_2[0].tolist(),com_2] , init_acc = c_mid_2[1].tolist(), init_vel = [0.,0.,0.])
p0 = fullBody.generateCurveTraj(bezier_0)
fullBody.generateCurveTraj(bezier_1)
fullBody.generateCurveTraj(bezier_2)
ppl.displayPath(p0)
#~ ppl.displayPath(p0+1)
#~ ppl.displayPath(p0+2)
paths_ids = [int(el) for el in fullBody.comRRTFromPosBetweenState(stateid,stateid1, p0,p0+1,p0+2,num_optim)]
#~ paths_ids = []
global allpaths
allpaths += paths_ids[:-1]
#~ allpaths += [paths_ids[-1]]
#~ pp(paths_ids[-1])
#~ return success, paths_ids, c_mid_1, c_mid_2
return success, c_mid_1, c_mid_2, paths_ids
#~ data = gen_sequence_data_from_state(fullBody,3,configs)
#~ pp(29),pp(9),pp(17)
from hpp.corbaserver.rbprm.tools.path_to_trajectory import *
def createPtBox(gui, winId, config, res = 0.01, color = [1,1,1,0.3]):
print "plottiun ", config
#~ resolution = res
#~ global scene
#~ global b_id
#~ boxname = scene+"/"+str(b_id)
#~ b_id += 1
#~ gui.addBox(boxname,resolution,resolution,resolution, color)
#~ gui.applyConfiguration(boxname,[config[0],config[1],config[2],1,0,0,0])
#~ gui.addSceneToWindow(scene,winId)
#~ gui.refresh()
def test_ineq(stateid, constraints, n_samples = 10, color=[1,1,1,1.]):
    """Monte-Carlo check of the CoM kinematic inequality Kin[0].c <= Kin[1]:
    sample points in a 2m cube around the current CoM and draw the samples
    that satisfy the constraint."""
    Kin = get_com_constraint(fullBody, stateid, fullBody.getConfigAtState(stateid), constraints, interm = False)
    #~ print "kin ", Kin
    #create box around current com
    fullBody.setCurrentConfig(fullBody.getConfigAtState(stateid))
    com = fullBody.getCenterOfMass()
    bounds_c = flatten([[com[i]-1., com[i]+1.] for i in range(3)]) # arbitrary
    for i in range(n_samples):
        # NOTE: the comprehension reuses the name `i` (0..2) to index the
        # per-axis bounds; in Python 2 it also leaks over the loop counter.
        c = array([uniform(bounds_c[2*i], bounds_c[2*i+1]) for i in range(3)])
        print "c: ", c
        if(Kin[0].dot(c)<=Kin[1]).all():
            print "boundaries satisfied"
            createPtBox(viewer.client.gui, 0, c, 0.01, color)
#~ test_ineq(0,{ rLegId : {'file': "hrp2/RL_com.ineq", 'effector' : 'RLEG_JOINT5'}}, 1000, [1,0,0,1])
#~ test_ineq(0,{ lLegId : {'file': "hrp2/LL_com.ineq", 'effector' : 'LLEG_JOINT5'}}, 1000, [0,1,0,1])
#~ test_ineq(0,{ rLegId : {'file': "hrp2/RA_com.ineq", 'effector' : rHand}}, 1000, [0,0,1,1])
#~ test_ineq(0,{ rLegId : {'file': "hrp2/RL_com.ineq", 'effector' : 'RLEG_JOINT5'}}, 1000, [0,1,1,1])
#~ test_ineq(0, limbsCOMConstraints, 1000, [0,1,1,1])
def gen(s1, s2, num_optim = 0, ine_curve =True, s = 1., effector = False, mu =0.5, gen_traj = True, use_Kin = True):
    """Try to connect states s1 -> s2, retrying once with random sampling on
    failure, then optionally turn the last three generated paths into a
    playable trajectory.

    Returns the trajectory list when gen_traj is truthy; otherwise returns
    None implicitly.
    """
    n_fail = 0;
    #~ viewer(configs[i])
    res = test(s1, s2, True, False, ine_curve,num_optim, effector, mu, use_Kin)
    if(not res[0]):
        print "lp failed"
        # red markers: the failed quasi-static waypoints
        createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [1,0,0,1.])
        createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [1,0,0,1.])
        found = False
        for j in range(1):
            # retry once with use_rand=True; green markers for the new waypoints
            res = test(s1, s2, True, True, ine_curve, num_optim, effector, mu, use_Kin)
            createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [0,1,0,1.])
            createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [0,1,0,1.])
            if res[0]:
                break
    if not res[0]:
        n_fail += 1
    print "n_fail ", n_fail
    if(gen_traj):
        #~ a = gen_trajectory_to_play(fullBody, ppl, allpaths[:-3], flatten([[s*0.2, s* 0.6, s* 0.2] for _ in range(len(allpaths[:-3]) / 3)]))
        a = gen_trajectory_to_play(fullBody, ppl, allpaths[-3:], flatten([[s*0.2, s* 0.6, s* 0.2] for _ in range(1)]))
        #~ a = gen_trajectory_to_play(fullBody, ppl, allpaths, flatten([[s] for _ in range(len(allpaths) )]))
        return a
def gen_several_states(states, num_optim = 0, ine_curve =True, s = 1., effector = False, mu =0.5, init_vel = [0.,0.,0.], init_acc = [0.,0.,0.], use_Kin = True):
com_1 = states[0].getCenterOfMass()
com_2 = states[-1].getCenterOfMass()
stateid = states[0].sId
stateid1 = states[-1].sId
com_vel = init_vel[:]
com_acc = init_acc[:]
start = states[0].sId
len_con = len(states)
print "AAAAAAAAAAAAAAAAAAAAAAAAAAAAA com_vel", com_vel
print "AAAAAAAAAAAAAAAAAAAAAAAAAAAA com_acc", com_acc
print "going from, to ", com_1, "->", com_2
print "going from, to ", start, "->", start + len_con
allpoints = [com_1]
all_partitions = []
n_fail = 0;
for i in range (len(states)-1):
#~ viewer(configs[i])
res = test(states[i], states[i+1], False, False, ine_curve,num_optim, effector, mu, use_Kin)
if(not res[0]):
print "lp failed"
createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [1,0,0,1.])
createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [1,0,0,1.])
found = False
for j in range(1):
res = test(i, False, True, ine_curve, num_optim, effector, mu, use_Kin)
createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [0,1,0,1.])
createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [0,1,0,1.])
if res[0]:
allpoints+=[res[1][0],res[2][0]]
step = (1./ len_con)
idx = step * (i - start)
all_partitions += [idx +0.3*step,idx+0.7*step,idx+step]
break
if not res[0]:
n_fail += 1
else:
allpoints+=[res[1][0],res[2][0]]
step = (1./ len_con)
#~ idx = step * (i - start)
idx = step * i
all_partitions += [idx +0.2*step,idx+0.8*step,idx+step]
all_partitions = [0.] + all_partitions
print "n_fail ", n_fail
print "generating super curve"
print all_partitions
allpoints+=[com_2]
bezier_0, curve = __Bezier(allpoints, init_acc = init_acc, init_vel = init_vel)
com_vel = curve.derivate(0.5,1)
com_acc = curve.derivate(0.5,2)
com_vel = flatten(asarray(com_vel).transpose().tolist())
com_acc = flatten(asarray(com_acc).transpose().tolist())
print "at", 0.5
print "com_vel", com_vel
print "com_acc", com_acc
com_vel = curve.derivate(all_partitions[-1],1)
com_acc = curve.derivate(all_partitions[-1],2)
com_vel = flatten(asarray(com_vel).transpose().tolist())
com_acc = flatten(asarray(com_acc).transpose().tolist())
p0 = fullBody.generateCurveTrajParts(bezier_0,all_partitions) + 1
ppl.displayPath(p0-1)
# now we need to project all states to the new com positions
print "WTF ", len(all_partitions)
for k in range(3, len(all_partitions),3):
print "k ", k
print all_partitions[k]
new_com = flatten(asarray(curve(all_partitions[k]).transpose()).tolist())
print "curve end ", curve(1.)
ok = False
#~ try:
st = states[k/3]
sid = st.sId
print "for state", sid
print "before project to new com ", new_com
print "before previous com", st.getCenterOfMass()
for _ in range(7):
print "WRTFF", all_partitions[k]
new_com = flatten(asarray(curve(all_partitions[k]).transpose()).tolist())
#~ com_interm1 = flatten(asarray(curve(all_partitions[k]).transpose()).tolist())
print "com_interm1", new_com
ok = project_com_colfree(fullBody, sid , new_com)
if ok:
#~ new_com = asarray((com_interm1).transpose()).tolist()[0]
print "ok !!!!!!!!!!!!!!!!!"
break
else:
print "decreasing com"
all_partitions[k] -= 0.04
ok = fullBody.projectStateToCOM(sid, new_com,50)
print "projection", ok
if ok:
q1 = fullBody.getConfigAtState(sid)
ok = fullBody.isConfigValid(q1)[0]
print "is config valud", ok
#~ except:
#~ print "hpperr"
#~ break
if not ok:
print "faield to project"
return
j = 0;
print "WTF2"
print "len con", len_con
print "p0", p0
for i in range(p0,p0+(len_con-1)*3,3):
print "paths ids", i, " ", i+1, " ", i+3
print "state ", start + j
#~ paths_ids = [int(el) for el in fullBody.comRRTFromPos(start+j,i,i+1,i+2,num_optim)]
#~ ppl.displayPath(p0)
if(effector):
#~ assert False, "Cant deal with effectors right now"
paths_ids = [int(el) for el in fullBody.effectorRRT(start+j,i,i+1,i+2,num_optim)]
else:
paths_ids = [int(el) for el in fullBody.comRRTFromPos(start+j,i,i+1,i+2,num_optim)]
#~ paths_ids = [int(el) for el in fullBody.comRRTFromPosBetweenState(stateid,stateid1,p0+1,p0+2,p0+3,num_optim)]
j += 1
global allpaths
allpaths += paths_ids[:-1]
#~ p0 = fullBody.generateCurveTrajParts(bezier_0,partions)
a = gen_trajectory_to_play(fullBody, ppl, allpaths, flatten([[s*0.2, s* 0.6, s* 0.2] for _ in range(len(allpaths) / 3)]))
return a, com_vel, com_acc
def gen_several_states_partial(start = 0, len_con = 1, num_optim = 0, ine_curve =True, s = 1., effector = False, mu =0.5, init_vel = [0.,0.,0.], init_acc = [0.,0.,0.], path = False):
com_1 = __get_com(fullBody, fullBody.getConfigAtState(start))
com_2 = __get_com(fullBody, fullBody.getConfigAtState(start+len_con))
com_vel = init_vel[:]
com_acc = init_acc[:]
print "going from, to ", com_1, "->", com_2
#~ print "going from, to ", start, "->", start + len_con
allpoints = [com_1]
all_partitions = []
n_fail = 0;
for i in range (start, start+len_con):
#~ viewer(configs[i])
res = test(i, False, False, ine_curve,num_optim, effector, mu)
if(not res[0]):
print "lp failed"
createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [1,0,0,1.])
createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [1,0,0,1.])
found = False
for j in range(10):
res = test(i, False, True, ine_curve, num_optim, effector, mu)
createPtBox(viewer.client.gui, 0, res[1][0], 0.01, [0,1,0,1.])
createPtBox(viewer.client.gui, 0, res[2][0], 0.01, [0,1,0,1.])
if res[0]:
allpoints+=[res[1][0],res[2][0]]
step = (1./ len_con)
idx = step * (i - start)
all_partitions += [idx +0.2*step,idx+0.8*step,idx+step]
break
if not res[0]:
n_fail += 1
else:
allpoints+=[res[1][0],res[2][0]]
step = (1./ len_con)
idx = step * (i - start)
all_partitions += [idx +0.2*step,idx+0.8*step,idx+step]
print "[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[", all_partitions
allpoints+=[com_2]
bezier_0, curve = __Bezier(allpoints, init_acc = com_acc, init_vel = com_vel)
all_partitions = [0.] + all_partitions[:-3]
com_vel = curve.derivate(all_partitions[-1],1)
com_acc = curve.derivate(all_partitions[-1],2)
com_vel = flatten(asarray(com_vel).transpose().tolist())
com_acc = flatten(asarray(com_acc).transpose().tolist())
p0 = fullBody.generateCurveTrajParts(bezier_0,all_partitions) + 1
print all_partitions
#~ ppl.displayPath(p0-1)
ppl.displayPath(p0)
ppl.displayPath(p0+1)
ppl.displayPath(p0+2)
#~ ppl.displayPath(p0)
# now we need to project all states to the new com positions
for k in range(3, len(all_partitions),3):
print "k ", k
print all_partitions[k]
new_com = flatten(asarray(curve(all_partitions[k]).transpose()).tolist())
ok = False
#~ try:
sid = start+k/3
print "for state", sid
print "before project to new com ", new_com
print "before previous com", __get_com(fullBody, fullBody.getConfigAtState(sid))
#~ new_com[0]+=0.02
ok = fullBody.projectStateToCOM(sid, new_com)
#~ print "projection", ok
if ok:
q1 = fullBody.getConfigAtState(sid)
ok = fullBody.isConfigValid(q1)[0]
#~ print "is config valud", ok
#~ except:
#~ print "hpperr"
#~ break
if not ok:
print "faield to project"
return [], com_vel, com_acc
j = 0;
#~ print "WTF2"
if path:
for i in range(p0,p0+len_con*3-3,3):
try:
#~ print "FOR STATE ", start+j
#~ print "USING PATHS", i
paths_ids = [int(el) for el in fullBody.comRRTFromPos(start+j,i,i+1,i+2,num_optim)]
#~ paths_ids = [int(el) for el in fullBody.effectorRRT(start+j,i,i+1,i+2,num_optim)]
except:
print "COULD NOT SOLVE COMRRT"
return [], com_vel, com_acc
j += 1
global allpaths
allpaths += paths_ids[:-1]
#~ p0 = fullBody.generateCurveTrajParts(bezier_0,partions)
#~ a = gen_trajectory_to_play(fullBody, ppl, allpaths, flatten([[s*0.2, s* 0.6, s* 0.2] for _ in range(len(allpaths) / 3)]))
a = [] #TODO
return a, com_vel, com_acc
# Module-level handles; populated by init_bezier_traj().
viewer = None
tp = None
ppl = None
fullBody = None
b_id = 0  # counter used to build unique debug-box names
scene = "bos"  # gepetto-viewer scene used for the debug markers
first_init = True  # ensures the viewer scene is only created once
def clean_path():
    # Drop every path id accumulated by the generation helpers.
    global allpaths
    allpaths = []
def init_bezier_traj(robot, r, pplayer, qs, comConstraints):
global viewer
global tp
global ppl
global fullBody
global viewer<|fim▁hole|> global first_init
configs = qs
viewer = r
ppl = pplayer
fullBody = robot
if first_init:
viewer.client.gui.createScene(scene)
first_init = False
global limbsCOMConstraints
limbsCOMConstraints = comConstraints
# Current end-of-segment CoM velocity/acceleration, threaded between go0/go2
# calls; reset() restores them to zero.
com_vel = [0.,0.,0.]
com_acc = [0.,0.,0.]
vels = []  # declared global in go0/go2 but only ever reset here
accs = []
path = []  # accumulated playable trajectory chunks
a_s = []
def go0(states, one_curve = True, num_optim = 0, mu = 0.6, s =None, use_kin = True, effector = False):
    """Generate and accumulate a trajectory over consecutive pairs in *states*.

    When *s* is None the per-segment time scaling is derived from the
    configuration-space distance between the two states (capped below at 1).
    Returns the module-level *path* list, which is also extended in place.
    """
    global com_vel
    global com_acc
    global vels
    global accs
    global path
    sc = s
    for i, el in enumerate(states[:-1]):
        if s == None:
            sc = max(norm(array(states[i+1].q()) - array(el.q())), 1.) * 0.5
        path += gen(el,states[i+1],mu=mu,num_optim=num_optim, s=sc, ine_curve = one_curve, use_Kin = use_kin, effector = effector)
    print "path", len(path)
    return path
def go2(states, one_curve = True, num_optim = 0, mu = 0.6, s =None, use_kin = True, effector = False, init_vel =com_vel, init_acc = com_acc):
global com_vel
global com_acc
global vels
global accs
if init_vel == None:
init_vel =com_vel
if init_acc == None:
init_acc =com_acc
path = []
sc = s
try:
for i, el in enumerate(states[:-2]):
print "************ one call to ", i
if s == None:
sc = max(norm(array(states[i+1].q()) - array(el.q())), 1.) * 0.6
print "states idds ", i, " ", i+2, " ", len (states[i:i+2])
a, ve, ac = gen_several_states(states[i:i+2],mu=mu,num_optim=num_optim, s=sc, ine_curve = one_curve, use_Kin = use_kin, effector = effector, init_vel =com_vel, init_acc = com_acc)
com_vel = ve
com_acc = ac
clean_path();
path += a
a, ve, ac = gen_several_states(states[-2:],mu=mu,num_optim=num_optim, s=sc, ine_curve = one_curve, use_Kin = use_kin, effector = effector, init_vel =com_vel, init_acc = com_acc)
com_vel = ve
com_acc = ac
path += a
except:
print "FAILT"
return path
print "path", len(path)
return path
def reset():
global com_vel
global com_acc
global vels
global accs
global a_s
global path
com_vel = [0.,0.,0.]
com_acc = [0.,0.,0.]
clean_path();
vels = []
accs = []
path = []
a_s = []
for i, config in enumerate(configs):
fullBody.setConfigAtState(i,config)<|fim▁end|> | global configs |
<|file_name|>gravmag_magdir_dipolemagdir.py<|end_file_name|><|fim▁begin|>"""
GravMag: Use the DipoleMagDir class to estimate the magnetization direction
of dipoles with known centers
"""
import numpy
from fatiando import mesher, gridder
from fatiando.utils import ang2vec, vec2ang, contaminate
from fatiando.gravmag import sphere
from fatiando.vis import mpl
from fatiando.gravmag.magdir import DipoleMagDir
from fatiando.constants import CM
# Make noise-corrupted synthetic data
inc, dec = -10.0, -15.0 # inclination and declination of the Geomagnetic Field
model = [mesher.Sphere(3000, 3000, 1000, 1000,
{'magnetization': ang2vec(6.0, -20.0, -10.0)}),
mesher.Sphere(7000, 7000, 1000, 1000,
{'magnetization': ang2vec(10.0, 3.0, -67.0)})]
area = (0, 10000, 0, 10000)
x, y, z = gridder.scatter(area, 1000, z=-150, seed=0)
tf = contaminate(sphere.tf(x, y, z, model, inc, dec), 5.0, seed=0)
# Give the centers of the dipoles
centers = [[3000, 3000, 1000], [7000, 7000, 1000]]
# Estimate the magnetization vectors
solver = DipoleMagDir(x, y, z, tf, inc, dec, centers).fit()
# Print the estimated and true dipole monents, inclinations and declinations
print 'Estimated magnetization (intensity, inclination, declination)'
for e in solver.estimate_:
print e
# Plot the fit and the normalized histogram of the residuals<|fim▁hole|>mpl.axis('scaled')
nlevels = mpl.contour(y, x, tf, (50, 50), 15, interp=True, color='r',
label='Observed', linewidth=2.0)
mpl.contour(y, x, solver.predicted(), (50, 50), nlevels, interp=True,
color='b', label='Predicted', style='dashed', linewidth=2.0)
mpl.legend(loc='upper left', shadow=True, prop={'size': 13})
mpl.xlabel('East y (m)', fontsize=14)
mpl.ylabel('North x (m)', fontsize=14)
mpl.subplot(1, 2, 2)
residuals_mean = numpy.mean(solver.residuals())
residuals_std = numpy.std(solver.residuals())
# Each residual is subtracted from the mean and the resulting
# difference is divided by the standard deviation
s = (solver.residuals() - residuals_mean) / residuals_std
mpl.hist(s, bins=21, range=None, normed=True, weights=None,
cumulative=False, bottom=None, histtype='bar', align='mid',
orientation='vertical', rwidth=None, log=False,
color=None, label=None)
mpl.xlim(-4, 4)
mpl.title("mean = %.3f std = %.3f" % (residuals_mean, residuals_std),
fontsize=14)
mpl.ylabel("P(z)", fontsize=14)
mpl.xlabel("z", fontsize=14)
mpl.show()<|fim▁end|> | mpl.figure(figsize=(14, 5))
mpl.subplot(1, 2, 1)
mpl.title("Total Field Anomaly (nT)", fontsize=14) |
<|file_name|>SendService.ts<|end_file_name|><|fim▁begin|>import _ from 'lodash';
import {gettext} from 'core/utils';
import {showModal} from 'core/services/modalService';
import {IArticle} from 'superdesk-api';
import {appConfig} from 'appConfig';
import {AuthoringWorkspaceService} from 'apps/authoring/authoring/services/AuthoringWorkspaceService';
import {fileUploadErrorModal} from '../../archive/controllers/file-upload-error-modal';
SendService.$inject = ['desks', 'api', '$q', 'notify', 'multi', '$rootScope', '$injector'];
export function SendService(
desks,
api,
$q,
notify,
multi,
$rootScope,
$injector,
) {
this.one = sendOne;
this.validateAndSend = validateAndSend;
this.all = sendAll;
this.oneAs = sendOneAs;
this.allAs = sendAllAs;
this.config = null;
this.getConfig = getConfig;
this.startConfig = startConfig;
this.getItemsFromPackages = getItemsFromPackages;
this.getValidItems = getValidItems;
var self = this;
// workaround for circular dependencies
function getAuthoringWorkspace(): AuthoringWorkspaceService {
return $injector.get('authoringWorkspace');
}
    /**
     * Partition *items* into valid/invalid for sending: ingest pictures must
     * meet the configured minimum dimensions.  Invalid items trigger the
     * file-upload error modal; only the valid ones are returned.
     */
    function getValidItems(items: Array<IArticle>) {
        const validItems = [];
        const invalidItems = [];
        items.forEach((item) => {
            if (appConfig.pictures && item.type === 'picture' && item._type === 'ingest') {
                // NOTE(review): `item` is always defined here, so `item?.`
                // guards nothing -- a missing `renditions`/`original` would
                // still throw.  Should this be `item.renditions?.original?.`?
                const pictureWidth = item?.renditions.original.width;
                const pictureHeight = item?.renditions.original.height;
                if (appConfig.pictures.minWidth > pictureWidth || appConfig.pictures.minHeight > pictureHeight) {
                    invalidItems.push({
                        valid: false,
                        name: item.headline || item.slugline || 'image',
                        width: item.renditions.original.width,
                        height: item.renditions.original.height,
                        type: 'image',
                    });
                } else {
                    validItems.push(item);
                }
            } else {
                validItems.push(item);
            }
        });
        if (invalidItems.length > 0) {
            showModal(fileUploadErrorModal(invalidItems));
        }
        return validItems;
    }
/**
* Send given item to a current user desk
*
* @param {Object} item
* @returns {Promise}
*/
function sendOne(item: IArticle) {
if (item._type === 'ingest') {
return api
.save('fetch', {}, {desk: desks.getCurrentDeskId()}, item)
.then(
(archiveItem) => {
item.task_id = archiveItem.task_id;
item.archived = archiveItem._created;
multi.reset();
return archiveItem;
}, (response) => {
var message = 'Failed to fetch the item';
if (angular.isDefined(response.data._message)) {
message = message + ': ' + response.data._message;
}
notify.error(gettext(message));
item.error = response;
},
)
.finally(() => {
if (item.actioning) {
item.actioning.archive = false;
}
});
} else if (item._type === 'externalsource') {
return api
.save(item.fetch_endpoint, {
guid: item.guid,
desk: desks.getCurrentDeskId(),
}, null, null, {repo: item.ingest_provider})
.then(
(fetched) => {
notify.success(gettext('Item Fetched.'));
return fetched;
}, (error) => {
item.error = error;
notify.error(gettext('Failed to get item.'));
return item;
},
)
.finally(() => {
if (item.actioning) {
item.actioning.externalsource = false;
}
});
}
}
function validateAndSend(item) {
const validItems = getValidItems([item]);
if (validItems.length > 0) {
return sendOne(item);
} else {
return $q.reject();
}
}
/**
* Send all given items to current user desk
*
* @param {Array} items
*/
    function sendAll(items) {
        const validItems = getValidItems(items);
        // NOTE(review): when nothing passes validation this returns undefined
        // rather than a rejected promise -- callers must handle both shapes.
        if (validItems.length > 0) {
            return Promise.all(validItems.map(sendOne));
        }
    }
/**
* Send given item using config
*
* @param {Object} item
* @param {Object} config
* @param {string} config.desk - desk id
* @param {string} config.stage - stage id
* @param {string} config.macro - macro name
* @param {string} action - name of the original action
* @returns {Promise}
*/
function sendOneAs(item, config, action) {
var data: any = getData(config);
if (item._type === 'ingest') {
return api.save('fetch', {}, data, item).then((archived) => {
item.archived = archived._created;
if (config.open) {
const authoringWorkspace: AuthoringWorkspaceService = $injector.get('authoringWorkspace');
authoringWorkspace.edit(archived);
}
return archived;
});
} else if (action && action === 'duplicateTo') {
return api.save('duplicate', {},
{desk: data.desk, stage: data.stage, type: item._type, item_id: item.item_id}, item)
.then((duplicate) => {
$rootScope.$broadcast('item:duplicate');
notify.success(gettext('Item Duplicated'));
if (config.open) {
getAuthoringWorkspace().edit({_id: duplicate._id}, 'edit');
}
return duplicate;
}, (response) => {
var message = 'Failed to duplicate the item';
if (angular.isDefined(response.data._message)) {
message = message + ': ' + response.data._message;
}
notify.error(gettext(message));
item.error = response;
});
} else if (action && action === 'externalsourceTo') {
return api.save(item.fetch_endpoint, {
guid: item.guid,
desk: data.desk,
stage: data.stage,
}, null, null, {repo: item.ingest_provider})
.then((fetched) => {
notify.success(gettext('Item Fetched.'));
if (config.open) {
getAuthoringWorkspace().edit({_id: fetched._id}, 'edit');
}
return fetched;
}, (error) => {
item.error = error;
notify.error(gettext('Failed to get item.'));
return item;
});
} else if (!item.lock_user) {
return api.save('move', {}, {task: data, allPackageItems: config.sendAllPackageItems}, item)
.then((_item) => {
$rootScope.$broadcast('item:update', {item: _item});
if (config.open) {
getAuthoringWorkspace().edit(_item);
}
return _item;
});
}
function getData(_config: any) {
var _data: any = {
desk: _config.desk,
};
if (_config.stage) {
_data.stage = _config.stage;
}
if (_config.macro) {
_data.macro = _config.macro;
}
return _data;
}
}
function getItemsFromPackages(packages) {
let items = [];
(packages || []).forEach((packageItem) => {
(packageItem.groups || [])
.filter((group) => group.id !== 'root')
.forEach((group) => {
group.refs.forEach((item) => items.push(item.residRef));
});
});
return items;
}
/**
* Send all given item using config once it's resolved
*
* At first it only creates a deferred config which is
* picked by SendItem directive, once used sets the destination
* it gets resolved and items are sent.
*
* @param {Array} items
* @return {Promise}
*/
function sendAllAs(items, action) {
const validItems = getValidItems(items);
if (validItems.length > 0) {
self.config = $q.defer();
self.config.action = action;
self.config.itemIds = _.map(validItems, '_id');
self.config.items = validItems;
self.config.isPackage = validItems.some((_item) => _item.type === 'composite');
if (self.config.isPackage) {
self.config.packageItemIds = getItemsFromPackages(validItems);
}
return self.config.promise.then((config) => {
self.config = null;
multi.reset();
return $q.all(validItems.map((_item) => sendOneAs(_item, config, action)));
}, () => {
self.config = null;
multi.reset();
});
}
}
/**
* Get deffered config if any. Used in $watch
*
* @returns {Object|null}
*/
function getConfig() {
return self.config;
}
/**
* reset deffered config if any.
*/
function resetConfig() {
if (self.config) {
self.config.reject();
self.config = null;
}
}
/**
* Start config via send item sidebar<|fim▁hole|> resetConfig();
self.config = $q.defer();
return self.config.promise.then((val) => {
self.config = null;
return val;
});
}
}<|fim▁end|> | *
* @return {Promise}
*/
function startConfig() { |
<|file_name|>user-list-controller.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>
console.log('UserListController');
vm.userList = userList;
}
module.exports = UserListController;<|fim▁end|> |
function UserListController(userList) {
var vm = this; |
<|file_name|>postproc_db.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os, sys, logging, urllib, time, string, json, argparse, collections, datetime, re, bz2, math
from concurrent.futures import ThreadPoolExecutor, wait
import lz4
pool = ThreadPoolExecutor(max_workers=16)
logging.basicConfig(level=logging.DEBUG)
sys.path.append(os.path.join(os.path.dirname(__file__), "lib", "python"))
from carta import (logger, POI)
from mongoengine import *
connect('carta')
# Minimum separation (in degrees) required between two POIs for both to be
# shown at a given zoom level; index 0 (coarsest zoom) has the widest spacing.
# Values follow 0.0001 * 1.6**n for n = 21 down to 2, rounded to 4 decimals.
zoomspacing = [round(0.0001*(1.6**n), 4) for n in range(21, 1, -1)]
def compute_occlusions(box):
    """Raise min_zoom on lower-ranked POIs that sit too close to a higher-
    ranked one, so map labels do not occlude each other.

    box: ((west, south), (east, north)) lng/lat corners of the tile.
    O(n^2) in the number of points inside the box.
    """
    SW, NE = box
    points = list(POI.objects(at__geo_within_box=(SW, NE)))
    print("Starting", SW, NE, len(points))
    for i, p1 in enumerate(points):
        for p2 in points[i+1:]:
            coords1, coords2 = p1.at['coordinates'], p2.at['coordinates']
            # Planar distance in degrees; hypot replaces the redundant
            # abs(...)**2 terms under a manual sqrt.
            dist = math.hypot(coords1[0] - coords2[0], coords1[1] - coords2[1])
            occluded_point = p1 if p1.rank < p2.rank else p2
            # First zoom level whose spacing this pair satisfies
            # (zoomspacing is decreasing); if none, the deepest level wins.
            for zoom, spacing in enumerate(zoomspacing):
                if dist >= spacing:
                    break
            occluded_point.min_zoom = max(occluded_point.min_zoom, zoom)
        # Persist once per point; by the time p1 is saved, every earlier
        # pairing that could bump its min_zoom has already run.
        p1.save()
    print("Finished", SW, NE, len(points))
# Tile the globe into step x step degree boxes and fan them out to the pool.
step = 2
boxes = [
    [(lng, lat), (lng + step, lat + step)]
    for lat in range(-90, 90, step)
    for lng in range(-180, 180, step)
]
# Drain the iterator so every submitted task runs to completion.
for _ in pool.map(compute_occlusions, boxes):
    pass
<|fim▁hole|># for doc2 in POI.objects(at__geo_within_center=(doc.at['coordinates'], zoomspacing[doc.min_zoom]), min_zoom__lte=doc.min_zoom).order_by('-rank'):<|fim▁end|> | # docs_by_rank = sorted(POI.objects(at__geo_within_center=(doc.at['coordinates'], spacing)),
# key=lambda point: point.rank or 0,
# reverse=True)
# for doc in POI.objects(at__geo_within_center=(doc.at['coordinates'], 1), min_zoom__gt=0).order_by('-rank'): |
<|file_name|>tree.py<|end_file_name|><|fim▁begin|>import pygame, math
pygame.init()
window = pygame.display.set_mode((600, 600))
pygame.display.set_caption("Fractal Tree")
screen = pygame.display.get_surface()
def drawTree(x1, y1, angle, depth):
if depth:
x2 = x1 + int(math.cos(math.radians(angle)) * depth * 10.0)
y2 = y1 + int(math.sin(math.radians(angle)) * depth * 10.0)
pygame.draw.line(screen, (255,255,255), (x1, y1), (x2, y2), 2)
drawTree(x2, y2, angle - 20, depth - 1)<|fim▁hole|>def input(event):
if event.type == pygame.QUIT:
exit(0)
drawTree(300, 550, -90, 9)
pygame.display.flip()
while True:
input(pygame.event.wait())<|fim▁end|> | drawTree(x2, y2, angle + 20, depth - 1)
|
<|file_name|>scalajsenv.js<|end_file_name|><|fim▁begin|>/* Scala.js runtime support
* Copyright 2013 LAMP/EPFL
* Author: Sébastien Doeraene
*/
/* ---------------------------------- *
* The top-level Scala.js environment *
* ---------------------------------- */
//!if outputMode == ECMAScript51Global
// Single global namespace holding the whole Scala.js runtime.
var ScalaJS = {};
//!endif
// Get the environment info
// __ScalaJSEnv may be predefined by the embedder to customize the runtime.
ScalaJS.env = (typeof __ScalaJSEnv === "object" && __ScalaJSEnv) ? __ScalaJSEnv : {};
// Global scope
// Resolution order: explicit env override, then a Node-style `global`
// (validated by checking its Object), then `this` (browser window).
ScalaJS.g =
  (typeof ScalaJS.env["global"] === "object" && ScalaJS.env["global"])
    ? ScalaJS.env["global"]
    : ((typeof global === "object" && global && global["Object"] === Object) ? global : this);
ScalaJS.env["global"] = ScalaJS.g;
// Where to send exports
//!if moduleKind == CommonJSModule
ScalaJS.e = exports;
//!else
ScalaJS.e =
  (typeof ScalaJS.env["exportsNamespace"] === "object" && ScalaJS.env["exportsNamespace"])
    ? ScalaJS.env["exportsNamespace"] : ScalaJS.g;
//!endif
ScalaJS.env["exportsNamespace"] = ScalaJS.e;
// Freeze the environment info
ScalaJS.g["Object"]["freeze"](ScalaJS.env);
// Linking info - must be in sync with scala.scalajs.runtime.LinkingInfo
// Semantics codes: 0 = Compliant, 1 = Fatal, 2 = Unchecked.
ScalaJS.linkingInfo = {
  "envInfo": ScalaJS.env,
  "semantics": {
//!if asInstanceOfs == Compliant
    "asInstanceOfs": 0,
//!else
//!if asInstanceOfs == Fatal
    "asInstanceOfs": 1,
//!else
    "asInstanceOfs": 2,
//!endif
//!endif
//!if arrayIndexOutOfBounds == Compliant
    "arrayIndexOutOfBounds": 0,
//!else
//!if arrayIndexOutOfBounds == Fatal
    "arrayIndexOutOfBounds": 1,
//!else
    "arrayIndexOutOfBounds": 2,
//!endif
//!endif
//!if moduleInit == Compliant
    "moduleInit": 0,
//!else
//!if moduleInit == Fatal
    "moduleInit": 1,
//!else
    "moduleInit": 2,
//!endif
//!endif
//!if floats == Strict
    "strictFloats": true,
//!else
    "strictFloats": false,
//!endif
//!if productionMode == true
    "productionMode": true
//!else
    "productionMode": false
//!endif
  },
//!if outputMode == ECMAScript6
  "assumingES6": true,
//!else
  "assumingES6": false,
//!endif
  "linkerVersion": "{{LINKER_VERSION}}"
};
ScalaJS.g["Object"]["freeze"](ScalaJS.linkingInfo);
ScalaJS.g["Object"]["freeze"](ScalaJS.linkingInfo["semantics"]);
// Snapshots of builtins and polyfills
//!if outputMode == ECMAScript6
ScalaJS.imul = ScalaJS.g["Math"]["imul"];
ScalaJS.fround = ScalaJS.g["Math"]["fround"];
ScalaJS.clz32 = ScalaJS.g["Math"]["clz32"];
//!else
// 32-bit integer multiplication with proper overflow wrap-around.
ScalaJS.imul = ScalaJS.g["Math"]["imul"] || (function(a, b) {
  // See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/imul
  const ah = (a >>> 16) & 0xffff;
  const al = a & 0xffff;
  const bh = (b >>> 16) & 0xffff;
  const bl = b & 0xffff;
  // the shift by 0 fixes the sign on the high part
  // the final |0 converts the unsigned value into a signed value
  return ((al * bl) + (((ah * bl + al * bh) << 16) >>> 0) | 0);
});
// Round a double to the nearest representable 32-bit float.
ScalaJS.fround = ScalaJS.g["Math"]["fround"] ||
//!if floats == Strict
  // Round-tripping through a one-element Float32Array performs the
  // rounding in native code; the slow path is a pure-Scala polyfill.
  (ScalaJS.g["Float32Array"] ? (function(v) {
    const array = new ScalaJS.g["Float32Array"](1);
    array[0] = v;
    return array[0];
  }) : (function(v) {
    return ScalaJS.m.sjsr_package$().froundPolyfill__D__D(+v);
  }));
//!else
  // Non-strict floats: keep full double precision.
  (function(v) {
    return +v;
  });
//!endif
// Count of leading zero bits in a 32-bit integer.
ScalaJS.clz32 = ScalaJS.g["Math"]["clz32"] || (function(i) {
  // See Hacker's Delight, Section 5-3
  if (i === 0) return 32;
  let r = 1;
  if ((i & 0xffff0000) === 0) { i <<= 16; r += 16; };
  if ((i & 0xff000000) === 0) { i <<= 8; r += 8; };
  if ((i & 0xf0000000) === 0) { i <<= 4; r += 4; };
  if ((i & 0xc0000000) === 0) { i <<= 2; r += 2; };
  return r + (i >> 31);
});
//!endif
// Other fields
//!if outputMode == ECMAScript51Global
ScalaJS.d = {}; // Data for types
ScalaJS.a = {}; // Scala.js-defined JS class value accessors
ScalaJS.b = {}; // Scala.js-defined JS class value fields
ScalaJS.c = {}; // Scala.js constructors
ScalaJS.h = {}; // Inheritable constructors (without initialization code)
ScalaJS.s = {}; // Static methods
ScalaJS.t = {}; // Static fields
ScalaJS.f = {}; // Default methods
ScalaJS.n = {}; // Module instances
ScalaJS.m = {}; // Module accessors
ScalaJS.is = {}; // isInstanceOf methods
ScalaJS.isArrayOf = {}; // isInstanceOfArrayOf methods
//!if asInstanceOfs != Unchecked
ScalaJS.as = {}; // asInstanceOf methods
ScalaJS.asArrayOf = {}; // asInstanceOfArrayOf methods
//!endif
ScalaJS.lastIDHash = 0; // last value attributed to an id hash code
// WeakMap-based identity hash codes when available, null otherwise.
ScalaJS.idHashCodeMap = ScalaJS.g["WeakMap"] ? new ScalaJS.g["WeakMap"]() : null;
//!else
let $lastIDHash = 0; // last value attributed to an id hash code
//!if outputMode == ECMAScript6
const $idHashCodeMap = new ScalaJS.g["WeakMap"]();
//!else
const $idHashCodeMap = ScalaJS.g["WeakMap"] ? new ScalaJS.g["WeakMap"]() : null;
//!endif
//!endif
// Core mechanism
/** Builds the isArrayOf test for a primitive type: the returned predicate
 *  accepts an object exactly when its class data describes an array of
 *  `depth` dimensions whose element base is `primitiveData`.
 */
ScalaJS.makeIsArrayOfPrimitive = function(primitiveData) {
  return function(obj, depth) {
    if (!obj)
      return false;
    const data = obj.$classData;
    if (!data)
      return false;
    return (data.arrayDepth === depth) && (data.arrayBase === primitiveData);
  }
};
//!if asInstanceOfs != Unchecked
/** Builds the asInstanceOf conversion for arrays of a primitive type.
 *  The returned function passes `null` and matching arrays through
 *  unchanged, and otherwise throws per the configured cast semantics.
 */
ScalaJS.makeAsArrayOfPrimitive = function(isInstanceOfFunction, arrayEncodedName) {
  return function(obj, depth) {
    // null is a valid value for any reference (array) type
    if (isInstanceOfFunction(obj, depth) || (obj === null))
      return obj;
    else
      ScalaJS.throwArrayCastException(obj, arrayEncodedName, depth);
  }
};
//!endif
/** Encode a property name for runtime manipulation
 *  Usage:
 *    env.propertyName({someProp:0})
 *  Returns:
 *    "someProp"
 *  Useful when the property is renamed by a global optimizer (like Closure)
 *  but we must still get hold of a string of that name for runtime
 *  reflection.
 */
ScalaJS.propertyName = function(obj) {
  // Returns the first (and, by construction of the call sites, only)
  // enumerable property name; undefined for an empty object.
  for (const prop in obj)
    return prop;
};
// Runtime functions
/** Tests whether `obj` is a Scala.js object, i.e. a value carrying
 *  Scala.js class data.
 */
ScalaJS.isScalaJSObject = function(obj) {
  if (obj && obj.$classData)
    return true;
  else
    return false;
};
//!if asInstanceOfs != Unchecked
/** Throws the configured representation of a ClassCastException. */
ScalaJS.throwClassCastException = function(instance, classFullName) {
//!if asInstanceOfs == Compliant
  throw new ScalaJS.c.jl_ClassCastException().init___T(
    instance + " is not an instance of " + classFullName);
//!else
  // Fatal semantics: wrap the CCE in an UndefinedBehaviorError.
  throw new ScalaJS.c.sjsr_UndefinedBehaviorError().init___jl_Throwable(
    new ScalaJS.c.jl_ClassCastException().init___T(
      instance + " is not an instance of " + classFullName));
//!endif
};
/** Throws a ClassCastException for an array cast, prefixing the encoded
 *  element name with one '[' per array dimension.
 */
ScalaJS.throwArrayCastException = function(instance, classArrayEncodedName, depth) {
  for (; depth; --depth)
    classArrayEncodedName = "[" + classArrayEncodedName;
  ScalaJS.throwClassCastException(instance, classArrayEncodedName);
};
//!endif
//!if arrayIndexOutOfBounds != Unchecked
/** Throws the configured representation of an ArrayIndexOutOfBoundsException.
 *  `i` may be null, in which case the exception carries a null message.
 */
ScalaJS.throwArrayIndexOutOfBoundsException = function(i) {
  const msg = (i === null) ? null : ("" + i);
//!if arrayIndexOutOfBounds == Compliant
  throw new ScalaJS.c.jl_ArrayIndexOutOfBoundsException().init___T(msg);
//!else
  throw new ScalaJS.c.sjsr_UndefinedBehaviorError().init___jl_Throwable(
    new ScalaJS.c.jl_ArrayIndexOutOfBoundsException().init___T(msg));
//!endif
};
//!endif
/** isInstance stub installed for raw JS traits/objects, whose class values
 *  cannot meaningfully answer isInstance().
 */
ScalaJS.noIsInstance = function(instance) {
  throw new ScalaJS.g["TypeError"](
    "Cannot call isInstance() on a Class representing a raw JS trait/object");
};
/** Wraps an existing native array in a Scala.js array object without copying. */
ScalaJS.makeNativeArrayWrapper = function(arrayClassData, nativeArray) {
  return new arrayClassData.constr(nativeArray);
};
/** Allocates a (possibly multi-dimensional) array; `lengths` holds one size
 *  per dimension.
 */
ScalaJS.newArrayObject = function(arrayClassData, lengths) {
  return ScalaJS.newArrayObjectInternal(arrayClassData, lengths, 0);
};
/** Recursively allocates the array described by `arrayClassData`, using
 *  lengths[lengthIndex], lengths[lengthIndex+1], ... as dimension sizes.
 */
ScalaJS.newArrayObjectInternal = function(arrayClassData, lengths, lengthIndex) {
  const arr = new arrayClassData.constr(lengths[lengthIndex]);
  const nextIndex = lengthIndex + 1;
  if (nextIndex < lengths.length) {
    // Not the innermost dimension yet: fill every slot with a sub-array.
    const componentData = arrayClassData.componentData;
    const slots = arr.u;
    for (let j = 0; j !== slots.length; j++)
      slots[j] = ScalaJS.newArrayObjectInternal(componentData, lengths, nextIndex);
  }
  return arr;
};
/** Implements java.lang.Object.toString dispatch; `undefined` (the hijacked
 *  boxed Unit) maps to the string "undefined".
 */
ScalaJS.objectToString = function(instance) {
  return (instance === void 0) ? "undefined" : instance.toString();
};
/** Implements java.lang.Object.getClass for all values, including the
 *  hijacked boxed classes (strings, numbers, booleans, undefined/Unit).
 *  A primitive number is classified as the smallest boxed class that can
 *  represent it exactly (Byte < Short < Integer < Float < Double).
 */
ScalaJS.objectGetClass = function(instance) {
  switch (typeof instance) {
    case "string":
      return ScalaJS.d.T.getClassOf();
    case "number": {
      const v = instance | 0;
      if (v === instance) { // is the value integral?
        if (ScalaJS.isByte(v))
          return ScalaJS.d.jl_Byte.getClassOf();
        else if (ScalaJS.isShort(v))
          return ScalaJS.d.jl_Short.getClassOf();
        else
          return ScalaJS.d.jl_Integer.getClassOf();
      } else {
        if (ScalaJS.isFloat(instance))
          return ScalaJS.d.jl_Float.getClassOf();
        else
          return ScalaJS.d.jl_Double.getClassOf();
      }
    }
    case "boolean":
      return ScalaJS.d.jl_Boolean.getClassOf();
    case "undefined":
      return ScalaJS.d.sr_BoxedUnit.getClassOf();
    default:
      if (instance === null)
        // Deliberate null dereference: throws a TypeError, the runtime's
        // stand-in for an NPE (same pattern as objectClone below).
        return instance.getClass__jl_Class();
      else if (ScalaJS.is.sjsr_RuntimeLong(instance))
        return ScalaJS.d.jl_Long.getClassOf();
      else if (ScalaJS.isScalaJSObject(instance))
        return instance.$classData.getClassOf();
      else
        return null; // Exception?
  }
};
/** Implements java.lang.Object.clone dispatch. A null receiver deliberately
 *  dereferences null (TypeError standing in for an NPE); non-Scala.js values
 *  get a CloneNotSupportedException.
 */
ScalaJS.objectClone = function(instance) {
  if (ScalaJS.isScalaJSObject(instance) || (instance === null))
    return instance.clone__O();
  else
    throw new ScalaJS.c.jl_CloneNotSupportedException().init___();
};
/** Implements Object.notify: a no-op, except that a null receiver must still
 *  fail — dereferencing null throws a TypeError (the NPE equivalent).
 */
ScalaJS.objectNotify = function(instance) {
  // final and no-op in java.lang.Object
  if (instance === null)
    instance.notify__V();
};
/** Implements Object.notifyAll: no-op, null receiver throws (see above). */
ScalaJS.objectNotifyAll = function(instance) {
  // final and no-op in java.lang.Object
  if (instance === null)
    instance.notifyAll__V();
};
/** Implements Object.finalize dispatch; no-op for hijacked classes. */
ScalaJS.objectFinalize = function(instance) {
  if (ScalaJS.isScalaJSObject(instance) || (instance === null))
    instance.finalize__V();
  // else no-op
};
/** Implements Object.equals dispatch. Primitive numbers use Java Double
 *  equality (see numberEquals); other hijacked values use reference equality.
 */
ScalaJS.objectEquals = function(instance, rhs) {
  if (ScalaJS.isScalaJSObject(instance) || (instance === null))
    return instance.equals__O__Z(rhs);
  else if (typeof instance === "number")
    return typeof rhs === "number" && ScalaJS.numberEquals(instance, rhs);
  else
    return instance === rhs;
};
/** Java Double.equals semantics on primitive numbers: +0.0 != -0.0
 *  (distinguished via the sign of 1/x) and NaN == NaN.
 */
ScalaJS.numberEquals = function(lhs, rhs) {
  return (lhs === rhs) ? (
    // 0.0.equals(-0.0) must be false
    lhs !== 0 || 1/lhs === 1/rhs
  ) : (
    // are they both NaN?
    (lhs !== lhs) && (rhs !== rhs)
  );
};
/** Implements java.lang.Object.hashCode dispatch for all values.
 *  1231/1237 are the JDK-specified Boolean hash codes; other values fall
 *  back to an identity hash code.
 */
ScalaJS.objectHashCode = function(instance) {
  switch (typeof instance) {
    case "string":
      return ScalaJS.m.sjsr_RuntimeString$().hashCode__T__I(instance);
    case "number":
      return ScalaJS.m.sjsr_Bits$().numberHashCode__D__I(instance);
    case "boolean":
      return instance ? 1231 : 1237;
    case "undefined":
      return 0;
    default:
      if (ScalaJS.isScalaJSObject(instance) || instance === null)
        return instance.hashCode__I();
//!if outputMode != ECMAScript6
      else if (ScalaJS.idHashCodeMap === null)
        // No WeakMap available: degrade to a constant hash for raw JS objects.
        return 42;
//!endif
      else
        return ScalaJS.systemIdentityHashCode(instance);
  }
};
/** Implements java.lang.Comparable.compareTo dispatch for the hijacked
 *  boxed classes. Under checked cast semantics the rhs is first cast to
 *  the matching boxed type, so incompatible comparisons throw a CCE.
 */
ScalaJS.comparableCompareTo = function(instance, rhs) {
  switch (typeof instance) {
    case "string":
//!if asInstanceOfs != Unchecked
      ScalaJS.as.T(rhs);
//!endif
      return instance === rhs ? 0 : (instance < rhs ? -1 : 1);
    case "number":
//!if asInstanceOfs != Unchecked
      ScalaJS.as.jl_Number(rhs);
//!endif
      return ScalaJS.m.jl_Double$().compare__D__D__I(instance, rhs);
    case "boolean":
//!if asInstanceOfs != Unchecked
      ScalaJS.asBoolean(rhs);
//!endif
      // booleans coerce to 1/0, so the difference is -1, 0 or 1
      return instance - rhs; // yes, this gives the right result
    default:
      return instance.compareTo__O__I(rhs);
  }
};
/** java.lang.CharSequence.length dispatch: primitive strings use the native
 *  length property, other CharSequences their Scala.js method.
 */
ScalaJS.charSequenceLength = function(instance) {
  if (typeof(instance) === "string")
//!if asInstanceOfs != Unchecked
    return ScalaJS.uI(instance["length"]);
//!else
    return instance["length"] | 0;
//!endif
  else
    return instance.length__I();
};
/** CharSequence.charAt dispatch; chars are represented as UTF-16 code units
 *  (hence the & 0xffff truncation).
 */
ScalaJS.charSequenceCharAt = function(instance, index) {
  if (typeof(instance) === "string")
//!if asInstanceOfs != Unchecked
    return ScalaJS.uI(instance["charCodeAt"](index)) & 0xffff;
//!else
    return instance["charCodeAt"](index) & 0xffff;
//!endif
  else
    return instance.charAt__I__C(index);
};
/** CharSequence.subSequence dispatch; for strings this is substring. */
ScalaJS.charSequenceSubSequence = function(instance, start, end) {
  if (typeof(instance) === "string")
//!if asInstanceOfs != Unchecked
    return ScalaJS.as.T(instance["substring"](start, end));
//!else
    return instance["substring"](start, end);
//!endif
  else
    return instance.subSequence__I__I__jl_CharSequence(start, end);
};
/** java.lang.Boolean.booleanValue: hijacked primitive or boxed instance. */
ScalaJS.booleanBooleanValue = function(instance) {
  return (typeof instance === "boolean") ? instance : instance.booleanValue__Z();
};
/** java.lang.Number.byteValue: truncate a primitive number to a signed
 *  8-bit value, or delegate to a genuine Number instance.
 */
ScalaJS.numberByteValue = function(instance) {
  if (typeof instance !== "number")
    return instance.byteValue__B();
  return (instance << 24) >> 24;
};
/** java.lang.Number.shortValue: truncate to a signed 16-bit value. */
ScalaJS.numberShortValue = function(instance) {
  if (typeof instance !== "number")
    return instance.shortValue__S();
  return (instance << 16) >> 16;
};
/** java.lang.Number.intValue: truncate to a signed 32-bit value. */
ScalaJS.numberIntValue = function(instance) {
  if (typeof instance !== "number")
    return instance.intValue__I();
  return instance | 0;
};
/** java.lang.Number.longValue: convert a primitive double through the
 *  RuntimeLong representation, or delegate to a Number instance.
 */
ScalaJS.numberLongValue = function(instance) {
  if (typeof instance !== "number")
    return instance.longValue__J();
  return ScalaJS.m.sjsr_RuntimeLong$().fromDouble__D__sjsr_RuntimeLong(instance);
};
/** java.lang.Number.floatValue: round a primitive number to float. */
ScalaJS.numberFloatValue = function(instance) {
  return (typeof instance === "number") ? ScalaJS.fround(instance) : instance.floatValue__F();
};
/** java.lang.Number.doubleValue: primitive numbers are already doubles. */
ScalaJS.numberDoubleValue = function(instance) {
  return (typeof instance === "number") ? instance : instance.doubleValue__D();
};
/** NaN is the only value that is not equal to itself. */
ScalaJS.isNaN = function(instance) {
  return instance !== instance;
};
/** Infinite means: neither finite nor NaN (De Morgan form). */
ScalaJS.isInfinite = function(instance) {
  return !(ScalaJS.g["isFinite"](instance) || ScalaJS.isNaN(instance));
};
/** Converts a double to an int with Java semantics: saturate at the int
 *  range bounds; otherwise truncate towards zero. NaN falls through all
 *  comparisons and (NaN | 0) yields 0, as Java requires.
 */
ScalaJS.doubleToInt = function(x) {
  if (x > 2147483647)
    return 2147483647;
  else if (x < -2147483648)
    return -2147483648;
  else
    return x | 0;
};
/** Instantiates a JS object with variadic arguments to the constructor. */
ScalaJS.newJSObjectWithVarargs = function(ctor, args) {
  // This basically emulates the ECMAScript specification for 'new'.
  const instance = ScalaJS.g["Object"]["create"](ctor.prototype);
  const result = ctor["apply"](instance, args);
  switch (typeof result) {
    // A constructor returning a primitive (or undefined) yields the
    // freshly created instance; an object return value replaces it.
    case "string": case "number": case "boolean": case "undefined": case "symbol":
      return instance;
    default:
      return result === null ? instance : result;
  }
};
/** Walks the prototype chain above `initialProto` and returns the first own
 *  property descriptor found for `propName`, or undefined if none exists.
 *  Used to implement `super` member resolution.
 */
ScalaJS.resolveSuperRef = function(initialProto, propName) {
  const getProto = ScalaJS.g["Object"]["getPrototypeOf"];
  const getDesc = ScalaJS.g["Object"]["getOwnPropertyDescriptor"];
  for (let proto = getProto(initialProto); proto !== null; proto = getProto(proto)) {
    const desc = getDesc(proto, propName);
    if (desc !== void 0)
      return desc;
  }
  return void 0;
};
ScalaJS.superGet = function(initialProto, self, propName) {
const desc = ScalaJS.resolveSuperRef(initialProto, propName);
if (desc !== void 0) {
const getter = desc["get"];
if (getter !== void 0)
return getter["call"](self);
else
return desc["value"];
}
return void 0;
};
ScalaJS.superSet = function(initialProto, self, propName, value) {
const desc = ScalaJS.resolveSuperRef(initialProto, propName);
if (desc !== void 0) {
const setter = desc["set"];
if (setter !== void 0) {
setter["call"](self, value);
return void 0;
}
}
throw new ScalaJS.g["TypeError"]("super has no setter '" + propName + "'.");
};
//!if moduleKind == CommonJSModule
ScalaJS.moduleDefault = function(m) {
return (m && (typeof m === "object") && "default" in m) ? m["default"] : m;
};
//!endif
ScalaJS.propertiesOf = function(obj) {
const result = [];
for (const prop in obj)
result["push"](prop);
return result;
};
/** Implements java.lang.System.arraycopy, including correct handling of
 *  overlapping ranges within the same underlying array.
 */
ScalaJS.systemArraycopy = function(src, srcPos, dest, destPos, length) {
  const srcu = src.u;
  const destu = dest.u;
//!if arrayIndexOutOfBounds != Unchecked
  // Validate all bounds up front so no element is copied on failure.
  if (srcPos < 0 || destPos < 0 || length < 0 ||
      (srcPos > ((srcu.length - length) | 0)) ||
      (destPos > ((destu.length - length) | 0))) {
    ScalaJS.throwArrayIndexOutOfBoundsException(null);
  }
//!endif
  // Forward copy is safe unless we copy within the same array to a higher,
  // overlapping region; in that case copy backwards to avoid clobbering.
  if (srcu !== destu || destPos < srcPos || (((srcPos + length) | 0) < destPos)) {
    for (let i = 0; i < length; i = (i + 1) | 0)
      destu[(destPos + i) | 0] = srcu[(srcPos + i) | 0];
  } else {
    for (let i = (length - 1) | 0; i >= 0; i = (i - 1) | 0)
      destu[(destPos + i) | 0] = srcu[(srcPos + i) | 0];
  }
};
/** Implements System.identityHashCode. When a WeakMap is available, ids are
 *  stored there (no mutation of user objects); otherwise ids are stashed in
 *  a `$idHashCode$0` field on Scala.js objects, falling back to a constant
 *  for sealed objects that cannot be extended.
 */
ScalaJS.systemIdentityHashCode =
//!if outputMode != ECMAScript6
  (ScalaJS.idHashCodeMap !== null) ?
//!endif
  (function(obj) {
    switch (typeof obj) {
      case "string": case "number": case "boolean": case "undefined":
        // Hijacked primitives have value-based hash codes.
        return ScalaJS.objectHashCode(obj);
      default:
        if (obj === null) {
          return 0;
        } else {
          let hash = ScalaJS.idHashCodeMap["get"](obj);
          if (hash === void 0) {
            // First request for this object: allocate the next id.
            hash = (ScalaJS.lastIDHash + 1) | 0;
            ScalaJS.lastIDHash = hash;
            ScalaJS.idHashCodeMap["set"](obj, hash);
          }
          return hash;
        }
    }
//!if outputMode != ECMAScript6
  }) :
  (function(obj) {
    if (ScalaJS.isScalaJSObject(obj)) {
      let hash = obj["$idHashCode$0"];
      if (hash !== void 0) {
        return hash;
      } else if (!ScalaJS.g["Object"]["isSealed"](obj)) {
        hash = (ScalaJS.lastIDHash + 1) | 0;
        ScalaJS.lastIDHash = hash;
        obj["$idHashCode$0"] = hash;
        return hash;
      } else {
        // Sealed object: cannot attach the field, degrade to a constant.
        return 42;
      }
    } else if (obj === null) {
      return 0;
    } else {
      return ScalaJS.objectHashCode(obj);
    }
//!endif
  });
// is/as for hijacked boxed classes (the non-trivial ones)
// The strict equality after the shift round-trip ensures the value is a
// number representable in the target width; `1/v !== 1/-0` additionally
// rejects negative zero, which must classify as a Double, not an integer.
ScalaJS.isByte = function(v) {
  return (v << 24 >> 24) === v && 1/v !== 1/-0;
};
ScalaJS.isShort = function(v) {
  return (v << 16 >> 16) === v && 1/v !== 1/-0;
};
ScalaJS.isInt = function(v) {
  return (v | 0) === v && 1/v !== 1/-0;
};
ScalaJS.isFloat = function(v) {
//!if floats == Strict
  // NaN, or a value that survives rounding to float unchanged.
  return v !== v || ScalaJS.fround(v) === v;
//!else
  // Loose floats: any number is acceptable as a Float.
  return typeof v === "number";
//!endif
};
//!if asInstanceOfs != Unchecked
// asInstanceOf conversions for hijacked boxed classes: null always passes,
// matching values pass through, anything else throws a ClassCastException
// (per the configured semantics).
ScalaJS.asUnit = function(v) {
  if (v === void 0 || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "scala.runtime.BoxedUnit");
};
ScalaJS.asBoolean = function(v) {
  if (typeof v === "boolean" || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Boolean");
};
ScalaJS.asByte = function(v) {
  if (ScalaJS.isByte(v) || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Byte");
};
ScalaJS.asShort = function(v) {
  if (ScalaJS.isShort(v) || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Short");
};
ScalaJS.asInt = function(v) {
  if (ScalaJS.isInt(v) || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Integer");
};
ScalaJS.asFloat = function(v) {
  if (ScalaJS.isFloat(v) || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Float");
};
ScalaJS.asDouble = function(v) {
  if (typeof v === "number" || v === null)
    return v;
  else
    ScalaJS.throwClassCastException(v, "java.lang.Double");
};
//!endif
// Unboxes
// Convert a boxed (or hijacked) value to its primitive representation,
// mapping null to the zero of the primitive type. Under checked semantics
// the value is first cast, so mismatched types throw.
//!if asInstanceOfs != Unchecked
ScalaJS.uZ = function(value) {
  // !! maps null to false
  return !!ScalaJS.asBoolean(value);
};
ScalaJS.uB = function(value) {
  // | 0 maps null to 0
  return ScalaJS.asByte(value) | 0;
};
ScalaJS.uS = function(value) {
  return ScalaJS.asShort(value) | 0;
};
ScalaJS.uI = function(value) {
  return ScalaJS.asInt(value) | 0;
};
ScalaJS.uJ = function(value) {
  return null === value ? ScalaJS.m.sjsr_RuntimeLong$().Zero$1
                        : ScalaJS.as.sjsr_RuntimeLong(value);
};
ScalaJS.uF = function(value) {
  /* Here, it is fine to use + instead of fround, because asFloat already
   * ensures that the result is either null or a float.
   */
  return +ScalaJS.asFloat(value);
};
ScalaJS.uD = function(value) {
  return +ScalaJS.asDouble(value);
};
//!else
// Unchecked semantics: only Long needs an explicit unbox (null -> 0L).
ScalaJS.uJ = function(value) {
  return null === value ? ScalaJS.m.sjsr_RuntimeLong$().Zero$1 : value;
};
//!endif
// TypeArray conversions
// Scala array -> TypedArray: copies the underlying storage `u` into a fresh
// typed array of the matching element type.
ScalaJS.byteArray2TypedArray = function(value) { return new ScalaJS.g["Int8Array"](value.u); };
ScalaJS.shortArray2TypedArray = function(value) { return new ScalaJS.g["Int16Array"](value.u); };
ScalaJS.charArray2TypedArray = function(value) { return new ScalaJS.g["Uint16Array"](value.u); };
ScalaJS.intArray2TypedArray = function(value) { return new ScalaJS.g["Int32Array"](value.u); };
ScalaJS.floatArray2TypedArray = function(value) { return new ScalaJS.g["Float32Array"](value.u); };
ScalaJS.doubleArray2TypedArray = function(value) { return new ScalaJS.g["Float64Array"](value.u); };
// TypedArray -> Scala array: wraps a copy of the data in a Scala.js array
// object of the matching primitive array class.
ScalaJS.typedArray2ByteArray = function(value) {
  const arrayClassData = ScalaJS.d.B.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Int8Array"](value));
};
ScalaJS.typedArray2ShortArray = function(value) {
  const arrayClassData = ScalaJS.d.S.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Int16Array"](value));
};
ScalaJS.typedArray2CharArray = function(value) {
  const arrayClassData = ScalaJS.d.C.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Uint16Array"](value));
};
ScalaJS.typedArray2IntArray = function(value) {
  const arrayClassData = ScalaJS.d.I.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Int32Array"](value));
};
ScalaJS.typedArray2FloatArray = function(value) {
  const arrayClassData = ScalaJS.d.F.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Float32Array"](value));
};
ScalaJS.typedArray2DoubleArray = function(value) {
  const arrayClassData = ScalaJS.d.D.getArrayOf();
  return new arrayClassData.constr(new ScalaJS.g["Float64Array"](value));
};
// TypeData class
// Reflective type information attached to every Scala.js class, primitive
// and array type; backs java.lang.Class at runtime.
//!if outputMode != ECMAScript6
/** @constructor */
ScalaJS.TypeData = function() {
//!else
class $TypeData {
  constructor() {
//!endif
    // Runtime support
    this.constr = void 0;         // constructor function of instances
    this.parentData = void 0;     // TypeData of the superclass
    this.ancestors = null;        // map of ancestor internal names -> 1
    this.componentData = null;    // element TypeData, for array types
    this.arrayBase = null;        // innermost element TypeData of an array
    this.arrayDepth = 0;          // number of array dimensions (0 = not array)
    this.zero = null;             // default value of the type
    this.arrayEncodedName = "";   // JVM-style encoded name used inside "[...;"
    this._classOf = void 0;       // lazily created java.lang.Class instance
    this._arrayOf = void 0;       // lazily created TypeData of Array[this]
    this.isArrayOf = void 0;      // predicate for arrays of this type
    // java.lang.Class support
    this["name"] = "";
    this["isPrimitive"] = false;
    this["isInterface"] = false;
    this["isArrayClass"] = false;
    this["isRawJSType"] = false;
    this["isInstance"] = void 0;
};
/** Initializes this TypeData as a primitive type (void, boolean, char, ...).
 *  Primitives are never instances of anything at runtime, hence the
 *  constant-false isInstance / isArrayOf predicates.
 */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype.initPrim = function(
//!else
initPrim(
//!endif
    zero, arrayEncodedName, displayName) {
  // Runtime support
  this.ancestors = {};
  this.componentData = null;
  this.zero = zero;
  this.arrayEncodedName = arrayEncodedName;
  this.isArrayOf = function(obj, depth) { return false; };
  // java.lang.Class support
  this["name"] = displayName;
  this["isPrimitive"] = true;
  this["isInstance"] = function(obj) { return false; };
  return this;
};
/** Initializes this TypeData as a (non-array) class or interface.
 *  When no explicit predicates are given, the defaults test membership of
 *  `internalName` in the instance's ancestors map.
 */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype.initClass = function(
//!else
initClass(
//!endif
    internalNameObj, isInterface, fullName,
    ancestors, isRawJSType, parentData, isInstance, isArrayOf) {
  // internalNameObj is a {name: 0} record so Closure-style renaming of the
  // property is reflected in the string we recover here.
  const internalName = ScalaJS.propertyName(internalNameObj);
  isInstance = isInstance || function(obj) {
    return !!(obj && obj.$classData && obj.$classData.ancestors[internalName]);
  };
  isArrayOf = isArrayOf || function(obj, depth) {
    return !!(obj && obj.$classData && (obj.$classData.arrayDepth === depth)
      && obj.$classData.arrayBase.ancestors[internalName])
  };
  // Runtime support
  this.parentData = parentData;
  this.ancestors = ancestors;
  this.arrayEncodedName = "L"+fullName+";";
  this.isArrayOf = isArrayOf;
  // java.lang.Class support
  this["name"] = fullName;
  this["isInterface"] = isInterface;
  this["isRawJSType"] = !!isRawJSType;
  this["isInstance"] = isInstance;
  return this;
};
/** Initializes this TypeData as the array type of `componentData`, defining
 *  the ArrayClass constructor for its instances on the fly. Instances store
 *  their elements in `this.u`, either a fresh zero-filled Array (numeric
 *  argument = length) or a wrapped native array.
 */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype.initArray = function(
//!else
initArray(
//!endif
    componentData) {
  // The constructor
  const componentZero0 = componentData.zero;
  // The zero for the Long runtime representation
  // is a special case here, since the class has not
  // been defined yet, when this file is read
  // (loose == is fine: no other zero value compares equal to "longZero")
  const componentZero = (componentZero0 == "longZero")
    ? ScalaJS.m.sjsr_RuntimeLong$().Zero$1
    : componentZero0;
//!if outputMode != ECMAScript6
  /** @constructor */
  const ArrayClass = function(arg) {
    if (typeof(arg) === "number") {
      // arg is the length of the array
      this.u = new Array(arg);
      for (let i = 0; i < arg; i++)
        this.u[i] = componentZero;
    } else {
      // arg is a native array that we wrap
      this.u = arg;
    }
  }
  ArrayClass.prototype = new ScalaJS.h.O;
  ArrayClass.prototype.constructor = ArrayClass;
//!if arrayIndexOutOfBounds != Unchecked
  // Checked element accessors, only emitted under checked index semantics.
  ArrayClass.prototype.get = function(i) {
    if (i < 0 || i >= this.u.length)
      ScalaJS.throwArrayIndexOutOfBoundsException(i);
    return this.u[i];
  };
  ArrayClass.prototype.set = function(i, v) {
    if (i < 0 || i >= this.u.length)
      ScalaJS.throwArrayIndexOutOfBoundsException(i);
    this.u[i] = v;
  };
//!endif
  ArrayClass.prototype.clone__O = function() {
    if (this.u instanceof Array)
      return new ArrayClass(this.u["slice"](0));
    else
      // The underlying Array is a TypedArray
      return new ArrayClass(new this.u.constructor(this.u));
  };
//!else
  class ArrayClass extends ScalaJS.c.O {
    constructor(arg) {
      super();
      if (typeof(arg) === "number") {
        // arg is the length of the array
        this.u = new Array(arg);
        for (let i = 0; i < arg; i++)
          this.u[i] = componentZero;
      } else {
        // arg is a native array that we wrap
        this.u = arg;
      }
    };
//!if arrayIndexOutOfBounds != Unchecked
    get(i) {
      if (i < 0 || i >= this.u.length)
        ScalaJS.throwArrayIndexOutOfBoundsException(i);
      return this.u[i];
    };
    set(i, v) {
      if (i < 0 || i >= this.u.length)
        ScalaJS.throwArrayIndexOutOfBoundsException(i);
      this.u[i] = v;
    };
//!endif
    clone__O() {
      if (this.u instanceof Array)
        return new ArrayClass(this.u["slice"](0));
      else
        // The underlying Array is a TypedArray
        return new ArrayClass(new this.u.constructor(this.u));
    };
  };
//!endif
  ArrayClass.prototype.$classData = this;
  // Don't generate reflective call proxies. The compiler special cases
  // reflective calls to methods on scala.Array
  // The data
  const encodedName = "[" + componentData.arrayEncodedName;
  const componentBase = componentData.arrayBase || componentData;
  const arrayDepth = componentData.arrayDepth + 1;
  const isInstance = function(obj) {
    return componentBase.isArrayOf(obj, arrayDepth);
  }
  // Runtime support
  this.constr = ArrayClass;
  this.parentData = ScalaJS.d.O;
  this.ancestors = {O: 1, jl_Cloneable: 1, Ljava_io_Serializable: 1};
  this.componentData = componentData;
  this.arrayBase = componentBase;
  this.arrayDepth = arrayDepth;
  this.zero = null;
  this.arrayEncodedName = encodedName;
  this._classOf = undefined;
  this._arrayOf = undefined;
  this.isArrayOf = undefined;
  // java.lang.Class support
  this["name"] = encodedName;
  this["isPrimitive"] = false;
  this["isInterface"] = false;
  this["isArrayClass"] = true;
  this["isInstance"] = isInstance;
  return this;
};
/** Returns the java.lang.Class instance for this type, created lazily. */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype.getClassOf = function() {
//!else
getClassOf() {
//!endif
  if (!this._classOf)
    this._classOf = new ScalaJS.c.jl_Class().init___jl_ScalaJSClassData(this);
  return this._classOf;
};
/** Returns the TypeData of arrays of this type, created lazily. */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype.getArrayOf = function() {
//!else
getArrayOf() {
//!endif
  if (!this._arrayOf)
    this._arrayOf = new ScalaJS.TypeData().initArray(this);
  return this._arrayOf;
};
// java.lang.Class support
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype["getFakeInstance"] = function() {
//!else
"getFakeInstance"() {
//!endif
if (this === ScalaJS.d.T)
return "some string";
else if (this === ScalaJS.d.jl_Boolean)
return false;
else if (this === ScalaJS.d.jl_Byte ||
this === ScalaJS.d.jl_Short ||
this === ScalaJS.d.jl_Integer ||
this === ScalaJS.d.jl_Float ||
this === ScalaJS.d.jl_Double)
return 0;
else if (this === ScalaJS.d.jl_Long)
return ScalaJS.m.sjsr_RuntimeLong$().Zero$1;
else if (this === ScalaJS.d.sr_BoxedUnit)
return void 0;
else
return {$classData: this};
};<|fim▁hole|>"getSuperclass"() {
//!endif
return this.parentData ? this.parentData.getClassOf() : null;
};
/** java.lang.Class.getComponentType: element class of an array type,
 *  or null for non-array types.
 */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype["getComponentType"] = function() {
//!else
"getComponentType"() {
//!endif
  return this.componentData ? this.componentData.getClassOf() : null;
};
/** Reflective array allocation: wraps this type in one array dimension per
 *  entry of `lengths`, then allocates with those sizes.
 */
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype["newArrayOfThisClass"] = function(lengths) {
//!else
"newArrayOfThisClass"(lengths) {
//!endif
  let arrayClassData = this;
  for (let i = 0; i < lengths.length; i++)
    arrayClassData = arrayClassData.getArrayOf();
  return ScalaJS.newArrayObject(arrayClassData, lengths);
};
//!if outputMode == ECMAScript6
};
//!endif
// Create primitive types
// Each primitive gets its TypeData with (zero value, encoded name, display name).
ScalaJS.d.V = new ScalaJS.TypeData().initPrim(undefined, "V", "void");
ScalaJS.d.Z = new ScalaJS.TypeData().initPrim(false, "Z", "boolean");
ScalaJS.d.C = new ScalaJS.TypeData().initPrim(0, "C", "char");
ScalaJS.d.B = new ScalaJS.TypeData().initPrim(0, "B", "byte");
ScalaJS.d.S = new ScalaJS.TypeData().initPrim(0, "S", "short");
ScalaJS.d.I = new ScalaJS.TypeData().initPrim(0, "I", "int");
// "longZero" is a placeholder resolved lazily in initArray, because
// RuntimeLong is not defined yet at this point of the file.
ScalaJS.d.J = new ScalaJS.TypeData().initPrim("longZero", "J", "long");
ScalaJS.d.F = new ScalaJS.TypeData().initPrim(0.0, "F", "float");
ScalaJS.d.D = new ScalaJS.TypeData().initPrim(0.0, "D", "double");
// Instance tests for array of primitives
ScalaJS.isArrayOf.Z = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.Z);
ScalaJS.d.Z.isArrayOf = ScalaJS.isArrayOf.Z;
ScalaJS.isArrayOf.C = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.C);
ScalaJS.d.C.isArrayOf = ScalaJS.isArrayOf.C;
ScalaJS.isArrayOf.B = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.B);
ScalaJS.d.B.isArrayOf = ScalaJS.isArrayOf.B;
ScalaJS.isArrayOf.S = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.S);
ScalaJS.d.S.isArrayOf = ScalaJS.isArrayOf.S;
ScalaJS.isArrayOf.I = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.I);
ScalaJS.d.I.isArrayOf = ScalaJS.isArrayOf.I;
ScalaJS.isArrayOf.J = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.J);
ScalaJS.d.J.isArrayOf = ScalaJS.isArrayOf.J;
ScalaJS.isArrayOf.F = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.F);
ScalaJS.d.F.isArrayOf = ScalaJS.isArrayOf.F;
ScalaJS.isArrayOf.D = ScalaJS.makeIsArrayOfPrimitive(ScalaJS.d.D);
ScalaJS.d.D.isArrayOf = ScalaJS.isArrayOf.D;
//!if asInstanceOfs != Unchecked
// asInstanceOfs for array of primitives
ScalaJS.asArrayOf.Z = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.Z, "Z");
ScalaJS.asArrayOf.C = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.C, "C");
ScalaJS.asArrayOf.B = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.B, "B");
ScalaJS.asArrayOf.S = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.S, "S");
ScalaJS.asArrayOf.I = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.I, "I");
ScalaJS.asArrayOf.J = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.J, "J");
ScalaJS.asArrayOf.F = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.F, "F");
ScalaJS.asArrayOf.D = ScalaJS.makeAsArrayOfPrimitive(ScalaJS.isArrayOf.D, "D");
//!endif
//!if outputMode != ECMAScript6
ScalaJS.TypeData.prototype["getSuperclass"] = function() {
//!else |
<|file_name|>test_args.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Test the function for mapping Terraform arguments."
import pytest
import tftest
ARGS_TESTS = (<|fim▁hole|> ({'backend': False}, ['-backend=false']),
({'color': True}, []),
({'color': False}, ['-no-color']),
({'color': False, 'input': False}, ['-no-color', '-input=false']),
({'force_copy': True}, ['-force-copy']),
({'force_copy': None}, []),
({'force_copy': False}, []),
({'input': True}, []),
({'input': False}, ['-input=false']),
({'json_format': True}, ['-json']),
({'json_format': False}, []),
({'lock': True}, []),
({'lock': False}, ['-lock=false']),
({'plugin_dir': ''}, []),
({'plugin_dir': 'abc'}, ['-plugin-dir', 'abc']),
({'refresh': True}, []),
({'refresh': None}, []),
({'refresh': False}, ['-refresh=false']),
({'upgrade': True}, ['-upgrade']),
({'upgrade': False}, []),
({'tf_var_file': None}, []),
({'tf_var_file': 'foo.tfvar'}, ['-var-file=foo.tfvar']),
)
@pytest.mark.parametrize("kwargs, expected", ARGS_TESTS)
def test_args(kwargs, expected):
assert tftest.parse_args() == []
assert tftest.parse_args(**kwargs) == expected
TERRAGRUNT_ARGS_TESTCASES = [
({"tg_config": "Obama"}, ['--terragrunt-config', 'Obama']),
({"tg_tfpath": "Barrack"}, ['--terragrunt-tfpath', 'Barrack']),
({"tg_no_auto_init": True}, ['--terragrunt-no-auto-init']),
({"tg_no_auto_init": False}, []),
({"tg_no_auto_retry": True}, ['--terragrunt-no-auto-retry']),
({"tg_no_auto_retry": False}, []),
({"tg_non_interactive": True}, ['--terragrunt-non-interactive']),
({"tg_non_interactive": False}, []),
({"tg_working_dir": "George"}, ['--terragrunt-working-dir', 'George']),
({"tg_download_dir": "Bush"}, ['--terragrunt-download-dir', 'Bush']),
({"tg_source": "Clinton"}, ['--terragrunt-source', 'Clinton']),
({"tg_source_update": True}, ['--terragrunt-source-update']),
({"tg_source_update": False}, []),
({"tg_iam_role": "Bill"}, ['--terragrunt-iam-role', 'Bill']),
({"tg_ignore_dependency_errors": True}, ['--terragrunt-ignore-dependency-errors']),
({"tg_ignore_dependency_errors": False}, []),
({"tg_ignore_dependency_order": True}, ['--terragrunt-ignore-dependency-order']),
({"tg_ignore_dependency_order": False}, []),
({"tg_ignore_external_dependencies": "dont care what is here"},
['--terragrunt-ignore-external-dependencies']),
({"tg_include_external_dependencies": True}, ['--terragrunt-include-external-dependencies']),
({"tg_include_external_dependencies": False}, []),
({"tg_parallelism": 20}, ['--terragrunt-parallelism 20']),
({"tg_exclude_dir": "Ronald"}, ['--terragrunt-exclude-dir', 'Ronald']),
({"tg_include_dir": "Reagan"}, ['--terragrunt-include-dir', 'Reagan']),
({"tg_check": True}, ['--terragrunt-check']),
({"tg_check": False}, []),
({"tg_hclfmt_file": "Biden"}, ['--terragrunt-hclfmt-file', 'Biden']),
({"tg_override_attr": {"Iron": "Man", "Captain": "America"}},
['--terragrunt-override-attr=Iron=Man', '--terragrunt-override-attr=Captain=America']),
({"tg_debug": True}, ['--terragrunt-debug']),
({"tg_debug": False}, []),
]
@pytest.mark.parametrize("kwargs, expected", TERRAGRUNT_ARGS_TESTCASES)
def test_terragrunt_args(kwargs, expected):
assert tftest.parse_args(**kwargs) == expected
def test_var_args():
assert sorted(tftest.parse_args(init_vars={'a': 1, 'b': '["2"]'})) == sorted(
["-backend-config=a=1", '-backend-config=b=["2"]'])
assert sorted(tftest.parse_args(tf_vars={'a': 1, 'b': '["2"]'})) == sorted(
['-var', 'b=["2"]', '-var', 'a=1'])
def test_targets():
assert tftest.parse_args(targets=['one', 'two']) == sorted(
['-target=one', '-target=two'])<|fim▁end|> | ({'auto_approve': True}, ['-auto-approve']),
({'auto_approve': False}, []),
({'backend': True}, []),
({'backend': None}, []), |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#coding: utf-8
#!/usr/bin/env python3
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
    def __init__(self, parent, title, wavehandle):
        """Build the main window: wave view, scope view, and control row.

        ``wavehandle`` is the WaveHandler owning the loaded audio data.
        """
        wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
        self.wavehandle = wavehandle
        self.scale = 8     # zoom factor used by WavePanel (power of two)
        self.shift = 0     # scroll offset into the wave, in frames
        self.drawcnt = 0   # paint counter (debug aid only)
        self.scope = [0]   # current oscilloscope trace data
        # Create the menubar
        menuBar = wx.MenuBar()
        menu = wx.Menu()
        menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
        menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
        # bind the menu events
        self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
        self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
        menuBar.Append(menu, "&Actions")
        self.SetMenuBar(menuBar)
        # Waveform display (top) and oscilloscope display (middle); the
        # wave panel calls back into getscale/setsector on user input.
        self.wavepanel = WavePanel(self, self.getscale, self.setsector)
        self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
        self.scopepanel = ScopePanel(self)
        self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
        self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
        self.textpanel = sdisp.TextPanel(self)
        # "current position / total length" readout, updated in onPaint.
        self.timestamp = wx.StaticText(self.wavepanel, -1,
                                       ("Time: " + str(0.0)
                                        + "/" + str(0.0)),
                                       pos=(2, 2),
                                       style=wx.ALIGN_LEFT)
        self.timestamp.SetForegroundColour((217, 66, 244))
        btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
                            pos=(2, 0), size=(80, 40))
        btnExport = wx.Button(self.buttonpanel, -1, "Export",
                              pos=(84, 0), size=(80, 40))
        btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
                            pos=(166, 0), size=(80, 40))
        self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
                                       pos=(943, 0), size=(80, 40))
        # bind the button events to handlers
        self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
        self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
        self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
        self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
        self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
        self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
        self.contentNotSaved = False
        self.fileloaded = False  # True once a wave file has been opened
        self.quadrant = -1       # selected sample sector; -1 = none yet
        self.Centre()
    def setsector(self, sector):
        """Record the selected sample sector (quadrant) and repaint."""
        self.quadrant = abs(sector)
        self.Refresh()
    def getscale(self):
        """Return the current zoom scale; passed to WavePanel as a callback."""
        return self.scale
    def getSample(self, sector):
        """Return audio data for ``sector`` at the current scroll offset.

        Falls back to sector 1 when no quadrant has been selected yet.
        """
        print("obtaining sample")
        if self.quadrant == -1:
            self.setsector(1)
        sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
        return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
    def OnPlayButton(self, event):
        """Start or stop audio playback when the Play toggle changes.

        On start a fresh AudioHandler is created; if a wave file is
        loaded, the currently selected sample (2048 frames) is handed to
        it and the scope trace is refreshed. On stop the handler is
        stopped and discarded.
        """
        if self.btnPlay.GetValue():
            self.audiohandle = audiothread.AudioHandler()
            if self.fileloaded:
                self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
                self.scope = self.audiohandle.getscopesample()
                print("sample length: " + str(len(self.scope)))
            self.audiohandle.start()
        else:
            self.audiohandle.stop()
            self.audiohandle = None
def onMouseWheel(self, event):
if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
if (self.shift < 10000000):
self.shift += 2000
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh()
    def OnOpenButton(self, evt):
        """Prompt for a .wav file and load it into the wave handler."""
        #Open file
        with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
                           style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
            if fileDialog.ShowModal() == wx.ID_CANCEL:
                return  # the user changed their mind
            pathname = fileDialog.GetPath()
            try:
                with wave.open(pathname, 'r') as file:
                    self.wavehandle.loadwave(file)
                    self.Refresh()
                    self.fileloaded = True
            except IOError:
                wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):<|fim▁hole|> print("Export")
    def OnQuitButton(self, evt):
        """Close the main frame, ending the application."""
        self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
    """Top panel showing the loaded waveform.

    Tracks mouse-over and Ctrl-key state for the frame's wheel routing,
    and reports the clicked sample sector back to the frame.
    ``getter`` returns the frame's current zoom scale; ``sender`` receives
    the selected sector index.
    """
    def __init__(self, parent, getter, sender):
        wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
        self.mouseOver = False
        self.ctrlDown = False
        self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
        self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
        self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
        self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
        self.getter = getter
        self.sender = sender

    def onMouseClick(self, event):
        # Map the click's x coordinate to a sector index at the current
        # zoom scale.
        # NOTE(review): 2048 / self.getter() is true (float) division, so
        # the sector sent on is a float — confirm downstream indexing
        # copes with that.
        if self.mouseOver:
            x, y = self.ScreenToClient(wx.GetMousePosition())
            sector = abs(x // (2048 / self.getter()))
            self.sender(sector)

    def onMouseOver(self, event):
        self.mouseOver = True

    def onMouseLeave(self, event):
        self.mouseOver = False

    def onKeyPress(self, event):
        # Track Ctrl state so the frame's wheel handler can zoom vs scroll.
        keycode = event.GetKeyCode()
        if keycode == wx.WXK_CONTROL:
            self.ctrlDown = True

    def onKeyRelease(self, event):
        keycode = event.GetKeyCode()
        if keycode == wx.WXK_CONTROL:
            self.ctrlDown = False
class ScopePanel(wx.Panel): #just handles mouseover events
    """Middle panel displaying the oscilloscope trace.

    Only tracks whether the mouse is over it, so MyFrame.onMouseWheel can
    route wheel events to the playback phase shift.
    """
    def __init__(self, parent):
        wx.Panel.__init__(self, parent, pos=(0, 256), size=(1024, 128))
        self.mouseOver = False
        self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)

    def onMouseOver(self, event):
        self.mouseOver = True

    def onMouseLeave(self, event):
        self.mouseOver = False
class MyApp(wx.App):
    """Application bootstrap: builds the wave handler and main frame."""
    def OnInit(self):
        waveHandle = wavehandle.WaveHandler()
        frame = MyFrame(None, "MiSynth Editor", waveHandle)
        self.SetTopWindow(frame)
        frame.Show(True)
        return True  # returning True lets the app continue running
if __name__ == '__main__':
    # redirect=True routes stdout/stderr to a wx window, so tracebacks
    # raised inside event handlers remain visible.
    app = MyApp(redirect=True)
    app.MainLoop()
<|file_name|>testSegment.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
###########################################################################
# This is part of the module phystricks
#
# phystricks is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# phystricks is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with phystricks.py. If not, see <http://www.gnu.org/licenses/>.
###########################################################################
# copyright (c) Laurent Claessens, 2017
# email: [email protected]
from __future__ import division
from phystricks import *
from Testing import assert_true
from Testing import assert_false
from Testing import assert_equal
from Testing import assert_almost_equal
from Testing import echo_function
from Testing import echo_single_test
from Testing import SilentOutput
def test_almost_equal():
    """The normal vector of a diagonal segment is unit length at its midpoint."""
    echo_function("test_almost_equal")
    segment = Segment(Point(1, 1), Point(2, 2))
    normal = segment.get_normal_vector()
    assert_equal(normal.I, Point(1.5, 1.5))
    assert_almost_equal(normal.length, 1, epsilon=0.001)
    expected_tip = Point(1/2*sqrt(2) + 1.5, -1/2*sqrt(2) + 1.5)
    assert_almost_equal(normal.F, expected_tip, epsilon=0.001)
def test_constructors():
    """Segments can be built from two points, a Vector, or an AffineVector."""
    echo_single_test("Usual constructor")
    segment = Segment(Point(0, 0), Point(2, 10))
    assert_equal(segment.I, Point(0, 0))
    assert_equal(segment.F, Point(2, 10))
    echo_single_test("Construct with a vector")
    segment = Segment(Point(-3, 4), vector=Vector(1, 2))
    assert_equal(segment.I, Point(-3, 4))
    assert_equal(segment.F, Point(-2, 6))
    echo_single_test("Construct with an affine vector")
    displacement = AffineVector(Point(1, 2), Point(-2, 5))
    segment = Segment(Point(-3, 4), vector=displacement)
    assert_equal(segment.I, Point(-3, 4))
    assert_equal(segment.F, Point(-6, 7))
<|fim▁hole|> test_almost_equal()<|fim▁end|> | def testSegment():
test_constructors() |
<|file_name|>mips_unknown_linux_uclibc.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
pub fn target() -> TargetResult {
Ok(Target {
llvm_target: "mips-unknown-linux-uclibc".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "32".to_string(),
target_c_int_width: "32".to_string(),
data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".to_string(),
arch: "mips".to_string(),<|fim▁hole|> options: TargetOptions {
cpu: "mips32r2".to_string(),
features: "+mips32r2,+soft-float".to_string(),
max_atomic_width: Some(32),
..super::linux_base::opts()
},
})
}<|fim▁end|> | target_os: "linux".to_string(),
target_env: "uclibc".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc, |
<|file_name|>migration.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import distutils.version as dist_version
import os
import sys
from dragon.db.sqlalchemy.session import get_engine
from dragon.db import migration
import sqlalchemy
import migrate
from migrate.versioning import util as migrate_util
<|fim▁hole|>from dragon.openstack.common import exception
from dragon.openstack.common.gettextutils import _
_REPOSITORY = None
@migrate_util.decorator
def patched_with_engine(f, *a, **kw):
    """Backport of migrate's ``with_engine`` that disposes created engines.

    Ensures SQLAlchemy engines constructed here from a URL are disposed
    after the wrapped versioning call, avoiding connection leaks (fixed
    upstream in sqlalchemy-migrate >= 0.7.3).
    """
    url = a[0]  # first positional argument is the database URL (or Engine)
    engine = migrate_util.construct_engine(url, **kw)
    try:
        kw['engine'] = engine
        return f(*a, **kw)
    finally:
        # Only dispose engines created in this wrapper; if the caller
        # passed an Engine object directly, its lifecycle is theirs.
        if isinstance(engine, migrate_util.Engine) and engine is not url:
            migrate_util.log.debug('Disposing SQLAlchemy engine %s', engine)
            engine.dispose()
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine
# NOTE(jkoelker) Delay importing migrate until we are patched
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
try:
from migrate import exceptions as versioning_exceptions
except ImportError:
sys.exit(_("python-migrate is not installed. Exiting."))
#_REPOSITORY = None
def db_sync(version=None):
    """Upgrade or downgrade the schema to ``version`` (latest when None)."""
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.Error(_("version should be an integer"))
    current_version = db_version()
    repository = _find_migrate_repo()
    # Downgrade only when an explicit, not-newer target was requested;
    # otherwise (no target, or a newer one) upgrade.
    if version is not None and version <= current_version:
        return versioning_api.downgrade(get_engine(), repository, version)
    return versioning_api.upgrade(get_engine(), repository, version)
def db_version():
    """Return the current migration version of the database.

    If the database has never been placed under version control, an
    empty database is stamped with the initial version and queried
    again; a non-empty uncontrolled database re-raises the error.
    """
    repository = _find_migrate_repo()
    try:
        return versioning_api.db_version(get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError as exc:
        # If we aren't version controlled there may be an existing,
        # non-version controlled database present.
        meta = sqlalchemy.MetaData()
        engine = get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables):
            # Tables exist but no version info: refuse to guess.
            raise exc
        # Pristine database: stamp it with the initial version.
        db_version_control(migration.INIT_VERSION)
        return versioning_api.db_version(get_engine(), repository)
def db_version_control(version=None):
    """Place the database under migrate's version control at ``version``."""
    versioning_api.version_control(get_engine(), _find_migrate_repo(), version)
    return version
def _find_migrate_repo():
    """Return the (cached) migrate repository bundled with this package.

    Raises:
        exception.Error: if the ``migrate_repo`` directory is missing,
            e.g. a broken installation.
    """
    global _REPOSITORY
    if _REPOSITORY is None:
        path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'migrate_repo')
        # An assert would be stripped under ``python -O``; raise
        # explicitly so a broken install is always reported.
        if not os.path.exists(path):
            raise exception.Error(
                _("migrate repository not found: %s") % path)
        _REPOSITORY = Repository(path)
    return _REPOSITORY
<|file_name|>GetHealthCheckLastFailureReasonResultStaxUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model.transform;
import java.util.ArrayList;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.route53.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* GetHealthCheckLastFailureReasonResult StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetHealthCheckLastFailureReasonResultStaxUnmarshaller implements Unmarshaller<GetHealthCheckLastFailureReasonResult, StaxUnmarshallerContext> {
public GetHealthCheckLastFailureReasonResult unmarshall(StaxUnmarshallerContext context) throws Exception {
GetHealthCheckLastFailureReasonResult getHealthCheckLastFailureReasonResult = new GetHealthCheckLastFailureReasonResult();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
if (context.isStartOfDocument())
targetDepth += 1;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument())
return getHealthCheckLastFailureReasonResult;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("HealthCheckObservations", targetDepth)) {
getHealthCheckLastFailureReasonResult.withHealthCheckObservations(new ArrayList<HealthCheckObservation>());
continue;
}
if (context.testExpression("HealthCheckObservations/HealthCheckObservation", targetDepth)) {
getHealthCheckLastFailureReasonResult.withHealthCheckObservations(HealthCheckObservationStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
if (context.getCurrentDepth() < originalDepth) {
return getHealthCheckLastFailureReasonResult;
}
}
}
}<|fim▁hole|>
    /** Lazily created shared instance; the unmarshaller holds no state. */
    private static GetHealthCheckLastFailureReasonResultStaxUnmarshaller instance;

    /**
     * Returns the shared unmarshaller instance, creating it on first use.
     * NOTE(review): the null check is not synchronized — under concurrent
     * first use at worst two instances are created, which is benign for a
     * stateless unmarshaller (standard AWS SDK generated-code pattern).
     */
    public static GetHealthCheckLastFailureReasonResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new GetHealthCheckLastFailureReasonResultStaxUnmarshaller();
        return instance;
    }
}<|fim▁end|> | |
<|file_name|>0049_preprintprovider_preprint_word.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-09 17:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('osf', '0048_merge_20170804_0910'),
]
operations = [
migrations.AddField(
model_name='preprintprovider',
name='preprint_word',
field=models.CharField(choices=[('preprint', 'Preprint'), ('paper', 'Paper'), ('thesis', 'Thesis'), ('none', 'None')], default='preprint', max_length=10),<|fim▁hole|> ]<|fim▁end|> | ), |
<|file_name|>perl.js<|end_file_name|><|fim▁begin|>// CodeMirror2 mode/perl/perl.js (text/x-perl) beta 0.10 (2011-11-08)
// This is a part of CodeMirror from https://github.com/sabaca/CodeMirror_mode_perl ([email protected])
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("perl",function(){
// http://perldoc.perl.org
var PERL={ // null - magic touch
// 1 - keyword
// 2 - def
// 3 - atom
// 4 - operator
// 5 - variable-2 (predefined)
// [x,y] - x=1,2,3; y=must be defined if x{...}
// PERL operators
'->' : 4,
'++' : 4,
'--' : 4,
'**' : 4,
// ! ~ \ and unary + and -
'=~' : 4,
'!~' : 4,
'*' : 4,
'/' : 4,
'%' : 4,
'x' : 4,
'+' : 4,
'-' : 4,
'.' : 4,
'<<' : 4,
'>>' : 4,
// named unary operators
'<' : 4,
'>' : 4,
'<=' : 4,
'>=' : 4,
'lt' : 4,
'gt' : 4,
'le' : 4,
'ge' : 4,
'==' : 4,
'!=' : 4,
'<=>' : 4,
'eq' : 4,
'ne' : 4,
'cmp' : 4,
'~~' : 4,
'&' : 4,
'|' : 4,
'^' : 4,
'&&' : 4,
'||' : 4,
'//' : 4,
'..' : 4,
'...' : 4,
'?' : 4,
':' : 4,
'=' : 4,
'+=' : 4,
'-=' : 4,
'*=' : 4, // etc. ???
',' : 4,
'=>' : 4,
'::' : 4,
// list operators (rightward)
'not' : 4,
'and' : 4,
'or' : 4,
'xor' : 4,
// PERL predefined variables (I know, what this is a paranoid idea, but may be needed for people, who learn PERL, and for me as well, ...and may be for you?;)
'BEGIN' : [5,1],
'END' : [5,1],
'PRINT' : [5,1],
'PRINTF' : [5,1],
'GETC' : [5,1],
'READ' : [5,1],
'READLINE' : [5,1],
'DESTROY' : [5,1],
'TIE' : [5,1],
'TIEHANDLE' : [5,1],
'UNTIE' : [5,1],
'STDIN' : 5,
'STDIN_TOP' : 5,
'STDOUT' : 5,
'STDOUT_TOP' : 5,
'STDERR' : 5,
'STDERR_TOP' : 5,
'$ARG' : 5,
'$_' : 5,
'@ARG' : 5,
'@_' : 5,
'$LIST_SEPARATOR' : 5,
'$"' : 5,
'$PROCESS_ID' : 5,
'$PID' : 5,
'$$' : 5,
'$REAL_GROUP_ID' : 5,
'$GID' : 5,
'$(' : 5,
'$EFFECTIVE_GROUP_ID' : 5,
'$EGID' : 5,
'$)' : 5,
'$PROGRAM_NAME' : 5,
'$0' : 5,
'$SUBSCRIPT_SEPARATOR' : 5,
'$SUBSEP' : 5,
'$;' : 5,
'$REAL_USER_ID' : 5,
'$UID' : 5,
'$<' : 5,
'$EFFECTIVE_USER_ID' : 5,
'$EUID' : 5,
'$>' : 5,
'$a' : 5,
'$b' : 5,
'$COMPILING' : 5,
'$^C' : 5,
'$DEBUGGING' : 5,
'$^D' : 5,
'${^ENCODING}' : 5,
'$ENV' : 5,
'%ENV' : 5,
'$SYSTEM_FD_MAX' : 5,
'$^F' : 5,
'@F' : 5,
'${^GLOBAL_PHASE}' : 5,
'$^H' : 5,
'%^H' : 5,
'@INC' : 5,
'%INC' : 5,
'$INPLACE_EDIT' : 5,
'$^I' : 5,
'$^M' : 5,
'$OSNAME' : 5,
'$^O' : 5,
'${^OPEN}' : 5,
'$PERLDB' : 5,
'$^P' : 5,
'$SIG' : 5,
'%SIG' : 5,
'$BASETIME' : 5,
'$^T' : 5,
'${^TAINT}' : 5,
'${^UNICODE}' : 5,
'${^UTF8CACHE}' : 5,
'${^UTF8LOCALE}' : 5,
'$PERL_VERSION' : 5,
'$^V' : 5,
'${^WIN32_SLOPPY_STAT}' : 5,
'$EXECUTABLE_NAME' : 5,
'$^X' : 5,
'$1' : 5, // - regexp $1, $2...
'$MATCH' : 5,
'$&' : 5,
'${^MATCH}' : 5,
'$PREMATCH' : 5,
'$`' : 5,
'${^PREMATCH}' : 5,
'$POSTMATCH' : 5,
"$'" : 5,
'${^POSTMATCH}' : 5,
'$LAST_PAREN_MATCH' : 5,
'$+' : 5,
'$LAST_SUBMATCH_RESULT' : 5,
'$^N' : 5,
'@LAST_MATCH_END' : 5,
'@+' : 5,
'%LAST_PAREN_MATCH' : 5,
'%+' : 5,
'@LAST_MATCH_START' : 5,
'@-' : 5,
'%LAST_MATCH_START' : 5,
'%-' : 5,
'$LAST_REGEXP_CODE_RESULT' : 5,
'$^R' : 5,
'${^RE_DEBUG_FLAGS}' : 5,
'${^RE_TRIE_MAXBUF}' : 5,
'$ARGV' : 5,
'@ARGV' : 5,
'ARGV' : 5,
'ARGVOUT' : 5,
'$OUTPUT_FIELD_SEPARATOR' : 5,
'$OFS' : 5,
'$,' : 5,
'$INPUT_LINE_NUMBER' : 5,
'$NR' : 5,
'$.' : 5,
'$INPUT_RECORD_SEPARATOR' : 5,
'$RS' : 5,
'$/' : 5,
'$OUTPUT_RECORD_SEPARATOR' : 5,
'$ORS' : 5,
'$\\' : 5,
'$OUTPUT_AUTOFLUSH' : 5,
'$|' : 5,
'$ACCUMULATOR' : 5,
'$^A' : 5,
'$FORMAT_FORMFEED' : 5,
'$^L' : 5,
'$FORMAT_PAGE_NUMBER' : 5,
'$%' : 5,
'$FORMAT_LINES_LEFT' : 5,
'$-' : 5,
'$FORMAT_LINE_BREAK_CHARACTERS' : 5,
'$:' : 5,
'$FORMAT_LINES_PER_PAGE' : 5,
'$=' : 5,
'$FORMAT_TOP_NAME' : 5,
'$^' : 5,
'$FORMAT_NAME' : 5,
'$~' : 5,
'${^CHILD_ERROR_NATIVE}' : 5,
'$EXTENDED_OS_ERROR' : 5,
'$^E' : 5,
'$EXCEPTIONS_BEING_CAUGHT' : 5,
'$^S' : 5,
'$WARNING' : 5,
'$^W' : 5,
'${^WARNING_BITS}' : 5,
'$OS_ERROR' : 5,
'$ERRNO' : 5,
'$!' : 5,
'%OS_ERROR' : 5,
'%ERRNO' : 5,
'%!' : 5,
'$CHILD_ERROR' : 5,
'$?' : 5,
'$EVAL_ERROR' : 5,
'$@' : 5,
'$OFMT' : 5,
'$#' : 5,
'$*' : 5,
'$ARRAY_BASE' : 5,
'$[' : 5,
'$OLD_PERL_VERSION' : 5,
'$]' : 5,
// PERL blocks
'if' :[1,1],
elsif :[1,1],
'else' :[1,1],
'while' :[1,1],
unless :[1,1],
'for' :[1,1],
foreach :[1,1],
// PERL functions
'abs' :1, // - absolute value function
accept :1, // - accept an incoming socket connect
alarm :1, // - schedule a SIGALRM
'atan2' :1, // - arctangent of Y/X in the range -PI to PI
bind :1, // - binds an address to a socket
binmode :1, // - prepare binary files for I/O
bless :1, // - create an object
bootstrap :1, //
'break' :1, // - break out of a "given" block
caller :1, // - get context of the current subroutine call
chdir :1, // - change your current working directory
chmod :1, // - changes the permissions on a list of files
chomp :1, // - remove a trailing record separator from a string
chop :1, // - remove the last character from a string
chown :1, // - change the owership on a list of files
chr :1, // - get character this number represents
chroot :1, // - make directory new root for path lookups
close :1, // - close file (or pipe or socket) handle
closedir :1, // - close directory handle
connect :1, // - connect to a remote socket
'continue' :[1,1], // - optional trailing block in a while or foreach
'cos' :1, // - cosine function
crypt :1, // - one-way passwd-style encryption
dbmclose :1, // - breaks binding on a tied dbm file
dbmopen :1, // - create binding on a tied dbm file
'default' :1, //
defined :1, // - test whether a value, variable, or function is defined
'delete' :1, // - deletes a value from a hash
die :1, // - raise an exception or bail out
'do' :1, // - turn a BLOCK into a TERM
dump :1, // - create an immediate core dump
each :1, // - retrieve the next key/value pair from a hash
endgrent :1, // - be done using group file
endhostent :1, // - be done using hosts file
endnetent :1, // - be done using networks file
endprotoent :1, // - be done using protocols file
endpwent :1, // - be done using passwd file
endservent :1, // - be done using services file
eof :1, // - test a filehandle for its end
'eval' :1, // - catch exceptions or compile and run code
'exec' :1, // - abandon this program to run another
exists :1, // - test whether a hash key is present
exit :1, // - terminate this program
'exp' :1, // - raise I to a power
fcntl :1, // - file control system call
fileno :1, // - return file descriptor from filehandle
flock :1, // - lock an entire file with an advisory lock
fork :1, // - create a new process just like this one
format :1, // - declare a picture format with use by the write() function
formline :1, // - internal function used for formats
getc :1, // - get the next character from the filehandle
getgrent :1, // - get next group record
getgrgid :1, // - get group record given group user ID
getgrnam :1, // - get group record given group name
gethostbyaddr :1, // - get host record given its address
gethostbyname :1, // - get host record given name
gethostent :1, // - get next hosts record
getlogin :1, // - return who logged in at this tty
getnetbyaddr :1, // - get network record given its address
getnetbyname :1, // - get networks record given name
getnetent :1, // - get next networks record
getpeername :1, // - find the other end of a socket connection
getpgrp :1, // - get process group
getppid :1, // - get parent process ID
getpriority :1, // - get current nice value
getprotobyname :1, // - get protocol record given name
getprotobynumber :1, // - get protocol record numeric protocol
getprotoent :1, // - get next protocols record
getpwent :1, // - get next passwd record
getpwnam :1, // - get passwd record given user login name
getpwuid :1, // - get passwd record given user ID
getservbyname :1, // - get services record given its name
getservbyport :1, // - get services record given numeric port
getservent :1, // - get next services record
getsockname :1, // - retrieve the sockaddr for a given socket
getsockopt :1, // - get socket options on a given socket
given :1, //
glob :1, // - expand filenames using wildcards
gmtime :1, // - convert UNIX time into record or string using Greenwich time
'goto' :1, // - create spaghetti code
grep :1, // - locate elements in a list test true against a given criterion
hex :1, // - convert a string to a hexadecimal number
'import' :1, // - patch a module's namespace into your own
index :1, // - find a substring within a string
'int' :1, // - get the integer portion of a number
ioctl :1, // - system-dependent device control system call
'join' :1, // - join a list into a string using a separator
keys :1, // - retrieve list of indices from a hash
kill :1, // - send a signal to a process or process group
last :1, // - exit a block prematurely
lc :1, // - return lower-case version of a string
lcfirst :1, // - return a string with just the next letter in lower case
length :1, // - return the number of bytes in a string
'link' :1, // - create a hard link in the filesytem
listen :1, // - register your socket as a server
local : 2, // - create a temporary value for a global variable (dynamic scoping)
localtime :1, // - convert UNIX time into record or string using local time
lock :1, // - get a thread lock on a variable, subroutine, or method
'log' :1, // - retrieve the natural logarithm for a number
lstat :1, // - stat a symbolic link
m :null, // - match a string with a regular expression pattern
map :1, // - apply a change to a list to get back a new list with the changes
mkdir :1, // - create a directory
msgctl :1, // - SysV IPC message control operations
msgget :1, // - get SysV IPC message queue
msgrcv :1, // - receive a SysV IPC message from a message queue
msgsnd :1, // - send a SysV IPC message to a message queue
my : 2, // - declare and assign a local variable (lexical scoping)
'new' :1, //
next :1, // - iterate a block prematurely
no :1, // - unimport some module symbols or semantics at compile time
oct :1, // - convert a string to an octal number
open :1, // - open a file, pipe, or descriptor
opendir :1, // - open a directory
ord :1, // - find a character's numeric representation
our : 2, // - declare and assign a package variable (lexical scoping)
pack :1, // - convert a list into a binary representation
'package' :1, // - declare a separate global namespace
pipe :1, // - open a pair of connected filehandles
pop :1, // - remove the last element from an array and return it
pos :1, // - find or set the offset for the last/next m//g search
print :1, // - output a list to a filehandle
printf :1, // - output a formatted list to a filehandle
prototype :1, // - get the prototype (if any) of a subroutine
push :1, // - append one or more elements to an array
q :null, // - singly quote a string
qq :null, // - doubly quote a string
qr :null, // - Compile pattern
quotemeta :null, // - quote regular expression magic characters
qw :null, // - quote a list of words
qx :null, // - backquote quote a string
rand :1, // - retrieve the next pseudorandom number
read :1, // - fixed-length buffered input from a filehandle
readdir :1, // - get a directory from a directory handle
readline :1, // - fetch a record from a file
readlink :1, // - determine where a symbolic link is pointing
readpipe :1, // - execute a system command and collect standard output
recv :1, // - receive a message over a Socket
redo :1, // - start this loop iteration over again
ref :1, // - find out the type of thing being referenced
rename :1, // - change a filename
require :1, // - load in external functions from a library at runtime
reset :1, // - clear all variables of a given name
'return' :1, // - get out of a function early
reverse :1, // - flip a string or a list
rewinddir :1, // - reset directory handle
rindex :1, // - right-to-left substring search
rmdir :1, // - remove a directory
s :null, // - replace a pattern with a string
say :1, // - print with newline
scalar :1, // - force a scalar context<|fim▁hole|> seekdir :1, // - reposition directory pointer
select :1, // - reset default output or do I/O multiplexing
semctl :1, // - SysV semaphore control operations
semget :1, // - get set of SysV semaphores
semop :1, // - SysV semaphore operations
send :1, // - send a message over a socket
setgrent :1, // - prepare group file for use
sethostent :1, // - prepare hosts file for use
setnetent :1, // - prepare networks file for use
setpgrp :1, // - set the process group of a process
setpriority :1, // - set a process's nice value
setprotoent :1, // - prepare protocols file for use
setpwent :1, // - prepare passwd file for use
setservent :1, // - prepare services file for use
setsockopt :1, // - set some socket options
shift :1, // - remove the first element of an array, and return it
shmctl :1, // - SysV shared memory operations
shmget :1, // - get SysV shared memory segment identifier
shmread :1, // - read SysV shared memory
shmwrite :1, // - write SysV shared memory
shutdown :1, // - close down just half of a socket connection
'sin' :1, // - return the sine of a number
sleep :1, // - block for some number of seconds
socket :1, // - create a socket
socketpair :1, // - create a pair of sockets
'sort' :1, // - sort a list of values
splice :1, // - add or remove elements anywhere in an array
'split' :1, // - split up a string using a regexp delimiter
sprintf :1, // - formatted print into a string
'sqrt' :1, // - square root function
srand :1, // - seed the random number generator
stat :1, // - get a file's status information
state :1, // - declare and assign a state variable (persistent lexical scoping)
study :1, // - optimize input data for repeated searches
'sub' :1, // - declare a subroutine, possibly anonymously
'substr' :1, // - get or alter a portion of a stirng
symlink :1, // - create a symbolic link to a file
syscall :1, // - execute an arbitrary system call
sysopen :1, // - open a file, pipe, or descriptor
sysread :1, // - fixed-length unbuffered input from a filehandle
sysseek :1, // - position I/O pointer on handle used with sysread and syswrite
system :1, // - run a separate program
syswrite :1, // - fixed-length unbuffered output to a filehandle
tell :1, // - get current seekpointer on a filehandle
telldir :1, // - get current seekpointer on a directory handle
tie :1, // - bind a variable to an object class
tied :1, // - get a reference to the object underlying a tied variable
time :1, // - return number of seconds since 1970
times :1, // - return elapsed time for self and child processes
tr :null, // - transliterate a string
truncate :1, // - shorten a file
uc :1, // - return upper-case version of a string
ucfirst :1, // - return a string with just the next letter in upper case
umask :1, // - set file creation mode mask
undef :1, // - remove a variable or function definition
unlink :1, // - remove one link to a file
unpack :1, // - convert binary structure into normal perl variables
unshift :1, // - prepend more elements to the beginning of a list
untie :1, // - break a tie binding to a variable
use :1, // - load in a module at compile time
utime :1, // - set a file's last access and modify times
values :1, // - return a list of the values in a hash
vec :1, // - test or set particular bits in a string
wait :1, // - wait for any child process to die
waitpid :1, // - wait for a particular child process to die
wantarray :1, // - get void vs scalar vs list context of current subroutine call
warn :1, // - print debugging info
when :1, //
write :1, // - print a picture record
y :null}; // - transliterate a string
var RXstyle="string-2";
var RXmodifiers=/[goseximacplud]/; // NOTE: "m", "s", "y" and "tr" need to correct real modifiers for each regexp type
function tokenChain(stream,state,chain,style,tail){ // NOTE: chain.length > 2 is not working now (it's for s[...][...]geos;)
state.chain=null; // 12 3tail
state.style=null;
state.tail=null;
state.tokenize=function(stream,state){
var e=false,c,i=0;
while(c=stream.next()){
if(c===chain[i]&&!e){
if(chain[++i]!==undefined){
state.chain=chain[i];
state.style=style;
state.tail=tail;}
else if(tail)
stream.eatWhile(tail);
state.tokenize=tokenPerl;
return style;}
e=!e&&c=="\\";}
return style;};
return state.tokenize(stream,state);}
function tokenSOMETHING(stream,state,string){
state.tokenize=function(stream,state){
if(stream.string==string)
state.tokenize=tokenPerl;
stream.skipToEnd();
return "string";};
return state.tokenize(stream,state);}
function tokenPerl(stream,state){
if(stream.eatSpace())
return null;
if(state.chain)
return tokenChain(stream,state,state.chain,state.style,state.tail);
if(stream.match(/^\-?[\d\.]/,false))
if(stream.match(/^(\-?(\d*\.\d+(e[+-]?\d+)?|\d+\.\d*)|0x[\da-fA-F]+|0b[01]+|\d+(e[+-]?\d+)?)/))
return 'number';
if(stream.match(/^<<(?=\w)/)){ // NOTE: <<SOMETHING\n...\nSOMETHING\n
stream.eatWhile(/\w/);
return tokenSOMETHING(stream,state,stream.current().substr(2));}
if(stream.sol()&&stream.match(/^\=item(?!\w)/)){// NOTE: \n=item...\n=cut\n
return tokenSOMETHING(stream,state,'=cut');}
var ch=stream.next();
if(ch=='"'||ch=="'"){ // NOTE: ' or " or <<'SOMETHING'\n...\nSOMETHING\n or <<"SOMETHING"\n...\nSOMETHING\n
if(prefix(stream, 3)=="<<"+ch){
var p=stream.pos;
stream.eatWhile(/\w/);
var n=stream.current().substr(1);
if(n&&stream.eat(ch))
return tokenSOMETHING(stream,state,n);
stream.pos=p;}
return tokenChain(stream,state,[ch],"string");}
if(ch=="q"){
var c=look(stream, -2);
if(!(c&&/\w/.test(c))){
c=look(stream, 0);
if(c=="x"){
c=look(stream, 1);
if(c=="("){
eatSuffix(stream, 2);
return tokenChain(stream,state,[")"],RXstyle,RXmodifiers);}
if(c=="["){
eatSuffix(stream, 2);
return tokenChain(stream,state,["]"],RXstyle,RXmodifiers);}
if(c=="{"){
eatSuffix(stream, 2);
return tokenChain(stream,state,["}"],RXstyle,RXmodifiers);}
if(c=="<"){
eatSuffix(stream, 2);
return tokenChain(stream,state,[">"],RXstyle,RXmodifiers);}
if(/[\^'"!~\/]/.test(c)){
eatSuffix(stream, 1);
return tokenChain(stream,state,[stream.eat(c)],RXstyle,RXmodifiers);}}
else if(c=="q"){
c=look(stream, 1);
if(c=="("){
eatSuffix(stream, 2);
return tokenChain(stream,state,[")"],"string");}
if(c=="["){
eatSuffix(stream, 2);
return tokenChain(stream,state,["]"],"string");}
if(c=="{"){
eatSuffix(stream, 2);
return tokenChain(stream,state,["}"],"string");}
if(c=="<"){
eatSuffix(stream, 2);
return tokenChain(stream,state,[">"],"string");}
if(/[\^'"!~\/]/.test(c)){
eatSuffix(stream, 1);
return tokenChain(stream,state,[stream.eat(c)],"string");}}
else if(c=="w"){
c=look(stream, 1);
if(c=="("){
eatSuffix(stream, 2);
return tokenChain(stream,state,[")"],"bracket");}
if(c=="["){
eatSuffix(stream, 2);
return tokenChain(stream,state,["]"],"bracket");}
if(c=="{"){
eatSuffix(stream, 2);
return tokenChain(stream,state,["}"],"bracket");}
if(c=="<"){
eatSuffix(stream, 2);
return tokenChain(stream,state,[">"],"bracket");}
if(/[\^'"!~\/]/.test(c)){
eatSuffix(stream, 1);
return tokenChain(stream,state,[stream.eat(c)],"bracket");}}
else if(c=="r"){
c=look(stream, 1);
if(c=="("){
eatSuffix(stream, 2);
return tokenChain(stream,state,[")"],RXstyle,RXmodifiers);}
if(c=="["){
eatSuffix(stream, 2);
return tokenChain(stream,state,["]"],RXstyle,RXmodifiers);}
if(c=="{"){
eatSuffix(stream, 2);
return tokenChain(stream,state,["}"],RXstyle,RXmodifiers);}
if(c=="<"){
eatSuffix(stream, 2);
return tokenChain(stream,state,[">"],RXstyle,RXmodifiers);}
if(/[\^'"!~\/]/.test(c)){
eatSuffix(stream, 1);
return tokenChain(stream,state,[stream.eat(c)],RXstyle,RXmodifiers);}}
else if(/[\^'"!~\/(\[{<]/.test(c)){
if(c=="("){
eatSuffix(stream, 1);
return tokenChain(stream,state,[")"],"string");}
if(c=="["){
eatSuffix(stream, 1);
return tokenChain(stream,state,["]"],"string");}
if(c=="{"){
eatSuffix(stream, 1);
return tokenChain(stream,state,["}"],"string");}
if(c=="<"){
eatSuffix(stream, 1);
return tokenChain(stream,state,[">"],"string");}
if(/[\^'"!~\/]/.test(c)){
return tokenChain(stream,state,[stream.eat(c)],"string");}}}}
if(ch=="m"){
var c=look(stream, -2);
if(!(c&&/\w/.test(c))){
c=stream.eat(/[(\[{<\^'"!~\/]/);
if(c){
if(/[\^'"!~\/]/.test(c)){
return tokenChain(stream,state,[c],RXstyle,RXmodifiers);}
if(c=="("){
return tokenChain(stream,state,[")"],RXstyle,RXmodifiers);}
if(c=="["){
return tokenChain(stream,state,["]"],RXstyle,RXmodifiers);}
if(c=="{"){
return tokenChain(stream,state,["}"],RXstyle,RXmodifiers);}
if(c=="<"){
return tokenChain(stream,state,[">"],RXstyle,RXmodifiers);}}}}
if(ch=="s"){
var c=/[\/>\]})\w]/.test(look(stream, -2));
if(!c){
c=stream.eat(/[(\[{<\^'"!~\/]/);
if(c){
if(c=="[")
return tokenChain(stream,state,["]","]"],RXstyle,RXmodifiers);
if(c=="{")
return tokenChain(stream,state,["}","}"],RXstyle,RXmodifiers);
if(c=="<")
return tokenChain(stream,state,[">",">"],RXstyle,RXmodifiers);
if(c=="(")
return tokenChain(stream,state,[")",")"],RXstyle,RXmodifiers);
return tokenChain(stream,state,[c,c],RXstyle,RXmodifiers);}}}
if(ch=="y"){
var c=/[\/>\]})\w]/.test(look(stream, -2));
if(!c){
c=stream.eat(/[(\[{<\^'"!~\/]/);
if(c){
if(c=="[")
return tokenChain(stream,state,["]","]"],RXstyle,RXmodifiers);
if(c=="{")
return tokenChain(stream,state,["}","}"],RXstyle,RXmodifiers);
if(c=="<")
return tokenChain(stream,state,[">",">"],RXstyle,RXmodifiers);
if(c=="(")
return tokenChain(stream,state,[")",")"],RXstyle,RXmodifiers);
return tokenChain(stream,state,[c,c],RXstyle,RXmodifiers);}}}
if(ch=="t"){
var c=/[\/>\]})\w]/.test(look(stream, -2));
if(!c){
c=stream.eat("r");if(c){
c=stream.eat(/[(\[{<\^'"!~\/]/);
if(c){
if(c=="[")
return tokenChain(stream,state,["]","]"],RXstyle,RXmodifiers);
if(c=="{")
return tokenChain(stream,state,["}","}"],RXstyle,RXmodifiers);
if(c=="<")
return tokenChain(stream,state,[">",">"],RXstyle,RXmodifiers);
if(c=="(")
return tokenChain(stream,state,[")",")"],RXstyle,RXmodifiers);
return tokenChain(stream,state,[c,c],RXstyle,RXmodifiers);}}}}
if(ch=="`"){
return tokenChain(stream,state,[ch],"variable-2");}
if(ch=="/"){
if(!/~\s*$/.test(prefix(stream)))
return "operator";
else
return tokenChain(stream,state,[ch],RXstyle,RXmodifiers);}
if(ch=="$"){
var p=stream.pos;
if(stream.eatWhile(/\d/)||stream.eat("{")&&stream.eatWhile(/\d/)&&stream.eat("}"))
return "variable-2";
else
stream.pos=p;}
if(/[$@%]/.test(ch)){
var p=stream.pos;
if(stream.eat("^")&&stream.eat(/[A-Z]/)||!/[@$%&]/.test(look(stream, -2))&&stream.eat(/[=|\\\-#?@;:&`~\^!\[\]*'"$+.,\/<>()]/)){
var c=stream.current();
if(PERL[c])
return "variable-2";}
stream.pos=p;}
if(/[$@%&]/.test(ch)){
if(stream.eatWhile(/[\w$\[\]]/)||stream.eat("{")&&stream.eatWhile(/[\w$\[\]]/)&&stream.eat("}")){
var c=stream.current();
if(PERL[c])
return "variable-2";
else
return "variable";}}
if(ch=="#"){
if(look(stream, -2)!="$"){
stream.skipToEnd();
return "comment";}}
if(/[:+\-\^*$&%@=<>!?|\/~\.]/.test(ch)){
var p=stream.pos;
stream.eatWhile(/[:+\-\^*$&%@=<>!?|\/~\.]/);
if(PERL[stream.current()])
return "operator";
else
stream.pos=p;}
if(ch=="_"){
if(stream.pos==1){
if(suffix(stream, 6)=="_END__"){
return tokenChain(stream,state,['\0'],"comment");}
else if(suffix(stream, 7)=="_DATA__"){
return tokenChain(stream,state,['\0'],"variable-2");}
else if(suffix(stream, 7)=="_C__"){
return tokenChain(stream,state,['\0'],"string");}}}
if(/\w/.test(ch)){
var p=stream.pos;
if(look(stream, -2)=="{"&&(look(stream, 0)=="}"||stream.eatWhile(/\w/)&&look(stream, 0)=="}"))
return "string";
else
stream.pos=p;}
if(/[A-Z]/.test(ch)){
var l=look(stream, -2);
var p=stream.pos;
stream.eatWhile(/[A-Z_]/);
if(/[\da-z]/.test(look(stream, 0))){
stream.pos=p;}
else{
var c=PERL[stream.current()];
if(!c)
return "meta";
if(c[1])
c=c[0];
if(l!=":"){
if(c==1)
return "keyword";
else if(c==2)
return "def";
else if(c==3)
return "atom";
else if(c==4)
return "operator";
else if(c==5)
return "variable-2";
else
return "meta";}
else
return "meta";}}
if(/[a-zA-Z_]/.test(ch)){
var l=look(stream, -2);
stream.eatWhile(/\w/);
var c=PERL[stream.current()];
if(!c)
return "meta";
if(c[1])
c=c[0];
if(l!=":"){
if(c==1)
return "keyword";
else if(c==2)
return "def";
else if(c==3)
return "atom";
else if(c==4)
return "operator";
else if(c==5)
return "variable-2";
else
return "meta";}
else
return "meta";}
return null;}
return{
startState:function(){
return{
tokenize:tokenPerl,
chain:null,
style:null,
tail:null};},
token:function(stream,state){
return (state.tokenize||tokenPerl)(stream,state);},
electricChars:"{}"};});
// Word characters for word-based selection: identifier characters plus the
// "$" sigil.  (The previous class /[\\w$]/ matched a literal backslash, the
// letter "w" and "$" rather than word characters.)
CodeMirror.registerHelper("wordChars", "perl", /[\w$]/);
CodeMirror.defineMIME("text/x-perl", "perl");
// it's like "peek", but need for look-ahead or look-behind if index < 0
function look(stream, c){
  // character at stream.pos + c (c may be negative for look-behind);
  // charAt yields "" when the index is out of range
  var offset = c || 0;
  return stream.string.charAt(stream.pos + offset);
}
// return a part of prefix of current stream from current position
function prefix(stream, c){
  // with c: the c characters ending at stream.pos;
  // without c: everything strictly before position pos-1
  if(!c)
    return stream.string.substr(0, stream.pos-1);
  var start = stream.pos - c;
  if(start < 0)
    start = 0;
  return stream.string.substr(start, c);
}
// return a part of suffix of current stream from current position
function suffix(stream, c){
  // up to c characters starting at stream.pos
  // (the whole remainder when c is falsy or >= string length)
  var total = stream.string.length;
  var rest = total - stream.pos + 1;
  var count = (c && c < total) ? c : rest;
  return stream.string.substr(stream.pos, count);
}
// eating and vomiting a part of stream from current position
function eatSuffix(stream, c){
  // advance (or rewind) stream.pos by c, clamped to [0, string.length-1]
  var target = stream.pos + c;
  var last = stream.string.length - 1;
  if(target <= 0){
    stream.pos = 0;
  }else if(target >= last){
    stream.pos = last;
  }else{
    stream.pos = target;
  }
}
});<|fim▁end|> | seek :1, // - reposition file pointer for random-access I/O |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2016 Edouard Lambert
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
<|fim▁hole|>
from .module import BinckModule
__all__ = ['BinckModule']<|fim▁end|> | |
<|file_name|>RgbImage.cpp<|end_file_name|><|fim▁begin|>/*
*
* RayTrace Software Package, release 1.0.3, July 2003.
*
* Author: Samuel R. Buss
*
* Software accompanying the book
* 3D Computer Graphics: A Mathematical Introduction with OpenGL,
* by S. Buss, Cambridge University Press, 2003.
*
* Software is "as-is" and carries no warranty. It may be used without
* restriction, but if you modify it, please change the filenames to
* prevent confusion between different versions. Please acknowledge
* all use of the software in any publications or products based on it.
*
* Bug reports: Sam Buss, [email protected].
* Web page: http://math.ucsd.edu/~sbuss/MathCG
*
*/
#include "RgbImage.h"
#ifndef RGBIMAGE_DONT_USE_OPENGL
#include <windows.h>
#include "GL/gl.h"
#endif
// Construct a zero-filled image of numRows x numCols 24-bit RGB pixels.
// Rows are padded to a multiple of four bytes (BMP convention), hence the
// use of GetNumBytesPerRow() rather than 3*NumCols.
RgbImage::RgbImage( int numRows, int numCols )
{
	NumRows = numRows;
	NumCols = numCols;
	ImagePtr = new unsigned char[NumRows*GetNumBytesPerRow()];
	if ( !ImagePtr ) {
		fprintf(stderr, "Unable to allocate memory for %ld x %ld bitmap.\n",
				NumRows, NumCols);
		Reset();
		ErrorCode = MemoryError;
	}
	// Zero out the image
	// NOTE(review): after an allocation failure, Reset() presumably clears
	// NumRows/ImagePtr so the loops below are a no-op -- confirm in RgbImage.h.
	unsigned char* c = ImagePtr;
	int rowLen = GetNumBytesPerRow();
	for ( int i=0; i<NumRows; i++ ) {
		for ( int j=0; j<rowLen; j++ ) {
			*(c++) = 0;
		}
	}
}
/* ********************************************************************
* LoadBmpFile
* Read into memory an RGB image from an uncompressed BMP file.
* Return true for success, false for failure. Error code is available
* with a separate call.
* Author: Sam Buss December 2001.
**********************************************************************/
// Load an uncompressed 24-bit BMP file into memory.  Returns true on success;
// on failure the image is Reset() and ErrorCode records the cause.
// Pixels are stored in memory as R,G,B triples (the file stores B,G,R), in
// the file's bottom-up row order.
bool RgbImage::LoadBmpFile( const char* filename )
{
	Reset();
	FILE* infile = fopen( filename, "rb" );		// Open for reading binary data
	if ( !infile ) {
		fprintf(stderr, "Unable to open file: %s\n", filename);
		ErrorCode = OpenError;
		return false;
	}

	bool fileFormatOK = false;
	int bChar = fgetc( infile );
	int mChar = fgetc( infile );
	if ( bChar=='B' && mChar=='M' ) {			// If starts with "BM" for "BitMap"
		skipChars( infile, 4+2+2+4+4 );			// Skip 4 fields we don't care about
		NumCols = readLong( infile );
		NumRows = readLong( infile );
		skipChars( infile, 2 );					// Skip one field
		int bitsPerPixel = readShort( infile );
		skipChars( infile, 4+4+4+4+4+4 );		// Skip 6 more fields
		// sanity bounds on dimensions; only 24 bits/pixel is supported
		if ( NumCols>0 && NumCols<=100000 && NumRows>0 && NumRows<=100000
			&& bitsPerPixel==24 && !feof(infile) ) {
			fileFormatOK = true;
		}
	}
	if ( !fileFormatOK ) {
		Reset();
		ErrorCode = FileFormatError;
		fprintf(stderr, "Not a valid 24-bit bitmap file: %s.\n", filename);
		fclose ( infile );
		return false;
	}

	// Allocate memory
	ImagePtr = new unsigned char[NumRows*GetNumBytesPerRow()];
	if ( !ImagePtr ) {
		fprintf(stderr, "Unable to allocate memory for %ld x %ld bitmap: %s.\n",
				NumRows, NumCols, filename);
		Reset();
		ErrorCode = MemoryError;
		fclose ( infile );
		return false;
	}

	unsigned char* cPtr = ImagePtr;
	for ( int i=0; i<NumRows; i++ ) {
		int j;
		for ( j=0; j<NumCols; j++ ) {
			*(cPtr+2) = fgetc( infile );	// Blue color value
			*(cPtr+1) = fgetc( infile );	// Green color value
			*cPtr = fgetc( infile );		// Red color value
			cPtr += 3;
		}
		// consume the row's padding bytes and zero the matching memory slots
		int k=3*j;							// Num bytes already read
		for ( ; k<GetNumBytesPerRow(); k++ ) {
			fgetc( infile );				// Read and ignore padding;
			*(cPtr++) = 0;
		}
	}

	// EOF is only checked once at the end; a short file is detected here
	if ( feof( infile ) ) {
		fprintf( stderr, "Premature end of file: %s.\n", filename );
		Reset();
		ErrorCode = ReadError;
		fclose ( infile );
		return false;
	}

	fclose( infile );	// Close the file
	return true;
}
// Read a little-endian 16 bit integer (low byte first, then high byte).
short RgbImage::readShort( FILE* infile )
{
	int lo = fgetc( infile );
	int hi = fgetc( infile );
	return (short)(((hi & 0xff) << 8) | (lo & 0xff));
}
// Read a little-endian 32 bit integer, accumulating the low-order byte first.
long RgbImage::readLong( FILE* infile )
{
	long value = 0;
	for ( int shift = 0; shift < 32; shift += 8 ) {
		long byte = fgetc( infile ) & 0xff;
		value |= byte << shift;
	}
	return value;
}
// Consume and discard the next numChars bytes of the stream.
void RgbImage::skipChars( FILE* infile, int numChars )
{
	int remaining = numChars;
	while ( remaining-- > 0 ) {
		fgetc( infile );
	}
}
/* ********************************************************************
* WriteBmpFile
* Write an RGB image to an uncompressed BMP file.
* Return true for success, false for failure. Error code is available
* with a separate call.
* Author: Sam Buss, January 2003.
**********************************************************************/
// Write the image as an uncompressed 24-bit BMP file.  Returns true on
// success; on open failure ErrorCode is set and false is returned.
// Pixels are emitted in B,G,R order (memory holds R,G,B) with each row
// padded to a four-byte boundary, matching the headers written below.
bool RgbImage::WriteBmpFile( const char* filename )
{
	FILE* outfile = fopen( filename, "wb" );	// Open for writing binary data
	if ( !outfile ) {
		fprintf(stderr, "Unable to open file: %s\n", filename);
		ErrorCode = OpenError;
		return false;
	}

	fputc('B',outfile);
	fputc('M',outfile);
	int rowLen = GetNumBytesPerRow();
	writeLong( 40+14+NumRows*rowLen, outfile );	// Length of file
	writeShort( 0, outfile );					// Reserved for future use
	writeShort( 0, outfile );
	writeLong( 40+14, outfile );				// Offset to pixel data
	writeLong( 40, outfile );					// header length
	writeLong( NumCols, outfile );				// width in pixels
	writeLong( NumRows, outfile );				// height in pixels (pos for bottom up)
	writeShort( 1, outfile );					// number of planes
	writeShort( 24, outfile );					// bits per pixel
	writeLong( 0, outfile );					// no compression
	writeLong( 0, outfile );					// not used if no compression
	writeLong( 0, outfile );					// Pixels per meter
	writeLong( 0, outfile );					// Pixels per meter
	writeLong( 0, outfile );					// unused for 24 bits/pixel
	writeLong( 0, outfile );					// unused for 24 bits/pixel

	// Now write out the pixel data:
	unsigned char* cPtr = ImagePtr;
	for ( int i=0; i<NumRows; i++ ) {
		// Write out i-th row's data
		int j;
		for ( j=0; j<NumCols; j++ ) {
			fputc( *(cPtr+2), outfile);		// Blue color value
			fputc( *(cPtr+1), outfile);		// Green color value
			fputc( *(cPtr+0), outfile);		// Red color value
			cPtr+=3;
		}
		// Pad row to word boundary
		int k=3*j;							// Num bytes already written
		for ( ; k<GetNumBytesPerRow(); k++ ) {
			fputc( 0, outfile );			// Write a zero padding byte
			cPtr++;
		}
	}

	fclose( outfile );	// Close the file
	return true;
}
// Emit the low-order 32 bits of "data" in little-endian byte order.
void RgbImage::writeLong( long data, FILE* outfile )
{
	for ( int shift = 0; shift < 32; shift += 8 ) {
		fputc( (unsigned char)((data >> shift) & 0xff), outfile );
	}
}
// Emit a 16 bit value in little-endian byte order (low byte first).
void RgbImage::writeShort( short data, FILE* outfile )
{
	unsigned char lo = (unsigned char)(data & 0xff);
	unsigned char hi = (unsigned char)((data >> 8) & 0xff);
	fputc( lo, outfile );
	fputc( hi, outfile );
}
/*********************************************************************
* SetRgbPixel routines allow changing the contents of the RgbImage. *
*********************************************************************/
// Set a pixel from floating point components in [0,1]; values are clamped
// and quantized to bytes by doubleToUnsignedChar.
void RgbImage::SetRgbPixelf( long row, long col, double red, double green, double blue )
{
	SetRgbPixelc( row, col, doubleToUnsignedChar(red),
					doubleToUnsignedChar(green),
					doubleToUnsignedChar(blue) );
}
// Store one pixel; the three components live consecutively as R,G,B.
void RgbImage::SetRgbPixelc( long row, long col,
							 unsigned char red, unsigned char green, unsigned char blue )
{
	assert ( row<NumRows && col<NumCols );
	unsigned char* px = GetRgbPixel( row, col );
	px[0] = red;
	px[1] = green;
	px[2] = blue;
}
// Map [0,1] to [0,255], clamping out-of-range input; truncation rounds down.
unsigned char RgbImage::doubleToUnsignedChar( double x )
{
	if ( x <= 0.0 ) {
		return (unsigned char)0;
	}
	if ( x >= 1.0 ) {
		return (unsigned char)255;
	}
	return (unsigned char)(x*255.0);
}
// Bitmap file format (24 bit/pixel form) BITMAPFILEHEADER
// Header (14 bytes)
// 2 bytes: "BM"
// 4 bytes: long int, file size
// 4 bytes: reserved (actually 2 bytes twice)
// 4 bytes: long int, offset to raster data
// Info header (40 bytes) BITMAPINFOHEADER
// 4 bytes: long int, size of info header (=40)
// 4 bytes: long int, bitmap width in pixels
// 4 bytes: long int, bitmap height in pixels
// 2 bytes: short int, number of planes (=1)
// 2 bytes: short int, bits per pixel
// 4 bytes: long int, type of compression (not applicable to 24 bits/pixel)
// 4 bytes: long int, image size (not used unless compression is used)
// 4 bytes: long int, x pixels per meter
// 4 bytes: long int, y pixels per meter<|fim▁hole|>// 4 bytes: colors used (not applicable to 24 bit color)
// 4 bytes: colors important (not applicable to 24 bit color)
// "long int" really means "unsigned long int"
// Pixel data: 3 bytes per pixel: RGB values (in reverse order).
// Rows padded to multiples of four.
#ifndef RGBIMAGE_DONT_USE_OPENGL
bool RgbImage::LoadFromOpenglBuffer()					// Load the bitmap from the current OpenGL buffer
{
	int viewportData[4];
	glGetIntegerv( GL_VIEWPORT, viewportData );
	int& vWidth = viewportData[2];
	int& vHeight = viewportData[3];

	// Lazily size the image to the current viewport on first use.
	if ( ImagePtr==0 ) {	// If no memory allocated
		NumRows = vHeight;
		NumCols = vWidth;
		ImagePtr = new unsigned char[NumRows*GetNumBytesPerRow()];
		if ( !ImagePtr ) {
			fprintf(stderr, "Unable to allocate memory for %ld x %ld buffer.\n",
					NumRows, NumCols);
			Reset();
			ErrorCode = MemoryError;
			return false;
		}
	}
	assert ( vWidth>=NumCols && vHeight>=NumRows );
	int oldGlRowLen;
	// When the viewport is wider than the image, tell GL our row length so
	// glReadPixels packs rows at NumCols pixels each.
	if ( vWidth>=NumCols ) {
		glGetIntegerv( GL_UNPACK_ROW_LENGTH, &oldGlRowLen );
		glPixelStorei( GL_UNPACK_ROW_LENGTH, NumCols );
	}
	// 4-byte row alignment matches GetNumBytesPerRow()'s padding.
	glPixelStorei(GL_UNPACK_ALIGNMENT, 4);

	// Get the frame buffer data.
	glReadPixels( 0, 0, NumCols, NumRows, GL_RGB, GL_UNSIGNED_BYTE, ImagePtr);

	// Restore the row length in glPixelStorei  (really ought to restore alignment too).
	if ( vWidth>=NumCols ) {
		glPixelStorei( GL_UNPACK_ROW_LENGTH, oldGlRowLen );
	}
	return true;
}
#endif // RGB_IMAGE_DONT_USE_OPENGL<|fim▁end|> | |
<|file_name|>TransitionEdge.java<|end_file_name|><|fim▁begin|>/**
* The MIT License
* Copyright (c) 2003 David G Jones
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package info.dgjones.abora.white.edgeregion;
import java.io.PrintWriter;
import info.dgjones.abora.white.rcvr.Rcvr;
import info.dgjones.abora.white.rcvr.Xmtr;
import info.dgjones.abora.white.spaces.basic.Position;
import info.dgjones.abora.white.xpp.basic.Heaper;
/**
* Clients of EdgeManager define concrete subclasses of this, which are then used by the
* EdgeManager code
*/
public abstract class TransitionEdge extends Heaper {
/*
udanax-top.st:63348:
Heaper subclass: #TransitionEdge
instanceVariableNames: ''
classVariableNames: ''
poolDictionaries: ''
category: 'Xanadu-EdgeRegion'!
*/
/*
udanax-top.st:63352:
TransitionEdge comment:
'Clients of EdgeManager define concrete subclasses of this, which are then used by the EdgeManager code'!
*/
/*
udanax-top.st:63354:
(TransitionEdge getOrMakeCxxClassDescription)
attributes: ((Set new) add: #DEFERRED; add: #COPY; yourself)!
*/
/////////////////////////////////////////////<|fim▁hole|> }
	/**
	 * Return the larger of this edge and {@code other} under the total
	 * ordering defined by {@link #isGE(TransitionEdge)}.
	 */
	public TransitionEdge ceiling(TransitionEdge other) {
		if (other.isGE(this)) {
			return other;
		} else {
			return this;
		}
		/*
		udanax-top.st:63359:TransitionEdge methodsFor: 'accessing'!
		{TransitionEdge} ceiling: other {TransitionEdge}
			(other isGE: self)
				ifTrue: [^other]
				ifFalse: [^self]!
		*/
	}
	/**
	 * Return the smaller of this edge and {@code other} under the total
	 * ordering defined by {@link #isGE(TransitionEdge)}.
	 */
	public TransitionEdge floor(TransitionEdge other) {
		if (isGE(other)) {
			return other;
		} else {
			return this;
		}
		/*
		udanax-top.st:63365:TransitionEdge methodsFor: 'accessing'!
		{TransitionEdge} floor: other {TransitionEdge}
			(self isGE: other)
				ifTrue: [^other]
				ifFalse: [^self]!
		*/
	}
public int actualHashForEqual() {
return System.identityHashCode(this);
// return Heaper.takeOop();
/*
udanax-top.st:63373:TransitionEdge methodsFor: 'testing'!
{UInt32} actualHashForEqual
^Heaper takeOop!
*/
}
/**
* Whether the position is strictly less than this edge
*/
public abstract boolean follows(Position pos);
/*
udanax-top.st:63377:TransitionEdge methodsFor: 'testing'!
{BooleanVar} follows: pos {Position}
"Whether the position is strictly less than this edge"
self subclassResponsibility!
*/
public abstract boolean isEqual(Heaper other);
/*
udanax-top.st:63382:TransitionEdge methodsFor: 'testing'!
{BooleanVar} isEqual: other {Heaper}
self subclassResponsibility!
*/
/**
* Whether there is precisely one position between this edge and the next one
*/
public abstract boolean isFollowedBy(TransitionEdge next);
/*
udanax-top.st:63386:TransitionEdge methodsFor: 'testing'!
{BooleanVar} isFollowedBy: next {TransitionEdge}
"Whether there is precisely one position between this edge and the next one"
self subclassResponsibility!
*/
/**
* Defines a full ordering among all edges in a given CoordinateSpace
*/
public abstract boolean isGE(TransitionEdge other);
/*
udanax-top.st:63391:TransitionEdge methodsFor: 'testing'!
{BooleanVar} isGE: other {TransitionEdge}
"Defines a full ordering among all edges in a given CoordinateSpace"
self subclassResponsibility!
*/
/**
* Whether this edge touches the same position the other does
*/
public abstract boolean touches(TransitionEdge other);
/*
udanax-top.st:63396:TransitionEdge methodsFor: 'testing'!
{BooleanVar} touches: other {TransitionEdge}
"Whether this edge touches the same position the other does"
self subclassResponsibility!
*/
/**
* Print a description of this transition
*/
public abstract void printTransitionOn(PrintWriter oo, boolean entering, boolean touchesPrevious);
/*
udanax-top.st:63403:TransitionEdge methodsFor: 'printing'!
{void} printTransitionOn: oo {ostream reference}
with: entering {BooleanVar}
with: touchesPrevious {BooleanVar}
"Print a description of this transition"
self subclassResponsibility!
*/
public TransitionEdge(Rcvr receiver) {
super(receiver);
/*
udanax-top.st:63412:TransitionEdge methodsFor: 'generated:'!
create.Rcvr: receiver {Rcvr}
super create.Rcvr: receiver.!
*/
}
public void sendSelfTo(Xmtr xmtr) {
super.sendSelfTo(xmtr);
/*
udanax-top.st:63415:TransitionEdge methodsFor: 'generated:'!
{void} sendSelfTo: xmtr {Xmtr}
super sendSelfTo: xmtr.!
*/
}
}<|fim▁end|> | // Constructors
protected TransitionEdge() {
super(); |
<|file_name|>Listener.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 Santhosh Kumar Tekuri
*
* The JLibs authors license this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package jlibs.wamp4j.spi;
import java.io.InputStream;
public interface Listener{
public void onMessage(WAMPSocket socket, MessageType type, InputStream is);
public void onReadComplete(WAMPSocket socket);
public void readyToWrite(WAMPSocket socket);
public void onError(WAMPSocket socket, Throwable error);
public void onClose(WAMPSocket socket);<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>MemTable.cpp<|end_file_name|><|fim▁begin|>#include "MemTable.h"
using namespace std;
json MemTable::Table;
template <class type>
LinkedList<type> ListAdapter<type>::SmartList;
int MemTable::getSize(long ID) {
int size = 0;
std::string number;
std::stringstream strstream;
strstream << ID;
strstream >> number;
for (json::iterator it = Table.begin(); it != Table.end(); ++it){
std::string Key = it.key();
if(number == Key){
size = it.value().at(1);
break;
}
}<|fim▁hole|>void *MemTable::getPosition(long ID) {
void* ptr = nullptr;
std::string number;
std::stringstream strstream;
strstream << ID;
strstream >> number;
for (json::iterator it = Table.begin(); it != Table.end(); ++it){
std::string Key = it.key();
if(number == Key){
intptr_t pointer = it.value().at(0);
ptr = reinterpret_cast<void*>(pointer);
break;
}
}
return ptr;
}
// Compact one tracked object: copy its bytes down to `destination`, wipe the
// old location, and record the new address/size for `ID` in BurpingTable.
// Returns the first free byte after the moved object (the next copy target).
void *MemTable::burp(void * destination, void * source, size_t objectSize, std::string ID) {
    memcpy(destination, source, objectSize);
    memset(source, 0, objectSize);
    BurpingTable.erase(ID);
    intptr_t newPointer = (intptr_t) destination;
    BurpingTable[ID] = {newPointer, objectSize};
    // NOTE(review): pointer arithmetic on void* is a GNU extension (byte
    // granularity assumed) -- confirm the build relies on it intentionally.
    void * finalPointer = destination + objectSize;
    return finalPointer;
}
void MemTable::deleteFromTable(long ID) {
void * voidPointer= getPosition(ID);
size_t pointerSize = (size_t) getSize(ID);
Manager.FreeMem(voidPointer, pointerSize);
std::string number;
std::stringstream strstream;
strstream << ID;
strstream >> number;
Table.erase(number);
void * iterPointer = voidPointer;
BurpingTable = Table;
for (json::iterator it = Table.begin(); it != Table.end(); ++it) {
std::string key = it.key();
size_t tempObjSize = (size_t) it.value().at(1);
intptr_t tempPointer = it.value().at(0);
void * burpPointer = reinterpret_cast<void*>(tempPointer);
if (voidPointer < burpPointer == 1){
void * newObjectPointer = burp(iterPointer, burpPointer, tempObjSize, key);
iterPointer = newObjectPointer;
}
}
Manager.setCurrentMem(iterPointer);
Table = BurpingTable;
BurpingTable.clear();
std::cout << "ID: [pointer address, memory size]" << std::endl;
std::cout << Table <<"\n"<< std::endl;
}<|fim▁end|> |
return size;
}
|
<|file_name|>SipReplaces.py<|end_file_name|><|fim▁begin|># Copyright (c) 2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: [email protected].
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.<|fim▁hole|>from SipGenericHF import SipGenericHF
class SipReplaces(SipGenericHF):
hf_names = ('replaces',)
call_id = None
from_tag = None
to_tag = None
early_only = False
params = None
    def __init__(self, body = None, call_id = None, from_tag = None, to_tag = None, \
      early_only = False, params = None):
        """Create a Replaces header.

        Either pass a raw header ``body`` (parsing is deferred until
        parse() is called), or build the header from its parts.
        """
        SipGenericHF.__init__(self, body)
        if body != None:
            return
        self.parsed = True
        self.params = []
        self.call_id = call_id
        self.from_tag = from_tag
        self.to_tag = to_tag
        self.early_only = early_only
        if params != None:
            # copy so the caller's list cannot be mutated through this object
            self.params = params[:]
def parse(self):
self.parsed = True
self.params = []
params = self.body.split(';')
self.call_id = params.pop(0)
for param in params:
if param.startswith('from-tag='):
self.from_tag = param[len('from-tag='):]
elif param.startswith('to-tag='):
self.to_tag = param[len('to-tag='):]
elif param == 'early-only':
self.early_only = True
else:
self.params.append(param)
def __str__(self):
if not self.parsed:
return self.body
res = '%s;from-tag=%s;to-tag=%s' % (self.call_id, self.from_tag, self.to_tag)
if self.early_only:
res += ';early-only'
for param in self.params:
res += ';' + param
return res
def getCopy(self):
if not self.parsed:
return SipReplaces(self.body)
return SipReplaces(call_id = self.call_id, from_tag = self.from_tag, to_tag = self.to_tag, \
early_only = self.early_only, params = self.params)<|fim▁end|> | |
<|file_name|>compose.go<|end_file_name|><|fim▁begin|>// Package compose aims to provide simple "helper" methods to ease the use of
// compose (through libcompose) in (integration) tests.
package compose
import (
"fmt"
"regexp"
"strings"
"golang.org/x/net/context"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/filters"
"github.com/docker/docker/client"
"github.com/docker/libcompose/config"
"github.com/docker/libcompose/docker"
"github.com/docker/libcompose/docker/ctx"
"github.com/docker/libcompose/project"
"github.com/docker/libcompose/project/events"
"github.com/docker/libcompose/project/options"
d "github.com/libkermit/docker"
)
// Project holds compose related project attributes
type Project struct {
composeFiles []string
composeProject project.APIProject
name string
listenChan chan events.Event
started chan struct{}
stopped chan struct{}
deleted chan struct{}
client client.APIClient
hasOpenedChan bool
}
// CreateProject creates a compose project with the given name based on the
// specified compose files
func CreateProject(name string, composeFiles ...string) (*Project, error) {
// FIXME(vdemeester) temporarly normalize the project name, should not be needed.
r := regexp.MustCompile("[^a-z0-9]+")
name = r.ReplaceAllString(strings.ToLower(name), "")
apiClient, err := client.NewEnvClient()
if err != nil {
return nil, err
}
// FIXME(vdemeester) fix this
apiClient.UpdateClientVersion(d.CurrentAPIVersion)
composeProject, err := docker.NewProject(&ctx.Context{
Context: project.Context{
ComposeFiles: composeFiles,
ProjectName: name,
},
}, &config.ParseOptions{
Interpolate: true,
Validate: true,
})
if err != nil {
return nil, err
}
p := &Project{
composeFiles: composeFiles,
composeProject: composeProject,
name: name,
listenChan: make(chan events.Event),
started: make(chan struct{}),
stopped: make(chan struct{}),
deleted: make(chan struct{}),
client: apiClient,
hasOpenedChan: true,
}
// Listen to compose events
go p.startListening()
p.composeProject.AddListener(p.listenChan)
return p, nil
}
// Start creates and starts the compose project.
func (p *Project) Start(services ...string) error {
// If project chan are closed, recreate new compose project
if !p.hasOpenedChan {
newProject, _ := CreateProject(p.name, p.composeFiles...)
*p = *newProject
}
ctx := context.Background()
err := p.composeProject.Create(ctx, options.Create{})
if err != nil {
return err
}
return p.StartOnly(services...)
}
// StartOnly only starts created services which are stopped.
func (p *Project) StartOnly(services ...string) error {
ctx := context.Background()
err := p.composeProject.Start(ctx, services...)
if err != nil {
return err
}
// Wait for compose to start
<-p.started
return nil
}
// StopOnly only stop services without delete them.
func (p *Project) StopOnly(services ...string) error {
ctx := context.Background()
err := p.composeProject.Stop(ctx, 10, services...)
if err != nil {
return err
}
<-p.stopped
return nil
}
// Stop shuts down and clean the project
func (p *Project) Stop(services ...string) error {
// FIXME(vdemeester) handle timeout
err := p.StopOnly(services...)
if err != nil {
return err
}
err = p.composeProject.Delete(context.Background(), options.Delete{}, services...)
if err != nil {
return err
}
<-p.deleted
existingContainers, err := p.existContainers(project.AnyState)
if err != nil {
return err
}
// Close channels only if there are no running services
if !existingContainers {
p.hasOpenedChan = false
close(p.started)
close(p.stopped)
close(p.deleted)
close(p.listenChan)
}
return nil
}<|fim▁hole|>// Check if containers exist in the desirated state for the given services
func (p *Project) existContainers(stateFiltered project.State, services ...string) (bool, error) {
existingContainers := false
var err error
containersFound, err := p.composeProject.Containers(context.Background(), project.Filter{stateFiltered})
if err == nil && containersFound != nil && len(containersFound) > 0 {
existingContainers = true
}
return existingContainers, err
}
// Scale scale a service up
func (p *Project) Scale(service string, count int) error {
return p.composeProject.Scale(context.Background(), 10, map[string]int{
service: count,
})
}
func (p *Project) startListening() {
for event := range p.listenChan {
// FIXME Add a timeout on event ?
if event.EventType == events.ProjectStartDone {
p.started <- struct{}{}
}
if event.EventType == events.ProjectStopDone {
p.stopped <- struct{}{}
}
if event.EventType == events.ProjectDeleteDone {
p.deleted <- struct{}{}
}
}
}
// Containers lists containers for a given services.
func (p *Project) Containers(service string) ([]types.ContainerJSON, error) {
ctx := context.Background()
containers := []types.ContainerJSON{}
// Let's use engine-api for now as there is nothing really useful in
// libcompose for now.
filter := filters.NewArgs()
filter.Add("label", "com.docker.compose.project="+p.name)
filter.Add("label", "com.docker.compose.service="+service)
containerList, err := p.client.ContainerList(ctx, types.ContainerListOptions{
Filters: filter,
})
if err != nil {
return containers, err
}
for _, c := range containerList {
container, err := p.client.ContainerInspect(ctx, c.ID)
if err != nil {
return containers, err
}
containers = append(containers, container)
}
return containers, nil
}
// Container returns the one and only container for a given services. It returns an error
// if the service has more than one container (in case of scale)
func (p *Project) Container(service string) (types.ContainerJSON, error) {
containers, err := p.Containers(service)
if err != nil {
return types.ContainerJSON{}, err
}
if len(containers) > 1 {
return types.ContainerJSON{}, fmt.Errorf("More than one container are running for '%s' service", service)
}
if len(containers) == 0 {
return types.ContainerJSON{}, fmt.Errorf("No container found for '%s' service", service)
}
return containers[0], nil
}<|fim▁end|> | |
<|file_name|>tuple.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations on tuples
//!
//! To access the _N_-th element of a tuple one can use `N` itself
//! as a field of the tuple.
//!
//! Indexing starts from zero, so `0` returns first value, `1`
//! returns second value, and so on. In general, a tuple with _S_
//! elements provides aforementioned fields from `0` to `S-1`.
//!
//! If every type inside a tuple implements one of the following
//! traits, then a tuple itself also implements it.
//!
//! * `Clone`
//! * `PartialEq`
//! * `Eq`
//! * `PartialOrd`
//! * `Ord`
//! * `Default`
//!
//! # Examples
//!
//! Accessing elements of a tuple at specified indices:
//!
//! ```<|fim▁hole|>//!
//! let v = (3, 3);
//! let u = (1, -5);
//! assert_eq!(v.0 * u.0 + v.1 * u.1, -12);
//! ```
//!
//! Using traits implemented for tuples:
//!
//! ```
//! let a = (1, 2);
//! let b = (3, 4);
//! assert!(a != b);
//!
//! let c = b.clone();
//! assert!(b == c);
//!
//! let d : (u32, f32) = Default::default();
//! assert_eq!(d, (0, 0.0f32));
//! ```
#![doc(primitive = "tuple")]
#![stable(feature = "rust1", since = "1.0.0")]<|fim▁end|> | //! let x = ("colorless", "green", "ideas", "sleep", "furiously");
//! assert_eq!(x.3, "sleep"); |
<|file_name|>test_hostgroup.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Test class for :class:`robottelo.cli.hostgroup.HostGroup` CLI.
@Requirement: Hostgroup
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: CLI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from fauxfactory import gen_string
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.contentview import ContentView
from robottelo.cli.hostgroup import HostGroup<|fim▁hole|> make_content_view,
make_domain,
make_environment,
make_hostgroup,
make_lifecycle_environment,
make_location,
make_medium,
make_org,
make_os,
make_partition_table,
make_subnet,
)
from robottelo.config import settings
from robottelo.datafactory import (
invalid_id_list,
invalid_values_list,
valid_hostgroups_list,
)
from robottelo.decorators import (
bz_bug_is_open,
run_only_on,
skip_if_bug_open,
tier1,
tier2,
)
from robottelo.test import CLITestCase
class HostGroupTestCase(CLITestCase):
"""Test class for Host Group CLI"""
@tier1
def test_positive_create_with_name(self):
"""Successfully creates an HostGroup.
@id: f5f2056f-d090-4e0d-8fb9-d29255a47908
@Assert: HostGroup is created.
"""
for name in valid_hostgroups_list():
with self.subTest(name):
hostgroup = make_hostgroup({'name': name})
self.assertEqual(hostgroup['name'], name)
@tier1
def test_negative_create_with_name(self):
"""Don't create an HostGroup with invalid data.
@id: 853a6d43-129a-497b-94f0-08dc622862f8
@Assert: HostGroup is not created.
"""
for name in invalid_values_list():
with self.subTest(name):
with self.assertRaises(CLIReturnCodeError):
HostGroup.create({'name': name})
@run_only_on('sat')
@tier1
def test_positive_create_with_env(self):
"""Check if hostgroup with environment can be created
@id: f1bfb333-90cf-4a9f-b183-cf77c1773247
@Assert: Hostgroup is created and has new environment assigned
"""
environment = make_environment()
hostgroup = make_hostgroup({'environment-id': environment['id']})
self.assertEqual(environment['name'], hostgroup['environment'])
@run_only_on('sat')
@tier1
def test_positive_create_with_loc(self):
"""Check if hostgroup with location can be created
@id: 84ae02a4-ea7e-43ce-87bd-7bbde3766b14
@Assert: Hostgroup is created and has new location assigned
"""
location = make_location()
hostgroup = make_hostgroup({'location-ids': location['id']})
self.assertIn(location['name'], hostgroup['locations'])
@run_only_on('sat')
@tier1
def test_positive_create_with_os(self):
"""Check if hostgroup with operating system can be created
@id: d12c5939-1aac-44f5-8aa3-a04a824f4e83
@Assert: Hostgroup is created and has operating system assigned
"""
os = make_os()
hostgroup = make_hostgroup({'operatingsystem-id': os['id']})
self.assertEqual(hostgroup['operating-system'], os['title'])
@run_only_on('sat')
@tier1
def test_positive_create_with_org(self):
"""Check if hostgroup with organization can be created
@id: 780d4b93-f35a-4c5b-a645-4053aed4c37b
@Assert: Hostgroup is created and has new organization assigned
"""
org = make_org()
hostgroup = make_hostgroup({'organization-ids': org['id']})
self.assertIn(org['name'], hostgroup['organizations'])
@tier1
def test_positive_create_with_orgs(self):
"""Check if hostgroup with multiple organizations can be created
@id: 32be4630-0032-4f5f-89d4-44f8d05fe585
@Assert: Hostgroup is created and has both new organizations assigned
"""
orgs = [make_org() for _ in range(2)]
hostgroup = make_hostgroup({
'organization-ids': [org['id'] for org in orgs],
})
self.assertEqual(
set(org['name'] for org in orgs),
set(hostgroup['organizations'])
)
@run_only_on('sat')
@tier1
def test_positive_create_with_puppet_ca_proxy(self):
"""Check if hostgroup with puppet CA proxy server can be created
@id: f7ea1c94-8a0e-4500-98b3-0ecd63b3ce3c
@Assert: Hostgroup is created and has puppet CA proxy server assigned
"""
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
hostgroup = make_hostgroup({'puppet-ca-proxy': puppet_proxy['name']})
self.assertEqual(puppet_proxy['id'], hostgroup['puppet-ca-proxy-id'])
@run_only_on('sat')
@tier1
def test_positive_create_with_puppet_proxy(self):
"""Check if hostgroup with puppet proxy server can be created
@id: 3a922d9f-7466-4565-b279-c1481f63a4ce
@Assert: Hostgroup is created and has puppet proxy server assigned
"""
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
hostgroup = make_hostgroup({'puppet-proxy': puppet_proxy['name']})
self.assertEqual(
puppet_proxy['id'],
hostgroup['puppet-master-proxy-id'],
)
@skip_if_bug_open('bugzilla', 1354544)
@run_only_on('sat')
@tier1
def test_positive_create_with_architecture(self):
"""Check if hostgroup with architecture can be created
@id: 21c619f4-7339-4fb0-9e29-e12dae65f943
@Assert: Hostgroup should be created and has architecture assigned
@BZ: 1354544
"""
arch = 'x86_64'
hostgroup = make_hostgroup({'architecture': arch})
self.assertEqual(arch, hostgroup['architecture'])
@run_only_on('sat')
@tier1
def test_positive_create_with_domain(self):
"""Check if hostgroup with domain can be created
@id: c468fcac-9e42-4ee6-a431-abe29b6848ce
@Assert: Hostgroup should be created and has domain assigned
"""
domain = make_domain()
hostgroup = make_hostgroup({'domain-id': domain['id']})
self.assertEqual(domain['name'], hostgroup['domain'])
@skip_if_bug_open('bugzilla', 1313056)
@run_only_on('sat')
@tier1
def test_positive_create_with_lifecycle_environment(self):
"""Check if hostgroup with lifecyle environment can be created
@id: 24bc3010-4e61-47d8-b8ae-0d66e1055aea
@Assert: Hostgroup should be created and has lifecycle env assigned
@BZ: 1359694
"""
org = make_org()
lc_env = make_lifecycle_environment({'organization-id': org['id']})
hostgroup = make_hostgroup({
'lifecycle-environment': lc_env['name'],
'organization-id': org['id'],
})
self.assertEqual(
lc_env['name'],
hostgroup['lifecycle-environment'],
)
@tier1
def test_positive_create_with_orgs_and_lce(self):
"""Check if hostgroup with multiple organizations can be created
if one of them is associated with lifecycle environment
@id: ca110a74-401d-48f9-9700-6c57f1c10f11
@Assert: Hostgroup is created, has both new organizations assigned
and has lifecycle env assigned
"""
orgs = [make_org() for _ in range(2)]
lce = make_lifecycle_environment({'organization-id': orgs[0]['id']})
hostgroup = make_hostgroup({
'organization-ids': [org['id'] for org in orgs],
'lifecycle-environment-id': lce['id'],
})
self.assertEqual(
set(org['name'] for org in orgs),
set(hostgroup['organizations'])
)
@run_only_on('sat')
@tier2
def test_positive_create_with_multiple_entities(self):
"""Check if hostgroup with multiple options can be created
@id: a3ef4f0e-971d-4307-8d0a-35103dff6586
@Assert: Hostgroup should be created and has all defined entities
assigned
@CaseLevel: Integration
"""
# Common entities
loc = make_location()
org = make_org()
env = make_environment({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
lce = make_lifecycle_environment({'organization-id': org['id']})
puppet_proxy = Proxy.list({
'search': 'url = https://{0}:9090'.format(settings.server.hostname)
})[0]
# Content View should be promoted to be used with LC Env
cv = make_content_view({'organization-id': org['id']})
ContentView.publish({'id': cv['id']})
cv = ContentView.info({'id': cv['id']})
ContentView.version_promote({
'id': cv['versions'][0]['id'],
'to-lifecycle-environment-id': lce['id'],
})
# Network
domain = make_domain({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
subnet = make_subnet({
'domain-ids': domain['id'],
'organization-ids': org['id'],
})
# Operating System
arch = make_architecture()
ptable = make_partition_table({
'location-ids': loc['id'],
'organization-ids': org['id'],
})
os = make_os({
'architecture-ids': arch['id'],
'partition-table-ids': ptable['id'],
})
media = make_medium({
'operatingsystem-ids': os['id'],
'location-ids': loc['id'],
'organization-ids': org['id'],
})
make_hostgroup_params = {
'location-ids': loc['id'],
'environment-id': env['id'],
'lifecycle-environment': lce['name'],
'puppet-proxy-id': puppet_proxy['id'],
'puppet-ca-proxy-id': puppet_proxy['id'],
'content-view-id': cv['id'],
'domain-id': domain['id'],
'subnet-id': subnet['id'],
'organization-ids': org['id'],
'architecture-id': arch['id'],
'partition-table-id': ptable['id'],
'medium-id': media['id'],
'operatingsystem-id': os['id'],
}
# If bug is open provide LCE id as parameter
# because LCE name cause errors
if bz_bug_is_open(1395254):
make_hostgroup_params.pop('lifecycle-environment')
make_hostgroup_params['lifecycle-environment-id'] = lce['id']
hostgroup = make_hostgroup(make_hostgroup_params)
self.assertIn(org['name'], hostgroup['organizations'])
self.assertIn(loc['name'], hostgroup['locations'])
self.assertEqual(env['name'], hostgroup['environment'])
self.assertEqual(
puppet_proxy['id'], hostgroup['puppet-master-proxy-id']
)
self.assertEqual(puppet_proxy['id'], hostgroup['puppet-ca-proxy-id'])
self.assertEqual(domain['name'], hostgroup['domain'])
self.assertEqual(subnet['name'], hostgroup['subnet'])
self.assertEqual(arch['name'], hostgroup['architecture'])
self.assertEqual(ptable['name'], hostgroup['partition-table'])
self.assertEqual(media['name'], hostgroup['medium'])
self.assertEqual(
"{0} {1}.{2}".format(
os['name'],
os['major-version'],
os['minor-version']
),
hostgroup['operating-system']
)
if not bz_bug_is_open('1313056'):
self.assertEqual(cv['name'], hostgroup['content-view'])
self.assertEqual(
lce['name'], hostgroup['lifecycle-environment']
)
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_subnet_id(self):
"""Check if hostgroup with invalid subnet id raises proper error
@id: c352d7ea-4fc6-4b78-863d-d3ee4c0ad439
@Assert: Proper error should be raised
@BZ: 1354568
"""
subnet_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'subnet-id': subnet_id
})
self.assertIs(
exception.exception.stderr,
'Could not find subnet {0}'.format(subnet_id)
)
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_domain_id(self):
"""Check if hostgroup with invalid domain id raises proper error
@id: b36c83d6-b27c-4f1a-ac45-6c4999005bf7
@Assert: Proper error should be raised
@BZ: 1354568
"""
domain_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'domain-id': domain_id
})
self.assertIs(
exception.exception.stderr,
'Could not find domain {0}'.format(domain_id)
)
@skip_if_bug_open('bugzilla', 1354568)
@run_only_on('sat')
@tier1
def test_negative_create_with_architecture_id(self):
"""Check if hostgroup with invalid architecture id raises proper error
@id: 7b7de0fa-aee9-4163-adc2-354c1e720d90
@Assert: Proper error should be raised
@BZ: 1354568
"""
arch_id = gen_string('numeric', 4)
with self.assertRaises(CLIReturnCodeError) as exception:
HostGroup.create({
'name': gen_string('alpha'),
'architecture-id': arch_id
})
self.assertIs(
exception.exception.stderr,
'Could not find architecture {0}'.format(arch_id)
)
@tier1
def test_positive_update_name(self):
"""Successfully update an HostGroup.
@id: a36e3cbe-83d9-44ce-b8f7-5fab2a2cadf9
@Assert: HostGroup is updated.
"""
hostgroup = make_hostgroup()
for new_name in valid_hostgroups_list():
with self.subTest(new_name):
HostGroup.update({
'id': hostgroup['id'],
'new-name': new_name,
})
hostgroup = HostGroup.info({'id': hostgroup['id']})
self.assertEqual(hostgroup['name'], new_name)
@run_only_on('sat')
@tier1
def test_negative_update_name(self):
"""Create HostGroup then fail to update its name
@id: 42d208a4-f518-4ff2-9b7a-311adb460abd
@assert: HostGroup name is not updated
"""
hostgroup = make_hostgroup()
for new_name in invalid_values_list():
with self.subTest(new_name):
with self.assertRaises(CLIReturnCodeError):
HostGroup.update({
'id': hostgroup['id'],
'new-name': new_name,
})
result = HostGroup.info({'id': hostgroup['id']})
self.assertEqual(hostgroup['name'], result['name'])
@run_only_on('sat')
@tier1
def test_positive_delete_by_id(self):
"""Create HostGroup with valid values then delete it
by ID
@id: fe7dedd4-d7c3-4c70-b70d-c2deff357b76
@assert: HostGroup is deleted
"""
for name in valid_hostgroups_list():
with self.subTest(name):
hostgroup = make_hostgroup({'name': name})
HostGroup.delete({'id': hostgroup['id']})
with self.assertRaises(CLIReturnCodeError):
HostGroup.info({'id': hostgroup['id']})
@run_only_on('sat')
@tier1
def test_negative_delete_by_id(self):
"""Create HostGroup then delete it by wrong ID
@id: 047c9f1a-4dd6-4fdc-b7ed-37cc725c68d3
@assert: HostGroup is not deleted
"""
for entity_id in invalid_id_list():
with self.subTest(entity_id):
with self.assertRaises(CLIReturnCodeError):
HostGroup.delete({'id': entity_id})<|fim▁end|> | from robottelo.cli.proxy import Proxy
from robottelo.cli.factory import (
make_architecture, |
<|file_name|>system.src.js<|end_file_name|><|fim▁begin|>/*
* SystemJS v0.18.5
*/
(function() {
function bootstrap() {(function(__global) {
// Environment detection flags, used to gate platform-specific paths below.
// isWorker: a web worker exposes `self` and `importScripts` but no `window`.
var isWorker = typeof window == 'undefined' && typeof self != 'undefined' && typeof importScripts != 'undefined';
// isBrowser: a document-bearing window implies a regular browser page.
var isBrowser = typeof window != 'undefined' && typeof document != 'undefined';
// isWindows: Node reports a platform starting with 'win' on Windows
// (path-to-URL handling differs there).
var isWindows = typeof process != 'undefined' && !!process.platform.match(/^win/);

// Minimal console shim so later console.assert calls are safe on hosts
// that provide no console at all.
if (!__global.console)
  __global.console = { assert: function() {} };
// IE8 fallback: prefer the native Array.prototype.indexOf when available,
// otherwise scan the receiver front-to-back for a strictly-equal (===) match,
// returning the first matching index or -1.
var indexOf = Array.prototype.indexOf || function(item) {
  var len = this.length;
  var i = 0;
  while (i < len) {
    if (this[i] === item)
      return i;
    i++;
  }
  return -1;
};
var defineProperty;
(function () {
// Feature-detect a usable Object.defineProperty. On some legacy engines
// (notably IE8) defineProperty exists but throws for plain objects like
// {} here, in which case we fall back to a best-effort shim.
try {
  if (!!Object.defineProperty({}, 'a', {}))
    defineProperty = Object.defineProperty;
}
catch (e) {
  // Fallback: plain assignment. Honors only `value` or getter-style
  // `get` descriptors (getter evaluated once, eagerly); enumerable /
  // configurable / writable flags are ignored, and failures are
  // swallowed silently.
  defineProperty = function(obj, prop, opt) {
    try {
      obj[prop] = opt.value || opt.get.call(obj);
    }
    catch(e) {}
  }
}
})();
// Annotate an error with extra loader context `msg` without mutating the
// original error object.
// - Error instances: returns a new Error carrying the original message and
//   stack; `msg` is appended to the message in browsers, or to the stack on
//   other hosts (where only the stack renders in output).
// - Non-Error values: returns a string of the value with `msg` appended.
// Fix: the original declared `newErr` twice (an outer `var newErr;` and a
// second shadow-redeclaring `var newErr = ...` inside the if), which is
// confusing and flagged by linters; the inner redeclaration is removed.
function addToError(err, msg) {
  var newErr;
  if (err instanceof Error) {
    newErr = new Error(err.message, err.fileName, err.lineNumber);
    if (isBrowser) {
      newErr.message = err.message + '\n\t' + msg;
      newErr.stack = err.stack;
    }
    else {
      // node errors only look correct with the stack modified
      newErr.message = err.message;
      newErr.stack = err.stack + '\n\t' + msg;
    }
  }
  else {
    newErr = err + '\n\t' + msg;
  }

  return newErr;
}
// Evaluate `source` as script code with the given `this` context, via the
// Function constructor (global-scope semantics, not a direct eval). Any
// thrown error is re-raised annotated with the module's debug name.
function __eval(source, debugName, context) {
  try {
    new Function(source).call(context);
  }
  catch(e) {
    throw addToError(e, 'Evaluating ' + debugName);
  }
}
var baseURI;

// environment baseURI detection
if (typeof document != 'undefined' && document.getElementsByTagName) {
  // Browser: prefer document.baseURI, falling back to the first <base>
  // tag or the page URL on engines that lack baseURI (e.g. old IE).
  baseURI = document.baseURI;

  if (!baseURI) {
    var bases = document.getElementsByTagName('base');
    baseURI = bases[0] && bases[0].href || window.location.href;
  }

  // sanitize out the hash and querystring
  baseURI = baseURI.split('#')[0].split('?')[0];
  // trim to the containing directory (everything up to the last '/')
  baseURI = baseURI.substr(0, baseURI.lastIndexOf('/') + 1);
}
else if (typeof process != 'undefined' && process.cwd) {
  // Node: derive a file:// URL from the working directory. Windows needs
  // an extra leading '/' (file:///C:/...) and forward slashes.
  baseURI = 'file://' + (isWindows ? '/' : '') + process.cwd() + '/';
  if (isWindows)
    baseURI = baseURI.replace(/\\/g, '/');
}
else if (typeof location != 'undefined') {
  // Web worker (no document): fall back to the worker script's location.
  baseURI = __global.location.href;
}
else {
  throw new TypeError('No environment baseURI');
}

// Prefer an installed URL polyfill when present (needed on older engines).
var URL = __global.URLPolyfill || __global.URL;
/*
*********************************************************************************************
Dynamic Module Loader Polyfill
- Implemented exactly to the former 2014-08-24 ES6 Specification Draft Rev 27, Section 15
http://wiki.ecmascript.org/doku.php?id=harmony:specification_drafts#august_24_2014_draft_rev_27
- Functions are commented with their spec numbers, with spec differences commented.
- Spec bugs are commented in this code with links.
- Abstract functions have been combined where possible, and their associated functions
commented.
- Realm implementation is entirely omitted.
*********************************************************************************************
*/
// Module namespace objects. Instances brand themselves as 'Module' when
// stringified, mirroring the ES6 @@toStringTag behaviour:
// http://www.ecma-international.org/ecma-262/6.0/#sec-@@tostringtag
function Module() {}

var moduleToStringDescriptor = {
  value: function() {
    return 'Module';
  }
};
defineProperty(Module.prototype, 'toString', moduleToStringDescriptor);
// 26.3.1.1 Loader constructor: initializes the loader's internal state
// record — its module registry, in-flight loads, cached import promises
// and module records.
function Loader(options) {
  this._loader = {
    loaderObj: this,
    loads: [],
    modules: {},
    importPromises: {},
    moduleRecords: {}
  };

  // 26.3.3.6 `loader.global` — read-only accessor returning the global
  // object (goes through the local defineProperty shim for old engines).
  defineProperty(this, 'global', {
    get: function() {
      return __global;
    }
  });

  // 26.3.3.13 realm not implemented
}
(function() {
// Some Helpers
// logs a linkset snapshot for debugging
/* function snapshot(loader) {
console.log('---Snapshot---');
for (var i = 0; i < loader.loads.length; i++) {
var load = loader.loads[i];
var linkSetLog = ' ' + load.name + ' (' + load.status + '): ';
for (var j = 0; j < load.linkSets.length; j++) {
linkSetLog += '{' + logloads(load.linkSets[j].loads) + '} ';
}
console.log(linkSetLog);
}
console.log('');
}
function logloads(loads) {
var log = '';
for (var k = 0; k < loads.length; k++)
log += loads[k].name + (k != loads.length - 1 ? ' ' : '');
return log;
} */
/* function checkInvariants() {
// see https://bugs.ecmascript.org/show_bug.cgi?id=2603#c1
var loads = System._loader.loads;
var linkSets = [];
for (var i = 0; i < loads.length; i++) {
var load = loads[i];
console.assert(load.status == 'loading' || load.status == 'loaded', 'Each load is loading or loaded');
for (var j = 0; j < load.linkSets.length; j++) {
var linkSet = load.linkSets[j];
for (var k = 0; k < linkSet.loads.length; k++)
console.assert(loads.indexOf(linkSet.loads[k]) != -1, 'linkSet loads are a subset of loader loads');
if (linkSets.indexOf(linkSet) == -1)
linkSets.push(linkSet);
}
}
for (var i = 0; i < loads.length; i++) {
var load = loads[i];
for (var j = 0; j < linkSets.length; j++) {
var linkSet = linkSets[j];
if (linkSet.loads.indexOf(load) != -1)
console.assert(load.linkSets.indexOf(linkSet) != -1, 'linkSet contains load -> load contains linkSet');
if (load.linkSets.indexOf(linkSet) != -1)
console.assert(linkSet.loads.indexOf(load) != -1, 'load contains linkSet -> linkSet contains load');
}
}
for (var i = 0; i < linkSets.length; i++) {
var linkSet = linkSets[i];
for (var j = 0; j < linkSet.loads.length; j++) {
var load = linkSet.loads[j];
for (var k = 0; k < load.dependencies.length; k++) {
var depName = load.dependencies[k].value;
var depLoad;
for (var l = 0; l < loads.length; l++) {
if (loads[l].name != depName)
continue;
depLoad = loads[l];
break;
}
// loading records are allowed not to have their dependencies yet
// if (load.status != 'loading')
// console.assert(depLoad, 'depLoad found');
// console.assert(linkSet.loads.indexOf(depLoad) != -1, 'linkset contains all dependencies');
}
}
}
} */
// 15.2.3 - Runtime Semantics: Loader State
// 15.2.3.11
// 15.2.3.11 CreateLoaderRecord: fresh internal state for a loader object.
// `modules` is a plain object used as a map (ES5 stand-in for a spec List).
function createLoaderLoad(object) {
  var loaderRecord = {};
  loaderRecord.modules = {};
  loaderRecord.loads = [];
  loaderRecord.loaderObj = object;
  return loaderRecord;
}
// 15.2.3.2 Load Records and LoadRequest Objects
// 15.2.3.2.1
// 15.2.3.2.1 CreateLoad: a new load record, in the 'loading' state, for
// the given (already normalized) module name.
function createLoad(name) {
  var load = {};
  load.status = 'loading';
  load.name = name;
  load.linkSets = [];
  load.dependencies = [];
  load.metadata = {};
  return load;
}
// 15.2.3.2.2 createLoadRequestObject, absorbed into calling functions
// 15.2.4
// 15.2.4.1
// 15.2.4.1 LoadModule: kick off an asynchronous load of `name`, starting
// at the 'fetch' step when an address is already supplied, otherwise at
// 'locate'. Returns a promise for the completed load.
// `options` may carry { address, source, metadata } and is now treated as
// fully optional: previously only the `metadata` access was guarded
// (`options && options.metadata`) while `options.address` / `options.source`
// would throw a TypeError when `options` was omitted.
function loadModule(loader, name, options) {
  options = options || {};
  return new Promise(asyncStartLoadPartwayThrough({
    step: options.address ? 'fetch' : 'locate',
    loader: loader,
    moduleName: name,
    // allow metadata for import https://bugs.ecmascript.org/show_bug.cgi?id=3091
    moduleMetadata: options.metadata || {},
    moduleSource: options.source,
    moduleAddress: options.address
  }));
}
// 15.2.4.2
// 15.2.4.2 RequestLoad: normalize `request` relative to the referrer,
// then return (a promise for) the load record to use for it — reusing a
// registry entry or an in-flight load when one already exists, otherwise
// starting a brand new load from the 'locate' step.
function requestLoad(loader, request, refererName, refererAddress) {
  // 15.2.4.2.1 CallNormalize
  return new Promise(function(resolve, reject) {
    resolve(loader.loaderObj.normalize(request, refererName, refererAddress));
  })

  // 15.2.4.2.2 GetOrCreateLoad
  .then(function(name) {
    var load;

    // Already instantiated in the registry: synthesize a record that is
    // immediately 'linked' to the existing module.
    if (loader.modules[name]) {
      load = createLoad(name);
      load.status = 'linked';
      // https://bugs.ecmascript.org/show_bug.cgi?id=2795
      load.module = loader.modules[name];
      return load;
    }

    // An in-flight load for the same name: share it rather than refetch.
    for (var i = 0, l = loader.loads.length; i < l; i++) {
      load = loader.loads[i];
      if (load.name != name)
        continue;
      console.assert(load.status == 'loading' || load.status == 'loaded', 'loading or loaded');
      return load;
    }

    // No existing record: create one, register it, and start locating.
    load = createLoad(name);
    loader.loads.push(load);

    proceedToLocate(loader, load);

    return load;
  });
}
// 15.2.4.3
// 15.2.4.3 ProceedToLocate: invoke the loader's `locate` hook for this
// load and hand the resulting address promise on to the fetch step.
function proceedToLocate(loader, load) {
  // 15.2.4.3.1 CallLocate
  var addressPromise = Promise.resolve().then(function() {
    return loader.loaderObj.locate({ name: load.name, metadata: load.metadata });
  });
  proceedToFetch(loader, load, addressPromise);
}
// 15.2.4.4
// 15.2.4.4 ProceedToFetch: once the address promise `p` resolves, record
// the address on the load and invoke the loader's `fetch` hook, passing
// the resulting source promise on to the translate step.
function proceedToFetch(loader, load, p) {
  // 15.2.4.4.1 CallFetch
  var sourcePromise = p.then(function(address) {
    // adjusted, see https://bugs.ecmascript.org/show_bug.cgi?id=2602
    // Bail out quietly if this load failed or was dropped in the meantime.
    if (load.status != 'loading')
      return;
    load.address = address;
    return loader.loaderObj.fetch({ name: load.name, metadata: load.metadata, address: address });
  });
  proceedToTranslate(loader, load, sourcePromise);
}
// Counter used to mint unique placeholder addresses for anonymous modules.
var anonCnt = 0;

// 15.2.4.5 ProceedToTranslate: drive the source promise `p` through the
// translate -> instantiate -> dependency-processing pipeline, updating
// the load record and its link sets as each stage completes. Any failure
// anywhere in the chain marks the load 'failed' and fails its link sets.
function proceedToTranslate(loader, load, p) {
  p
  // 15.2.4.5.1 CallTranslate
  .then(function(source) {
    // Load may have failed/been dropped while fetching; do nothing then.
    if (load.status != 'loading')
      return;

    return Promise.resolve(loader.loaderObj.translate({ name: load.name, metadata: load.metadata, address: load.address, source: source }))

    // 15.2.4.5.2 CallInstantiate
    .then(function(source) {
      load.source = source;
      return loader.loaderObj.instantiate({ name: load.name, metadata: load.metadata, address: load.address, source: source });
    })

    // 15.2.4.5.3 InstantiateSucceeded
    .then(function(instantiateResult) {
      // undefined result => treat the source as declarative ES module
      // code: transpile it, then evaluate the transpiled output, which
      // calls System.register; we temporarily hijack register to capture
      // the declaration and deps instead of registering globally.
      if (instantiateResult === undefined) {
        load.address = load.address || '<Anonymous Module ' + ++anonCnt + '>';

        // instead of load.kind, use load.isDeclarative
        load.isDeclarative = true;
        return transpile.call(loader.loaderObj, load)
        .then(function(transpiled) {
          // Hijack System.register to set declare function
          var curSystem = __global.System;
          var curRegister = curSystem.register;
          curSystem.register = function(name, deps, declare) {
            // Anonymous System.register(deps, declare) form: shift args.
            if (typeof name != 'string') {
              declare = deps;
              deps = name;
            }
            // store the registered declaration as load.declare
            // store the deps as load.deps
            load.declare = declare;
            load.depsList = deps;
          }
          // empty {} context is closest to undefined 'this' we can get
          __eval(transpiled, load.address, {});
          // Restore the real System.register once evaluation is done.
          curSystem.register = curRegister;
        });
      }
      // Object result => dynamic (non-declarative) instantiate: the hook
      // supplied the deps and an execute function directly.
      else if (typeof instantiateResult == 'object') {
        load.depsList = instantiateResult.deps || [];
        load.execute = instantiateResult.execute;
        load.isDeclarative = false;
      }
      else
        throw TypeError('Invalid instantiate return value');
    })

    // 15.2.4.6 ProcessLoadDependencies
    .then(function() {
      load.dependencies = [];
      var depsList = load.depsList;

      // Request each dependency; the IIFE captures (request, index) so
      // results land in dependency order regardless of completion order.
      var loadPromises = [];
      for (var i = 0, l = depsList.length; i < l; i++) (function(request, index) {
        loadPromises.push(
          requestLoad(loader, request, load.name, load.address)

          // 15.2.4.6.1 AddDependencyLoad (load is parentLoad)
          .then(function(depLoad) {

            // adjusted from spec to maintain dependency order
            // this is due to the System.register internal implementation needs
            load.dependencies[index] = {
              key: request,
              value: depLoad.name
            };

            // An unlinked dependency joins every link set waiting on us.
            if (depLoad.status != 'linked') {
              var linkSets = load.linkSets.concat([]);
              for (var i = 0, l = linkSets.length; i < l; i++)
                addLoadToLinkSet(linkSets[i], depLoad);
            }

            // console.log('AddDependencyLoad ' + depLoad.name + ' for ' + load.name);
            // snapshot(loader);
          })
        );
      })(depsList[i], i);

      return Promise.all(loadPromises);
    })

    // 15.2.4.6.2 LoadSucceeded
    .then(function() {
      // console.log('LoadSucceeded ' + load.name);
      // snapshot(loader);

      console.assert(load.status == 'loading', 'is loading');

      load.status = 'loaded';

      // Iterate a copy: updateLinkSetOnLoad may mutate load.linkSets.
      var linkSets = load.linkSets.concat([]);
      for (var i = 0, l = linkSets.length; i < l; i++)
        updateLinkSetOnLoad(linkSets[i], load);
    });
  })
  // 15.2.4.5.4 LoadFailed
  ['catch'](function(exc) {
    load.status = 'failed';
    load.exception = exc;

    // Iterate a copy: linkSetFailed removes entries from load.linkSets.
    var linkSets = load.linkSets.concat([]);
    for (var i = 0, l = linkSets.length; i < l; i++) {
      linkSetFailed(linkSets[i], load, exc);
    }

    console.assert(load.linkSets.length == 0, 'linkSets not removed');
  });
}
// 15.2.4.7 PromiseOfStartLoadPartwayThrough absorbed into calling functions
// 15.2.4.7.1
// 15.2.4.7.1 AsyncStartLoadPartwayThrough: returns a Promise executor
// that begins (or joins) a load for stepState.moduleName at the step
// given by stepState.step ('locate' | 'fetch' | 'translate'), using any
// pre-supplied address/source/metadata from stepState.
function asyncStartLoadPartwayThrough(stepState) {
  return function(resolve, reject) {
    var loader = stepState.loader;
    var name = stepState.moduleName;
    var step = stepState.step;

    // A registry entry for this name means it is already fully loaded.
    if (loader.modules[name])
      throw new TypeError('"' + name + '" already exists in the module table');

    // adjusted to pick up existing loads
    // Join an in-flight load for the same name instead of duplicating it;
    // resolve once that load's first link set completes.
    var existingLoad;
    for (var i = 0, l = loader.loads.length; i < l; i++) {
      if (loader.loads[i].name == name) {
        existingLoad = loader.loads[i];

        // For the 'translate' step with no source captured yet, inject
        // our source/address into the existing load's pipeline.
        if(step == 'translate' && !existingLoad.source) {
          existingLoad.address = stepState.moduleAddress;
          proceedToTranslate(loader, existingLoad, Promise.resolve(stepState.moduleSource));
        }

        return existingLoad.linkSets[0].done.then(function() {
          resolve(existingLoad);
        });
      }
    }

    // Fresh load: create the record and a link set for it, register it,
    // and resolve the caller with the link set's completion promise.
    var load = createLoad(name);

    load.metadata = stepState.moduleMetadata;

    var linkSet = createLinkSet(loader, load);

    loader.loads.push(load);

    resolve(linkSet.done);

    // Dispatch to the pipeline stage the caller asked to start at.
    if (step == 'locate')
      proceedToLocate(loader, load);

    else if (step == 'fetch')
      proceedToFetch(loader, load, Promise.resolve(stepState.moduleAddress));

    else {
      console.assert(step == 'translate', 'translate step');
      load.address = stepState.moduleAddress;
      proceedToTranslate(loader, load, Promise.resolve(stepState.moduleSource));
    }
  }
}
// Declarative linking functions run through alternative implementation:
// 15.2.5.1.1 CreateModuleLinkageRecord not implemented
// 15.2.5.1.2 LookupExport not implemented
// 15.2.5.1.3 LookupModuleDependency not implemented
// 15.2.5.2.1
function createLinkSet(loader, startingLoad) {
var linkSet = {
loader: loader,
loads: [],
startingLoad: startingLoad, // added see spec bug https://bugs.ecmascript.org/show_bug.cgi?id=2995
loadingCount: 0
};
linkSet.done = new Promise(function(resolve, reject) {
linkSet.resolve = resolve;
linkSet.reject = reject;
});
addLoadToLinkSet(linkSet, startingLoad);
return linkSet;
}
// 15.2.5.2.2
function addLoadToLinkSet(linkSet, load) {
if (load.status == 'failed')
return;
console.assert(load.status == 'loading' || load.status == 'loaded', 'loading or loaded on link set');
for (var i = 0, l = linkSet.loads.length; i < l; i++)
if (linkSet.loads[i] == load)
return;
linkSet.loads.push(load);
load.linkSets.push(linkSet);
// adjustment, see https://bugs.ecmascript.org/show_bug.cgi?id=2603
if (load.status != 'loaded') {
linkSet.loadingCount++;
}
var loader = linkSet.loader;
for (var i = 0, l = load.dependencies.length; i < l; i++) {
if (!load.dependencies[i])
continue;
var name = load.dependencies[i].value;
if (loader.modules[name])
continue;
for (var j = 0, d = loader.loads.length; j < d; j++) {
if (loader.loads[j].name != name)
continue;
addLoadToLinkSet(linkSet, loader.loads[j]);
break;
}
}
// console.log('add to linkset ' + load.name);
// snapshot(linkSet.loader);
}
// linking errors can be generic or load-specific
// this is necessary for debugging info
function doLink(linkSet) {
var error = false;
try {
link(linkSet, function(load, exc) {
linkSetFailed(linkSet, load, exc);
error = true;
});
}
catch(e) {
linkSetFailed(linkSet, null, e);
error = true;
}
return error;
}
// 15.2.5.2.3
function updateLinkSetOnLoad(linkSet, load) {
// console.log('update linkset on load ' + load.name);
// snapshot(linkSet.loader);
console.assert(load.status == 'loaded' || load.status == 'linked', 'loaded or linked');
linkSet.loadingCount--;
if (linkSet.loadingCount > 0)
return;
// adjusted for spec bug https://bugs.ecmascript.org/show_bug.cgi?id=2995
var startingLoad = linkSet.startingLoad;
// non-executing link variation for loader tracing
// on the server. Not in spec.
/***/
if (linkSet.loader.loaderObj.execute === false) {
var loads = [].concat(linkSet.loads);
for (var i = 0, l = loads.length; i < l; i++) {
var load = loads[i];
load.module = !load.isDeclarative ? {
module: _newModule({})
} : {
name: load.name,
module: _newModule({}),
evaluated: true
};
load.status = 'linked';
finishLoad(linkSet.loader, load);
}
return linkSet.resolve(startingLoad);
}
/***/
var abrupt = doLink(linkSet);
if (abrupt)
return;
console.assert(linkSet.loads.length == 0, 'loads cleared');
linkSet.resolve(startingLoad);
}
// 15.2.5.2.4
function linkSetFailed(linkSet, load, exc) {
var loader = linkSet.loader;
var requests;
checkError:
if (load) {
if (linkSet.loads[0].name == load.name) {
exc = addToError(exc, 'Error loading ' + load.name);
}
else {
for (var i = 0; i < linkSet.loads.length; i++) {
var pLoad = linkSet.loads[i];
for (var j = 0; j < pLoad.dependencies.length; j++) {
var dep = pLoad.dependencies[j];
if (dep.value == load.name) {
exc = addToError(exc, 'Error loading ' + load.name + ' as "' + dep.key + '" from ' + pLoad.name);
break checkError;
}
}
}
exc = addToError(exc, 'Error loading ' + load.name + ' from ' + linkSet.loads[0].name);
}
}
else {
exc = addToError(exc, 'Error linking ' + linkSet.loads[0].name);
}
var loads = linkSet.loads.concat([]);
for (var i = 0, l = loads.length; i < l; i++) {
var load = loads[i];
// store all failed load records
loader.loaderObj.failed = loader.loaderObj.failed || [];
if (indexOf.call(loader.loaderObj.failed, load) == -1)
loader.loaderObj.failed.push(load);
var linkIndex = indexOf.call(load.linkSets, linkSet);
console.assert(linkIndex != -1, 'link not present');
load.linkSets.splice(linkIndex, 1);
if (load.linkSets.length == 0) {
var globalLoadsIndex = indexOf.call(linkSet.loader.loads, load);
if (globalLoadsIndex != -1)
linkSet.loader.loads.splice(globalLoadsIndex, 1);
}
}
linkSet.reject(exc);
}
// 15.2.5.2.5
function finishLoad(loader, load) {
// add to global trace if tracing
if (loader.loaderObj.trace) {
if (!loader.loaderObj.loads)
loader.loaderObj.loads = {};
var depMap = {};
load.dependencies.forEach(function(dep) {
depMap[dep.key] = dep.value;
});
loader.loaderObj.loads[load.name] = {
name: load.name,
deps: load.dependencies.map(function(dep){ return dep.key }),
depMap: depMap,
address: load.address,
metadata: load.metadata,
source: load.source,
kind: load.isDeclarative ? 'declarative' : 'dynamic'
};
}
// if not anonymous, add to the module table
if (load.name) {
console.assert(!loader.modules[load.name], 'load not in module table');
loader.modules[load.name] = load.module;
}
var loadIndex = indexOf.call(loader.loads, load);
if (loadIndex != -1)
loader.loads.splice(loadIndex, 1);
for (var i = 0, l = load.linkSets.length; i < l; i++) {
loadIndex = indexOf.call(load.linkSets[i].loads, load);
if (loadIndex != -1)
load.linkSets[i].loads.splice(loadIndex, 1);
}
load.linkSets.splice(0, load.linkSets.length);
}
function doDynamicExecute(linkSet, load, linkError) {
try {
var module = load.execute();
}
catch(e) {
linkError(load, e);
return;
}
if (!module || !(module instanceof Module))
linkError(load, new TypeError('Execution must define a Module instance'));
else
return module;
}
// 26.3 Loader
// 26.3.1.1
// defined at top
// importPromises adds ability to import a module twice without error - https://bugs.ecmascript.org/show_bug.cgi?id=2601
function createImportPromise(loader, name, promise) {
var importPromises = loader._loader.importPromises;
return importPromises[name] = promise.then(function(m) {
importPromises[name] = undefined;
return m;
}, function(e) {
importPromises[name] = undefined;
throw e;
});
}
Loader.prototype = {
// 26.3.3.1
constructor: Loader,
// 26.3.3.2
define: function(name, source, options) {
// check if already defined
if (this._loader.importPromises[name])
throw new TypeError('Module is already loading.');
return createImportPromise(this, name, new Promise(asyncStartLoadPartwayThrough({
step: 'translate',
loader: this._loader,
moduleName: name,
moduleMetadata: options && options.metadata || {},
moduleSource: source,
moduleAddress: options && options.address
})));
},
// 26.3.3.3
'delete': function(name) {
var loader = this._loader;
delete loader.importPromises[name];
delete loader.moduleRecords[name];
return loader.modules[name] ? delete loader.modules[name] : false;
},
// 26.3.3.4 entries not implemented
// 26.3.3.5
get: function(key) {
if (!this._loader.modules[key])
return;
doEnsureEvaluated(this._loader.modules[key], [], this);
return this._loader.modules[key].module;
},
// 26.3.3.7
has: function(name) {
return !!this._loader.modules[name];
},
// 26.3.3.8
'import': function(name, parentName, parentAddress) {
if (typeof parentName == 'object')
parentName = parentName.name;
// run normalize first
var loaderObj = this;
// added, see https://bugs.ecmascript.org/show_bug.cgi?id=2659
return Promise.resolve(loaderObj.normalize(name, parentName))
.then(function(name) {
var loader = loaderObj._loader;
if (loader.modules[name]) {
doEnsureEvaluated(loader.modules[name], [], loader._loader);
return loader.modules[name].module;
}
return loader.importPromises[name] || createImportPromise(loaderObj, name,
loadModule(loader, name, {})
.then(function(load) {
delete loader.importPromises[name];
return evaluateLoadedModule(loader, load);
}));
});
},
// 26.3.3.9 keys not implemented
// 26.3.3.10
load: function(name, options) {
var loader = this._loader;
if (loader.modules[name]) {
doEnsureEvaluated(loader.modules[name], [], loader);
return Promise.resolve(loader.modules[name].module);
}
return loader.importPromises[name] || createImportPromise(this, name,
loadModule(loader, name, {})
.then(function(load) {
delete loader.importPromises[name];
return evaluateLoadedModule(loader, load);
}));
},
// 26.3.3.11
module: function(source, options) {
var load = createLoad();
load.address = options && options.address;
var linkSet = createLinkSet(this._loader, load);
var sourcePromise = Promise.resolve(source);
var loader = this._loader;
var p = linkSet.done.then(function() {
return evaluateLoadedModule(loader, load);
});
proceedToTranslate(loader, load, sourcePromise);
return p;
},
// 26.3.3.12
newModule: function (obj) {
if (typeof obj != 'object')
throw new TypeError('Expected object');
// we do this to be able to tell if a module is a module privately in ES5
// by doing m instanceof Module
var m = new Module();
var pNames;
if (Object.getOwnPropertyNames && obj != null) {
pNames = Object.getOwnPropertyNames(obj);
}
else {
pNames = [];
for (var key in obj)
pNames.push(key);
}
for (var i = 0; i < pNames.length; i++) (function(key) {
defineProperty(m, key, {
configurable: false,
enumerable: true,
get: function () {
return obj[key];
}
});
})(pNames[i]);
if (Object.preventExtensions)
Object.preventExtensions(m);
return m;
},
// 26.3.3.14
set: function(name, module) {
if (!(module instanceof Module))
throw new TypeError('Loader.set(' + name + ', module) must be a module');
this._loader.modules[name] = {
module: module
};
},
// 26.3.3.15 values not implemented
// 26.3.3.16 @@iterator not implemented
// 26.3.3.17 @@toStringTag not implemented
// 26.3.3.18.1
normalize: function(name, referrerName, referrerAddress) {
return name;
},
// 26.3.3.18.2
locate: function(load) {
return load.name;
},
// 26.3.3.18.3
fetch: function(load) {
},
// 26.3.3.18.4
translate: function(load) {
return load.source;
},
// 26.3.3.18.5
instantiate: function(load) {
}
};
var _newModule = Loader.prototype.newModule;
/*
* ES6 Module Declarative Linking Code - Dev Build Only
*/
function link(linkSet, linkError) {
var loader = linkSet.loader;
if (!linkSet.loads.length)
return;
var loads = linkSet.loads.concat([]);
for (var i = 0; i < loads.length; i++) {
var load = loads[i];
var module = doDynamicExecute(linkSet, load, linkError);
if (!module)
return;
load.module = {
name: load.name,
module: module
};
load.status = 'linked';
finishLoad(loader, load);
}
}
function evaluateLoadedModule(loader, load) {
console.assert(load.status == 'linked', 'is linked ' + load.name);
return load.module.module;
}
function doEnsureEvaluated() {}
function transpile() {
throw new TypeError('ES6 transpilation is only provided in the dev module loader build.');
}
})();/*
*********************************************************************************************
System Loader Implementation
- Implemented to https://github.com/jorendorff/js-loaders/blob/master/browser-loader.js
- <script type="module"> supported
*********************************************************************************************
*/
var System;
// SystemLoader: Loader subclass adding `paths` configuration support
function SystemLoader() {
  Loader.call(this);
  // path alias table consumed by applyPaths (no spec equivalent)
  this.paths = {};
}
// NB no specification provided for System.paths, used ideas discussed in https://github.com/jorendorff/js-loaders/issues/25
// NB no specification provided for System.paths, used ideas discussed in
// https://github.com/jorendorff/js-loaders/issues/25
// Resolves `name` against the `paths` table. An exact entry wins outright;
// otherwise the most specific wildcard entry (most '/'-separated segments)
// matches, with '*' in its value replaced by the matched middle of the name.
function applyPaths(paths, name) {
  var bestPattern = '';
  var bestDepth = 0;
  var wildcardMatch;
  for (var pattern in paths) {
    var pieces = pattern.split('*');
    if (pieces.length > 2)
      throw new TypeError('Only one wildcard in a path is permitted');
    // exact path match
    if (pieces.length == 1) {
      if (name == pattern) {
        bestPattern = pattern;
        break;
      }
      continue;
    }
    // wildcard path match — most slashes wins
    var depth = pattern.split('/').length;
    var prefix = pieces[0];
    var suffix = pieces[1];
    if (depth >= bestDepth &&
        name.substr(0, prefix.length) == prefix &&
        name.substr(name.length - suffix.length) == suffix) {
      bestDepth = depth;
      bestPattern = pattern;
      wildcardMatch = name.substr(prefix.length, name.length - suffix.length - prefix.length);
    }
  }
  var resolved = paths[bestPattern] || name;
  if (wildcardMatch)
    resolved = resolved.replace('*', wildcardMatch);
  return resolved;
}
// inline Object.create-style class extension
// extend Loader without invoking its constructor on the prototype
function LoaderProto() {}
LoaderProto.prototype = Loader.prototype;
SystemLoader.prototype = new LoaderProto();
var fetchTextFromURL;
if (typeof XMLHttpRequest != 'undefined') {
  // browser fetch via XHR, with an XDomainRequest fallback for legacy IE CORS
  fetchTextFromURL = function(url, fulfill, reject) {
    // percent encode just '#' in urls
    // according to https://github.com/jorendorff/js-loaders/blob/master/browser-loader.js#L238
    // we should encode everything, but it breaks for servers that don't expect it
    // like in (https://github.com/systemjs/systemjs/issues/168)
    if (isBrowser)
      url = url.replace(/#/g, '%23');
    var xhr = new XMLHttpRequest();
    var sameDomain = true;
    var doTimeout = false;
    if (!('withCredentials' in xhr)) {
      // check if same domain
      var domainCheck = /^(\w+:)?\/\/([^\/]+)/.exec(url);
      if (domainCheck) {
        sameDomain = domainCheck[2] === window.location.host;
        if (domainCheck[1])
          sameDomain &= domainCheck[1] === window.location.protocol;
      }
    }
    if (!sameDomain && typeof XDomainRequest != 'undefined') {
      xhr = new XDomainRequest();
      xhr.onload = load;
      xhr.onerror = error;
      xhr.ontimeout = error;
      xhr.onprogress = function() {};
      xhr.timeout = 0;
      // XDomainRequest send must be deferred to the next tick
      doTimeout = true;
    }
    function load() {
      fulfill(xhr.responseText);
    }
    function error() {
      reject(new Error('XHR error' + (xhr.status ? ' (' + xhr.status + (xhr.statusText ? ' ' + xhr.statusText : '') + ')' : '') + ' loading ' + url));
    }
    xhr.onreadystatechange = function () {
      if (xhr.readyState === 4) {
        // status 0 covers file:/// and some proxied responses
        if (xhr.status === 200 || (xhr.status == 0 && xhr.responseText)) {
          load();
        } else {
          error();
        }
      }
    };
    xhr.open("GET", url, true);
    xhr.setRequestHeader('Accept', 'application/x-es-module */*');
    // bugfix: send exactly once — previously the deferred send AND the
    // unconditional send(null) both ran in the XDomainRequest case, and a
    // second send on an already-sent request throws
    if (doTimeout)
      setTimeout(function() {
        xhr.send();
      }, 0);
    else
      xhr.send(null);
  };
}
else if (typeof require != 'undefined') {
  // Node fetch: file: URLs only, read via fs.readFile
  var fs;
  fetchTextFromURL = function(url, fulfill, reject) {
    if (url.substr(0, 8) != 'file:///')
      throw new Error('Unable to fetch "' + url + '". Only file URLs of the form file:/// allowed running in Node.');
    // lazy-require fs so browser builds never touch it
    fs = fs || require('fs');
    // convert the file URL into a native filesystem path
    if (isWindows)
      url = url.replace(/\//g, '\\').substr(8);
    else
      url = url.substr(7);
    return fs.readFile(url, function(err, data) {
      if (err) {
        return reject(err);
      }
      else {
        // Strip Byte Order Mark out if it's the leading char
        var dataString = data + '';
        if (dataString[0] === '\ufeff')
          dataString = dataString.substr(1);
        fulfill(dataString);
      }
    });
  };
}
else {
  // neither XHR nor require is available
  throw new TypeError('No environment fetch API available.');
}
// default fetch hook: delegate to the environment fetch implementation
SystemLoader.prototype.fetch = function(load) {
  var executor = function(resolve, reject) {
    fetchTextFromURL(load.address, resolve, reject);
  };
  return new Promise(executor);
};
/*
* Traceur, Babel and TypeScript transpile hook for Loader
*/
var transpile = (function() {
// use Traceur by default
Loader.prototype.transpiler = 'traceur';
function transpile(load) {
var self = this;
return Promise.resolve(__global[self.transpiler == 'typescript' ? 'ts' : self.transpiler]
|| (self.pluginLoader || self)['import'](self.transpiler))
.then(function(transpiler) {
if (transpiler.__useDefault)
transpiler = transpiler['default'];
var transpileFunction;
if (transpiler.Compiler)
transpileFunction = traceurTranspile;
else if (transpiler.createLanguageService)
transpileFunction = typescriptTranspile;
else
transpileFunction = babelTranspile;
// note __moduleName will be part of the transformer meta in future when we have the spec for this
return '(function(__moduleName){' + transpileFunction.call(self, load, transpiler) + '\n})("' + load.name + '");\n//# sourceURL=' + load.address + '!transpiled';
});
};
function traceurTranspile(load, traceur) {
var options = this.traceurOptions || {};
options.modules = 'instantiate';
options.script = false;
if (options.sourceMaps === undefined)
options.sourceMaps = 'inline';
options.filename = load.address;
options.inputSourceMap = load.metadata.sourceMap;
options.moduleName = false;
var compiler = new traceur.Compiler(options);
return doTraceurCompile(load.source, compiler, options.filename);
}
function doTraceurCompile(source, compiler, filename) {
try {
return compiler.compile(source, filename);
}<|fim▁hole|> // traceur throws an error array
throw e[0];
}
}
function babelTranspile(load, babel) {
var options = this.babelOptions || {};
options.modules = 'system';
if (options.sourceMap === undefined)
options.sourceMap = 'inline';
options.inputSourceMap = load.metadata.sourceMap;
options.filename = load.address;
options.code = true;
options.ast = false;
return babel.transform(load.source, options).code;
}
function typescriptTranspile(load, ts) {
var options = this.typescriptOptions || {};
options.target = options.target || ts.ScriptTarget.ES5;
if (options.sourceMap === undefined)
options.sourceMap = true;
if (options.sourceMap)
options.inlineSourceMap = true;
options.module = ts.ModuleKind.System;
return ts.transpile(load.source, options, load.address);
}
return transpile;
})();
// we define a __exec for globally-scoped execution
// used by module format implementations
var __exec;
(function() {
  // System clobbering protection (mostly for Traceur)
  var curSystem;
  function preExec(loader) {
    curSystem = __global.System;
    __global.System = loader;
  }
  function postExec() {
    __global.System = curSystem;
  }
  var hasBtoa = typeof btoa != 'undefined';
  // Appends sourceURL / sourceMappingURL comments to a load's source so
  // evaluated code is debuggable under its own address.
  function getSource(load) {
    var lastLineIndex = load.source.lastIndexOf('\n');
    return load.source
      // adds the sourceURL comment if not already present
      + (load.source.substr(lastLineIndex, 15) != '\n//# sourceURL='
        ? '\n//# sourceURL=' + load.address + (load.metadata.sourceMap ? '!transpiled' : '') : '')
      // add sourceMappingURL if load.metadata.sourceMap is set
      + (load.metadata.sourceMap && hasBtoa &&
        '\n//# sourceMappingURL=data:application/json;base64,' + btoa(unescape(encodeURIComponent(load.metadata.sourceMap))) || '')
  }
  // Web Worker and Chrome Extensions use original ESML eval
  // this may lead to some global module execution differences (eg var not defining onto global)
  if (isWorker || isBrowser && window.chrome && window.chrome.extension) {
    __exec = function(load) {
      try {
        preExec(this);
        new Function(getSource(load)).call(__global);
        postExec();
      }
      catch(e) {
        throw addToError(e, 'Evaluating ' + load.address);
      }
    };
  }
  // use script injection eval to get identical global script behaviour
  else if (typeof document != 'undefined') {
    var head;
    var scripts = document.getElementsByTagName('script');
    $__curScript = scripts[scripts.length - 1];
    __exec = function(load) {
      if (!head)
        head = document.head || document.body || document.documentElement;
      var script = document.createElement('script');
      script.text = getSource(load);
      // capture evaluation errors via window.onerror — script injection
      // has no try/catch channel
      var onerror = window.onerror;
      var e;
      window.onerror = function(_e) {
        e = addToError(_e, 'Evaluating ' + load.address);
      }
      preExec(this);
      head.appendChild(script);
      head.removeChild(script);
      postExec();
      window.onerror = onerror;
      if (e)
        throw e;
    }
  }
  else {
    // global scoped eval for node
    // indirect module name keeps browser bundlers from including 'vm'
    var vmModule = 'vm';
    var vm = require(vmModule);
    __exec = function(load) {
      try {
        preExec(this);
        vm.runInThisContext(getSource(load));
        postExec();
      }
      catch(e) {
        // NOTE(review): stringifies the error here, unlike the branches
        // above which pass the Error object — confirm whether intentional
        throw addToError(e.toString(), 'Evaluating ' + load.address);
      }
    };
  }
})();// SystemJS Loader Class and Extension helpers
// SystemJSLoader: SystemLoader plus the accumulated SystemJS extensions,
// applied via the chained constructor built by hookConstructor
function SystemJSLoader() {
  SystemLoader.call(this);
  systemJSConstructor.call(this);
}
// inline Object.create-style class extension
// extend SystemLoader without invoking its constructor on the prototype
function SystemProto() {};
SystemProto.prototype = SystemLoader.prototype;
SystemJSLoader.prototype = new SystemProto();
var systemJSConstructor;
// Wraps (or replaces) a SystemJSLoader prototype method with an extension.
// fix: the parameter was named `hook`, shadowing this function's own name;
// renamed to `hookFn` (call sites are positional, so they are unaffected).
function hook(name, hookFn) {
  SystemJSLoader.prototype[name] = hookFn(SystemJSLoader.prototype[name]);
}
// Chains a constructor extension onto the accumulated SystemJS constructor.
function hookConstructor(hook) {
  systemJSConstructor = hook(systemJSConstructor || function() {});
}
// Returns a copy of `deps` with duplicates removed (first occurrence wins,
// strict-equality comparison).
function dedupe(deps) {
  var unique = [];
  for (var i = 0, l = deps.length; i < l; i++) {
    var seen = false;
    for (var j = 0; j < unique.length; j++) {
      if (unique[j] === deps[i]) {
        seen = true;
        break;
      }
    }
    if (!seen)
      unique.push(deps[i]);
  }
  return unique;
}
// Groups duplicate dependency names: returns the distinct `names` plus,
// for each, the list of `indices` at which that name occurred in `deps`.
function group(deps) {
  var names = [];
  var indices = [];
  for (var i = 0, l = deps.length; i < l; i++) {
    var at = -1;
    for (var j = 0; j < names.length; j++) {
      if (names[j] === deps[i]) {
        at = j;
        break;
      }
    }
    if (at === -1) {
      names.push(deps[i]);
      indices.push([i]);
    }
    else {
      indices[at].push(i);
    }
  }
  return { names: names, indices: indices };
}
// Copies enumerable properties of `b` onto `a`; when `prepend` is truthy,
// properties already present on `a` win. Returns `a`.
function extend(a, b, prepend) {
  for (var key in b) {
    if (prepend && (key in a))
      continue;
    a[key] = b[key];
  }
  return a;
}
// meta first-level extends where:
// array + array appends
// object + object extends
// other properties replace
// meta first-level extends where:
//   array + array appends (`prepend` controls the order)
//   object + object extends one level deep
//   other properties replace, unless `prepend` is set
function extendMeta(a, b, prepend) {
  for (var key in b) {
    var incoming = b[key];
    var existing = a[key];
    if (!(key in a)) {
      a[key] = incoming;
    }
    else if (incoming instanceof Array && existing instanceof Array) {
      a[key] = prepend
        ? [].concat(incoming).concat(existing)
        : [].concat(existing).concat(incoming);
    }
    else if (typeof incoming == 'object' && typeof existing == 'object') {
      a[key] = extend(extend({}, existing), incoming, prepend);
    }
    else if (!prepend) {
      a[key] = incoming;
    }
  }
}
}var absURLRegEx = /^[^\/]+:\/\//;
// Resolves a dotted member expression like 'a.b.c' against `value`.
function readMemberExpression(p, value) {
  var parts = p.split('.');
  for (var i = 0; i < parts.length; i++)
    value = value[parts[i]];
  return value;
}
var baseURLCache = {};
function getBaseURLObj() {
if (baseURLCache[this.baseURL])
return baseURLCache[this.baseURL];
// normalize baseURL if not already
if (this.baseURL[this.baseURL.length - 1] != '/')
this.baseURL += '/';
var baseURL = new URL(this.baseURL, baseURI);
this.baseURL = baseURL.href;
return (baseURLCache[this.baseURL] = baseURL);
}
var baseURIObj = new URL(baseURI);
(function() {
hookConstructor(function(constructor) {
return function() {
constructor.call(this);
// support baseURL
this.baseURL = baseURI.substr(0, baseURI.lastIndexOf('/') + 1);
// support the empty module, as a concept
this.set('@empty', this.newModule({}));
};
});
/*
Normalization
If a name is relative, we apply URL normalization to the page
If a name is an absolute URL, we leave it as-is
Plain names (neither of the above) run through the map and package
normalization phases (applying before and after this one).
The paths normalization phase applies last (paths extension), which
defines the `normalizeSync` function and normalizes everything into
a URL.
The final normalization
*/
hook('normalize', function() {
return function(name, parentName) {
// relative URL-normalization
if (name[0] == '.' || name[0] == '/')
return new URL(name, parentName || baseURIObj).href;
return name;
};
});
/*
__useDefault
When a module object looks like:
newModule(
__useDefault: true,
default: 'some-module'
})
Then importing that module provides the 'some-module'
result directly instead of the full module.
Useful for eg module.exports = function() {}
*/
hook('import', function(systemImport) {
return function(name, parentName, parentAddress) {
return systemImport.call(this, name, parentName, parentAddress).then(function(module) {
return module.__useDefault ? module['default'] : module;
});
};
});
/*
Extend config merging one deep only
loader.config({
some: 'random',
config: 'here',
deep: {
config: { too: 'too' }
}
});
<=>
loader.some = 'random';
loader.config = 'here'
loader.deep = loader.deep || {};
loader.deep.config = { too: 'too' };
Normalizes meta and package configs allowing for:
System.config({
meta: {
'./index.js': {}
}
});
To become
System.meta['https://thissite.com/index.js'] = {};
For easy normalization canonicalization with latest URL support.
*/
SystemJSLoader.prototype.config = function(cfg) {
// always configure baseURL first
if (cfg.baseURL) {
var hasConfig = false;
function checkHasConfig(obj) {
for (var p in obj)
return true;
}
if (checkHasConfig(this.packages) || checkHasConfig(this.meta) || checkHasConfig(this.depCache) || checkHasConfig(this.bundles))
throw new TypeError('baseURL should only be configured once and must be configured first.');
this.baseURL = cfg.baseURL;
// sanitize baseURL
getBaseURLObj.call(this);
}
if (cfg.paths) {
for (var p in cfg.paths)
this.paths[p] = cfg.paths[p];
}
if (cfg.map) {
for (var p in cfg.map) {
var v = cfg.map[p];
// object map backwards-compat into packages configuration
if (typeof v !== 'string') {
var normalized = this.normalizeSync(p);
// if doing default js extensions, undo to get package name
if (this.defaultJSExtensions && p.substr(p.length - 3, 3) != '.js')
normalized = normalized.substr(0, normalized.length - 3);
// if a package main, revert it
var pkgMatch = '';
for (var pkg in this.packages) {
if (normalized.substr(0, pkg.length) == pkg
&& (!normalized[pkg.length] || normalized[pkg.length] == '/')
&& pkgMatch.split('/').length < pkg.split('/').length)
pkgMatch = pkg;
}
if (pkgMatch && this.packages[pkgMatch].main)
normalized = normalized.substr(0, normalized.length - this.packages[pkgMatch].main.length - 1);
var pkg = this.packages[normalized] = this.packages[normalized] || {};
pkg.map = v;
}
else {
this.map[p] = v;
}
}
}
if (cfg.packages) {
for (var p in cfg.packages) {
var prop = this.normalizeSync(p);
// if doing default js extensions, undo to get package name
if (this.defaultJSExtensions && p.substr(p.length - 3, 3) != '.js')
prop = prop.substr(0, prop.length - 3);
this.packages[prop]= this.packages[prop] || {};
for (var q in cfg.packages[p])
this.packages[prop][q] = cfg.packages[p][q];
}
}
if (cfg.bundles) {
for (var p in cfg.bundles) {
var bundle = [];
for (var i = 0; i < cfg.bundles[p].length; i++)
bundle.push(this.normalizeSync(cfg.bundles[p][i]));
this.bundles[p] = bundle;
}
}
for (var c in cfg) {
var v = cfg[c];
var normalizeProp = false, normalizeValArray = false;
if (c == 'baseURL' || c == 'map' || c == 'packages' || c == 'bundles' || c == 'paths')
continue;
if (typeof v != 'object' || v instanceof Array) {
this[c] = v;
}
else {
this[c] = this[c] || {};
if (c == 'meta' || c == 'depCache')
normalizeProp = true;
for (var p in v) {
if (c == 'meta' && p[0] == '*')
this[c][p] = v[p];
else if (normalizeProp)
this[c][this.normalizeSync(p)] = v[p];
else
this[c][p] = v[p];
}
}
}
};
})();/*
* Script tag fetch
*
* When load.metadata.scriptLoad is true, we load via script tag injection.
*/
(function() {
if (typeof document != 'undefined')
var head = document.getElementsByTagName('head')[0];
// call this functione everytime a wrapper executes
var curSystem;
// System clobbering protection for Traceur
SystemJSLoader.prototype.onScriptLoad = function() {
__global.System = curSystem;
};
function webWorkerImport(loader, load) {
return new Promise(function(resolve, reject) {
try {
importScripts(load.address);
}
catch(e) {
reject(e);
}
loader.onScriptLoad(load);
// if nothing registered, then something went wrong
if (!load.metadata.registered)
reject(load.address + ' did not call System.register or AMD define');
resolve('');
});
}
// override fetch to use script injection
hook('fetch', function(fetch) {
return function(load) {
var loader = this;
if (!load.metadata.scriptLoad || (!isBrowser && !isWorker))
return fetch.call(this, load);
if (isWorker)
return webWorkerImport(loader, load);
return new Promise(function(resolve, reject) {
var s = document.createElement('script');
s.async = true;
function complete(evt) {
if (s.readyState && s.readyState != 'loaded' && s.readyState != 'complete')
return;
cleanup();
// this runs synchronously after execution
// we now need to tell the wrapper handlers that
// this load record has just executed
loader.onScriptLoad(load);
// if nothing registered, then something went wrong
if (!load.metadata.registered)
reject(load.address + ' did not call System.register or AMD define');
resolve('');
}
function error(evt) {
cleanup();
reject(new Error('Unable to load script ' + load.address));
}
if (s.attachEvent) {
s.attachEvent('onreadystatechange', complete);
}
else {
s.addEventListener('load', complete, false);
s.addEventListener('error', error, false);
}
curSystem = __global.System;
__global.System = loader;
s.src = load.address;
head.appendChild(s);
function cleanup() {
if (s.detachEvent)
s.detachEvent('onreadystatechange', complete);
else {
s.removeEventListener('load', complete, false);
s.removeEventListener('error', error, false);
}
head.removeChild(s);
}
});
};
});
})();
/*
* Instantiate registry extension
*
* Supports Traceur System.register 'instantiate' output for loading ES6 as ES5.
*
* - Creates the loader.register function
* - Also supports metadata.format = 'register' in instantiate for anonymous register modules
* - Also supports metadata.deps, metadata.execute and metadata.executingRequire
* for handling dynamic modules alongside register-transformed ES6 modules
*
*
* The code here replicates the ES6 linking groups algorithm to ensure that
* circular ES6 compiled into System.register can work alongside circular AMD
* and CommonJS, identically to the actual ES6 loader.
*
*/
(function() {
var getOwnPropertyDescriptor = true;
try {
Object.getOwnPropertyDescriptor({ a: 0 }, 'a');
}
catch(e) {
getOwnPropertyDescriptor = false;
}
/*
* There are two variations of System.register:
* 1. System.register for ES6 conversion (2-3 params) - System.register([name, ]deps, declare)
* see https://github.com/ModuleLoader/es6-module-loader/wiki/System.register-Explained
*
* 2. System.registerDynamic for dynamic modules (3-4 params) - System.registerDynamic([name, ]deps, executingRequire, execute)
* the true or false statement
*
* this extension implements the linking algorithm for the two variations identical to the spec
* allowing compiled ES6 circular references to work alongside AMD and CJS circular references.
*
*/
var anonRegister;
var calledRegister = false;
function doRegister(loader, name, register) {
calledRegister = true;
// named register
if (name) {
var ext = loader.defaultJSExtensions && name.split('/').pop().split('.').pop();
name = (loader.normalizeSync || loader.normalize).call(loader, name);
if (ext && name.substr(name.length - ext.length - 1, ext.length + 1) != '.' + ext)
name = name.substr(0, name.lastIndexOf('.'));
register.name = name;
if (!(name in loader.defined))
loader.defined[name] = register;
}
// anonymous register
else if (register.declarative) {
if (anonRegister)
throw new TypeError('Invalid anonymous System.register module load. If loading a single module, ensure anonymous System.register is loaded via System.import. If loading a bundle, ensure all the System.register calls are named.');
anonRegister = register;
}
}
SystemJSLoader.prototype.register = function(name, deps, declare) {
if (typeof name != 'string') {
declare = deps;
deps = name;
name = null;
}
// dynamic backwards-compatibility
// can be deprecated eventually
if (typeof declare == 'boolean')
return this.registerDynamic.apply(this, arguments);
doRegister(this, name, {
declarative: true,
deps: deps,
declare: declare
});
};
SystemJSLoader.prototype.registerDynamic = function(name, deps, declare, execute) {
if (typeof name != 'string') {
execute = declare;
declare = deps;
deps = name;
name = null;
}
// dynamic
doRegister(this, name, {
declarative: false,
deps: deps,
execute: execute,
executingRequire: declare
});
};
/*
* Registry side table - loader.defined
* Registry Entry Contains:
* - name
* - deps
* - declare for declarative modules
* - execute for dynamic modules, different to declarative execute on module
* - executingRequire indicates require drives execution for circularity of dynamic modules
* - declarative optional boolean indicating which of the above
*
* Can preload modules directly on System.defined['my/module'] = { deps, execute, executingRequire }
*
* Then the entry gets populated with derived information during processing:
* - normalizedDeps derived from deps, created in instantiate
* - groupIndex used by group linking algorithm
 * - evaluated indicating whether evaluation has happened
* - module the module record object, containing:
* - exports actual module exports
*
* For dynamic we track the es module with:
* - esModule actual es module value
*
* Then for declarative only we track dynamic bindings with the 'module' records:
* - name
* - exports
* - setters declarative setter functions
* - dependencies, module records of dependencies
* - importers, module records of dependents
*
* After linked and evaluated, entries are removed, declarative module records remain in separate
* module binding table
*
*/
hookConstructor(function(constructor) {
return function() {
constructor.call(this);
this.defined = {};
this._loader.moduleRecords = {};
};
});
// script injection mode calls this function synchronously on load
hook('onScriptLoad', function(onScriptLoad) {
return function(load) {
onScriptLoad.call(this, load);
if (calledRegister) {
// anonymous define
if (anonRegister)
load.metadata.entry = anonRegister;
load.metadata.format = load.metadata.format || 'defined';
load.metadata.registered = true;
calledRegister = false;
anonRegister = null;
}
};
});
function buildGroups(entry, loader, groups) {
groups[entry.groupIndex] = groups[entry.groupIndex] || [];
if (indexOf.call(groups[entry.groupIndex], entry) != -1)
return;
groups[entry.groupIndex].push(entry);
for (var i = 0, l = entry.normalizedDeps.length; i < l; i++) {
var depName = entry.normalizedDeps[i];
var depEntry = loader.defined[depName];
// not in the registry means already linked / ES6
if (!depEntry || depEntry.evaluated)
continue;
// now we know the entry is in our unlinked linkage group
var depGroupIndex = entry.groupIndex + (depEntry.declarative != entry.declarative);
// the group index of an entry is always the maximum
if (depEntry.groupIndex === undefined || depEntry.groupIndex < depGroupIndex) {
// if already in a group, remove from the old group
if (depEntry.groupIndex !== undefined) {
groups[depEntry.groupIndex].splice(indexOf.call(groups[depEntry.groupIndex], depEntry), 1);
// if the old group is empty, then we have a mixed depndency cycle
if (groups[depEntry.groupIndex].length == 0)
throw new TypeError("Mixed dependency cycle detected");
}
depEntry.groupIndex = depGroupIndex;
}
buildGroups(depEntry, loader, groups);
}
}
function link(name, loader) {
var startEntry = loader.defined[name];
// skip if already linked
if (startEntry.module)
return;
startEntry.groupIndex = 0;
var groups = [];
buildGroups(startEntry, loader, groups);
var curGroupDeclarative = !!startEntry.declarative == groups.length % 2;
for (var i = groups.length - 1; i >= 0; i--) {
var group = groups[i];
for (var j = 0; j < group.length; j++) {
var entry = group[j];
// link each group
if (curGroupDeclarative)
linkDeclarativeModule(entry, loader);
else
linkDynamicModule(entry, loader);
}
curGroupDeclarative = !curGroupDeclarative;
}
}
// module binding records
function Module() {}
defineProperty(Module, 'toString', {
value: function() {
return 'Module';
}
});
function getOrCreateModuleRecord(name, moduleRecords) {
return moduleRecords[name] || (moduleRecords[name] = {
name: name,
dependencies: [],
exports: new Module(), // start from an empty module and extend
importers: []
});
}
function linkDeclarativeModule(entry, loader) {
// only link if already not already started linking (stops at circular)
if (entry.module)
return;
var moduleRecords = loader._loader.moduleRecords;
var module = entry.module = getOrCreateModuleRecord(entry.name, moduleRecords);
var exports = entry.module.exports;
var declaration = entry.declare.call(__global, function(name, value) {
module.locked = true;
if (typeof name == 'object') {
for (var p in name)
exports[p] = name[p];
}
else {
exports[name] = value;
}
for (var i = 0, l = module.importers.length; i < l; i++) {
var importerModule = module.importers[i];
if (!importerModule.locked) {
var importerIndex = indexOf.call(importerModule.dependencies, module);
importerModule.setters[importerIndex](exports);
}
}
module.locked = false;
return value;
});
module.setters = declaration.setters;
module.execute = declaration.execute;
if (!module.setters || !module.execute) {
throw new TypeError('Invalid System.register form for ' + entry.name);
}
// now link all the module dependencies
for (var i = 0, l = entry.normalizedDeps.length; i < l; i++) {
var depName = entry.normalizedDeps[i];
var depEntry = loader.defined[depName];
var depModule = moduleRecords[depName];
// work out how to set depExports based on scenarios...
var depExports;
if (depModule) {
depExports = depModule.exports;
}
// dynamic, already linked in our registry
else if (depEntry && !depEntry.declarative) {
depExports = depEntry.esModule;
}
// in the loader registry
else if (!depEntry) {
depExports = loader.get(depName);
}
// we have an entry -> link
else {
linkDeclarativeModule(depEntry, loader);
depModule = depEntry.module;
depExports = depModule.exports;
}
// only declarative modules have dynamic bindings
if (depModule && depModule.importers) {
depModule.importers.push(module);
module.dependencies.push(depModule);
}
else {
module.dependencies.push(null);
}
// run setters for all entries with the matching dependency name
var originalIndices = entry.originalIndices[i];
for (var j = 0, len = originalIndices.length; j < len; ++j) {
var index = originalIndices[j];
if (module.setters[index]) {
module.setters[index](depExports);
}
}
}
}
// An analog to loader.get covering execution of all three layers (real declarative, simulated declarative, simulated dynamic)
function getModule(name, loader) {
var exports;
var entry = loader.defined[name];
if (!entry) {
exports = loader.get(name);
if (!exports)
throw new Error('Unable to load dependency ' + name + '.');
}
else {
if (entry.declarative)
ensureEvaluated(name, [], loader);
else if (!entry.evaluated)
linkDynamicModule(entry, loader);
exports = entry.module.exports;
}
if ((!entry || entry.declarative) && exports && exports.__useDefault)
return exports['default'];
return exports;
}
function linkDynamicModule(entry, loader) {
if (entry.module)
return;
var exports = {};
var module = entry.module = { exports: exports, id: entry.name };
// AMD requires execute the tree first
if (!entry.executingRequire) {
for (var i = 0, l = entry.normalizedDeps.length; i < l; i++) {
var depName = entry.normalizedDeps[i];
// we know we only need to link dynamic due to linking algorithm
var depEntry = loader.defined[depName];
if (depEntry)
linkDynamicModule(depEntry, loader);
}
}
// now execute
entry.evaluated = true;
var output = entry.execute.call(__global, function(name) {
for (var i = 0, l = entry.deps.length; i < l; i++) {
if (entry.deps[i] != name)
continue;
return getModule(entry.normalizedDeps[i], loader);
}
throw new TypeError('Module ' + name + ' not declared as a dependency.');
}, exports, module);
if (output)
module.exports = output;
// create the esModule object, which allows ES6 named imports of dynamics
exports = module.exports;
if (exports && exports.__esModule) {
entry.esModule = exports;
}
else {
entry.esModule = {};
// don't trigger getters/setters in environments that support them
if (typeof exports == 'object' || typeof exports == 'function') {
if (getOwnPropertyDescriptor) {
var d;
for (var p in exports)
if (d = Object.getOwnPropertyDescriptor(exports, p))
defineProperty(entry.esModule, p, d);
}
else {
var hasOwnProperty = exports && exports.hasOwnProperty;
for (var p in exports) {
if (!hasOwnProperty || exports.hasOwnProperty(p))
entry.esModule[p] = exports[p];
}
}
}
entry.esModule['default'] = exports;
defineProperty(entry.esModule, '__useDefault', {
value: true
});
}
}
/*
* Given a module, and the list of modules for this current branch,
* ensure that each of the dependencies of this module is evaluated
* (unless one is a circular dependency already in the list of seen
* modules, in which case we execute it)
*
* Then we evaluate the module itself depth-first left to right
* execution to match ES6 modules
*/
function ensureEvaluated(moduleName, seen, loader) {
var entry = loader.defined[moduleName];
// if already seen, that means it's an already-evaluated non circular dependency
if (!entry || entry.evaluated || !entry.declarative)
return;
// this only applies to declarative modules which late-execute
seen.push(moduleName);
for (var i = 0, l = entry.normalizedDeps.length; i < l; i++) {
var depName = entry.normalizedDeps[i];
if (indexOf.call(seen, depName) == -1) {
if (!loader.defined[depName])
loader.get(depName);
else
ensureEvaluated(depName, seen, loader);
}
}
if (entry.evaluated)
return;
entry.evaluated = true;
entry.module.execute.call(__global);
}
// override the delete method to also clear the register caches
hook('delete', function(del) {
return function(name) {
delete this._loader.moduleRecords[name];
delete this.defined[name];
return del.call(this, name);
};
});
var registerRegEx = /^\s*(\/\*[\s\S]*?\*\/\s*|\/\/[^\n]*\s*)*System\.register(Dynamic)?\s*\(/;
hook('fetch', function(fetch) {
return function(load) {
if (this.defined[load.name]) {
load.metadata.format = 'defined';
return '';
}
// this is the synchronous chain for onScriptLoad
anonRegister = null;
calledRegister = false;
if (load.metadata.format == 'register')
load.metadata.scriptLoad = true;
// NB remove when "deps " is deprecated
load.metadata.deps = load.metadata.deps || [];
return fetch.call(this, load);
};
});
hook('translate', function(translate) {
// we run the meta detection here (register is after meta)
return function(load) {
return Promise.resolve(translate.call(this, load)).then(function(source) {
if (typeof load.metadata.deps === 'string')
load.metadata.deps = load.metadata.deps.split(',');
load.metadata.deps = load.metadata.deps || [];
// run detection for register format
if (load.metadata.format == 'register' || !load.metadata.format && load.source.match(registerRegEx))
load.metadata.format = 'register';
return source;
});
};
});
hook('instantiate', function(instantiate) {
return function(load) {
var loader = this;
var entry;
// first we check if this module has already been defined in the registry
if (loader.defined[load.name]) {
entry = loader.defined[load.name];
entry.deps = entry.deps.concat(load.metadata.deps);
}
// picked up already by a script injection
else if (load.metadata.entry)
entry = load.metadata.entry;
// otherwise check if it is dynamic
else if (load.metadata.execute) {
entry = {
declarative: false,
deps: load.metadata.deps || [],
execute: load.metadata.execute,
executingRequire: load.metadata.executingRequire // NodeJS-style requires or not
};
}
// Contains System.register calls
else if (load.metadata.format == 'register' || load.metadata.format == 'esm' || load.metadata.format == 'es6') {
anonRegister = null;
calledRegister = false;
__exec.call(loader, load);
if (!calledRegister && !load.metadata.registered)
throw new TypeError(load.name + ' detected as System.register but didn\'t execute.');
if (anonRegister)
entry = anonRegister;
else
load.metadata.bundle = true;
if (!entry && loader.defined[load.name])
entry = loader.defined[load.name];
anonRegister = null;
calledRegister = false;
}
// named bundles are just an empty module
if (!entry)
entry = {
declarative: false,
deps: load.metadata.deps,
execute: function() {
return loader.newModule({});
}
};
// place this module onto defined for circular references
loader.defined[load.name] = entry;
var grouped = group(entry.deps);
entry.deps = grouped.names;
entry.originalIndices = grouped.indices;
entry.name = load.name;
// first, normalize all dependencies
var normalizePromises = [];
for (var i = 0, l = entry.deps.length; i < l; i++)
normalizePromises.push(Promise.resolve(loader.normalize(entry.deps[i], load.name)));
return Promise.all(normalizePromises).then(function(normalizedDeps) {
entry.normalizedDeps = normalizedDeps;
return {
deps: entry.deps,
execute: function() {
// recursively ensure that the module and all its
// dependencies are linked (with dependency group handling)
link(load.name, loader);
// now handle dependency execution in correct order
ensureEvaluated(load.name, [], loader);
// remove from the registry
loader.defined[load.name] = undefined;
// return the defined module object
return loader.newModule(entry.declarative ? entry.module.exports : entry.esModule);
}
};
});
};
});
})();
/*
* Extension to detect ES6 and auto-load Traceur or Babel for processing
*/
(function() {
  // good enough ES6 module detection regex - format detections not designed to be accurate, but to handle the 99% use case
  var esmRegEx = /(^\s*|[}\);\n]\s*)(import\s+(['"]|(\*\s+as\s+)?[^"'\(\)\n;]+\s+from\s+['"]|\{)|export\s+\*\s+from\s+["']|export\s+(\{|default|function|class|var|const|let|async\s+function))/;
  // runtime-helper usage detection, for auto-loading transpiler runtimes
  var traceurRuntimeRegEx = /\$traceurRuntime\s*\./;
  var babelHelpersRegEx = /babelHelpers\s*\./;
  // Detect ES6 source and transpile it; also handles loading the transpiler
  // itself and its runtime helpers with the correct format metadata.
  hook('translate', function(translate) {
    return function(load) {
      var loader = this;
      return translate.call(loader, load)
      .then(function(source) {
        // detect & transpile ES6
        if (load.metadata.format == 'esm' || load.metadata.format == 'es6' || !load.metadata.format && source.match(esmRegEx)) {
          load.metadata.format = 'esm';
          // setting _loadedTranspiler = false tells the next block to
          // do checks for setting transpiler metadata
          loader._loadedTranspiler = loader._loadedTranspiler || false;
          if (loader.pluginLoader)
            loader.pluginLoader._loadedTranspiler = loader._loadedTranspiler || false;
          // defined in es6-module-loader/src/transpile.js
          return transpile.call(loader, load)
          .then(function(source) {
            // clear sourceMap as transpiler embeds it
            load.metadata.sourceMap = undefined;
            return source;
          });
        }
        // load the transpiler correctly
        if (loader._loadedTranspiler === false && load.name == loader.normalizeSync(loader.transpiler)) {
          // always load transpiler as a global
          // (the length check distinguishes real source from a stub)
          if (source.length > 100) {
            load.metadata.format = load.metadata.format || 'global';
            if (loader.transpiler === 'traceur')
              load.metadata.exports = 'traceur';
            if (loader.transpiler === 'typescript')
              load.metadata.exports = 'ts';
          }
          loader._loadedTranspiler = true;
        }
        // load the transpiler runtime correctly
        if (loader._loadedTranspilerRuntime === false) {
          if (load.name == loader.normalizeSync('traceur-runtime')
              || load.name == loader.normalizeSync('babel/external-helpers*')) {
            if (source.length > 100)
              load.metadata.format = load.metadata.format || 'global';
            loader._loadedTranspilerRuntime = true;
          }
        }
        // detect transpiler runtime usage to load runtimes
        if (load.metadata.format == 'register' && loader._loadedTranspilerRuntime !== true) {
          if (!__global.$traceurRuntime && load.source.match(traceurRuntimeRegEx)) {
            loader._loadedTranspilerRuntime = loader._loadedTranspilerRuntime || false;
            return loader['import']('traceur-runtime').then(function() {
              return source;
            });
          }
          if (!__global.babelHelpers && load.source.match(babelHelpersRegEx)) {
            loader._loadedTranspilerRuntime = loader._loadedTranspilerRuntime || false;
            return loader['import']('babel/external-helpers').then(function() {
              return source;
            });
          }
        }
        return source;
      });
    };
  });
})();
/*
SystemJS Global Format
Supports
metadata.deps
metadata.globals
metadata.exports
Without metadata.exports, detects writes to the global object.
*/
// name of the global object as referenced from injected source
var __globalName = typeof self != 'undefined' ? 'self' : 'global';
hook('onScriptLoad', function(onScriptLoad) {
  return function(load) {
    if (load.metadata.format != 'global')
      return onScriptLoad.call(this, load);
    load.metadata.registered = true;
    // capture the exported global value right after the script has run
    var globalValue = readMemberExpression(load.metadata.exports, __global);
    load.metadata.execute = function() {
      return globalValue;
    };
    return onScriptLoad.call(this, load);
  };
});
hook('fetch', function(baseFetch) {
  return function(load) {
    var meta = load.metadata;
    // declared exports imply the global format
    if (meta.exports)
      meta.format = 'global';
    // A global with exports, no globals and no deps
    // can be loaded via a script tag
    var depFree = !meta.deps || meta.deps.length == 0;
    if (meta.format == 'global' && meta.exports && !meta.globals && depFree)
      meta.scriptLoad = true;
    return baseFetch.call(this, load);
  };
});
// ideally we could support script loading for globals, but the issue with that is that
// we can't do it with AMD support side-by-side since AMD support means defining the
// global define, and global support means not definining it, yet we don't have any hook
// into the "pre-execution" phase of a script tag being loaded to handle both cases
// Instantiate for the global format: execute the source with module
// detection disabled and detect the export via the @@global-helpers module.
hook('instantiate', function(instantiate) {
  return function(load) {
    var loader = this;
    // global is the fallback format when nothing else was detected
    if (!load.metadata.format)
      load.metadata.format = 'global';
    // globals shorthand support for:
    // globals = ['Buffer'] where we just require 'Buffer' in the current context
    if (load.metadata.globals) {
      if (load.metadata.globals instanceof Array) {
        var globals = {};
        for (var i = 0; i < load.metadata.globals.length; i++)
          globals[load.metadata.globals[i]] = load.metadata.globals[i];
        load.metadata.globals = globals;
      }
    }
    // global is a fallback module format
    if (load.metadata.format == 'global' && !load.metadata.registered) {
      // the declared globals must load before this module executes
      for (var g in load.metadata.globals)
        load.metadata.deps.push(load.metadata.globals[g]);
      load.metadata.execute = function(require, exports, module) {
        var globals;
        if (load.metadata.globals) {
          globals = {};
          for (var g in load.metadata.globals)
            globals[g] = require(load.metadata.globals[g]);
        }
        var exportName = load.metadata.exports;
        var retrieveGlobal = loader.get('@@global-helpers').prepareGlobal(module.id, exportName, globals);
        // append an explicit assignment so the named export is always set
        if (exportName)
          load.source += '\n' + __globalName + '["' + exportName + '"] = ' + exportName + ';';
        // disable module detection
        var define = __global.define;
        var cRequire = __global.require;
        __global.define = undefined;
        __global.module = undefined;
        __global.exports = undefined;
        __exec.call(loader, load);
        __global.require = cRequire;
        __global.define = define;
        return retrieveGlobal();
      }
    }
    return instantiate.call(this, load);
  };
});
// Installs the @@global-helpers module used by the global format to detect
// what a global script exported, by diffing the global object around its
// execution (or reading an explicitly named export).
hookConstructor(function(constructor) {
  return function() {
    var loader = this;
    constructor.call(loader);
    var hasOwnProperty = Object.prototype.hasOwnProperty;
    // bare minimum ignores for IE8
    var ignoredGlobalProps = ['_g', 'sessionStorage', 'localStorage', 'clipboardData', 'frames', 'external', 'mozAnimationStartTime', 'webkitStorageInfo', 'webkitIndexDB'];
    // snapshot of global property values taken before the script executes
    var globalSnapshot;
    // iterate the enumerable own property names of the global object
    function forEachGlobal(callback) {
      if (Object.keys)
        Object.keys(__global).forEach(callback);
      else
        for (var g in __global) {
          if (!hasOwnProperty.call(__global, g))
            continue;
          callback(g);
        }
    }
    // as forEachGlobal, but also passes the property value; properties whose
    // read throws (host object quirks) are added to the ignore list
    function forEachGlobalValue(callback) {
      forEachGlobal(function(globalName) {
        if (indexOf.call(ignoredGlobalProps, globalName) != -1)
          return;
        try {
          var value = __global[globalName];
        }
        catch (e) {
          ignoredGlobalProps.push(globalName);
        }
        callback(globalName, value);
      });
    }
    loader.set('@@global-helpers', loader.newModule({
      // called before executing a global script; the returned function is
      // called afterwards to retrieve the export value
      prepareGlobal: function(moduleName, exportName, globals) {
        // set globals
        var oldGlobals;
        if (globals) {
          oldGlobals = {};
          for (var g in globals) {
            oldGlobals[g] = globals[g];
            __global[g] = globals[g];
          }
        }
        // store a complete copy of the global object in order to detect changes
        if (!exportName) {
          globalSnapshot = {};
          forEachGlobalValue(function(name, value) {
            globalSnapshot[name] = value;
          });
        }
        // return function to retrieve global
        return function() {
          var globalValue;
          if (exportName) {
            globalValue = readMemberExpression(exportName, __global);
          }
          else {
            // no explicit export: diff the global object against the snapshot
            var singleGlobal;
            var multipleExports;
            var exports = {};
            forEachGlobalValue(function(name, value) {
              if (globalSnapshot[name] === value)
                return;
              if (typeof value == 'undefined')
                return;
              exports[name] = value;
              if (typeof singleGlobal != 'undefined') {
                if (!multipleExports && singleGlobal !== value)
                  multipleExports = true;
              }
              else {
                singleGlobal = value;
              }
            });
            // a single new global is exported directly, multiple as an object
            globalValue = multipleExports ? exports : singleGlobal;
          }
          // revert globals
          if (oldGlobals) {
            for (var g in oldGlobals)
              __global[g] = oldGlobals[g];
          }
          return globalValue;
        };
      }
    }));
  };
});/*
SystemJS CommonJS Format
*/
(function() {
  // CJS Module Format
  // require('...') || exports[''] = ... || exports.asd = ... || module.exports = ...
  var cjsExportsRegEx = /(?:^\uFEFF?|[^$_a-zA-Z\xA0-\uFFFF.]|module\.)exports\s*(\[['"]|\.)|(?:^\uFEFF?|[^$_a-zA-Z\xA0-\uFFFF.])module\.exports\s*[=,]/;
  // RegEx adjusted from https://github.com/jbrantly/yabble/blob/master/lib/yabble.js#L339
  var cjsRequireRegEx = /(?:^\uFEFF?|[^$_a-zA-Z\xA0-\uFFFF."'])require\s*\(\s*("[^"\\]*(?:\\.[^"\\]*)*"|'[^'\\]*(?:\\.[^'\\]*)*')\s*\)/g;
  // strips JS comments before require detection
  var commentRegEx = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg;
function getCJSDeps(source) {
cjsRequireRegEx.lastIndex = 0;
var deps = [];
// remove comments from the source first, if not minified
if (source.length / source.split('\n').length < 200)
source = source.replace(commentRegEx, '');
var match;
while (match = cjsRequireRegEx.exec(source))
deps.push(match[1].substr(1, match[1].length - 2));
return deps;
}
if (typeof window != 'undefined' && typeof document != 'undefined' && window.location)
var windowOrigin = location.protocol + '//' + location.hostname + (location.port ? ':' + location.port : '');
hookConstructor(function(constructor) {
return function() {
constructor.call(this);
// include the node require since we're overriding it
if (typeof require != 'undefined' && require.resolve && typeof process != 'undefined')
this._nodeRequire = require;
};
});
  // Instantiate for the CJS format: detect it from require/exports usage,
  // then execute the source inside a NodeJS-style module wrapper.
  hook('instantiate', function(instantiate) {
    return function(load) {
      var loader = this;
      if (!load.metadata.format) {
        cjsExportsRegEx.lastIndex = 0;
        cjsRequireRegEx.lastIndex = 0;
        if (cjsRequireRegEx.exec(load.source) || cjsExportsRegEx.exec(load.source))
          load.metadata.format = 'cjs';
      }
      if (load.metadata.format == 'cjs') {
        var metaDeps = load.metadata.deps || [];
        load.metadata.deps = metaDeps.concat(getCJSDeps(load.source));
        for (var g in load.metadata.globals)
          load.metadata.deps.push(load.metadata.globals[g]);
        // requires drive execution, NodeJS-style
        load.metadata.executingRequire = true;
        load.metadata.execute = function(require, exports, module) {
          // ensure meta deps execute first
          for (var i = 0; i < metaDeps.length; i++)
            require(metaDeps[i]);
          // derive __filename / __dirname from the load address
          var address = load.address || '';
          var dirname = address.split('/');
          dirname.pop();
          dirname = dirname.join('/');
          if (windowOrigin && address.substr(0, windowOrigin.length) === windowOrigin) {
            address = address.substr(windowOrigin.length);
            dirname = dirname.substr(windowOrigin.length);
          }
          else if (address.substr(0, 8) == 'file:///') {
            address = address.substr(7);
            dirname = dirname.substr(7);
            // on windows remove leading '/'
            if (isWindows) {
              address = address.substr(1);
              dirname = dirname.substr(1);
            }
          }
          // disable AMD detection
          var define = __global.define;
          __global.define = undefined;
          __global.__cjsWrapper = {
            exports: exports,
            args: [require, exports, module, address, dirname, __global]
          };
          // inject declared globals as local require'd variables
          var globals = '';
          if (load.metadata.globals) {
            for (var g in load.metadata.globals)
              globals += 'var ' + g + ' = require("' + load.metadata.globals[g] + '");';
          }
          // wrap in the NodeJS module wrapper and execute
          load.source = "(function(require, exports, module, __filename, __dirname, global) {" + globals
              + load.source + "\n}).apply(__cjsWrapper.exports, __cjsWrapper.args);";
          __exec.call(loader, load);
          __global.__cjsWrapper = undefined;
          __global.define = define;
        };
      }
      return instantiate.call(loader, load);
    };
  });
})();
/*
* AMD Helper function module
* Separated into its own file as this is the part needed for full AMD support in SFX builds
*
*/
// AMD helper module: installs @@amd-helpers providing define/require and the
// shared state used by the AMD format extension.
hookConstructor(function(constructor) {
  return function() {
    var loader = this;
    constructor.call(this);
    // regexes for tracing CommonJS-style require calls in AMD factories
    var commentRegEx = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg;
    var cjsRequirePre = "(?:^|[^$_a-zA-Z\\xA0-\\uFFFF.])";
    var cjsRequirePost = "\\s*\\(\\s*(\"([^\"]+)\"|'([^']+)')\\s*\\)";
    var fnBracketRegEx = /\(([^\)]*)\)/;
    var wsRegEx = /^\s+|\s+$/g;
    // cache of compiled require regexes, keyed by require alias
    var requireRegExs = {};
function getCJSDeps(source, requireIndex) {
// remove comments
source = source.replace(commentRegEx, '');
// determine the require alias
var params = source.match(fnBracketRegEx);
var requireAlias = (params[1].split(',')[requireIndex] || 'require').replace(wsRegEx, '');
// find or generate the regex for this requireAlias
var requireRegEx = requireRegExs[requireAlias] || (requireRegExs[requireAlias] = new RegExp(cjsRequirePre + requireAlias + cjsRequirePost, 'g'));
requireRegEx.lastIndex = 0;
var deps = [];
var match;
while (match = requireRegEx.exec(source))
deps.push(match[2] || match[3]);
return deps;
}
/*
AMD-compatible require
To copy RequireJS, set window.require = window.requirejs = loader.amdRequire
*/
function require(names, callback, errback, referer) {
// in amd, first arg can be a config object... we just ignore
if (typeof names == 'object' && !(names instanceof Array))
return require.apply(null, Array.prototype.splice.call(arguments, 1, arguments.length - 1));
// amd require
if (typeof names == 'string' && typeof callback == 'function')
names = [names];
if (names instanceof Array) {
var dynamicRequires = [];
for (var i = 0; i < names.length; i++)
dynamicRequires.push(loader['import'](names[i], referer));
Promise.all(dynamicRequires).then(function(modules) {
if (callback)
callback.apply(null, modules);
}, errback);
}
// commonjs require
else if (typeof names == 'string') {
var module = loader.get(loader.normalizeSync(names, referer));
if (!module)
throw new Error('Module not already loaded loading "' + names + '" from "' + referer + '".');
return module.__useDefault ? module['default'] : module;
}
else
throw new TypeError('Invalid require');
}
function define(name, deps, factory) {
if (typeof name != 'string') {
factory = deps;
deps = name;
name = null;
}
if (!(deps instanceof Array)) {
factory = deps;
deps = ['require', 'exports', 'module'].splice(0, factory.length);
}
if (typeof factory != 'function')
factory = (function(factory) {
return function() { return factory; }
})(factory);
// in IE8, a trailing comma becomes a trailing undefined entry
if (deps[deps.length - 1] === undefined)
deps.pop();
// remove system dependencies
var requireIndex, exportsIndex, moduleIndex;
if ((requireIndex = indexOf.call(deps, 'require')) != -1) {
deps.splice(requireIndex, 1);
// only trace cjs requires for non-named
// named defines assume the trace has already been done
if (!name)
deps = deps.concat(getCJSDeps(factory.toString(), requireIndex));
}
if ((exportsIndex = indexOf.call(deps, 'exports')) != -1)
deps.splice(exportsIndex, 1);
if ((moduleIndex = indexOf.call(deps, 'module')) != -1)
deps.splice(moduleIndex, 1);
var define = {
name: name,
deps: deps,
execute: function(req, exports, module) {
var depValues = [];
for (var i = 0; i < deps.length; i++)
depValues.push(req(deps[i]));
module.uri = module.id;
module.config = function() {};
// add back in system dependencies
if (moduleIndex != -1)
depValues.splice(moduleIndex, 0, module);
if (exportsIndex != -1)
depValues.splice(exportsIndex, 0, exports);
if (requireIndex != -1) {
function contextualRequire(names, callback, errback) {
if (typeof names == 'string' && typeof callback != 'function')
return req(names);
return require.call(loader, names, callback, errback, module.id);
}
contextualRequire.toUrl = function(name) {
// normalize without defaultJSExtensions
var defaultJSExtension = loader.defaultJSExtensions && name.substr(name.length - 3, 3) != '.js';
var url = loader.normalizeSync(name, module.id);
if (defaultJSExtension && url.substr(url.length - 3, 3) == '.js')
url = url.substr(0, url.length - 3);
return url;
};
depValues.splice(requireIndex, 0, contextualRequire);
}
// set global require to AMD require
var curRequire = __global.require;
__global.require = require;
var output = factory.apply(exportsIndex == -1 ? __global : exports, depValues);
__global.require = curRequire;
if (typeof output == 'undefined' && module)
output = module.exports;
if (typeof output != 'undefined')
return output;
}
};
// anonymous define
if (!name) {
// already defined anonymously -> throw
if (lastModule.anonDefine)
throw new TypeError('Multiple defines for anonymous module');
lastModule.anonDefine = define;
}
// named define
else {
// if it has no dependencies and we don't have any other
// defines, then let this be an anonymous define
// this is just to support single modules of the form:
// define('jquery')
// still loading anonymously
// because it is done widely enough to be useful
if (deps.length == 0 && !lastModule.anonDefine && !lastModule.isBundle) {
lastModule.anonDefine = define;
}
// otherwise its a bundle only
else {
// if there is an anonDefine already (we thought it could have had a single named define)
// then we define it now
// this is to avoid defining named defines when they are actually anonymous
if (lastModule.anonDefine && lastModule.anonDefine.name)
loader.registerDynamic(lastModule.anonDefine.name, lastModule.anonDefine.deps, false, lastModule.anonDefine.execute);
lastModule.anonDefine = null;
}
// note this is now a bundle
lastModule.isBundle = true;
// define the module through the register registry
loader.registerDynamic(name, define.deps, false, define.execute);
}
}
define.amd = {};
// adds define as a global (potentially just temporarily)
function createDefine(loader) {
lastModule.anonDefine = null;
lastModule.isBundle = false;
// ensure no NodeJS environment detection
var oldModule = __global.module;
var oldExports = __global.exports;
var oldDefine = __global.define;
__global.module = undefined;
__global.exports = undefined;
__global.define = define;
return function() {
__global.define = oldDefine;
__global.module = oldModule;
__global.exports = oldExports;
};
}
    // shared define/bundle detection state for the AMD hooks
    var lastModule = {
      isBundle: false,
      anonDefine: null
    };
    loader.set('@@amd-helpers', loader.newModule({
      createDefine: createDefine,
      require: require,
      define: define,
      lastModule: lastModule
    }));
    // exposed for RequireJS-style global usage
    loader.amdDefine = define;
    loader.amdRequire = require;
  };
});/*
SystemJS AMD Format
Provides the AMD module format definition at System.format.amd
as well as a RequireJS-style require on System.require
*/
(function() {
  // AMD Module Format Detection RegEx
  // define([.., .., ..], ...)
  // define(varName); || define(function(require, exports) {}); || define({})
  // (the leading module name and the dependency array are both optional)
  var amdRegEx = /(?:^\uFEFF?|[^$_a-zA-Z\xA0-\uFFFF.])define\s*\(\s*("[^"]+"\s*,\s*|'[^']+'\s*,\s*)?\s*(\[(\s*(("[^"]+"|'[^']+')\s*,|\/\/.*\r?\n|\/\*(.|\s)*?\*\/))*(\s*("[^"]+"|'[^']+')\s*,?)?(\s*(\/\/.*\r?\n|\/\*(.|\s)*?\*\/))*\s*\]|function\s*|{|[_$a-zA-Z\xA0-\uFFFF][_$a-zA-Z0-9\xA0-\uFFFF]*\))/;
// script injection mode calls this function synchronously on load
hook('onScriptLoad', function(onScriptLoad) {
return function(load) {
onScriptLoad.call(this, load);
var lastModule = this.get('@@amd-helpers').lastModule;
if (lastModule.anonDefine || lastModule.isBundle) {
load.metadata.format = 'defined';
load.metadata.registered = true;
lastModule.isBundle = false;
}
if (lastModule.anonDefine) {
load.metadata.deps = load.metadata.deps ? load.metadata.deps.concat(lastModule.anonDefine.deps) : lastModule.anonDefine.deps;
load.metadata.execute = lastModule.anonDefine.execute;
lastModule.anonDefine = null;
}
};
});
hook('fetch', function(fetch) {
return function(load) {
if (load.metadata.format === 'amd')
load.metadata.scriptLoad = true;
if (load.metadata.scriptLoad)
this.get('@@amd-helpers').createDefine(this);
return fetch.call(this, load);
};
});
  // Instantiate for the AMD format: execute the source with the AMD define
  // in place, then adopt the resulting anonymous define's deps and execute.
  hook('instantiate', function(instantiate) {
    return function(load) {
      var loader = this;
      if (load.metadata.format == 'amd' || !load.metadata.format && load.source.match(amdRegEx)) {
        load.metadata.format = 'amd';
        // in builds, execution is deferred to the builder
        if (!loader.builder || loader.execute === false) {
          var removeDefine = this.get('@@amd-helpers').createDefine(loader);
          __exec.call(loader, load);
          removeDefine(loader);
          var lastModule = this.get('@@amd-helpers').lastModule;
          if (!lastModule.anonDefine && !lastModule.isBundle)
            throw new TypeError('AMD module ' + load.name + ' did not define');
          if (lastModule.anonDefine) {
            load.metadata.deps = load.metadata.deps ? load.metadata.deps.concat(lastModule.anonDefine.deps) : lastModule.anonDefine.deps;
            load.metadata.execute = lastModule.anonDefine.execute;
          }
          lastModule.isBundle = false;
          lastModule.anonDefine = null;
        }
        else {
          load.metadata.execute = function() {
            return load.metadata.builderExecute();
          };
        }
        return instantiate.call(loader, load);
      }
      return instantiate.call(loader, load);
    };
  });
})();
/*
SystemJS map support
Provides map configuration through
System.map['jquery'] = 'some/module/map'
Note that this applies for subpaths, just like RequireJS:
jquery -> 'some/module/map'
jquery/path -> 'some/module/map/path'
bootstrap -> 'bootstrap'
The most specific map is always taken, as longest path length
*/
// Initialize the per-loader global map configuration (System.map).
hookConstructor(function(baseConstructor) {
  return function() {
    baseConstructor.call(this);
    // module-name prefix -> replacement prefix
    this.map = {};
  };
});
// Apply the global map configuration before normalization. Only plain
// names (not relative, not absolute URLs) are mapped; the most specific
// (longest, '/'-bounded) map entry wins, exactly as in RequireJS.
hook('normalize', function(normalize) {
  return function(name, parentName, parentAddress) {
    var isPlain = name.substr(0, 1) != '.' && name.substr(0, 1) != '/' && !name.match(absURLRegEx);
    if (isPlain) {
      var match, matchDepth = 0;
      for (var prefix in this.map) {
        var bounded = name.substr(0, prefix.length) == prefix &&
            (name.length == prefix.length || name[prefix.length] == '/');
        if (!bounded)
          continue;
        var depth = prefix.split('/').length;
        if (depth > matchDepth) {
          match = prefix;
          matchDepth = depth;
        }
      }
      if (match)
        name = this.map[match] + name.substr(match.length);
    }
    return normalize.call(this, name, parentName, parentAddress);
  };
});
/*
* Paths extension
*
* Applies paths and normalizes to a full URL
*/
// Resolve a (possibly path-mapped) name to a full URL address.
// Precedence: registry hit > absolute URL > paths config > base URL.
hook('normalize', function(normalize) {
  return function(name, parentName) {
    var normalized = normalize.call(this, name, parentName);
    // if the module is in the registry already, use that
    if (this.has(normalized))
      return normalized;
    if (normalized.match(absURLRegEx)) {
      // defaultJSExtensions backwards compatibility
      if (this.defaultJSExtensions && normalized.substr(normalized.length - 3, 3) != '.js')
        normalized += '.js';
      return normalized;
    }
    // applyPaths implementation provided from ModuleLoader system.js source
    normalized = applyPaths(this.paths, normalized) || normalized;
    // defaultJSExtensions backwards compatibility
    if (this.defaultJSExtensions && normalized.substr(normalized.length - 3, 3) != '.js')
      normalized += '.js';
    // ./x, /x -> page-relative
    if (normalized[0] == '.' || normalized[0] == '/')
      return new URL(normalized, baseURIObj).href;
    // x -> baseURL-relative
    else
      return new URL(normalized, getBaseURLObj.call(this)).href;
  };
});/*
* Package Configuration Extension
*
* Example:
*
* System.packages = {
* jquery: {
* main: 'index.js', // when not set, package name is requested directly
* format: 'amd',
* defaultExtension: 'js',
* meta: {
* '*.ts': {
* loader: 'typescript'
* },
* 'vendor/sizzle.js': {
* format: 'global'
* }
* },
* map: {
* // map internal require('sizzle') to local require('./vendor/sizzle')
* sizzle: './vendor/sizzle.js',
* // map any internal or external require of 'jquery/vendor/another' to 'another/index.js'
* './vendor/another.js': './another/index.js',
* // test.js / test -> lib/test.js
* './test.js': './lib/test.js',
* },
* env: {
* 'browser': {
* main: 'browser.js'
* }
* }
* }
* };
*
* Then:
* import 'jquery' -> jquery/index.js
* import 'jquery/submodule' -> jquery/submodule.js
* import 'jquery/submodule.ts' -> jquery/submodule.ts loaded as typescript
* import 'jquery/vendor/another' -> another/index.js
*
* Detailed Behaviours
* - main is the only property where a leading "./" can be added optionally
* - map and defaultExtension are applied to the main
* - defaultExtension adds the extension only if no other extension is present
* - defaultJSExtensions applies after map when defaultExtension is not set
* - if a meta value is available for a module, map and defaultExtension are skipped
* - like global map, package map also applies to subpaths (sizzle/x, ./vendor/another/sub)
*
* In addition, the following meta properties will be allowed to be package
* -relative as well in the package meta config:
*
* - loader
* - alias
*
*/
(function() {
// Initialize per-loader package configuration (System.packages).
hookConstructor(function(baseConstructor) {
  return function() {
    baseConstructor.call(this);
    // package name -> package configuration object
    this.packages = {};
  };
});
// Return the name of the configured package that owns the given
// normalized module name, or undefined when no package matches.
// A package pkgName owns name when name is exactly pkgName or a
// '/'-bounded subpath of it. Called with the loader as `this`.
function getPackage(name) {
  var pkgName;
  for (pkgName in this.packages) {
    var isPrefix = name.substr(0, pkgName.length) === pkgName;
    var onBoundary = name.length === pkgName.length || name[pkgName.length] === '/';
    if (isPrefix && onBoundary)
      return pkgName;
  }
}
// Resolve the effective configuration for a package, applying any
// environment-conditional overrides (pkgConfig.env). Returns a promise
// for the derived config; packages without env config resolve directly.
function getPackageConfig(loader, pkgName) {
  var pkgConfig = loader.packages[pkgName];
  if (!pkgConfig.env)
    return Promise.resolve(pkgConfig);
  // check environment conditions
  // default environment condition is '@env' in package or '@system-env' globally
  return loader['import'](pkgConfig.map['@env'] || '@system-env', pkgName)
  .then(function(env) {
    // derived config object: copy everything except map and env
    var pkg = {};
    for (var p in pkgConfig)
      // fixed: was bitwise `&` — worked only by boolean-to-number coercion
      if (p !== 'map' && p !== 'env')
        pkg[p] = pkgConfig[p];
    pkg.map = {};
    for (var p in pkgConfig.map)
      pkg.map[p] = pkgConfig.map[p];
    // overlay the config of every matching environment condition
    for (var e in pkgConfig.env) {
      if (env[e]) {
        var envConfig = pkgConfig.env[e];
        if (envConfig.main)
          pkg.main = envConfig.main;
        for (var m in envConfig.map)
          pkg.map[m] = envConfig.map[m];
      }
    }
    // store the derived environment config so we have this cached for next time
    loader.packages[pkgName] = pkg;
    return pkg;
  });
}
// Apply a map configuration object to a module name. The most specific
// matching entry (most path segments, exact or '/'-bounded prefix) wins;
// returns the remapped name, or undefined when nothing matches.
function applyMap(map, name) {
  var match, matchDepth = 0;
  for (var candidate in map) {
    var prefixed = name.substr(0, candidate.length) == candidate &&
        (name.length == candidate.length || name[candidate.length] == '/');
    if (!prefixed)
      continue;
    var depth = candidate.split('/').length;
    if (depth > matchDepth) {
      match = candidate;
      matchDepth = depth;
    }
  }
  if (match)
    return map[match] + name.substr(match.length);
}
// Capture the current normalize as normalizeSync before the async
// package-config hook below replaces it — presumably so the sync path
// skips the promise-returning package resolution; confirm against callers.
SystemJSLoader.prototype.normalizeSync = SystemJSLoader.prototype.normalize;
// Package-aware normalize: applies the parent package's contextual map,
// global map/relative normalization, then (asynchronously) the owning
// package's main / defaultExtension / submap configuration.
hook('normalize', function(normalize) {
  return function(name, parentName) {
    // apply contextual package map first
    if (parentName) {
      // also match the parent with a stripped defaultJSExtensions '.js'
      var parentPackage = getPackage.call(this, parentName) ||
          this.defaultJSExtensions && parentName.substr(parentName.length - 3, 3) == '.js' &&
          getPackage.call(this, parentName.substr(0, parentName.length - 3));
    }
    if (parentPackage && name[0] !== '.') {
      var parentMap = this.packages[parentPackage].map;
      if (parentMap) {
        name = applyMap(parentMap, name) || name;
        // relative maps are package-relative
        if (name[0] === '.')
          parentName = parentPackage + '/';
      }
    }
    // whether defaultJSExtensions would append '.js' to this name
    var defaultJSExtension = this.defaultJSExtensions && name.substr(name.length - 3, 3) != '.js';
    // apply global map, relative normalization
    var normalized = normalize.call(this, name, parentName);
    // undo defaultJSExtension
    if (normalized.substr(normalized.length - 3, 3) != '.js')
      defaultJSExtension = false;
    if (defaultJSExtension)
      normalized = normalized.substr(0, normalized.length - 3);
    // check if we are inside a package
    var pkgName = getPackage.call(this, normalized);
    var loader = this;
    if (pkgName) {
      return getPackageConfig(this, pkgName)
      .then(function(pkg) {
        // main
        if (pkgName === normalized && pkg.main)
          normalized += '/' + (pkg.main.substr(0, 2) == './' ? pkg.main.substr(2) : pkg.main);
        if (normalized.substr(pkgName.length) == '/')
          return normalized;
        // defaultExtension & defaultJSExtension
        // if we have meta for this package, don't do defaultExtensions
        var defaultExtension = '';
        if (!pkg.meta || !pkg.meta[normalized.substr(pkgName.length + 1)]) {
          // apply defaultExtension
          if ('defaultExtension' in pkg) {
            if (pkg.defaultExtension !== false && normalized.split('/').pop().lastIndexOf('.') == -1)
              defaultExtension = '.' + pkg.defaultExtension;
          }
          // apply defaultJSExtensions if defaultExtension not set
          else if (defaultJSExtension) {
            defaultExtension = '.js';
          }
        }
        // apply submap checking without then with defaultExtension
        var subPath = '.' + normalized.substr(pkgName.length);
        var mapped = applyMap(pkg.map, subPath) || defaultExtension && applyMap(pkg.map, subPath + defaultExtension);
        if (mapped)
          // './'-relative targets stay inside the package; others re-normalize
          normalized = mapped.substr(0, 2) == './' ? pkgName + mapped.substr(1) : normalize.call(loader, mapped);
        else
          normalized += defaultExtension;
        return normalized;
      });
    }
    // add back defaultJSExtension if not a package
    if (defaultJSExtension)
      normalized += '.js';
    return normalized;
  };
});
// Package-aware locate: after the standard locate resolves an address,
// apply the owning package's format, loader, and (wildcard + exact) meta
// configuration onto the load record.
hook('locate', function(locate) {
  return function(load) {
    var loader = this;
    return Promise.resolve(locate.call(this, load))
      .then(function(address) {
        var pkgName = getPackage.call(loader, load.name);
        if (pkgName) {
          var pkg = loader.packages[pkgName];
          // format
          if (pkg.format)
            load.metadata.format = load.metadata.format || pkg.format;
          // loader
          if (pkg.loader)
            load.metadata.loader = load.metadata.loader || pkg.loader;
          if (pkg.meta) {
            // wildcard meta: deeper (more path segments) patterns override
            var meta = {};
            var bestDepth = 0;
            var wildcardIndex;
            for (var module in pkg.meta) {
              wildcardIndex = module.indexOf('*');
              if (wildcardIndex === -1)
                continue;
              if (module.substr(0, wildcardIndex) === load.name.substr(0, wildcardIndex)
                  && module.substr(wildcardIndex + 1) === load.name.substr(load.name.length - module.length + wildcardIndex + 1)) {
                var depth = module.split('/').length;
                if (depth > bestDepth)
                  bestDepth = depth; // fixed: was `bestDetph`, an undeclared-global typo
                extendMeta(meta, pkg.meta[module], bestDepth != depth);
              }
            }
            // exact meta always overrides wildcard meta
            var exactMeta = pkg.meta[load.name.substr(pkgName.length + 1)];
            if (exactMeta)
              extendMeta(meta, exactMeta);
            // allow alias and loader to be package-relative
            if (meta.alias && meta.alias.substr(0, 2) == './')
              meta.alias = pkgName + meta.alias.substr(1);
            if (meta.loader && meta.loader.substr(0, 2) == './')
              meta.loader = pkgName + meta.loader.substr(1);
            extendMeta(load.metadata, meta);
          }
        }
        return address;
      });
  };
});
})();/*
SystemJS Loader Plugin Support
Supports plugin loader syntax with "!", or via metadata.loader
The plugin name is loaded as a module itself, and can override standard loader hooks
for the plugin resource. See the plugin section of the systemjs readme.
*/
(function() {
// sync or async plugin normalize function
// Shared sync/async plugin-aware normalization for 'argument!plugin' names.
// Normalizes the argument and the plugin name independently against the
// (plugin-stripped) parent, preserving the '!' syntax in the result.
function normalizePlugin(normalize, name, parentName, sync) {
  var loader = this;
  // if parent is a plugin, normalize against the parent plugin argument only
  var parentPluginIndex;
  if (parentName && (parentPluginIndex = parentName.indexOf('!')) != -1)
    parentName = parentName.substr(0, parentPluginIndex);
  // if this is a plugin, normalize the plugin name and the argument
  var pluginIndex = name.lastIndexOf('!');
  if (pluginIndex != -1) {
    var argumentName = name.substr(0, pluginIndex);
    // an empty plugin name defaults to the argument's file extension
    var pluginName = name.substr(pluginIndex + 1) || argumentName.substr(argumentName.lastIndexOf('.') + 1);
    // note if normalize will add a default js extension
    // if so, remove for backwards compat
    // this is strange and sucks, but will be deprecated
    var defaultExtension = loader.defaultJSExtensions && argumentName.substr(argumentName.length - 3, 3) != '.js';
    if (sync) {
      argumentName = loader.normalizeSync(argumentName, parentName);
      pluginName = loader.normalizeSync(pluginName, parentName);
      if (defaultExtension && argumentName.substr(argumentName.length - 3, 3) == '.js')
        argumentName = argumentName.substr(0, argumentName.length - 3);
      return argumentName + '!' + pluginName;
    }
    else {
      return Promise.all([
        loader.normalize(argumentName, parentName),
        loader.normalize(pluginName, parentName)
      ])
      .then(function(normalized) {
        argumentName = normalized[0];
        if (defaultExtension && argumentName.substr(argumentName.length - 3, 3) == '.js')
          argumentName = argumentName.substr(0, argumentName.length - 3);
        return argumentName + '!' + normalized[1];
      });
    }
  }
  else {
    // not a plugin name: plain normalization
    return normalize.call(loader, name, parentName);
  }
}
// Route both the async and the sync normalize through the shared
// plugin-aware implementation; the final argument selects sync mode.
hook('normalize', function(baseNormalize) {
  return function(name, parentName) {
    return normalizePlugin.call(this, baseNormalize, name, parentName, false);
  };
});
hook('normalizeSync', function(baseNormalizeSync) {
  return function(name, parentName) {
    return normalizePlugin.call(this, baseNormalizeSync, name, parentName, true);
  };
});
// Plugin-aware locate: strip '!plugin' syntax onto load.metadata.loader,
// locate the bare resource, then import the plugin module and give its
// locate() (if any) a chance to override the address.
hook('locate', function(locate) {
  return function(load) {
    var loader = this;
    var name = load.name;
    // plugin syntax
    var pluginSyntaxIndex = name.lastIndexOf('!');
    if (pluginSyntaxIndex != -1) {
      load.metadata.loader = name.substr(pluginSyntaxIndex + 1);
      load.name = name.substr(0, pluginSyntaxIndex);
    }
    return locate.call(loader, load)
    .then(function(address) {
      var plugin = load.metadata.loader;
      if (!plugin)
        return address;
      // only fetch the plugin itself if this name isn't defined
      if (loader.defined && loader.defined[name])
        return address;
      // plugins may run in a separate loader when configured
      var pluginLoader = loader.pluginLoader || loader;
      // load the plugin module and run standard locate
      return pluginLoader['import'](plugin)
      .then(function(loaderModule) {
        // store the plugin module itself on the metadata
        load.metadata.loaderModule = loaderModule;
        load.metadata.loaderArgument = name;
        load.address = address;
        if (loaderModule.locate)
          return loaderModule.locate.call(loader, load);
        return address;
      });
    });
  };
});
// Plugin-aware fetch: when the plugin module provides a fetch() override,
// delegate to it (passing the default fetch as a fallback); script
// injection cannot apply to plugin-fetched sources.
hook('fetch', function(fetch) {
  return function(load) {
    var loader = this;
    var pluginModule = load.metadata.loaderModule;
    if (!pluginModule || !pluginModule.fetch)
      return fetch.call(loader, load);
    load.metadata.scriptLoad = false;
    return pluginModule.fetch.call(loader, load, function(load) {
      return fetch.call(loader, load);
    });
  };
});
// Plugin-aware translate: run the plugin's translate() first (a string
// result replaces load.source), then continue with the standard translate.
hook('translate', function(translate) {
  return function(load) {
    var loader = this;
    var pluginModule = load.metadata.loaderModule;
    if (!pluginModule || !pluginModule.translate)
      return translate.call(loader, load);
    return Promise.resolve(pluginModule.translate.call(loader, load)).then(function(result) {
      if (typeof result == 'string')
        load.source = result;
      return translate.call(loader, load);
    });
  };
});
// Plugin-aware instantiate: sanitize any object-valued source map set by a
// translation step, then let the plugin's instantiate() (if any) provide
// the module value directly.
hook('instantiate', function(instantiate) {
  return function(load) {
    var loader = this;
    /*
     * Source map sanitization for load.metadata.sourceMap
     * Used to set browser and build-level source maps for
     * translated sources in a general way.
     */
    var sourceMap = load.metadata.sourceMap;
    // if an object not a JSON string do sanitizing
    if (sourceMap && typeof sourceMap == 'object') {
      var originalName = load.name.split('!')[0];
      // force set the filename of the original file
      sourceMap.file = originalName + '!transpiled';
      // force set the sources list if only one source
      if (!sourceMap.sources || sourceMap.sources.length == 1)
        sourceMap.sources = [originalName];
      load.metadata.sourceMap = JSON.stringify(sourceMap);
    }
    if (load.metadata.loaderModule && load.metadata.loaderModule.instantiate)
      return Promise.resolve(load.metadata.loaderModule.instantiate.call(loader, load)).then(function(result) {
        // the plugin's result becomes the module value
        load.metadata.format = 'defined';
        load.metadata.execute = function() {
          return result;
        };
        return instantiate.call(loader, load);
      });
    else
      return instantiate.call(loader, load);
  };
});
})();
/*
* Alias Extension
*
* Allows a module to be a plain copy of another module by module name
*
* System.meta['mybootstrapalias'] = { alias: 'bootstrap' };
*
*/
(function() {
  // aliases: when load.metadata.alias is set, register the module as a
  // declarative module that re-exports every export of the aliased module
  // (plus any extra metadata deps) instead of fetching any source.
  hook('fetch', function(fetch) {
    return function(load) {
      var alias = load.metadata.alias;
      var aliasDeps = load.metadata.deps || [];
      if (alias) {
        load.metadata.format = 'defined';
        this.defined[load.name] = {
          declarative: true,
          deps: aliasDeps.concat([alias]),
          declare: function(_export) {
            return {
              setters: [function(module) {
                // forward every export of the aliased module
                for (var p in module)
                  _export(p, module[p]);
              }],
              execute: function() {}
            };
          }
        };
        // returning '' short-circuits the real fetch
        return '';
      }
      return fetch.call(this, load);
    };
  });
})();/*
* Meta Extension
*
* Sets default metadata on a load record (load.metadata) from
* loader.metadata via System.meta function.
*
*
* Also provides an inline meta syntax for module meta in source.
*
* Eg:
*
* loader.meta({
* 'my/module': { deps: ['jquery'] }
* 'my/*': { format: 'amd' }
* });
*
* Which in turn populates loader.metadata.
*
* load.metadata.deps and load.metadata.format will then be set
* for 'my/module'
*
* The same meta could be set with a my/module.js file containing:
*
* my/module.js
* "format amd";
* "deps[] jquery";
* "globals.some value"
* console.log('this is my/module');
*
* Configuration meta always takes preference to inline meta.
*
* Multiple matches in wildcards are supported and ammend the meta.
*
*
* The benefits of the function form is that paths are URL-normalized
* supporting say
*
* loader.meta({ './app': { format: 'cjs' } });
*
* Instead of needing to set against the absolute URL (https://site.com/app.js)
*
*/
(function() {
// Initialize the meta configuration map (module name or wildcard ->
// metadata object) before running the base constructor.
hookConstructor(function(baseConstructor) {
  return function() {
    this.meta = {};
    baseConstructor.call(this);
  };
});
// Apply configured meta (wildcard patterns first, then exact matches)
// onto the load record before locating. Deeper wildcard patterns
// (more path segments) take precedence over shallower ones.
hook('locate', function(locate) {
  return function(load) {
    var meta = this.meta;
    var name = load.name;
    // NB for perf, maybe introduce a fast-path wildcard lookup cache here
    // which is checked first
    // apply wildcard metas
    var bestDepth = 0;
    var wildcardIndex;
    for (var module in meta) {
      wildcardIndex = indexOf.call(module, '*');
      if (wildcardIndex === -1)
        continue;
      if (module.substr(0, wildcardIndex) === name.substr(0, wildcardIndex)
          && module.substr(wildcardIndex + 1) === name.substr(name.length - module.length + wildcardIndex + 1)) {
        var depth = module.split('/').length;
        if (depth > bestDepth)
          bestDepth = depth; // fixed: was `bestDetph`, an undeclared-global typo
        extendMeta(load.metadata, meta[module], bestDepth != depth);
      }
    }
    // apply exact meta
    if (meta[name])
      extendMeta(load.metadata, meta[name]);
    return locate.call(this, load);
  };
});
// detect any meta header syntax
// only set if not already set
var metaRegEx = /^(\s*\/\*[\s\S]*?\*\/|\s*\/\/[^\n]*|\s*"[^"]+"\s*;?|\s*'[^']+'\s*;?)+/;
var metaPartRegEx = /\/\*[\s\S]*?\*\/|\/\/[^\n]*|"[^"]+"\s*;?|'[^']+'\s*;?/g;
// Set a dotted property path (e.g. 'globals.jQuery') on target, creating
// intermediate objects as needed. Existing leaf values are never
// overwritten — configuration meta takes precedence over inline meta.
function setMetaProperty(target, p, value) {
  var parts = p.split('.');
  var obj = target;
  for (var i = 0; i < parts.length - 1; i++) {
    var part = parts[i];
    obj = obj[part] = obj[part] || {};
  }
  var leaf = parts[parts.length - 1];
  if (!(leaf in obj))
    obj[leaf] = value;
}
// Parse inline meta headers ("format amd"; "deps[] jquery"; ...) from the
// top of the source and apply them to load.metadata, without overriding
// values already set via configuration.
hook('translate', function(translate) {
  return function(load) {
    // NB meta will be post-translate pending transpiler conversion to plugins
    var meta = load.source.match(metaRegEx);
    if (meta) {
      var metaParts = meta[0].match(metaPartRegEx);
      for (var i = 0; i < metaParts.length; i++) {
        var curPart = metaParts[i];
        var len = curPart.length;
        var firstChar = curPart.substr(0, 1);
        // drop a trailing semicolon from the matched part
        if (curPart.substr(len - 1, 1) == ';')
          len--;
        // skip comment parts; only quoted strings carry meta
        if (firstChar != '"' && firstChar != "'")
          continue;
        // strip the surrounding quotes
        var metaString = curPart.substr(1, curPart.length - 3);
        var metaName = metaString.substr(0, metaString.indexOf(' '));
        if (metaName) {
          var metaValue = metaString.substr(metaName.length + 1, metaString.length - metaName.length - 1);
          // "name[]" accumulates values into an array
          if (metaName.substr(metaName.length - 2, 2) == '[]') {
            metaName = metaName.substr(0, metaName.length - 2);
            load.metadata[metaName] = load.metadata[metaName] || [];
          }
          // temporary backwards compat for previous "deps" syntax
          if (load.metadata[metaName] instanceof Array)
            load.metadata[metaName].push(metaValue);
          else
            setMetaProperty(load.metadata, metaName, metaValue);
        }
      }
    }
    return translate.call(this, load);
  };
});
})();/*
System bundles
Allows a bundle module to be specified which will be dynamically
loaded before trying to load a given module.
For example:
System.bundles['mybundle'] = ['jquery', 'bootstrap/js/bootstrap']
Will result in a load to "mybundle" whenever a load to "jquery"
or "bootstrap/js/bootstrap" is made.
In this way, the bundle becomes the request that provides the module
*/
(function() {
// bundles support (just like RequireJS)
// bundle name is module name of bundle itself
// bundle is array of modules defined by the bundle
// when a module in the bundle is requested, the bundle is loaded instead
// of the form System.bundles['mybundle'] = ['jquery', 'bootstrap/js/bootstrap']
// Initialize bundle configuration state.
hookConstructor(function(baseConstructor) {
  return function() {
    baseConstructor.call(this);
    // bundle name -> array of module names the bundle provides
    this.bundles = {};
    // bundle names that have already been requested
    this.loadedBundles_ = {};
  };
});
// Load the bundle module in place of the requested module, resolving to
// '' so the original module's fetch is short-circuited (the bundle's
// define()s register it instead).
function loadFromBundle(loader, bundle) {
  return Promise.resolve(loader.normalize(bundle))
  .then(function(normalized) {
    // mark as loaded up-front so concurrent requests reuse the same bundle
    loader.loadedBundles_[normalized] = true;
    // carry the bundle's module list over to its normalized name
    loader.bundles[normalized] = loader.bundles[normalized] || loader.bundles[bundle];
    return loader.load(normalized);
  })
  .then(function() {
    return '';
  });
}
// assign bundle metadata for bundle loads
// Mark loads that are themselves bundles so later pipeline stages can
// recognize them.
hook('locate', function(locate) {
  return function(load) {
    var isBundle = (load.name in this.loadedBundles_) || (load.name in this.bundles);
    if (isBundle)
      load.metadata.bundle = true;
    return locate.call(this, load);
  };
});
// Fetch hook: when the requested module is provided by a bundle, load the
// bundle instead of fetching the module directly.
hook('fetch', function(fetch) {
  return function(load) {
    var loader = this;
    // tracing needs the real per-module sources, so bypass bundles
    if (loader.trace)
      return fetch.call(loader, load);
    // if already defined, no need to load a bundle
    if (load.name in loader.defined)
      return '';
    // check if it is in an already-loaded bundle
    for (var b in loader.loadedBundles_) {
      if (indexOf.call(loader.bundles[b], load.name) != -1)
        return loadFromBundle(loader, b);
    }
    // check if it is a new bundle
    for (var b in loader.bundles) {
      if (indexOf.call(loader.bundles[b], load.name) != -1)
        return loadFromBundle(loader, b);
    }
    return fetch.call(loader, load);
  };
});
})();
/*
* Dependency Tree Cache
*
* Allows a build to pre-populate a dependency trace tree on the loader of
* the expected dependency tree, to be loaded upfront when requesting the
* module, avoinding the n round trips latency of module loading, where
* n is the dependency tree depth.
*
* eg:
* System.depCache = {
* 'app': ['normalized', 'deps'],
* 'normalized': ['another'],
* 'deps': ['tree']
* };
*
* System.import('app')
* // simultaneously starts loading all of:
* // 'normalized', 'deps', 'another', 'tree'
* // before "app" source is even loaded
*/
(function() {
  // Dependency tree cache: loader.depCache maps a normalized module name
  // to its known dependency names so they can all be requested up-front,
  // avoiding one network round trip per level of the dependency tree.
  hookConstructor(function(constructor) {
    return function() {
      constructor.call(this);
      this.depCache = {};
    }; // fixed: terminate the returned function expression (was relying on ASI)
  });
  hook('locate', function(locate) {
    return function(load) {
      var loader = this;
      // load direct deps, in turn will pick up their trace trees
      var deps = loader.depCache[load.name];
      if (deps)
        for (var i = 0; i < deps.length; i++)
          loader['import'](deps[i]);
      return locate.call(loader, load);
    };
  });
})();
/*
* Conditions Extension
*
* Allows a condition module to alter the resolution of an import via syntax:
*
* import $ from 'jquery/#{browser}';
*
* Will first load the module 'browser' via `System.import('browser')` and
* take the default export of that module.
* If the default export is not a string, an error is thrown.
*
* We then substitute the string into the require to get the conditional resolution
* enabling environment-specific variations like:
*
* import $ from 'jquery/ie'
* import $ from 'jquery/firefox'
* import $ from 'jquery/chrome'
* import $ from 'jquery/safari'
*
* It can be useful for a condition module to define multiple conditions.
* This can be done via the `.` modifier to specify a member expression:
*
* import 'jquery/#{browser.grade}'
*
* Where the `grade` export of the `browser` module is taken for substitution.
*
* Note that `/` and a leading `.` are not permitted within conditional modules
* so that this syntax can be well-defined.
*
*
* Boolean Conditionals
*
* For polyfill modules, that are used as imports but have no module value,
* a binary conditional allows a module not to be loaded at all if not needed:
*
* import 'es5-shim#?conditions.needs-es5shim'
*
*/
(function() {
var conditionalRegEx = /#\{[^\}]+\}|#\?.+$/;
hookConstructor(function(baseConstructor) {
  return function() {
    baseConstructor.call(this);
    // register the standard environment condition module
    // (kept deliberately small as backwards-compat matters!)
    var envModule = this.newModule({
      browser: isBrowser
    });
    this.set('@system-env', envModule);
  };
});
// Conditional normalize: resolves '#{module}' interpolation and '#?module'
// boolean conditions by importing the condition module and substituting
// (or dropping) accordingly before the standard normalization.
hook('normalize', function(normalize) {
  return function(name, parentName, parentAddress) {
    var loader = this;
    var conditionalMatch = name.match(conditionalRegEx);
    if (conditionalMatch) {
      // '#{...}' is a substitution; '#?...' is a boolean condition
      var substitution = conditionalMatch[0][1] != '?';
      var conditionModule = substitution ? conditionalMatch[0].substr(2, conditionalMatch[0].length - 3) : conditionalMatch[0].substr(2);
      if (conditionModule[0] == '.' || conditionModule.indexOf('/') != -1)
        throw new TypeError('Invalid condition ' + conditionalMatch[0] + '\n\tCondition modules cannot contain . or / in the name.');
      // '.member' selects an export of the condition module
      var conditionExport;
      var conditionExportIndex = conditionModule.indexOf('.');
      if (conditionExportIndex != -1) {
        conditionExport = conditionModule.substr(conditionExportIndex + 1);
        conditionModule = conditionModule.substr(0, conditionExportIndex);
      }
      // a leading '~' negates a boolean condition
      var booleanNegation = !substitution && conditionModule[0] == '~';
      if (booleanNegation)
        conditionModule = conditionModule.substr(1);
      var pluginLoader = loader.pluginLoader || loader;
      return pluginLoader['import'](conditionModule, parentName, parentAddress)
      .then(function(m) {
        if (conditionExport === undefined) {
          // CommonJS case
          if (typeof m == 'string')
            return m;
          else
            return m['default'];
        }
        return readMemberExpression(conditionExport, m);
      })
      .then(function(conditionValue) {
        if (substitution) {
          if (typeof conditionValue !== 'string')
            throw new TypeError('The condition value for ' + conditionModule + ' doesn\'t resolve to a string.');
          name = name.replace(conditionalRegEx, conditionValue);
        }
        else {
          if (typeof conditionValue !== 'boolean')
            throw new TypeError('The condition value for ' + conditionModule + ' isn\'t resolving to a boolean.');
          if (booleanNegation)
            conditionValue = !conditionValue;
          // a false condition maps the request to the empty module
          if (!conditionValue)
            name = '@empty';
          else
            name = name.replace(conditionalRegEx, '');
        }
        return normalize.call(loader, name, parentName, parentAddress);
      });
    }
    return Promise.resolve(normalize.call(loader, name, parentName, parentAddress));
  };
});
})();System = new SystemJSLoader();
System.constructor = SystemJSLoader; // -- exporting --
if (typeof exports === 'object')
module.exports = Loader;
__global.Reflect = __global.Reflect || {};
__global.Reflect.Loader = __global.Reflect.Loader || Loader;
__global.Reflect.global = __global.Reflect.global || __global;
__global.LoaderPolyfill = Loader;
if (!System) {
System = new SystemLoader();
System.constructor = SystemLoader;
}
if (typeof exports === 'object')
module.exports = System;
__global.System = System;
})(typeof self != 'undefined' ? self : global);}
// auto-load Promise and URL polyfills if needed in the browser
try {
  // feature-detect a working URL constructor (throws where unsupported)
  var hasURL = typeof URLPolyfill != 'undefined' || new URL('test:///').protocol == 'test:';
}
catch(e) {}
if (typeof Promise === 'undefined' || !hasURL) {
  // document.write
  if (typeof document !== 'undefined') {
    // locate this script to resolve system-polyfills.js next to it
    var scripts = document.getElementsByTagName('script');
    $__curScript = scripts[scripts.length - 1];
    var curPath = $__curScript.src;
    var basePath = curPath.substr(0, curPath.lastIndexOf('/') + 1);
    // the polyfill script calls back into systemJSBootstrap once loaded
    window.systemJSBootstrap = bootstrap;
    document.write(
      '<' + 'script type="text/javascript" src="' + basePath + 'system-polyfills.js">' + '<' + '/script>'
    );
  }
  // importScripts
  else if (typeof importScripts !== 'undefined') {
    var basePath = '';
    try {
      // throw to capture a stack trace and recover this worker script's URL
      throw new Error('_');
    } catch (e) {
      e.stack.replace(/(?:at|@).*(http.+):[\d]+:[\d]+/, function(m, url) {
        basePath = url.replace(/\/[^\/]*$/, '/');
      });
    }
    importScripts(basePath + 'system-polyfills.js');
    bootstrap();
  }
  else {
    bootstrap();
  }
}
else {
  bootstrap();
}
})();<|fim▁end|> | catch(e) { |
<|file_name|>BracketedKeywordInContextDelegate.hh<|end_file_name|><|fim▁begin|>#ifndef BRACKETEDKEYWORDINCONTEXTDELEGATE_HH
#define BRACKETEDKEYWORDINCONTEXTDELEGATE_HH
#include <vector>
#include <AlpinoCorpus/LexItem.hh><|fim▁hole|>{
Q_OBJECT
public:
BracketedKeywordInContextDelegate(CorpusReaderPtr);
virtual ~BracketedKeywordInContextDelegate() {}
void paint(QPainter *painter, QStyleOptionViewItem const &option, QModelIndex const &index) const;
QSize sizeHint(QStyleOptionViewItem const &option, QModelIndex const &index) const;
private:
void loadColorSettings();
QString extractFragment(std::vector<alpinocorpus::LexItem> const &items, size_t first, size_t last) const;
mutable QColor d_highlightColor;
};
#endif<|fim▁end|> |
#include "BracketedDelegate.hh"
class BracketedKeywordInContextDelegate : public BracketedDelegate |
<|file_name|>consumer_test.go<|end_file_name|><|fim▁begin|>package sarama
import (
"log"
"os"
"os/signal"
"sync"
"testing"
"time"
)
// TestConsumerOffsetManual verifies that a PartitionConsumer started at an
// explicit offset (1234) delivers messages whose offsets increase
// sequentially from that starting point.
func TestConsumerOffsetManual(t *testing.T) {
	seedBroker := newMockBroker(t, 1)
	leader := newMockBroker(t, 2)

	// seed broker assigns partition 0 of my_topic to the leader broker
	metadataResponse := new(MetadataResponse)
	metadataResponse.AddBroker(leader.Addr(), leader.BrokerID())
	metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, ErrNoError)
	seedBroker.Returns(metadataResponse)

	// queue 11 fetch responses, one message each, at offsets 1234..1244
	for i := 0; i <= 10; i++ {
		fetchResponse := new(FetchResponse)
		fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i+1234))
		leader.Returns(fetchResponse)
	}

	master, err := NewConsumer([]string{seedBroker.Addr()}, nil)
	if err != nil {
		t.Fatal(err)
	}

	consumer, err := master.ConsumePartition("my_topic", 0, 1234)
	if err != nil {
		t.Fatal(err)
	}
	seedBroker.Close()

	// drain ten messages and check that offsets arrive in order
	for i := 0; i < 10; i++ {
		select {
		case message := <-consumer.Messages():
			if message.Offset != int64(i+1234) {
				t.Error("Incorrect message offset!")
			}
		case err := <-consumer.Errors():
			t.Error(err)
		}
	}

	safeClose(t, consumer)
	safeClose(t, master)
	leader.Close()
}
// TestConsumerLatestOffset verifies that consuming from OffsetNewest first
// queries the broker for the latest offset and, after one delivered
// message, tracks the next offset to request.
func TestConsumerLatestOffset(t *testing.T) {
	seedBroker := newMockBroker(t, 1)
	leader := newMockBroker(t, 2)

	metadataResponse := new(MetadataResponse)
	metadataResponse.AddBroker(leader.Addr(), leader.BrokerID())
	metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, ErrNoError)
	seedBroker.Returns(metadataResponse)

	// broker reports 0x010101 as the latest offset for the partition
	offsetResponse := new(OffsetResponse)
	offsetResponse.AddTopicPartition("my_topic", 0, 0x010101)
	leader.Returns(offsetResponse)

	fetchResponse := new(FetchResponse)
	fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), 0x010101)
	leader.Returns(fetchResponse)

	master, err := NewConsumer([]string{seedBroker.Addr()}, nil)
	if err != nil {
		t.Fatal(err)
	}
	seedBroker.Close()

	consumer, err := master.ConsumePartition("my_topic", 0, OffsetNewest)
	if err != nil {
		t.Fatal(err)
	}

	leader.Close()
	safeClose(t, consumer)
	safeClose(t, master)

	// we deliver one message, so it should be one higher than we return in the OffsetResponse
	if consumer.(*partitionConsumer).offset != 0x010102 {
		t.Error("Latest offset not fetched correctly:", consumer.(*partitionConsumer).offset)
	}
}
// TestConsumerFunnyOffsets verifies that the consumer handles
// non-sequential offsets, skipping messages below the requested offset
// and delivering the first message at or above it.
func TestConsumerFunnyOffsets(t *testing.T) {
	// for topics that are compressed and/or compacted (different things!) we have to be
	// able to handle receiving offsets that are non-sequential (though still strictly increasing) and
	// possibly starting prior to the actual value we requested
	seedBroker := newMockBroker(t, 1)
	leader := newMockBroker(t, 2)

	metadataResponse := new(MetadataResponse)
	metadataResponse.AddBroker(leader.Addr(), leader.BrokerID())
	metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, ErrNoError)
	seedBroker.Returns(metadataResponse)

	// first fetch returns offsets 1 and 3; we will request offset 2
	fetchResponse := new(FetchResponse)
	fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(1))
	fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(3))
	leader.Returns(fetchResponse)

	fetchResponse = new(FetchResponse)
	fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(5))
	leader.Returns(fetchResponse)

	master, err := NewConsumer([]string{seedBroker.Addr()}, nil)
	if err != nil {
		t.Fatal(err)
	}

	consumer, err := master.ConsumePartition("my_topic", 0, 2)
	if err != nil {
		t.Fatal(err)
	}

	// offset 1 is below the requested start; offset 3 is the first delivered
	message := <-consumer.Messages()
	if message.Offset != 3 {
		t.Error("Incorrect message offset!")
	}

	leader.Close()
	seedBroker.Close()
	safeClose(t, consumer)
	safeClose(t, master)
}
func TestConsumerRebalancingMultiplePartitions(t *testing.T) {
// initial setup
seedBroker := newMockBroker(t, 1)
leader0 := newMockBroker(t, 2)
leader1 := newMockBroker(t, 3)
metadataResponse := new(MetadataResponse)
metadataResponse.AddBroker(leader0.Addr(), leader0.BrokerID())
metadataResponse.AddBroker(leader1.Addr(), leader1.BrokerID())
metadataResponse.AddTopicPartition("my_topic", 0, leader0.BrokerID(), nil, nil, ErrNoError)
metadataResponse.AddTopicPartition("my_topic", 1, leader1.BrokerID(), nil, nil, ErrNoError)
seedBroker.Returns(metadataResponse)
// launch test goroutines
config := NewConfig()
config.Consumer.Retry.Backoff = 0
master, err := NewConsumer([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
// we expect to end up (eventually) consuming exactly ten messages on each partition
var wg sync.WaitGroup
for i := 0; i < 2; i++ {
consumer, err := master.ConsumePartition("my_topic", int32(i), 0)
if err != nil {
t.Error(err)
}
go func(c PartitionConsumer) {
for err := range c.Errors() {
t.Error(err)
}
}(consumer)
wg.Add(1)
go func(partition int32, c PartitionConsumer) {
for i := 0; i < 10; i++ {
message := <-consumer.Messages()
if message.Offset != int64(i) {
t.Error("Incorrect message offset!", i, partition, message.Offset)
}
if message.Partition != partition {
t.Error("Incorrect message partition!")
}
}
safeClose(t, consumer)
wg.Done()
}(int32(i), consumer)
}
// leader0 provides first four messages on partition 0
fetchResponse := new(FetchResponse)
for i := 0; i < 4; i++ {
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i))
}
leader0.Returns(fetchResponse)
// leader0 says no longer leader of partition 0
fetchResponse = new(FetchResponse)
fetchResponse.AddError("my_topic", 0, ErrNotLeaderForPartition)
leader0.Returns(fetchResponse)
// metadata assigns both partitions to leader1
metadataResponse = new(MetadataResponse)
metadataResponse.AddTopicPartition("my_topic", 0, leader1.BrokerID(), nil, nil, ErrNoError)
metadataResponse.AddTopicPartition("my_topic", 1, leader1.BrokerID(), nil, nil, ErrNoError)
seedBroker.Returns(metadataResponse)
time.Sleep(50 * time.Millisecond) // dumbest way to force a particular response ordering
// leader1 provides five messages on partition 1
fetchResponse = new(FetchResponse)
for i := 0; i < 5; i++ {
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i))
}
leader1.Returns(fetchResponse)
// leader1 provides three more messages on both partitions
fetchResponse = new(FetchResponse)
for i := 0; i < 3; i++ {
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i+4))
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i+5))
}
leader1.Returns(fetchResponse)
// leader1 provides three more messages on partition0, says no longer leader of partition1
fetchResponse = new(FetchResponse)
for i := 0; i < 3; i++ {
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(i+7))
}
fetchResponse.AddError("my_topic", 1, ErrNotLeaderForPartition)
leader1.Returns(fetchResponse)
// metadata assigns 0 to leader1 and 1 to leader0
metadataResponse = new(MetadataResponse)
metadataResponse.AddTopicPartition("my_topic", 0, leader1.BrokerID(), nil, nil, ErrNoError)
metadataResponse.AddTopicPartition("my_topic", 1, leader0.BrokerID(), nil, nil, ErrNoError)
seedBroker.Returns(metadataResponse)
time.Sleep(50 * time.Millisecond) // dumbest way to force a particular response ordering
// leader0 provides two messages on partition 1
fetchResponse = new(FetchResponse)
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(8))
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(9))
leader0.Returns(fetchResponse)
// leader0 provides last message on partition 1
fetchResponse = new(FetchResponse)
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(10))
leader0.Returns(fetchResponse)
// leader1 provides last message on partition 0
fetchResponse = new(FetchResponse)
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(10))
leader1.Returns(fetchResponse)
wg.Wait()
leader1.Close()
leader0.Close()
seedBroker.Close()
safeClose(t, master)
}
func TestConsumerInterleavedClose(t *testing.T) {
t.Skip("Enable once bug #325 is fixed.")
seedBroker := newMockBroker(t, 1)
leader := newMockBroker(t, 2)
metadataResponse := new(MetadataResponse)
metadataResponse.AddBroker(leader.Addr(), leader.BrokerID())
metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, ErrNoError)
metadataResponse.AddTopicPartition("my_topic", 1, leader.BrokerID(), nil, nil, ErrNoError)
seedBroker.Returns(metadataResponse)
config := NewConfig()
config.ChannelBufferSize = 0
master, err := NewConsumer([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
c0, err := master.ConsumePartition("my_topic", 0, 0)
if err != nil {
t.Fatal(err)
}
fetchResponse := new(FetchResponse)
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(0))
leader.Returns(fetchResponse)
c1, err := master.ConsumePartition("my_topic", 1, 0)
if err != nil {
t.Fatal(err)
}
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(0))
leader.Returns(fetchResponse)
safeClose(t, c1)
safeClose(t, c0)
safeClose(t, master)
leader.Close()
seedBroker.Close()
}
func TestConsumerBounceWithReferenceOpen(t *testing.T) {
seedBroker := newMockBroker(t, 1)
leader := newMockBroker(t, 2)
leaderAddr := leader.Addr()
metadataResponse := new(MetadataResponse)
metadataResponse.AddBroker(leader.Addr(), leader.BrokerID())
metadataResponse.AddTopicPartition("my_topic", 0, leader.BrokerID(), nil, nil, ErrNoError)
metadataResponse.AddTopicPartition("my_topic", 1, leader.BrokerID(), nil, nil, ErrNoError)
seedBroker.Returns(metadataResponse)
config := NewConfig()
config.Consumer.Return.Errors = true
config.Consumer.Retry.Backoff = 0
config.ChannelBufferSize = 0
master, err := NewConsumer([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
c0, err := master.ConsumePartition("my_topic", 0, 0)
if err != nil {
t.Fatal(err)
}
c1, err := master.ConsumePartition("my_topic", 1, 0)
if err != nil {
t.Fatal(err)
}
fetchResponse := new(FetchResponse)
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(0))
fetchResponse.AddError("my_topic", 1, ErrNoError)
leader.Returns(fetchResponse)
<-c0.Messages()
fetchResponse = new(FetchResponse)
fetchResponse.AddError("my_topic", 0, ErrNoError)
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(0))
leader.Returns(fetchResponse)
<-c1.Messages()
leader.Close()
leader = newMockBrokerAddr(t, 2, leaderAddr)
// unblock one of the two (it doesn't matter which)
select {
case <-c0.Errors():
case <-c1.Errors():
}
// send it back to the same broker
seedBroker.Returns(metadataResponse)
fetchResponse = new(FetchResponse)
fetchResponse.AddMessage("my_topic", 0, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(1))
fetchResponse.AddMessage("my_topic", 1, nil, ByteEncoder([]byte{0x00, 0x0E}), int64(1))
leader.Returns(fetchResponse)
time.Sleep(5 * time.Millisecond)
// unblock the other one
select {
case <-c0.Errors():
case <-c1.Errors():
}
// send it back to the same broker
seedBroker.Returns(metadataResponse)
select {
case <-c0.Messages():
case <-c1.Messages():
}
leader.Close()
seedBroker.Close()
wg := sync.WaitGroup{}
wg.Add(2)
go func() {
_ = c0.Close()
wg.Done()
}()
go func() {
_ = c1.Close()
wg.Done()
}()
wg.Wait()
safeClose(t, master)
}
// This example has the simplest use case of the consumer. It simply
// iterates over the messages channel using a for/range loop. Because
// a producer never stopsunless requested, a signal handler is registered
// so we can trigger a clean shutdown of the consumer.
func ExampleConsumer_for_loop() {
master, err := NewConsumer([]string{"localhost:9092"}, nil)
if err != nil {
log.Fatalln(err)
}
defer func() {
if err := master.Close(); err != nil {
log.Fatalln(err)
}
}()
consumer, err := master.ConsumePartition("my_topic", 0, 0)
if err != nil {
log.Fatalln(err)
}
go func() {
// By default, the consumer will always keep going, unless we tell it to stop.
// In this case, we capture the SIGINT signal so we can tell the consumer to stop
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
<-signals
consumer.AsyncClose()
}()
msgCount := 0
for message := range consumer.Messages() {
log.Println(string(message.Value))
msgCount++
}
log.Println("Processed", msgCount, "messages.")
}
// This example shows how to use a consumer with a select statement
// dealing with the different channels.
func ExampleConsumer_select() {
config := NewConfig()
config.Consumer.Return.Errors = true // Handle errors manually instead of letting Sarama log them.
master, err := NewConsumer([]string{"localhost:9092"}, config)
if err != nil {
log.Fatalln(err)
}
defer func() {
if err := master.Close(); err != nil {
log.Fatalln(err)
}
}()
consumer, err := master.ConsumePartition("my_topic", 0, 0)
if err != nil {
log.Fatalln(err)
}
defer func() {
if err := consumer.Close(); err != nil {
log.Fatalln(err)
}
}()
msgCount := 0
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
consumerLoop:
for {
select {
case err := <-consumer.Errors():
log.Println(err)
case <-consumer.Messages():
msgCount++
case <-signals:
log.Println("Received interrupt")
break consumerLoop
}
}
log.Println("Processed", msgCount, "messages.")
}
// This example shows how to use a consumer with different goroutines
// to read from the Messages and Errors channels.
func ExampleConsumer_goroutines() {<|fim▁hole|> config.Consumer.Return.Errors = true // Handle errors manually instead of letting Sarama log them.
master, err := NewConsumer([]string{"localhost:9092"}, config)
if err != nil {
log.Fatalln(err)
}
defer func() {
if err := master.Close(); err != nil {
panic(err)
}
}()
consumer, err := master.ConsumePartition("my_topic", 0, OffsetOldest)
if err != nil {
log.Fatalln(err)
}
var (
wg sync.WaitGroup
msgCount int
)
wg.Add(1)
go func() {
defer wg.Done()
for message := range consumer.Messages() {
log.Printf("Consumed message with offset %d", message.Offset)
msgCount++
}
}()
wg.Add(1)
go func() {
defer wg.Done()
for err := range consumer.Errors() {
log.Println(err)
}
}()
// Wait for an interrupt signal to trigger the shutdown
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
<-signals
consumer.AsyncClose()
// Wait for the Messages and Errors channel to be fully drained.
wg.Wait()
log.Println("Processed", msgCount, "messages.")
}<|fim▁end|> | config := NewConfig() |
<|file_name|>_tornado.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import Cookie
import base64
import calendar
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import httplib
import logging
import mimetypes
import os.path
import re
import stat
import sys
import time
import types
import urllib
import urlparse
import uuid
from tornado import web
from tornado.web import HTTPError, utf8
from tld_name import tld_name
from tornado import escape
from tornado import locale
from tornado import stack_context
from tornado import template
def set_cookie(self, name, value, domain=None, expires=None, path='/',
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
if domain is None:
domain = '.%s'%tld_name(self.request.host)<|fim▁hole|> if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
if type(expires) is not str:
timestamp = calendar.timegm(expires.utctimetuple())
expires = email.utils.formatdate(
timestamp, localtime=False, usegmt=True
)
else:
expires = 'Tue, 01 Jan 2030 00:00:00 GMT'
morsel['expires'] = expires
if path:
morsel["path"] = path
for k, v in kwargs.iteritems():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
web.RequestHandler.set_cookie = set_cookie
def clear_cookie(self, name, path='/', domain=None):
"""Deletes the cookie with the given name."""
expires = 'Tue, 01 Jun 2000 00:00:00 GMT'
self.set_cookie(name, value='', path=path, expires=expires, domain=domain)
web.RequestHandler.clear_cookie = clear_cookie
#from model._db import SQLSTORE, mc
from os import getpid
PID = str(getpid()).ljust(7)
#logging.warn("PID:%s", PID)
def _init(self, *args, **kwds):
pass
web.RequestHandler.init = _init
def redirect(self, url, permanent=False):
"""Sends a redirect to the given (optionally relative) URL."""
if self._headers_written:
raise Exception('Cannot redirect after headers have been written')
self.set_status(301 if permanent else 302)
self.set_header('Location', url)
self.finish()
web.RequestHandler.redirect = redirect
def xsrf_form_html(self):
return '<input type="hidden" name="_xsrf" value="%s">'%self.xsrf_token
web.RequestHandler.xsrf_form_html = property(xsrf_form_html)<|fim▁end|> |
name = escape.native_str(name)
value = escape.native_str(value) |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp'),
plugins = require('gulp-load-plugins')(),
Karma = require('karma').Server;
var paths = {
scripts: {
src: ['src/**/*.js'],
dest: 'dist',
file: 'mention.js'
},
styles: {
src: ['src/**/*.scss'],
dest: 'dist',
file: 'mention.css'
},
example: {
scripts: {
src: ['example/**/*.es6.js'],
dest: 'example',
file: 'example.js'
},
styles: {
src: ['example/**/*.scss'],
dest: 'example'
}
}
};
gulp.task('default', ['scripts']);
gulp.task('example', ['scripts:example', 'styles:example']);
gulp.task('watch', function(){
gulp.watch(paths.scripts.src, 'scripts');
gulp.watch(paths.styles.src, 'styles');
});
<|fim▁hole|> gulp.watch(paths.example.styles.src, 'styles:example');
});
gulp.task('scripts', scripts(paths.scripts));
gulp.task('scripts:example', scripts(paths.example.scripts));
function scripts(path, concat) {
return function() {
return gulp.src(path.src)
.pipe(plugins.sourcemaps.init())
.pipe(plugins.babel())
.pipe(plugins.angularFilesort())
.pipe(plugins.concat(path.file))
.pipe(gulp.dest(path.dest))
.pipe(plugins.uglify({ mangle: false }))
.pipe(plugins.extReplace('.min.js'))
.pipe(gulp.dest(path.dest))
.pipe(plugins.sourcemaps.write('.'));
}
}
gulp.task('styles', styles(paths.styles));
gulp.task('styles:example', styles(paths.example.styles));
function styles(path) {
return function() {
return gulp.src(path.src)
.pipe(plugins.sourcemaps.init())
.pipe(plugins.sass())
.pipe(gulp.dest(path.dest))
.pipe(plugins.sourcemaps.write('.'));
}
}
gulp.task('karma', karma());
gulp.task('watch:karma', karma({ singleRun: false, autoWatch: true }));
function karma (opts) {
opts = opts || {};
opts.configFile = __dirname + '/karma.conf.js';
return function (done) {
return new Karma(opts, done).start();
}
}<|fim▁end|> | gulp.task('watch:example', function(){
gulp.watch(paths.example.scripts.src, 'scripts:example'); |
<|file_name|>validator.utils.js<|end_file_name|><|fim▁begin|>function isString(value) {
if (typeof value !== 'string' || value === '' || value === null) {
return false;
}
return true;
}
function isNumber(value) {
if (typeof Number.parseInt(value, 10) !== 'number'
|| Number.isNaN(value) || value === null) {
return false;
}
return true;
}
function isInRange(value, start, end) {
if (value < start || value > end) {
return false;
}
return true;
}
function isEmail(value) {<|fim▁hole|> if (value.substring(atIndex, value.length).indexOf('.') < 1) {
return false;
}
return true;
}
module.exports = {
isNumber,
isString,
isInRange,
isEmail,
};<|fim▁end|> | const atIndex = value.indexOf('@');
if (atIndex < 1) {
return false;
} |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.conf import settings
from common.views import AbsSegmentSelection
#from common.views import AbsTargetSelection
from common.views import AbsTargetSelectionTable
# from common.alignment_SITE_NAME import Alignment
from protwis.context_processors import site_title
Alignment = getattr(__import__('common.alignment_' + settings.SITE_NAME, fromlist=['Alignment']), 'Alignment')
from collections import OrderedDict
class TargetSelection(AbsTargetSelectionTable):
step = 1
number_of_steps = 2
title = "SELECT RECEPTORS"
description = "Select receptors in the table (below) or browse the classification tree (right). You can select entire" \
+ " families or individual receptors.\n\nOnce you have selected all your receptors, click the green button."
docs = "sequences.html#similarity-matrix"
selection_boxes = OrderedDict([
("reference", False),
("targets", True),
("segments", False),
])
buttons = {
"continue": {
"label": "Next",
"onclick": "submitSelection('/similaritymatrix/segmentselection');",
"color": "success",
},
}
# class TargetSelection(AbsTargetSelection):
# step = 1
# number_of_steps = 2
# docs = 'sequences.html#similarity-matrix'
# selection_boxes = OrderedDict([
# ('reference', False),
# ('targets', True),
# ('segments', False),
# ])
# buttons = {
# 'continue': {
# 'label': 'Continue to next step',
# 'url': '/similaritymatrix/segmentselection',
# 'color': 'success',
# },
# }
class SegmentSelection(AbsSegmentSelection):
step = 2
number_of_steps = 2
docs = 'sequences.html#similarity-matrix'
selection_boxes = OrderedDict([<|fim▁hole|> ('reference', False),
('targets', False),
('segments', True),
])
buttons = {
'continue': {
'label': 'Show matrix',
'url': '/similaritymatrix/render',
'color': 'success',
},
}
def render_matrix(request):
# get the user selection from session
simple_selection = request.session.get('selection', False)
# create an alignment object
a = Alignment()
# load data from selection into the alignment
a.load_proteins_from_selection(simple_selection)
a.load_segments_from_selection(simple_selection)
# build the alignment data matrix
a.build_alignment()
# NOTE: NOT necessary for similarity matrix
# calculate consensus sequence + amino acid and feature frequency
# a.calculate_statistics()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity_matrix()
return render(request, 'similaritymatrix/matrix.html', {'p': a.proteins, 'm': a.similarity_matrix})
def render_csv_matrix(request):
# get the user selection from session
simple_selection = request.session.get('selection', False)
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_proteins_from_selection(simple_selection)
a.load_segments_from_selection(simple_selection)
# build the alignment data matrix
a.build_alignment()
# calculate consensus sequence + amino acid and feature frequency
# NOTE: NOT necessary for similarity matrix
# a.calculate_statistics()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity_matrix()
response = render(request, 'similaritymatrix/matrix_csv.html', {'p': a.proteins, 'm': a.similarity_matrix})
response['Content-Disposition'] = "attachment; filename=" + site_title(request)["site_title"] + "_similaritymatrix.csv"
return response<|fim▁end|> | |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"bytes"
"compress/gzip"
"encoding/hex"
"fmt"
"io"
"log"
"os"
)
// This is equivalent to running
// gzip --stdout | xxd -p | tr -d '\n'
// but with a platform-independent gzip encoding to provide more stable results.
func main() {
// gzip the spec
var buf bytes.Buffer
zw, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression)
if _, err := io.Copy(zw, os.Stdin); err != nil {
log.Fatal(err)
}
if err := zw.Close(); err != nil {
log.Fatal(err)
}
// hex-encode the spec
s := hex.EncodeToString(buf.Bytes())
fmt.Printf("%s", s)<|fim▁hole|><|fim▁end|> | } |
<|file_name|>multi_request.rs<|end_file_name|><|fim▁begin|>use rand::seq::SliceRandom;
use rand::thread_rng;
use yaml_rust::Yaml;
use crate::interpolator::INTERPOLATION_REGEX;
use crate::actions::Request;
use crate::benchmark::Benchmark;
pub fn is_that_you(item: &Yaml) -> bool {
item["request"].as_hash().is_some() && item["with_items"].as_vec().is_some()
}
pub fn expand(item: &Yaml, benchmark: &mut Benchmark) {
if let Some(with_items) = item["with_items"].as_vec() {
let mut with_items_list = with_items.clone();
if let Some(shuffle) = item["shuffle"].as_bool() {
if shuffle {
let mut rng = thread_rng();
with_items_list.shuffle(&mut rng);
}
}
for (index, with_item) in with_items_list.iter().enumerate() {
let index = index as u32;
let value: &str = with_item.as_str().unwrap_or("");
if INTERPOLATION_REGEX.is_match(value) {
panic!("Interpolations not supported in 'with_items' children!");
}
benchmark.push(Box::new(Request::new(item, Some(with_item.clone()), Some(index))));
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn expand_multi() {
let text = "---\nname: foobar\nrequest:\n url: /api/{{ item }}\nwith_items:\n - 1\n - 2\n - 3";
let docs = yaml_rust::YamlLoader::load_from_str(text).unwrap();
let doc = &docs[0];
let mut benchmark: Benchmark = Benchmark::new();
expand(&doc, &mut benchmark);
assert_eq!(is_that_you(&doc), true);
assert_eq!(benchmark.len(), 3);
}
#[test]
#[should_panic]
fn runtime_expand() {
let text = "---\nname: foobar\nrequest:\n url: /api/{{ item }}\nwith_items:\n - 1\n - 2\n - foo{{ memory }}";
let docs = yaml_rust::YamlLoader::load_from_str(text).unwrap();
let doc = &docs[0];
let mut benchmark: Benchmark = Benchmark::new();
expand(&doc, &mut benchmark);<|fim▁hole|>}<|fim▁end|> | } |
<|file_name|>uiLocalization.js<|end_file_name|><|fim▁begin|>/**************************************************************************************
* Copyright (c) 2013-2015, Finnish Social Science Data Archive/University of Tampere *
* *
* All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without modification, *
* are permitted provided that the following conditions are met: *
* 1. Redistributions of source code must retain the above copyright notice, this *
* list of conditions and the following disclaimer. *
* 2. Redistributions in binary form must reproduce the above copyright notice, *
* this list of conditions and the following disclaimer in the documentation *
* and/or other materials provided with the distribution. *
* 3. Neither the name of the copyright holder nor the names of its contributors *
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND *
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE *
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR *
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES *
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; *
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON *
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT *
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
**************************************************************************************/
define(function (require) {
'use strict';
/**
* Add localizations for general UI elements such as buttons
* as well as all error messages even though most of these will not be needed at one time
*/
require('./addTranslation')('', {
"page": {
"&title": {
"default": "Yhteiskuntatieteellinen tietoarkisto - Metka"
}
},
"topmenu": {
"&desktop": {
"default": "Työpöytä"
},
"&expert": {
"default": "Eksperttihaku"
},
"&study": {
"default": "Aineistot"
},
"&variables": {
"default": "Muuttujat"
},
"&publication": {
"default": "Julkaisut"
},
"&series": {
"default": "Sarjat"
},
"&binder": {
"default": "Mapit"
},
"&report": {
"default": "Raportit"
},
"&settings": {
"default": "Asetukset"
},
"&help": {
"default": "Ohjeet"
},
"&logout": {
"default": "Kirjaudu ulos"
}
},
"state": {
"&DRAFT": {
"default": "LUONNOS"
},
"&APPROVED": {
"default": "HYVÄKSYTTY"
},
"&REMOVED": {
"default": "POISTETTU"
}
},
"type": {
"SERIES": {
"&title": {
"default": "Sarja"
},
"&search": {
"default": "Sarjahaku"
}
},
"STUDY": {
"&title": {
"default": "Aineisto"
},
"&search": {
"default": "Aineistohaku"
},
"erroneous": {
"&title": {
"default": "Virheelliset"
},
"table": {
"&id": {
"default": "Aineistonumero"
},
"&name": {
"default": "Aineiston nimi"
},
"&errorPointCount": {
"default": "Virhepisteet"
}
}
}
},
"STUDY_VARIABLES": {
"&title": {
"default": "Muuttujat"
},
"&search": {
"default": "Muuttujahaku"
}
},
"STUDY_VARIABLE": {
"&title": {
"default": "Muuttuja"
},
"&search": {
"default": "Muuttujahaku"
}
},
"STUDY_ATTACHMENT": {
"&title": {
"default": "Liite"
},
"&search": {
"default": "Liitehaku"
}
},
"PUBLICATION": {
"&title": {
"default": "Julkaisu"
},
"&search": {
"default": "Julkaisuhaku"
}
},
"BINDERS": {
"&title": {
"default": "Mapit"
}
},
"SETTINGS": {
"&title": {
"default": "Hallinta"
}
},
"BINDER_PAGE": {
"&title": {
"default": "Mapitus"
}
},
"STUDY_ERROR": {
"&title": {
"default": "Aineistovirhe"
}
}
},
"general": {
"result": {
"&amount": {
"default": "Rivejä: {length}"
}
},
"downloadInfo": {
"¤tlyDownloading": {
"default": "Lataus on jo käynnissä. Odota latauksen valmistumista ladataksesi uudelleen."
}
},
"buttons": {
"&add": {
"default": "Lisää"
},
"&addSeries": {
"default": "Lisää sarja"
},
"&cancel": {
"default": "Peruuta"
},
"&close": {
"default": "Sulje"
},
"&download": {
"default": "Lataa"
},
"&upload": {
"default": "Lataa"
},
"&ok": {
"default": "OK"
},
"&save": {
"default": "Tallenna"
},
"&search": {
"default": "Hae"
},
"&remove": {
"default": "Poista"
},
"&no": {
"default": "Ei"
},
"&yes": {
"default": "Kyllä"
},
"&addGroup": {
"default": "Lisää ryhmä"
}
},
"revision": {
"compare": {
"&begin": {
"default": "Alku"
},
"&changed": {
"default": "Muutos"
},
"&end": {
"default": "Loppu"
},
"&modifier": {
"default": "Muuttaja"
},
"&modifyDate": {
"default": "Muutospvm"
},
"&property": {
"default": "Ominaisuus"
},
"&original": {
"default": "Alkuperäinen"
},
"&title": {
"default": "Revisioiden vertailu (revisio {0} -> revisio {1})"
}
},
"&compare": {
"default": "Vertaa"
},
"&publishDate": {
"default": "Julkaisupvm"
},
"&replace": {
"default": "Korvaa"
},
"&revisions": {
"default": "Revisiot"
}
},
"&referenceValue": {
"default": "Referenssiarvo"
},
"&referenceType": {
"default": "Tyyppi"
},
"saveInfo": {
"&savedAt": {
"default": "Päivämäärä"
},
"&savedBy": {
"default": "Tallentaja"
}
},
"refSaveInfo": {
"&savedAt": {
"default": "Päivämäärä (viittaus)"
},
"&savedBy": {
"default": "Tallentaja (viittaus)"
}
},
"&refState": {
"default": "Tila"
},
"refApproveInfo": {
"&approvedAt": {
"default": "Hyväksytty (viittaus)"
},
"&approvedBy": {
"default": "Hyväksyjä (viittaus)"
},
"&approvedRevision": {
"default": "Revisio (viittaus)"
}
},
"selection": {
"&empty": {
"default": "-- Valitse --"
}
},
"table": {
"&add": {
"default": "Lisää"
},
"countries": {
"&addFinland": {
"default": "Lisää Suomi"
}
}
},
"&id": {
"default": "ID"
},
"&revision": {
"default": "Revisio"
},
"&handler": {
"default": "Käsittelijä"
},
"&noHandler": {
"default": "Ei käsittelijää"
}
},
"search": {
"state": {
"&title": {
"default": "Hae:"
},
"&APPROVED": {
"default": "Hyväksyttyjä"
},
"&DRAFT": {
"default": "Luonnoksia"
},
"&REMOVED": {
"default": "Poistettuja"
}
},
"result": {
"&title": {
"default": "Hakutulos"
},
"&amount": {
"default": "Hakutuloksia: {length}"
},
"state": {
"&title": {
"default": "Tila"
},
"&APPROVED": {
"default": "Hyväksytty"
},
"&DRAFT": {
"default": "Luonnos"
},
"&REMOVED": {
"default": "Poistettu"
}
}
}
},
"settings": {
"&title": {
"default": "Asetukset"
},
"upload": {
"dataConfiguration": {
"&title": {<|fim▁hole|> "default": "Lataa datan konfiguraatio"
}
},
"guiConfiguration": {
"&title": {
"default": "GUI konfiguraatio"
},
"&upload": {
"default": "Lataa GUI konfiguraatio"
}
},
"miscJson": {
"&title": {
"default": "Json tiedosto"
},
"&upload": {
"default": "Lataa Json tiedosto"
}
}
}
},
"dialog": {
"waitDialog": {
"title": "Toimintoa suoritetaan..."
}
},
"alert": {
"notice": {
"&title": {
"default": "Huomio"
},
"approve": {
"success": "Luonnos hyväksytty onnistuneesti."
},
"save": {
"success": "Luonnos tallennettu onnistuneesti."
}
},
"error": {
"&title": {
"default": "Virhe"
},
"approve": {
"fail": {
"save": "Luonnoksen hyväksymisessä tapahtui virhe tallennuksen aikana.",
"validate": "Luonnoksen hyväksymisessä tapahtui virhe datan validoinnin aikana."
}
},
"save": {
"fail": "Luonnoksen tallentamisessa tapahtui virhe."
}
},
"gui": {
"missingButtonHandler": {
"&text": {
"default": 'Ei käsittelijää painikkeelle [{0}] otsikolla "{1}"'
}
}
}
},
"confirmation": {
"&title": {
"default": "Varmistus"
},
"remove": {
"revision": {
"&title": {
"default": "Revision poiston varmistus"
},
"draft": {
"&text": {
"default": "Haluatko varmasti poistaa {target} id:llä {id} luonnoksen {no}?"
},
"data": {
"&SERIES": {
"default": "sarjalta"
},
"&STUDY": {
"default": "aineistolta"
},
"&STUDY_VARIABLES": {
"default": "aineistomuuttujilta"
},
"&STUDY_VARIABLE": {
"default": "muuttujalta"
},
"&STUDY_ATTACHMENT": {
"default": "aineistoliitteistä"
},
"&PUBLICATION": {
"default": "julkaisulta"
},
"&BINDER_PAGE": {
"default": "mapitukselta"
}
}
},
"logical": {
"&text": {
"default": "Haluatko varmasti poistaa {target} id:llä {id}?"
},
"data": {
"&SERIES": {
"default": "sarjan"
},
"&STUDY": {
"default": "aineiston"
},
"&STUDY_ATTACHMENT": {
"default": "aineistoliitteen"
},
"&PUBLICATION": {
"default": "julkaisun"
},
"&BINDER_PAGE": {
"default": "mapituksen"
}
}
}
}
}
}
});
});<|fim▁end|> | "default": "Datan konfiguraatio"
},
"&upload": { |
<|file_name|>reclamos.component.spec.ts<|end_file_name|><|fim▁begin|>import {expect} from 'chai';
import {Controller} from '../../src/component/reclamos.component.ts';
<|fim▁hole|>
it("getTitle must return hello plus prestacionName", () => {
let controller = new Controller();
controller.solicitudesSimilares = [ {"nombrePrestacion": "aceras", "rubro": "rubro", "refuerzos": 5} ];
expect(controller.getTitle()).to.equal("hello -- aceras");
});
});<|fim▁end|> | describe("Controller Test", () => { |
<|file_name|>GPUCommon.cpp<|end_file_name|><|fim▁begin|>#include <algorithm>
#include "native/base/mutex.h"
#include "native/base/timeutil.h"
#include "GeDisasm.h"
#include "GPUCommon.h"
#include "GPUState.h"
#include "ChunkFile.h"
#include "Core/Config.h"
#include "Core/CoreTiming.h"
#include "Core/MemMap.h"
#include "Core/Host.h"
#include "Core/Reporting.h"
#include "Core/HLE/sceKernelMemory.h"
#include "Core/HLE/sceKernelInterrupt.h"
#include "Core/HLE/sceGe.h"
GPUCommon::GPUCommon() :
currentList(NULL),
isbreak(false),
drawCompleteTicks(0),
busyTicks(0),
dumpNextFrame_(false),
dumpThisFrame_(false),
interruptsEnabled_(true),
curTickEst_(0)
{
memset(dls, 0, sizeof(dls));
for (int i = 0; i < DisplayListMaxCount; ++i) {
dls[i].state = PSP_GE_DL_STATE_NONE;
dls[i].waitTicks = 0;
}
SetThreadEnabled(g_Config.bSeparateCPUThread);
}
void GPUCommon::PopDLQueue() {
easy_guard guard(listLock);
if(!dlQueue.empty()) {
dlQueue.pop_front();
if(!dlQueue.empty()) {
bool running = currentList->state == PSP_GE_DL_STATE_RUNNING;
currentList = &dls[dlQueue.front()];
if (running)
currentList->state = PSP_GE_DL_STATE_RUNNING;
} else {
currentList = NULL;
}
}
}
u32 GPUCommon::DrawSync(int mode) {
// FIXME: Workaround for displaylists sometimes hanging unprocessed. Not yet sure of the cause.
if (g_Config.bSeparateCPUThread) {
// FIXME: Workaround for displaylists sometimes hanging unprocessed. Not yet sure of the cause.
ScheduleEvent(GPU_EVENT_PROCESS_QUEUE);
// Sync first, because the CPU is usually faster than the emulated GPU.
SyncThread();
}
easy_guard guard(listLock);
if (mode < 0 || mode > 1)
return SCE_KERNEL_ERROR_INVALID_MODE;
if (mode == 0) {
if (!__KernelIsDispatchEnabled()) {
return SCE_KERNEL_ERROR_CAN_NOT_WAIT;
}
if (__IsInInterrupt()) {
return SCE_KERNEL_ERROR_ILLEGAL_CONTEXT;
}
if (drawCompleteTicks > CoreTiming::GetTicks()) {
__GeWaitCurrentThread(WAITTYPE_GEDRAWSYNC, 1, "GeDrawSync");
} else {
for (int i = 0; i < DisplayListMaxCount; ++i) {
if (dls[i].state == PSP_GE_DL_STATE_COMPLETED) {
dls[i].state = PSP_GE_DL_STATE_NONE;
}
}
}
return 0;
}
// If there's no current list, it must be complete.
DisplayList *top = NULL;
for (auto it = dlQueue.begin(), end = dlQueue.end(); it != end; ++it) {
if (dls[*it].state != PSP_GE_DL_STATE_COMPLETED) {
top = &dls[*it];
break;
}
}
if (!top || top->state == PSP_GE_DL_STATE_COMPLETED)
return PSP_GE_LIST_COMPLETED;
if (currentList->pc == currentList->stall)
return PSP_GE_LIST_STALLING;
return PSP_GE_LIST_DRAWING;
}
<|fim▁hole|> if (dlQueue.empty()) {
for (int i = 0; i < DisplayListMaxCount; ++i)
dls[i].state = PSP_GE_DL_STATE_NONE;
}
}
int GPUCommon::ListSync(int listid, int mode) {
if (g_Config.bSeparateCPUThread) {
// FIXME: Workaround for displaylists sometimes hanging unprocessed. Not yet sure of the cause.
ScheduleEvent(GPU_EVENT_PROCESS_QUEUE);
// Sync first, because the CPU is usually faster than the emulated GPU.
SyncThread();
}
easy_guard guard(listLock);
if (listid < 0 || listid >= DisplayListMaxCount)
return SCE_KERNEL_ERROR_INVALID_ID;
if (mode < 0 || mode > 1)
return SCE_KERNEL_ERROR_INVALID_MODE;
DisplayList& dl = dls[listid];
if (mode == 1) {
switch (dl.state) {
case PSP_GE_DL_STATE_QUEUED:
if (dl.interrupted)
return PSP_GE_LIST_PAUSED;
return PSP_GE_LIST_QUEUED;
case PSP_GE_DL_STATE_RUNNING:
if (dl.pc == dl.stall)
return PSP_GE_LIST_STALLING;
return PSP_GE_LIST_DRAWING;
case PSP_GE_DL_STATE_COMPLETED:
return PSP_GE_LIST_COMPLETED;
case PSP_GE_DL_STATE_PAUSED:
return PSP_GE_LIST_PAUSED;
default:
return SCE_KERNEL_ERROR_INVALID_ID;
}
}
if (!__KernelIsDispatchEnabled()) {
return SCE_KERNEL_ERROR_CAN_NOT_WAIT;
}
if (__IsInInterrupt()) {
return SCE_KERNEL_ERROR_ILLEGAL_CONTEXT;
}
if (dl.waitTicks > CoreTiming::GetTicks()) {
__GeWaitCurrentThread(WAITTYPE_GELISTSYNC, listid, "GeListSync");
}
return PSP_GE_LIST_COMPLETED;
}
u32 GPUCommon::EnqueueList(u32 listpc, u32 stall, int subIntrBase, bool head) {
easy_guard guard(listLock);
// TODO Check the stack values in missing arg and ajust the stack depth
// Check alignment
// TODO Check the context and stack alignement too
if (((listpc | stall) & 3) != 0)
return 0x80000103;
int id = -1;
bool oldCompatibility = true;
if (sceKernelGetCompiledSdkVersion() > 0x01FFFFFF) {
//numStacks = 0;
//stack = NULL;
oldCompatibility = false;
}
u64 currentTicks = CoreTiming::GetTicks();
for (int i = 0; i < DisplayListMaxCount; ++i)
{
if (dls[i].state != PSP_GE_DL_STATE_NONE && dls[i].state != PSP_GE_DL_STATE_COMPLETED) {
if (dls[i].pc == listpc && !oldCompatibility) {
ERROR_LOG(G3D, "sceGeListEnqueue: can't enqueue, list address %08X already used", listpc);
return 0x80000021;
}
//if(dls[i].stack == stack) {
// ERROR_LOG(G3D, "sceGeListEnqueue: can't enqueue, list stack %08X already used", context);
// return 0x80000021;
//}
}
if (dls[i].state == PSP_GE_DL_STATE_NONE && !dls[i].pendingInterrupt)
{
// Prefer a list that isn't used
id = i;
break;
}
if (id < 0 && dls[i].state == PSP_GE_DL_STATE_COMPLETED && !dls[i].pendingInterrupt && dls[i].waitTicks < currentTicks)
{
id = i;
}
}
if (id < 0)
{
ERROR_LOG_REPORT(G3D, "No DL ID available to enqueue");
for(auto it = dlQueue.begin(); it != dlQueue.end(); ++it) {
DisplayList &dl = dls[*it];
DEBUG_LOG(G3D, "DisplayList %d status %d pc %08x stall %08x", *it, dl.state, dl.pc, dl.stall);
}
return SCE_KERNEL_ERROR_OUT_OF_MEMORY;
}
DisplayList &dl = dls[id];
dl.id = id;
dl.startpc = listpc & 0x0FFFFFFF;
dl.pc = listpc & 0x0FFFFFFF;
dl.stall = stall & 0x0FFFFFFF;
dl.subIntrBase = std::max(subIntrBase, -1);
dl.stackptr = 0;
dl.signal = PSP_GE_SIGNAL_NONE;
dl.interrupted = false;
dl.waitTicks = (u64)-1;
dl.interruptsEnabled = interruptsEnabled_;
if (head) {
if (currentList) {
if (currentList->state != PSP_GE_DL_STATE_PAUSED)
return SCE_KERNEL_ERROR_INVALID_VALUE;
currentList->state = PSP_GE_DL_STATE_QUEUED;
}
dl.state = PSP_GE_DL_STATE_PAUSED;
currentList = &dl;
dlQueue.push_front(id);
} else if (currentList) {
dl.state = PSP_GE_DL_STATE_QUEUED;
dlQueue.push_back(id);
} else {
dl.state = PSP_GE_DL_STATE_RUNNING;
currentList = &dl;
dlQueue.push_front(id);
drawCompleteTicks = (u64)-1;
// TODO save context when starting the list if param is set
guard.unlock();
ProcessDLQueue();
}
return id;
}
u32 GPUCommon::DequeueList(int listid) {
easy_guard guard(listLock);
if (listid < 0 || listid >= DisplayListMaxCount || dls[listid].state == PSP_GE_DL_STATE_NONE)
return SCE_KERNEL_ERROR_INVALID_ID;
if (dls[listid].state == PSP_GE_DL_STATE_RUNNING || dls[listid].state == PSP_GE_DL_STATE_PAUSED)
return 0x80000021;
dls[listid].state = PSP_GE_DL_STATE_NONE;
if (listid == dlQueue.front())
PopDLQueue();
else
dlQueue.remove(listid);
dls[listid].waitTicks = 0;
__GeTriggerWait(WAITTYPE_GELISTSYNC, listid);
CheckDrawSync();
return 0;
}
u32 GPUCommon::UpdateStall(int listid, u32 newstall) {
easy_guard guard(listLock);
if (listid < 0 || listid >= DisplayListMaxCount || dls[listid].state == PSP_GE_DL_STATE_NONE)
return SCE_KERNEL_ERROR_INVALID_ID;
dls[listid].stall = newstall & 0x0FFFFFFF;
if (dls[listid].signal == PSP_GE_SIGNAL_HANDLER_PAUSE)
dls[listid].signal = PSP_GE_SIGNAL_HANDLER_SUSPEND;
guard.unlock();
ProcessDLQueue();
return 0;
}
u32 GPUCommon::Continue() {
easy_guard guard(listLock);
if (!currentList)
return 0;
if (currentList->state == PSP_GE_DL_STATE_PAUSED)
{
if (!isbreak)
{
if (currentList->signal == PSP_GE_SIGNAL_HANDLER_PAUSE)
return 0x80000021;
currentList->state = PSP_GE_DL_STATE_RUNNING;
currentList->signal = PSP_GE_SIGNAL_NONE;
// TODO Restore context of DL is necessary
// TODO Restore BASE
// We have a list now, so it's not complete.
drawCompleteTicks = (u64)-1;
}
else
currentList->state = PSP_GE_DL_STATE_QUEUED;
}
else if (currentList->state == PSP_GE_DL_STATE_RUNNING)
{
if (sceKernelGetCompiledSdkVersion() >= 0x02000000)
return 0x80000020;
return -1;
}
else
{
if (sceKernelGetCompiledSdkVersion() >= 0x02000000)
return 0x80000004;
return -1;
}
guard.unlock();
ProcessDLQueue();
return 0;
}
u32 GPUCommon::Break(int mode) {
easy_guard guard(listLock);
if (mode < 0 || mode > 1)
return SCE_KERNEL_ERROR_INVALID_MODE;
if (!currentList)
return 0x80000020;
if (mode == 1)
{
// Clear the queue
dlQueue.clear();
for (int i = 0; i < DisplayListMaxCount; ++i)
{
dls[i].state = PSP_GE_DL_STATE_NONE;
dls[i].signal = PSP_GE_SIGNAL_NONE;
}
currentList = NULL;
return 0;
}
if (currentList->state == PSP_GE_DL_STATE_NONE || currentList->state == PSP_GE_DL_STATE_COMPLETED)
{
if (sceKernelGetCompiledSdkVersion() >= 0x02000000)
return 0x80000004;
return -1;
}
if (currentList->state == PSP_GE_DL_STATE_PAUSED)
{
if (sceKernelGetCompiledSdkVersion() > 0x02000010)
{
if (currentList->signal == PSP_GE_SIGNAL_HANDLER_PAUSE)
{
ERROR_LOG_REPORT(G3D, "sceGeBreak: can't break signal-pausing list");
}
else
return 0x80000020;
}
return 0x80000021;
}
if (currentList->state == PSP_GE_DL_STATE_QUEUED)
{
currentList->state = PSP_GE_DL_STATE_PAUSED;
return currentList->id;
}
// TODO Save BASE
// TODO Adjust pc to be just before SIGNAL/END
// TODO: Is this right?
if (currentList->signal == PSP_GE_SIGNAL_SYNC)
currentList->pc += 8;
currentList->interrupted = true;
currentList->state = PSP_GE_DL_STATE_PAUSED;
currentList->signal = PSP_GE_SIGNAL_HANDLER_SUSPEND;
isbreak = true;
return currentList->id;
}
bool GPUCommon::InterpretList(DisplayList &list) {
// Initialized to avoid a race condition with bShowDebugStats changing.
double start = 0.0;
if (g_Config.bShowDebugStats) {
time_update();
start = time_now_d();
}
easy_guard guard(listLock);
// TODO: This has to be right... but it freezes right now?
//if (list.state == PSP_GE_DL_STATE_PAUSED)
// return false;
currentList = &list;
// I don't know if this is the correct place to zero this, but something
// need to do it. See Sol Trigger title screen.
// TODO: Maybe this is per list? Should a stalled list remember the old value?
gstate_c.offsetAddr = 0;
if (!Memory::IsValidAddress(list.pc)) {
ERROR_LOG_REPORT(G3D, "DL PC = %08x WTF!!!!", list.pc);
return true;
}
#if defined(USING_QT_UI)
if (host->GpuStep()) {
host->SendGPUStart();
}
#endif
cycleLastPC = list.pc;
downcount = list.stall == 0 ? 0x0FFFFFFF : (list.stall - list.pc) / 4;
list.state = PSP_GE_DL_STATE_RUNNING;
list.interrupted = false;
gpuState = list.pc == list.stall ? GPUSTATE_STALL : GPUSTATE_RUNNING;
guard.unlock();
const bool dumpThisFrame = dumpThisFrame_;
// TODO: Add check for displaylist debugger.
const bool useFastRunLoop = !dumpThisFrame;
while (gpuState == GPUSTATE_RUNNING) {
{
easy_guard innerGuard(listLock);
if (list.pc == list.stall) {
gpuState = GPUSTATE_STALL;
downcount = 0;
}
}
if (useFastRunLoop) {
FastRunLoop(list);
} else {
SlowRunLoop(list);
}
{
easy_guard innerGuard(listLock);
downcount = list.stall == 0 ? 0x0FFFFFFF : (list.stall - list.pc) / 4;
if (gpuState == GPUSTATE_STALL && list.stall != list.pc) {
// Unstalled.
gpuState = GPUSTATE_RUNNING;
}
}
}
// We haven't run the op at list.pc, so it shouldn't count.
if (cycleLastPC != list.pc) {
UpdatePC(list.pc - 4, list.pc);
}
if (g_Config.bShowDebugStats) {
time_update();
gpuStats.msProcessingDisplayLists += time_now_d() - start;
}
return gpuState == GPUSTATE_DONE || gpuState == GPUSTATE_ERROR;
}
void GPUCommon::SlowRunLoop(DisplayList &list)
{
const bool dumpThisFrame = dumpThisFrame_;
while (downcount > 0)
{
u32 op = Memory::ReadUnchecked_U32(list.pc);
u32 cmd = op >> 24;
#if defined(USING_QT_UI)
if (host->GpuStep())
host->SendGPUWait(cmd, list.pc, &gstate);
#endif
u32 diff = op ^ gstate.cmdmem[cmd];
PreExecuteOp(op, diff);
if (dumpThisFrame) {
char temp[256];
u32 prev = Memory::ReadUnchecked_U32(list.pc - 4);
GeDisassembleOp(list.pc, op, prev, temp);
NOTICE_LOG(G3D, "%s", temp);
}
gstate.cmdmem[cmd] = op;
ExecuteOp(op, diff);
list.pc += 4;
--downcount;
}
}
// The newPC parameter is used for jumps, we don't count cycles between.
inline void GPUCommon::UpdatePC(u32 currentPC, u32 newPC) {
// Rough estimate, 2 CPU ticks (it's double the clock rate) per GPU instruction.
int executed = (currentPC - cycleLastPC) / 4;
cyclesExecuted += 2 * executed;
gpuStats.otherGPUCycles += 2 * executed;
cycleLastPC = newPC == 0 ? currentPC : newPC;
gpuStats.gpuCommandsAtCallLevel[std::min(currentList->stackptr, 3)] += executed;
// Exit the runloop and recalculate things. This isn't common.
downcount = 0;
}
void GPUCommon::ReapplyGfxState() {
if (IsOnSeparateCPUThread()) {
ScheduleEvent(GPU_EVENT_REAPPLY_GFX_STATE);
} else {
ReapplyGfxStateInternal();
}
}
void GPUCommon::ReapplyGfxStateInternal() {
// ShaderManager_DirtyShader();
// The commands are embedded in the command memory so we can just reexecute the words. Convenient.
// To be safe we pass 0xFFFFFFFF as the diff.
/*
ExecuteOp(gstate.cmdmem[GE_CMD_ALPHABLENDENABLE], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_ALPHATESTENABLE], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_BLENDMODE], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_ZTEST], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_ZTESTENABLE], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_CULL], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_CULLFACEENABLE], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_SCISSOR1], 0xFFFFFFFF);
ExecuteOp(gstate.cmdmem[GE_CMD_SCISSOR2], 0xFFFFFFFF);
*/
for (int i = GE_CMD_VERTEXTYPE; i < GE_CMD_BONEMATRIXNUMBER; i++) {
if (i != GE_CMD_ORIGIN) {
ExecuteOp(gstate.cmdmem[i], 0xFFFFFFFF);
}
}
// Can't write to bonematrixnumber here
for (int i = GE_CMD_MORPHWEIGHT0; i < GE_CMD_PATCHFACING; i++) {
ExecuteOp(gstate.cmdmem[i], 0xFFFFFFFF);
}
// There are a few here in the middle that we shouldn't execute...
for (int i = GE_CMD_VIEWPORTX1; i < GE_CMD_TRANSFERSTART; i++) {
ExecuteOp(gstate.cmdmem[i], 0xFFFFFFFF);
}
// TODO: there's more...
}
inline void GPUCommon::UpdateState(GPUState state) {
gpuState = state;
if (state != GPUSTATE_RUNNING)
downcount = 0;
}
void GPUCommon::ProcessEvent(GPUEvent ev) {
switch (ev.type) {
case GPU_EVENT_PROCESS_QUEUE:
ProcessDLQueueInternal();
break;
case GPU_EVENT_REAPPLY_GFX_STATE:
ReapplyGfxStateInternal();
break;
default:
ERROR_LOG_REPORT(G3D, "Unexpected GPU event type: %d", (int)ev);
}
}
int GPUCommon::GetNextListIndex() {
easy_guard guard(listLock);
auto iter = dlQueue.begin();
if (iter != dlQueue.end()) {
return *iter;
} else {
return -1;
}
}
bool GPUCommon::ProcessDLQueue() {
ScheduleEvent(GPU_EVENT_PROCESS_QUEUE);
return true;
}
void GPUCommon::ProcessDLQueueInternal() {
startingTicks = CoreTiming::GetTicks();
cyclesExecuted = 0;
UpdateTickEstimate(std::max(busyTicks, startingTicks + cyclesExecuted));
// Seems to be correct behaviour to process the list anyway?
if (startingTicks < busyTicks) {
DEBUG_LOG(G3D, "Can't execute a list yet, still busy for %lld ticks", busyTicks - startingTicks);
//return;
}
for (int listIndex = GetNextListIndex(); listIndex != -1; listIndex = GetNextListIndex()) {
DisplayList &l = dls[listIndex];
DEBUG_LOG(G3D, "Okay, starting DL execution at %08x - stall = %08x", l.pc, l.stall);
if (!InterpretList(l)) {
return;
} else {
easy_guard guard(listLock);
// At the end, we can remove it from the queue and continue.
dlQueue.erase(std::remove(dlQueue.begin(), dlQueue.end(), listIndex), dlQueue.end());
UpdateTickEstimate(std::max(busyTicks, startingTicks + cyclesExecuted));
}
}
easy_guard guard(listLock);
currentList = NULL;
drawCompleteTicks = startingTicks + cyclesExecuted;
busyTicks = std::max(busyTicks, drawCompleteTicks);
__GeTriggerSync(WAITTYPE_GEDRAWSYNC, 1, drawCompleteTicks);
// Since the event is in CoreTiming, we're in sync. Just set 0 now.
UpdateTickEstimate(0);
}
void GPUCommon::PreExecuteOp(u32 op, u32 diff) {
// Nothing to do
}
void GPUCommon::ExecuteOp(u32 op, u32 diff) {
u32 cmd = op >> 24;
u32 data = op & 0xFFFFFF;
// Handle control and drawing commands here directly. The others we delegate.
switch (cmd) {
case GE_CMD_NOP:
break;
case GE_CMD_OFFSETADDR:
gstate_c.offsetAddr = data << 8;
break;
case GE_CMD_ORIGIN:
{
easy_guard guard(listLock);
gstate_c.offsetAddr = currentList->pc;
}
break;
case GE_CMD_JUMP:
{
easy_guard guard(listLock);
u32 target = gstate_c.getRelativeAddress(data);
if (Memory::IsValidAddress(target)) {
UpdatePC(currentList->pc, target - 4);
currentList->pc = target - 4; // pc will be increased after we return, counteract that
} else {
ERROR_LOG_REPORT(G3D, "JUMP to illegal address %08x - ignoring! data=%06x", target, data);
}
}
break;
case GE_CMD_CALL:
{
easy_guard guard(listLock);
// Saint Seiya needs correct support for relative calls.
u32 retval = currentList->pc + 4;
u32 target = gstate_c.getRelativeAddress(data);
if (currentList->stackptr == ARRAY_SIZE(currentList->stack)) {
ERROR_LOG_REPORT(G3D, "CALL: Stack full!");
} else if (!Memory::IsValidAddress(target)) {
ERROR_LOG_REPORT(G3D, "CALL to illegal address %08x - ignoring! data=%06x", target, data);
} else {
auto &stackEntry = currentList->stack[currentList->stackptr++];
stackEntry.pc = retval;
stackEntry.offsetAddr = gstate_c.offsetAddr;
UpdatePC(currentList->pc, target - 4);
currentList->pc = target - 4; // pc will be increased after we return, counteract that
}
}
break;
case GE_CMD_RET:
{
easy_guard guard(listLock);
if (currentList->stackptr == 0) {
ERROR_LOG_REPORT(G3D, "RET: Stack empty!");
} else {
auto &stackEntry = currentList->stack[--currentList->stackptr];
gstate_c.offsetAddr = stackEntry.offsetAddr;
u32 target = (currentList->pc & 0xF0000000) | (stackEntry.pc & 0x0FFFFFFF);
UpdatePC(currentList->pc, target - 4);
currentList->pc = target - 4;
if (!Memory::IsValidAddress(currentList->pc)) {
ERROR_LOG_REPORT(G3D, "Invalid DL PC %08x on return", currentList->pc);
UpdateState(GPUSTATE_ERROR);
}
}
}
break;
case GE_CMD_SIGNAL:
case GE_CMD_FINISH:
// Processed in GE_END.
break;
case GE_CMD_END: {
easy_guard guard(listLock);
u32 prev = Memory::ReadUnchecked_U32(currentList->pc - 4);
UpdatePC(currentList->pc);
switch (prev >> 24) {
case GE_CMD_SIGNAL:
{
// TODO: see http://code.google.com/p/jpcsp/source/detail?r=2935#
SignalBehavior behaviour = static_cast<SignalBehavior>((prev >> 16) & 0xFF);
int signal = prev & 0xFFFF;
int enddata = data & 0xFFFF;
bool trigger = true;
currentList->subIntrToken = signal;
switch (behaviour) {
case PSP_GE_SIGNAL_HANDLER_SUSPEND:
if (sceKernelGetCompiledSdkVersion() <= 0x02000010)
currentList->state = PSP_GE_DL_STATE_PAUSED;
currentList->signal = behaviour;
DEBUG_LOG(G3D, "Signal with Wait UNIMPLEMENTED! signal/end: %04x %04x", signal, enddata);
break;
case PSP_GE_SIGNAL_HANDLER_CONTINUE:
currentList->signal = behaviour;
DEBUG_LOG(G3D, "Signal without wait. signal/end: %04x %04x", signal, enddata);
break;
case PSP_GE_SIGNAL_HANDLER_PAUSE:
currentList->state = PSP_GE_DL_STATE_PAUSED;
currentList->signal = behaviour;
ERROR_LOG_REPORT(G3D, "Signal with Pause UNIMPLEMENTED! signal/end: %04x %04x", signal, enddata);
break;
case PSP_GE_SIGNAL_SYNC:
currentList->signal = behaviour;
DEBUG_LOG(G3D, "Signal with Sync. signal/end: %04x %04x", signal, enddata);
break;
case PSP_GE_SIGNAL_JUMP:
{
trigger = false;
currentList->signal = behaviour;
// pc will be increased after we return, counteract that.
u32 target = ((signal << 16) | enddata) - 4;
if (!Memory::IsValidAddress(target)) {
ERROR_LOG_REPORT(G3D, "Signal with Jump: bad address. signal/end: %04x %04x", signal, enddata);
} else {
UpdatePC(currentList->pc, target);
currentList->pc = target;
DEBUG_LOG(G3D, "Signal with Jump. signal/end: %04x %04x", signal, enddata);
}
}
break;
case PSP_GE_SIGNAL_CALL:
{
trigger = false;
currentList->signal = behaviour;
// pc will be increased after we return, counteract that.
u32 target = ((signal << 16) | enddata) - 4;
if (currentList->stackptr == ARRAY_SIZE(currentList->stack)) {
ERROR_LOG_REPORT(G3D, "Signal with Call: stack full. signal/end: %04x %04x", signal, enddata);
} else if (!Memory::IsValidAddress(target)) {
ERROR_LOG_REPORT(G3D, "Signal with Call: bad address. signal/end: %04x %04x", signal, enddata);
} else {
// TODO: This might save/restore other state...
auto &stackEntry = currentList->stack[currentList->stackptr++];
stackEntry.pc = currentList->pc;
stackEntry.offsetAddr = gstate_c.offsetAddr;
UpdatePC(currentList->pc, target);
currentList->pc = target;
DEBUG_LOG(G3D, "Signal with Call. signal/end: %04x %04x", signal, enddata);
}
}
break;
case PSP_GE_SIGNAL_RET:
{
trigger = false;
currentList->signal = behaviour;
if (currentList->stackptr == 0) {
ERROR_LOG_REPORT(G3D, "Signal with Return: stack empty. signal/end: %04x %04x", signal, enddata);
} else {
// TODO: This might save/restore other state...
auto &stackEntry = currentList->stack[--currentList->stackptr];
gstate_c.offsetAddr = stackEntry.offsetAddr;
UpdatePC(currentList->pc, stackEntry.pc);
currentList->pc = stackEntry.pc;
DEBUG_LOG(G3D, "Signal with Return. signal/end: %04x %04x", signal, enddata);
}
}
break;
default:
ERROR_LOG_REPORT(G3D, "UNKNOWN Signal UNIMPLEMENTED %i ! signal/end: %04x %04x", behaviour, signal, enddata);
break;
}
// TODO: Technically, jump/call/ret should generate an interrupt, but before the pc change maybe?
if (currentList->interruptsEnabled && trigger) {
if (__GeTriggerInterrupt(currentList->id, currentList->pc, startingTicks + cyclesExecuted)) {
currentList->pendingInterrupt = true;
UpdateState(GPUSTATE_INTERRUPT);
}
}
}
break;
case GE_CMD_FINISH:
switch (currentList->signal) {
case PSP_GE_SIGNAL_HANDLER_PAUSE:
if (currentList->interruptsEnabled) {
if (__GeTriggerInterrupt(currentList->id, currentList->pc, startingTicks + cyclesExecuted)) {
currentList->pendingInterrupt = true;
UpdateState(GPUSTATE_INTERRUPT);
}
}
break;
case PSP_GE_SIGNAL_SYNC:
currentList->signal = PSP_GE_SIGNAL_NONE;
// TODO: Technically this should still cause an interrupt. Probably for memory sync.
break;
default:
currentList->subIntrToken = prev & 0xFFFF;
currentList->state = PSP_GE_DL_STATE_COMPLETED;
UpdateState(GPUSTATE_DONE);
if (currentList->interruptsEnabled && __GeTriggerInterrupt(currentList->id, currentList->pc, startingTicks + cyclesExecuted)) {
currentList->pendingInterrupt = true;
} else {
currentList->waitTicks = startingTicks + cyclesExecuted;
busyTicks = std::max(busyTicks, currentList->waitTicks);
__GeTriggerSync(WAITTYPE_GELISTSYNC, currentList->id, currentList->waitTicks);
}
break;
}
break;
default:
DEBUG_LOG(G3D,"Ah, not finished: %06x", prev & 0xFFFFFF);
break;
}
break;
}
default:
DEBUG_LOG(G3D,"DL Unknown: %08x @ %08x", op, currentList == NULL ? 0 : currentList->pc);
break;
}
}
void GPUCommon::DoState(PointerWrap &p) {
easy_guard guard(listLock);
p.Do<int>(dlQueue);
p.DoArray(dls, ARRAY_SIZE(dls));
int currentID = 0;
if (currentList != NULL) {
ptrdiff_t off = currentList - &dls[0];
currentID = (int) (off / sizeof(DisplayList));
}
p.Do(currentID);
if (currentID == 0) {
currentList = NULL;
} else {
currentList = &dls[currentID];
}
p.Do(interruptRunning);
p.Do(gpuState);
p.Do(isbreak);
p.Do(drawCompleteTicks);
p.Do(busyTicks);
p.DoMarker("GPUCommon");
}
void GPUCommon::InterruptStart(int listid) {
interruptRunning = true;
}
void GPUCommon::InterruptEnd(int listid) {
easy_guard guard(listLock);
interruptRunning = false;
isbreak = false;
DisplayList &dl = dls[listid];
dl.pendingInterrupt = false;
// TODO: Unless the signal handler could change it?
if (dl.state == PSP_GE_DL_STATE_COMPLETED || dl.state == PSP_GE_DL_STATE_NONE) {
dl.waitTicks = 0;
__GeTriggerWait(WAITTYPE_GELISTSYNC, listid);
}
if (dl.signal == PSP_GE_SIGNAL_HANDLER_PAUSE)
dl.signal = PSP_GE_SIGNAL_HANDLER_SUSPEND;
guard.unlock();
ProcessDLQueue();
}
// TODO: Maybe cleaner to keep this in GE and trigger the clear directly?
void GPUCommon::SyncEnd(WaitType waitType, int listid, bool wokeThreads) {
easy_guard guard(listLock);
if (waitType == WAITTYPE_GEDRAWSYNC && wokeThreads)
{
for (int i = 0; i < DisplayListMaxCount; ++i) {
if (dls[i].state == PSP_GE_DL_STATE_COMPLETED) {
dls[i].state = PSP_GE_DL_STATE_NONE;
}
}
}
}<|fim▁end|> | void GPUCommon::CheckDrawSync() {
easy_guard guard(listLock); |
<|file_name|>test_anonymous_epic.py<|end_file_name|><|fim▁begin|><|fim▁hole|>Anonymous Epic
Storyboard is defined within the comments of the program itself
"""
import unittest
from flask import url_for
from biblib.views.http_errors import NO_PERMISSION_ERROR
from biblib.tests.stubdata.stub_data import UserShop, LibraryShop
from biblib.tests.base import TestCaseDatabase, MockSolrBigqueryService, MockEndPoint
class TestAnonymousEpic(TestCaseDatabase):
"""
Base class used to test the Big Share Admin Epic
"""
def test_anonymous_epic(self):
"""
Carries out the epic 'Anonymous', where a user tries to access a
private library and also a public library. The user also (artificial)
tries to access any other endpoints that do not have any scopes set
:return: no return
"""
# Define two sets of stub data
# user: who makes a library (e.g., Dave the librarian)
# anon: someone using the BBB client
user_anonymous = UserShop()
user_dave = UserShop()
library_dave_private = LibraryShop(public=False)
library_dave_public = LibraryShop(public=True)
# Dave makes two libraries
# One private library
# One public library
url = url_for('userview')
response = self.client.post(
url,
data=library_dave_private.user_view_post_data_json,
headers=user_dave.headers
)
library_id_private = response.json['id']
self.assertEqual(response.status_code, 200, response)
response = self.client.post(
url,
data=library_dave_public.user_view_post_data_json,
headers=user_dave.headers
)
library_id_public = response.json['id']
self.assertEqual(response.status_code, 200, response)
# Anonymous user tries to access the private library. But cannot.
url = url_for('libraryview', library=library_id_private)
with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, \
MockEndPoint([user_dave, user_anonymous]) as EP:
response = self.client.get(
url,
headers=user_anonymous.headers
)
self.assertEqual(response.status_code, NO_PERMISSION_ERROR['number'])
self.assertEqual(response.json['error'], NO_PERMISSION_ERROR['body'])
# Anonymous user tries to access the public library. And can.
url = url_for('libraryview', library=library_id_public)
with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, \
MockEndPoint([user_dave, user_anonymous]) as EP:
response = self.client.get(
url,
headers=user_anonymous.headers
)
self.assertEqual(response.status_code, 200)
self.assertIn('documents', response.json)
def test_scopes(self):
"""
Separately test the number of scopes that are scopeless. This will only
fail during staging when the scopes are all set to be open. In the
production system, there is only once end point that will be scopeless.
"""
response = self.client.get('/resources')
end_points = []
for end_point in response.json.keys():
if len(response.json[end_point]['scopes']) == 0:
end_points.append(end_point)
self.assertEqual(1, len(end_points))
self.assertEqual('/libraries/<string:library>', end_points[0])
if __name__ == '__main__':
unittest.main(verbosity=2)<|fim▁end|> | """
Functional test
|
<|file_name|>RobotTemplate.java<|end_file_name|><|fim▁begin|>/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.SimpleRobot;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class RobotTemplate extends SimpleRobot {
private Joystick joystick = new Joystick(1);
private Drivetrain drivetrain;
private BowlerArm arm;
Compressor compressor;
Pan pan;
//int port_1 = 7; //these ports were placeholders, no longer applicable
//int port_2 = 7;
public RobotTemplate() {
drivetrain = new Drivetrain();
arm = new BowlerArm();
pan = new Pan();
compressor = new Compressor(7, 7);//7 for the switch, 7 for the relay
}
/**
* This function is called once each time the robot enters autonomous mode.
*/
public void autonomous() {
drivetrain.set(1, 1);
sleep(5000);
drivetrain.set(0,0);
// arm.auto();
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
compressor.start();
arm.setSolenoid(-1);
while (isOperatorControl()) {
//drivetrain updates
double lstick = -joystick.getRawAxis(2);
double rstick = -joystick.getRawAxis(4);
drivetrain.set(Math.abs(lstick) * lstick, Math.abs(rstick) * rstick); //If I'm not mistaken, this is the most convenient way to square in Java?
//pan updates version 2 (Amita); this is basic and can be used for backup
if(joystick.getRawButton(10)){
pan.endGame();
}
else{
pan.resetServo();
}
//bowler arm updates
if (joystick.getRawButton(7)) {
arm.rampDown();
} else if (joystick.getRawButton(5)) {
arm.rampUp();
} else {
arm.setRamp(0);
}
arm.setSolenoid((int) joystick.getRawAxis(6));
}
}
/*
*changes the servo state based on the button being pressed.
*once it is pressed, it is set to the opposite of what is was at the start, ditto for release.
*/
/**
* This function is called once each time the robot enters test mode.
*/
public void test() {
<|fim▁hole|> public void updateArm(){
}
public void updatePan(){
}
public static void sleep(long ms){
long t=System.currentTimeMillis()+ms;
while(System.currentTimeMillis()<t){
//do nothing!
}
}
}<|fim▁end|> | }
public void updateDrivetrain(){
} |
<|file_name|>operation_progress.go<|end_file_name|><|fim▁begin|>// Copyright 2018 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main<|fim▁hole|>import (
"context"
"log"
"golang.org/x/oauth2/google"
compute "google.golang.org/api/compute/v1"
)
const (
projectID = "some-project-id"
zone = "some-zone"
operationID = "some-operation-id"
)
func operationProgressMain() {
ctx := context.Background()
client, err := google.DefaultClient(ctx, compute.CloudPlatformScope)
if err != nil {
log.Fatal(err)
}
svc, err := compute.New(client)
if err != nil {
log.Fatal(err)
}
for {
resp, err := svc.ZoneOperations.Get(projectID, zone, operationID).Do()
if err != nil {
log.Fatal(err)
}
// Note: the response Status may differ between APIs. The string values
// checked here may need to be changed depending on the API.
if resp.Status != "WORKING" && resp.Status != "QUEUED" {
break
}
}
log.Println("operation complete")
}<|fim▁end|> | |
<|file_name|>autoload.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
<|fim▁hole|>for app in settings.INSTALLED_APPS:
if '.' not in app:
continue
mod = import_module(app)
try:
import_module('%s.internal_urls' % app)
except:
if module_has_submodule(mod, 'internal_urls'):
raise<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Native thread-blocking I/O implementation
//!
//! This module contains the implementation of native thread-blocking
//! implementations of I/O on all platforms. This module is not intended to be
//! used directly, but rather the rust runtime will fall back to using it if
//! necessary.
//!
//! Rust code normally runs inside of green tasks with a local scheduler using
//! asynchronous I/O to cooperate among tasks. This model is not always
//! available, however, and that's where these native implementations come into
//! play. The only dependencies of these modules are the normal system libraries
//! that you would find on the respective platform.
use std::c_str::CString;
use std::io;
use std::io::IoError;
use std::io::net::ip::SocketAddr;
use std::io::process::ProcessConfig;<|fim▁hole|>use libc;
use std::os;
use std::rt::rtio;
use std::rt::rtio::{RtioTcpStream, RtioTcpListener, RtioUdpSocket,
RtioUnixListener, RtioPipe, RtioFileStream, RtioProcess,
RtioSignal, RtioTTY, CloseBehavior, RtioTimer};
use ai = std::io::net::addrinfo;
// Local re-exports
pub use self::file::FileDesc;
pub use self::process::Process;
// Native I/O implementations
pub mod addrinfo;
pub mod net;
pub mod process;
#[cfg(unix)]
#[path = "file_unix.rs"]
pub mod file;
#[cfg(windows)]
#[path = "file_win32.rs"]
pub mod file;
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "android")]
#[cfg(target_os = "linux")]
#[path = "timer_unix.rs"]
pub mod timer;
#[cfg(target_os = "win32")]
#[path = "timer_win32.rs"]
pub mod timer;
#[cfg(unix)]
#[path = "pipe_unix.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "pipe_win32.rs"]
pub mod pipe;
#[cfg(unix)] #[path = "c_unix.rs"] mod c;
#[cfg(windows)] #[path = "c_win32.rs"] mod c;
mod timer_helper;
pub type IoResult<T> = Result<T, IoError>;
/// Constructs the error returned by I/O interfaces that this native
/// implementation does not provide (e.g. `signal` below).
fn unimpl() -> IoError {
    IoError {
        kind: io::IoUnavailable,
        desc: "unimplemented I/O interface",
        detail: None,
    }
}
/// Shorthand: wraps the OS's most recent error as an `IoError`.
fn last_error() -> IoError {
    IoError::last_error()
}
// unix has nonzero values as errors
// Translates a Unix-style libc return code into an `IoResult`:
// zero means success, any nonzero value means "consult the last OS error".
fn mkerr_libc(ret: libc::c_int) -> IoResult<()> {
    match ret {
        0 => Ok(()),
        _ => Err(last_error()),
    }
}
// windows has zero values as errors
#[cfg(windows)]
fn mkerr_winbool(ret: libc::c_int) -> IoResult<()> {
    // Windows BOOL convention: zero signals failure, nonzero success.
    if ret == 0 {
        Err(last_error())
    } else {
        Ok(())
    }
}
#[cfg(windows)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
    // Re-invoke `f` as long as it reports an interruption (WSAEINTR);
    // any other result -- success or a real error -- is returned unchanged.
    loop {
        match f() {
            -1 if os::errno() as int == libc::WSAEINTR as int => {}
            n => return n,
        }
    }
}
#[cfg(unix)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
    // Re-invoke `f` as long as it was interrupted by a signal (EINTR);
    // any other result -- success or a real error -- is returned unchanged.
    loop {
        match f() {
            -1 if os::errno() as int == libc::EINTR as int => {}
            n => return n,
        }
    }
}
/// Repeatedly applies `f` (a read/write-style callback taking a raw pointer
/// and a length) until the whole `data` slice has been consumed, `f` reports
/// end-of-stream (0), or an error (-1) occurs.
///
/// Returns the -1 error value as-is on failure, otherwise the total number
/// of bytes processed.
fn keep_going(data: &[u8], f: |*u8, uint| -> i64) -> i64 {
    let origamt = data.len();
    let mut data = data.as_ptr();
    let mut amt = origamt;
    while amt > 0 {
        // `retry` shields us from spurious EINTR/WSAEINTR interruptions.
        let ret = retry(|| f(data, amt) as libc::c_int);
        if ret == 0 {
            break
        } else if ret != -1 {
            // Partial progress: advance past the bytes already handled.
            amt -= ret as uint;
            data = unsafe { data.offset(ret as int) };
        } else {
            return ret as i64;
        }
    }
    return (origamt - amt) as i64;
}
/// Implementation of rt::rtio's IoFactory trait to generate handles to the
/// native I/O functionality.
pub struct IoFactory {
    // Private zero-sized field: forces construction to go through
    // `IoFactory::new()`, which performs the one-time `net::init()`.
    cannot_construct_outside_of_this_module: ()
}
impl IoFactory {
    /// Creates the native I/O factory, running `net::init()` (the
    /// platform's networking initialization) before handing out the handle.
    pub fn new() -> IoFactory {
        net::init();
        IoFactory { cannot_construct_outside_of_this_module: () }
    }
}
// Implementation of the runtime's `IoFactory` trait in terms of the native
// (thread-blocking) modules in this crate. Each constructor builds the
// concrete type and boxes it into the corresponding `Rtio*` trait object.
impl rtio::IoFactory for IoFactory {
    // networking
    fn tcp_connect(&mut self, addr: SocketAddr,
                   timeout: Option<u64>) -> IoResult<~RtioTcpStream:Send> {
        net::TcpStream::connect(addr, timeout).map(|s| ~s as ~RtioTcpStream:Send)
    }
    fn tcp_bind(&mut self, addr: SocketAddr) -> IoResult<~RtioTcpListener:Send> {
        net::TcpListener::bind(addr).map(|s| ~s as ~RtioTcpListener:Send)
    }
    fn udp_bind(&mut self, addr: SocketAddr) -> IoResult<~RtioUdpSocket:Send> {
        net::UdpSocket::bind(addr).map(|u| ~u as ~RtioUdpSocket:Send)
    }
    fn unix_bind(&mut self, path: &CString) -> IoResult<~RtioUnixListener:Send> {
        pipe::UnixListener::bind(path).map(|s| ~s as ~RtioUnixListener:Send)
    }
    fn unix_connect(&mut self, path: &CString) -> IoResult<~RtioPipe:Send> {
        pipe::UnixStream::connect(path).map(|s| ~s as ~RtioPipe:Send)
    }
    fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
                          hint: Option<ai::Hint>) -> IoResult<~[ai::Info]> {
        addrinfo::GetAddrInfoRequest::run(host, servname, hint)
    }

    // filesystem operations
    fn fs_from_raw_fd(&mut self, fd: c_int,
                      close: CloseBehavior) -> ~RtioFileStream:Send {
        // Native file streams have no async close; both close behaviors
        // collapse to a synchronous close-on-drop.
        let close = match close {
            rtio::CloseSynchronously | rtio::CloseAsynchronously => true,
            rtio::DontClose => false
        };
        ~file::FileDesc::new(fd, close) as ~RtioFileStream:Send
    }
    fn fs_open(&mut self, path: &CString, fm: io::FileMode, fa: io::FileAccess)
        -> IoResult<~RtioFileStream:Send> {
        file::open(path, fm, fa).map(|fd| ~fd as ~RtioFileStream:Send)
    }
    fn fs_unlink(&mut self, path: &CString) -> IoResult<()> {
        file::unlink(path)
    }
    fn fs_stat(&mut self, path: &CString) -> IoResult<io::FileStat> {
        file::stat(path)
    }
    fn fs_mkdir(&mut self, path: &CString,
                mode: io::FilePermission) -> IoResult<()> {
        file::mkdir(path, mode)
    }
    fn fs_chmod(&mut self, path: &CString,
                mode: io::FilePermission) -> IoResult<()> {
        file::chmod(path, mode)
    }
    fn fs_rmdir(&mut self, path: &CString) -> IoResult<()> {
        file::rmdir(path)
    }
    fn fs_rename(&mut self, path: &CString, to: &CString) -> IoResult<()> {
        file::rename(path, to)
    }
    fn fs_readdir(&mut self, path: &CString, _flags: c_int) -> IoResult<Vec<Path>> {
        // `_flags` is accepted for interface compatibility but unused here.
        file::readdir(path)
    }
    fn fs_lstat(&mut self, path: &CString) -> IoResult<io::FileStat> {
        file::lstat(path)
    }
    fn fs_chown(&mut self, path: &CString, uid: int, gid: int) -> IoResult<()> {
        file::chown(path, uid, gid)
    }
    fn fs_readlink(&mut self, path: &CString) -> IoResult<Path> {
        file::readlink(path)
    }
    fn fs_symlink(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
        file::symlink(src, dst)
    }
    fn fs_link(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
        file::link(src, dst)
    }
    fn fs_utime(&mut self, src: &CString, atime: u64,
                mtime: u64) -> IoResult<()> {
        file::utime(src, atime, mtime)
    }

    // misc
    fn timer_init(&mut self) -> IoResult<~RtioTimer:Send> {
        timer::Timer::new().map(|t| ~t as ~RtioTimer:Send)
    }
    fn spawn(&mut self, config: ProcessConfig)
            -> IoResult<(~RtioProcess:Send, ~[Option<~RtioPipe:Send>])> {
        // Returns the process handle plus one optional pipe per configured
        // child I/O slot.
        process::Process::spawn(config).map(|(p, io)| {
            (~p as ~RtioProcess:Send,
             io.move_iter().map(|p| p.map(|p| ~p as ~RtioPipe:Send)).collect())
        })
    }
    fn kill(&mut self, pid: libc::pid_t, signum: int) -> IoResult<()> {
        process::Process::kill(pid, signum)
    }
    fn pipe_open(&mut self, fd: c_int) -> IoResult<~RtioPipe:Send> {
        Ok(~file::FileDesc::new(fd, true) as ~RtioPipe:Send)
    }
    fn tty_open(&mut self, fd: c_int, _readable: bool)
        -> IoResult<~RtioTTY:Send>
    {
        // Only descriptors that are actual terminals get a TTY handle.
        if unsafe { libc::isatty(fd) } != 0 {
            Ok(~file::FileDesc::new(fd, true) as ~RtioTTY:Send)
        } else {
            Err(IoError {
                kind: io::MismatchedFileTypeForOperation,
                desc: "file descriptor is not a TTY",
                detail: None,
            })
        }
    }
    fn signal(&mut self, _signal: Signum, _channel: Sender<Signum>)
        -> IoResult<~RtioSignal:Send> {
        // Signal handling is not provided by the native implementation.
        Err(unimpl())
    }
}<|fim▁end|> | use std::io::signal::Signum;
use libc::c_int; |
<|file_name|>docscrape_sphinx.py<|end_file_name|><|fim▁begin|>from __future__ import division, absolute_import, print_function
import sys, re, inspect, textwrap, pydoc
import sphinx
import collections<|fim▁hole|>from .docscrape import NumpyDocString, FunctionDoc, ClassDoc
if sys.version_info[0] >= 3:
sixu = lambda s: s
else:
sixu = lambda s: unicode(s, 'unicode_escape')
class SphinxDocString(NumpyDocString):
    """Render a parsed NumPy-format docstring as Sphinx reST.

    Every ``_str_*`` helper returns a list of reST output lines;
    ``__str__`` stitches them together into the final document.
    """

    def __init__(self, docstring, config=None):
        # ``config`` previously defaulted to a shared mutable ``{}``;
        # normalizing ``None`` here is backward compatible and avoids the
        # mutable-default pitfall.
        config = {} if config is None else config
        NumpyDocString.__init__(self, docstring, config=config)
        self.load_config(config)

    def load_config(self, config):
        # Optional rendering switches; both default to the historical values.
        self.use_plots = config.get('use_plots', False)
        self.class_members_toctree = config.get('class_members_toctree', True)

    # string conversion routines
    def _str_header(self, name, symbol='`'):
        # ``symbol`` is kept for interface compatibility but unused: headers
        # are emitted as rubric directives, not underlined section titles.
        return ['.. rubric:: ' + name, '']

    def _str_field_list(self, name):
        return [':' + name + ':']

    def _str_indent(self, doc, indent=4):
        """Indent every line of *doc* by *indent* spaces."""
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        # Signature rendering is intentionally disabled (Sphinx displays the
        # signature itself).  The previous version kept an unreachable
        # ``if self['Signature']`` branch after this unconditional return;
        # that dead code has been removed.
        return ['']

    def _str_summary(self):
        return self['Summary'] + ['']

    def _str_extended_summary(self):
        return self['Extended Summary'] + ['']

    def _str_returns(self):
        """Render the Returns section as a reST field list."""
        out = []
        if self['Returns']:
            out += self._str_field_list('Returns')
            out += ['']
            for param, param_type, desc in self['Returns']:
                if param_type:
                    out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                              param_type)])
                else:
                    # Unnamed/typeless return value.
                    out += self._str_indent([param.strip()])
                if desc:
                    out += ['']
                    out += self._str_indent(desc, 8)
                out += ['']
        return out

    def _str_param_list(self, name):
        """Render a parameter-style section (Parameters, Raises, ...)."""
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param, param_type, desc in self[name]:
                if param_type:
                    out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                              param_type)])
                else:
                    out += self._str_indent(['**%s**' % param.strip()])
                if desc:
                    out += ['']
                    out += self._str_indent(desc, 8)
                out += ['']
        return out

    @property
    def _obj(self):
        # The wrapped class or function, whichever the subclass stored.
        if hasattr(self, '_cls'):
            return self._cls
        elif hasattr(self, '_f'):
            return self._f
        return None

    def _str_member_list(self, name):
        """
        Generate a member listing, autosummary:: table where possible,
        and a table where not.
        """
        out = []
        if self[name]:
            out += ['.. rubric:: %s' % name, '']
            prefix = getattr(self, '_name', '')

            if prefix:
                prefix = '~%s.' % prefix

            autosum = []
            others = []
            for param, param_type, desc in self[name]:
                param = param.strip()

                # Check if the referenced member can have a docstring or not
                param_obj = getattr(self._obj, param, None)
                if not (callable(param_obj)
                        or isinstance(param_obj, property)
                        or inspect.isgetsetdescriptor(param_obj)):
                    param_obj = None

                if param_obj and (pydoc.getdoc(param_obj) or not desc):
                    # Referenced object has a docstring
                    autosum += ["   %s%s" % (prefix, param)]
                else:
                    others.append((param, param_type, desc))

            if autosum:
                out += ['.. autosummary::']
                if self.class_members_toctree:
                    out += ['   :toctree:']
                out += [''] + autosum

            if others:
                # Plain fixed-width reST table for members without docstrings.
                maxlen_0 = max(3, max([len(x[0]) for x in others]))
                hdr = sixu("=")*maxlen_0 + sixu("  ") + sixu("=")*10
                fmt = sixu('%%%ds  %%s  ') % (maxlen_0,)
                out += ['', hdr]
                for param, param_type, desc in others:
                    desc = sixu(" ").join(x.strip() for x in desc).strip()
                    if param_type:
                        desc = "(%s) %s" % (param_type, desc)
                    out += [fmt % (param.strip(), desc)]
                out += [hdr]
            out += ['']
        return out

    def _str_section(self, name):
        """Render a free-form section (e.g. Notes) under a rubric header."""
        out = []
        if self[name]:
            out += self._str_header(name)
            out += ['']
            content = textwrap.dedent("\n".join(self[name])).split("\n")
            out += content
            out += ['']
        return out

    def _str_see_also(self, func_role):
        out = []
        if self['See Also']:
            see_also = super(SphinxDocString, self)._str_see_also(func_role)
            out = ['.. seealso::', '']
            # Drop the base renderer's own header (its first two lines).
            out += self._str_indent(see_also[2:])
        return out

    def _str_warnings(self):
        out = []
        if self['Warnings']:
            out = ['.. warning::', '']
            out += self._str_indent(self['Warnings'])
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        if len(idx) == 0:
            return out

        out += ['.. index:: %s' % idx.get('default','')]
        for section, references in idx.items():
            if section == 'default':
                continue
            elif section == 'refguide':
                out += ['   single: %s' % (', '.join(references))]
            else:
                out += ['   %s: %s' % (section, ','.join(references))]
        return out

    def _str_references(self):
        out = []
        if self['References']:
            out += self._str_header('References')
            if isinstance(self['References'], str):
                self['References'] = [self['References']]
            out.extend(self['References'])
            out += ['']
            # Latex collects all references to a separate bibliography,
            # so we need to insert links to it
            # NOTE(review): this comparison is lexicographic; it happens to
            # hold for all released Sphinx versions ("1.x" >= "0.6") but
            # would misorder a hypothetical "0.10".
            if sphinx.__version__ >= "0.6":
                out += ['.. only:: latex','']
            else:
                out += ['.. latexonly::','']
            items = []
            for line in self['References']:
                m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
                if m:
                    items.append(m.group(1))
            out += ['   ' + ", ".join(["[%s]_" % item for item in items]), '']
        return out

    def _str_examples(self):
        examples_str = "\n".join(self['Examples'])

        if (self.use_plots and 'import matplotlib' in examples_str
                and 'plot::' not in examples_str):
            # Promote matplotlib examples to a plot:: directive so the
            # figures actually get rendered.
            out = []
            out += self._str_header('Examples')
            out += ['.. plot::', '']
            out += self._str_indent(self['Examples'])
            out += ['']
            return out
        else:
            return self._str_section('Examples')

    def __str__(self, indent=0, func_role="obj"):
        """Assemble all sections into the final reST document."""
        out = []
        out += self._str_signature()
        out += self._str_index() + ['']
        out += self._str_summary()
        out += self._str_extended_summary()
        out += self._str_param_list('Parameters')
        out += self._str_returns()
        for param_list in ('Other Parameters', 'Raises', 'Warns'):
            out += self._str_param_list(param_list)
        out += self._str_warnings()
        out += self._str_see_also(func_role)
        out += self._str_section('Notes')
        out += self._str_references()
        out += self._str_examples()
        for param_list in ('Attributes', 'Methods'):
            out += self._str_member_list(param_list)
        out = self._str_indent(out,indent)
        return '\n'.join(out)
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    """Sphinx-rendering wrapper for plain functions and methods."""
    def __init__(self, obj, doc=None, config={}):
        self.load_config(config)
        FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
    """Sphinx-rendering wrapper for classes.

    NOTE(review): the ``func_doc`` argument is accepted but not forwarded --
    ``ClassDoc`` is always called with ``func_doc=None``.  Confirm before
    "fixing"; forwarding it would change the rendered output.
    """
    def __init__(self, obj, doc=None, func_doc=None, config={}):
        self.load_config(config)
        ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString):
    """Sphinx rendering of a raw docstring attached to an arbitrary object."""
    def __init__(self, obj, doc=None, config={}):
        self._f = obj  # kept so the ``_obj`` property can resolve members
        self.load_config(config)
        SphinxDocString.__init__(self, doc, config=config)
def get_doc_object(obj, what=None, doc=None, config={}):
    """Return the appropriate Sphinx*Doc wrapper for *obj*.

    Parameters
    ----------
    obj : object
        The object whose docstring should be rendered.
    what : str, optional
        Force the object kind ('class', 'module', 'function', 'object');
        auto-detected when omitted.
    doc : str, optional
        Docstring override; fetched via ``pydoc.getdoc`` when omitted.
    config : dict, optional
        Rendering options forwarded to the wrapper.
    """
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif callable(obj):
            # Was ``isinstance(obj, collections.Callable)``: that alias was
            # moved to ``collections.abc`` in 3.3 and removed in 3.10; the
            # builtin ``callable`` is equivalent and version-proof.
            what = 'function'
        else:
            what = 'object'

    if what == 'class':
        return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
                              config=config)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, doc=doc, config=config)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxObjDoc(obj, doc, config=config)
<|file_name|>TitelConverter.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.kore.runtime.jsf.converter;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import org.kore.runtime.person.Titel;
/**
*
* @author Konrad Renner
*/
// NOTE(review): the converter id "CurrencyConverter" does not match this
// class (TitelConverter) -- looks like a copy/paste leftover.  Confirm the
// intended id before renaming: pages reference converters by this string.
@FacesConverter(value = "CurrencyConverter")
public class TitelConverter implements Converter {
@Override
public Titel getAsObject(FacesContext fc, UIComponent uic, String string) {<|fim▁hole|> }
    /**
     * Converts a {@link Titel} back to its raw string value for rendering.
     *
     * @param fc  current faces context (unused)
     * @param uic component the value belongs to (unused)
     * @param o   the model value; {@code null} is passed through
     * @return the title's value, or {@code null} for a {@code null} input
     * @throws IllegalArgumentException if {@code o} is not a {@code Titel}
     */
    @Override
    public String getAsString(FacesContext fc, UIComponent uic, Object o) {
        if (o == null) {
            return null;
        }
        if (o instanceof Titel) {
            return ((Titel) o).getValue();
        }
        // Any other type is a programming error, not a user-input problem.
        throw new IllegalArgumentException("Given object is not a org.kore.runtime.person.Titel");
    }
}<|fim▁end|> | if (string == null || string.trim().length() == 0) {
return null;
}
return new Titel(string.trim()); |
<|file_name|>monitor.js<|end_file_name|><|fim▁begin|>// Run if you want to monitor unresolved promises (in properly working
// application there should be no promises that are never resolved)
'use strict';
var max = Math.max
, callable = require('es5-ext/lib/Object/valid-callable')
, isCallable = require('es5-ext/lib/Object/is-callable')
, toUint = require('es5-ext/lib/Number/to-uint')
, deferred = require('./deferred');
// Configure (or cancel) monitoring of unresolved promises.
//
//   monitor(false)        -> cancel monitoring
//   monitor(timeout, cb)  -> report promises unresolved after `timeout` ms
//                            (minimum 50, default 5000) via `cb`; when no
//                            callback is given, fall back to console.error.
//
// (This block was scrambled by residue markers; the displaced "cancel"
// branch lines have been reassembled into their original position.)
exports = module.exports = function (timeout, cb) {
	if (timeout === false) {
		// Cancel monitor
		delete deferred._monitor;
		delete exports.timeout;
		delete exports.callback;
		return;
	}
	exports.timeout = timeout = max(toUint(timeout) || 5000, 50);
	if (cb == null) {
		if ((typeof console !== 'undefined') && console &&
				isCallable(console.error)) {
			cb = function (e) {
				console.error(((e.stack && e.stack.toString()) ||
					"Unresolved promise: no stack available"));
			};
		}
	} else {
		callable(cb);
	}
	exports.callback = cb;
	// NOTE(review): presumably invoked by ./deferred for each new promise;
	// the Error captured here preserves the creation stack for reporting.
	deferred._monitor = function () {
		var e = new Error("Unresolved promise");
		return setTimeout(function () {
			if (cb) cb(e);
		}, timeout);
	};
};
<|file_name|>File_posix.cpp<|end_file_name|><|fim▁begin|>#include "libplatform/impl.h"
namespace mp4v2 { namespace platform { namespace io {
///////////////////////////////////////////////////////////////////////////////
// fstream-based implementation of the abstract FileProvider interface.
// All operations report std::fstream::fail(), i.e. they return true on
// failure and false on success.
class StandardFileProvider : public FileProvider
{
public:
    StandardFileProvider();
    bool open( std::string name, Mode mode );
    bool seek( Size pos );
    bool read( void* buffer, Size size, Size& nin, Size maxChunkSize );
    bool write( const void* buffer, Size size, Size& nout, Size maxChunkSize );
    bool close();

private:
    bool _seekg;            // seek the get (read) position in seek()
    bool _seekp;            // seek the put (write) position in seek()
    std::fstream _fstream;
};
///////////////////////////////////////////////////////////////////////////////
StandardFileProvider::StandardFileProvider()
: _seekg ( false )<|fim▁hole|>{
}
// Opens `name` in binary mode; `mode` selects the read/write/truncate flags
// and records which stream positions seek() must later adjust.
// Returns fstream::fail(): true on failure, false on success.
bool
StandardFileProvider::open( std::string name, Mode mode )
{
    ios::openmode om = ios::binary;
    switch( mode ) {
        case MODE_UNDEFINED:
        case MODE_READ:
        default:
            om |= ios::in;
            _seekg = true;   // only the read position is meaningful
            _seekp = false;
            break;

        case MODE_MODIFY:
            om |= ios::in | ios::out;
            _seekg = true;
            _seekp = true;
            break;

        case MODE_CREATE:
            om |= ios::in | ios::out | ios::trunc;
            _seekg = true;
            _seekp = true;
            break;
    }
    _fstream.open( name.c_str(), om );
    return _fstream.fail();
}
// Repositions whichever stream pointers are active for the current mode
// (get for reading, put for writing), as recorded by open().
// Returns true on failure, false on success.
bool
StandardFileProvider::seek( Size pos )
{
    if( _seekg )
        _fstream.seekg( pos, ios::beg );
    if( _seekp )
        _fstream.seekp( pos, ios::beg );
    return _fstream.fail();
}
// Reads up to `size` bytes into `buffer`, storing the byte count in `nin`.
// NOTE(review): `maxChunkSize` is ignored by this implementation.
// Returns true on failure, false on success.
bool
StandardFileProvider::read( void* buffer, Size size, Size& nin, Size maxChunkSize )
{
    _fstream.read( (char*)buffer, size );
    if( _fstream.fail() )
        return true;
    nin = _fstream.gcount();  // characters extracted by the last read
    return false;
}
// Writes `size` bytes from `buffer`, storing the byte count in `nout`.
// NOTE(review): `maxChunkSize` is ignored by this implementation.
// Returns true on failure, false on success.
bool
StandardFileProvider::write( const void* buffer, Size size, Size& nout, Size maxChunkSize )
{
    _fstream.write( (const char*)buffer, size );
    if( _fstream.fail() )
        return true;
    nout = size;  // fstream::write is all-or-nothing from our perspective
    return false;
}
// Closes the underlying stream. Returns true on failure, false on success.
bool
StandardFileProvider::close()
{
    _fstream.close();
    return _fstream.fail();
}
///////////////////////////////////////////////////////////////////////////////
// Factory for the default provider.
// NOTE(review): returns a reference to a heap allocation; presumably the
// caller (or the FileProvider machinery) owns and deletes it -- confirm
// against FileProvider's contract.
FileProvider&
FileProvider::standard()
{
    return *new StandardFileProvider();
}
///////////////////////////////////////////////////////////////////////////////
}}} // namespace mp4v2::platform::io<|fim▁end|> | , _seekp ( false ) |
<|file_name|>prefix.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Michael Yang. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

/// Maps a scalar element type to the correspondingly-prefixed CBLAS binding
/// (s = f32, d = f64, c = complex f32, z = complex f64) for the given
/// function identifier.
/// (The macro arms were scrambled by residue markers; reassembled here.)
macro_rules! prefix(
    (f32, $f: ident) => (cblas_s::$f);
    (f64, $f: ident) => (cblas_d::$f);
    (Complex<f32>, $f: ident) => (cblas_c::$f);
    (Complex<f64>, $f: ident) => (cblas_z::$f);
    (Complex32, $f: ident) => (cblas_c::$f);
    (Complex64, $f: ident) => (cblas_z::$f);
);
<|file_name|>builder.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# LICENCE MIT
#
# DESCRIPTION Callgraph builder.
#
# AUTHOR Michal Bukovsky <[email protected]>
#
from operator import attrgetter
from inspect import signature
from callgraph.hooks import Hooks
from callgraph.utils import AuPair
from callgraph.symbols import Symbol, UnarySymbol
from callgraph.symbols import IterableConstantSymbol, MappingConstantSymbol
from callgraph.nodes import make_node
from callgraph.indent_printer import IndentPrinter, NonePrinter, dump_tree
# TODO(burlog): hooks as callbacks
# TODO(burlog): properties tests
# TODO(burlog): process signature? are defs invoked during import?
# TODO(burlog): tests for global variables
# TODO(burlog): __getattr__, __getattribute__ overrides will be problem
# TODO(burlog): make result of list(), tuple(), dict(), ... iterable
class CallGraphBuilder(object):
    """Builds a call graph by symbolically evaluating Python function bodies.

    ``build`` wraps the entry point in a symbol and hands it to ``process``,
    which attaches every discovered callee to the graph recursively.
    """

    def __init__(self, global_variables=None, silent=False):
        # ``global_variables`` previously defaulted to a shared mutable {}.
        global_variables = {} if global_variables is None else global_variables
        self.printer = NonePrinter() if silent else IndentPrinter()
        self.global_symbols = self.make_kwargs_symbols(global_variables)
        self.hooks = Hooks(self)
        self.current_lineno = 0
        self.tot = None  # node currently under analysis (managed via AuPair)

    def print_banner(self, printer, node):
        """Announce which function is being analyzed and where it lives."""
        extra = "<" + node.qualname + "> " if node.qualname != node.name else ""
        printer("@ Analyzing: {0} {1}at {2}:{3}"\
                .format(node.ast.name, extra, node.filename, node.lineno))

    def set_current_lineno(self, printer, expr_lineno):
        """Track (and echo once) the absolute source line being evaluated."""
        lineno = self.tot.lineno + expr_lineno
        if lineno == self.current_lineno: return
        self.current_lineno = lineno
        printer("+ line at {0}:{1}".format(self.tot.filename, lineno))
        printer("+", self.tot.source_line(expr_lineno).strip())

    def make_kwargs_symbols(self, kwargs):
        """Wrap every plain value in a UnarySymbol keyed by its name."""
        return dict((k, UnarySymbol(self, k, v)) for k, v in kwargs.items())

    def build(self, function, kwargs=None):
        """Build and return the call graph rooted at *function*."""
        kwargs = {} if kwargs is None else kwargs
        self.root = None
        self.hooks.clear()
        symbol = UnarySymbol(self, function.__name__, function)
        return self.process(symbol, kwargs=self.make_kwargs_symbols(kwargs))

    def process(self, symbol, parent=None, args=None, kwargs=None):
        """Attach *symbol* to the graph and recurse into its body.

        Fresh containers are created for omitted ``args``/``kwargs``:
        ``inject_self`` mutates both (``args.insert`` / ``kwargs["self"]``),
        which previously corrupted the shared mutable defaults across calls.
        """
        args = [] if args is None else args
        kwargs = {} if kwargs is None else kwargs
        # attach new node to parent list
        node = make_node(symbol)
        with AuPair(self, node):
            if parent:
                where = parent.filename, self.current_lineno
                if not parent.attach(node, where): return node
            # builtins or c/c++ objects have no code
            if node.is_opaque: return node
            if not symbol.iscallable(): return node
            # print nice banner
            self.print_banner(self.printer, node)
            # magic follows
            with self.printer as printer:
                self.inject_arguments(printer, node, args, kwargs)
                self.process_function(printer, node, args, kwargs)
        return node

    def process_function(self, printer, node, args, kwargs):
        """Evaluate every statement, recursing into each discovered call."""
        for expr in node.ast.body:
            for callee, args, kwargs in expr.evaluate(printer, node.symbol):
                self.process(callee, node, args.copy(), kwargs.copy())

    def inject_arguments(self, printer, node, args, kwargs):
        """Bind call arguments (incl. defaults and self) into the symbol."""
        sig = signature(node.symbol.value)
        self.inject_self(printer, node, sig, args, kwargs)
        bound = sig.bind_partial(*args, **self.polish_kwargs(sig, kwargs))
        self.inject_defaults(printer, node, sig, bound)
        for name, value in bound.arguments.items():
            value_symbol = self.as_symbol(value)
            printer("% Binding argument:", name + "=" + str(value_symbol))
            node.symbol.set(name, value_symbol)

    def polish_kwargs(self, sig, kwargs):
        """Pass kwargs through when **kwargs exists, else keep only matches."""
        for param in sig.parameters.values():
            if param.kind == param.VAR_KEYWORD:
                return kwargs
        return dict(self.iter_kwargs(sig, kwargs))

    def iter_kwargs(self, sig, kwargs):
        """Yield only those kwargs that name a positional-or-keyword param."""
        for param in sig.parameters.values():
            if param.kind == param.POSITIONAL_OR_KEYWORD:
                if param.name in kwargs:
                    yield param.name, kwargs[param.name]

    def inject_self(self, printer, node, sig, args, kwargs):
        """Make the bound instance visible to the called function."""
        if node.symbol.myself and sig.parameters:
            # TODO(burlog): better bound method detection
            if next(iter(sig.parameters.keys())) == "self":
                args.insert(0, node.symbol.myself)
            else:
                # TODO(burlog): improve detection logic
                kwargs["self"] = node.symbol.myself

    def inject_defaults(self, printer, node, sig, bound):
        """Fill unbound parameters with their declared default values."""
        for param in sig.parameters.values():
            if param.name not in bound.arguments:
                if param.default is not param.empty:
                    symbol = UnarySymbol(self, param.name, param.default)
                    bound.arguments[param.name] = symbol

    def as_symbol(self, value):
        """Coerce plain tuples/lists/dicts into constant symbols."""
        if isinstance(value, Symbol):
            return value
        elif isinstance(value, (tuple, list)):
            return IterableConstantSymbol(self, tuple, value)
        elif isinstance(value, dict):
            values = list(value.values())
            keys = list(UnarySymbol(self, "str", k) for k in value.keys())
            return MappingConstantSymbol(self, dict, keys, values)
        raise RuntimeError("Can't convert value to symbol: " + str(value))
# dogfooding build function
if __name__ == "__main__":
builder = CallGraphBuilder()<|fim▁hole|> print(80 * "=")
dump_tree(root, lambda x: x.children)<|fim▁end|> | kwargs = {"self": CallGraphBuilder, "function": CallGraphBuilder.build}
root = builder.build(CallGraphBuilder.build, kwargs) |
<|file_name|>seed.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// TODO: Use PrimInt + Signed instead of SignedInt + NumCast once num has
// PrimInt implementations
use num::{NumCast,Signed,PrimInt};
use rand::{Rand, Rng, SeedableRng, XorShiftRng};
use math;
const TABLE_SIZE: usize = 256;
/// A seed table, required by all noise functions.
///
/// Table creation is expensive, so in most circumstances you'll only want to
/// create one of these and reuse it everywhere.
#[allow(missing_copy_implementations)]
pub struct Seed {
    // A random permutation of the byte values 0..255, consulted by the
    // `get1`..`get4` lookups below.
    values: [u8; TABLE_SIZE],
}
impl Rand for Seed {
/// Generates a random seed.
///
/// # Examples
///
/// ```rust
/// extern crate noise;
/// extern crate rand;
///
/// use noise::Seed;
///
/// # fn main() {
/// let seed = rand::random::<Seed>();<|fim▁hole|> /// extern crate noise;
/// extern crate rand;
///
/// use noise::Seed;
/// use rand::{SeedableRng, Rng, XorShiftRng};
///
/// # fn main() {
/// let mut rng: XorShiftRng = SeedableRng::from_seed([1, 2, 3, 4]);
/// let seed = rng.gen::<Seed>();
/// # }
/// ```
    fn rand<R: Rng>(rng: &mut R) -> Seed {
        // Start from the identity table 0..255 and shuffle it into a
        // random permutation.
        let mut seq: Vec<u8> = (0 .. TABLE_SIZE).map(|x| x as u8).collect();
        rng.shuffle(&mut *seq);

        // It's unfortunate that this double-initializes the array, but Rust doesn't currently provide a
        // clean way to do this in one pass. Hopefully won't matter, as Seed creation will usually be a
        // one-time event.
        let mut seed = Seed { values: [0; TABLE_SIZE] };
        let seq_it = seq.iter();
        for (x, y) in seed.values.iter_mut().zip(seq_it) { *x = *y }
        seed
    }
}
impl Seed {
/// Deterministically generates a new seed table based on a `u32` value.
///
/// Internally this uses a `XorShiftRng`, but we don't really need to worry
/// about cryptographic security when working with procedural noise.
///
/// # Example
///
/// ```rust
/// use noise::Seed;
///
/// let seed = Seed::new(12);
/// ```
pub fn new(seed: u32) -> Seed {
let mut rng: XorShiftRng = SeedableRng::from_seed([1, seed, seed, seed]);
rng.gen()
}
    // Hash lookups: each getN masks its newest coordinate to the low byte
    // (0..255) and XOR-folds it with the previous dimensions' lookup before
    // indexing the permutation table.
    #[inline(always)]
    pub fn get1<T: Signed + PrimInt + NumCast>(&self, x: T) -> usize {
        let x: usize = math::cast(x & math::cast(0xff));
        self.values[x] as usize
    }

    #[inline(always)]
    pub fn get2<T: Signed + PrimInt + NumCast>(&self, pos: math::Point2<T>) -> usize {
        let y: usize = math::cast(pos[1] & math::cast(0xff));
        self.values[self.get1(pos[0]) ^ y] as usize
    }

    #[inline(always)]
    pub fn get3<T: Signed + PrimInt + NumCast>(&self, pos: math::Point3<T>) -> usize {
        let z: usize = math::cast(pos[2] & math::cast(0xff));
        self.values[self.get2([pos[0], pos[1]]) ^ z] as usize
    }

    #[inline(always)]
    pub fn get4<T: Signed + PrimInt + NumCast>(&self, pos: math::Point4<T>) -> usize {
        let w: usize = math::cast(pos[3] & math::cast(0xff));
        self.values[self.get3([pos[0], pos[1], pos[2]]) ^ w] as usize
    }
}
#[cfg(test)]
mod tests {
use rand::random;
use perlin::perlin3;
use super::Seed;
#[test]
fn test_random_seed() {
let _ = perlin3::<f32>(&random(), &[1.0, 2.0, 3.0]);
}
#[test]
fn test_negative_params() {
let _ = perlin3::<f32>(&Seed::new(0), &[-1.0, 2.0, 3.0]);
}
}<|fim▁end|> | /// # }
/// ```
///
/// ```rust |
<|file_name|>script_tests.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2015 The C-Bit Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "data/script_invalid.json.h"
#include "data/script_valid.json.h"
#include "core_io.h"
#include "key.h"
#include "keystore.h"
#include "script/script.h"
#include "script/script_error.h"
#include "script/sign.h"
#include "util.h"
#include "utilstrencodings.h"
#include "test/test_bitcoin.h"
#if defined(HAVE_CONSENSUS_LIB)
#include "script/bitcoinconsensus.h"
#endif
#include <fstream>
#include <stdint.h>
#include <string>
#include <vector>
#include <boost/foreach.hpp>
#include <boost/test/unit_test.hpp>
#include <univalue.h>
using namespace std;
// Uncomment if you want to output updated JSON tests.
// #define UPDATE_JSON_TESTS
static const unsigned int flags = SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC;
unsigned int ParseScriptFlags(string strFlags);
string FormatScriptFlags(unsigned int flags);
// Parses `jsondata` and returns it as a JSON array; on malformed input or a
// non-array document, reports a Boost test error and returns an empty array.
UniValue
read_json(const std::string& jsondata)
{
    UniValue parsed;
    if (parsed.read(jsondata) && parsed.isArray())
        return parsed.get_array();

    BOOST_ERROR("Parse error.");
    return UniValue(UniValue::VARR);
}
BOOST_FIXTURE_TEST_SUITE(script_tests, BasicTestingSetup)
// Builds a 1-in/1-out coinbase-like transaction (null prevout) whose single
// output carries the script under test; it serves as the "previous
// transaction" that the spending transaction below consumes.
CMutableTransaction BuildCreditingTransaction(const CScript& scriptPubKey)
{
    CMutableTransaction txCredit;
    txCredit.nVersion = 1;
    txCredit.nLockTime = 0;
    txCredit.vin.resize(1);
    txCredit.vout.resize(1);
    txCredit.vin[0].prevout.SetNull();  // no real input, like a coinbase
    txCredit.vin[0].scriptSig = CScript() << CScriptNum(0) << CScriptNum(0);
    txCredit.vin[0].nSequence = std::numeric_limits<unsigned int>::max();
    txCredit.vout[0].scriptPubKey = scriptPubKey;
    txCredit.vout[0].nValue = 0;

    return txCredit;
}
// Builds a 1-in/1-out transaction spending output 0 of `txCredit` with the
// given scriptSig. Value and output script are empty/zero: only script
// validation is exercised by these tests.
CMutableTransaction BuildSpendingTransaction(const CScript& scriptSig, const CMutableTransaction& txCredit)
{
    CMutableTransaction txSpend;
    txSpend.nVersion = 1;
    txSpend.nLockTime = 0;
    txSpend.vin.resize(1);
    txSpend.vout.resize(1);
    txSpend.vin[0].prevout.hash = txCredit.GetHash();
    txSpend.vin[0].prevout.n = 0;
    txSpend.vin[0].scriptSig = scriptSig;
    txSpend.vin[0].nSequence = std::numeric_limits<unsigned int>::max();
    txSpend.vout[0].scriptPubKey = CScript();
    txSpend.vout[0].nValue = 0;

    return txSpend;
}
// Verifies `scriptSig` against `scriptPubKey` under `flags` and checks that
// both the boolean verdict and the reported ScriptError agree with `expect`.
// When built with the consensus library, cross-checks its verdict as well.
void DoTest(const CScript& scriptPubKey, const CScript& scriptSig, int flags, bool expect, const std::string& message)
{
    ScriptError err;
    CMutableTransaction tx = BuildSpendingTransaction(scriptSig, BuildCreditingTransaction(scriptPubKey));
    CMutableTransaction tx2 = tx;  // copy: serialized below for the consensus-lib check
    BOOST_CHECK_MESSAGE(VerifyScript(scriptSig, scriptPubKey, flags, MutableTransactionSignatureChecker(&tx, 0), &err) == expect, message);
    BOOST_CHECK_MESSAGE(expect == (err == SCRIPT_ERR_OK), std::string(ScriptErrorString(err)) + ": " + message);
#if defined(HAVE_CONSENSUS_LIB)
    CDataStream stream(SER_NETWORK, PROTOCOL_VERSION);
    stream << tx2;
    BOOST_CHECK_MESSAGE(bitcoinconsensus_verify_script(begin_ptr(scriptPubKey), scriptPubKey.size(), (const unsigned char*)&stream[0], stream.size(), 0, flags, NULL) == expect,message);
#endif
}
// Replaces the S component of a DER-encoded ECDSA signature with n - S
// (mod-n negation), producing the complementary encoding of the same
// signature. Input layout: [0x30 len 0x02 lenR R 0x02 lenS S].
void static NegateSignatureS(std::vector<unsigned char>& vchSig) {
    // Parse the signature.
    std::vector<unsigned char> r, s;
    r = std::vector<unsigned char>(vchSig.begin() + 4, vchSig.begin() + 4 + vchSig[3]);
    s = std::vector<unsigned char>(vchSig.begin() + 6 + vchSig[3], vchSig.begin() + 6 + vchSig[3] + vchSig[5 + vchSig[3]]);

    // Really ugly to implement mod-n negation here, but it would be feature creep to expose such functionality from libsecp256k1.
    // (The constant below is the secp256k1 group order n, left-padded so it
    // lines up with the 33-byte working buffer.)
    static const unsigned char order[33] = {
        0x00,
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
        0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B,
        0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x41
    };
    // Left-pad S to 33 bytes so the subtraction below is aligned.
    while (s.size() < 33) {
        s.insert(s.begin(), 0x00);
    }
    // Big-endian schoolbook subtraction: s = order - s.
    int carry = 0;
    for (int p = 32; p >= 1; p--) {
        int n = (int)order[p] - s[p] - carry;
        s[p] = (n + 256) & 0xFF;
        carry = (n < 0);
    }
    assert(carry == 0);
    // Strip a leading zero byte that is not needed to keep S non-negative.
    if (s.size() > 1 && s[0] == 0 && s[1] < 0x80) {
        s.erase(s.begin());
    }

    // Reconstruct the signature.
    vchSig.clear();
    vchSig.push_back(0x30);
    vchSig.push_back(4 + r.size() + s.size());
    vchSig.push_back(0x02);
    vchSig.push_back(r.size());
    vchSig.insert(vchSig.end(), r.begin(), r.end());
    vchSig.push_back(0x02);
    vchSig.push_back(s.size());
    vchSig.insert(vchSig.end(), s.begin(), s.end());
}
namespace
{
const unsigned char vchKey0[32] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1};
const unsigned char vchKey1[32] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0};
const unsigned char vchKey2[32] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0};
// Test fixture: three deterministic private keys (scalar values 1, 0x100,
// 0x10000 from the vchKey arrays above), each in uncompressed and compressed
// form, plus their public keys. pubkey0H is pubkey0 with its first byte
// rewritten to 0x06/0x07 -- the deprecated "hybrid" public-key encoding.
struct KeyData
{
    CKey key0, key0C, key1, key1C, key2, key2C;
    CPubKey pubkey0, pubkey0C, pubkey0H;
    CPubKey pubkey1, pubkey1C;
    CPubKey pubkey2, pubkey2C;

    KeyData()
    {
        key0.Set(vchKey0, vchKey0 + 32, false);
        key0C.Set(vchKey0, vchKey0 + 32, true);
        pubkey0 = key0.GetPubKey();
        pubkey0H = key0.GetPubKey();
        pubkey0C = key0C.GetPubKey();
        // Force the hybrid prefix (0x06 or 0x07, matching Y's parity).
        *const_cast<unsigned char*>(&pubkey0H[0]) = 0x06 | (pubkey0H[64] & 1);

        key1.Set(vchKey1, vchKey1 + 32, false);
        key1C.Set(vchKey1, vchKey1 + 32, true);
        pubkey1 = key1.GetPubKey();
        pubkey1C = key1C.GetPubKey();

        key2.Set(vchKey2, vchKey2 + 32, false);
        key2C.Set(vchKey2, vchKey2 + 32, true);
        pubkey2 = key2.GetPubKey();
        pubkey2C = key2C.GetPubKey();
    }
};
// Fluent builder for one script test vector. It wires up a crediting
// transaction that locks value with scriptPubKey and a spending transaction
// whose scriptSig is assembled push-by-push; the last push is kept buffered
// so EditPush/DamagePush can mutate it before it lands in the script.
// GetJSON() emits the same array shape used by script_valid.json /
// script_invalid.json so script_build can cross-check the generated vectors.
class TestBuilder
{
private:
    CScript scriptPubKey;
    // Transaction funding the output under test.
    CTransaction creditTx;
    // Transaction spending creditTx.vout[0]; its vin[0].scriptSig is built up.
    CMutableTransaction spendTx;
    // True while 'push' holds data not yet appended to the scriptSig.
    bool havePush;
    // The buffered (still editable) data push.
    std::vector<unsigned char> push;
    std::string comment;
    // Script verification flags this vector is meant to be run with.
    int flags;

    // Flush the buffered push (if any) onto the scriptSig.
    void DoPush()
    {
        if (havePush) {
            spendTx.vin[0].scriptSig << push;
            havePush = false;
        }
    }

    // Buffer 'data' as the new pending push, flushing any previous one first.
    void DoPush(const std::vector<unsigned char>& data)
    {
        DoPush();
        push = data;
        havePush = true;
    }

public:
    // If P2SH is set, the output is wrapped as pay-to-script-hash of
    // redeemScript; otherwise redeemScript is used as the scriptPubKey itself.
    TestBuilder(const CScript& redeemScript, const std::string& comment_, int flags_, bool P2SH = false) : scriptPubKey(redeemScript), havePush(false), comment(comment_), flags(flags_)
    {
        if (P2SH) {
            creditTx = BuildCreditingTransaction(CScript() << OP_HASH160 << ToByteVector(CScriptID(redeemScript)) << OP_EQUAL);
        } else {
            creditTx = BuildCreditingTransaction(redeemScript);
        }
        spendTx = BuildSpendingTransaction(CScript(), creditTx);
    }

    // Append a raw script fragment (not a data push) to the scriptSig.
    TestBuilder& Add(const CScript& script)
    {
        DoPush();
        spendTx.vin[0].scriptSig += script;
        return *this;
    }

    // Append a small-integer push.
    TestBuilder& Num(int num)
    {
        DoPush();
        spendTx.vin[0].scriptSig << num;
        return *this;
    }

    // Buffer a hex-encoded data push.
    TestBuilder& Push(const std::string& hex)
    {
        DoPush(ParseHex(hex));
        return *this;
    }

    // Buffer a signature over the spend, grinding the signing nonce until the
    // DER-encoded R and S components have exactly lenR/lenS bytes. The DER
    // layout assumed here is: 0x30 <len> 0x02 <lenR> R 0x02 <lenS> S, so
    // vchSig[3] is lenR and vchSig[5 + lenR] is lenS. When a 33-byte S is
    // (or is not) wanted, NegateSignatureS flips S to the other half of the
    // curve order to toggle its length/high bit.
    TestBuilder& PushSig(const CKey& key, int nHashType = SIGHASH_ALL, unsigned int lenR = 32, unsigned int lenS = 32)
    {
        uint256 hash = SignatureHash(scriptPubKey, spendTx, 0, nHashType);
        std::vector<unsigned char> vchSig, r, s;
        uint32_t iter = 0;
        do {
            key.Sign(hash, vchSig, iter++);
            if ((lenS == 33) != (vchSig[5 + vchSig[3]] == 33)) {
                NegateSignatureS(vchSig);
            }
            r = std::vector<unsigned char>(vchSig.begin() + 4, vchSig.begin() + 4 + vchSig[3]);
            s = std::vector<unsigned char>(vchSig.begin() + 6 + vchSig[3], vchSig.begin() + 6 + vchSig[3] + vchSig[5 + vchSig[3]]);
        } while (lenR != r.size() || lenS != s.size());
        vchSig.push_back(static_cast<unsigned char>(nHashType));
        DoPush(vchSig);
        return *this;
    }

    // Buffer a serialized public key push.
    TestBuilder& Push(const CPubKey& pubkey)
    {
        DoPush(std::vector<unsigned char>(pubkey.begin(), pubkey.end()));
        return *this;
    }

    // Buffer the redeem script itself (final push of a P2SH spend).
    TestBuilder& PushRedeem()
    {
        DoPush(std::vector<unsigned char>(scriptPubKey.begin(), scriptPubKey.end()));
        return *this;
    }

    // Splice the buffered push: the bytes at 'pos' must currently equal
    // hexin; they are replaced by hexout (lengths may differ).
    TestBuilder& EditPush(unsigned int pos, const std::string& hexin, const std::string& hexout)
    {
        assert(havePush);
        std::vector<unsigned char> datain = ParseHex(hexin);
        std::vector<unsigned char> dataout = ParseHex(hexout);
        assert(pos + datain.size() <= push.size());
        BOOST_CHECK_MESSAGE(std::vector<unsigned char>(push.begin() + pos, push.begin() + pos + datain.size()) == datain, comment);
        push.erase(push.begin() + pos, push.begin() + pos + datain.size());
        push.insert(push.begin() + pos, dataout.begin(), dataout.end());
        return *this;
    }

    // Corrupt one byte of the buffered push (flip its lowest bit).
    TestBuilder& DamagePush(unsigned int pos)
    {
        assert(havePush);
        assert(pos < push.size());
        push[pos] ^= 1;
        return *this;
    }

    // Run the vector through DoTest, expecting success iff 'expect'.
    // Works on a copy so the buffered push stays editable afterwards.
    TestBuilder& Test(bool expect)
    {
        TestBuilder copy = *this; // Make a copy so we can rollback the push.
        DoPush();
        DoTest(creditTx.vout[0].scriptPubKey, spendTx.vin[0].scriptSig, flags, expect, comment);
        *this = copy;
        return *this;
    }

    // Serialize as [scriptSig, scriptPubKey, flags, comment] — the JSON test
    // data format. Note: this flushes the buffered push permanently.
    UniValue GetJSON()
    {
        DoPush();
        UniValue array(UniValue::VARR);
        array.push_back(FormatScript(spendTx.vin[0].scriptSig));
        array.push_back(FormatScript(creditTx.vout[0].scriptPubKey));
        array.push_back(FormatScriptFlags(flags));
        array.push_back(comment);
        return array;
    }

    std::string GetComment()
    {
        return comment;
    }

    const CScript& GetScriptPubKey()
    {
        return creditTx.vout[0].scriptPubKey;
    }
};
}
// Build script test vectors programmatically, run each one (good vectors
// must verify, bad vectors must fail), and cross-check that every generated
// vector also appears verbatim in the checked-in script_valid.json /
// script_invalid.json data. Compiling with UPDATE_JSON_TESTS instead writes
// fresh .json.gen files to regenerate the data.
BOOST_AUTO_TEST_CASE(script_build)
{
    const KeyData keys;

    std::vector<TestBuilder> good;
    std::vector<TestBuilder> bad;

    // Basic P2PK / P2PKH / anyonecanpay / P2SH spends.
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                               "P2PK", 0
                              ).PushSig(keys.key0));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                              "P2PK, bad sig", 0
                             ).PushSig(keys.key0).DamagePush(10));
    good.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
                               "P2PKH", 0
                              ).PushSig(keys.key1).Push(keys.pubkey1C));
    bad.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey2C.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
                              "P2PKH, bad pubkey", 0
                             ).PushSig(keys.key2).Push(keys.pubkey2C).DamagePush(5));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
                               "P2PK anyonecanpay", 0
                              ).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY));
    // EditPush(70, ...) rewrites the trailing hashtype byte of the signature.
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
                              "P2PK anyonecanpay marked with normal hashtype", 0
                             ).PushSig(keys.key1, SIGHASH_ALL | SIGHASH_ANYONECANPAY).EditPush(70, "81", "01"));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
                               "P2SH(P2PK)", SCRIPT_VERIFY_P2SH, true
                              ).PushSig(keys.key0).PushRedeem());
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0C) << OP_CHECKSIG,
                              "P2SH(P2PK), bad redeemscript", SCRIPT_VERIFY_P2SH, true
                             ).PushSig(keys.key0).PushRedeem().DamagePush(10));
    good.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
                               "P2SH(P2PKH), bad sig but no VERIFY_P2SH", 0, true
                              ).PushSig(keys.key0).DamagePush(10).PushRedeem());
    bad.push_back(TestBuilder(CScript() << OP_DUP << OP_HASH160 << ToByteVector(keys.pubkey1.GetID()) << OP_EQUALVERIFY << OP_CHECKSIG,
                              "P2SH(P2PKH), bad sig", SCRIPT_VERIFY_P2SH, true
                             ).PushSig(keys.key0).DamagePush(10).PushRedeem());

    // Bare and P2SH CHECKMULTISIG.
    good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                               "3-of-3", 0
                              ).Num(0).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                              "3-of-3, 2 sigs", 0
                             ).Num(0).PushSig(keys.key0).PushSig(keys.key1).Num(0));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                               "P2SH(2-of-3)", SCRIPT_VERIFY_P2SH, true
                              ).Num(0).PushSig(keys.key1).PushSig(keys.key2).PushRedeem());
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                              "P2SH(2-of-3), 1 sig", SCRIPT_VERIFY_P2SH, true
                             ).Num(0).PushSig(keys.key1).Num(0).PushRedeem());

    // Non-canonical DER encodings (R/S padding), rejected only under DERSIG.
    // The EditPush hex rewrites lengthen/shorten the DER length prefixes.
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                               "P2PK with too much R padding but no DERSIG", 0
                              ).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "P2PK with too much R padding", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key1, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                               "P2PK with too much S padding but no DERSIG", 0
                              ).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "P2PK with too much S padding", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key1, SIGHASH_ALL).EditPush(1, "44", "45").EditPush(37, "20", "2100"));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                               "P2PK with too little R padding but no DERSIG", 0
                              ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "P2PK with too little R padding", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
                               "P2PK NOT with bad sig with too much R padding but no DERSIG", 0
                              ).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with bad sig with too much R padding", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000").DamagePush(10));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with too much R padding but no DERSIG", 0
                             ).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with too much R padding", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key2, SIGHASH_ALL, 31, 32).EditPush(1, "43021F", "44022000"));

    // BIP66 examples 1-12 (strict DER activation test matrix).
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                               "BIP66 example 1, without DERSIG", 0
                              ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "BIP66 example 1, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                              "BIP66 example 2, without DERSIG", 0
                             ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                              "BIP66 example 2, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "BIP66 example 3, without DERSIG", 0
                             ).Num(0));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "BIP66 example 3, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                               "BIP66 example 4, without DERSIG", 0
                              ).Num(0));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                               "BIP66 example 4, with DERSIG", SCRIPT_VERIFY_DERSIG
                              ).Num(0));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "BIP66 example 5, without DERSIG", 0
                             ).Num(1));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG,
                              "BIP66 example 5, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(1));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                               "BIP66 example 6, without DERSIG", 0
                              ).Num(1));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1C) << OP_CHECKSIG << OP_NOT,
                              "BIP66 example 6, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(1));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                               "BIP66 example 7, without DERSIG", 0
                              ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                              "BIP66 example 7, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                              "BIP66 example 8, without DERSIG", 0
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                              "BIP66 example 8, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                              "BIP66 example 9, without DERSIG", 0
                             ).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                              "BIP66 example 9, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                               "BIP66 example 10, without DERSIG", 0
                              ).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                              "BIP66 example 10, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0).Num(0).PushSig(keys.key2, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220"));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                              "BIP66 example 11, without DERSIG", 0
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG,
                              "BIP66 example 11, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                               "BIP66 example 12, without DERSIG", 0
                              ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_2 << OP_CHECKMULTISIG << OP_NOT,
                               "BIP66 example 12, with DERSIG", SCRIPT_VERIFY_DERSIG
                              ).Num(0).PushSig(keys.key1, SIGHASH_ALL, 33, 32).EditPush(1, "45022100", "440220").Num(0));

    // Hashtype / LOW_S / hybrid-pubkey (STRICTENC) encoding cases.
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                               "P2PK with multi-byte hashtype, without DERSIG", 0
                              ).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101"));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                              "P2PK with multi-byte hashtype, with DERSIG", SCRIPT_VERIFY_DERSIG
                             ).PushSig(keys.key2, SIGHASH_ALL).EditPush(70, "01", "0101"));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                               "P2PK with high S but no LOW_S", 0
                              ).PushSig(keys.key2, SIGHASH_ALL, 32, 33));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                              "P2PK with high S", SCRIPT_VERIFY_LOW_S
                             ).PushSig(keys.key2, SIGHASH_ALL, 32, 33));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
                               "P2PK with hybrid pubkey but no STRICTENC", 0
                              ).PushSig(keys.key0, SIGHASH_ALL));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG,
                              "P2PK with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
                             ).PushSig(keys.key0, SIGHASH_ALL));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with hybrid pubkey but no STRICTENC", 0
                             ).PushSig(keys.key0, SIGHASH_ALL));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with hybrid pubkey", SCRIPT_VERIFY_STRICTENC
                             ).PushSig(keys.key0, SIGHASH_ALL));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
                               "P2PK NOT with invalid hybrid pubkey but no STRICTENC", 0
                              ).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0H) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with invalid hybrid pubkey", SCRIPT_VERIFY_STRICTENC
                             ).PushSig(keys.key0, SIGHASH_ALL).DamagePush(10));
    good.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                               "1-of-2 with the second 1 hybrid pubkey and no STRICTENC", 0
                              ).Num(0).PushSig(keys.key1, SIGHASH_ALL));
    good.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey0H) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                               "1-of-2 with the second 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
                              ).Num(0).PushSig(keys.key1, SIGHASH_ALL));
    bad.push_back(TestBuilder(CScript() << OP_1 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey0H) << OP_2 << OP_CHECKMULTISIG,
                              "1-of-2 with the first 1 hybrid pubkey", SCRIPT_VERIFY_STRICTENC
                             ).Num(0).PushSig(keys.key1, SIGHASH_ALL));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
                               "P2PK with undefined hashtype but no STRICTENC", 0
                              ).PushSig(keys.key1, 5));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG,
                              "P2PK with undefined hashtype", SCRIPT_VERIFY_STRICTENC
                             ).PushSig(keys.key1, 5));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
                               "P2PK NOT with invalid sig and undefined hashtype but no STRICTENC", 0
                              ).PushSig(keys.key1, 5).DamagePush(10));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey1) << OP_CHECKSIG << OP_NOT,
                              "P2PK NOT with invalid sig and undefined hashtype", SCRIPT_VERIFY_STRICTENC
                             ).PushSig(keys.key1, 5).DamagePush(10));

    // NULLDUMMY / SIGPUSHONLY / CLEANSTACK flag cases.
    good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                               "3-of-3 with nonzero dummy but no NULLDUMMY", 0
                              ).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
    bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG,
                              "3-of-3 with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
                             ).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2));
    good.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
                               "3-of-3 NOT with invalid sig and nonzero dummy but no NULLDUMMY", 0
                              ).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10));
    bad.push_back(TestBuilder(CScript() << OP_3 << ToByteVector(keys.pubkey0C) << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey2C) << OP_3 << OP_CHECKMULTISIG << OP_NOT,
                              "3-of-3 NOT with invalid sig with nonzero dummy", SCRIPT_VERIFY_NULLDUMMY
                             ).Num(1).PushSig(keys.key0).PushSig(keys.key1).PushSig(keys.key2).DamagePush(10));
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                               "2-of-2 with two identical keys and sigs pushed using OP_DUP but no SIGPUSHONLY", 0
                              ).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
    bad.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                              "2-of-2 with two identical keys and sigs pushed using OP_DUP", SCRIPT_VERIFY_SIGPUSHONLY
                             ).Num(0).PushSig(keys.key1).Add(CScript() << OP_DUP));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                              "P2SH(P2PK) with non-push scriptSig but no SIGPUSHONLY", 0
                             ).PushSig(keys.key2).PushRedeem());
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey2C) << OP_CHECKSIG,
                              "P2SH(P2PK) with non-push scriptSig", SCRIPT_VERIFY_SIGPUSHONLY
                             ).PushSig(keys.key2).PushRedeem());
    good.push_back(TestBuilder(CScript() << OP_2 << ToByteVector(keys.pubkey1C) << ToByteVector(keys.pubkey1C) << OP_2 << OP_CHECKMULTISIG,
                               "2-of-2 with two identical keys and sigs pushed", SCRIPT_VERIFY_SIGPUSHONLY
                              ).Num(0).PushSig(keys.key1).PushSig(keys.key1));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                               "P2PK with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH
                              ).Num(11).PushSig(keys.key0));
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                              "P2PK with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH
                             ).Num(11).PushSig(keys.key0));
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                               "P2SH with unnecessary input but no CLEANSTACK", SCRIPT_VERIFY_P2SH, true
                              ).Num(11).PushSig(keys.key0).PushRedeem());
    bad.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                              "P2SH with unnecessary input", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
                             ).Num(11).PushSig(keys.key0).PushRedeem());
    good.push_back(TestBuilder(CScript() << ToByteVector(keys.pubkey0) << OP_CHECKSIG,
                               "P2SH with CLEANSTACK", SCRIPT_VERIFY_CLEANSTACK | SCRIPT_VERIFY_P2SH, true
                              ).PushSig(keys.key0).PushRedeem());

    // Load the checked-in JSON vectors so each generated vector can be
    // matched against them by its serialized form.
    std::set<std::string> tests_good;
    std::set<std::string> tests_bad;
    {
        UniValue json_good = read_json(std::string(json_tests::script_valid, json_tests::script_valid + sizeof(json_tests::script_valid)));
        UniValue json_bad = read_json(std::string(json_tests::script_invalid, json_tests::script_invalid + sizeof(json_tests::script_invalid)));
        for (unsigned int idx = 0; idx < json_good.size(); idx++) {
            const UniValue& tv = json_good[idx];
            tests_good.insert(tv.get_array().write());
        }
        for (unsigned int idx = 0; idx < json_bad.size(); idx++) {
            const UniValue& tv = json_bad[idx];
            tests_bad.insert(tv.get_array().write());
        }
    }

    // Run every vector and verify it also exists in the JSON data (unless
    // we are regenerating the data with UPDATE_JSON_TESTS).
    std::string strGood;
    std::string strBad;
    BOOST_FOREACH(TestBuilder& test, good) {
        test.Test(true);
        std::string str = test.GetJSON().write();
#ifndef UPDATE_JSON_TESTS
        if (tests_good.count(str) == 0) {
            BOOST_CHECK_MESSAGE(false, "Missing auto script_valid test: " + test.GetComment());
        }
#endif
        strGood += str + ",\n";
    }
    BOOST_FOREACH(TestBuilder& test, bad) {
        test.Test(false);
        std::string str = test.GetJSON().write();
#ifndef UPDATE_JSON_TESTS
        if (tests_bad.count(str) == 0) {
            BOOST_CHECK_MESSAGE(false, "Missing auto script_invalid test: " + test.GetComment());
        }
#endif
        strBad += str + ",\n";
    }
#ifdef UPDATE_JSON_TESTS
    // Regeneration mode: dump the generated vectors for manual inclusion
    // into the .json data files.
    FILE* valid = fopen("script_valid.json.gen", "w");
    fputs(strGood.c_str(), valid);
    fclose(valid);
    FILE* invalid = fopen("script_invalid.json.gen", "w");
    fputs(strBad.c_str(), invalid);
    fclose(invalid);
#endif
}
// Run every vector in test/data/script_valid.json and expect it to verify.
// Each entry is an array: [ "scriptSig", "scriptPubKey", "flags", ... ]
// (extra elements are ignored and useful for comments); one-element arrays
// are pure comments and are skipped.
BOOST_AUTO_TEST_CASE(script_valid)
{
    UniValue tests = read_json(std::string(json_tests::script_valid, json_tests::script_valid + sizeof(json_tests::script_valid)));

    for (unsigned int i = 0; i < tests.size(); i++) {
        UniValue tv = tests[i];
        string label = tv.write();
        if (tv.size() < 3) {
            // Anything shorter than a full vector must be a 1-element comment.
            if (tv.size() != 1)
                BOOST_ERROR("Bad test: " << label);
            continue;
        }
        CScript scriptSig = ParseScript(tv[0].get_str());
        CScript scriptPubKey = ParseScript(tv[1].get_str());
        unsigned int scriptflags = ParseScriptFlags(tv[2].get_str());
        DoTest(scriptPubKey, scriptSig, scriptflags, true, label);
    }
}
// Run every vector in test/data/script_invalid.json and expect it to FAIL
// verification. Same array format as script_valid; one-element entries are
// comments and are skipped.
BOOST_AUTO_TEST_CASE(script_invalid)
{
    UniValue tests = read_json(std::string(json_tests::script_invalid, json_tests::script_invalid + sizeof(json_tests::script_invalid)));

    for (unsigned int i = 0; i < tests.size(); i++) {
        UniValue tv = tests[i];
        string label = tv.write();
        if (tv.size() < 3) {
            // Anything shorter than a full vector must be a 1-element comment.
            if (tv.size() != 1)
                BOOST_ERROR("Bad test: " << label);
            continue;
        }
        CScript scriptSig = ParseScript(tv[0].get_str());
        CScript scriptPubKey = ParseScript(tv[1].get_str());
        unsigned int scriptflags = ParseScriptFlags(tv[2].get_str());
        DoTest(scriptPubKey, scriptSig, scriptflags, false, label);
    }
}
BOOST_AUTO_TEST_CASE(script_PushData)
{
    // Check that PUSHDATA1, PUSHDATA2, and PUSHDATA4 create the same value on
    // the stack as the 1-75 opcodes do. All four scripts push the single
    // byte 0x5a; only the push-opcode encoding differs.
    static const unsigned char direct[] = { 1, 0x5a };
    static const unsigned char pushdata1[] = { OP_PUSHDATA1, 1, 0x5a };
    static const unsigned char pushdata2[] = { OP_PUSHDATA2, 1, 0, 0x5a };
    static const unsigned char pushdata4[] = { OP_PUSHDATA4, 1, 0, 0, 0, 0x5a };

    ScriptError err;
    // Evaluate the direct push first; the other stacks are compared to it.
    vector<vector<unsigned char> > directStack;
    BOOST_CHECK(EvalScript(directStack, CScript(&direct[0], &direct[sizeof(direct)]), SCRIPT_VERIFY_P2SH, BaseSignatureChecker(), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    vector<vector<unsigned char> > pushdata1Stack;
    BOOST_CHECK(EvalScript(pushdata1Stack, CScript(&pushdata1[0], &pushdata1[sizeof(pushdata1)]), SCRIPT_VERIFY_P2SH, BaseSignatureChecker(), &err));
    BOOST_CHECK(pushdata1Stack == directStack);
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    vector<vector<unsigned char> > pushdata2Stack;
    BOOST_CHECK(EvalScript(pushdata2Stack, CScript(&pushdata2[0], &pushdata2[sizeof(pushdata2)]), SCRIPT_VERIFY_P2SH, BaseSignatureChecker(), &err));
    BOOST_CHECK(pushdata2Stack == directStack);
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    vector<vector<unsigned char> > pushdata4Stack;
    BOOST_CHECK(EvalScript(pushdata4Stack, CScript(&pushdata4[0], &pushdata4[sizeof(pushdata4)]), SCRIPT_VERIFY_P2SH, BaseSignatureChecker(), &err));
    BOOST_CHECK(pushdata4Stack == directStack);
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));
}
// Produce a scriptSig that satisfies a bare CHECKMULTISIG scriptPubKey:
// an OP_0 placeholder followed by one SIGHASH_ALL signature per key,
// in the order the keys are given.
CScript
sign_multisig(CScript scriptPubKey, std::vector<CKey> keys, CTransaction transaction)
{
    uint256 hash = SignatureHash(scriptPubKey, transaction, 0, SIGHASH_ALL);

    // NOTE: CHECKMULTISIG has an unfortunate bug; it requires one extra item
    // on the stack before the signatures. Putting OP_0 on the stack is the
    // workaround; fixing the bug would mean splitting the block chain (old
    // clients would not accept new CHECKMULTISIG transactions, and
    // vice-versa).
    CScript scriptSig = CScript() << OP_0;
    BOOST_FOREACH(const CKey &key, keys)
    {
        vector<unsigned char> sig;
        BOOST_CHECK(key.Sign(hash, sig));
        sig.push_back((unsigned char)SIGHASH_ALL);
        scriptSig << sig;
    }
    return scriptSig;
}
// Convenience overload: sign a multisig output with a single key.
CScript
sign_multisig(CScript scriptPubKey, const CKey &key, CTransaction transaction)
{
    return sign_multisig(scriptPubKey, std::vector<CKey>(1, key), transaction);
}
// 1-of-2 CHECKMULTISIG: either listed key alone must satisfy the script,
// an unlisted key must not, and mutating the spending transaction must
// invalidate a previously good signature.
// NOTE(review): 'flags' here is a file-level constant defined earlier in
// this file (not visible in this excerpt).
BOOST_AUTO_TEST_CASE(script_CHECKMULTISIG12)
{
    ScriptError err;
    CKey key1, key2, key3;
    key1.MakeNewKey(true);
    key2.MakeNewKey(false);
    key3.MakeNewKey(true);

    // 1-of-2 over key1 and key2; key3 is deliberately not in the script.
    CScript scriptPubKey12;
    scriptPubKey12 << OP_1 << ToByteVector(key1.GetPubKey()) << ToByteVector(key2.GetPubKey()) << OP_2 << OP_CHECKMULTISIG;

    CMutableTransaction txFrom12 = BuildCreditingTransaction(scriptPubKey12);
    CMutableTransaction txTo12 = BuildSpendingTransaction(CScript(), txFrom12);

    CScript goodsig1 = sign_multisig(scriptPubKey12, key1, txTo12);
    BOOST_CHECK(VerifyScript(goodsig1, scriptPubKey12, flags, MutableTransactionSignatureChecker(&txTo12, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));
    // Changing the spending transaction changes the signature hash, so the
    // old signature must no longer verify.
    txTo12.vout[0].nValue = 2;
    BOOST_CHECK(!VerifyScript(goodsig1, scriptPubKey12, flags, MutableTransactionSignatureChecker(&txTo12, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    CScript goodsig2 = sign_multisig(scriptPubKey12, key2, txTo12);
    BOOST_CHECK(VerifyScript(goodsig2, scriptPubKey12, flags, MutableTransactionSignatureChecker(&txTo12, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    // key3 is not one of the two listed keys, so its signature must fail.
    CScript badsig1 = sign_multisig(scriptPubKey12, key3, txTo12);
    BOOST_CHECK(!VerifyScript(badsig1, scriptPubKey12, flags, MutableTransactionSignatureChecker(&txTo12, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));
}
// 2-of-3 CHECKMULTISIG: every ordered pair of listed keys must verify;
// reused signatures, out-of-order signatures, signatures from unlisted
// keys, and missing signatures must all fail.
// NOTE(review): 'flags' here is a file-level constant defined earlier in
// this file (not visible in this excerpt).
BOOST_AUTO_TEST_CASE(script_CHECKMULTISIG23)
{
    ScriptError err;
    CKey key1, key2, key3, key4;
    key1.MakeNewKey(true);
    key2.MakeNewKey(false);
    key3.MakeNewKey(true);
    key4.MakeNewKey(false);

    // 2-of-3 over key1..key3; key4 is deliberately not in the script.
    CScript scriptPubKey23;
    scriptPubKey23 << OP_2 << ToByteVector(key1.GetPubKey()) << ToByteVector(key2.GetPubKey()) << ToByteVector(key3.GetPubKey()) << OP_3 << OP_CHECKMULTISIG;

    CMutableTransaction txFrom23 = BuildCreditingTransaction(scriptPubKey23);
    CMutableTransaction txTo23 = BuildSpendingTransaction(CScript(), txFrom23);

    // All three in-order pairs of listed keys must verify.
    std::vector<CKey> keys;
    keys.push_back(key1); keys.push_back(key2);
    CScript goodsig1 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(VerifyScript(goodsig1, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key1); keys.push_back(key3);
    CScript goodsig2 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(VerifyScript(goodsig2, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key2); keys.push_back(key3);
    CScript goodsig3 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(VerifyScript(goodsig3, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key2); keys.push_back(key2); // Can't re-use sig
    CScript badsig1 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig1, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key2); keys.push_back(key1); // sigs must be in correct order
    CScript badsig2 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig2, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key3); keys.push_back(key2); // sigs must be in correct order
    CScript badsig3 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig3, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key4); keys.push_back(key2); // sigs must match pubkeys
    CScript badsig4 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig4, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    keys.clear();
    keys.push_back(key1); keys.push_back(key4); // sigs must match pubkeys
    CScript badsig5 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig5, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_EVAL_FALSE, ScriptErrorString(err));

    keys.clear(); // Must have signatures
    CScript badsig6 = sign_multisig(scriptPubKey23, keys, txTo23);
    BOOST_CHECK(!VerifyScript(badsig6, scriptPubKey23, flags, MutableTransactionSignatureChecker(&txTo23, 0), &err));
    BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_INVALID_STACK_OPERATION, ScriptErrorString(err));
}
BOOST_AUTO_TEST_CASE(script_combineSigs)
{
// Test the CombineSignatures function
// Build three keys (alternating compressed/uncompressed via i%2) and
// register them with the keystore so SignSignature can find them.
CBasicKeyStore keystore;
vector<CKey> keys;
vector<CPubKey> pubkeys;
for (int i = 0; i < 3; i++)
{
CKey key;
key.MakeNewKey(i%2 == 1);
keys.push_back(key);
pubkeys.push_back(key.GetPubKey());
keystore.AddKey(key);
}
// txFrom pays to keys[0]; txTo spends it. scriptPubKey/scriptSig are
// references into those transactions, so SignSignature mutates them in place.
CMutableTransaction txFrom = BuildCreditingTransaction(GetScriptForDestination(keys[0].GetPubKey().GetID()));
CMutableTransaction txTo = BuildSpendingTransaction(CScript(), txFrom);
CScript& scriptPubKey = txFrom.vout[0].scriptPubKey;
CScript& scriptSig = txTo.vin[0].scriptSig;
CScript empty;
// Combining two empty scriptSigs yields an empty result.
CScript combined = CombineSignatures(scriptPubKey, txTo, 0, empty, empty);
BOOST_CHECK(combined.empty());
// Single signature case:
SignSignature(keystore, txFrom, txTo, 0); // changes scriptSig
// A real signature combined with an empty one wins in either argument order.
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSig, empty);
BOOST_CHECK(combined == scriptSig);
combined = CombineSignatures(scriptPubKey, txTo, 0, empty, scriptSig);
BOOST_CHECK(combined == scriptSig);
CScript scriptSigCopy = scriptSig;
// Signing again will give a different, valid signature:
SignSignature(keystore, txFrom, txTo, 0);
// Either of the two valid signatures is an acceptable combination result.
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSigCopy, scriptSig);
BOOST_CHECK(combined == scriptSigCopy || combined == scriptSig);
// P2SH, single-signature case:
CScript pkSingle; pkSingle << ToByteVector(keys[0].GetPubKey()) << OP_CHECKSIG;
keystore.AddCScript(pkSingle);
scriptPubKey = GetScriptForDestination(CScriptID(pkSingle));
SignSignature(keystore, txFrom, txTo, 0);
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSig, empty);
BOOST_CHECK(combined == scriptSig);
combined = CombineSignatures(scriptPubKey, txTo, 0, empty, scriptSig);
BOOST_CHECK(combined == scriptSig);
scriptSigCopy = scriptSig;
SignSignature(keystore, txFrom, txTo, 0);
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSigCopy, scriptSig);
BOOST_CHECK(combined == scriptSigCopy || combined == scriptSig);
// dummy scriptSigCopy with placeholder, should always choose non-placeholder:
scriptSigCopy = CScript() << OP_0 << vector<unsigned char>(pkSingle.begin(), pkSingle.end());
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSigCopy, scriptSig);
BOOST_CHECK(combined == scriptSig);
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSig, scriptSigCopy);
BOOST_CHECK(combined == scriptSig);
// Hardest case: Multisig 2-of-3
scriptPubKey = GetScriptForMultisig(2, pubkeys);
keystore.AddCScript(scriptPubKey);
SignSignature(keystore, txFrom, txTo, 0);
// A fully-signed multisig scriptSig survives combination with empty.
combined = CombineSignatures(scriptPubKey, txTo, 0, scriptSig, empty);
BOOST_CHECK(combined == scriptSig);
combined = CombineSignatures(scriptPubKey, txTo, 0, empty, scriptSig);
BOOST_CHECK(combined == scriptSig);
// A couple of partially-signed versions:
// sig1/sig2/sig3 are signatures by keys[0..2], each with a different hashtype
// byte appended.
vector<unsigned char> sig1;
uint256 hash1 = SignatureHash(scriptPubKey, txTo, 0, SIGHASH_ALL);
BOOST_CHECK(keys[0].Sign(hash1, sig1));
sig1.push_back(SIGHASH_ALL);
vector<unsigned char> sig2;
uint256 hash2 = SignatureHash(scriptPubKey, txTo, 0, SIGHASH_NONE);
BOOST_CHECK(keys[1].Sign(hash2, sig2));
sig2.push_back(SIGHASH_NONE);
vector<unsigned char> sig3;
uint256 hash3 = SignatureHash(scriptPubKey, txTo, 0, SIGHASH_SINGLE);
BOOST_CHECK(keys[2].Sign(hash3, sig3));
sig3.push_back(SIGHASH_SINGLE);
// Not fussy about order (or even existence) of placeholders or signatures:
// (OP_0 here is the placeholder/dummy element for a missing signature.)
CScript partial1a = CScript() << OP_0 << sig1 << OP_0;
CScript partial1b = CScript() << OP_0 << OP_0 << sig1;
CScript partial2a = CScript() << OP_0 << sig2;
CScript partial2b = CScript() << sig2 << OP_0;
CScript partial3a = CScript() << sig3;
CScript partial3b = CScript() << OP_0 << OP_0 << sig3;
CScript partial3c = CScript() << OP_0 << sig3 << OP_0;
CScript complete12 = CScript() << OP_0 << sig1 << sig2;
CScript complete13 = CScript() << OP_0 << sig1 << sig3;
CScript complete23 = CScript() << OP_0 << sig2 << sig3;
// Combining two partials holding the same signature is a no-op; combining
// partials for different keys must yield the matching complete 2-of-3 form.
combined = CombineSignatures(scriptPubKey, txTo, 0, partial1a, partial1b);
BOOST_CHECK(combined == partial1a);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial1a, partial2a);
BOOST_CHECK(combined == complete12);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial2a, partial1a);
BOOST_CHECK(combined == complete12);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial1b, partial2b);
BOOST_CHECK(combined == complete12);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial3b, partial1b);
BOOST_CHECK(combined == complete13);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial2a, partial3a);
BOOST_CHECK(combined == complete23);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial3b, partial2b);
BOOST_CHECK(combined == complete23);
combined = CombineSignatures(scriptPubKey, txTo, 0, partial3b, partial3a);
BOOST_CHECK(combined == partial3c);
}
BOOST_AUTO_TEST_CASE(script_standard_push)
{<|fim▁hole|> for (int i=0; i<67000; i++) {
CScript script;
script << i;
BOOST_CHECK_MESSAGE(script.IsPushOnly(), "Number " << i << " is not pure push.");
BOOST_CHECK_MESSAGE(VerifyScript(script, CScript() << OP_1, SCRIPT_VERIFY_MINIMALDATA, BaseSignatureChecker(), &err), "Number " << i << " push is not minimal data.");
BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));
}
for (unsigned int i=0; i<=MAX_SCRIPT_ELEMENT_SIZE; i++) {
std::vector<unsigned char> data(i, '\111');
CScript script;
script << data;
BOOST_CHECK_MESSAGE(script.IsPushOnly(), "Length " << i << " is not pure push.");
BOOST_CHECK_MESSAGE(VerifyScript(script, CScript() << OP_1, SCRIPT_VERIFY_MINIMALDATA, BaseSignatureChecker(), &err), "Length " << i << " push is not minimal data.");
BOOST_CHECK_MESSAGE(err == SCRIPT_ERR_OK, ScriptErrorString(err));
}
}
BOOST_AUTO_TEST_CASE(script_IsPushOnly_on_invalid_scripts)
{
// IsPushOnly returns false when given a script containing only pushes that
// are invalid due to truncation. IsPushOnly() is consensus critical
// because P2SH evaluation uses it, although this specific behavior should
// not be consensus critical as the P2SH evaluation would fail first due to
// the invalid push. Still, it doesn't hurt to test it explicitly.
// The byte 0x01 announces a 1-byte push, but no data byte follows,
// so the push is truncated.
static const unsigned char direct[] = { 1 };
BOOST_CHECK(!CScript(direct, direct+sizeof(direct)).IsPushOnly());
}
BOOST_AUTO_TEST_CASE(script_GetScriptAsm)
{
// OP_NOP2 and OP_CHECKLOCKTIMEVERIFY are the same opcode and must render
// identically, with or without sighash decoding enabled.
BOOST_CHECK_EQUAL("OP_CHECKLOCKTIMEVERIFY", ScriptToAsmStr(CScript() << OP_NOP2, true));
BOOST_CHECK_EQUAL("OP_CHECKLOCKTIMEVERIFY", ScriptToAsmStr(CScript() << OP_CHECKLOCKTIMEVERIFY, true));
BOOST_CHECK_EQUAL("OP_CHECKLOCKTIMEVERIFY", ScriptToAsmStr(CScript() << OP_NOP2));
BOOST_CHECK_EQUAL("OP_CHECKLOCKTIMEVERIFY", ScriptToAsmStr(CScript() << OP_CHECKLOCKTIMEVERIFY));
string derSig("304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c5090");
string pubKey("03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2");
vector<unsigned char> vchPubKey = ToByteVector(ParseHex(pubKey));
// With the decode flag set (second arg true), a recognized sighash byte on a
// DER signature renders symbolically ([ALL], [NONE|ANYONECANPAY], ...), while
// unrecognized bytes (00, 80) stay as raw hex.
BOOST_CHECK_EQUAL(derSig + "00 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "00")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "80 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "80")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[ALL] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "01")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[NONE] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "02")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[SINGLE] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "03")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[ALL|ANYONECANPAY] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "81")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[NONE|ANYONECANPAY] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "82")) << vchPubKey, true));
BOOST_CHECK_EQUAL(derSig + "[SINGLE|ANYONECANPAY] " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "83")) << vchPubKey, true));
// Without the decode flag, every trailing byte is shown as raw hex.
BOOST_CHECK_EQUAL(derSig + "00 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "00")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "80 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "80")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "01 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "01")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "02 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "02")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "03 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "03")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "81 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "81")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "82 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "82")) << vchPubKey));
BOOST_CHECK_EQUAL(derSig + "83 " + pubKey, ScriptToAsmStr(CScript() << ToByteVector(ParseHex(derSig + "83")) << vchPubKey));
}
BOOST_AUTO_TEST_SUITE_END()<|fim▁end|> | ScriptError err; |
<|file_name|>GSheet2Python.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python<|fim▁hole|>
"""
Takes Google's json encoded spreadsheet and prints a python dictionary keyed by
the values in the first column of the SS. ©2017 J. J. Crump, GNU general public
license
"""
# NOTE: Python 2 module (urllib2). Fetches the spreadsheet feed at import time.
import urllib2
from pprint import pprint
import re
import json
# This is the url of a sample google spreadsheet that I've published to the web. The url returns a prettyprinted json string:
ssURL = "https://spreadsheets.google.com/feeds/list/1OPNQC3xBp3iQTpjVfd6cpvvA0BpHWhb3QiNOvGFZ9z8/od6/public/basic?prettyprint=true&alt=json"
# Download and decode the feed; entryList holds one dict per spreadsheet row.
response = urllib2.urlopen(ssURL)
jsonIn = response.read()
pyDict = json.loads(jsonIn)
entryList = pyDict['feed']['entry']
# Column names of the sheet, in order. fields[0] ("name") is the row key.
fields = ["name", "city", "state", "zip"]
# Accumulator: {row key -> {field -> value}}, filled below.
SSdict = {}
def parsestring(rowstring, fields):
    """Yield (fieldname, fieldvalue) tuples parsed from one row string.

    ``rowstring`` is the ``content`` text of a spreadsheet row, e.g.
    ``"city: Springfield, state: IL, zip: 62701"``.  ``fields[0]`` is
    skipped -- presumably it is the row's title/key column (TODO confirm
    against the caller, which keys rows by ``entry['title']``).  A field
    name that does not occur in ``rowstring`` is silently skipped and its
    span merged into the preceding field's value.
    """
    i = iter(fields[1:])
    # Use the builtin next() instead of i.next(): works on both Python 2 and 3.
    field = next(i)
    start = end = 0
    try:
        while True:
            lastfield = field
            field = next(i)
            if rowstring.find(field) == -1:
                # This field is absent from the row: keep scanning for the next.
                field = lastfield
                continue
            end = rowstring.find(field)
            # Strip the "name:" prefix plus surrounding whitespace/commas.
            yield lastfield, re.sub('^.*?:', '', rowstring[start:end].strip().strip(',')).strip()
            start = end
    except StopIteration:
        # Field names exhausted: the last field's value runs to end of string.
        start = rowstring.find(field)
        yield lastfield, re.sub('^.*?:', '', rowstring[start:].strip().strip(',')).strip()
# Build SSdict: each row's title becomes the key; the parsed field/value
# pairs from its content string become the row's sub-dict.
for e in entryList:
entrydict = dict([x for x in parsestring(e['content']['$t'], fields)])
entrykey = e['title']['$t']
SSdict[entrykey] = entrydict
#print stringIn
pprint(SSdict)
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# ObservationTools documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 30 14:32:48 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'ObservationTools'
copyright = '2017, IA'
author = 'IA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ObservationToolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {<|fim▁hole|> # The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ObservationTools.tex', 'ObservationTools Documentation',
'IA', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'observationtools', 'ObservationTools Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ObservationTools', 'ObservationTools Documentation',
author, 'ObservationTools', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}<|fim▁end|> | |
<|file_name|>vimenv.py<|end_file_name|><|fim▁begin|>"""
vimenv.py
Vim-specific environment helpers. This module uses git and pathogen to manage
vim plugins.
"""
from collections import namedtuple
from os import path, makedirs, walk, chdir, getcwd
from urllib import urlretrieve
from subprocess import check_call
VimPlugin = namedtuple('VimPlugin', ['find_file', 'friendly', 'clone_url'])
plugins = [
VimPlugin('NERD_tree.vim', 'NERDTree', 'https://github.com/scrooloose/nerdtree.git'),
VimPlugin('unite.vim', 'Unite', 'https://github.com/Shougo/unite.vim.git'),
VimPlugin('airline.vim', 'Airline', 'https://github.com/bling/vim-airline'),
VimPlugin('fugitive.vim', 'Fugitive', 'git://github.com/tpope/vim-fugitive.git'),
VimPlugin('vimproc.vim', 'vimproc', 'https://github.com/Shougo/vimproc.vim.git'),
VimPlugin('molokai.vim', 'Molokai', 'https://github.com/tomasr/molokai.git'),
]
_dotvim = path.expanduser('~/.vim')
_autoload = path.join(_dotvim, 'autoload')
_bundle = path.join(_dotvim, 'bundle')
def ensure_pathogen():
"""Install pathogen.vim into ~/.vim/autoload if it is not already there.

Creates the autoload/ and bundle/ directories as needed and downloads
pathogen from tpo.pe.  No-op when the file already exists.
"""
if path.isfile(path.join(_dotvim, 'autoload/pathogen.vim')):
return
print 'Pathogen not installed, getting it.'
if not path.exists(_autoload):
print 'making autoload dir'
makedirs(_autoload)
if not path.exists(_bundle):
print 'making bundle dir'
makedirs(_bundle)
# Fetch pathogen over the network into ~/.vim/autoload/pathogen.vim.
print 'downloading pathogen'
urlretrieve('https://tpo.pe/pathogen.vim',
path.join(_autoload, 'pathogen.vim'))
def install_plugins():
ensure_pathogen()
def find_vim_file(dv):
for root, dirs, files in walk(_dotvim):
for file in files:
if file == vp.find_file:
return True
return False
origwd = getcwd()<|fim▁hole|> chdir(_bundle)
ex = None
for vp in plugins:
if find_vim_file(vp.find_file):
print 'found ' + vp.friendly
continue
print 'cloning ' + vp.friendly
clonecmd = ['git', 'clone', vp.clone_url]
try:
check_call(clonecmd)
except Exception as e:
ex = e
break
chdir(origwd)
if ex is not None:
raise ex<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup, find_packages
# Read the long-description sources relative to this setup.py's directory.
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.md')) as f:
CHANGES = f.read()
# Runtime dependencies of the guestbook Pyramid application.
requires = [
'pyramid',
'pyramid_jinja2',
'pyramid_debugtoolbar',
'pyramid_tm',
'SQLAlchemy',
'transaction',
'zope.sqlalchemy',
'waitress',
]
setup(name='guestbook',
version='0.1',
description='guestbook',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python :: 3",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web wsgi bfg pylons pyramid',
packages=find_packages(),
include_package_data=True,<|fim▁hole|> test_suite='guestbook',
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = guestbook:main
[console_scripts]
initialize_guestbook_db = guestbook.scripts.initializedb:main
""",
)<|fim▁end|> | zip_safe=False, |
<|file_name|>htmltitleelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLTitleElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLTitleElementBinding::HTMLTitleElementMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{ChildrenMutation, Node};
use crate::dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLTitleElement {
htmlelement: HTMLElement,
}
impl HTMLTitleElement {
// Build the plain (unrooted) struct; `new` is the public constructor.
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLTitleElement {
HTMLTitleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
// Create a rooted HTMLTitleElement reflected into the given document.
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLTitleElement> {
Node::reflect_node(
Box::new(HTMLTitleElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLTitleElementBinding::Wrap,
)
}
}
impl HTMLTitleElementMethods for HTMLTitleElement {
// https://html.spec.whatwg.org/multipage/#dom-title-text
// Getter: the concatenated text content of this node's children.
fn Text(&self) -> DOMString {
self.upcast::<Node>().child_text_content()
}
// https://html.spec.whatwg.org/multipage/#dom-title-text
// Setter: replace all children with a single text node holding `value`.
fn SetText(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value))
}
}
<|fim▁hole|> Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
let node = self.upcast::<Node>();
if node.is_in_doc() {
node.owner_doc().title_changed();
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
let node = self.upcast::<Node>();
if tree_in_doc {
node.owner_doc().title_changed();
}
}
}<|fim▁end|> | impl VirtualMethods for HTMLTitleElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> { |
<|file_name|>test_deletechannel.py<|end_file_name|><|fim▁begin|>from django.core.management import call_command
from django.test import TestCase
from mock import call
from mock import patch
from kolibri.core.content import models as content
class DeleteChannelTestCase(TestCase):
"""
Testcase for delete channel management command
"""
fixtures = ["content_test.json"]
the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
def delete_channel(self):
# Invoke the management command under test against the fixture channel.
call_command("deletechannel", self.the_channel_id)
def test_channelmetadata_delete_remove_metadata_object(self):
self.delete_channel()<|fim▁hole|> self.assertEquals(0, content.ContentNode.objects.count())
def test_channelmetadata_delete_leave_unrelated_contentnodes(self):
# Clone an existing node under a different id (same content, same channel
# fields) so exactly one node should survive the channel deletion.
c2c1 = content.ContentNode.objects.get(title="c2c1")
new_id = c2c1.id[:-1] + "1"
content.ContentNode.objects.create(
id=new_id,
content_id=c2c1.content_id,
kind=c2c1.kind,
channel_id=c2c1.channel_id,
available=True,
title=c2c1.title,
)
self.delete_channel()
self.assertEquals(1, content.ContentNode.objects.count())
def test_channelmetadata_delete_remove_file_objects(self):
# Deleting the channel must cascade to all File records.
self.delete_channel()
self.assertEquals(0, content.File.objects.count())
@patch("kolibri.core.content.models.paths.get_content_storage_file_path")
@patch("kolibri.core.content.models.os.remove")
def test_channelmetadata_delete_files(self, os_remove_mock, content_file_path):
path = "testing"
content_file_path.return_value = path
num_files = content.LocalFile.objects.filter(available=True).count()
self.delete_channel()
os_remove_mock.assert_has_calls([call(path)] * num_files)<|fim▁end|> | self.assertEquals(0, content.ChannelMetadata.objects.count())
def test_channelmetadata_delete_remove_contentnodes(self):
self.delete_channel() |
<|file_name|>associated-types-bound-failure.rs<|end_file_name|><|fim▁begin|>// run-rustfix
// Test equality constraints on associated types in a where clause.
#![allow(dead_code)]
pub trait ToInt {
fn to_int(&self) -> isize;
}
pub trait GetToInt
{
type R;<|fim▁hole|>
fn get(&self) -> <Self as GetToInt>::R;
}
fn foo<G>(g: G) -> isize
where G : GetToInt
{
ToInt::to_int(&g.get()) //~ ERROR E0277
}
fn bar<G : GetToInt>(g: G) -> isize
where G::R : ToInt
{
// Bounding the associated type G::R with ToInt makes this call well-typed,
// unlike `foo` above which omits the bound and fails with E0277.
ToInt::to_int(&g.get()) // OK
}
pub fn main() {
}<|fim▁end|> | |
<|file_name|>test_raid.py<|end_file_name|><|fim▁begin|><|fim▁hole|># pdev cbd
# pdev asd cbd
# pdev asd sd cbd
from hba_util import HBA
from raid_util import Raid_Util
from subdev import *
def test_create_remove_raid(raid_util, bdevs):
"""Create a raid over the given block devices, exercise a fail/re-add
cycle, then tear the raid down."""
sub_dev_list = []
for bdev in bdevs:
path = '/dev/' + bdev
sub_dev_list.append(path)
raid_util.set_sub_dev_list(sub_dev_list)
# Wipe the members, assemble the array, and wait for the initial sync.
raid_util.zero_raid_sub_dev()
raid_util.create_raid()
raid_util.show_raid_info()
raid_util.wait_sync()
# Simulate a member failure followed by re-add and recovery.
raid_util.fail_one()
raid_util.add_one()
raid_util.wait_recovery_time()
raid_util.remove_raid()
def get_raid_util():
"""Build a Raid_Util configured for a 4 GB-per-member raid6 array."""
#raid_util = Raid_Util('/root/src/mdadm_ext/', '/root/src/md_ext/')
raid_util = Raid_Util('/sbin/', None)
raid_util.set_raid_txn(False)
raid_util.init_raid()
raid_util.set_raid_level(6)
raid_util.set_raid_name('raid6')
# -e1.0: mdadm metadata version 1.0
raid_util.set_cmd_args('-e1.0')
raid_util.set_raid_sub_dev_size_KB(4 * 1024 * 1024)
return raid_util
def test_pdev_raid():
"""Raid directly over physical devices: try member counts 4..15."""
hba = HBA('mv64xx')
hba.get_bdev()
raid_util = get_raid_util()
for i in range(4, 16):
bdevs = hba.get_bdev_balanced(i)
# Skip counts for which the HBA cannot supply a balanced device set.
if len(bdevs):
print bdevs
test_create_remove_raid(raid_util, bdevs)
raid_util.exit_raid()
#test_pdev_raid()
def test_pdev_cbd_raid():
"""Raid over cbd devices layered on physical devices (pdev -> cbd -> raid)."""
# hba = HBA('mv64xx')
hba = HBA('mptspi')
hba.get_bdev()
raid_util = get_raid_util()
init_cbd()
for i in range(4, 16):
bdevs = hba.get_bdev_balanced(i)
if len(bdevs):
print bdevs
cbds = create_multi_cbd(bdevs)
print '%d %d: %s' % (len(cbds), i, cbds)
# Not enough cbd devices could be created: clean up and stop scaling up.
if len(cbds) <= i:
remove_multi_cbd(cbds)
break
test_create_remove_raid(raid_util, cbds)
remove_multi_cbd(cbds)
exit_cbd()
raid_util.exit_raid()
test_pdev_cbd_raid()<|fim▁end|> | #!/usr/bin/python
# pdev |
<|file_name|>azure.go<|end_file_name|><|fim▁begin|>package boshinit
import (
"fmt"
"github.com/enaml-ops/enaml"
"github.com/enaml-ops/enaml/cloudproperties/azure"
"github.com/enaml-ops/omg-cli/plugins/products/bosh-init/enaml-gen/azure_cpi"
)
const (
azureCPIJobName = "cpi"
azureCPIReleaseName = "bosh-azure-cpi"
)
type AzureInitConfig struct {
AzureInstanceSize string
AzureVnet string
AzureSubnet string
AzureSubscriptionID string
AzureTenantID string
AzureClientID string
AzureClientSecret string
AzureResourceGroup string
AzureStorageAccount string
AzureDefaultSecurityGroup string
AzureSSHPubKey string
AzureSSHUser string
AzureEnvironment string
AzurePrivateKeyPath string
}
// GetAzureDefaults returns a BoshBase pre-populated with the Azure defaults:
// the 10.0.0.0/24 private network, Azure's well-known DNS/metadata address
// 168.63.129.16, and pinned release/stemcell URLs with their SHA1 checksums.
func GetAzureDefaults() *BoshBase {
return &BoshBase{
NetworkCIDR: "10.0.0.0/24",
NetworkGateway: "10.0.0.1",
NetworkDNS: []string{"168.63.129.16"},
BoshReleaseURL: "https://bosh.io/d/github.com/cloudfoundry/bosh?v=256.2",
BoshReleaseSHA: "ff2f4e16e02f66b31c595196052a809100cfd5a8",
CPIReleaseURL: "https://bosh.io/d/github.com/cloudfoundry-incubator/bosh-azure-cpi-release?v=11",
CPIReleaseSHA: "395fc05c11ead59711188ebd0a684842a03dc93d",
GOAgentReleaseURL: "https://bosh.io/d/stemcells/bosh-azure-hyperv-ubuntu-trusty-go_agent?v=3262.4",
GOAgentSHA: "1ec76310cd99d4ad2dd2b239b3dfde09c609b292",
PrivateIP: "10.0.0.4",
NtpServers: []string{"0.pool.ntp.org", "1.pool.ntp.org"},
CPIJobName: azureCPIJobName,
PersistentDiskSize: 20000,
}
}
type AzureBosh struct {
cfg AzureInitConfig
boshbase *BoshBase
}
func NewAzureIaaSProvider(cfg AzureInitConfig, boshBase *BoshBase) IAASManifestProvider {
boshBase.CPIJobName = azureCPIJobName
return &AzureBosh{
cfg: cfg,
boshbase: boshBase,
}
}
func (a *AzureBosh) CreateCPIRelease() enaml.Release {
return enaml.Release{
Name: azureCPIReleaseName,
URL: a.boshbase.CPIReleaseURL,
SHA1: a.boshbase.CPIReleaseSHA,
}
}
func (a *AzureBosh) CreateCPITemplate() enaml.Template {
return enaml.Template{
Name: a.boshbase.CPIJobName,
Release: azureCPIReleaseName}
}
func (a *AzureBosh) CreateDiskPool() enaml.DiskPool {
return enaml.DiskPool{
Name: "disks",
DiskSize: a.boshbase.PersistentDiskSize,
}
}
<|fim▁hole|> }
}
func (a *AzureBosh) CreateResourcePool() (*enaml.ResourcePool, error) {
return a.boshbase.CreateResourcePool(a.resourcePoolCloudProperties)
}
// CreateManualNetwork builds the "private" manual network with a single
// subnet taken from the BoshBase CIDR/gateway/DNS settings and bound to the
// configured Azure vnet/subnet via cloud properties.
func (a *AzureBosh) CreateManualNetwork() enaml.ManualNetwork {
net := enaml.NewManualNetwork("private")
net.AddSubnet(enaml.Subnet{
Range: a.boshbase.NetworkCIDR,
Gateway: a.boshbase.NetworkGateway,
DNS: a.boshbase.NetworkDNS,
CloudProperties: azurecloudproperties.Network{
VnetName: a.cfg.AzureVnet,
SubnetName: a.cfg.AzureSubnet,
},
})
return net
}
// CreateVIPNetwork returns the "public" VIP network definition.
func (a *AzureBosh) CreateVIPNetwork() enaml.VIPNetwork {
	vipNet := enaml.NewVIPNetwork("public")
	return vipNet
}
// CreateJobNetwork returns the "public" network attachment (carrying the
// configured public static IP) for the bosh job, or nil when no public IP
// was configured.
func (a *AzureBosh) CreateJobNetwork() *enaml.Network {
	publicIP := a.boshbase.PublicIP
	if publicIP == "" {
		return nil
	}
	return &enaml.Network{
		Name:      "public",
		StaticIPs: []string{publicIP},
	}
}
func (a *AzureBosh) CreateCloudProvider() enaml.CloudProvider {
return enaml.CloudProvider{
Template: a.CreateCPITemplate(),
MBus: fmt.Sprintf("https://mbus:%s@%s:6868", a.boshbase.MBusPassword, a.boshbase.GetRoutableIP()),
SSHTunnel: enaml.SSHTunnel{
Host: a.boshbase.GetRoutableIP(),
Port: 22,
User: "vcap",
PrivateKeyPath: a.cfg.AzurePrivateKeyPath,
},
Properties: azure_cpi.AzureCpiJob{
Azure: a.createAzure(),
Agent: &azure_cpi.Agent{
Mbus: fmt.Sprintf("https://mbus:%[email protected]:6868", a.boshbase.MBusPassword),
},
Blobstore: &azure_cpi.Blobstore{
Provider: "local",
Path: "/var/vcap/micro_bosh/data/cache",
},
Ntp: a.boshbase.NtpServers,
},
}
}
// createAzure maps the CLI-provided AzureInitConfig onto the CPI job's
// azure property block (credentials, resource group, storage account,
// default NSG, and SSH identity).  Shared by the cloud-provider section
// and the director job properties.
func (a *AzureBosh) createAzure() *azure_cpi.Azure {
return &azure_cpi.Azure{
Environment: a.cfg.AzureEnvironment,
SubscriptionId: a.cfg.AzureSubscriptionID,
TenantId: a.cfg.AzureTenantID,
ClientId: a.cfg.AzureClientID,
ClientSecret: a.cfg.AzureClientSecret,
ResourceGroupName: a.cfg.AzureResourceGroup,
StorageAccountName: a.cfg.AzureStorageAccount,
DefaultSecurityGroup: a.cfg.AzureDefaultSecurityGroup,
SshUser: a.cfg.AzureSSHUser,
SshPublicKey: a.cfg.AzureSSHPubKey,
}
}
func (a *AzureBosh) CreateCPIJobProperties() map[string]interface{} {
return map[string]interface{}{
"azure": a.createAzure(),
"agent": &azure_cpi.Agent{
Mbus: fmt.Sprintf("nats://nats:%s@%s:4222", a.boshbase.NatsPassword, a.boshbase.PrivateIP),
},
}
}
func (a *AzureBosh) CreateDeploymentManifest() (*enaml.DeploymentManifest, error) {
manifest := a.boshbase.CreateDeploymentManifest()
manifest.AddRelease(a.CreateCPIRelease())
if rp, err := a.CreateResourcePool(); err != nil {
return nil, err
} else {
manifest.AddResourcePool(*rp)
}
manifest.AddDiskPool(a.CreateDiskPool())
manifest.AddNetwork(a.CreateManualNetwork())
manifest.AddNetwork(a.CreateVIPNetwork())
boshJob := manifest.Jobs[0]
boshJob.AddTemplate(a.CreateCPITemplate())
n := a.CreateJobNetwork()
if n != nil {
boshJob.AddNetwork(*n)
}
for name, val := range a.CreateCPIJobProperties() {
boshJob.AddProperty(name, val)
}
manifest.Jobs[0] = boshJob
manifest.SetCloudProvider(a.CreateCloudProvider())
return manifest, nil
}<|fim▁end|> | func (a *AzureBosh) resourcePoolCloudProperties() interface{} {
return azurecloudproperties.ResourcePool{
InstanceType: a.cfg.AzureInstanceSize, |
<|file_name|>ControllerB2.java<|end_file_name|><|fim▁begin|>package foo;
<|fim▁hole|>
/** Extends the parent behavior: runs super.foo() first, then logs its own line. */
public void foo() {
super.foo();
System.out.println("ControllerB.foo() running again!");
}
}<|fim▁end|> | public class ControllerB2 extends grails.TopB { |
<|file_name|>signal_linux_mipsx.go<|end_file_name|><|fim▁begin|>// +build linux
// +build mips mipsle mips64 mips64le
package signal // import "github.com/tiborvass/docker/pkg/signal"
import (
"syscall"
"golang.org/x/sys/unix"
)
// First and last real-time signal numbers for these MIPS targets; they are
// used below to populate the RTMIN+n / RTMAX-n entries of SignalMap.
const (
sigrtmin = 34
sigrtmax = 127
)
// SignalMap is a map of Linux signals.
var SignalMap = map[string]syscall.Signal{
"ABRT": unix.SIGABRT,
"ALRM": unix.SIGALRM,
"BUS": unix.SIGBUS,
"CHLD": unix.SIGCHLD,
"CLD": unix.SIGCLD,
"CONT": unix.SIGCONT,
"FPE": unix.SIGFPE,
"HUP": unix.SIGHUP,
"ILL": unix.SIGILL,<|fim▁hole|> "IO": unix.SIGIO,
"IOT": unix.SIGIOT,
"KILL": unix.SIGKILL,
"PIPE": unix.SIGPIPE,
"POLL": unix.SIGPOLL,
"PROF": unix.SIGPROF,
"PWR": unix.SIGPWR,
"QUIT": unix.SIGQUIT,
"SEGV": unix.SIGSEGV,
"EMT": unix.SIGEMT,
"STOP": unix.SIGSTOP,
"SYS": unix.SIGSYS,
"TERM": unix.SIGTERM,
"TRAP": unix.SIGTRAP,
"TSTP": unix.SIGTSTP,
"TTIN": unix.SIGTTIN,
"TTOU": unix.SIGTTOU,
"URG": unix.SIGURG,
"USR1": unix.SIGUSR1,
"USR2": unix.SIGUSR2,
"VTALRM": unix.SIGVTALRM,
"WINCH": unix.SIGWINCH,
"XCPU": unix.SIGXCPU,
"XFSZ": unix.SIGXFSZ,
"RTMIN": sigrtmin,
"RTMIN+1": sigrtmin + 1,
"RTMIN+2": sigrtmin + 2,
"RTMIN+3": sigrtmin + 3,
"RTMIN+4": sigrtmin + 4,
"RTMIN+5": sigrtmin + 5,
"RTMIN+6": sigrtmin + 6,
"RTMIN+7": sigrtmin + 7,
"RTMIN+8": sigrtmin + 8,
"RTMIN+9": sigrtmin + 9,
"RTMIN+10": sigrtmin + 10,
"RTMIN+11": sigrtmin + 11,
"RTMIN+12": sigrtmin + 12,
"RTMIN+13": sigrtmin + 13,
"RTMIN+14": sigrtmin + 14,
"RTMIN+15": sigrtmin + 15,
"RTMAX-14": sigrtmax - 14,
"RTMAX-13": sigrtmax - 13,
"RTMAX-12": sigrtmax - 12,
"RTMAX-11": sigrtmax - 11,
"RTMAX-10": sigrtmax - 10,
"RTMAX-9": sigrtmax - 9,
"RTMAX-8": sigrtmax - 8,
"RTMAX-7": sigrtmax - 7,
"RTMAX-6": sigrtmax - 6,
"RTMAX-5": sigrtmax - 5,
"RTMAX-4": sigrtmax - 4,
"RTMAX-3": sigrtmax - 3,
"RTMAX-2": sigrtmax - 2,
"RTMAX-1": sigrtmax - 1,
"RTMAX": sigrtmax,
}<|fim▁end|> | "INT": unix.SIGINT, |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>import django.conf
class AppSettings(object):
"""
A holder for app-specific default settings that allows overriding via
the project's settings.
"""
def __getattribute__(self, attr):
# Only all-uppercase attribute names are treated as settings; for those,
# a value defined in the Django project's settings takes precedence.
if attr == attr.upper():
try:
return getattr(django.conf.settings, attr)
except AttributeError:
pass
# Fall back to the default declared on this class (or its subclass).
return super(AppSettings, self).__getattribute__(attr)
class Settings(AppSettings):
COUNTRIES_FLAG_URL = 'flags/{code}.gif'
"""
The URL for a flag.
It can either be relative to the static url, or an absolute url.
The location is parsed using Python's string formatting and is passed the
following arguments:
* code
* code_upper
For example: ``COUNTRIES_FLAG_URL = 'flags/16x10/{code_upper}.png'``
"""
COUNTRIES_COMMON_NAMES = True
"""
Whether to use the common names for some countries, as opposed to the
official ISO name.
Some examples:
"Bolivia" instead of "Bolivia, Plurinational State of"
"South Korea" instead of "Korea (the Republic of)"
"Taiwan" instead of "Taiwan (Province of China)"
"""
COUNTRIES_OVERRIDE = {}
"""
A dictionary of names to override the defaults.
Note that you will need to handle translation of customised country names.
Setting a country's name to ``None`` will exclude it from the country list.
For example::
COUNTRIES_OVERRIDE = {
'NZ': _('Middle Earth'),
'AU': None
}
"""
COUNTRIES_ONLY = {}
"""
Similar to COUNTRIES_OVERRIDE
A dictionary of names to include in selection.
Note that you will need to handle translation of customised country names.
For example::
COUNTRIES_ONLY = {
'NZ': _('Middle Earth'),
'AU': _('Desert'),
}
"""
COUNTRIES_FIRST = []
"""
Countries matching the country codes provided in this list will be shown
first in the countries list (in the order specified) before all the
alphanumerically sorted countries.
"""
COUNTRIES_FIRST_REPEAT = False
"""
Countries listed in :attr:`COUNTRIES_FIRST` will be repeated again in the
alphanumerically sorted list if set to ``True``.<|fim▁hole|>
COUNTRIES_FIRST_BREAK = None
"""
Countries listed in :attr:`COUNTRIES_FIRST` will be followed by a null
choice with this title (if set) before all the alphanumerically sorted
countries.
"""
COUNTRIES_FIRST_SORT = False
"""
Countries listed in :attr:`COUNTRIES_FIRST` will be alphanumerically
sorted based on their translated name instead of relying on their
order in :attr:`COUNTRIES_FIRST`.
"""
settings = Settings()<|fim▁end|> | """ |
<|file_name|>polynomials.py<|end_file_name|><|fim▁begin|>from ..libmp.backend import xrange
from .calculus import defun
#----------------------------------------------------------------------------#
# Polynomials #
#----------------------------------------------------------------------------#
# XXX: extra precision
@defun
def polyval(ctx, coeffs, x, derivative=False):
    r"""
    Given coefficients `[c_n, \ldots, c_2, c_1, c_0]` and a number `x`,
    :func:`~mpmath.polyval` evaluates the polynomial

    .. math ::

        P(x) = c_n x^n + \ldots + c_2 x^2 + c_1 x + c_0.

    If *derivative=True* is set, :func:`~mpmath.polyval` simultaneously
    evaluates `P(x)` with the derivative, `P'(x)`, and returns the
    tuple `(P(x), P'(x))`.

        >>> from mpmath import *
        >>> mp.pretty = True
        >>> polyval([3, 0, 2], 0.5)
        2.75
        >>> polyval([3, 0, 2], 0.5, derivative=True)
        (2.75, 3.0)

    The coefficients and the evaluation point may be any combination
    of real or complex numbers.
    """
    if not coeffs:
        # An empty coefficient list denotes the zero polynomial.  Keep the
        # return shape consistent with the derivative flag: previously a bare
        # scalar was returned even when a (P, P') tuple was requested, which
        # broke tuple-unpacking callers.
        return (ctx.zero, ctx.zero) if derivative else ctx.zero
    # Horner's scheme; q accumulates the derivative alongside p.
    p = ctx.convert(coeffs[0])
    q = ctx.zero
    for c in coeffs[1:]:
        if derivative:
            # q must be updated before p so it uses the previous value of p:
            # the Horner recurrence for P' is q_new = p_old + x*q_old.
            q = p + x*q
        p = c + x*p
    if derivative:
        return p, q
    else:
        return p
@defun
def polyroots(ctx, coeffs, maxsteps=50, cleanup=True, extraprec=10,
error=False, roots_init=None):
"""
Computes all roots (real or complex) of a given polynomial.
The roots are returned as a sorted list, where real roots appear first
followed by complex conjugate roots as adjacent elements. The polynomial
should be given as a list of coefficients, in the format used by
:func:`~mpmath.polyval`. The leading coefficient must be nonzero.
With *error=True*, :func:`~mpmath.polyroots` returns a tuple *(roots, err)*
where *err* is an estimate of the maximum error among the computed roots.
**Examples**
Finding the three real roots of `x^3 - x^2 - 14x + 24`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> nprint(polyroots([1,-1,-14,24]), 4)
[-4.0, 2.0, 3.0]
Finding the two complex conjugate roots of `4x^2 + 3x + 2`, with an
error estimate::
>>> roots, err = polyroots([4,3,2], error=True)
>>> for r in roots:
... print(r)
...
(-0.375 + 0.59947894041409j)
(-0.375 - 0.59947894041409j)
>>>
>>> err
2.22044604925031e-16
>>>
>>> polyval([4,3,2], roots[0])
(2.22044604925031e-16 + 0.0j)
>>> polyval([4,3,2], roots[1])
(2.22044604925031e-16 + 0.0j)
The following example computes all the 5th roots of unity; that is,
the roots of `x^5 - 1`::
>>> mp.dps = 20
>>> for r in polyroots([1, 0, 0, 0, 0, -1]):
... print(r)
...
1.0
(-0.8090169943749474241 + 0.58778525229247312917j)
(-0.8090169943749474241 - 0.58778525229247312917j)
(0.3090169943749474241 + 0.95105651629515357212j)
(0.3090169943749474241 - 0.95105651629515357212j)
**Precision and conditioning**
The roots are computed to the current working precision accuracy. If this
accuracy cannot be achieved in ``maxsteps`` steps, then a
``NoConvergence`` exception is raised. The algorithm internally is using
the current working precision extended by ``extraprec``. If
``NoConvergence`` was raised, that is caused either by not having enough
extra precision to achieve convergence (in which case increasing
``extraprec`` should fix the problem) or too low ``maxsteps`` (in which
case increasing ``maxsteps`` should fix the problem), or a combination of
both.
The user should always do a convergence study with regards to
``extraprec`` to ensure accurate results. It is possible to get
convergence to a wrong answer with too low ``extraprec``.
Provided there are no repeated roots, :func:`~mpmath.polyroots` can
typically compute all roots of an arbitrary polynomial to high precision::
>>> mp.dps = 60
>>> for r in polyroots([1, 0, -10, 0, 1]):
... print(r)
...
-3.14626436994197234232913506571557044551247712918732870123249
-0.317837245195782244725757617296174288373133378433432554879127
0.317837245195782244725757617296174288373133378433432554879127
3.14626436994197234232913506571557044551247712918732870123249
>>>
>>> sqrt(3) + sqrt(2)
3.14626436994197234232913506571557044551247712918732870123249
>>> sqrt(3) - sqrt(2)
0.317837245195782244725757617296174288373133378433432554879127
**Algorithm**
:func:`~mpmath.polyroots` implements the Durand-Kerner method [1], which
uses complex arithmetic to locate all roots simultaneously.
The Durand-Kerner method can be viewed as approximately performing
simultaneous Newton iteration for all the roots. In particular,
the convergence to simple roots is quadratic, just like Newton's
method.
Although all roots are internally calculated using complex arithmetic, any
root found to have an imaginary part smaller than the estimated numerical
error is truncated to a real number (small real parts are also chopped).
Real roots are placed first in the returned list, sorted by value. The
remaining complex roots are sorted by their real parts so that conjugate
roots end up next to each other.
**References**
1. http://en.wikipedia.org/wiki/Durand-Kerner_method
"""
if len(coeffs) <= 1:
if not coeffs or not coeffs[0]:
raise ValueError("Input to polyroots must not be the zero polynomial")
# Constant polynomial with no roots
return []
orig = ctx.prec
tol = +ctx.eps
with ctx.extraprec(extraprec):
deg = len(coeffs) - 1
# Must be monic
lead = ctx.convert(coeffs[0])
if lead == 1:
coeffs = [ctx.convert(c) for c in coeffs]
else:
coeffs = [c/lead for c in coeffs]
f = lambda x: ctx.polyval(coeffs, x)
if roots_init is None:
roots = [ctx.mpc((0.4+0.9j)**n) for n in xrange(deg)]
else:
roots = [None]*deg;
deg_init = min(deg, len(roots_init))
roots[:deg_init] = list(roots_init[:deg_init])
roots[deg_init:] = [ctx.mpc((0.4+0.9j)**n) for n
in xrange(deg_init,deg)]
err = [ctx.one for n in xrange(deg)]
# Durand-Kerner iteration until convergence
for step in xrange(maxsteps):
if abs(max(err)) < tol:
break
for i in xrange(deg):
p = roots[i]
x = f(p)
for j in range(deg):
if i != j:
try:<|fim▁hole|> x /= (p-roots[j])
except ZeroDivisionError:
continue
roots[i] = p - x
err[i] = abs(x)
if abs(max(err)) >= tol:
raise ctx.NoConvergence("Didn't converge in maxsteps=%d steps." \
% maxsteps)
# Remove small real or imaginary parts
if cleanup:
for i in xrange(deg):
if abs(roots[i]) < tol:
roots[i] = ctx.zero
elif abs(ctx._im(roots[i])) < tol:
roots[i] = roots[i].real
elif abs(ctx._re(roots[i])) < tol:
roots[i] = roots[i].imag * 1j
roots.sort(key=lambda x: (abs(ctx._im(x)), ctx._re(x)))
if error:
err = max(err)
err = max(err, ctx.ldexp(1, -orig+1))
return [+r for r in roots], +err
else:
return [+r for r in roots]<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from django.conf import settings as django_settings
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.http import HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.cache import never_cache
from helfertool.utils import nopermission
from registration.models import Event
from registration.permissions import has_access, ACCESS_BADGES_EDIT
from ..forms import BadgeSettingsForm, BadgeDefaultsForm, BadgeJobDefaultsForm
from .utils import notactive
@login_required
@never_cache
def settings(request, event_url_name):
event = get_object_or_404(Event, url_name=event_url_name)
# check permission
if not has_access(request.user, event, ACCESS_BADGES_EDIT):
return nopermission(request)
# check if badge system is active
if not event.badges:
return notactive(request)
# roles
roles = event.badge_settings.badgerole_set.all()
# designs
designs = event.badge_settings.badgedesign_set.all()<|fim▁hole|>
# forms for defaults
defaults_form = BadgeDefaultsForm(request.POST or None,
instance=event.badge_settings.defaults,
settings=event.badge_settings,
prefix='event')
job_defaults_form = BadgeJobDefaultsForm(request.POST or None, event=event,
prefix='jobs')
if defaults_form.is_valid() and job_defaults_form.is_valid():
defaults_form.save()
job_defaults_form.save()
return redirect('badges:settings', event_url_name=event.url_name)
context = {'event': event,
'roles': roles,
'designs': designs,
'defaults_form': defaults_form,
'job_defaults_form': job_defaults_form}
return render(request, 'badges/settings.html', context)
@login_required
@never_cache
def settings_advanced(request, event_url_name):
    """
    Edit the advanced badge settings of an event.

    Guards: the user needs ACCESS_BADGES_EDIT on the event and the badge
    system must be enabled for the event.  On a valid POST the settings form
    is saved and the browser is redirected back to this page
    (POST/redirect/GET pattern).
    """
    event = get_object_or_404(Event, url_name=event_url_name)

    # permission and feature guards
    if not has_access(request.user, event, ACCESS_BADGES_EDIT):
        return nopermission(request)
    if not event.badges:
        return notactive(request)

    # permissions are listed read-only next to the settings form
    permissions = event.badge_settings.badgepermission_set.all()

    form = BadgeSettingsForm(request.POST or None, request.FILES or None,
                             instance=event.badge_settings)
    if form.is_valid():
        form.save()
        return redirect('badges:settings_advanced',
                        event_url_name=event.url_name)

    return render(request, 'badges/settings_advanced.html', {
        'event': event,
        'form': form,
        'permissions': permissions,
    })
@login_required
@never_cache
def default_template(request, event_url_name):
    """
    Download the default LaTeX badge template as ``template.tex``.

    Guards: the user needs ACCESS_BADGES_EDIT on the event and the badge
    system must be enabled for the event.
    """
    event = get_object_or_404(Event, url_name=event_url_name)

    # permission and feature guards
    if not has_access(request.user, event, ACCESS_BADGES_EDIT):
        return nopermission(request)
    if not event.badges:
        return notactive(request)

    # serve the template file from disk as an attachment
    response = HttpResponse(content_type='application/x-tex')
    response['Content-Disposition'] = 'attachment; filename="template.tex"'
    with open(django_settings.BADGE_DEFAULT_TEMPLATE, 'rb') as f:
        response.write(f.read())
    return response
@login_required
@never_cache
def current_template(request, event_url_name):
event = get_object_or_404(Event, url_name=event_url_name)
# check permission
if not has_access(request.user, event, ACCESS_BADGES_EDIT):
return nopermission(request)
# check if badge system is active
if not event.badges:
return notactive(request)
# check if file is there
if not event.badge_settings.latex_template:
raise Http404()
# output
response = HttpResponse(content_type='application/x-tex')
response['Content-Disposition'] = 'attachment; filename="template_{}.tex"'.format(event.url_name)
# send file
with event.badge_settings.latex_template.open('rb') as f:
response.write(f.read())
return response<|fim▁end|> | |
<|file_name|>verify_test.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. 2017 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.<|fim▁hole|>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sw
import (
"errors"
"reflect"
"testing"
mocks2 "github.com/hyperledger/fabric/bccsp/mocks"
"github.com/hyperledger/fabric/bccsp/sw/mocks"
"github.com/stretchr/testify/assert"
)
func TestVerify(t *testing.T) {
expectedKey := &mocks2.MockKey{}
expectetSignature := []byte{1, 2, 3, 4, 5}
expectetDigest := []byte{1, 2, 3, 4}
expectedOpts := &mocks2.SignerOpts{}
expectetValue := true
expectedErr := errors.New("Expected Error")
verifiers := make(map[reflect.Type]Verifier)
verifiers[reflect.TypeOf(&mocks2.MockKey{})] = &mocks.Verifier{
KeyArg: expectedKey,
SignatureArg: expectetSignature,
DigestArg: expectetDigest,
OptsArg: expectedOpts,
Value: expectetValue,
Err: nil,
}
csp := impl{verifiers: verifiers}
value, err := csp.Verify(expectedKey, expectetSignature, expectetDigest, expectedOpts)
assert.Equal(t, expectetValue, value)
assert.Nil(t, err)
verifiers = make(map[reflect.Type]Verifier)
verifiers[reflect.TypeOf(&mocks2.MockKey{})] = &mocks.Verifier{
KeyArg: expectedKey,
SignatureArg: expectetSignature,
DigestArg: expectetDigest,
OptsArg: expectedOpts,
Value: false,
Err: expectedErr,
}
csp = impl{verifiers: verifiers}
value, err = csp.Verify(expectedKey, expectetSignature, expectetDigest, expectedOpts)
assert.False(t, value)
assert.Contains(t, err.Error(), expectedErr.Error())
}<|fim▁end|> | You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>upgrade.js<|end_file_name|><|fim▁begin|>/**
* @license Angular v4.0.3
* (c) 2010-2017 Google, Inc. https://angular.io/
* License: MIT
*/
import { Compiler, ComponentFactoryResolver, Directive, ElementRef, EventEmitter, Inject, Injector, NgModule, NgZone, ReflectiveInjector, SimpleChange, Testability, Version } from '@angular/core';
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @module
* @description
* Entry point for all public APIs of the common package.
*/
/**
* \@stable
*/
const VERSION = new Version('4.0.3');
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @return {?}
*/
function noNg() {
throw new Error('AngularJS v1.x is not loaded!');
}
let angular = ({
bootstrap: noNg,
module: noNg,
element: noNg,
version: noNg,
resumeBootstrap: noNg,
getTestability: noNg
});
try {
if (window.hasOwnProperty('angular')) {
angular = ((window)).angular;
}
}
catch (e) {
}
/**
* Resets the AngularJS library.
*
* Used when angularjs is loaded lazily, and not available on `window`.
*
* \@stable
* @param {?} ng
* @return {?}
*/
/**
* Returns the current version of the AngularJS library.
*
* \@stable
* @return {?}
*/
const bootstrap = (e, modules, config) => angular.bootstrap(e, modules, config);
const module$1 = (prefix, dependencies) => angular.module(prefix, dependencies);
const element = (e) => angular.element(e);
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const $COMPILE = '$compile';
const $CONTROLLER = '$controller';
const $HTTP_BACKEND = '$httpBackend';
const $INJECTOR = '$injector';
const $PARSE = '$parse';
const $ROOT_SCOPE = '$rootScope';
const $SCOPE = '$scope';
const $TEMPLATE_CACHE = '$templateCache';
const $$TESTABILITY = '$$testability';
const COMPILER_KEY = '$$angularCompiler';
const INJECTOR_KEY = '$$angularInjector';
const NG_ZONE_KEY = '$$angularNgZone';
const REQUIRE_INJECTOR = '?^^' + INJECTOR_KEY;
const REQUIRE_NG_MODEL = '?ngModel';
/**
* A `PropertyBinding` represents a mapping between a property name
* and an attribute name. It is parsed from a string of the form
* `"prop: attr"`; or simply `"propAndAttr" where the property
* and attribute have the same identifier.
*/
class PropertyBinding {
/**
* @param {?} prop
* @param {?} attr
*/<|fim▁hole|> this.parseBinding();
}
/**
* @return {?}
*/
parseBinding() {
this.bracketAttr = `[${this.attr}]`;
this.parenAttr = `(${this.attr})`;
this.bracketParenAttr = `[(${this.attr})]`;
const /** @type {?} */ capitalAttr = this.attr.charAt(0).toUpperCase() + this.attr.substr(1);
this.onAttr = `on${capitalAttr}`;
this.bindAttr = `bind${capitalAttr}`;
this.bindonAttr = `bindon${capitalAttr}`;
}
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @param {?} e
* @return {?}
*/
function onError(e) {
    // TODO: (misko): We seem to not have a stack trace here!
    // Report through console.error when the environment provides it,
    // falling back to console.log; then rethrow so callers still see
    // the failure.
    const report = console.error ? console.error.bind(console) : console.log.bind(console);
    report(e, e.stack);
    throw e;
}
/**
* @param {?} name
* @return {?}
*/
function controllerKey(name) {
    // AngularJS stores directive controllers on element data under the
    // key '$<name>Controller'.
    return `$${name}Controller`;
}
/**
* @param {?} node
* @return {?}
*/
/**
* @param {?} component
* @return {?}
*/
function getComponentName(component) {
    // Prefer an explicit override, then the constructor's own name, and as
    // a last resort the first line of the component's stringified source.
    const named = component;
    return named.overriddenName || named.name || named.toString().split('\n')[0];
}
class Deferred {
    /**
     * Creates a promise together with externally accessible `resolve` and
     * `reject` functions that settle it.
     */
    constructor() {
        this.promise = new Promise((resolveFn, rejectFn) => {
            this.resolve = resolveFn;
            this.reject = rejectFn;
        });
    }
}
/**
* @param {?} component
* @return {?} Whether the passed-in component implements the subset of the
* `ControlValueAccessor` interface needed for AngularJS `ng-model`
* compatibility.
*/
function supportsNgModel(component) {
    // The component must implement at least `writeValue` and
    // `registerOnChange` (the subset of `ControlValueAccessor` needed
    // for ng-model interop).
    return ['writeValue', 'registerOnChange']
        .every(method => typeof component[method] === 'function');
}
/**
* Glue the AngularJS `NgModelController` (if it exists) to the component
* (if it implements the needed subset of the `ControlValueAccessor` interface).
* @param {?} ngModel
* @param {?} component
* @return {?}
*/
function hookupNgModel(ngModel, component) {
    if (!(ngModel && supportsNgModel(component))) {
        return;
    }
    // Render by pushing the AngularJS model value into the component, and
    // propagate component-side changes back into the ng-model.
    ngModel.$render = () => { component.writeValue(ngModel.$viewValue); };
    component.registerOnChange(ngModel.$setViewValue.bind(ngModel));
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const INITIAL_VALUE = {
__UNINITIALIZED__: true
};
class DowngradeComponentAdapter {
/**
* @param {?} id
* @param {?} element
* @param {?} attrs
* @param {?} scope
* @param {?} ngModel
* @param {?} parentInjector
* @param {?} $injector
* @param {?} $compile
* @param {?} $parse
* @param {?} componentFactory
*/
constructor(id, element, attrs, scope, ngModel, parentInjector, $injector, $compile, $parse, componentFactory) {
this.id = id;
this.element = element;
this.attrs = attrs;
this.scope = scope;
this.ngModel = ngModel;
this.parentInjector = parentInjector;
this.$injector = $injector;
this.$compile = $compile;
this.$parse = $parse;
this.componentFactory = componentFactory;
this.inputChangeCount = 0;
this.inputChanges = null;
this.componentRef = null;
this.component = null;
this.changeDetector = null;
this.element[0].id = id;
this.componentScope = scope.$new();
}
/**
* @return {?}
*/
compileContents() {
const /** @type {?} */ compiledProjectableNodes = [];
const /** @type {?} */ projectableNodes = this.groupProjectableNodes();
const /** @type {?} */ linkFns = projectableNodes.map(nodes => this.$compile(nodes));
this.element.empty();
linkFns.forEach(linkFn => {
linkFn(this.scope, (clone) => {
compiledProjectableNodes.push(clone);
this.element.append(clone);
});
});
return compiledProjectableNodes;
}
/**
* @param {?} projectableNodes
* @return {?}
*/
createComponent(projectableNodes) {
const /** @type {?} */ childInjector = ReflectiveInjector.resolveAndCreate([{ provide: $SCOPE, useValue: this.componentScope }], this.parentInjector);
this.componentRef =
this.componentFactory.create(childInjector, projectableNodes, this.element[0]);
this.changeDetector = this.componentRef.changeDetectorRef;
this.component = this.componentRef.instance;
hookupNgModel(this.ngModel, this.component);
}
/**
* @return {?}
*/
setupInputs() {
const /** @type {?} */ attrs = this.attrs;
const /** @type {?} */ inputs = this.componentFactory.inputs || [];
for (let /** @type {?} */ i = 0; i < inputs.length; i++) {
const /** @type {?} */ input = new PropertyBinding(inputs[i].propName, inputs[i].templateName);
let /** @type {?} */ expr = null;
if (attrs.hasOwnProperty(input.attr)) {
const /** @type {?} */ observeFn = (prop => {
let /** @type {?} */ prevValue = INITIAL_VALUE;
return (currValue) => {
if (prevValue === INITIAL_VALUE) {
prevValue = currValue;
}
this.updateInput(prop, prevValue, currValue);
prevValue = currValue;
};
})(input.prop);
attrs.$observe(input.attr, observeFn);
}
else if (attrs.hasOwnProperty(input.bindAttr)) {
expr = ((attrs) /** TODO #9100 */)[input.bindAttr];
}
else if (attrs.hasOwnProperty(input.bracketAttr)) {
expr = ((attrs) /** TODO #9100 */)[input.bracketAttr];
}
else if (attrs.hasOwnProperty(input.bindonAttr)) {
expr = ((attrs) /** TODO #9100 */)[input.bindonAttr];
}
else if (attrs.hasOwnProperty(input.bracketParenAttr)) {
expr = ((attrs) /** TODO #9100 */)[input.bracketParenAttr];
}
if (expr != null) {
const /** @type {?} */ watchFn = (prop => (currValue, prevValue) => this.updateInput(prop, prevValue, currValue))(input.prop);
this.componentScope.$watch(expr, watchFn);
}
}
const /** @type {?} */ prototype = this.componentFactory.componentType.prototype;
if (prototype && ((prototype)).ngOnChanges) {
// Detect: OnChanges interface
this.inputChanges = {};
this.componentScope.$watch(() => this.inputChangeCount, () => {
const /** @type {?} */ inputChanges = this.inputChanges;
this.inputChanges = {};
((this.component)).ngOnChanges(inputChanges);
});
}
this.componentScope.$watch(() => this.changeDetector && this.changeDetector.detectChanges());
}
/**
* @return {?}
*/
setupOutputs() {
const /** @type {?} */ attrs = this.attrs;
const /** @type {?} */ outputs = this.componentFactory.outputs || [];
for (let /** @type {?} */ j = 0; j < outputs.length; j++) {
const /** @type {?} */ output = new PropertyBinding(outputs[j].propName, outputs[j].templateName);
let /** @type {?} */ expr = null;
let /** @type {?} */ assignExpr = false;
const /** @type {?} */ bindonAttr = output.bindonAttr ? output.bindonAttr.substring(0, output.bindonAttr.length - 6) : null;
const /** @type {?} */ bracketParenAttr = output.bracketParenAttr ?
`[(${output.bracketParenAttr.substring(2, output.bracketParenAttr.length - 8)})]` :
null;
if (attrs.hasOwnProperty(output.onAttr)) {
expr = ((attrs) /** TODO #9100 */)[output.onAttr];
}
else if (attrs.hasOwnProperty(output.parenAttr)) {
expr = ((attrs) /** TODO #9100 */)[output.parenAttr];
}
else if (attrs.hasOwnProperty(bindonAttr)) {
expr = ((attrs) /** TODO #9100 */)[bindonAttr];
assignExpr = true;
}
else if (attrs.hasOwnProperty(bracketParenAttr)) {
expr = ((attrs) /** TODO #9100 */)[bracketParenAttr];
assignExpr = true;
}
if (expr != null && assignExpr != null) {
const /** @type {?} */ getter = this.$parse(expr);
const /** @type {?} */ setter = getter.assign;
if (assignExpr && !setter) {
throw new Error(`Expression '${expr}' is not assignable!`);
}
const /** @type {?} */ emitter = (this.component[output.prop]);
if (emitter) {
emitter.subscribe({
next: assignExpr ?
((setter) => (v /** TODO #9100 */) => setter(this.scope, v))(setter) :
((getter) => (v /** TODO #9100 */) => getter(this.scope, { $event: v }))(getter)
});
}
else {
throw new Error(`Missing emitter '${output.prop}' on component '${getComponentName(this.componentFactory.componentType)}'!`);
}
}
}
}
/**
* @return {?}
*/
registerCleanup() {
this.element.bind('$destroy', () => {
this.componentScope.$destroy();
this.componentRef.destroy();
});
}
/**
* @return {?}
*/
getInjector() { return this.componentRef && this.componentRef.injector; }
/**
* @param {?} prop
* @param {?} prevValue
* @param {?} currValue
* @return {?}
*/
updateInput(prop, prevValue, currValue) {
if (this.inputChanges) {
this.inputChangeCount++;
this.inputChanges[prop] = new SimpleChange(prevValue, currValue, prevValue === currValue);
}
this.component[prop] = currValue;
}
/**
* @return {?}
*/
groupProjectableNodes() {
let /** @type {?} */ ngContentSelectors = this.componentFactory.ngContentSelectors;
return groupNodesBySelector(ngContentSelectors, this.element.contents());
}
}
/**
* Group a set of DOM nodes into `ngContent` groups, based on the given content selectors.
* @param {?} ngContentSelectors
* @param {?} nodes
* @return {?}
*/
function groupNodesBySelector(ngContentSelectors, nodes) {
    // One bucket per <ng-content> selector.  Nodes that match no selector
    // are dropped (findMatchingNgContentIndex returns null for them).
    // The previously declared `wildcardNgContentIndex` local was never
    // used here (the wildcard is handled inside findMatchingNgContentIndex),
    // so it has been removed.
    const /** @type {?} */ projectableNodes = [];
    for (let /** @type {?} */ i = 0, /** @type {?} */ ii = ngContentSelectors.length; i < ii; ++i) {
        projectableNodes[i] = [];
    }
    for (let /** @type {?} */ j = 0, /** @type {?} */ jj = nodes.length; j < jj; ++j) {
        const /** @type {?} */ node = nodes[j];
        const /** @type {?} */ ngContentIndex = findMatchingNgContentIndex(node, ngContentSelectors);
        if (ngContentIndex != null) {
            projectableNodes[ngContentIndex].push(node);
        }
    }
    return projectableNodes;
}
/**
* @param {?} element
* @param {?} ngContentSelectors
* @return {?}
*/
function findMatchingNgContentIndex(element, ngContentSelectors) {
    // Collect the indices of all selectors matched by the element; the
    // wildcard selector ('*') is only used when nothing else matches.
    const /** @type {?} */ ngContentIndices = [];
    let /** @type {?} */ wildcardNgContentIndex;
    for (let /** @type {?} */ i = 0; i < ngContentSelectors.length; i++) {
        const /** @type {?} */ selector = ngContentSelectors[i];
        if (selector === '*') {
            wildcardNgContentIndex = i;
        }
        else if (matchesSelector(element, selector)) {
            ngContentIndices.push(i);
        }
    }
    // Sort numerically: the default Array#sort compares elements as strings
    // and would order index 10 before index 2, selecting the wrong
    // ng-content slot for components with ten or more selectors.
    ngContentIndices.sort((a, b) => a - b);
    if (wildcardNgContentIndex !== undefined) {
        ngContentIndices.push(wildcardNgContentIndex);
    }
    return ngContentIndices.length ? ngContentIndices[0] : null;
}
let _matches;
/**
* @param {?} el
* @param {?} selector
* @return {?}
*/
function matchesSelector(el, selector) {
    // Lazily resolve the (possibly vendor-prefixed) Element#matches
    // implementation once and cache it in the module-level `_matches`.
    if (!_matches) {
        const /** @type {?} */ elProto = (Element.prototype);
        _matches = elProto.matches || elProto.matchesSelector || elProto.mozMatchesSelector ||
            elProto.msMatchesSelector || elProto.oMatchesSelector || elProto.webkitMatchesSelector;
    }
    // Only element nodes can match a CSS selector; other node types
    // (text, comment, ...) never do.
    return el.nodeType === Node.ELEMENT_NODE ? _matches.call(el, selector) : false;
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
let downgradeCount = 0;
/**
* \@whatItDoes
*
* *Part of the [upgrade/static](/docs/ts/latest/api/#!?query=upgrade%2Fstatic)
* library for hybrid upgrade apps that support AoT compilation*
*
* Allows an Angular component to be used from AngularJS.
*
* \@howToUse
*
* Let's assume that you have an Angular component called `ng2Heroes` that needs
* to be made available in AngularJS templates.
*
* {\@example upgrade/static/ts/module.ts region="ng2-heroes"}
*
* We must create an AngularJS [directive](https://docs.angularjs.org/guide/directive)
* that will make this Angular component available inside AngularJS templates.
* The `downgradeComponent()` function returns a factory function that we
* can use to define the AngularJS directive that wraps the "downgraded" component.
*
* {\@example upgrade/static/ts/module.ts region="ng2-heroes-wrapper"}
*
* \@description
*
* A helper function that returns a factory function to be used for registering an
* AngularJS wrapper directive for "downgrading" an Angular component.
*
* The parameter contains information about the Component that is being downgraded:
*
* * `component: Type<any>`: The type of the Component that will be downgraded
*
* \@experimental
* @param {?} info
* @return {?}
*/
function downgradeComponent(info) {
const /** @type {?} */ idPrefix = `NG2_UPGRADE_${downgradeCount++}_`;
let /** @type {?} */ idCount = 0;
const /** @type {?} */ directiveFactory = function ($compile, $injector, $parse) {
return {
restrict: 'E',
terminal: true,
require: [REQUIRE_INJECTOR, REQUIRE_NG_MODEL],
link: (scope, element, attrs, required) => {
// We might have to compile the contents asynchronously, because this might have been
// triggered by `UpgradeNg1ComponentAdapterBuilder`, before the Angular templates have
// been compiled.
const /** @type {?} */ parentInjector = required[0] || $injector.get(INJECTOR_KEY);
const /** @type {?} */ ngModel = required[1];
const /** @type {?} */ downgradeFn = (injector) => {
const /** @type {?} */ componentFactoryResolver = injector.get(ComponentFactoryResolver);
const /** @type {?} */ componentFactory = componentFactoryResolver.resolveComponentFactory(info.component);
if (!componentFactory) {
throw new Error('Expecting ComponentFactory for: ' + getComponentName(info.component));
}
const /** @type {?} */ id = idPrefix + (idCount++);
const /** @type {?} */ injectorPromise = new ParentInjectorPromise$1(element);
const /** @type {?} */ facade = new DowngradeComponentAdapter(id, element, attrs, scope, ngModel, injector, $injector, $compile, $parse, componentFactory);
const /** @type {?} */ projectableNodes = facade.compileContents();
facade.createComponent(projectableNodes);
facade.setupInputs();
facade.setupOutputs();
facade.registerCleanup();
injectorPromise.resolve(facade.getInjector());
};
if (parentInjector instanceof ParentInjectorPromise$1) {
parentInjector.then(downgradeFn);
}
else {
downgradeFn(parentInjector);
}
}
};
};
// bracket-notation because of closure - see #14441
directiveFactory['$inject'] = [$COMPILE, $INJECTOR, $PARSE];
return directiveFactory;
}
/**
* Synchronous promise-like object to wrap parent injectors,
* to preserve the synchronous nature of Angular 1's $compile.
*/
class ParentInjectorPromise$1 {
    /**
     * @param {?} element host element on which the promise is published
     */
    constructor(element) {
        this.element = element;
        this.injectorKey = controllerKey(INJECTOR_KEY);
        this.callbacks = [];
        // Publish this promise on the element so descendant downgraded
        // components can find their parent injector before bootstrap finishes.
        element.data(this.injectorKey, this);
    }
    /**
     * Invoke `callback` with the injector — immediately if already resolved,
     * otherwise once `resolve()` is called.
     * @param {?} callback
     * @return {?}
     */
    then(callback) {
        if (!this.injector) {
            this.callbacks.push(callback);
            return;
        }
        callback(this.injector);
    }
    /**
     * Resolve the promise with the real injector and flush queued callbacks.
     * @param {?} injector
     * @return {?}
     */
    resolve(injector) {
        this.injector = injector;
        // Swap the promise for the real injector in the element's data.
        this.element.data(this.injectorKey, injector);
        // Drop the DOM reference so the element can be garbage collected.
        this.element = null;
        const /** @type {?} */ pending = this.callbacks;
        this.callbacks = [];
        pending.forEach(cb => cb(injector));
    }
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* \@whatItDoes
*
* *Part of the [upgrade/static](/docs/ts/latest/api/#!?query=upgrade%2Fstatic)
* library for hybrid upgrade apps that support AoT compilation*
*
* Allow an Angular service to be accessible from AngularJS.
*
* \@howToUse
*
* First ensure that the service to be downgraded is provided in an {\@link NgModule}
* that will be part of the upgrade application. For example, let's assume we have
* defined `HeroesService`
*
* {\@example upgrade/static/ts/module.ts region="ng2-heroes-service"}
*
* and that we have included this in our upgrade app {\@link NgModule}
*
* {\@example upgrade/static/ts/module.ts region="ng2-module"}
*
* Now we can register the `downgradeInjectable` factory function for the service
* on an AngularJS module.
*
* {\@example upgrade/static/ts/module.ts region="downgrade-ng2-heroes-service"}
*
* Inside an AngularJS component's controller we can get hold of the
* downgraded service via the name we gave when downgrading.
*
* {\@example upgrade/static/ts/module.ts region="example-app"}
*
* \@description
*
* Takes a `token` that identifies a service provided from Angular.
*
* Returns a [factory function](https://docs.angularjs.org/guide/di) that can be
* used to register the service on an AngularJS module.
*
* The factory function provides access to the Angular service that
* is identified by the `token` parameter.
*
* \@experimental
* @param {?} token
* @return {?}
*/
function downgradeInjectable(token) {
    // The returned factory asks the Angular injector for `token`; AngularJS
    // supplies that injector through the `$inject` annotation added below.
    const /** @type {?} */ factory = (injector) => injector.get(token);
    ((factory)).$inject = [INJECTOR_KEY];
    return factory;
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// Matches each uppercase letter; used to turn camelCased directive names
// into dash-cased element selectors.
const CAMEL_CASE = /([A-Z])/g;
// Sentinel marking a two-way ('=') binding slot whose value has not been seen yet.
const INITIAL_VALUE$1 = {
    __UNINITIALIZED__: true
};
// Placeholder passed for link-function arguments the upgrade adapter does not support.
const NOT_SUPPORTED = 'NOT_SUPPORTED';
/**
 * Builds an Angular component type that adapts (upgrades) a single AngularJS
 * component directive so it can be used from Angular templates. Instances are
 * collected by the adapter and completed via `resolve()` once the AngularJS
 * injector exists and the directive metadata can be read.
 */
class UpgradeNg1ComponentAdapterBuilder {
    /**
     * @param {?} name camelCased AngularJS directive name to upgrade
     */
    constructor(name) {
        this.name = name;
        this.inputs = [];
        this.inputsRename = [];
        this.outputs = [];
        this.outputsRename = [];
        this.propertyOutputs = [];
        this.checkProperties = [];
        this.propertyMap = {};
        this.linkFn = null;
        this.directive = null;
        this.$controller = null;
        // Derive the dash-cased element selector from the camelCased name.
        const selector = name.replace(CAMEL_CASE, (all /** TODO #9100 */, next) => '-' + next.toLowerCase());
        const self = this;
        // NOTE: inputsRename/outputsRename are captured by reference here and
        // are filled in later by extractBindings(), before first compilation.
        this.type =
            Directive({ selector: selector, inputs: this.inputsRename, outputs: this.outputsRename })
                .Class({
                constructor: [
                    new Inject($SCOPE), ElementRef,
                    function (scope, elementRef) {
                        return new UpgradeNg1ComponentAdapter(self.linkFn, scope, self.directive, elementRef, self.$controller, self.inputs, self.outputs, self.propertyOutputs, self.checkProperties, self.propertyMap);
                    }
                ],
                ngOnInit: function () { },
                ngOnChanges: function () { },
                ngDoCheck: function () { },
                ngOnDestroy: function () { },
            });
    }
    /**
     * Looks up the directive definition in the AngularJS injector and rejects
     * features the adapter cannot support (`replace`, `terminal`, post-link).
     * @param {?} injector AngularJS `$injector`
     * @return {?} the single directive definition object
     */
    extractDirective(injector) {
        const /** @type {?} */ directives = injector.get(this.name + 'Directive');
        if (directives.length > 1) {
            throw new Error('Only support single directive definition for: ' + this.name);
        }
        const /** @type {?} */ directive = directives[0];
        if (directive.replace)
            this.notSupported('replace');
        if (directive.terminal)
            this.notSupported('terminal');
        const /** @type {?} */ link = directive.link;
        if (typeof link == 'object') {
            if (((link)).post)
                this.notSupported('link.post');
        }
        return directive;
    }
    /**
     * @param {?} feature name of the unsupported directive feature
     * @return {?}
     */
    notSupported(feature) {
        throw new Error(`Upgraded directive '${this.name}' does not support '${feature}'.`);
    }
    /**
     * Translates the AngularJS scope / bindToController bindings
     * ('=', '@', '<', '&') into the Angular inputs/outputs arrays used by the
     * generated component type.
     * @return {?}
     */
    extractBindings() {
        const /** @type {?} */ btcIsObject = typeof this.directive.bindToController === 'object';
        if (btcIsObject && Object.keys(this.directive.scope).length) {
            throw new Error(`Binding definitions on scope and controller at the same time are not supported.`);
        }
        const /** @type {?} */ context = (btcIsObject) ? this.directive.bindToController : this.directive.scope;
        if (typeof context == 'object') {
            for (const /** @type {?} */ name in context) {
                if (((context)).hasOwnProperty(name)) {
                    let /** @type {?} */ localName = context[name];
                    const /** @type {?} */ type = localName.charAt(0);
                    const /** @type {?} */ typeOptions = localName.charAt(1);
                    // Strip the binding symbol (and an optional '?') to get the attribute name.
                    localName = typeOptions === '?' ? localName.substr(2) : localName.substr(1);
                    localName = localName || name;
                    const /** @type {?} */ outputName = 'output_' + name;
                    const /** @type {?} */ outputNameRename = outputName + ': ' + name;
                    const /** @type {?} */ outputNameRenameChange = outputName + ': ' + name + 'Change';
                    const /** @type {?} */ inputName = 'input_' + name;
                    const /** @type {?} */ inputNameRename = inputName + ': ' + name;
                    switch (type) {
                        case '=':
                            // Two-way binding: an input plus a `<name>Change`
                            // output, with the property watched in ngDoCheck.
                            this.propertyOutputs.push(outputName);
                            this.checkProperties.push(localName);
                            this.outputs.push(outputName);
                            this.outputsRename.push(outputNameRenameChange);
                            this.propertyMap[outputName] = localName;
                            this.inputs.push(inputName);
                            this.inputsRename.push(inputNameRename);
                            this.propertyMap[inputName] = localName;
                            break;
                        case '@':
                        // handle the '<' binding of angular 1.5 components
                        case '<':
                            this.inputs.push(inputName);
                            this.inputsRename.push(inputNameRename);
                            this.propertyMap[inputName] = localName;
                            break;
                        case '&':
                            // Expression binding becomes a plain output.
                            this.outputs.push(outputName);
                            this.outputsRename.push(outputNameRename);
                            this.propertyMap[outputName] = localName;
                            break;
                        default:
                            let /** @type {?} */ json = JSON.stringify(context);
                            throw new Error(`Unexpected mapping '${type}' in '${json}' in '${this.name}' directive.`);
                    }
                }
            }
        }
    }
    /**
     * Compiles the directive's template (inline, or fetched via `templateUrl`).
     * Returns a Promise when the template must be fetched over HTTP, else null.
     * @param {?} compile AngularJS `$compile`
     * @param {?} templateCache AngularJS `$templateCache`
     * @param {?} httpBackend AngularJS `$httpBackend`
     * @return {?}
     */
    compileTemplate(compile, templateCache, httpBackend) {
        if (this.directive.template !== undefined) {
            this.linkFn = compileHtml(isFunction(this.directive.template) ? this.directive.template() :
                this.directive.template);
        }
        else if (this.directive.templateUrl) {
            const /** @type {?} */ url = isFunction(this.directive.templateUrl) ? this.directive.templateUrl() :
                this.directive.templateUrl;
            const /** @type {?} */ html = templateCache.get(url);
            if (html !== undefined) {
                this.linkFn = compileHtml(html);
            }
            else {
                return new Promise((resolve, err) => {
                    httpBackend('GET', url, null, (status /** TODO #9100 */, response /** TODO #9100 */) => {
                        if (status == 200) {
                            // Cache the fetched template before compiling it.
                            resolve(this.linkFn = compileHtml(templateCache.put(url, response)));
                        }
                        else {
                            err(`GET ${url} returned ${status}: ${response}`);
                        }
                    });
                });
            }
        }
        else {
            throw new Error(`Directive '${this.name}' is not a component, it is missing template.`);
        }
        return null;
        /**
         * Function declaration is hoisted, so it is callable above the `return`.
         * @param {?} html
         * @return {?}
         */
        function compileHtml(html /** TODO #9100 */) {
            const /** @type {?} */ div = document.createElement('div');
            div.innerHTML = html;
            return compile(div.childNodes);
        }
    }
    /**
     * Upgrade ng1 components into Angular.
     * @param {?} exportedComponents map of name -> UpgradeNg1ComponentAdapterBuilder
     * @param {?} injector AngularJS `$injector`
     * @return {?} Promise resolved once every component's template is compiled
     */
    static resolve(exportedComponents, injector) {
        const /** @type {?} */ promises = [];
        const /** @type {?} */ compile = injector.get($COMPILE);
        const /** @type {?} */ templateCache = injector.get($TEMPLATE_CACHE);
        const /** @type {?} */ httpBackend = injector.get($HTTP_BACKEND);
        const /** @type {?} */ $controller = injector.get($CONTROLLER);
        for (const /** @type {?} */ name in exportedComponents) {
            if (((exportedComponents)).hasOwnProperty(name)) {
                const /** @type {?} */ exportedComponent = exportedComponents[name];
                exportedComponent.directive = exportedComponent.extractDirective(injector);
                exportedComponent.$controller = $controller;
                exportedComponent.extractBindings();
                const /** @type {?} */ promise = exportedComponent.compileTemplate(compile, templateCache, httpBackend);
                if (promise)
                    promises.push(promise);
            }
        }
        return Promise.all(promises);
    }
}
/**
 * Bridges one instance of an upgraded AngularJS component directive: creates
 * its scope/controller, compiles and links its template, and forwards Angular
 * input/output bindings to the underlying AngularJS bindings.
 */
class UpgradeNg1ComponentAdapter {
    /**
     * @param {?} linkFn pre-compiled template link function
     * @param {?} scope parent AngularJS scope
     * @param {?} directive AngularJS directive definition object
     * @param {?} elementRef host element of the generated Angular component
     * @param {?} $controller AngularJS `$controller` service
     * @param {?} inputs adapter input property names
     * @param {?} outputs adapter output property names
     * @param {?} propOuts `<name>Change` output names for '=' bindings
     * @param {?} checkProperties destination properties watched for '=' bindings
     * @param {?} propertyMap adapter property name -> ng1 binding name
     */
    constructor(linkFn, scope, directive, elementRef, $controller, inputs, outputs, propOuts, checkProperties, propertyMap) {
        this.linkFn = linkFn;
        this.directive = directive;
        this.$controller = $controller;
        this.inputs = inputs;
        this.outputs = outputs;
        this.propOuts = propOuts;
        this.checkProperties = checkProperties;
        this.propertyMap = propertyMap;
        this.controllerInstance = null;
        this.destinationObj = null;
        this.checkLastValues = [];
        this.$element = null;
        this.element = elementRef.nativeElement;
        // Create an isolated child scope iff the directive requested a scope.
        this.componentScope = scope.$new(!!directive.scope);
        this.$element = element(this.element);
        const controllerType = directive.controller;
        if (directive.bindToController && controllerType) {
            // Bindings are written onto the controller instance.
            this.controllerInstance = this.buildController(controllerType);
            this.destinationObj = this.controllerInstance;
        }
        else {
            // Bindings are written onto the component scope.
            this.destinationObj = this.componentScope;
        }
        for (let i = 0; i < inputs.length; i++) {
            this /** TODO #9100 */[inputs[i]] = null;
        }
        for (let j = 0; j < outputs.length; j++) {
            const emitter = this /** TODO #9100 */[outputs[j]] = new EventEmitter();
            // IIFE pins the emitter for the forwarding callback.
            this.setComponentProperty(outputs[j], ((emitter /** TODO #9100 */) => (value /** TODO #9100 */) => emitter.emit(value))(emitter));
        }
        for (let k = 0; k < propOuts.length; k++) {
            this /** TODO #9100 */[propOuts[k]] = new EventEmitter();
            this.checkLastValues.push(INITIAL_VALUE$1);
        }
    }
    /**
     * Instantiates the controller (for non-bindToController directives), runs
     * the pre-link function, detaches projected content and links the template.
     * @return {?}
     */
    ngOnInit() {
        if (!this.directive.bindToController && this.directive.controller) {
            this.controllerInstance = this.buildController(this.directive.controller);
        }
        if (this.controllerInstance && isFunction(this.controllerInstance.$onInit)) {
            this.controllerInstance.$onInit();
        }
        let /** @type {?} */ link = this.directive.link;
        if (typeof link == 'object')
            link = ((link)).pre;
        if (link) {
            const /** @type {?} */ attrs = NOT_SUPPORTED;
            const /** @type {?} */ transcludeFn = NOT_SUPPORTED;
            const /** @type {?} */ linkController = this.resolveRequired(this.$element, this.directive.require);
            ((this.directive.link))(this.componentScope, this.$element, attrs, linkController, transcludeFn);
        }
        // Detach the projected (transcluded) children before linking the template.
        const /** @type {?} */ childNodes = [];
        let /** @type {?} */ childNode;
        while (childNode = this.element.firstChild) {
            this.element.removeChild(childNode);
            childNodes.push(childNode);
        }
        this.linkFn(this.componentScope, (clonedElement, scope) => {
            for (let /** @type {?} */ i = 0, /** @type {?} */ ii = clonedElement.length; i < ii; i++) {
                this.element.appendChild(clonedElement[i]);
            }
        }, {
            parentBoundTranscludeFn: (scope /** TODO #9100 */, cloneAttach /** TODO #9100 */) => { cloneAttach(childNodes); }
        });
        if (this.controllerInstance && isFunction(this.controllerInstance.$postLink)) {
            this.controllerInstance.$postLink();
        }
    }
    /**
     * Mirrors Angular input changes into the ng1 destination object and
     * invokes the controller's `$onChanges` hook with the translated record.
     * @param {?} changes
     * @return {?}
     */
    ngOnChanges(changes) {
        const /** @type {?} */ ng1Changes = {};
        Object.keys(changes).forEach(name => {
            const /** @type {?} */ change = changes[name];
            this.setComponentProperty(name, change.currentValue);
            ng1Changes[this.propertyMap[name]] = change;
        });
        if (isFunction(this.destinationObj.$onChanges)) {
            this.destinationObj.$onChanges(ng1Changes);
        }
    }
    /**
     * Emits `<name>Change` events for '=' two-way bindings whose value changed.
     * @return {?}
     */
    ngDoCheck() {
        const /** @type {?} */ destinationObj = this.destinationObj;
        const /** @type {?} */ lastValues = this.checkLastValues;
        const /** @type {?} */ checkProperties = this.checkProperties;
        for (let /** @type {?} */ i = 0; i < checkProperties.length; i++) {
            const /** @type {?} */ value = destinationObj[checkProperties[i]];
            const /** @type {?} */ last = lastValues[i];
            if (value !== last) {
                // NaN !== NaN, so a NaN -> NaN transition is treated as "no change".
                if (typeof value == 'number' && isNaN(value) && typeof last == 'number' && isNaN(last)) {
                }
                else {
                    const /** @type {?} */ eventEmitter = ((this) /** TODO #9100 */)[this.propOuts[i]];
                    eventEmitter.emit(lastValues[i] = value);
                }
            }
        }
        if (this.controllerInstance && isFunction(this.controllerInstance.$doCheck)) {
            this.controllerInstance.$doCheck();
        }
    }
    /**
     * Forwards destruction to the controller's `$onDestroy` hook, if present.
     * @return {?}
     */
    ngOnDestroy() {
        if (this.controllerInstance && isFunction(this.controllerInstance.$onDestroy)) {
            this.controllerInstance.$onDestroy();
        }
    }
    /**
     * Writes `value` to the ng1 binding backing the adapter property `name`.
     * @param {?} name
     * @param {?} value
     * @return {?}
     */
    setComponentProperty(name, value) {
        this.destinationObj[this.propertyMap[name]] = value;
    }
    /**
     * Instantiates the directive controller and publishes it on the element
     * under the conventional controller data key.
     * @param {?} controllerType
     * @return {?}
     */
    buildController(controllerType /** TODO #9100 */) {
        const /** @type {?} */ locals = { $scope: this.componentScope, $element: this.$element };
        const /** @type {?} */ controller = this.$controller(controllerType, locals, null, this.directive.controllerAs);
        this.$element.data(controllerKey(this.directive.name), controller);
        return controller;
    }
    /**
     * Resolves the directive's `require` specification ('?', '^', '^^'
     * prefixes and arrays are supported) against element controller data.
     * @param {?} $element
     * @param {?} require
     * @return {?}
     */
    resolveRequired($element, require) {
        if (!require) {
            return undefined;
        }
        else if (typeof require == 'string') {
            let /** @type {?} */ name = (require);
            let /** @type {?} */ isOptional = false;
            let /** @type {?} */ startParent = false;
            let /** @type {?} */ searchParents = false;
            if (name.charAt(0) == '?') {
                isOptional = true;
                name = name.substr(1);
            }
            if (name.charAt(0) == '^') {
                searchParents = true;
                name = name.substr(1);
            }
            // A second '^' (i.e. '^^') means the search starts at the parent.
            if (name.charAt(0) == '^') {
                startParent = true;
                name = name.substr(1);
            }
            const /** @type {?} */ key = controllerKey(name);
            if (startParent)
                $element = $element.parent();
            const /** @type {?} */ dep = searchParents ? $element.inheritedData(key) : $element.data(key);
            if (!dep && !isOptional) {
                throw new Error(`Can not locate '${require}' in '${this.directive.name}'.`);
            }
            return dep;
        }
        else if (require instanceof Array) {
            const /** @type {?} */ deps = [];
            for (let /** @type {?} */ i = 0; i < require.length; i++) {
                deps.push(this.resolveRequired($element, require[i]));
            }
            return deps;
        }
        throw new Error(`Directive '${this.directive.name}' require syntax unrecognized: ${this.directive.require}`);
    }
}
/**
* @param {?} value
* @return {?}
*/
function isFunction(value) {
    // `typeof` is the canonical callable check; it also covers class constructors.
    const /** @type {?} */ valueType = typeof value;
    return valueType === 'function';
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
let upgradeCount = 0;
/**
* Use `UpgradeAdapter` to allow AngularJS and Angular to coexist in a single application.
*
* The `UpgradeAdapter` allows:
* 1. creation of Angular component from AngularJS component directive
* (See [UpgradeAdapter#upgradeNg1Component()])
* 2. creation of AngularJS directive from Angular component.
* (See [UpgradeAdapter#downgradeNg2Component()])
* 3. Bootstrapping of a hybrid Angular application which contains both of the frameworks
* coexisting in a single application.
*
* ## Mental Model
*
* When reasoning about how a hybrid application works it is useful to have a mental model which
* describes what is happening and explains what is happening at the lowest level.
*
* 1. There are two independent frameworks running in a single application, each framework treats
* the other as a black box.
* 2. Each DOM element on the page is owned exactly by one framework. Whichever framework
* instantiated the element is the owner. Each framework only updates/interacts with its own
* DOM elements and ignores others.
* 3. AngularJS directives always execute inside AngularJS framework codebase regardless of
* where they are instantiated.
* 4. Angular components always execute inside Angular framework codebase regardless of
* where they are instantiated.
* 5. An AngularJS component can be upgraded to an Angular component. This creates an
* Angular directive, which bootstraps the AngularJS component directive in that location.
* 6. An Angular component can be downgraded to an AngularJS component directive. This creates
* an AngularJS directive, which bootstraps the Angular component in that location.
* 7. Whenever an adapter component is instantiated the host element is owned by the framework
* doing the instantiation. The other framework then instantiates and owns the view for that
* component. This implies that component bindings will always follow the semantics of the
* instantiation framework. The syntax is always that of Angular syntax.
* 8. AngularJS is always bootstrapped first and owns the bottom most view.
* 9. The new application is running in Angular zone, and therefore it no longer needs calls to
* `$apply()`.
*
* ### Example
*
* ```
* const adapter = new UpgradeAdapter(forwardRef(() => MyNg2Module), myCompilerOptions);
* const module = angular.module('myExample', []);
* module.directive('ng2Comp', adapter.downgradeNg2Component(Ng2Component));
*
* module.directive('ng1Hello', function() {
* return {
* scope: { title: '=' },
* template: 'ng1[Hello {{title}}!](<span ng-transclude></span>)'
* };
* });
*
*
* \@Component({
* selector: 'ng2-comp',
* inputs: ['name'],
* template: 'ng2[<ng1-hello [title]="name">transclude</ng1-hello>](<ng-content></ng-content>)',
* directives:
* })
* class Ng2Component {
* }
*
* \@NgModule({
* declarations: [Ng2Component, adapter.upgradeNg1Component('ng1Hello')],
* imports: [BrowserModule]
* })
* class MyNg2Module {}
*
*
* document.body.innerHTML = '<ng2-comp name="World">project</ng2-comp>';
*
* adapter.bootstrap(document.body, ['myExample']).ready(function() {
* expect(document.body.textContent).toEqual(
* "ng2[ng1[Hello World!](transclude)](project)");
* });
*
* ```
*
* \@stable
*/
class UpgradeAdapter {
    /**
     * @param {?} ng2AppModule the Angular NgModule of the hybrid application
     * @param {?=} compilerOptions optional options forwarded to the Angular compiler
     */
    constructor(ng2AppModule, compilerOptions) {
        this.ng2AppModule = ng2AppModule;
        this.compilerOptions = compilerOptions;
        // Unique prefix so several adapters can coexist on one page.
        this.idPrefix = `NG2_UPGRADE_${upgradeCount++}_`;
        this.downgradedComponents = [];
        /**
         * An internal map of ng1 components which need to be upgraded to ng2.
         *
         * We can't upgrade until injector is instantiated and we can retrieve the component metadata.
         * For this reason we keep a list of components to upgrade until ng1 injector is bootstrapped.
         *
         * \@internal
         */
        this.ng1ComponentsToBeUpgraded = {};
        this.upgradedProviders = [];
        this.moduleRef = null;
        if (!ng2AppModule) {
            throw new Error('UpgradeAdapter cannot be instantiated without an NgModule of the Angular app.');
        }
    }
    /**
     * Allows Angular Component to be used from AngularJS.
     *
     * Use `downgradeNg2Component` to create an AngularJS Directive Definition Factory from
     * Angular Component. The adapter will bootstrap Angular component from within the
     * AngularJS template.
     *
     * ## Mental Model
     *
     * 1. The component is instantiated by being listed in AngularJS template. This means that the
     *    host element is controlled by AngularJS, but the component's view will be controlled by
     *    Angular.
     * 2. Even though the component is instantiated in AngularJS, it will be using Angular
     *    syntax. This has to be done this way because Angular components do not
     *    declare how the attributes should be interpreted.
     * 3. `ng-model` is controlled by AngularJS and communicates with the downgraded Angular component
     *    by way of the `ControlValueAccessor` interface from \@angular/forms. Only components that
     *    implement this interface are eligible.
     *
     * ## Supported Features
     *
     * - Bindings:
     *   - Attribute: `<comp name="World">`
     *   - Interpolation:  `<comp greeting="Hello {{name}}!">`
     *   - Expression:  `<comp [name]="username">`
     *   - Event:  `<comp (close)="doSomething()">`
     *   - ng-model: `<comp ng-model="name">`
     * - Content projection: yes
     *
     * ### Example
     *
     * ```
     * const adapter = new UpgradeAdapter(forwardRef(() => MyNg2Module));
     * const module = angular.module('myExample', []);
     * module.directive('greet', adapter.downgradeNg2Component(Greeter));
     *
     * \@Component({
     *   selector: 'greet',
     *   template: '{{salutation}} {{name}}! - <ng-content></ng-content>'
     * })
     * class Greeter {
     *   \@Input() salutation: string;
     *   \@Input() name: string;
     * }
     *
     * \@NgModule({
     *   declarations: [Greeter],
     *   imports: [BrowserModule]
     * })
     * class MyNg2Module {}
     *
     * document.body.innerHTML =
     *   'ng1 template: <greet salutation="Hello" [name]="world">text</greet>';
     *
     * adapter.bootstrap(document.body, ['myExample']).ready(function() {
     *   expect(document.body.textContent).toEqual("ng1 template: Hello world! - text");
     * });
     * ```
     * @param {?} component
     * @return {?}
     */
    downgradeNg2Component(component) {
        // Remember the component so it can be registered as an entryComponent
        // of the dynamically created upgrade NgModule (see declareNg1Module).
        this.downgradedComponents.push(component);
        return downgradeComponent({ component });
    }
    /**
     * Allows AngularJS Component to be used from Angular.
     *
     * Use `upgradeNg1Component` to create an Angular component from AngularJS Component
     * directive. The adapter will bootstrap AngularJS component from within the Angular
     * template.
     *
     * ## Mental Model
     *
     * 1. The component is instantiated by being listed in Angular template. This means that the
     *    host element is controlled by Angular, but the component's view will be controlled by
     *    AngularJS.
     *
     * ## Supported Features
     *
     * - Bindings:
     *   - Attribute: `<comp name="World">`
     *   - Interpolation:  `<comp greeting="Hello {{name}}!">`
     *   - Expression:  `<comp [name]="username">`
     *   - Event:  `<comp (close)="doSomething()">`
     * - Transclusion: yes
     * - Only some of the features of
     *   [Directive Definition Object](https://docs.angularjs.org/api/ng/service/$compile) are
     *   supported:
     *   - `compile`: not supported because the host element is owned by Angular, which does
     *     not allow modifying DOM structure during compilation.
     *   - `controller`: supported. (NOTE: injection of `$attrs` and `$transclude` is not supported.)
     *   - `controllerAs`: supported.
     *   - `bindToController`: supported.
     *   - `link`: supported. (NOTE: only pre-link function is supported.)
     *   - `name`: supported.
     *   - `priority`: ignored.
     *   - `replace`: not supported.
     *   - `require`: supported.
     *   - `restrict`: must be set to 'E'.
     *   - `scope`: supported.
     *   - `template`: supported.
     *   - `templateUrl`: supported.
     *   - `terminal`: ignored.
     *   - `transclude`: supported.
     *
     *
     * ### Example
     *
     * ```
     * const adapter = new UpgradeAdapter(forwardRef(() => MyNg2Module));
     * const module = angular.module('myExample', []);
     *
     * module.directive('greet', function() {
     *   return {
     *     scope: {salutation: '=', name: '=' },
     *     template: '{{salutation}} {{name}}! - <span ng-transclude></span>'
     *   };
     * });
     *
     * module.directive('ng2', adapter.downgradeNg2Component(Ng2Component));
     *
     * \@Component({
     *   selector: 'ng2',
     *   template: 'ng2 template: <greet salutation="Hello" [name]="world">text</greet>'
     * })
     * class Ng2Component {
     * }
     *
     * \@NgModule({
     *   declarations: [Ng2Component, adapter.upgradeNg1Component('greet')],
     *   imports: [BrowserModule]
     * })
     * class MyNg2Module {}
     *
     * document.body.innerHTML = '<ng2></ng2>';
     *
     * adapter.bootstrap(document.body, ['myExample']).ready(function() {
     *   expect(document.body.textContent).toEqual("ng2 template: Hello world! - text");
     * });
     * ```
     * @param {?} name
     * @return {?}
     */
    upgradeNg1Component(name) {
        // One builder per directive name; repeated calls return the same type.
        if (((this.ng1ComponentsToBeUpgraded)).hasOwnProperty(name)) {
            return this.ng1ComponentsToBeUpgraded[name].type;
        }
        else {
            return (this.ng1ComponentsToBeUpgraded[name] = new UpgradeNg1ComponentAdapterBuilder(name))
                .type;
        }
    }
    /**
     * Registers the adapter's AngularJS upgrade module for unit testing in AngularJS.
     * Use this instead of `angular.mock.module()` to load the upgrade module into
     * the AngularJS testing injector.
     *
     * ### Example
     *
     * ```
     * const upgradeAdapter = new UpgradeAdapter(MyNg2Module);
     *
     * // configure the adapter with upgrade/downgrade components and services
     * upgradeAdapter.downgradeNg2Component(MyComponent);
     *
     * let upgradeAdapterRef: UpgradeAdapterRef;
     * let $compile, $rootScope;
     *
     * // We must register the adapter before any calls to `inject()`
     * beforeEach(() => {
     *   upgradeAdapterRef = upgradeAdapter.registerForNg1Tests(['heroApp']);
     * });
     *
     * beforeEach(inject((_$compile_, _$rootScope_) => {
     *   $compile = _$compile_;
     *   $rootScope = _$rootScope_;
     * }));
     *
     * it("says hello", (done) => {
     *   upgradeAdapterRef.ready(() => {
     *     const element = $compile("<my-component></my-component>")($rootScope);
     *     $rootScope.$apply();
     *     expect(element.html()).toContain("Hello World");
     *     done();
     *   })
     * });
     *
     * ```
     *
     * @param {?=} modules any AngularJS modules that the upgrade module should depend upon
     * @return {?} an {\@link UpgradeAdapterRef}, which lets you register a `ready()` callback to
     * run assertions once the Angular components are ready to test through AngularJS.
     */
    registerForNg1Tests(modules) {
        const /** @type {?} */ windowNgMock = ((window))['angular'].mock;
        if (!windowNgMock || !windowNgMock.module) {
            throw new Error('Failed to find \'angular.mock.module\'.');
        }
        this.declareNg1Module(modules);
        // Load the upgrade module into the ngMock test injector.
        windowNgMock.module(this.ng1Module.name);
        const /** @type {?} */ upgrade = new UpgradeAdapterRef();
        this.ng2BootstrapDeferred.promise.then((ng1Injector) => { ((upgrade))._bootstrapDone(this.moduleRef, ng1Injector); }, onError);
        return upgrade;
    }
    /**
     * Bootstrap a hybrid AngularJS / Angular application.
     *
     * This `bootstrap` method is a direct replacement (takes same arguments) for AngularJS
     * [`bootstrap`](https://docs.angularjs.org/api/ng/function/angular.bootstrap) method. Unlike
     * AngularJS, this bootstrap is asynchronous.
     *
     * ### Example
     *
     * ```
     * const adapter = new UpgradeAdapter(MyNg2Module);
     * const module = angular.module('myExample', []);
     * module.directive('ng2', adapter.downgradeNg2Component(Ng2));
     *
     * module.directive('ng1', function() {
     *   return {
     *      scope: { title: '=' },
     *      template: 'ng1[Hello {{title}}!](<span ng-transclude></span>)'
     *   };
     * });
     *
     *
     * \@Component({
     *   selector: 'ng2',
     *   inputs: ['name'],
     *   template: 'ng2[<ng1 [title]="name">transclude</ng1>](<ng-content></ng-content>)'
     * })
     * class Ng2 {
     * }
     *
     * \@NgModule({
     *   declarations: [Ng2, adapter.upgradeNg1Component('ng1')],
     *   imports: [BrowserModule]
     * })
     * class MyNg2Module {}
     *
     * document.body.innerHTML = '<ng2 name="World">project</ng2>';
     *
     * adapter.bootstrap(document.body, ['myExample']).ready(function() {
     *   expect(document.body.textContent).toEqual(
     *       "ng2[ng1[Hello World!](transclude)](project)");
     * });
     * ```
     * @param {?} element
     * @param {?=} modules
     * @param {?=} config
     * @return {?}
     */
    bootstrap(element$$1, modules, config) {
        this.declareNg1Module(modules);
        const /** @type {?} */ upgrade = new UpgradeAdapterRef();
        // Make sure resumeBootstrap() only exists if the current bootstrap is deferred
        const /** @type {?} */ windowAngular = ((window) /** TODO #???? */)['angular'];
        windowAngular.resumeBootstrap = undefined;
        this.ngZone.run(() => { bootstrap(element$$1, [this.ng1Module.name], config); });
        const /** @type {?} */ ng1BootstrapPromise = new Promise((resolve) => {
            if (windowAngular.resumeBootstrap) {
                // Deferred bootstrap: intercept resumeBootstrap so we know when
                // the application author actually resumes.
                const /** @type {?} */ originalResumeBootstrap = windowAngular.resumeBootstrap;
                windowAngular.resumeBootstrap = function () {
                    windowAngular.resumeBootstrap = originalResumeBootstrap;
                    windowAngular.resumeBootstrap.apply(this, arguments);
                    resolve();
                };
            }
            else {
                resolve();
            }
        });
        // Signal readiness only after BOTH frameworks have bootstrapped.
        Promise.all([this.ng2BootstrapDeferred.promise, ng1BootstrapPromise]).then(([ng1Injector]) => {
            element(element$$1).data(controllerKey(INJECTOR_KEY), this.moduleRef.injector);
            this.moduleRef.injector.get(NgZone).run(() => { ((upgrade))._bootstrapDone(this.moduleRef, ng1Injector); });
        }, onError);
        return upgrade;
    }
    /**
     * Allows AngularJS service to be accessible from Angular.
     *
     *
     * ### Example
     *
     * ```
     * class Login { ... }
     * class Server { ... }
     *
     * \@Injectable()
     * class Example {
     *   constructor(\@Inject('server') server, login: Login) {
     *     ...
     *   }
     * }
     *
     * const module = angular.module('myExample', []);
     * module.service('server', Server);
     * module.service('login', Login);
     *
     * const adapter = new UpgradeAdapter(MyNg2Module);
     * adapter.upgradeNg1Provider('server');
     * adapter.upgradeNg1Provider('login', {asToken: Login});
     *
     * adapter.bootstrap(document.body, ['myExample']).ready((ref) => {
     *   const example: Example = ref.ng2Injector.get(Example);
     * });
     *
     * ```
     * @param {?} name
     * @param {?=} options
     * @return {?}
     */
    upgradeNg1Provider(name, options) {
        const /** @type {?} */ token = options && options.asToken || name;
        // The provider resolves lazily through the ng1 $injector at request time.
        this.upgradedProviders.push({
            provide: token,
            useFactory: ($injector) => $injector.get(name),
            deps: [$INJECTOR]
        });
    }
    /**
     * Allows Angular service to be accessible from AngularJS.
     *
     *
     * ### Example
     *
     * ```
     * class Example {
     * }
     *
     * const adapter = new UpgradeAdapter(MyNg2Module);
     *
     * const module = angular.module('myExample', []);
     * module.factory('example', adapter.downgradeNg2Provider(Example));
     *
     * adapter.bootstrap(document.body, ['myExample']).ready((ref) => {
     *   const example: Example = ref.ng1Injector.get('example');
     * });
     *
     * ```
     * @param {?} token
     * @return {?}
     */
    downgradeNg2Provider(token) { return downgradeInjectable(token); }
    /**
     * Declare the AngularJS upgrade module for this adapter without bootstrapping the whole
     * hybrid application.
     *
     * This method is automatically called by `bootstrap()` and `registerForNg1Tests()`.
     *
     * @param {?=} modules The AngularJS modules that this upgrade module should depend upon.
     * @return {?} The AngularJS upgrade module that is declared by this method
     *
     * ### Example
     *
     * ```
     * const upgradeAdapter = new UpgradeAdapter(MyNg2Module);
     * upgradeAdapter.declareNg1Module(['heroApp']);
     * ```
     */
    declareNg1Module(modules = []) {
        const /** @type {?} */ delayApplyExps = [];
        let /** @type {?} */ original$applyFn;
        let /** @type {?} */ rootScopePrototype;
        let /** @type {?} */ rootScope;
        const /** @type {?} */ upgradeAdapter = this;
        const /** @type {?} */ ng1Module = this.ng1Module = module$1(this.idPrefix, modules);
        const /** @type {?} */ platformRef = platformBrowserDynamic();
        this.ngZone = new NgZone({ enableLongStackTrace: Zone.hasOwnProperty('longStackTraceZoneSpec') });
        this.ng2BootstrapDeferred = new Deferred();
        ng1Module.factory(INJECTOR_KEY, () => this.moduleRef.injector.get(Injector))
            .constant(NG_ZONE_KEY, this.ngZone)
            .factory(COMPILER_KEY, () => this.moduleRef.injector.get(Compiler))
            .config([
            '$provide', '$injector',
            (provide, ng1Injector) => {
                provide.decorator($ROOT_SCOPE, [
                    '$delegate',
                    function (rootScopeDelegate) {
                        // Capture the root apply so that we can delay first call to $apply until we
                        // bootstrap Angular and then we replay and restore the $apply.
                        rootScopePrototype = rootScopeDelegate.constructor.prototype;
                        if (rootScopePrototype.hasOwnProperty('$apply')) {
                            original$applyFn = rootScopePrototype.$apply;
                            rootScopePrototype.$apply = (exp) => delayApplyExps.push(exp);
                        }
                        else {
                            throw new Error('Failed to find \'$apply\' on \'$rootScope\'!');
                        }
                        return rootScope = rootScopeDelegate;
                    }
                ]);
                if (ng1Injector.has($$TESTABILITY)) {
                    provide.decorator($$TESTABILITY, [
                        '$delegate',
                        function (testabilityDelegate) {
                            const /** @type {?} */ originalWhenStable = testabilityDelegate.whenStable;
                            // Cannot use arrow function below because we need the context
                            const /** @type {?} */ newWhenStable = function (callback) {
                                originalWhenStable.call(this, function () {
                                    const /** @type {?} */ ng2Testability = upgradeAdapter.moduleRef.injector.get(Testability);
                                    if (ng2Testability.isStable()) {
                                        callback.apply(this, arguments);
                                    }
                                    else {
                                        // Wait until the Angular side is stable too.
                                        ng2Testability.whenStable(newWhenStable.bind(this, callback));
                                    }
                                });
                            };
                            testabilityDelegate.whenStable = newWhenStable;
                            return testabilityDelegate;
                        }
                    ]);
                }
            }
        ]);
        ng1Module.run([
            '$injector', '$rootScope',
            (ng1Injector, rootScope) => {
                UpgradeNg1ComponentAdapterBuilder.resolve(this.ng1ComponentsToBeUpgraded, ng1Injector)
                    .then(() => {
                    // At this point we have ng1 injector and we have lifted ng1 components into ng2, we
                    // now can bootstrap ng2.
                    const /** @type {?} */ DynamicNgUpgradeModule = NgModule({
                        providers: [
                            { provide: $INJECTOR, useFactory: () => ng1Injector },
                            { provide: $COMPILE, useFactory: () => ng1Injector.get($COMPILE) },
                            this.upgradedProviders
                        ],
                        imports: [this.ng2AppModule],
                        entryComponents: this.downgradedComponents
                    }).Class({
                        constructor: function DynamicNgUpgradeModule() { },
                        ngDoBootstrap: function () { }
                    });
                    ((platformRef))
                        ._bootstrapModuleWithZone(DynamicNgUpgradeModule, this.compilerOptions, this.ngZone)
                        .then((ref) => {
                        this.moduleRef = ref;
                        this.ngZone.run(() => {
                            if (rootScopePrototype) {
                                rootScopePrototype.$apply = original$applyFn; // restore original $apply
                                // Replay the $apply calls queued during bootstrap.
                                while (delayApplyExps.length) {
                                    rootScope.$apply(delayApplyExps.shift());
                                }
                                rootScopePrototype = null;
                            }
                        });
                    })
                        .then(() => this.ng2BootstrapDeferred.resolve(ng1Injector), onError)
                        .then(() => {
                        // Keep ng1 digesting whenever the Angular zone settles.
                        let /** @type {?} */ subscription = this.ngZone.onMicrotaskEmpty.subscribe({ next: () => rootScope.$digest() });
                        rootScope.$on('$destroy', () => { subscription.unsubscribe(); });
                    });
                })
                    .catch((e) => this.ng2BootstrapDeferred.reject(e));
            }
        ]);
        return ng1Module;
    }
}
/**
 * Use `UpgradeAdapterRef` to control a hybrid AngularJS / Angular application.
 *
 * An instance is handed to the `ready()` callback once both frameworks have
 * bootstrapped; it exposes the injectors and root scope of each side and can
 * tear the whole hybrid application down via `dispose()`.
 *
 * \@stable
 */
class UpgradeAdapterRef {
    constructor() {
        // Everything is populated later by `_bootstrapDone()`; until then the
        // reference is inert and `ready()` callbacks are simply queued (last
        // one wins, matching the single-callback contract of `ready()`).
        this._readyFn = null;
        this.ng1RootScope = null;
        this.ng1Injector = null;
        this.ng2ModuleRef = null;
        this.ng2Injector = null;
    }
    /**
     * Record the bootstrapped artifacts of both frameworks and notify the
     * registered `ready()` callback, if any.
     * @param {?} ngModuleRef the bootstrapped Angular module reference
     * @param {?} ng1Injector the AngularJS `$injector` service
     * @return {?}
     */
    _bootstrapDone(ngModuleRef, ng1Injector) {
        const ng2Injector = ngModuleRef.injector;
        this.ng2ModuleRef = ngModuleRef;
        this.ng2Injector = ng2Injector;
        this.ng1Injector = ng1Injector;
        this.ng1RootScope = ng1Injector.get($ROOT_SCOPE);
        if (this._readyFn) {
            this._readyFn(this);
        }
    }
    /**
     * Register a callback function which is notified upon successful hybrid
     * AngularJS / Angular application bootstrap.
     *
     * The `ready` callback function is invoked inside the Angular zone,
     * therefore it does not require a call to `$apply()`.
     * @param {?} fn
     * @return {?}
     */
    ready(fn) {
        this._readyFn = fn;
    }
    /**
     * Dispose of the running hybrid AngularJS / Angular application:
     * destroys the AngularJS root scope first, then the Angular module.
     * @return {?}
     */
    dispose() {
        const rootScope = this.ng1Injector.get($ROOT_SCOPE);
        rootScope.$destroy();
        this.ng2ModuleRef.destroy();
    }
}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @module
* @description
* Entry point for all public APIs of the upgrade/dynamic package, allowing
* Angular 1 and Angular 2+ to run side by side in the same application.
*/
// This file only re-exports content of the `src` folder. Keep it that way.
/**
* Generated bundle index. Do not edit.
*/
export { VERSION, UpgradeAdapter, UpgradeAdapterRef };
//# sourceMappingURL=upgrade.js.map<|fim▁end|> | constructor(prop, attr) {
this.prop = prop;
this.attr = attr; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.