prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
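Each row below is one prompt/completion pair: the prompt is a source file with a span cut out at the <|fim▁hole|> marker, and the completion is the removed text. A minimal sketch of how the two columns recombine, assuming the sentinel strings exactly as printed in the rows and exactly one hole per prompt (which holds for every row shown here):

FIM_BEGIN = "<|fim\u2581begin|>"   # "\u2581" is the "▁" in the printed tokens
FIM_HOLE = "<|fim\u2581hole|>"
FIM_END = "<|fim\u2581end|>"

def reconstruct(prompt: str, completion: str) -> str:
    # Drop the "<|file_name|>...<|end_file_name|>" header and the
    # trailing sentinel, then splice the completion into the hole.
    body = prompt.split(FIM_BEGIN, 1)[1]
    body = body.rsplit(FIM_END, 1)[0]
    prefix, suffix = body.split(FIM_HOLE, 1)
    return prefix + completion + suffix

Rows with empty completions (the column minimum is 0) reconstruct correctly as well, since splicing an empty string is a no-op.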
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>/** * Boolean logic core */ /** * @author Алексей Кляузер <[email protected]><|fim▁hole|>package org.deneblingvo.booleans.core;<|fim▁end|>
 * Boolean logic core */
<|file_name|>Print.js<|end_file_name|><|fim▁begin|>/* Copyright (C) 2014 Härnösands kommun This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ /** * Action for print using mapfish. */ Ext.define('OpenEMap.action.Print', { extend : 'OpenEMap.action.Action', require : 'GeoExt.plugins.PrintExtent', constructor : function(config) { var mapPanel = config.mapPanel; var printExtent = mapPanel.plugins[0]; var printProvider = printExtent.printProvider; printProvider.customParams = {attribution: config.mapPanel.config.attribution.trim(), mapTitle: ''}; var printDialog = null; var page = null; var onTransformComplete = function() { var scale = printDialog.down('#scale'); scale.select(page.scale); }; var onBeforedownload = function() { if (printDialog) printDialog.setLoading(false); }; var onPrintexception = function(printProvider, response) { if (printDialog) printDialog.setLoading(false); Ext.Msg.show({ title:'Felmeddelande', msg: 'Print failed.\n\n' + response.responseText, icon: Ext.Msg.ERROR }); }; var close = function() { printProvider.un('beforedownload', onBeforedownload); printProvider.on('printexception', onPrintexception); printExtent.control.events.unregister('transformcomplete', null, onTransformComplete); printExtent.removePage(page); printExtent.hide(); printDialog = null;<|fim▁hole|> control.deactivate(); }; config.iconCls = config.iconCls || 'action-print'; config.tooltip = config.tooltip || 'Skriv ut'; config.toggleGroup = 'extraTools'; var Custom = OpenLayers.Class(OpenLayers.Control, { initialize: function(options) { OpenLayers.Control.prototype.initialize.apply( this, arguments ); }, type: OpenLayers.Control.TYPE_TOGGLE, activate: function() { if (printDialog) { return; } // NOTE: doing a hide/show at first display fixes interaction problems with preview extent for unknown reasons printExtent.hide(); printExtent.show(); page = printExtent.addPage(); printProvider.dpis.data.items.forEach(function(d){ var validDpi = false; if (d.data.name === '72'){ validDpi = true; d.data.name = 'Låg (' +d.data.name + ' dpi)'; } else if (d.data.name === '150'){ validDpi = true; d.data.name = 'Medel (' +d.data.name + ' dpi)'; } else if (d.data.name === '300'){ validDpi = true; d.data.name = 'Hög (' +d.data.name + ' dpi)'; } }); printProvider.layouts.data.items.forEach(function(p){ if (/landscape$/.test(p.data.name)){ p.data.displayName = p.data.name.replace('landscape', 'liggande'); } else if (/portrait$/.test(p.data.name)){ p.data.displayName = p.data.name.replace('portrait', 'stående'); } }); printDialog = new Ext.Window({ autoHeight : true, width : 290, resizable: false, layout : 'fit', bodyPadding : '5 5 0', title: 'Utskriftsinst&auml;llningar', listeners: { close: onClose }, items : [ { xtype : 'form', layout : 'anchor', defaults : { anchor : '100%' }, fieldDefaults : { labelWidth : 120 }, items : [ { xtype : 'textfield', fieldLabel: 'Rubrik', valueField: 'mapTitle', itemId : 'mapTitle', queryMode: 'local', value: 
printProvider.customParams.mapTitle, listeners: { change: function(textfield){ printProvider.customParams.mapTitle = textfield.value; } } },{ xtype : 'combo', fieldLabel: 'Pappersformat', store : printProvider.layouts, displayField : 'displayName', valueField : 'name', itemId : 'printLayouts', queryMode: 'local', value : printProvider.layouts.getAt(0).get("name"), listeners: { select: function(combo, records, eOpts) { var record = records[0]; printProvider.setLayout(record); } } }, { xtype : 'combo', fieldLabel: 'Kvalité', store : printProvider.dpis, displayField : 'name', valueField : 'value', queryMode: 'local', value: printProvider.dpis.first().get("value"), listeners: { select: function(combo, records, eOpts) { var record = records[0]; printProvider.setDpi(record); } } }, { xtype : 'combo', fieldLabel: 'Skala', store : printProvider.scales, displayField : 'name', valueField : 'value', queryMode: 'local', itemId: 'scale', value: printProvider.scales.first().get("value"), listeners: { select: function(combo, records, eOpts) { var record = records[0]; page.setScale(record, "m"); } } } ] } ], bbar : [ '->', { text : "Skriv ut", handler : function() { printDialog.setLoading(true); printExtent.print(); } } ] }); printDialog.show(); var scale = printDialog.down('#scale'); scale.select(page.scale); var layoutId = 6; var printLayouts = printDialog.down('#printLayouts'); printLayouts.select(printLayouts.store.data.get(layoutId)); var currentPrintLayout = printLayouts.store.data.items[layoutId]; printProvider.setLayout(currentPrintLayout); printExtent.control.events.register('transformcomplete', null, onTransformComplete); printExtent.control.events.register('transformcomplete', null, onTransformComplete); printProvider.on('beforedownload', onBeforedownload); printProvider.on('printexception', onPrintexception); OpenLayers.Control.prototype.activate.apply(this, arguments); }, deactivate: function() { if (printDialog) printDialog.close(); OpenLayers.Control.prototype.deactivate.apply(this, arguments); } }); var control = new Custom({ type: OpenLayers.Control.TYPE_TOGGLE }); config.control = control; this.callParent(arguments); } });<|fim▁end|>
}; var onClose = function() { close();
<|file_name|>Fonts.java<|end_file_name|><|fim▁begin|>package de.uks.beast.editor.util; public enum Fonts { //@formatter:off HADOOP_MASTER_TITEL ("Arial", 10, true, true),<|fim▁hole|> HADOOP_MASTER_PROPERTY ("Arial", 8, false, true), HADOOP_SLAVE_PROPERTY ("Arial", 8, false, true), NETWORK_PROPERTY ("Arial", 8, false, true), CONTROL_CENTER_PROPERTY ("Arial", 8, false, true), ;//@formatter:on private final String name; private final int size; private final boolean italic; private final boolean bold; private Fonts(final String name, final int size, final boolean italic, final boolean bold) { this.name = name; this.size = size; this.italic = italic; this.bold = bold; } /** * @return the name */ public String getName() { return name; } /** * @return the size */ public int getSize() { return size; } /** * @return the italic */ public boolean isItalic() { return italic; } /** * @return the bold */ public boolean isBold() { return bold; } }<|fim▁end|>
HADOOP_SLAVE_TITEL ("Arial", 10, true, true), NETWORK_TITEL ("Arial", 10, true, true), CONTROL_CENTER_TITEL ("Arial", 10, true, true),
<|file_name|>page.py<|end_file_name|><|fim▁begin|>""" Created on Sep 14, 2015 @author: Mikhail """ from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support.expected_conditions import visibility_of_element_located, visibility_of from selenium.common.exceptions import TimeoutException __author__ = 'Mikhail'<|fim▁hole|> class Page(object): def __init__(self, driver, url): self.driver = driver self.url = url self.wait = WebDriverWait(self.driver, 5) def open_page(self, url): self.driver.get(url) def is_element_visible_by_locator(self, locator): try: self.wait.until(visibility_of_element_located(locator)) except TimeoutException: return False return True def is_element_visible(self, element): try: self.wait.until(visibility_of(element)) except TimeoutException: return False return True<|fim▁end|>
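A short usage sketch for the Page helper above; the URL and locator are hypothetical, and a working local WebDriver (here Chrome, via the Selenium 4 API) is assumed:

from selenium import webdriver
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()
page = Page(driver, "https://example.com/login")  # hypothetical URL
page.open_page(page.url)

# Locators are (By, selector) tuples, matching what
# visibility_of_element_located expects in the class above.
if page.is_element_visible_by_locator((By.ID, "username")):  # hypothetical id
    driver.find_element(By.ID, "username").send_keys("demo")
driver.quit()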
<|file_name|>Assignment.py<|end_file_name|><|fim▁begin|>from django.core import serializers from django.http import HttpResponse, JsonResponse from Assessment.models import * from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST, require_GET import json @csrf_exempt @require_GET def getAssignmentByCode(request): response_data = {} try: C = Assignment.objects.getAssignmentByCode(request.GET) except Exception as e:<|fim▁hole|> data = serializers.serialize('json', [C, ]) response_data["assignment"] = json.loads(data) return JsonResponse(response_data) @csrf_exempt @require_GET def getAssignmentsByCourse(request): print (request) response_data = {} try: C = Assignment.objects.getAssignmentsByCourse(request.GET) except Exception as e: response_data["success"] = 0 response_data['exception'] = str(e) else: response_data["success"] = 1 data = serializers.serialize('json', [C, ]) response_data["assignment"] = json.loads(data) return JsonResponse(response_data) @csrf_exempt @require_GET def retrieveAssignmentByBranch(request): response_data = {} try: C = Assignment.objects.filter(assignmentCode__contains="SE") except Exception as e: response_data['success'] = '0' response_data['exception'] = str(e) else: response_data['success'] = '1' global data try: data = serializers.serialize('json', C) except Exception as e: data = serializers.serialize('json', [C, ]) response_data["assignment"] = json.loads(data) return JsonResponse(response_data) @csrf_exempt @require_GET def retrieveAssignmentResponses(request): response_data = {} try: C = AssignmentResponse.objects.retrieveAssignmentResponsesByStudent(request.GET) except Exception as e: response_data['success'] = '0' response_data['exception'] = str(e) else: response_data['success'] = '1' global data try: data = serializers.serialize('json', C) except Exception as e: data = serializers.serialize('json', [C, ]) response_data["assignment"] = json.loads(data) return JsonResponse(response_data) @csrf_exempt @require_GET def retrieveAssignments(request): response_data = {} try: C = Assignment.objects.retrieveAssignments(request.GET) except Exception as e: response_data['success'] = '0' response_data['exception'] = str(e) else: response_data['success'] = '1' global data try: data = serializers.serialize('json', C) except Exception as e: data = serializers.serialize('json', [C, ]) response_data["assignment"] = json.loads(data) return JsonResponse(response_data)<|fim▁end|>
response_data["success"] = 0 response_data['exception'] = str(e) else: response_data["success"] = 1
<|file_name|>delete_handler.go<|end_file_name|><|fim▁begin|>package users import ( "net/http" "regexp" "strings" "github.com/pivotal-cf-experimental/warrant/internal/server/common" "github.com/pivotal-cf-experimental/warrant/internal/server/domain" ) type deleteHandler struct { users *domain.Users tokens *domain.Tokens } func (h deleteHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { token := strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") if ok := h.tokens.Validate(token, domain.Token{ Audiences: []string{"scim"}, Authorities: []string{"scim.write"}, }); !ok { common.JSONError(w, http.StatusUnauthorized, "Full authentication is required to access this resource", "unauthorized") return }<|fim▁hole|> matches := regexp.MustCompile(`/Users/(.*)$`).FindStringSubmatch(req.URL.Path) id := matches[1] if ok := h.users.Delete(id); !ok { common.JSONError(w, http.StatusNotFound, "User non-existant-user-guid does not exist", "scim_resource_not_found") return } w.WriteHeader(http.StatusOK) }<|fim▁end|>
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals from case import Mock, patch from amqp.five import text_t from amqp.utils import (NullHandler, bytes_to_str, coro, get_errno, get_logger, str_to_bytes) class test_get_errno: def test_has_attr(self): exc = KeyError('foo') exc.errno = 23 assert get_errno(exc) == 23 def test_in_args(self): exc = KeyError(34, 'foo') exc.args = (34, 'foo') assert get_errno(exc) == 34 def test_args_short(self): exc = KeyError(34) assert not get_errno(exc) def test_no_args(self): assert not get_errno(object()) class test_coro: def test_advances(self): @coro def x(): yield 1 yield 2 it = x() assert next(it) == 2 class test_str_to_bytes: def test_from_unicode(self): assert isinstance(str_to_bytes(u'foo'), bytes) def test_from_bytes(self): assert isinstance(str_to_bytes(b'foo'), bytes) def test_supports_surrogates(self): bytes_with_surrogates = '\ud83d\ude4f'.encode('utf-8', 'surrogatepass') assert str_to_bytes(u'\ud83d\ude4f') == bytes_with_surrogates class test_bytes_to_str: def test_from_unicode(self): assert isinstance(bytes_to_str(u'foo'), text_t) def test_from_bytes(self): assert bytes_to_str(b'foo') def test_support_surrogates(self): assert bytes_to_str(u'\ud83d\ude4f') == u'\ud83d\ude4f' class test_NullHandler: def test_emit(self): NullHandler().emit(Mock(name='record')) class test_get_logger: def test_as_str(self): with patch('logging.getLogger') as getLogger: x = get_logger('foo.bar') getLogger.assert_called_with('foo.bar') assert x is getLogger() <|fim▁hole|> with patch('amqp.utils.NullHandler') as _NullHandler: m = Mock(name='logger') m.handlers = None x = get_logger(m) assert x is m x.addHandler.assert_called_with(_NullHandler())<|fim▁end|>
def test_as_logger(self):
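test_coro above only passes if the decorator primes the generator: it must advance past the first yield when the generator is created, so the first value a caller sees is the second one. A minimal sketch of a decorator with that behavior — an assumption inferred from the test, not the actual source of amqp.utils.coro:

from functools import wraps

def coro(gen_func):
    @wraps(gen_func)
    def wrapper(*args, **kwargs):
        gen = gen_func(*args, **kwargs)
        next(gen)   # consume the first yield on creation...
        return gen  # ...so next(it) returns the second value
    return wrapper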
<|file_name|>range_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Cockroach Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. See the AUTHORS file // for names of contributors. // // Author: Spencer Kimball ([email protected]) package storage import ( "bytes" "encoding/gob" "reflect" "testing" "github.com/cockroachdb/cockroach/gossip" ) var ( testRangeDescriptor = RangeDescriptor{ StartKey: KeyMin, Replicas: []Replica{ { NodeID: 1, StoreID: 1, RangeID: 1, Attrs: Attributes([]string{"dc1", "mem"}), }, { NodeID: 2, StoreID: 1, RangeID: 1, Attrs: Attributes([]string{"dc2", "mem"}), }, }, } testDefaultAcctConfig = AcctConfig{} testDefaultPermConfig = PermConfig{ Perms: []Permission{ {Read: true, Write: true}, }, } testDefaultZoneConfig = ZoneConfig{ Replicas: []Attributes{ Attributes([]string{"dc1", "mem"}), Attributes([]string{"dc2", "mem"}), }, } ) // createTestEngine creates an in-memory engine and initializes some // default configuration settings. func createTestEngine(t *testing.T) Engine { engine := NewInMem(Attributes([]string{"dc1", "mem"}), 1<<20) if err := putI(engine, KeyConfigAccountingPrefix, testDefaultAcctConfig); err != nil { t.Fatal(err) } if err := putI(engine, KeyConfigPermissionPrefix, testDefaultPermConfig); err != nil { t.Fatal(err) } if err := putI(engine, KeyConfigZonePrefix, testDefaultZoneConfig); err != nil { t.Fatal(err) } return engine } // createTestRange creates a new range initialized to the full extent // of the keyspace. The gossip instance is also returned for testing. func createTestRange(engine Engine, t *testing.T) (*Range, *gossip.Gossip) { rm := RangeMetadata{ RangeID: 0, StartKey: KeyMin, EndKey: KeyMax, Replicas: testRangeDescriptor, } g := gossip.New() r := NewRange(rm, engine, nil, g) r.Start() return r, g } // TestRangeGossipFirstRange verifies that the first range gossips its location. func TestRangeGossipFirstRange(t *testing.T) { r, g := createTestRange(createTestEngine(t), t) defer r.Stop() info, err := g.GetInfo(gossip.KeyFirstRangeMetadata) if err != nil { t.Fatal(err) } if !reflect.DeepEqual(info.(RangeDescriptor), testRangeDescriptor) { t.Errorf("expected gossipped range locations to be equal: %s vs %s", info.(RangeDescriptor), testRangeDescriptor) } } // TestRangeGossipAllConfigs verifies that all config types are // gossipped. func TestRangeGossipAllConfigs(t *testing.T) { r, g := createTestRange(createTestEngine(t), t) defer r.Stop() testData := []struct { gossipKey string configs []*prefixConfig }{<|fim▁hole|> {gossip.KeyConfigZone, []*prefixConfig{&prefixConfig{KeyMin, &testDefaultZoneConfig}}}, } for _, test := range testData { info, err := g.GetInfo(test.gossipKey) if err != nil { t.Fatal(err) } configs := info.([]*prefixConfig) if !reflect.DeepEqual(configs, test.configs) { t.Errorf("expected gossiped configs to be equal %s vs %s", configs, test.configs) } } } // TestRangeGossipConfigWithMultipleKeyPrefixes verifies that multiple // key prefixes for a config are gossipped. 
func TestRangeGossipConfigWithMultipleKeyPrefixes(t *testing.T) { engine := createTestEngine(t) // Add a permission for a new key prefix. db1Perm := PermConfig{ Perms: []Permission{ {Users: []string{"spencer"}, Read: true, Write: true, Priority: 100.0}, {Users: []string{"foo", "bar", "baz"}, Read: true, Write: false, Priority: 10.0}, }, } key := MakeKey(KeyConfigPermissionPrefix, Key("/db1")) if err := putI(engine, key, db1Perm); err != nil { t.Fatal(err) } r, g := createTestRange(engine, t) defer r.Stop() info, err := g.GetInfo(gossip.KeyConfigPermission) if err != nil { t.Fatal(err) } configs := info.([]*prefixConfig) expConfigs := []*prefixConfig{ &prefixConfig{KeyMin, &testDefaultPermConfig}, &prefixConfig{Key("/db1"), &db1Perm}, } if !reflect.DeepEqual(configs, expConfigs) { t.Errorf("expected gossiped configs to be equal %s vs %s", configs, expConfigs) } } // TestRangeGossipConfigUpdates verifies that writes to the // permissions cause the updated configs to be re-gossipped. func TestRangeGossipConfigUpdates(t *testing.T) { r, g := createTestRange(createTestEngine(t), t) defer r.Stop() // Add a permission for a new key prefix. db1Perm := PermConfig{ Perms: []Permission{ {Users: []string{"spencer"}, Read: true, Write: true, Priority: 100.0}, }, } key := MakeKey(KeyConfigPermissionPrefix, Key("/db1")) reply := &PutResponse{} var buf bytes.Buffer if err := gob.NewEncoder(&buf).Encode(db1Perm); err != nil { t.Fatal(err) } r.Put(&PutRequest{Key: key, Value: Value{Bytes: buf.Bytes()}}, reply) if reply.Error != nil { t.Fatal(reply.Error) } info, err := g.GetInfo(gossip.KeyConfigPermission) if err != nil { t.Fatal(err) } configs := info.([]*prefixConfig) expConfigs := []*prefixConfig{ &prefixConfig{KeyMin, &testDefaultPermConfig}, &prefixConfig{Key("/db1"), &db1Perm}, } if !reflect.DeepEqual(configs, expConfigs) { t.Errorf("expected gossiped configs to be equal %s vs %s", configs, expConfigs) } }<|fim▁end|>
{gossip.KeyConfigAccounting, []*prefixConfig{&prefixConfig{KeyMin, &testDefaultAcctConfig}}}, {gossip.KeyConfigPermission, []*prefixConfig{&prefixConfig{KeyMin, &testDefaultPermConfig}}},
<|file_name|>vsock_others.go<|end_file_name|><|fim▁begin|>//go:build !linux // +build !linux package vsock import ( "fmt" "net" "os" "runtime" "syscall" "time" ) // errUnimplemented is returned by all functions on platforms that // cannot make use of VM sockets. var errUnimplemented = fmt.Errorf("vsock: not implemented on %s", runtime.GOOS) func fileListener(_ *os.File) (*Listener, error) { return nil, errUnimplemented } func listen(_, _ uint32, _ *Config) (*Listener, error) { return nil, errUnimplemented } type listener struct{} func (*listener) Accept() (net.Conn, error) { return nil, errUnimplemented }<|fim▁hole|>func (*listener) Close() error { return errUnimplemented } func (*listener) SetDeadline(_ time.Time) error { return errUnimplemented } func dial(_, _ uint32, _ *Config) (*Conn, error) { return nil, errUnimplemented } type conn struct{} func (*conn) Close() error { return errUnimplemented } func (*conn) CloseRead() error { return errUnimplemented } func (*conn) CloseWrite() error { return errUnimplemented } func (*conn) Read(_ []byte) (int, error) { return 0, errUnimplemented } func (*conn) Write(_ []byte) (int, error) { return 0, errUnimplemented } func (*conn) SetDeadline(_ time.Time) error { return errUnimplemented } func (*conn) SetReadDeadline(_ time.Time) error { return errUnimplemented } func (*conn) SetWriteDeadline(_ time.Time) error { return errUnimplemented } func (*conn) SyscallConn() (syscall.RawConn, error) { return nil, errUnimplemented } func contextID() (uint32, error) { return 0, errUnimplemented } func isErrno(_ error, _ int) bool { return false }<|fim▁end|>
func (*listener) Addr() net.Addr { return nil }
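vsock_others.go above is the non-Linux build: every method exists so the package compiles everywhere, but each one returns the single errUnimplemented value. The same stub pattern sketched in Python for consistency with the other examples here — the class and method names are illustrative, not part of any real package:

import platform

class UnimplementedError(OSError):
    def __init__(self):
        super().__init__(f"vsock: not implemented on {platform.system()}")

class Conn:
    # Present on every platform so callers type-check, but unusable
    # where the OS lacks VM-socket support -- mirroring the Go stubs.
    def read(self, n: int) -> bytes: raise UnimplementedError()
    def write(self, data: bytes) -> int: raise UnimplementedError()
    def close(self) -> None: raise UnimplementedError()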
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from .lsm import *
<|file_name|>test_colors.py<|end_file_name|><|fim▁begin|># # Unit Tests for the colors.py functions # # Rajul Srivastava ([email protected]) # import unittest import logging import numpy as np import ginga.colors class TestError(Exception): pass class TestColors(unittest.TestCase): def setUp(self): self.logger = logging.getLogger("TestColors") self.color_list_length = len(ginga.colors.color_dict) # Tests for the lookup_color() funtion def test_lookup_color_white_tuple(self): expected = (1.0, 1.0, 1.0) actual = ginga.colors.lookup_color("white", "tuple") assert np.allclose(expected, actual) def test_lookup_color_black_tuple(self): expected = (0.0, 0.0, 0.0) actual = ginga.colors.lookup_color("black", "tuple") assert np.allclose(expected, actual) def test_lookup_color_white_hash(self): expected = "#ffffff" actual = ginga.colors.lookup_color("white", "hash") assert expected == actual def test_lookup_color_black_black(self): expected = "#000000" actual = ginga.colors.lookup_color("black", "hash") assert expected == actual def test_lookup_color_yellow_tuple(self): expected = (1.0, 1.0, 0.0) actual = ginga.colors.lookup_color("yellow") assert np.allclose(expected, actual) def test_lookup_color_unknown(self): self.assertRaises(KeyError, ginga.colors.lookup_color, "unknown_color") def test_lookup_color_raise_exception_unknown_key(self): self.assertRaises(KeyError, ginga.colors.lookup_color, "unknown_key") def test_lookup_color_raise_exception_unknown_format(self): self.assertRaises(ValueError, ginga.colors.lookup_color, "white", "unknown_format") # Tests for the get_colors() function def test_get_colors_len(self): expected = self.color_list_length actual = len(ginga.colors.get_colors()) assert expected == actual def test_add_and_get_colors_len(self): ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0)) expected = self.color_list_length + 1 actual = len(ginga.colors.get_colors()) assert expected == actual ginga.colors.remove_color("test_color_white") # Tests for the add_color() and remove_color() function def test_add_and_remove_color_len(self): ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0)) expected = self.color_list_length + 1 actual = len(ginga.colors.color_dict) assert expected == actual expected = len(ginga.colors.color_dict) actual = len(ginga.colors.color_list) assert expected == actual ginga.colors.remove_color("test_color_white") expected = self.color_list_length actual = len(ginga.colors.color_dict) assert expected == actual expected = len(ginga.colors.color_dict) actual = len(ginga.colors.color_list) assert expected == actual def test_add_and_remove_color_rbg(self): ginga.colors.add_color("test_color_white", (0.0, 0.0, 0.0)) expected = (0.0, 0.0, 0.0) actual = ginga.colors.lookup_color("test_color_white") assert np.allclose(expected, actual) ginga.colors.remove_color("test_color_white") self.assertRaises(KeyError, ginga.colors.remove_color, "test_color_white") def test_add_color_wrong_rbg_type(self): self.assertRaises(TypeError, ginga.colors.add_color, "white", "string_wrong_format") def test_add_color_wrong_rbg_values(self): self.assertRaises(ValueError, ginga.colors.add_color, "test_color", (-1.0, 0.0, 0.0)) def test_add_color_wrong_tuple_length(self): self.assertRaises(ValueError, ginga.colors.add_color, "test_color", (0.0, 0.0)) def test_remove_color_unknown(self): self.assertRaises(KeyError, ginga.colors.remove_color, "unknown_color") # Tests for recalc_color_list() function def test_recalc_color_list(self): ginga.colors.color_dict["test_color_white"] = (0.0, 
0.0, 0.0) expected = len(ginga.colors.color_dict) - 1 actual = len(ginga.colors.color_list) assert expected == actual ginga.colors.recalc_color_list() expected = len(ginga.colors.color_dict) actual = len(ginga.colors.color_list) assert expected == actual del ginga.colors.color_dict["test_color_white"] expected = len(ginga.colors.color_dict) + 1 actual = len(ginga.colors.color_list) assert expected == actual ginga.colors.recalc_color_list() <|fim▁hole|> # Tests for scan_rgbtxt_buf() function def test_scan_rgbtxt_buf(self): test_rgb_lines = ''' 255 255 255 white 0 0 0 black 255 0 0 red 0 255 0 green 0 0 255 blue ''' result = ginga.colors.scan_rgbtxt_buf(test_rgb_lines) assert isinstance(result, dict) expected = 5 actual = len(result) assert expected == actual expected = (1.0, 1.0, 1.0) actual = result["white"] assert np.allclose(expected, actual) def tearDown(self): pass if __name__ == '__main__': unittest.main() #END<|fim▁end|>
expected = len(ginga.colors.color_dict) actual = len(ginga.colors.color_list) assert expected == actual
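A quick round-trip with the API exercised by the tests above; the color name and RGB triple are arbitrary, and the hash output shown is an assumption about the 'hash' format (0.5 scaling to 0x7f):

import ginga.colors

ginga.colors.add_color("demo_gray", (0.5, 0.5, 0.5))
assert ginga.colors.lookup_color("demo_gray") == (0.5, 0.5, 0.5)
print(ginga.colors.lookup_color("demo_gray", "hash"))  # e.g. '#7f7f7f'
ginga.colors.remove_color("demo_gray")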
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>""" The MIT License (MIT) Copyright (c) 2014 Chris Wimbrow Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: <|fim▁hole|>THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """<|fim▁end|>
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
<|file_name|>difficulty_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package tests import ( "path/filepath" "strings" "testing" ) func TestETHDifficulty(t *testing.T) { fileNames, _ := filepath.Glob(filepath.Join(ethBasicTestDir, "*")) supportedTests := map[string]bool{<|fim▁hole|> "difficultyByzantium.json": true, } // Loop through each file for _, fn := range fileNames { fileName := fn[strings.LastIndex(fn, "/")+1 : len(fn)] if !supportedTests[fileName] { continue } t.Run(fileName, func(t *testing.T) { config := ChainConfigs[fileName] tests := make(map[string]DifficultyTest) if err := readJsonFile(fn, &tests); err != nil { t.Error(err) } // Loop through each test in file for key, test := range tests { // Subtest within the JSON file t.Run(key, func(t *testing.T) { if err := test.runDifficulty(t, &config); err != nil { t.Error(err) } }) } }) } }<|fim▁end|>
// "difficulty.json": true, // Testing ETH mainnet config "difficultyFrontier.json": true, "difficultyHomestead.json": true,
<|file_name|>ink.formvalidator-2.js<|end_file_name|><|fim▁begin|>/** * Form Validation * @module Ink.UI.FormValidator_2 * @version 2 */ Ink.createModule('Ink.UI.FormValidator', '2', [ 'Ink.UI.Common_1','Ink.Dom.Element_1','Ink.Dom.Event_1','Ink.Dom.Selector_1','Ink.Dom.Css_1','Ink.Util.Array_1','Ink.Util.I18n_1','Ink.Util.Validator_1'], function( Common, Element, Event, Selector, Css, InkArray, I18n, InkValidator ) { 'use strict'; /** * Validation Functions to be used * Some functions are a port from PHP, others are the 'best' solutions available * * @private * @static */ var validationFunctions = { /** * Checks if a value is defined and not empty * @method required * @param {String} value Value to be checked * @return {Boolean} True case is defined, false if it's empty or not defined. */ 'required': function( value ){ return ( (typeof value !== 'undefined') && ( !(/^\s*$/).test(value) ) ); }, /** * Checks if a value has a minimum length * * @method min_length * @param {String} value Value to be checked. * @param {String|Number} minSize Minimum number of characters. * @return {Boolean} True if the length of value is equal or bigger than the minimum chars defined. False if not. */ 'min_length': function( value, minSize ){ return ( (typeof value === 'string') && ( value.length >= parseInt(minSize,10) ) ); }, /** * Checks if a value has a maximum length * * @method max_length * @param {String} value Value to be checked. * @param {String|Number} maxSize Maximum number of characters. * @return {Boolean} True if the length of value is equal or smaller than the maximum chars defined. False if not. */ 'max_length': function( value, maxSize ){ return ( (typeof value === 'string') && ( value.length <= parseInt(maxSize,10) ) ); }, /** * Checks if a value has an exact length * * @method exact_length * @param {String} value Value to be checked * @param {String|Number} exactSize Exact number of characters. * @return {Boolean} True if the length of value is equal to the size defined. False if not. */ 'exact_length': function( value, exactSize ){ return ( (typeof value === 'string') && ( value.length === parseInt(exactSize,10) ) ); }, /** * Checks if a value is a valid email address * * @method email * @param {String} value Value to be checked * @return {Boolean} True if the value is a valid email address. False if not. */ 'email': function( value ){ return ( ( typeof value === 'string' ) && InkValidator.mail( value ) ); }, /** * Checks if a value has a valid URL * * @method url * @param {String} value Value to be checked * @param {Boolean} fullCheck Flag to validate a full url (with the protocol). * @return {Boolean} True if the URL is considered valid. False if not. */ 'url': function( value, fullCheck ){ fullCheck = fullCheck || false; return ( (typeof value === 'string') && InkValidator.url( value, fullCheck ) ); }, /** * Checks if a value is a valid IP. Supports ipv4 and ipv6 * * @method ip * @param {String} value Value to be checked * @param {String} ipType Type of IP to be validated. The values are: ipv4, ipv6. By default is ipv4. * @return {Boolean} True if the value is a valid IP address. False if not. */ 'ip': function( value, ipType ){ if( typeof value !== 'string' ){ return false; } return InkValidator.isIP(value, ipType); }, /** * Checks if a value is a valid phone number. * Supports several countries, based in the Ink.Util.Validator class. 
* * @method phone * @param {String} value Value to be checked * @param {String} phoneType Country's initials to specify the type of phone number to be validated. Ex: 'AO'. * @return {Boolean} True if it's a valid phone number. False if not. */ 'phone': function( value, phoneType ){ if( typeof value !== 'string' ){ return false; } var countryCode = phoneType ? phoneType.toUpperCase() : ''; return InkValidator['is' + countryCode + 'Phone'](value); }, /** * Checks if a value is a valid credit card. * * @method credit_card * @param {String} value Value to be checked * @param {String} cardType Type of credit card to be validated. The card types available are in the Ink.Util.Validator class. * @return {Boolean} True if the value is a valid credit card number. False if not. */ 'credit_card': function( value, cardType ){ if( typeof value !== 'string' ){ return false; } return InkValidator.isCreditCard( value, cardType || 'default' ); }, /** * Checks if a value is a valid date. * * @method date * @param {String} value Value to be checked * @param {String} format Specific format of the date. * @return {Boolean} True if the value is a valid date. False if not. */ 'date': function( value, format ){ return ( (typeof value === 'string' ) && InkValidator.isDate(format, value) ); }, /** * Checks if a value only contains alphabetical values. * * @method alpha * @param {String} value Value to be checked * @param {Boolean} supportSpaces Allow whitespace * @return {Boolean} True if the value is alphabetical-only. False if not. */ 'alpha': function( value, supportSpaces ){ return InkValidator.ascii(value, {singleLineWhitespace: supportSpaces}); }, /* * Checks if a value contains only printable BMP unicode characters * Optionally allow punctuation and whitespace * * @method text * @param {String} value Value to be checked * @return {Boolean} Whether the value only contains printable text characters **/ 'text': function (value, whitespace, punctuation) { return InkValidator.unicode(value, { singleLineWhitespace: whitespace, unicodePunctuation: punctuation}); }, /* * Checks if a value contains only printable latin-1 text characters. * Optionally allow punctuation and whitespace. * * @method text * @param {String} value Value to be checked * @return {Boolean} Whether the value only contains printable text characters **/ 'latin': function (value, punctuation, whitespace) { if ( typeof value !== 'string') { return false; } return InkValidator.latin1(value, {latin1Punctuation: punctuation, singleLineWhitespace: whitespace}); }, /** * Checks if a value contains only alphabetical or numerical characters. * * @method alpha_numeric * @param {String} value Value to be checked * @return {Boolean} True if the value is a valid alphanumerical. False if not. */ 'alpha_numeric': function( value ){ return InkValidator.ascii(value, {numbers: true}); }, /** * Checks if a value contains only alphabetical, dash or underscore characteres. * * @method alpha_dashes * @param {String} value Value to be checked * @return {Boolean} True if the value is a valid. False if not. */ 'alpha_dash': function( value ){ return InkValidator.ascii(value, {dash: true, underscore: true}); }, /** * Checks if a value is a single digit. * * @method digit * @param {String} value Value to be checked * @return {Boolean} True if the value is a valid digit. False if not. */ 'digit': function( value ){ return ((typeof value === 'string') && /^[0-9]{1}$/.test(value)); }, /** * Checks if a value is a valid integer. 
* * @method integer * @param {String} value Value to be checked * @param {String} positive Flag that specifies if the integer is must be positive (unsigned). * @return {Boolean} True if the value is a valid integer. False if not. */ 'integer': function( value, positive ){ return InkValidator.number(value, { negative: !positive, decimalPlaces: 0 }); }, /** * Checks if a value is a valid decimal number. * * @method decimal * @param {String} value Value to be checked * @param {String} decimalSeparator Character that splits the integer part from the decimal one. By default is '.'. * @param {String} [decimalPlaces] Maximum number of digits that the decimal part must have. * @param {String} [leftDigits] Maximum number of digits that the integer part must have, when provided. * @return {Boolean} True if the value is a valid decimal number. False if not. */ 'decimal': function( value, decimalSeparator, decimalPlaces, leftDigits ){ return InkValidator.number(value, { decimalSep: decimalSeparator || '.', decimalPlaces: +decimalPlaces || null, maxDigits: +leftDigits }); }, /** * Checks if a value is a numeric value. * * @method numeric * @param {String} value Value to be checked * @param {String} decimalSeparator Checks if it's a valid decimal. Otherwise checks if it's a valid integer. * @param {String} [decimalPlaces] Maximum number of digits the decimal part must have. * @param {String} [leftDigits] Maximum number of digits the integer part must have, when provided. * @return {Boolean} True if the value is numeric. False if not. */ 'numeric': function( value, decimalSeparator, decimalPlaces, leftDigits ){ decimalSeparator = decimalSeparator || '.'; if( value.indexOf(decimalSeparator) !== -1 ){ return validationFunctions.decimal( value, decimalSeparator, decimalPlaces, leftDigits ); } else { return validationFunctions.integer( value ); } }, /** * Checks if a value is in a specific range of values. * The parameters after the first one are used to specify the range, and are similar in function to python's range() function. * * @method range * @param {String} value Value to be checked * @param {String} minValue Left limit of the range. * @param {String} maxValue Right limit of the range. * @param {String} [multipleOf] In case you want numbers that are only multiples of another number. * @return {Boolean} True if the value is within the range. False if not. */ 'range': function( value, minValue, maxValue, multipleOf ){ value = +value; minValue = +minValue; maxValue = +maxValue; if (isNaN(value) || isNaN(minValue) || isNaN(maxValue)) { return false; } if( value < minValue || value > maxValue ){ return false; } if (multipleOf) { return (value - minValue) % multipleOf === 0; } else { return true; } }, /** * Checks if a value is a valid color. * * @method color * @param {String} value Value to be checked * @return {Boolean} True if the value is a valid color. False if not. */ 'color': function( value ){ return InkValidator.isColor(value); }, /** * Checks if a value matches the value of a different field. * * @method matches * @param {String} value Value to be checked * @param {String} fieldToCompare Name or ID of the field to compare. * @return {Boolean} True if the values match. False if not. 
*/ 'matches': function( value, fieldToCompare ){ return ( value === this.getFormElements()[fieldToCompare][0].getValue() ); } }; /** * Error messages for the validation functions above * @private * @static */ var validationMessages = new I18n({ en_US: { 'formvalidator.required' : 'The {field} filling is mandatory', 'formvalidator.min_length': 'The {field} must have a minimum size of {param1} characters', 'formvalidator.max_length': 'The {field} must have a maximum size of {param1} characters', 'formvalidator.exact_length': 'The {field} must have an exact size of {param1} characters', 'formvalidator.email': 'The {field} must have a valid e-mail address', 'formvalidator.url': 'The {field} must have a valid URL', 'formvalidator.ip': 'The {field} does not contain a valid {param1} IP address', 'formvalidator.phone': 'The {field} does not contain a valid {param1} phone number', 'formvalidator.credit_card': 'The {field} does not contain a valid {param1} credit card', 'formvalidator.date': 'The {field} should contain a date in the {param1} format', 'formvalidator.alpha': 'The {field} should only contain letters', 'formvalidator.text': 'The {field} should only contain alphabetic characters', 'formvalidator.latin': 'The {field} should only contain alphabetic characters', 'formvalidator.alpha_numeric': 'The {field} should only contain letters or numbers', 'formvalidator.alpha_dashes': 'The {field} should only contain letters or dashes', 'formvalidator.digit': 'The {field} should only contain a digit', 'formvalidator.integer': 'The {field} should only contain an integer', 'formvalidator.decimal': 'The {field} should contain a valid decimal number', 'formvalidator.numeric': 'The {field} should contain a number', 'formvalidator.range': 'The {field} should contain a number between {param1} and {param2}', 'formvalidator.color': 'The {field} should contain a valid color', 'formvalidator.matches': 'The {field} should match the field {param1}', 'formvalidator.validation_function_not_found': 'The rule {rule} has not been defined' }, pt_PT: { 'formvalidator.required' : 'Preencher {field} é obrigatório', 'formvalidator.min_length': '{field} deve ter no mínimo {param1} caracteres', 'formvalidator.max_length': '{field} tem um tamanho máximo de {param1} caracteres', 'formvalidator.exact_length': '{field} devia ter exactamente {param1} caracteres', 'formvalidator.email': '{field} deve ser um e-mail válido', 'formvalidator.url': 'O {field} deve ser um URL válido', 'formvalidator.ip': '{field} não tem um endereço IP {param1} válido', 'formvalidator.phone': '{field} deve ser preenchido com um número de telefone {param1} válido.', 'formvalidator.credit_card': '{field} não tem um cartão de crédito {param1} válido', 'formvalidator.date': '{field} deve conter uma data no formato {param1}', 'formvalidator.alpha': 'O campo {field} deve conter apenas caracteres alfabéticos', 'formvalidator.text': 'O campo {field} deve conter apenas caracteres alfabéticos', 'formvalidator.latin': 'O campo {field} deve conter apenas caracteres alfabéticos', 'formvalidator.alpha_numeric': '{field} deve conter apenas letras e números', 'formvalidator.alpha_dashes': '{field} deve conter apenas letras e traços', 'formvalidator.digit': '{field} destina-se a ser preenchido com apenas um dígito', 'formvalidator.integer': '{field} deve conter um número inteiro', 'formvalidator.decimal': '{field} deve conter um número válido', 'formvalidator.numeric': '{field} deve conter um número válido', 'formvalidator.range': '{field} deve conter um número entre 
{param1} e {param2}', 'formvalidator.color': '{field} deve conter uma cor válida', 'formvalidator.matches': '{field} deve corresponder ao campo {param1}', 'formvalidator.validation_function_not_found': '[A regra {rule} não foi definida]' } }, 'en_US'); /** * Constructor of a FormElement. * This type of object has particular methods to parse rules and validate them in a specific DOM Element. * * @param {DOMElement} element DOM Element * @param {Object} options Object with configuration options * @return {FormElement} FormElement object */ var FormElement = function( element, options ){ this._element = Common.elOrSelector( element, 'Invalid FormElement' ); this._errors = {}; this._rules = {}; this._value = null; this._options = Ink.extendObj( { label: this._getLabel() }, Element.data(this._element) ); this._options = Ink.extendObj( this._options, options || {} ); }; /** * FormElement's prototype */ FormElement.prototype = { /** * Function to get the label that identifies the field. * If it can't find one, it will use the name or the id * (depending on what is defined) * * @method _getLabel * @return {String} Label to be used in the error messages * @private */ _getLabel: function(){ var controlGroup = Element.findUpwardsByClass(this._element,'control-group'); var label = Ink.s('label',controlGroup); if( label ){ label = Element.textContent(label); } else { label = this._element.name || this._element.id || ''; } return label; }, /** * Function to parse a rules' string. * Ex: required|number|max_length[30] * * @method _parseRules * @param {String} rules String with the rules * @private */ _parseRules: function( rules ){ this._rules = {}; rules = rules.split("|"); var i, rulesLength = rules.length, rule, params, paramStartPos ; if( rulesLength > 0 ){ for( i = 0; i < rulesLength; i++ ){ rule = rules[i]; if( !rule ){ continue; } if( ( paramStartPos = rule.indexOf('[') ) !== -1 ){ params = rule.substr( paramStartPos+1 ); params = params.split(']'); params = params[0]; params = params.split(','); for (var p = 0, len = params.length; p < len; p++) { params[p] = params[p] === 'true' ? true : params[p] === 'false' ? false : params[p]; } params.splice(0,0,this.getValue()); rule = rule.substr(0,paramStartPos); this._rules[rule] = params; } else { this._rules[rule] = [this.getValue()]; } } } }, /** * Function to add an error to the FormElement's 'errors' object. * It basically receives the rule where the error occurred, the parameters passed to it (if any) * and the error message. * Then it replaces some tokens in the message for a more 'custom' reading * * @method _addError * @param {String|null} rule Rule that failed, or null if no rule was found. * @private * @static */ _addError: function(rule){ var params = this._rules[rule] || []; var paramObj = { field: this._options.label, value: this.getValue() }; for( var i = 1; i < params.length; i++ ){ paramObj['param' + i] = params[i]; } var i18nKey = 'formvalidator.' 
+ rule; this._errors[rule] = validationMessages.text(i18nKey, paramObj); if (this._errors[rule] === i18nKey) { this._errors[rule] = 'Validation message not found'; } }, /** * Gets an element's value * * @method getValue * @return {mixed} The DOM Element's value * @public */ getValue: function(){ switch(this._element.nodeName.toLowerCase()){ case 'select': return Ink.s('option:selected',this._element).value; case 'textarea': return this._element.value; case 'input': if( "type" in this._element ){ if( (this._element.type === 'radio') || (this._element.type === 'checkbox') ){ if( this._element.checked ){ return this._element.value; } } else if( this._element.type !== 'file' ){ return this._element.value; } } else { return this._element.value; } return; default: return this._element.innerHTML; } }, /** * Gets the constructed errors' object. * * @method getErrors * @return {Object} Errors' object * @public */ getErrors: function(){ return this._errors; }, /** * Gets the DOM element related to the instance. * * @method getElement * @return {Object} DOM Element * @public */ getElement: function(){ return this._element; }, /** * Gets other elements in the same form. * * @method getFormElements * @return {Object} A mapping of keys to other elements in this form. * @public */ getFormElements: function () { return this._options.form._formElements; }, /** * Validates the element based on the rules defined. * It parses the rules defined in the _options.rules property. * * @method validate * @return {Boolean} True if every rule was valid. False if one fails. * @public */ validate: function(){ this._errors = {}; if( "rules" in this._options || 1){ this._parseRules( this._options.rules ); } if( ("required" in this._rules) || (this.getValue() !== '') ){ for(var rule in this._rules) { if (this._rules.hasOwnProperty(rule)) { if( (typeof validationFunctions[rule] === 'function') ){ if( validationFunctions[rule].apply(this, this._rules[rule] ) === false ){ this._addError( rule ); return false; } } else { Ink.warn('Rule "' + rule + '" not found. Used in element:', this._element); this._addError( null ); return false; } } } } return true; } }; /** * @class Ink.UI.FormValidator_2 * @version 2 * @constructor * @param {String|DOMElement} selector Either a CSS Selector string, or the form's DOMElement * @param {Object} [options] Options object, containing the following options: * @param {String} [options.eventTrigger] Event that will trigger the validation. Defaults to 'submit'. * @param {Boolean} [options.neverSubmit] Flag to cancel the submit event. Use this to avoid submitting the form. * @param {Selector} [options.searchFor] Selector containing the validation data-attributes. Defaults to 'input, select, textarea, .control-group'. 
* @param {Function} [options.beforeValidation] Callback to be executed before validating the form * @param {Function} [options.onError] Validation error callback * @param {Function} [options.onSuccess] Validation success callback * * @sample Ink_UI_FormValidator_2.html */ var FormValidator = function( selector, options ){ /** * DOMElement of the form being validated * * @property _rootElement * @type {DOMElement} */ this._rootElement = Common.elOrSelector( selector ); /** * Object that will gather the form elements by name * * @property _formElements * @type {Object} */ this._formElements = {}; /** * Error message DOMElements * * @property _errorMessages */ this._errorMessages = []; /** * Array of elements marked with validation errors * * @property _markedErrorElements */ this._markedErrorElements = []; /** * Configuration options. Fetches the data attributes first, then the ones passed when executing the constructor. * By doing that, the latter will be the one with highest priority. * * @property _options * @type {Object} */ this._options = Ink.extendObj({ eventTrigger: 'submit', neverSubmit: 'false', searchFor: 'input, select, textarea, .control-group', beforeValidation: undefined, onError: undefined, onSuccess: undefined },Element.data(this._rootElement)); this._options = Ink.extendObj( this._options, options || {} ); // Sets an event listener for a specific event in the form, if defined. // By default is the 'submit' event. if( typeof this._options.eventTrigger === 'string' ){ Event.observe( this._rootElement,this._options.eventTrigger, Ink.bindEvent(this.validate,this) ); } Common.registerInstance(this, this._rootElement); this._init(); }; /** * Sets or modifies validation functions * * @method setRule * @param {String} name Name of the function. E.g. 'required' * @param {String} errorMessage Error message to be displayed in case of returning false. E.g. 'Oops, you passed {param1} as parameter1, lorem ipsum dolor...' * @param {Function} cb Function to be executed when calling this rule * @public * @static */ FormValidator.setRule = function( name, errorMessage, cb ){ validationFunctions[ name ] = cb; if (validationMessages.getKey('formvalidator.' + name) !== errorMessage) { var langObj = {}; langObj['formvalidator.' + name] = errorMessage; var dictObj = {}; dictObj[validationMessages.lang()] = langObj; validationMessages.append(dictObj); } }; /** * Gets the i18n object in charge of the error messages * * @method getI18n * @return {Ink.Util.I18n} The i18n object the FormValidator is using. */ FormValidator.getI18n = function () { return validationMessages; }; /** * Sets the I18n object for validation error messages * * @method setI18n * @param {Ink.Util.I18n} i18n The I18n object. */ FormValidator.setI18n = function (i18n) { validationMessages = i18n; }; /** * Add to the I18n dictionary. * See `Ink.Util.I18n.append()` documentation. * * @method AppendI18n */<|fim▁hole|> }; /** * Sets the language of the error messages. * pt_PT and en_US are available, but you can add new languages by using append() * * See the `Ink.Util.I18n.lang()` setter * * @method setLanguage * @param language The language to set i18n to. 
*/ FormValidator.setLanguage = function (language) { validationMessages.lang(language); }; /** * Method used to get the existing defined validation functions * * @method getRules * @return {Object} Object with the rules defined * @public * @static */ FormValidator.getRules = function(){ return validationFunctions; }; FormValidator.prototype = { _init: function(){ }, /** * Searches for the elements in the form. * This method is based in the this._options.searchFor configuration. * * @method getElements * @return {Object} An object with the elements in the form, indexed by name/id * @public */ getElements: function(){ this._formElements = {}; var formElements = Selector.select( this._options.searchFor, this._rootElement ); if( formElements.length ){ var i, element; for( i=0; i<formElements.length; i+=1 ){ element = formElements[i]; var dataAttrs = Element.data( element ); if( !("rules" in dataAttrs) ){ continue; } var options = { form: this }; var key; if( ("name" in element) && element.name ){ key = element.name; } else if( ("id" in element) && element.id ){ key = element.id; } else { key = 'element_' + Math.floor(Math.random()*100); element.id = key; } if( !(key in this._formElements) ){ this._formElements[key] = [ new FormElement( element, options ) ]; } else { this._formElements[key].push( new FormElement( element, options ) ); } } } return this._formElements; }, /** * Validates every registered FormElement * This method looks inside the this._formElements object for validation targets. * Also, based on the this._options.beforeValidation, this._options.onError, and this._options.onSuccess, this callbacks are executed when defined. * * @method validate * @param {Event} event Window.event object * @return {Boolean} * @public */ validate: function( event ) { if(this._options.neverSubmit+'' === 'true' && event) { Event.stopDefault(event); } if( typeof this._options.beforeValidation === 'function' ){ this._options.beforeValidation(); } InkArray.each( this._markedErrorElements, function (errorElement) { Css.removeClassName(errorElement, ['validation', 'error']); }); InkArray.each( this._errorMessages, Element.remove); this.getElements(); var errorElements = []; for( var key in this._formElements ){ if( this._formElements.hasOwnProperty(key) ){ for( var counter = 0; counter < this._formElements[key].length; counter+=1 ){ if( !this._formElements[key][counter].validate() ) { errorElements.push(this._formElements[key][counter]); } } } } if( errorElements.length === 0 ){ if( typeof this._options.onSuccess === 'function' ){ this._options.onSuccess(); } // [3.0.0] remove this, it's a little backwards compat quirk if(event && this._options.cancelEventOnSuccess + '' === 'true') { Event.stopDefault(event); return false; } return true; } else { if(event) { Event.stopDefault(event); } if( typeof this._options.onError === 'function' ){ this._options.onError( errorElements ); } this._errorMessages = []; this._markedErrorElements = []; InkArray.each( errorElements, Ink.bind(function( formElement ){ var controlGroupElement; var controlElement; if( Css.hasClassName(formElement.getElement(),'control-group') ){ controlGroupElement = formElement.getElement(); controlElement = Ink.s('.control',formElement.getElement()); } else { controlGroupElement = Element.findUpwardsByClass(formElement.getElement(),'control-group'); controlElement = Element.findUpwardsByClass(formElement.getElement(),'control'); } if(controlGroupElement) { Css.addClassName( controlGroupElement, ['validation', 'error'] ); 
this._markedErrorElements.push(controlGroupElement); } var paragraph = document.createElement('p'); Css.addClassName(paragraph,'tip'); if (controlElement || controlGroupElement) { (controlElement || controlGroupElement).appendChild(paragraph); } else { Element.insertAfter(paragraph, formElement.getElement()); } var errors = formElement.getErrors(); var errorArr = []; for (var k in errors) { if (errors.hasOwnProperty(k)) { errorArr.push(errors[k]); } } paragraph.innerHTML = errorArr.join('<br/>'); this._errorMessages.push(paragraph); }, this)); return false; } } }; /** * Returns the FormValidator's Object */ return FormValidator; });<|fim▁end|>
FormValidator.appendI18n = function () { validationMessages.append.apply(validationMessages, [].slice.call(arguments));
<|file_name|>CumulusCI.py<|end_file_name|><|fim▁begin|>import logging from cumulusci.cli.config import CliConfig from cumulusci.core.config import TaskConfig from cumulusci.core.exceptions import TaskNotFoundError from cumulusci.core.exceptions import TaskOptionsError from cumulusci.core.tasks import CURRENT_TASK from cumulusci.core.utils import import_class from cumulusci.tasks.robotframework.robotframework import Robot from robot.api import logger from robot.libraries.BuiltIn import BuiltIn from simple_salesforce import Salesforce class CumulusCI(object): """ Library for accessing CumulusCI for the local git project This library allows Robot Framework tests to access credentials to a Salesforce org created by CumulusCI, including Scratch Orgs. It also exposes the core logic of CumulusCI including interactions with the Salesforce API's and project specific configuration including custom and customized tasks and flows. Initialization requires a single argument, the org name for the target CumulusCI org. If running your tests via cci's robot task (recommended), you can initialize the library in your tests taking advantage of the variable set by the robot task: | ``*** Settings ***`` | | Library cumulusci.robotframework.CumulusCI ${ORG} """ ROBOT_LIBRARY_SCOPE = "GLOBAL" def __init__(self, org_name=None): if not org_name: org_name = 'dev' self.org_name = org_name self._project_config = None self._org = None self._sf = None self._tooling = None # Turn off info logging of all http requests logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN) @property def project_config(self): if self._project_config is None: if CURRENT_TASK and isinstance(CURRENT_TASK, Robot): # If CumulusCI is running a task, use that task's config return CURRENT_TASK.project_config else: logger.console('Initializing CumulusCI config\n') self._project_config = CliConfig().project_config return self._project_config def set_project_config(self, project_config): logger.console('\n') self._project_config = project_config @property def keychain(self): return self.project_config.keychain @property def org(self): if self._org is None: if CURRENT_TASK and isinstance(CURRENT_TASK, Robot): # If CumulusCI is running a task, use that task's org return CURRENT_TASK.org_config else: self._org = self.keychain.get_org(self.org_name) return self._org @property def sf(self): if self._sf is None: self._sf = self._init_api() return self._sf @property def tooling(self): if self._tooling is None: self._tooling = self._init_api('tooling/') return self._tooling def set_login_url(self): """ Sets the LOGIN_URL variable in the suite scope which will automatically log into the target Salesforce org. Typically, this is run during Suite Setup """ BuiltIn().set_suite_variable('${LOGIN_URL}', self.org.start_url) def get_org_info(self): """ Returns a dictionary of the org information for the current target Salesforce org<|fim▁hole|> def login_url(self, org=None): """ Returns the login url which will automatically log into the target Salesforce org. By default, the org_name passed to the library constructor is used but this can be overridden with the org option to log into a different org. """ if org is None: org = self.org else: org = self.keychain.get_org(org) return org.start_url def run_task(self, task_name, **options): """ Runs a named CumulusCI task for the current project with optional support for overriding task options via kwargs. 
Examples: | =Keyword= | =task_name= | =task_options= | =comment= | | Run Task | deploy | | Run deploy with standard options | | Run Task | deploy | path=path/to/some/metadata | Run deploy with custom path | """ task_config = self.project_config.get_task(task_name) class_path = task_config.class_path logger.console('\n') task_class, task_config = self._init_task(class_path, options, task_config) return self._run_task(task_class, task_config) def run_task_class(self, class_path, **options): """ Runs a CumulusCI task class with task options via kwargs. Use this keyword to run logic from CumulusCI tasks which have not been configured in the project's cumulusci.yml file. This is most useful in cases where a test needs to use task logic for logic unique to the test and thus not worth making into a named task for the project Examples: | =Keyword= | =task_class= | =task_options= | | Run Task Class | cumulusci.task.utils.DownloadZip | url=http://test.com/test.zip dir=test_zip | """ logger.console('\n') task_class, task_config = self._init_task(class_path, options, TaskConfig()) return self._run_task(task_class, task_config) def _init_api(self, base_url=None): api_version = self.project_config.project__package__api_version rv = Salesforce( instance=self.org.instance_url.replace('https://', ''), session_id=self.org.access_token, version=api_version, ) if base_url is not None: rv.base_url += base_url return rv def _init_task(self, class_path, options, task_config): task_class = import_class(class_path) task_config = self._parse_task_options(options, task_class, task_config) return task_class, task_config def _parse_task_options(self, options, task_class, task_config): if 'options' not in task_config.config: task_config.config['options'] = {} # Parse options and add to task config if options: for name, value in options.items(): # Validate the option if name not in task_class.task_options: raise TaskOptionsError( 'Option "{}" is not available for task {}'.format( name, task_name, ), ) # Override the option in the task config task_config.config['options'][name] = value return task_config def _run_task(self, task_class, task_config): exception = None task = task_class(self.project_config, task_config, org_config=self.org) task() return task.return_values<|fim▁end|>
""" return self.org.config
<|file_name|>relative-date-formatter-example.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-magic-numbers */ import { Component } from '@angular/core'; import { DateAdapter, MC_DATE_LOCALE } from '@ptsecurity/cdk/datetime'; import { LuxonDateAdapter } from '@ptsecurity/mosaic-luxon-adapter/adapter'; import { DateFormatter } from '@ptsecurity/mosaic/core'; import { DateTime } from 'luxon'; /** * @title Basic progress relative-date-formatter */ @Component({ selector: 'relative-date-formatter-example', templateUrl: 'relative-date-formatter-example.html', styleUrls: ['relative-date-formatter-example.css'], providers: [ { provide: MC_DATE_LOCALE, useValue: 'ru' }, { provide: DateAdapter, useClass: LuxonDateAdapter, deps: [MC_DATE_LOCALE] } ] }) export class RelativeDateFormatterExample { formats = { ru: { relative: { long: { beforeYesterdayNotCurrentYear: '', beforeYesterdayCurrentYear: '', beforeYesterdayCurrentYearSeconds: '', beforeYesterdayCurrentYearMilliseconds: '', yesterday: '', yesterdaySeconds: '', yesterdayMilliseconds: '', today: '', todaySeconds: '', todayMilliseconds: '', tomorrow: '', tomorrowSeconds: '', tomorrowMilliseconds: '', afterTomorrowCurrentYear: '',<|fim▁hole|> afterTomorrowNotCurrentYear: '' }, short: { beforeYesterdayNotCurrentYear: '', beforeYesterdayCurrentYear: '', beforeYesterdayCurrentYearSeconds: '', beforeYesterdayCurrentYearMilliseconds: '', yesterday: '', yesterdaySeconds: '', yesterdayMilliseconds: '', today: '', todaySeconds: '', todayMilliseconds: '', tomorrow: '', tomorrowSeconds: '', tomorrowMilliseconds: '', afterTomorrowCurrentYear: '', afterTomorrowCurrentYearSeconds: '', afterTomorrowCurrentYearMilliseconds: '', afterTomorrowNotCurrentYear: '' } } }, en: { relative: { long: { beforeYesterdayNotCurrentYear: '', beforeYesterdayCurrentYear: '', beforeYesterdayCurrentYearSeconds: '', beforeYesterdayCurrentYearMilliseconds: '', yesterday: '', yesterdaySeconds: '', yesterdayMilliseconds: '', today: '', todaySeconds: '', todayMilliseconds: '', tomorrow: '', tomorrowSeconds: '', tomorrowMilliseconds: '', afterTomorrowCurrentYear: '', afterTomorrowCurrentYearSeconds: '', afterTomorrowCurrentYearMilliseconds: '', afterTomorrowNotCurrentYear: '' }, short: { beforeYesterdayNotCurrentYear: '', beforeYesterdayCurrentYear: '', beforeYesterdayCurrentYearSeconds: '', beforeYesterdayCurrentYearMilliseconds: '', yesterday: '', yesterdaySeconds: '', yesterdayMilliseconds: '', today: '', todaySeconds: '', todayMilliseconds: '', tomorrow: '', tomorrowSeconds: '', tomorrowMilliseconds: '', afterTomorrowCurrentYear: '', afterTomorrowCurrentYearSeconds: '', afterTomorrowCurrentYearMilliseconds: '', afterTomorrowNotCurrentYear: '' } } } }; constructor(private adapter: DateAdapter<DateTime>, private dateFormatter: DateFormatter<DateTime>) { this.populateRelativeLong('ru'); this.populateRelativeLong('en'); this.populateRelativeShort('ru'); this.populateRelativeShort('en'); } private populateRelativeShort(locale: string) { this.dateFormatter.setLocale(locale); this.adapter.setLocale(locale); const relativeShort = this.formats[locale].relative.short; const now = this.adapter.today(); relativeShort.beforeYesterdayNotCurrentYear = this.dateFormatter.relativeShortDate( now.minus({ years: 1, days: 2 }) ); relativeShort.beforeYesterdayCurrentYear = this.dateFormatter.relativeShortDate(now.minus({ days: 2 })); relativeShort.beforeYesterdayCurrentYearSeconds = this.dateFormatter.relativeShortDateTime( now.minus({ days: 2 }), {seconds: true} ); 
relativeShort.beforeYesterdayCurrentYearMilliseconds = this.dateFormatter.relativeShortDateTime( now.minus({ days: 2 }), {milliseconds: true} ); relativeShort.yesterday = this.dateFormatter.relativeShortDate(now.minus({ days: 1 })); relativeShort.yesterdaySeconds = this.dateFormatter.relativeShortDateTime( now.minus({ days: 1 }), {seconds: true} ); relativeShort.yesterdayMilliseconds = this.dateFormatter.relativeShortDateTime( now.minus({ days: 1 }), {milliseconds: true} ); relativeShort.today = this.dateFormatter.relativeShortDate(now.minus({ hours: 1 })); relativeShort.todaySeconds = this.dateFormatter.relativeShortDateTime( now.minus({ hours: 1 }), {seconds: true} ); relativeShort.todayMilliseconds = this.dateFormatter.relativeShortDateTime( now.minus({ hours: 1 }), {milliseconds: true} ); relativeShort.tomorrow = this.dateFormatter.relativeShortDate(now.plus({ days: 1, hours: 1 })); relativeShort.tomorrowSeconds = this.dateFormatter.relativeShortDateTime( now.plus({ days: 1, hours: 1 }), {seconds: true} ); relativeShort.tomorrowMilliseconds = this.dateFormatter.relativeShortDateTime( now.plus({ days: 1, hours: 1 }), {milliseconds: true} ); relativeShort.afterTomorrowCurrentYear = this.dateFormatter.relativeShortDate(now.plus({ days: 2 })); relativeShort.afterTomorrowCurrentYearSeconds = this.dateFormatter.relativeShortDateTime( now.plus({ days: 2 }), {seconds: true} ); relativeShort.afterTomorrowCurrentYearMilliseconds = this.dateFormatter.relativeShortDateTime( now.plus({ days: 2 }), {milliseconds: true} ); relativeShort.afterTomorrowNotCurrentYear = this.dateFormatter.relativeShortDate( now.plus({ years: 1, days: 2 }) ); } private populateRelativeLong(locale: string) { this.dateFormatter.setLocale(locale); this.adapter.setLocale(locale); const relativeLong = this.formats[locale].relative.long; const now = this.adapter.today(); relativeLong.beforeYesterdayNotCurrentYear = this.dateFormatter.relativeLongDate( now.minus({ years: 1, days: 2 }) ); relativeLong.beforeYesterdayCurrentYear = this.dateFormatter.relativeLongDate(now.minus({ days: 2 })); relativeLong.beforeYesterdayCurrentYearSeconds = this.dateFormatter.relativeLongDateTime(now.minus({ days: 2 }), {seconds: true}); relativeLong.beforeYesterdayCurrentYearMilliseconds = this.dateFormatter.relativeLongDateTime( now.minus({ days: 2 }), {milliseconds: true} ); relativeLong.yesterday = this.dateFormatter.relativeLongDate(now.minus({ days: 1 })); relativeLong.yesterdaySeconds = this.dateFormatter.relativeLongDateTime(now.minus({ days: 1 }), {seconds: true}); relativeLong.yesterdayMilliseconds = this.dateFormatter.relativeLongDateTime(now.minus({ days: 1 }), {milliseconds: true}); relativeLong.today = this.dateFormatter.relativeLongDate(now.minus({ hours: 1 })); relativeLong.todaySeconds = this.dateFormatter.relativeLongDateTime(now.minus({ hours: 1 }), {seconds: true}); relativeLong.todayMilliseconds = this.dateFormatter.relativeLongDateTime(now.minus({ hours: 1 }), {milliseconds: true}); relativeLong.tomorrow = this.dateFormatter.relativeLongDate(now.plus({ days: 1, hours: 1 })); relativeLong.tomorrowSeconds = this.dateFormatter.relativeLongDateTime(now.plus({ days: 1, hours: 1 }), {seconds: true}); relativeLong.tomorrowMilliseconds = this.dateFormatter.relativeLongDateTime(now.plus({ days: 1, hours: 1 }), {milliseconds: true}); relativeLong.afterTomorrowCurrentYear = this.dateFormatter.relativeLongDate(now.plus({ days: 2 })); relativeLong.afterTomorrowCurrentYearSeconds = this.dateFormatter.relativeLongDateTime(now.plus({ days: 2 
}), {seconds: true}); relativeLong.afterTomorrowCurrentYearMilliseconds = this.dateFormatter.relativeLongDateTime( now.plus({ days: 2 }), {milliseconds: true} ); relativeLong.afterTomorrowNotCurrentYear = this.dateFormatter.relativeLongDate( now.plus({ years: 1, days: 2 }) ); } }<|fim▁end|>
afterTomorrowCurrentYearSeconds: '', afterTomorrowCurrentYearMilliseconds: '',
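The TypeScript example above fills a table of relative-date buckets (before yesterday, yesterday, today, tomorrow, after tomorrow, each split by current year). A minimal Python sketch of that bucketing, assuming whole-day deltas are the only criterion, which approximates what the Mosaic DateFormatter distinguishes:

from datetime import date, timedelta

def relative_bucket(d, today):
    # Classify a date into the same buckets the example's format table uses.
    delta = (d - today).days
    year_part = "CurrentYear" if d.year == today.year else "NotCurrentYear"
    if delta <= -2:
        return "beforeYesterday" + year_part
    if delta == -1:
        return "yesterday"
    if delta == 0:
        return "today"
    if delta == 1:
        return "tomorrow"
    return "afterTomorrow" + year_part

t = date(2021, 6, 15)
print(relative_bucket(t - timedelta(days=2), t))    # beforeYesterdayCurrentYear
print(relative_bucket(t + timedelta(days=400), t))  # afterTomorrowNotCurrentYear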
<|file_name|>admin_views.py<|end_file_name|><|fim▁begin|>from db_utils import deleteLinksByHost from db_utils import deleteHost from db_utils import addNewHost from db_utils import getAllHosts from error_message import showErrorPage from error_message import ErrorMessages import utils import webapp2 from google.appengine.api import users from google.appengine.ext import ndb JINJA_ENVIRONMENT = utils.getJinjaEnvironment() class AddHost(webapp2.RequestHandler): def get(self): """ descripion: adds a new host to the database, and redirect to '/' params: name - host name interval - pinging interval for all the links belonging to the host. response: redirect to '/admin' """ name = self.request.get('name') if name is None or len(name) == 0: showErrorPage(self, ErrorMessages.invalidHostName()) return if ndb.Key('Host', name).get() is not None: showErrorPage(self, ErrorMessages.duplicatingHostName()) return try: interval = int(self.request.get('interval')) except ValueError: showErrorPage(self, ErrorMessages.invalidHostInterval()) return if interval == 0: showErrorPage(self, ErrorMessages.invalidHostInterval()) return addNewHost(name, interval) self.redirect('/admin') <|fim▁hole|> deletes an existing host, and redirects to '/'. All the links belonging to the host will also be deleted. params: name - host name response: redirect to '/' """ name = self.request.get('name') if name is None or len(name) == 0: showErrorPage(self, ErrorMessages.invalidHostName()) return hostKey = ndb.Key('Host', name) if hostKey.get() is None: showErrorPage(self, ErrorMessages.hostDoesNotExist()) return deleteLinksByHost(name) deleteHost(name) self.redirect('/') class AdminPage(webapp2.RequestHandler): def get(self): user = users.get_current_user() hosts = getAllHosts() template_values = { 'hosts': hosts, 'user': user, } template = JINJA_ENVIRONMENT.get_template('admin.html') self.response.write(template.render(template_values)) app = webapp2.WSGIApplication([ ('/admin/host/add', AddHost), ('/admin/host/delete', DeleteHost), ], debug=True)<|fim▁end|>
class DeleteHost(webapp2.RequestHandler): def get(self): """ description:
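The AddHost/DeleteHost handlers in this row share one validation pattern: reject a missing or duplicate name, reject a non-integer or zero interval, and render an error page otherwise. A framework-free sketch of that flow; the (host, error) return shape is an assumption for illustration, not part of the webapp2 code above:

def parse_new_host(params, existing_names):
    # Mirror AddHost.get's checks: name present, not duplicated, interval a nonzero int.
    name = params.get("name")
    if not name:
        return None, "invalid host name"
    if name in existing_names:
        return None, "duplicating host name"
    try:
        interval = int(params.get("interval", ""))
    except ValueError:
        return None, "invalid host interval"
    if interval == 0:
        return None, "invalid host interval"
    return {"name": name, "interval": interval}, None

print(parse_new_host({"name": "web01", "interval": "60"}, set()))  # ({'name': 'web01', 'interval': 60}, None)
print(parse_new_host({"name": "web01", "interval": "0"}, set()))   # (None, 'invalid host interval')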
<|file_name|>pseudo_legals.rs<|end_file_name|><|fim▁begin|>pub fn pseudo_legal_moves<T: PieceType>(src: Square, color: Color, combined: BitBoard, mask: BitBoard) {<|fim▁hole|><|fim▁end|>
}
<|file_name|>osfm.py<|end_file_name|><|fim▁begin|>""" OpenSfM related utils """ import os, shutil, sys, json, argparse import yaml from opendm import io from opendm import log from opendm import system from opendm import context from opendm import camera from opendm.utils import get_depthmap_resolution from opendm.photo import find_largest_photo_dim from opensfm.large import metadataset from opensfm.large import tools from opensfm.actions import undistort from opensfm.dataset import DataSet from opendm.multispectral import get_photos_by_band class OSFMContext: def __init__(self, opensfm_project_path): self.opensfm_project_path = opensfm_project_path def run(self, command): system.run('%s/bin/opensfm %s "%s"' % (context.opensfm_path, command, self.opensfm_project_path)) def is_reconstruction_done(self): tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv') reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json') return io.file_exists(tracks_file) and io.file_exists(reconstruction_file) def reconstruct(self, rerun=False): tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv') reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json') if not io.file_exists(tracks_file) or rerun: self.run('create_tracks') else: log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % tracks_file) if not io.file_exists(reconstruction_file) or rerun: self.run('reconstruct') else: log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % reconstruction_file) # Check that a reconstruction file has been created if not self.reconstructed(): log.ODM_ERROR("The program could not process this dataset using the current settings. " "Check that the images have enough overlap, " "that there are enough recognizable features " "and that the images are in focus. " "You could also try to increase the --min-num-features parameter." 
"The program will now exit.") exit(1) def setup(self, args, images_path, reconstruction, append_config = [], rerun=False): """ Setup a OpenSfM project """ if rerun and io.dir_exists(self.opensfm_project_path): shutil.rmtree(self.opensfm_project_path) if not io.dir_exists(self.opensfm_project_path): system.mkdir_p(self.opensfm_project_path) list_path = os.path.join(self.opensfm_project_path, 'image_list.txt') if not io.file_exists(list_path) or rerun: if reconstruction.multi_camera: photos = get_photos_by_band(reconstruction.multi_camera, args.primary_band) if len(photos) < 1: raise Exception("Not enough images in selected band %s" % args.primary_band.lower()) log.ODM_INFO("Reconstruction will use %s images from %s band" % (len(photos), args.primary_band.lower())) else: photos = reconstruction.photos # create file list has_alt = True has_gps = False with open(list_path, 'w') as fout: for photo in photos: if not photo.altitude: has_alt = False if photo.latitude is not None and photo.longitude is not None: has_gps = True fout.write('%s\n' % os.path.join(images_path, photo.filename)) # check for image_groups.txt (split-merge) image_groups_file = os.path.join(args.project_path, "image_groups.txt") if io.file_exists(image_groups_file): log.ODM_INFO("Copied image_groups.txt to OpenSfM directory") io.copy(image_groups_file, os.path.join(self.opensfm_project_path, "image_groups.txt")) # check for cameras if args.cameras: try: camera_overrides = camera.get_opensfm_camera_models(args.cameras) with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f: f.write(json.dumps(camera_overrides)) log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory") except Exception as e: log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e)) use_bow = args.matcher_type == "bow" feature_type = "SIFT" # GPSDOP override if we have GPS accuracy information (such as RTK) if 'gps_accuracy_is_set' in args: log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy) log.ODM_INFO("Writing exif overrides") exif_overrides = {} for p in photos: if 'gps_accuracy_is_set' in args: dop = args.gps_accuracy elif p.get_gps_dop() is not None: dop = p.get_gps_dop() else: dop = args.gps_accuracy # default value if p.latitude is not None and p.longitude is not None: exif_overrides[p.filename] = { 'gps': { 'latitude': p.latitude, 'longitude': p.longitude, 'altitude': p.altitude if p.altitude is not None else 0, 'dop': dop, } } with open(os.path.join(self.opensfm_project_path, "exif_overrides.json"), 'w') as f: f.write(json.dumps(exif_overrides)) # Check image masks masks = [] for p in photos: if p.mask is not None: masks.append((p.filename, os.path.join(images_path, p.mask))) if masks: log.ODM_INFO("Found %s image masks" % len(masks)) with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f: for fname, mask in masks: f.write("{} {}\n".format(fname, mask)) # Compute feature_process_size feature_process_size = 2048 # default if 'resize_to_is_set' in args: # Legacy log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). 
Use --feature-quality instead.") feature_process_size = int(args.resize_to) else: feature_quality_scale = { 'ultra': 1, 'high': 0.5, 'medium': 0.25, 'low': 0.125, 'lowest': 0.0675, } max_dim = find_largest_photo_dim(photos) if max_dim > 0: log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim)) feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality]) else: log.ODM_WARNING("Cannot compute max image dimensions, going with defaults") depthmap_resolution = get_depthmap_resolution(args, photos) # create config file for OpenSfM config = [ "use_exif_size: no", "flann_algorithm: KDTREE", # more stable, faster than KMEANS "feature_process_size: %s" % feature_process_size, "feature_min_frames: %s" % args.min_num_features, "processes: %s" % args.max_concurrency, "matching_gps_neighbors: %s" % args.matcher_neighbors, "matching_gps_distance: %s" % args.matcher_distance, "depthmap_method: %s" % args.opensfm_depthmap_method, "depthmap_resolution: %s" % depthmap_resolution, "depthmap_min_patch_sd: %s" % args.opensfm_depthmap_min_patch_sd, "depthmap_min_consistent_views: %s" % args.opensfm_depthmap_min_consistent_views, "optimize_camera_parameters: %s" % ('no' if args.use_fixed_camera_params or args.cameras else 'yes'), "undistorted_image_format: tif", "bundle_outlier_filtering_type: AUTO", "align_orientation_prior: vertical", "triangulation_type: ROBUST" ] if args.camera_lens != 'auto': config.append("camera_projection_type: %s" % args.camera_lens.upper()) if not has_gps: log.ODM_INFO("No GPS information, using BOW matching") use_bow = True feature_type = args.feature_type.upper() if use_bow: config.append("matcher_type: WORDS") # Cannot use SIFT with BOW if feature_type == "SIFT": log.ODM_WARNING("Using BOW matching, will use HAHOG feature type, not SIFT") feature_type = "HAHOG" config.append("feature_type: %s" % feature_type) if has_alt: log.ODM_INFO("Altitude data detected, enabling it for GPS alignment") config.append("use_altitude_tag: yes") gcp_path = reconstruction.gcp.gcp_path if has_alt or gcp_path: config.append("align_method: auto") else: config.append("align_method: orientation_prior") if args.use_hybrid_bundle_adjustment: log.ODM_INFO("Enabling hybrid bundle adjustment") config.append("bundle_interval: 100") # Bundle after adding 'bundle_interval' cameras config.append("bundle_new_points_ratio: 1.2") # Bundle when (new points) / (bundled points) > bundle_new_points_ratio config.append("local_bundle_radius: 1") # Max image graph distance for images to be included in local bundle adjustment else: config.append("local_bundle_radius: 0") if gcp_path: config.append("bundle_use_gcp: yes") if not args.force_gps: config.append("bundle_use_gps: no") io.copy(gcp_path, self.path("gcp_list.txt")) config = config + append_config # write config file log.ODM_INFO(config) config_filename = self.get_config_file_path() with open(config_filename, 'w') as fout: fout.write("\n".join(config)) else: log.ODM_WARNING("%s already exists, not rerunning OpenSfM setup" % list_path) def get_config_file_path(self): return os.path.join(self.opensfm_project_path, 'config.yaml') def reconstructed(self): if not io.file_exists(self.path("reconstruction.json")): return False with open(self.path("reconstruction.json"), 'r') as f: return f.readline().strip() != "[]" def extract_metadata(self, rerun=False): metadata_dir = self.path("exif") if not io.dir_exists(metadata_dir) or rerun: self.run('extract_metadata') def is_feature_matching_done(self): features_dir = self.path("features") matches_dir = 
self.path("matches") return io.dir_exists(features_dir) and io.dir_exists(matches_dir) def feature_matching(self, rerun=False): features_dir = self.path("features") matches_dir = self.path("matches") if not io.dir_exists(features_dir) or rerun: self.run('detect_features') else: log.ODM_WARNING('Detect features already done: %s exists' % features_dir) if not io.dir_exists(matches_dir) or rerun: self.run('match_features') else: log.ODM_WARNING('Match features already done: %s exists' % matches_dir) def align_reconstructions(self, rerun): alignment_file = self.path('alignment_done.txt') if not io.file_exists(alignment_file) or rerun: log.ODM_INFO("Aligning submodels...") meta_data = metadataset.MetaDataSet(self.opensfm_project_path) reconstruction_shots = tools.load_reconstruction_shots(meta_data) transformations = tools.align_reconstructions(reconstruction_shots, tools.partial_reconstruction_name, True) tools.apply_transformations(transformations) self.touch(alignment_file) else: log.ODM_WARNING('Found a alignment done progress file in: %s' % alignment_file) def touch(self, file): with open(file, 'w') as fout: fout.write("Done!\n") def path(self, *paths): return os.path.join(self.opensfm_project_path, *paths) def extract_cameras(self, output, rerun=False): if not os.path.exists(output) or rerun: try: reconstruction_file = self.path("reconstruction.json") with open(output, 'w') as fout: fout.write(json.dumps(camera.get_cameras_from_opensfm(reconstruction_file), indent=4)) except Exception as e: log.ODM_WARNING("Cannot export cameras to %s. %s." % (output, str(e))) else: log.ODM_INFO("Already extracted cameras") def convert_and_undistort(self, rerun=False, imageFilter=None, image_list=None, runId="nominal"): log.ODM_INFO("Undistorting %s ..." % self.opensfm_project_path) done_flag_file = self.path("undistorted", "%s_done.txt" % runId) if not io.file_exists(done_flag_file) or rerun: ds = DataSet(self.opensfm_project_path) if image_list is not None: ds._set_image_list(image_list) undistort.run_dataset(ds, "reconstruction.json", 0, None, "undistorted", imageFilter) self.touch(done_flag_file) else: log.ODM_WARNING("Already undistorted (%s)" % runId) def restore_reconstruction_backup(self): if os.path.exists(self.recon_backup_file()): # This time export the actual reconstruction.json # (containing only the primary band) if os.path.exists(self.recon_file()): os.remove(self.recon_file()) os.rename(self.recon_backup_file(), self.recon_file()) log.ODM_INFO("Restored reconstruction.json") def backup_reconstruction(self): if os.path.exists(self.recon_backup_file()): os.remove(self.recon_backup_file()) log.ODM_INFO("Backing up reconstruction") shutil.copyfile(self.recon_file(), self.recon_backup_file()) def recon_backup_file(self): return self.path("reconstruction.backup.json") def recon_file(self): return self.path("reconstruction.json") def add_shots_to_reconstruction(self, p2s): with open(self.recon_file()) as f: reconstruction = json.loads(f.read()) # Augment reconstruction.json for recon in reconstruction: shots = recon['shots'] sids = list(shots) for shot_id in sids: secondary_photos = p2s.get(shot_id) if secondary_photos is None: log.ODM_WARNING("Cannot find secondary photos for %s" % shot_id) continue for p in secondary_photos: shots[p.filename] = shots[shot_id] with open(self.recon_file(), 'w') as f: f.write(json.dumps(reconstruction)) def update_config(self, cfg_dict): cfg_file = self.get_config_file_path() log.ODM_INFO("Updating %s" % cfg_file) if os.path.exists(cfg_file): try: with 
open(cfg_file) as fin: cfg = yaml.safe_load(fin) for k, v in cfg_dict.items(): cfg[k] = v log.ODM_INFO("%s: %s" % (k, v)) with open(cfg_file, 'w') as fout: fout.write(yaml.dump(cfg, default_flow_style=False)) except Exception as e: log.ODM_WARNING("Cannot update configuration file %s: %s" % (cfg_file, str(e))) else: log.ODM_WARNING("Tried to update configuration, but %s does not exist." % cfg_file) def name(self): return os.path.basename(os.path.abspath(self.path(".."))) def get_submodel_argv(args, submodels_path = None, submodel_name = None): """ Gets argv for a submodel starting from the args passed to the application startup. Additionally, if project_name, submodels_path and submodel_name are passed, the function handles the <project name> value and --project-path detection / override. When all arguments are set to None, --project-path and project name are always removed. :return the same as argv, but removing references to --split, setting/replacing --project-path and name removing --rerun-from, --rerun, --rerun-all, --sm-cluster removing --pc-las, --pc-csv, --pc-ept, --tiles flags (processing these is wasteful) adding --orthophoto-cutline adding --dem-euclidean-map adding --skip-3dmodel (split-merge does not support 3D model merging) tweaking --crop if necessary (DEM merging makes assumption about the area of DEMs and their euclidean maps that require cropping. If cropping is skipped, this leads to errors.) removing --gcp (the GCP path if specified is always "gcp_list.txt") reading the contents of --cameras """ assure_always = ['orthophoto_cutline', 'dem_euclidean_map', 'skip_3dmodel'] remove_always = ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept', 'tiles'] read_json_always = ['cameras'] argv = sys.argv result = [argv[0]] # Startup script (/path/to/run.py) args_dict = vars(args).copy() set_keys = [k[:-len("_is_set")] for k in args_dict.keys() if k.endswith("_is_set")] # Handle project name and project path (special case) if "name" in set_keys: del args_dict["name"] set_keys.remove("name") if "project_path" in set_keys: del args_dict["project_path"] set_keys.remove("project_path") # Remove parameters set_keys = [k for k in set_keys if k not in remove_always] # Assure parameters for k in assure_always: if not k in set_keys: set_keys.append(k) args_dict[k] = True # Read JSON always for k in read_json_always: if k in set_keys: try: if isinstance(args_dict[k], str): args_dict[k] = io.path_or_json_string_to_dict(args_dict[k]) if isinstance(args_dict[k], dict): args_dict[k] = json.dumps(args_dict[k]) except ValueError as e: log.ODM_WARNING("Cannot parse/read JSON: {}".format(str(e))) # Handle crop (cannot be zero for split/merge) if "crop" in set_keys: crop_value = float(args_dict["crop"]) if crop_value == 0: crop_value = 0.015625 args_dict["crop"] = crop_value # Populate result for k in set_keys: result.append("--%s" % k.replace("_", "-")) # No second value for booleans if isinstance(args_dict[k], bool) and args_dict[k] == True: continue result.append(str(args_dict[k])) if submodels_path: result.append("--project-path") result.append(submodels_path) if submodel_name: result.append(submodel_name) return result def get_submodel_args_dict(args): submodel_argv = get_submodel_argv(args) result = {} i = 0 while i < len(submodel_argv): arg = submodel_argv[i] next_arg = None if i == len(submodel_argv) - 1 else submodel_argv[i + 1] if next_arg and arg.startswith("--"): if next_arg.startswith("--"): result[arg[2:]] = True 
else: result[arg[2:]] = next_arg i += 1 elif arg.startswith("--"): result[arg[2:]] = True i += 1 return result def get_submodel_paths(submodels_path, *paths): """ :return Existing paths for all submodels """ result = [] if not os.path.exists(submodels_path): return result for f in os.listdir(submodels_path): if f.startswith('submodel'): p = os.path.join(submodels_path, f, *paths) if os.path.exists(p): result.append(p) else: log.ODM_WARNING("Missing %s from submodel %s" % (p, f)) return result def get_all_submodel_paths(submodels_path, *all_paths): """ :return Existing, multiple paths for all submodels as a nested list (all or nothing for each submodel)<|fim▁hole|> [["path/submodel_0000/odm_orthophoto.tif", "path/submodel_0000/dem.tif"], ["path/submodel_0001/odm_orthophoto.tif", "path/submodel_0001/dem.tif"]] """ result = [] if not os.path.exists(submodels_path): return result for f in os.listdir(submodels_path): if f.startswith('submodel'): all_found = True for ap in all_paths: p = os.path.join(submodels_path, f, ap) if not os.path.exists(p): log.ODM_WARNING("Missing %s from submodel %s" % (p, f)) all_found = False if all_found: result.append([os.path.join(submodels_path, f, ap) for ap in all_paths]) return result<|fim▁end|>
if a single file is missing from the submodel, no files are returned for that submodel. (i.e. get_all_submodel_paths("path/", "odm_orthophoto.tif", "dem.tif")) -->
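The submodel helpers at the end of this row all reduce to one scan: iterate submodel_* directories and keep the paths that exist. A self-contained sketch of get_submodel_paths, with the all-or-nothing behavior left to its get_all_submodel_paths sibling:

import os

def get_submodel_paths(submodels_path, *paths):
    # Collect <submodels_path>/submodel_*/<paths...> entries that exist on disk.
    result = []
    if not os.path.exists(submodels_path):
        return result
    for entry in sorted(os.listdir(submodels_path)):
        if entry.startswith("submodel"):
            p = os.path.join(submodels_path, entry, *paths)
            if os.path.exists(p):
                result.append(p)
    return result

# e.g. get_submodel_paths("project/submodels", "opensfm", "reconstruction.json")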
<|file_name|>reflector.ts<|end_file_name|><|fim▁begin|>import {Type, isPresent, stringify} from 'angular2/src/facade/lang'; import {BaseException, WrappedException} from 'angular2/src/facade/exceptions'; import { ListWrapper, Map, MapWrapper, Set, SetWrapper, StringMapWrapper } from 'angular2/src/facade/collection'; import {SetterFn, GetterFn, MethodFn} from './types'; import {ReflectorReader} from './reflector_reader'; import {PlatformReflectionCapabilities} from './platform_reflection_capabilities'; export {SetterFn, GetterFn, MethodFn} from './types'; export {PlatformReflectionCapabilities} from './platform_reflection_capabilities'; /** * Reflective information about a symbol, including annotations, interfaces, and other metadata. */ export class ReflectionInfo { constructor(public annotations?: any[], public parameters?: any[][], public factory?: Function, public interfaces?: any[], public propMetadata?: {[key: string]: any[]}) {} } /** * Provides access to reflection data about symbols. Used internally by Angular * to power dependency injection and compilation. */ export class Reflector extends ReflectorReader { /** @internal */ _injectableInfo = new Map<any, ReflectionInfo>(); /** @internal */ _getters = new Map<string, GetterFn>(); /** @internal */ _setters = new Map<string, SetterFn>(); /** @internal */ _methods = new Map<string, MethodFn>(); /** @internal */ _usedKeys: Set<any>; reflectionCapabilities: PlatformReflectionCapabilities;<|fim▁hole|> super(); this._usedKeys = null; this.reflectionCapabilities = reflectionCapabilities; } isReflectionEnabled(): boolean { return this.reflectionCapabilities.isReflectionEnabled(); } /** * Causes `this` reflector to track keys used to access * {@link ReflectionInfo} objects. */ trackUsage(): void { this._usedKeys = new Set(); } /** * Lists types for which reflection information was not requested since * {@link #trackUsage} was called. This list could later be audited as * potential dead code. */ listUnusedKeys(): any[] { if (this._usedKeys == null) { throw new BaseException('Usage tracking is disabled'); } var allTypes = MapWrapper.keys(this._injectableInfo); return allTypes.filter(key => !SetWrapper.has(this._usedKeys, key)); } registerFunction(func: Function, funcInfo: ReflectionInfo): void { this._injectableInfo.set(func, funcInfo); } registerType(type: Type, typeInfo: ReflectionInfo): void { this._injectableInfo.set(type, typeInfo); } registerGetters(getters: {[key: string]: GetterFn}): void { _mergeMaps(this._getters, getters); } registerSetters(setters: {[key: string]: SetterFn}): void { _mergeMaps(this._setters, setters); } registerMethods(methods: {[key: string]: MethodFn}): void { _mergeMaps(this._methods, methods); } factory(type: Type): Function { if (this._containsReflectionInfo(type)) { var res = this._getReflectionInfo(type).factory; return isPresent(res) ? res : null; } else { return this.reflectionCapabilities.factory(type); } } parameters(typeOrFunc: /*Type*/ any): any[][] { if (this._injectableInfo.has(typeOrFunc)) { var res = this._getReflectionInfo(typeOrFunc).parameters; return isPresent(res) ? res : []; } else { return this.reflectionCapabilities.parameters(typeOrFunc); } } annotations(typeOrFunc: /*Type*/ any): any[] { if (this._injectableInfo.has(typeOrFunc)) { var res = this._getReflectionInfo(typeOrFunc).annotations; return isPresent(res) ? 
res : []; } else { return this.reflectionCapabilities.annotations(typeOrFunc); } } propMetadata(typeOrFunc: /*Type*/ any): {[key: string]: any[]} { if (this._injectableInfo.has(typeOrFunc)) { var res = this._getReflectionInfo(typeOrFunc).propMetadata; return isPresent(res) ? res : {}; } else { return this.reflectionCapabilities.propMetadata(typeOrFunc); } } interfaces(type: Type): any[] { if (this._injectableInfo.has(type)) { var res = this._getReflectionInfo(type).interfaces; return isPresent(res) ? res : []; } else { return this.reflectionCapabilities.interfaces(type); } } getter(name: string): GetterFn { if (this._getters.has(name)) { return this._getters.get(name); } else { return this.reflectionCapabilities.getter(name); } } setter(name: string): SetterFn { if (this._setters.has(name)) { return this._setters.get(name); } else { return this.reflectionCapabilities.setter(name); } } method(name: string): MethodFn { if (this._methods.has(name)) { return this._methods.get(name); } else { return this.reflectionCapabilities.method(name); } } /** @internal */ _getReflectionInfo(typeOrFunc: any): ReflectionInfo { if (isPresent(this._usedKeys)) { this._usedKeys.add(typeOrFunc); } return this._injectableInfo.get(typeOrFunc); } /** @internal */ _containsReflectionInfo(typeOrFunc: any) { return this._injectableInfo.has(typeOrFunc); } importUri(type: Type): string { return this.reflectionCapabilities.importUri(type); } } function _mergeMaps(target: Map<string, Function>, config: {[key: string]: Function}): void { StringMapWrapper.forEach(config, (v: Function, k: string) => target.set(k, v)); }<|fim▁end|>
constructor(reflectionCapabilities: PlatformReflectionCapabilities) {
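The Reflector above combines three ideas: a local registry consulted first, a platform fallback for anything unregistered, and optional tracking of which keys were actually used so dead registrations can be audited via listUnusedKeys. A compact Python analog of that pattern (all names here are illustrative):

class Registry:
    def __init__(self, fallback):
        self._info = {}        # locally registered reflection info
        self._fallback = fallback
        self._used = None      # becomes a set once usage tracking is enabled

    def register(self, key, value):
        self._info[key] = value

    def track_usage(self):
        self._used = set()

    def get(self, key):
        if self._used is not None:
            self._used.add(key)
        return self._info[key] if key in self._info else self._fallback(key)

    def unused_keys(self):
        if self._used is None:
            raise RuntimeError("usage tracking is disabled")
        return [k for k in self._info if k not in self._used]

r = Registry(fallback=lambda k: "<reflected %s>" % k)
r.register("Widget", ["@Component"])
r.register("Gadget", ["@Directive"])
r.track_usage()
r.get("Widget"); r.get("Other")   # "Other" falls through to the fallback
print(r.unused_keys())            # ['Gadget']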
<|file_name|>55ef8d641456cf304b91346e5672f316c68f8da9.js<|end_file_name|><|fim▁begin|>var fb = "https://glaring-fire-5349.firebaseio.com"; var TodoCheck = React.createClass({displayName: "TodoCheck", getInitialState: function() { this.checked = false; return {checked: this.checked}; }, componentWillUnmount: function() { this.ref.off(); }, componentWillMount: function() { this.ref = new Firebase(fb + "/react_todos/" + this.props.todoKey + "/checked"); // Update the checked state when it changes. this.ref.on("value", function(snap) { if (snap.val() !== null) { this.checked = snap.val(); this.setState({ checked: this.checked }); this.props.todo.setDone(this.checked); } else { this.ref.set(false); this.props.todo.setDone(false); } }.bind(this)); }, toggleCheck: function(event) { this.ref.set(!this.checked); event.preventDefault(); }, render: function() { return ( React.createElement("a", { onClick: this.toggleCheck, href: "#", className: "pull-left todo-check"}, React.createElement("span", { className: "todo-check-mark glyphicon glyphicon-ok", "aria-hidden": "true"} ) ) ); }, }); var TodoText = React.createClass({displayName: "TodoText", componentWillUnmount: function() { this.ref.off(); $("#" + this.props.todoKey + "-text").off('blur'); }, setText: function(text) { this.text = text; this.props.todo.setHasText(!!text); }, componentWillMount: function() { this.ref = new Firebase(fb + "/react_todos/" + this.props.todoKey + "/text"); // Update the todo's text when it changes. this.setText(""); this.ref.on("value", function(snap) { if (snap.val() !== null) { $("#" + this.props.todoKey + "-text").text(snap.val()); this.setText(snap.val()); } else { this.ref.set(""); } }.bind(this)); }, onTextBlur: function(event) { this.ref.set($(event.target).text()); }, render: function() { setTimeout(function() { $("#" + this.props.todoKey + "-text").text(this.text); }.bind(this), 0); return ( React.createElement("span", { id: this.props.todoKey + "-text", onBlur: this.onTextBlur, contentEditable: "plaintext-only", "data-ph": "Todo", className: "todo-text"} ) ); }, }); var TodoDelete = React.createClass({displayName: "TodoDelete", getInitialState: function() { return {}; }, componentWillUnmount: function() { this.ref.off(); }, componentWillMount: function() { this.ref = new Firebase(fb + "/react_todos/" + this.props.todoKey + "/deleted"); }, onClick: function() { this.ref.set(true); }, render: function() { if (this.props.isLast) { return null; } return ( React.createElement("button", { onClick: this.onClick, type: "button", className: "close", "aria-label": "Close"}, React.createElement("span", { "aria-hidden": "true", dangerouslySetInnerHTML: {__html: '&times;'}}) ) ); }, }); var Todo = React.createClass({displayName: "Todo", getInitialState: function() { return {}; }, setDone: function(done) { this.setState({ done: done }); }, setHasText: function(hasText) { this.setState({ hasText: hasText }); }, render: function() { var doneClass = this.state.done ? 
"todo-done" : "todo-not-done"; return ( React.createElement("li", { id: this.props.todoKey, className: "list-group-item todo " + doneClass}, React.createElement(TodoCheck, {todo: this, todoKey: this.props.todoKey}), React.createElement(TodoText, {todo: this, todoKey: this.props.todoKey}), React.createElement(TodoDelete, {isLast: false, todoKey: this.props.todoKey}) ) ); } }); var TodoList = React.createClass({displayName: "TodoList", getInitialState: function() { this.todos = []; return {todos: this.todos}; }, componentWillMount: function() { this.ref = new Firebase("https://glaring-fire-5349.firebaseio.com/react_todos/"); // Add an empty todo if none currently exist. this.ref.on("value", function(snap) { if (snap.val() === null) { this.ref.push({ text: "", }); return; } // Add a new todo if no undeleted ones exist. var returnedTrue = snap.forEach(function(data) { if (!data.val().deleted) { return true; } }); if (!returnedTrue) { this.ref.push({ text: "", }); return; } }.bind(this)); // Add an added child to this.todos. this.ref.on("child_added", function(childSnap) { this.todos.push({ k: childSnap.key(), val: childSnap.val() }); this.replaceState({ todos: this.todos }); }.bind(this)); this.ref.on("child_removed", function(childSnap) { var key = childSnap.key(); var i; for (i = 0; i < this.todos.length; i++) { if (this.todos[i].k == key) { break; } } this.todos.splice(i, 1); this.replaceState({ todos: this.todos, }); }.bind(this)); this.ref.on("child_changed", function(childSnap) { var key = childSnap.key(); for (var i = 0; i < this.todos.length; i++) { if (this.todos[i].k == key) { this.todos[i].val = childSnap.val(); this.replaceState({ todos: this.todos, }); break; } } }.bind(this)); }, componentWillUnmount: function() { this.ref.off(); }, render: function() { console.log(this.todos); var todos = this.state.todos.map(function (todo) { if (todo.val.deleted) { return null; } return ( React.createElement(Todo, {todoKey: todo.k}) ); }).filter(function(todo) { return todo !== null; }); console.log(todos); return ( React.createElement("div", null, React.createElement("h1", {id: "list_title"}, this.props.title), React.createElement("ul", {id: "todo-list", className: "list-group"}, todos ) ) ); } }); var ListPage = React.createClass({displayName: "ListPage", render: function() { return ( React.createElement("div", null, React.createElement("div", {id: "list_page"}, React.createElement("a", { onClick: this.props.app.navOnClick({page: "LISTS"}), href: "/#/lists", id: "lists_link", className: "btn btn-primary"}, "Back to Lists" ) ), React.createElement("div", {className: "page-header"}, this.props.children )<|fim▁hole|>}); var Nav = React.createClass({displayName: "Nav", render: function() { return ( React.createElement("nav", {className: "navbar navbar-default navbar-static-top"}, React.createElement("div", {className: "container"}, React.createElement("div", {className: "navbar-header"}, React.createElement("a", {onClick: this.props.app.navOnClick({page: "LISTS"}), className: "navbar-brand", href: "/#/lists"}, "Firebase Todo") ), React.createElement("ul", {className: "nav navbar-nav"}, React.createElement("li", null, React.createElement("a", {onClick: this.props.app.navOnClick({page: "LISTS"}), href: "/#/lists"}, "Lists")) ) ) ) ); }, }); var App = React.createClass({displayName: "App", getInitialState: function() { var state = this.getState(); this.setHistory(state, true); return this.getState(); }, setHistory: function(state, replace) { // Don't bother pushing a history entry if the latest 
state is // the same. if (_.isEqual(state, this.state)) { return; } var histFunc = replace ? history.replaceState.bind(history) : history.pushState.bind(history); if (state.page === "LIST") { histFunc(state, "", "#/list/" + state.todoListKey); } else if (state.page === "LISTS") { histFunc(state, "", "#/lists"); } else { console.log("Unknown page: " + state.page); } }, getState: function() { var url = document.location.toString(); if (url.match(/#/)) { var path = url.split("#")[1]; var res = path.match(/\/list\/([^\/]*)$/); if (res) { return { page: "LIST", todoListKey: res[1], }; } res = path.match(/lists$/); if (res) { return { page: "LISTS" } } } return { page: "LISTS" } }, componentWillMount: function() { // Register history listeners. var app = this; window.onpopstate = function(event) { app.replaceState(event.state); }; }, navOnClick: function(state) { return function(event) { this.setHistory(state, false); this.replaceState(state); event.preventDefault(); }.bind(this); }, getPage: function() { if (this.state.page === "LIST") { return ( React.createElement(ListPage, {app: this}, React.createElement(TodoList, {todoListKey: this.state.todoListKey}) ) ); } else if (this.state.page === "LISTS") { return ( React.createElement("a", {onClick: this.navOnClick({page: "LIST", todoListKey: "-JjcFYgp1LyD5oDNNSe2"}), href: "/#/list/-JjcFYgp1LyD5oDNNSe2"}, "hi") ); } else { console.log("Unknown page: " + this.state.page); } }, render: function() { return ( React.createElement("div", null, React.createElement(Nav, {app: this}), React.createElement("div", {className: "container", role: "main"}, this.getPage() ) ) ); } }); React.render( React.createElement(App, null), document.getElementById('content') );<|fim▁end|>
) ); }
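App.getState in this row routes on the URL hash: '#/list/<key>' selects a single todo list and '#/lists' (or anything else) falls back to the overview. The same parsing in Python, handy for checking the regexes in isolation:

import re

def get_state(url):
    # Follows App.getState: match /list/<key> first, then a trailing 'lists'.
    if "#" in url:
        path = url.split("#", 1)[1]
        m = re.search(r"/list/([^/]*)$", path)
        if m:
            return {"page": "LIST", "todoListKey": m.group(1)}
        if re.search(r"lists$", path):
            return {"page": "LISTS"}
    return {"page": "LISTS"}

print(get_state("http://example.test/#/list/-JjcFYgp1LyD5oDNNSe2"))  # {'page': 'LIST', ...}
print(get_state("http://example.test/"))                             # {'page': 'LISTS'}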
<|file_name|>WaitUntilCommand.cpp<|end_file_name|><|fim▁begin|>/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2011. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in $(WIND_BASE)/WPILib. */ /*----------------------------------------------------------------------------*/ #include "Commands/WaitUntilCommand.h" #include "Timer.h" /** * A {@link WaitCommand} will wait until a certain match time before finishing. * This will wait until the game clock reaches some value, then continue to the * next command. * @see CommandGroup */ WaitUntilCommand::WaitUntilCommand(double time) : Command("WaitUntilCommand", time) { m_time = time; } WaitUntilCommand::WaitUntilCommand(const std::string &name, double time) : Command(name, time) { m_time = time; } void WaitUntilCommand::Initialize() {} void WaitUntilCommand::Execute() {} /** * Check if we've reached the actual finish time. */ bool WaitUntilCommand::IsFinished() { return Timer::GetMatchTime() >= m_time; } void WaitUntilCommand::End() {} <|fim▁hole|><|fim▁end|>
void WaitUntilCommand::Interrupted() {}
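The command above is a pure time gate: Execute does nothing and IsFinished compares the match clock against a target. A wall-clock Python equivalent of the pattern; match time is FRC-specific, so ordinary time.time() stands in here:

import time

class WaitUntil:
    def __init__(self, target):
        self.target = target  # absolute time to wait for

    def is_finished(self):
        return time.time() >= self.target

cmd = WaitUntil(time.time() + 0.05)
print(cmd.is_finished())  # False right after construction
time.sleep(0.06)
print(cmd.is_finished())  # True once the target time has passed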
<|file_name|>metrix++.py<|end_file_name|><|fim▁begin|># # Metrix++, Copyright 2009-2013, Metrix++ Project # Link: http://metrixplusplus.sourceforge.net # # This file is a part of Metrix++ Tool. # # Metrix++ is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3 of the License. # # Metrix++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. <|fim▁hole|># # You should have received a copy of the GNU General Public License # along with Metrix++. If not, see <http://www.gnu.org/licenses/>. # if __name__ == '__main__': import metrixpp metrixpp.start()<|fim▁end|>
<|file_name|>multi_view.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2008-2011, Luis Pedro Coelho <[email protected]> # vim: set ts=4 sts=4 sw=4 expandtab smartindent: # # License: MIT. See COPYING.MIT file in the milk distribution import numpy as np __all__ = [ 'multi_view_learner', ] class multi_view_model(object): def __init__(self, models): self.models = models def apply(self, features): if len(features) != len(self.models): raise ValueError('milk.supervised.two_view: Nr of features does not match training data (got %s, expected %s)' % (len(features) ,len(self.models)))<|fim▁hole|> if np.any(Ps >= 1.): return True # This is binary only: # if \prod Pi > \prod (1-Pi) return 1 # is equivalent to # if \prod Pi/(1-Pi) > 1. return 1 # if \sum \log( Pi/(1-Pi) ) > 0. return 1 return np.sum( np.log(Ps/(1-Ps)) ) > 0 class multi_view_learner(object): ''' Multi View Learner This learner learns different classifiers on multiple sets of features and combines them for classification. ''' def __init__(self, bases): self.bases = bases def train(self, features, labels, normalisedlabels=False): features = zip(*features) if len(features) != len(self.bases): raise ValueError('milk.supervised.multi_view_learner: ' + 'Nr of features does not match classifiser construction (got %s, expected %s)' % (len(features) ,len(self.bases))) models = [] for basis,f in zip(self.bases, features): try: f = np.array(f) except: f = np.array(f, dtype=object) models.append(basis.train(f, labels)) return multi_view_model(models) multi_view_classifier = multi_view_learner<|fim▁end|>
Ps = np.array([model.apply(f) for model,f in zip(self.models, features)]) if np.any(Ps <= 0.): return False
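The comment block in this row argues that prod(Pi) > prod(1-Pi) is equivalent to sum(log(Pi/(1-Pi))) > 0, which is what the model's apply method finally computes. A quick numeric check of that identity on strictly interior probabilities:

import numpy as np

rng = np.random.default_rng(0)
for _ in range(5):
    Ps = rng.uniform(0.01, 0.99, size=4)
    product_rule = np.prod(Ps) > np.prod(1 - Ps)
    log_odds_rule = np.sum(np.log(Ps / (1 - Ps))) > 0
    assert product_rule == log_odds_rule  # same decision either way
print("product rule and log-odds rule agree")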
<|file_name|>XZCompressionInputStream.java<|end_file_name|><|fim▁begin|>package io.sensesecure.hadoop.xz; import java.io.BufferedInputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.compress.CompressionInputStream; import org.tukaani.xz.XZInputStream; /** * * @author yongtang */ public class XZCompressionInputStream extends CompressionInputStream { private BufferedInputStream bufferedIn; private XZInputStream xzIn; private boolean resetStateNeeded; public XZCompressionInputStream(InputStream in) throws IOException { super(in); resetStateNeeded = false; bufferedIn = new BufferedInputStream(super.in); } @Override public int read(byte[] b, int off, int len) throws IOException { if (resetStateNeeded) { resetStateNeeded = false; bufferedIn = new BufferedInputStream(super.in); xzIn = null; } return getInputStream().read(b, off, len); } @Override public void resetState() throws IOException { resetStateNeeded = true; } @Override public int read() throws IOException { byte b[] = new byte[1]; int result = this.read(b, 0, 1); return (result < 0) ? result : (b[0] & 0xff); } @Override<|fim▁hole|> public void close() throws IOException { if (!resetStateNeeded) { if (xzIn != null) { xzIn.close(); xzIn = null; } resetStateNeeded = true; } } /** * This compression stream ({@link #xzIn}) is initialized lazily, in case * the data is not available at the time of initialization. This is * necessary for the codec to be used in a {@link SequenceFile.Reader}, as * it constructs the {@link XZCompressionInputStream} before putting data * into its buffer. Eager initialization of {@link #xzIn} there results in * an {@link EOFException}. */ private XZInputStream getInputStream() throws IOException { if (xzIn == null) { xzIn = new XZInputStream(bufferedIn); } return xzIn; } }<|fim▁end|>
<|file_name|>inherited_box.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this<|fim▁hole|> * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ <%namespace name="helpers" file="/helpers.mako.rs" /> <% data.new_style_struct("InheritedBox", inherited=True, gecko_name="Visibility") %> ${helpers.single_keyword("direction", "ltr rtl", need_clone=True, animatable=False)} // TODO: collapse. Well, do tables first. ${helpers.single_keyword("visibility", "visible hidden", extra_gecko_values="collapse", gecko_ffi_name="mVisible", animatable=True)} // CSS Writing Modes Level 3 // http://dev.w3.org/csswg/css-writing-modes/ ${helpers.single_keyword("writing-mode", "horizontal-tb vertical-rl vertical-lr", experimental=True, need_clone=True, animatable=False)} // FIXME(SimonSapin): Add 'mixed' and 'upright' (needs vertical text support) // FIXME(SimonSapin): initial (first) value should be 'mixed', when that's implemented // FIXME(bholley): sideways-right is needed as an alias to sideways in gecko. ${helpers.single_keyword("text-orientation", "sideways", experimental=True, need_clone=True, extra_gecko_values="mixed upright", extra_servo_values="sideways-right sideways-left", animatable=False)} // CSS Color Module Level 4 // https://drafts.csswg.org/css-color/ ${helpers.single_keyword("color-adjust", "economy exact", products="gecko", animatable=False)} <% image_rendering_custom_consts = { "crisp-edges": "CRISPEDGES" } %> // According to to CSS-IMAGES-3, `optimizespeed` and `optimizequality` are synonyms for `auto` // And, firefox doesn't support `pixelated` yet (https://bugzilla.mozilla.org/show_bug.cgi?id=856337) ${helpers.single_keyword("image-rendering", "auto crisp-edges", extra_gecko_values="optimizespeed optimizequality", extra_servo_values="pixelated", custom_consts=image_rendering_custom_consts, animatable=False)} // Used in the bottom-up flow construction traversal to avoid constructing flows for // descendants of nodes with `display: none`. <%helpers:longhand name="-servo-under-display-none" derived_from="display" products="servo" animatable="False"> use cssparser::ToCss; use std::fmt; use values::computed::ComputedValueAsSpecified; use values::NoViewportPercentage; impl NoViewportPercentage for SpecifiedValue {} #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "servo", derive(HeapSizeOf, Deserialize, Serialize))] pub struct SpecifiedValue(pub bool); pub mod computed_value { pub type T = super::SpecifiedValue; } impl ComputedValueAsSpecified for SpecifiedValue {} pub fn get_initial_value() -> computed_value::T { SpecifiedValue(false) } impl ToCss for SpecifiedValue { fn to_css<W>(&self, _: &mut W) -> fmt::Result where W: fmt::Write { Ok(()) // Internal property } } #[inline] pub fn derive_from_display(context: &mut Context) { use super::display::computed_value::T as Display; if context.style().get_box().clone_display() == Display::none { context.mutate_style().mutate_inheritedbox() .set__servo_under_display_none(SpecifiedValue(true)); } } </%helpers:longhand><|fim▁end|>
<|file_name|>GetBarcodeRecognitionQuality.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from com.aspose.barcoderecognition import BarCodeReadType from com.aspose.barcoderecognition import BarCodeReader class GetBarcodeRecognitionQuality: def __init__(self): dataDir = Settings.dataDir + 'WorkingWithBarcodeRecognition/AdvancedBarcodeRecognitionFeatures/GetBarcodeRecognitionQuality/' img = dataDir + "barcode.jpg" # initialize barcode reader barcode_reader_type = BarCodeReadType reader = BarCodeReader(img, barcode_reader_type.Code39Standard) # Call read method while (reader.read()): print "Barcode CodeText: " + reader.getCodeText() print " Barcode Type: " print reader.getReadType() percent = reader.getRecognitionQuality() print "Barcode Quality Percentage: " print percent # Close reader reader.close() if __name__ == '__main__': GetBarcodeRecognitionQuality()<|fim▁end|>
from asposebarcode import Settings
<|file_name|>incar.py<|end_file_name|><|fim▁begin|>import os from .input import VaspInput __author__ = "Guillermo Avendano-Franco"<|fim▁hole|>__copyright__ = "Copyright 2016" __version__ = "0.1" __maintainer__ = "Guillermo Avendano-Franco" __email__ = "[email protected]" __status__ = "Development" __date__ = "May 13, 2016" def read_incar(filename='INCAR'): """ Load the file INCAR in the directory 'path' or read directly the file 'path' and return an object 'inputvars' for pychemia :param filename: (str) Filename of a INCAR file format :return: """ if os.path.isfile(filename): filename = filename elif os.path.isdir(filename) and os.path.isfile(filename + '/INCAR'): filename += '/INCAR' else: raise ValueError('[ERROR] INCAR path not found: %s' % filename) iv = VaspInput(filename=filename) return iv def write_incar(iv, filepath='INCAR'): """ Takes an object inputvars from pychemia and save the file INCAR in the directory 'path' or save the file 'path' as a VASP INCAR file :param iv: (VaspInput) VASP Input variables :param filepath: (str) File path to write the INCAR file """ if os.path.isdir(filepath): filename = filepath + '/INCAR' else: filename = filepath iv.write(filename)<|fim▁end|>
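read_incar accepts either an INCAR file or a directory containing one and raises otherwise, and write_incar mirrors that with a directory check. The path resolution on its own, as a testable sketch:

import os

def resolve_incar_path(path):
    # Accept a file directly, or a directory holding an INCAR file.
    if os.path.isfile(path):
        return path
    candidate = os.path.join(path, "INCAR")
    if os.path.isdir(path) and os.path.isfile(candidate):
        return candidate
    raise ValueError("[ERROR] INCAR path not found: %s" % path)

import tempfile
d = tempfile.mkdtemp()
open(os.path.join(d, "INCAR"), "w").close()
print(resolve_incar_path(d))  # .../INCAR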
<|file_name|>data-table.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ var pref = new gadgets.Prefs(); var node = pref.getString('node') || undefined; var start = pref.getString('startTime') || undefined; var end = pref.getString('endTime') || undefined; var url = pref.getString('dataSource'); var template; function fetchData(startTime, endTime) { var url = pref.getString('dataSource'); var data = { start_time: start, end_time: end, node: node, action: pref.getString('appStatType') }; var appname = pref.getString('appname'); if (appname != '') { data.appname = appname; } $.ajax({ url: url, type: 'GET', dataType: 'json', data: data, success: onDataReceived }); } function onDataReceived(data) { var tableData = data.data; var tableHeadings = data.headings; var orderColumn = data.orderColumn; var applist = data.applist || undefined; var table; var headings; headings = getTableHeader(tableHeadings); $('#placeholder').html(template(headings)); var dataTableOptions = {}; dataTableOptions['data'] = tableData; dataTableOptions['order'] = [orderColumn]; if (!applist) { dataTableOptions['aoColumns'] = [ {'sWidth': '60%'}, {'sWidth': '20%'}, {'sWidth': '20%'} ]; } table = $('#table').dataTable(dataTableOptions); if (applist) { registerWebappSelect(table); } } function registerWebappSelect(table) { table.find('tbody').on('click', 'tr', function () { if ($(this).hasClass('selected')) { $(this).removeClass('selected'); } else { var param = '';<|fim▁hole|> param = 'node=' + node; } if (start && end) { param = param + (param == '' ? '' : '&') + 'start-time=' + moment(start, 'YYYY-MM-DD HH:mm').format('X') + '&end-time=' + moment(end, 'YYYY-MM-DD HH:mm').format('X'); } var webapp = table.fnGetData(this)[0]; table.$('tr.selected').removeClass('selected'); $(this).addClass('selected'); var webappUrl = webapp; if (param != '?') { webappUrl = webappUrl + '?' 
+ param; } publishRedirectUrl(webappUrl); } }); } function getTableHeader(tableHeadings) { var headingArray = []; var row = []; var th = {}; var rowSpan = 1; var i, j, len, len2; for (i = 0, len = tableHeadings.length; i < len; i++) { if (tableHeadings[i] instanceof Object) { rowSpan = 2; break; } } for (i = 0, len = tableHeadings.length; i < len; i++) { th = {}; if (typeof(tableHeadings[i]) == 'string') { th.rowSpan = rowSpan; th.text = tableHeadings[i]; } else { th.colSpan = tableHeadings[i]["sub"].length; th.text = tableHeadings[i]['parent']; } row.push(th); } headingArray.push(row); if (rowSpan > 1) { row = []; for (i = 0, len = tableHeadings.length; i < len; i++) { if (tableHeadings[i] instanceof Object) { var subHeadings = tableHeadings[i]['sub']; for (j = 0, len2 = subHeadings.length; j < len2; j++) { th = {}; th.text = subHeadings[j]; row.push(th); } } } headingArray.push(row); } return headingArray; } function publishRedirectUrl(url) { gadgets.Hub.publish('wso2.as.http.dashboard.webapp.url', url); } $(function () { fetchData(); Handlebars.registerHelper('generateHeadingTag', function (th) { var properties = ''; properties += (th.rowSpan) ? " rowspan='" + th.rowSpan + "'" : ''; properties += (th.colSpan) ? " colspan='" + th.colSpan + "'" : ''; return new Handlebars.SafeString('<th' + properties + '>' + th.text + '</th>'); }); template = Handlebars.compile($('#table-template').html()); }); gadgets.HubSettings.onConnect = function () { gadgets.Hub.subscribe('wso2.gadgets.charts.timeRangeChange', function (topic, data, subscriberData) { start = data.start.format('YYYY-MM-DD HH:mm'); end = data.end.format('YYYY-MM-DD HH:mm'); fetchData(); } ); gadgets.Hub.subscribe('wso2.gadgets.charts.ipChange', function (topic, data, subscriberData) { node = data; fetchData(); } ); };<|fim▁end|>
if (node) {
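The getTableHeader routine in the data-table.js example above expands a mixed list of plain-string and {parent, sub} headings into one or two header rows, giving strings rowspan 2 and grouped headings colspan sub.length whenever any group is present. A minimal TypeScript sketch of that same expansion (the Heading/Th shapes here are illustrative, not part of the gadget):

    type Heading = string | { parent: string; sub: string[] };
    interface Th { text: string; rowSpan?: number; colSpan?: number; }

    function buildHeaderRows(headings: Heading[]): Th[][] {
      // A second row is only needed when at least one grouped heading exists.
      const hasGroups = headings.some(h => typeof h !== "string");
      const top: Th[] = headings.map(h =>
        typeof h === "string"
          ? { text: h, rowSpan: hasGroups ? 2 : 1 }
          : { text: h.parent, colSpan: h.sub.length });
      if (!hasGroups) return [top];
      const bottom: Th[] = headings.flatMap(h =>
        typeof h === "string" ? [] : h.sub.map(text => ({ text })));
      return [top, bottom];
    }

For example, buildHeaderRows(["App", { parent: "Requests", sub: ["2xx", "5xx"] }]) yields a top row of App (rowspan 2) and Requests (colspan 2) and a bottom row of 2xx and 5xx, which mirrors what the Handlebars generateHeadingTag helper then renders.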
<|file_name|>main.controller.js<|end_file_name|><|fim▁begin|>'use strict'; export default class MainController { /*@ngInject*/ constructor($scope, Auth) { $scope.loggedIn = false; $scope.isStudent = false; $scope.isInstructor = false; Auth.getCurrentUser((user) => { $scope.user = user; $scope.loggedIn = Auth.isLoggedInSync(); if ($scope.loggedIn){ $scope.isStudent = Auth.isStudentSync(); $scope.isInstructor = Auth.isInstructorSync(); } });<|fim▁hole|><|fim▁end|>
} }
<|file_name|>cli.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node var path = require('path'); var fs = require('fs'); var optimist = require('optimist'); var prompt = require('prompt'); var efs = require('efs'); var encext = require('./index'); var defaultAlgorithm = 'aes-128-cbc';<|fim▁hole|> .boolean('r') .alias('r', 'recursive') .default('r', false) .describe('a', 'encryption algorithm') .string('a') .alias('a', 'algorithm') .default('a', defaultAlgorithm) .argv; if (argv.help) { optimist.showHelp(); } var pwdPrompt = { name: 'password', description: 'Please enter the encryption password', required: true, hidden: true }; prompt.message = 'encext'; prompt.colors = false; prompt.start(); prompt.get(pwdPrompt, function(err, result) { if (err) { console.error('[ERROR]', err); process.exit(1); } efs = efs.init(argv.algorithm, result.password); argv._.forEach(processPath); }); function processPath(fspath) { fs.stat(fspath, onStat); function onStat(err, stats) { if (err) { return exit(err) } if (stats.isDirectory() && argv.recursive) { fs.readdir(fspath, onReaddir); } else if (stats.isFile() && encext.isSupported(fspath)) { encrypt(fspath); } } function onReaddir(err, fspaths) { if (err) { return exit(err) } fspaths.forEach(function(p) { processPath(path.join(fspath, p)); }); } } function encrypt(fspath) { var encpath = fspath + '_enc'; var writeStream = efs.createWriteStream(encpath); writeStream.on('error', exit); var readStream = fs.createReadStream(fspath); readStream.on('error', exit); readStream.on('end', function() { console.info(fspath, 'encrypted and written to', encpath); }); readStream.pipe(writeStream); } function exit(err) { console.error(err); process.exit(1); }<|fim▁end|>
var argv = optimist .usage('usage: encext [-r] [-a algorithm] [file ...]') .describe('r', 'recursively encrypt supported files')
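The cli.js example above encrypts by piping a plaintext read stream into an efs write stream. A rough equivalent using only Node's built-in crypto module, to show the same streaming shape (the scrypt parameters, the fixed salt, and the _enc suffix are assumptions for illustration, not efs's actual scheme):

    import * as fs from "fs";
    import * as crypto from "crypto";

    function encryptFile(src: string, password: string): void {
      const key = crypto.scryptSync(password, "salt", 16); // 16-byte key for aes-128
      const iv = crypto.randomBytes(16);
      const cipher = crypto.createCipheriv("aes-128-cbc", key, iv);
      const out = fs.createWriteStream(src + "_enc");
      out.write(iv); // prepend the IV so a decryptor can recover it
      fs.createReadStream(src).pipe(cipher).pipe(out);
    }

Streaming keeps memory use flat regardless of file size, which is why the original pipes the streams rather than reading the whole file into a buffer.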
<|file_name|>NewQuestionPanel.js<|end_file_name|><|fim▁begin|>/* * This file is part of ARSnova Mobile. * Copyright (C) 2011-2012 Christian Thomas Weber * Copyright (C) 2012-2015 The ARSnova Team * * ARSnova Mobile is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * ARSnova Mobile is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with ARSnova Mobile. If not, see <http://www.gnu.org/licenses/>. */ Ext.define('ARSnova.view.speaker.NewQuestionPanel', { extend: 'Ext.Panel', requires: [ 'ARSnova.view.speaker.form.AbstentionForm', 'ARSnova.view.speaker.form.ExpandingAnswerForm', 'ARSnova.view.speaker.form.IndexedExpandingAnswerForm', 'ARSnova.view.speaker.form.FlashcardQuestion', 'ARSnova.view.speaker.form.SchoolQuestion', 'ARSnova.view.speaker.form.VoteQuestion', 'ARSnova.view.speaker.form.YesNoQuestion', 'ARSnova.view.speaker.form.NullQuestion', 'ARSnova.view.speaker.form.GridQuestion', 'ARSnova.view.speaker.form.FreeTextQuestion', 'ARSnova.view.speaker.form.ImageUploadPanel', 'ARSnova.view.MarkDownEditorPanel' ], config: { title: 'NewQuestionPanel', fullscreen: true, scrollable: true, scroll: 'vertical', variant: 'lecture', releasedFor: 'all' }, /* toolbar items */ toolbar: null, backButton: null, saveButton: null, /* items */ text: null, subject: null, duration: null, image: null, /* for estudy */ userCourses: [], initialize: function () { this.callParent(arguments); <|fim▁hole|> var screenWidth = (window.innerWidth > 0) ? window.innerWidth : screen.width; this.backButton = Ext.create('Ext.Button', { text: Messages.QUESTIONS, ui: 'back', handler: function () { var sTP = ARSnova.app.mainTabPanel.tabPanel.speakerTabPanel; sTP.animateActiveItem(sTP.audienceQuestionPanel, { type: 'slide', direction: 'right', duration: 700 }); } }); this.saveButtonToolbar = Ext.create('Ext.Button', { text: Messages.SAVE, ui: 'confirm', cls: 'saveQuestionButton', style: 'width: 89px', handler: function (button) { this.saveHandler(button).then(function (response) { ARSnova.app.getController('Questions').details({ question: Ext.decode(response.responseText) }); }); }, scope: this }); this.subject = Ext.create('Ext.field.Text', { name: 'subject', placeHolder: Messages.CATEGORY_PLACEHOLDER }); this.textarea = Ext.create('Ext.plugins.ResizableTextArea', { name: 'text', placeHolder: Messages.FORMAT_PLACEHOLDER }); this.markdownEditPanel = Ext.create('ARSnova.view.MarkDownEditorPanel', { processElement: this.textarea }); // Preview button this.previewButton = Ext.create('Ext.Button', { text: Ext.os.is.Desktop ? Messages.QUESTION_PREVIEW_BUTTON_TITLE_DESKTOP : Messages.QUESTION_PREVIEW_BUTTON_TITLE, ui: 'action', cls: Ext.os.is.Desktop ? 
'previewButtonLong' : 'previewButton', scope: this, handler: function () { this.defaultPreviewHandler(); } }); // Preview panel with integrated button this.previewPart = Ext.create('Ext.form.FormPanel', { cls: 'newQuestion', scrollable: null, hidden: true, items: [{ xtype: 'fieldset', items: [this.previewButton] }] }); this.mainPart = Ext.create('Ext.form.FormPanel', { cls: 'newQuestion', scrollable: null, items: [{ xtype: 'fieldset', items: [this.subject] }, { xtype: 'fieldset', items: [this.markdownEditPanel, this.textarea] }] }); this.abstentionPart = Ext.create('ARSnova.view.speaker.form.AbstentionForm', { id: 'abstentionPart' }); this.uploadView = Ext.create('ARSnova.view.speaker.form.ImageUploadPanel', { handlerScope: this, addRemoveButton: true, activateTemplates: false, urlUploadHandler: this.setImage, fsUploadHandler: this.setImage }); this.grid = Ext.create('ARSnova.view.components.GridImageContainer', { editable: false, gridIsHidden: true, hidden: true, style: "padding-top: 10px; margin-top: 30px" }); this.releasePart = Ext.create('Ext.Panel', { items: [ { cls: 'gravure', html: '<span class="coursemembersonlyicon"></span><span class="coursemembersonlymessage">' + Messages.MEMBERS_ONLY + '</span>' } ], hidden: true }); this.yesNoQuestion = Ext.create('ARSnova.view.speaker.form.YesNoQuestion', { hidden: true }); this.multipleChoiceQuestion = Ext.create('ARSnova.view.speaker.form.ExpandingAnswerForm', { hidden: true }); this.voteQuestion = Ext.create('ARSnova.view.speaker.form.VoteQuestion', { hidden: true }); this.schoolQuestion = Ext.create('ARSnova.view.speaker.form.SchoolQuestion', { hidden: true }); this.abcdQuestion = Ext.create('ARSnova.view.speaker.form.IndexedExpandingAnswerForm', { hidden: true }); this.freetextQuestion = Ext.create('ARSnova.view.speaker.form.FreeTextQuestion', { hidden: true }); var messageAppendix = screenWidth >= 650 ? "_LONG" : ""; var formatItems = [ {text: Messages["MC" + messageAppendix], itemId: Messages.MC}, {text: Messages["ABCD" + messageAppendix], itemId: Messages.ABCD}, {text: Messages["YESNO" + messageAppendix], itemId: Messages.YESNO}, {text: Messages["FREETEXT" + messageAppendix], itemId: Messages.FREETEXT}, {text: Messages["EVALUATION" + messageAppendix], itemId: Messages.EVALUATION}, {text: Messages["SCHOOL" + messageAppendix], itemId: Messages.SCHOOL} ]; var me = this; var config = ARSnova.app.globalConfig; if (config.features.flashcard) { formatItems.push({ itemId: Messages.FLASHCARD, text: messageAppendix.length ? Messages.FLASHCARD : Messages.FLASHCARD_SHORT }); me.flashcardQuestion = Ext.create('ARSnova.view.speaker.form.FlashcardQuestion', { editPanel: false, hidden: true }); } if (config.features.gridSquare) { formatItems.push({ itemId: Messages.GRID, text: Messages["GRID" + messageAppendix] }); me.gridQuestion = Ext.create('ARSnova.view.speaker.form.GridQuestion', { id: 'grid', hidden: true }); } me.questionOptions = Ext.create('Ext.SegmentedButton', { allowDepress: false, items: formatItems, defaults: { ui: 'action' }, listeners: { scope: me, toggle: function (container, button, pressed) { var label = Ext.bind(function (longv, shortv) { var screenWidth = (window.innerWidth > 0) ? window.innerWidth : screen.width; return (screenWidth >= 490 || me.backButton.isHidden()) ? 
longv : shortv; }, me); var title = ''; me.previewPart.hide(); me.previewButton.setHandler(this.defaultPreviewHandler); switch (button.getText()) { case Messages.GRID: case Messages.GRID_LONG: if (pressed) { me.gridQuestion.show(); me.previewButton.setHandler(me.gridQuestion.previewHandler); title = label(Messages.QUESTION_GRID, Messages.QUESTION_GRID_SHORT); this.previewPart.show(); this.uploadView.hide(); this.grid.hide(); } else { me.gridQuestion.hide(); this.uploadView.show(); if (this.grid.getImageFile()) { this.grid.show(); } } break; case Messages.EVALUATION: case Messages.EVALUATION_LONG: if (pressed) { me.voteQuestion.show(); title = label(Messages.QUESTION_RATING, Messages.QUESTION_RATING_SHORT); } else { me.voteQuestion.hide(); } break; case Messages.SCHOOL: case Messages.SCHOOL_LONG: if (pressed) { me.schoolQuestion.show(); title = label(Messages.QUESTION_GRADE, Messages.QUESTION_GRADE_SHORT); } else { me.schoolQuestion.hide(); } break; case Messages.MC: case Messages.MC_LONG: if (pressed) { me.multipleChoiceQuestion.show(); title = label(Messages.QUESTION_MC, Messages.QUESTION_MC_SHORT); } else { me.multipleChoiceQuestion.hide(); } break; case Messages.YESNO: case Messages.YESNO_LONG: if (pressed) { me.previewPart.show(); me.yesNoQuestion.show(); me.previewButton.setHandler(me.yesNoQuestion.previewHandler); title = label(Messages.QUESTION_YESNO, Messages.QUESTION_YESNO_SHORT); } else { me.yesNoQuestion.hide(); } break; case Messages.ABCD: case Messages.ABCD_LONG: if (pressed) { me.abcdQuestion.show(); title = label(Messages.QUESTION_SINGLE_CHOICE, Messages.QUESTION_SINGLE_CHOICE_SHORT); } else { me.abcdQuestion.hide(); } break; case Messages.FREETEXT: case Messages.FREETEXT_LONG: if (pressed) { me.previewPart.show(); me.freetextQuestion.show(); title = label(Messages.QUESTION_FREETEXT, Messages.QUESTION_FREETEXT_SHORT); } else { me.freetextQuestion.hide(); } break; case Messages.FLASHCARD: case Messages.FLASHCARD_SHORT: if (pressed) { me.textarea.setPlaceHolder(Messages.FLASHCARD_FRONT_PAGE); me.flashcardQuestion.show(); me.abstentionPart.hide(); title = Messages.FLASHCARD; me.uploadView.setUploadPanelConfig( Messages.PICTURE_SOURCE + " - " + Messages.FLASHCARD_BACK_PAGE, me.setFcImage, me.setFcImage ); } else { me.textarea.setPlaceHolder(Messages.FORMAT_PLACEHOLDER); me.flashcardQuestion.hide(); me.abstentionPart.show(); me.uploadView.setUploadPanelConfig( Messages.PICTURE_SOURCE, me.setImage, me.setImage ); } break; default: title = Messages.NEW_QUESTION_TITLE; break; } me.toolbar.setTitle(title); } } }); me.toolbar = Ext.create('Ext.Toolbar', { title: Messages.NEW_QUESTION_TITLE, cls: 'speakerTitleText', docked: 'top', ui: 'light', items: [ me.backButton, {xtype: 'spacer'}, me.saveButtonToolbar ] }); me.saveAndContinueButton = Ext.create('Ext.Button', { ui: 'confirm', cls: 'saveQuestionButton', text: Messages.SAVE_AND_CONTINUE, style: 'margin-top: 70px', handler: function (button) { me.saveHandler(button).then(function () { var theNotificationBox = {}; theNotificationBox = Ext.create('Ext.Panel', { cls: 'notificationBox', name: 'notificationBox', showAnimation: 'pop', modal: true, centered: true, width: 300, styleHtmlContent: true, styleHtmlCls: 'notificationBoxText', html: Messages.QUESTION_SAVED }); Ext.Viewport.add(theNotificationBox); theNotificationBox.show(); /* Workaround for Chrome 34+ */ Ext.defer(function () { theNotificationBox.destroy(); }, 3000); }).then(Ext.bind(function (response) { me.getScrollable().getScroller().scrollTo(0, 0, true); }, me)); }, scope: me 
}); me.add([me.toolbar, Ext.create('Ext.Toolbar', { cls: 'noBackground noBorder', docked: 'top', scrollable: { direction: 'horizontal', directionLock: true }, items: [{ xtype: 'spacer' }, me.questionOptions, { xtype: 'spacer' } ] }), me.mainPart, me.previewPart, /* only one of the question types will be shown at the same time */ me.voteQuestion, me.multipleChoiceQuestion, me.yesNoQuestion, me.schoolQuestion, me.abcdQuestion, me.freetextQuestion ]); if (me.flashcardQuestion) { me.add(me.flashcardQuestion); } me.add([ me.abstentionPart, me.uploadView, me.grid ]); if (me.gridQuestion) { me.add(me.gridQuestion); } me.add([ me.releasePart, me.saveAndContinueButton ]); me.on('activate', me.onActivate); }, onActivate: function () { this.questionOptions.setPressedButtons([0]); this.releasePart.setHidden(localStorage.getItem('courseId') === null || localStorage.getItem('courseId').length === 0); }, defaultPreviewHandler: function () { var questionPreview = Ext.create('ARSnova.view.QuestionPreviewBox'); questionPreview.showPreview(this.subject.getValue(), this.textarea.getValue()); }, saveHandler: function (button) { /* disable save button in order to avoid multiple question creation */ button.disable(); var panel = ARSnova.app.mainTabPanel.tabPanel.speakerTabPanel.newQuestionPanel; var values = {}; /* get text, subject of question from mainPart */ var mainPartValues = panel.mainPart.getValues(); values.text = mainPartValues.text; values.subject = mainPartValues.subject; values.abstention = !panel.abstentionPart.isHidden() && panel.abstentionPart.getAbstention(); values.questionVariant = panel.getVariant(); values.image = this.image; values.flashcardImage = null; values.imageQuestion = false; if (localStorage.getItem('courseId') != null && localStorage.getItem('courseId').length > 0) { values.releasedFor = 'courses'; } else { values.releasedFor = panel.getReleasedFor(); } /* fetch the values */ switch (panel.questionOptions.getPressedButtons()[0]._text) { case Messages.GRID: case Messages.GRID_LONG: values.questionType = "grid"; Ext.apply(values, panel.gridQuestion.getQuestionValues()); break; case Messages.EVALUATION: case Messages.EVALUATION_LONG: values.questionType = "vote"; Ext.apply(values, panel.voteQuestion.getQuestionValues()); break; case Messages.SCHOOL: case Messages.SCHOOL_LONG: values.questionType = "school"; Ext.apply(values, panel.schoolQuestion.getQuestionValues()); break; case Messages.MC: case Messages.MC_LONG: values.questionType = "mc"; Ext.apply(values, panel.multipleChoiceQuestion.getQuestionValues()); break; case Messages.YESNO: case Messages.YESNO_LONG: values.questionType = "yesno"; Ext.apply(values, panel.yesNoQuestion.getQuestionValues()); break; case Messages.ABCD: case Messages.ABCD_LONG: values.questionType = "abcd"; Ext.apply(values, panel.abcdQuestion.getQuestionValues()); break; case Messages.FREETEXT: case Messages.FREETEXT_LONG: values.questionType = "freetext"; values.possibleAnswers = []; Ext.apply(values, panel.freetextQuestion.getQuestionValues()); break; case Messages.FLASHCARD: case Messages.FLASHCARD_SHORT: values.questionType = "flashcard"; values.flashcardImage = this.fcImage; Ext.apply(values, panel.flashcardQuestion.getQuestionValues()); break; default: break; } var promise = panel.dispatch(values, button); promise.then(function () { panel.subject.reset(); panel.textarea.reset(); if (panel.flashcardQuestion) { panel.flashcardQuestion.answer.reset(); panel.flashcardQuestion.uploadView.resetButtons(); panel.setFcImage(null); } 
panel.multipleChoiceQuestion.resetFields(); panel.abcdQuestion.resetFields(); switch (panel.questionOptions.getPressedButtons()[0]._text) { case Messages.GRID: case Messages.GRID_LONG: panel.gridQuestion.resetView(); /* fall through */ default: panel.setImage(null); panel.uploadView.resetButtons(); panel.uploadView.setUploadPanelConfig( Messages.PICTURE_SOURCE, panel.setImage, panel.setImage ); break; } // animated scrolling to top panel.getScrollable().getScroller().scrollTo(0, 0, true); }); return promise; }, dispatch: function (values, button) { var promise = new RSVP.Promise(); ARSnova.app.getController('Questions').add({ sessionKeyword: sessionStorage.getItem('keyword'), text: values.text, subject: values.subject, type: "skill_question", questionType: values.questionType, questionVariant: values.questionVariant, duration: values.duration, number: 0, // unused active: 1, possibleAnswers: values.possibleAnswers, releasedFor: values.releasedFor, noCorrect: values.noCorrect, abstention: values.abstention, showStatistic: 1, gridSize: values.gridSize, offsetX: values.offsetX, offsetY: values.offsetY, zoomLvl: values.zoomLvl, image: values.image, fcImage: values.flashcardImage, gridOffsetX: values.gridOffsetX, gridOffsetY: values.gridOffsetY, gridZoomLvl: values.gridZoomLvl, gridSizeX: values.gridSizeX, gridSizeY: values.gridSizeY, gridIsHidden: values.gridIsHidden, imgRotation: values.imgRotation, toggleFieldsLeft: values.toggleFieldsLeft, numClickableFields: values.numClickableFields, thresholdCorrectAnswers: values.thresholdCorrectAnswers, cvIsColored: values.cvIsColored, gridLineColor: values.gridLineColor, numberOfDots: values.numberOfDots, gridType: values.gridType, scaleFactor: values.scaleFactor, gridScaleFactor: values.gridScaleFactor, imageQuestion: values.imageQuestion, textAnswerEnabled: values.textAnswerEnabled, saveButton: button, successFunc: function (response, opts) { promise.resolve(response); button.enable(); }, failureFunc: function (response, opts) { Ext.Msg.alert(Messages.NOTICE, Messages.QUESTION_CREATION_ERROR); promise.reject(response); button.enable(); } }); return promise; }, setGridConfiguration: function (grid) { grid.setEditable(false); grid.setGridIsHidden(true); }, setImage: function (image, test) { var title = this.toolbar.getTitle().getTitle(), isFlashcard = title === Messages.FLASHCARD, grid = isFlashcard ? this.flashcardQuestion.grid : this.grid; this.image = image; grid.setImage(image); if (image) { grid.show(); } else { grid.hide(); grid.clearImage(); this.setGridConfiguration(grid); } }, setFcImage: function (image) { this.fcImage = image; this.grid.setImage(image); if (image) { this.grid.show(); } else { this.grid.hide(); this.grid.clearImage(); this.setGridConfiguration(this.grid); } }, /** * Selects a button of the segmentation component with the given name. * * @param text The text of the button to be selected. */ activateButtonWithText: function (text) { var me = this; this.questionOptions.innerItems.forEach(function (item, index) { if (item.getItemId() === text) { me.questionOptions.setPressedButtons([index]); } }); } });<|fim▁end|>
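dispatch() in the NewQuestionPanel example above adapts ARSnova's callback-style controller (successFunc/failureFunc) to an RSVP promise so that saveHandler can chain resets and navigation with .then(). The adapter boils down to this pattern, sketched here with native promises (the handler names are taken from the code above, the rest is illustrative):

    interface Handlers<T> {
      successFunc: (response: T) => void;
      failureFunc: (response: unknown) => void;
    }

    function promisify<T>(call: (handlers: Handlers<T>) => void): Promise<T> {
      return new Promise<T>((resolve, reject) =>
        call({ successFunc: resolve, failureFunc: reject }));
    }

Callers can then chain the post-save cleanup instead of nesting callbacks, exactly as saveHandler does with its field resets and the animated scroll back to the top.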
<|file_name|>utility.ts<|end_file_name|><|fim▁begin|>/*tslint:disable:no-bitwise*/ import {InjectedRouter} from 'react-router'; import * as store from 'store'; import {EcpState} from '../store/ecp.state'; import * as session from '../login/session.model'; import * as moment from 'moment'; export class Utility { // temporary function (only to be used until all information is available from the EngagementCenterAPI) public static generateDebugUUID() { let d = new Date().getTime(); if (window.performance && typeof window.performance.now === 'function') { d += performance.now(); // use high-precision timer if available } let uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => { let r = (d + Math.random() * 16) % 16 | 0; d = Math.floor(d / 16); return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16); }); return uuid; } public static getEcpState() { let state: any = {}; try { state.loginReducer = store.get('session');<|fim▁hole|>
 if (state.loginReducer && state.loginReducer.sessionExpires) { state.loginReducer.sessionExpires = moment.utc(state.loginReducer.sessionExpires); } } catch (e) { console.log(e); } return state; } public static setEcpState(session: session.SessionState) { try { store.set('session', session); } catch (e) { console.log(e); } } public static sessionIsValid(session: session.SessionState) { if (session && session.isLoggedIn && session.sessionExpires && session.sessionExpires > moment.utc()) { return true; } return false; } } export interface Baseprops { params: any; route: any; routeParams: any; router: InjectedRouter; }<|fim▁end|>
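sessionIsValid in the utility.ts example above treats a session as live only if it is flagged logged-in and its expiry lies after the current UTC instant; getEcpState has to revive sessionExpires with moment.utc() first because the store serializes it to a string. A compact sketch of that revive-and-compare round trip (the Session shape is illustrative):

    import * as moment from "moment";

    interface Session { isLoggedIn: boolean; sessionExpires: moment.Moment; }

    function revive(raw: { isLoggedIn: boolean; sessionExpires: string }): Session {
      return { isLoggedIn: raw.isLoggedIn, sessionExpires: moment.utc(raw.sessionExpires) };
    }

    function isValid(s?: Session): boolean {
      return !!s && s.isLoggedIn && s.sessionExpires.isAfter(moment.utc());
    }

The original compares moments with >, which works because moments coerce to epoch milliseconds; isAfter expresses the same check explicitly.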
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>use syntax::ast::{Expr, Ident, Pat, Stmt, TokenTree}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; use maud; #[derive(Copy, Clone)]<|fim▁hole|> pub struct Renderer<'cx> { pub cx: &'cx ExtCtxt<'cx>, w: Ident, stmts: Vec<P<Stmt>>, tail: String, } impl<'cx> Renderer<'cx> { /// Creates a new `Renderer` using the given extension context. pub fn new(cx: &'cx ExtCtxt<'cx>) -> Renderer<'cx> { Renderer { cx: cx, w: Ident::new(token::intern("w")), stmts: Vec::new(), tail: String::new(), } } /// Creates a new `Renderer` under the same context as `self`. pub fn fork(&self) -> Renderer<'cx> { Renderer { cx: self.cx, w: self.w, stmts: Vec::new(), tail: String::new(), } } /// Flushes the tail buffer, emitting a single `.write_str()` call. fn flush(&mut self) { if !self.tail.is_empty() { let expr = { let w = self.w; let s = &*self.tail; quote_expr!(self.cx, $w.write_str($s)) }; let stmt = self.cx.stmt_expr(self.cx.expr_try(expr.span, expr)); self.stmts.push(stmt); self.tail.clear(); } } /// Reifies the `Renderer` into a block of markup. pub fn into_expr(mut self) -> P<Expr> { let Renderer { cx, w, stmts, .. } = { self.flush(); self }; quote_expr!(cx, ::maud::rt::make_markup(|$w: &mut ::std::fmt::Write| -> Result<(), ::std::fmt::Error> { use ::std::fmt::Write; $stmts Ok(()) })) } /// Reifies the `Renderer` into a raw list of statements. pub fn into_stmts(mut self) -> Vec<P<Stmt>> { let Renderer { stmts, .. } = { self.flush(); self }; stmts } /// Pushes a statement, flushing the tail buffer in the process. fn push(&mut self, stmt: P<Stmt>) { self.flush(); self.stmts.push(stmt); } /// Pushes a literal string to the tail buffer. fn push_str(&mut self, s: &str) { self.tail.push_str(s); } /// Appends a literal string, with the specified escaping method. pub fn string(&mut self, s: &str, escape: Escape) { let escaped; let s = match escape { Escape::PassThru => s, Escape::Escape => { escaped = maud::escape(s); &*escaped }, }; self.push_str(s); } /// Appends the result of an expression, with the specified escaping method. pub fn splice(&mut self, expr: P<Expr>, escape: Escape) { let w = self.w; let expr = match escape { Escape::PassThru => quote_expr!(self.cx, write!($w, "{}", $expr)), Escape::Escape => quote_expr!(self.cx, write!( ::maud::rt::Escaper { inner: $w }, "{}", $expr)), }; let stmt = self.cx.stmt_expr(self.cx.expr_try(expr.span, expr)); self.push(stmt); } pub fn element_open_start(&mut self, name: &str) { self.push_str("<"); self.push_str(name); } pub fn attribute_start(&mut self, name: &str) { self.push_str(" "); self.push_str(name); self.push_str("=\""); } pub fn attribute_empty(&mut self, name: &str) { self.push_str(" "); self.push_str(name); } pub fn attribute_end(&mut self) { self.push_str("\""); } pub fn element_open_end(&mut self) { self.push_str(">"); } pub fn element_close(&mut self, name: &str) { self.push_str("</"); self.push_str(name); self.push_str(">"); } /// Emits an `if` expression. /// /// The condition is a token tree (not an expression) so we don't /// need to special-case `if let`. 
pub fn emit_if(&mut self, if_cond: Vec<TokenTree>, if_body: Vec<P<Stmt>>, else_body: Option<Vec<P<Stmt>>>) { let stmt = match else_body { None => quote_stmt!(self.cx, if $if_cond { $if_body }), Some(else_body) => quote_stmt!(self.cx, if $if_cond { $if_body } else { $else_body }), }.unwrap(); self.push(stmt); } pub fn emit_for(&mut self, pattern: P<Pat>, iterable: P<Expr>, body: Vec<P<Stmt>>) { let stmt = quote_stmt!(self.cx, for $pattern in $iterable { $body }).unwrap(); self.push(stmt); } }<|fim▁end|>
pub enum Escape { PassThru, Escape, }
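The Renderer in the render.rs example above batches adjacent string literals in a tail buffer and flushes them as a single write_str statement whenever a dynamic statement (or the end of the template) is reached, so consecutive literals cost one write instead of many. The buffering strategy itself is language-independent; a small TypeScript sketch of it, emitting statements as strings purely for illustration:

    class Emitter {
      private stmts: string[] = [];
      private tail = "";

      pushStr(s: string): void { this.tail += s; }  // literals only accumulate

      private flush(): void {
        if (this.tail) {
          this.stmts.push(`w.write(${JSON.stringify(this.tail)});`);
          this.tail = "";
        }
      }

      pushStmt(stmt: string): void { this.flush(); this.stmts.push(stmt); }  // dynamic code

      finish(): string[] { this.flush(); return this.stmts; }
    }

This is why Renderer::push flushes before appending, and why into_expr and into_stmts flush once more at the end: any literals still buffered must be emitted before the generated code is finalized.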
<|file_name|>services-wrapper.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3 """ services-wrapper A small tool which wraps around check-services.php and tries to guide the services process with a more modern approach with a Queue and workers. Based on the original version of poller-wrapper.py by Job Snijders Author: Neil Lathwood <[email protected]> Orsiris de Jong <[email protected]> Date: Oct 2019 Usage: This program accepts one command line argument: the number of threads that should run simultaneously. If no argument is given it will assume a default of 1 thread. Ubuntu Linux: apt-get install python-mysqldb FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean RHEL 7: yum install MySQL-python RHEL 8: dnf install mariadb-connector-c-devel gcc && python -m pip install mysqlclient Tested on: Python 3.6.8 / PHP 7.2.11 / CentOS 8 License: This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/. LICENSE.txt contains a copy of the full GPLv3 licensing conditions. """ import LibreNMS.library as LNMS try: import json import os import queue import subprocess import sys import threading import time from optparse import OptionParser except ImportError as exc: print("ERROR: missing one or more of the following python modules:") print("threading, queue, sys, subprocess, time, os, json") print("ERROR: %s" % exc) sys.exit(2) APP_NAME = "services_wrapper" LOG_FILE = "logs/" + APP_NAME + ".log" _DEBUG = False servicedisco = False real_duration = 0 service_devices = 0 """ Threading helper functions """ # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC0 def memc_alive(): try: global memc key = str(uuid.uuid4()) memc.set("poller.ping." + key, key, 60) if memc.get("poller.ping." + key) == key: memc.delete("poller.ping." + key) return True else: return False except: return False def memc_touch(key, time): try: global memc val = memc.get(key) memc.set(key, val, time) except: pass def get_time_tag(step): ts = int(time.time()) return ts - ts % step # EOC0 """ A separate queue and a single worker for printing information to the screen prevents the good old joke: Some people, when confronted with a problem, think, "I know, I'll use threads," and then they two they hav erpoblesms. """ def printworker(): nodeso = 0 while True: # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC4 global IsNode global servicedisco if servicedisco: if not IsNode: memc_touch("service.master", 10) nodes = memc.get("service.nodes") if nodes is None and not memc_alive(): print( "WARNING: Lost Memcached. Taking over all devices. Nodes will quit shortly."
) servicedisco = False nodes = nodeso if nodes is not nodeso: print("INFO: %s Node(s) Total" % (nodes)) nodeso = nodes else: memc_touch("service.nodes", 10) try: worker_id, device_id, elapsed_time = print_queue.get(False) except: pass try: time.sleep(1) except: pass continue else: worker_id, device_id, elapsed_time = print_queue.get() # EOC4 global real_duration global per_device_duration global service_devices real_duration += elapsed_time per_device_duration[device_id] = elapsed_time service_devices += 1 if elapsed_time < 300: print( "INFO: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time) ) else: print( "WARNING: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time) ) print_queue.task_done() """ This function will fork off single instances of the check-services.php process, record how long each takes, and push the resulting report to the printer queue """ def poll_worker(): while True: device_id = poll_queue.get() # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC5 if not servicedisco or memc.get("service.device." + str(device_id)) is None: if servicedisco: result = memc.add( "service.device." + str(device_id), config["distributed_poller_name"], 300, ) if not result: print( "This device (%s) appears to already be service checked by another service node" % (device_id) ) poll_queue.task_done() continue if not memc_alive() and IsNode: print( "Lost Memcached, Not service checking Device %s as Node. Master will check it." % device_id ) poll_queue.task_done() continue # EOC5 try: start_time = time.time() output = ( "-d >> %s/services_device_%s.log" % (log_dir, device_id) if debug else ">> /dev/null" ) # TODO replace with command_runner command = "/usr/bin/env php %s -h %s %s 2>&1" % ( service_path, device_id, output, ) subprocess.check_call(command, shell=True) elapsed_time = int(time.time() - start_time) print_queue.put( [threading.current_thread().name, device_id, elapsed_time] ) except (KeyboardInterrupt, SystemExit): raise except: pass poll_queue.task_done() if __name__ == "__main__": logger = LNMS.logger_get_logger(LOG_FILE, debug=_DEBUG) install_dir = os.path.dirname(os.path.realpath(__file__)) LNMS.check_for_file(install_dir + "/.env") config = json.loads(LNMS.get_config_data(install_dir)) service_path = config["install_dir"] + "/check-services.php" log_dir = config["log_dir"] # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC1 if "distributed_poller_group" in config: service_group = str(config["distributed_poller_group"]) else: service_group = False if ( "distributed_poller" in config and "distributed_poller_memcached_host" in config and "distributed_poller_memcached_port" in config and config["distributed_poller"] ): try: import memcache import uuid memc = memcache.Client( [ config["distributed_poller_memcached_host"] + ":" + str(config["distributed_poller_memcached_port"]) ] ) if str(memc.get("service.master")) == config["distributed_poller_name"]: print("This system is already joined as the service master.") sys.exit(2) if memc_alive(): if memc.get("service.master") is None: print("Registered as Master") memc.set("service.master", config["distributed_poller_name"], 10) memc.set("service.nodes", 0, 300) IsNode = False else: print( "Registered as Node joining Master %s" % memc.get("service.master") ) IsNode = True memc.incr("service.nodes") servicedisco = True else: print( "Could not connect to memcached, disabling distributed service checks."
) servicedisco = False IsNode = False except SystemExit: raise except ImportError: print("ERROR: missing memcache python module:") print("On deb systems: apt-get install python3-memcache") print("On other systems: pip3 install python-memcached") print("Disabling distributed service checks.") servicedisco = False else: servicedisco = False # EOC1 s_time = time.time() real_duration = 0 per_device_duration = {} service_devices = 0 """ Take the number of threads we want to run in parallel from the command line; if none is given or the argument is garbage, fall back to the default of 1 """ usage = "usage: %prog [options] <workers> (Default: 1. Do not set too high.)" description = "Spawn multiple check-services.php processes in parallel." parser = OptionParser(usage=usage, description=description) parser.add_option( "-d", "--debug", action="store_true", default=False, help="Enable debug output. WARNING: Leaving this enabled will consume a lot of disk space.", ) (options, args) = parser.parse_args() debug = options.debug try: amount_of_workers = int(args[0]) except (IndexError, ValueError): amount_of_workers = 1 devices_list = [] # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC2 if service_group is not False: query = ( "SELECT DISTINCT(`services`.`device_id`) FROM `services` LEFT JOIN `devices` ON `services`.`device_id` = `devices`.`device_id` WHERE `devices`.`poller_group` IN(" + service_group + ") AND `devices`.`disabled` = 0" ) else: query = "SELECT DISTINCT(`services`.`device_id`) FROM `services` LEFT JOIN `devices` ON `services`.`device_id` = `devices`.`device_id` WHERE `devices`.`disabled` = 0" # EOC2 db = LNMS.db_open( config["db_socket"], config["db_host"], config["db_port"], config["db_user"], config["db_pass"], config["db_name"], ) cursor = db.cursor() cursor.execute(query) devices = cursor.fetchall() for row in devices: devices_list.append(int(row[0])) # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC3 if servicedisco and not IsNode: query = "SELECT MAX(`device_id`), MIN(`device_id`) FROM `services`" cursor.execute(query) devices = cursor.fetchall() maxlocks = devices[0][0] or 0 minlocks = devices[0][1] or 0 # EOC3 db.close() poll_queue = queue.Queue() print_queue = queue.Queue() print( "INFO: starting the service check at %s with %s threads" % (time.strftime("%Y-%m-%d %H:%M:%S"), amount_of_workers) ) for device_id in devices_list: poll_queue.put(device_id) for i in range(amount_of_workers): t = threading.Thread(target=poll_worker) t.setDaemon(True) t.start()<|fim▁hole|>
 p = threading.Thread(target=printworker) p.setDaemon(True) p.start() try: poll_queue.join() print_queue.join() except (KeyboardInterrupt, SystemExit): raise total_time = int(time.time() - s_time) print( "INFO: services-wrapper checked %s devices in %s seconds with %s workers" % (service_devices, total_time, amount_of_workers) ) # (c) 2015, GPLv3, Daniel Preussker <[email protected]> <<<EOC6 if servicedisco or memc_alive(): master = memc.get("service.master") if master == config["distributed_poller_name"] and not IsNode: print("Wait for all service-nodes to finish") nodes = memc.get("service.nodes") while nodes is not None and nodes > 0: try: time.sleep(1) nodes = memc.get("service.nodes") except: pass print("Clearing Locks") x = minlocks while x <= maxlocks: memc.delete("service.device." + str(x)) x = x + 1 print("%s Locks Cleared" % x) print("Clearing Nodes") memc.delete("service.master") memc.delete("service.nodes") else: memc.decr("service.nodes") print("Finished %s."
% time.time()) # EOC6 show_stopper = False if total_time > 300: print( "WARNING: the process took more than 5 minutes to finish, you need faster hardware or more threads" ) print( "INFO: in sequential style service checks the elapsed time would have been: %s seconds" % real_duration ) for device in per_device_duration: if per_device_duration[device] > 300: print( "WARNING: device %s is taking too long: %s seconds" % (device, per_device_duration[device]) ) show_stopper = True if show_stopper: print( "ERROR: Some devices are taking more than 300 seconds, the script cannot recommend you what to do." ) else: recommend = int(total_time / 300.0 * amount_of_workers + 1) print( "WARNING: Consider setting a minimum of %d threads. (This does not constitute professional advice!)" % recommend ) sys.exit(2)<|fim▁end|>
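services-wrapper.py above fans device IDs out through a queue.Queue to N daemon poll workers and funnels all output through a single printer thread so prints and bookkeeping stay serialized. The fan-out half of that shape, sketched as an async worker pool in TypeScript (the job callback is a stand-in for the check-services.php call):

    async function runPool<T>(
      items: T[],
      workers: number,
      job: (item: T) => Promise<void>,
    ): Promise<void> {
      const queue = items.slice(); // shared work queue, drained by all workers
      const worker = async (): Promise<void> => {
        for (let next = queue.shift(); next !== undefined; next = queue.shift()) {
          await job(next);
        }
      };
      await Promise.all(Array.from({ length: workers }, () => worker()));
    }

As in the Python original, the worker count trades wall-clock time against load on the monitored hosts, which is what the closing "consider setting a minimum of %d threads" hint estimates from the sequential-versus-parallel timing.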
<|file_name|>line.js<|end_file_name|><|fim▁begin|>import {Curve} from '../curve' export class Line extends Curve { constructor(p0, v) { super(); this.p0 = p0; this.v = v; this._pointsCache = new Map(); } intersectSurface(surface) { if (surface.isPlane) { const s0 = surface.normal.multiply(surface.w); return surface.normal.dot(s0.minus(this.p0)) / surface.normal.dot(this.v); // 4.7.4 } else { return super.intersectSurface(surface); } } intersectCurve(curve, surface) { if (curve.isLine && surface.isPlane) { const otherNormal = surface.normal.cross(curve.v)._normalize(); return otherNormal.dot(curve.p0.minus(this.p0)) / otherNormal.dot(this.v); // (4.8.3) } return super.intersectCurve(curve, surface); } parametricEquation(t) { return this.p0.plus(this.v.multiply(t)); } t(point) { return point.minus(this.p0).dot(this.v); } pointOfSurfaceIntersection(surface) { let point = this._pointsCache.get(surface); if (!point) { const t = this.intersectSurface(surface); point = this.parametricEquation(t); this._pointsCache.set(surface, point); } return point; } translate(vector) { return new Line(this.p0.plus(vector), this.v); } approximate(resolution, from, to, path) { } offset() {}; } Line.prototype.isLine = true; Line.fromTwoPlanesIntersection = function(plane1, plane2) {<|fim▁hole|> const n1 = plane1.normal; const n2 = plane2.normal; const v = n1.cross(n2)._normalize(); const pf1 = plane1.toParametricForm(); const pf2 = plane2.toParametricForm(); const r0diff = pf1.r0.minus(pf2.r0); const ww = r0diff.minus(n2.multiply(r0diff.dot(n2))); const p0 = pf2.r0.plus( ww.multiply( n1.dot(r0diff) / n1.dot(ww))); return new Line(p0, v); }; Line.fromSegment = function(a, b) { return new Line(a, b.minus(a)._normalize()); };<|fim▁end|>
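Line.fromTwoPlanesIntersection in the line.js example above uses a standard construction: with $r_{0,i}$ a point on plane $i$ and $n_i$ its unit normal, the code computes (in its own variable names, $v$, $ww$ and $p_0$)

$$v = \frac{n_1 \times n_2}{\lVert n_1 \times n_2 \rVert}, \qquad w = (r_{0,1} - r_{0,2}) - \big((r_{0,1} - r_{0,2}) \cdot n_2\big)\, n_2,$$

$$p_0 = r_{0,2} + \frac{n_1 \cdot (r_{0,1} - r_{0,2})}{n_1 \cdot w}\, w.$$

Here $w$ is the component of $r_{0,1} - r_{0,2}$ lying in plane 2, so every point $r_{0,2} + t\,w$ stays on plane 2; the scalar is chosen so that $n_1 \cdot (p_0 - r_{0,1}) = 0$, putting $p_0$ on plane 1 as well. The line through $p_0$ with direction $v$ is then the intersection, and the construction (like the code) breaks down only when the planes are parallel and $n_1 \times n_2 = 0$.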
<|file_name|>main.ts<|end_file_name|><|fim▁begin|>import { platformBrowserDynamic } from '@angular/platform-browser-dynamic'; import { enableProdMode } from '@angular/core'; import { AppModule } from './app/app.module'; <|fim▁hole|>platformBrowserDynamic().bootstrapModule(AppModule);<|fim▁end|>
enableProdMode();
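The main.ts example above calls enableProdMode() unconditionally; Angular CLI projects more commonly gate it on the generated environment flag so that development builds keep dev-mode assertions. A sketch of that conventional variant (the environments file path is the CLI default, assumed here):

    import { enableProdMode } from "@angular/core";
    import { platformBrowserDynamic } from "@angular/platform-browser-dynamic";

    import { AppModule } from "./app/app.module";
    import { environment } from "./environments/environment";

    if (environment.production) {
      enableProdMode(); // drops dev-mode checks and the extra change-detection pass
    }
    platformBrowserDynamic().bootstrapModule(AppModule);

With the unconditional call, even local development builds run without the verification pass that helps catch change-detection bugs.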
<|file_name|>HTMLBodyElement.js<|end_file_name|><|fim▁begin|>"use strict"; const conversions = require("webidl-conversions"); const utils = require("./utils.js"); const HTMLElement = require("./HTMLElement.js"); const impl = utils.implSymbol; const mixin = utils.mixin; const WindowEventHandlers = require("./WindowEventHandlers.js"); function HTMLBodyElement() { throw new TypeError("Illegal constructor"); } HTMLBodyElement.prototype = Object.create(HTMLElement.interface.prototype); HTMLBodyElement.prototype.constructor = HTMLBodyElement; mixin(HTMLBodyElement.prototype, WindowEventHandlers.interface.prototype); WindowEventHandlers.mixedInto.push(HTMLBodyElement); HTMLBodyElement.prototype.toString = function () { if (this === HTMLBodyElement.prototype) { return "[object HTMLBodyElementPrototype]"; } return HTMLElement.interface.prototype.toString.call(this); }; Object.defineProperty(HTMLBodyElement.prototype, "text", { get() { const value = this.getAttribute("text"); return value === null ? "" : value; },<|fim▁hole|> this.setAttribute("text", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLBodyElement.prototype, "link", { get() { const value = this.getAttribute("link"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { treatNullAsEmptyString: true }); this.setAttribute("link", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLBodyElement.prototype, "vLink", { get() { const value = this.getAttribute("vLink"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { treatNullAsEmptyString: true }); this.setAttribute("vLink", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLBodyElement.prototype, "aLink", { get() { const value = this.getAttribute("aLink"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { treatNullAsEmptyString: true }); this.setAttribute("aLink", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLBodyElement.prototype, "bgColor", { get() { const value = this.getAttribute("bgColor"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { treatNullAsEmptyString: true }); this.setAttribute("bgColor", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLBodyElement.prototype, "background", { get() { const value = this.getAttribute("background"); return value === null ? 
"" : value; }, set(V) { V = conversions["DOMString"](V); this.setAttribute("background", V); }, enumerable: true, configurable: true }); module.exports = { mixedInto: [], is(obj) { if (obj) { if (obj[impl] instanceof Impl.implementation) { return true; } for (let i = 0; i < module.exports.mixedInto.length; ++i) { if (obj instanceof module.exports.mixedInto[i]) { return true; } } } return false; }, isImpl(obj) { if (obj) { if (obj instanceof Impl.implementation) { return true; } const wrapper = utils.wrapperForImpl(obj); for (let i = 0; i < module.exports.mixedInto.length; ++i) { if (wrapper instanceof module.exports.mixedInto[i]) { return true; } } } return false; }, create(constructorArgs, privateData) { let obj = Object.create(HTMLBodyElement.prototype); this.setup(obj, constructorArgs, privateData); return obj; }, createImpl(constructorArgs, privateData) { let obj = Object.create(HTMLBodyElement.prototype); this.setup(obj, constructorArgs, privateData); return utils.implForWrapper(obj); }, _internalSetup(obj) { HTMLElement._internalSetup(obj); }, setup(obj, constructorArgs, privateData) { if (!privateData) privateData = {}; privateData.wrapper = obj; this._internalSetup(obj); obj[impl] = new Impl.implementation(constructorArgs, privateData); obj[impl][utils.wrapperSymbol] = obj; }, interface: HTMLBodyElement, expose: { Window: { HTMLBodyElement: HTMLBodyElement } } }; const Impl = require("../nodes/HTMLBodyElement-impl.js");<|fim▁end|>
set(V) { V = conversions["DOMString"](V, { treatNullAsEmptyString: true });
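Every property in the HTMLBodyElement.js example above follows the same reflected-attribute pattern: the getter maps a missing content attribute to "", and the setter coerces through DOMString with treatNullAsEmptyString before writing the attribute back. jsdom generates one defineProperty block per property, but the pattern is easy to see factored out; a hand-written sketch of the same shape (not jsdom's actual generator):

    function reflectString(proto: any, prop: string, attr: string = prop): void {
      Object.defineProperty(proto, prop, {
        get() {
          const value = this.getAttribute(attr);
          return value === null ? "" : value; // an absent attribute reads as ""
        },
        set(v) {
          this.setAttribute(attr, v === null ? "" : String(v)); // treatNullAsEmptyString
        },
        enumerable: true,
        configurable: true,
      });
    }

Reflection keeps the JS property and the serialized HTML attribute in lock-step, which is what the HTML spec requires for bgColor, link, and the rest.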
<|file_name|>boxplus.transition.js<|end_file_name|><|fim▁begin|>/**@license boxplus image transition engine * @author Levente Hunyadi * @version 1.4.2 * @remarks Copyright (C) 2009-2010 Levente Hunyadi * @remarks Licensed under GNU/GPLv3, see http://www.gnu.org/licenses/gpl-3.0.html * @see http://hunyadi.info.hu/projects/boxplus **/ /* * boxplus: a lightweight pop-up window engine shipped with sigplus * Copyright 2009-2010 Levente Hunyadi * * boxplus is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * boxplus is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with boxplus. If not, see <http://www.gnu.org/licenses/>. */ if (typeof(__jQuery__) == 'undefined') { var __jQuery__ = jQuery; } (function ($) { var CLASS_DISABLED = 'boxplus-disabled'; var max = Math.max; var floor = Math.floor; var ceil = Math.ceil; /** * Maximum computed width of matched elements including margin, border and padding. */ $.fn.maxWidth = function () { var width = 0; this.each( function(index, el) { width = max(width, $(el).safeWidth()); }); return width; } /** * Maximum computed height of matched elements including margin, border and padding. */ $.fn.maxHeight = function () { var height = 0; this.each( function(index, el) { height = max(height, $(el).safeHeight()); }); return height; } /** * "Safe" dimension of an element. * Some browsers give invalid values with .width() but others give the meaningless, * value "auto" with .css('width'), this function bridges the differences. */ function _safeDimension(obj, dim) { var cssvalue = parseInt(obj.css(dim)); return isNaN(cssvalue) ? obj[dim]() : cssvalue; } $.fn.safeWidth = function () { return _safeDimension(this, 'width'); } $.fn.safeHeight = function () { return _safeDimension(this, 'height'); } /** * Creates a new image slider from a collection of images. * The method should be called on a ul or ol element that wraps a set of li elements. 
*/ $.fn.boxplusTransition = function (settings) { // default configuration properties var defaults = { navigation: 'horizontal', // orientation of navigation buttons, or do not show navigation buttons at all ['horizontal'|'vertical'|false] loop: true, // whether the image sequence loops such that the first image follows the last [true|false] contextmenu: true, // whether the context menu appears when right-clicking an image [true|false] orientation: 'vertical', // alignment of bars used in transition ['vertical'|'horizontal'] slices: 15, // number of bars to use in transition animation effect: 'fade', // image transition effect ['fade'|'bars'|'bars+fade'|'shutter'|'shutter+fade'] easing: 'swing', duration: 500, // duration for transition animation [ms] delay: 4000 // delay between successive animation steps [ms] }; settings = $.extend(defaults, settings); var lists = this.filter('ul, ol'); // filter elements that are not lists // iterate over elements if invoked on an element collection lists.each(function () { // short-hand access to settings var isNavigationVertical = settings.navigation == 'vertical'; var isOrientationHorizontal = settings.orientation == 'horizontal'; var sliceCount = settings.slices; var duration = settings.duration; var delay = settings.delay; // status information var sliderIndexPosition = 0; // index of item currently shown var animation = false; // true if an animation is in progress // DOM elements var list = $(this).wrap('<div />').before('<div />').addClass('boxplus-hidden'); var wrapper = list.parent().addClass('boxplus-wrapper'); var items = $('li', list).css({ position: 'absolute', left: 0, top: 0 }).find('img:first'); // forces following an anchor (in a cancellable way) even when click event is triggered with jQuery items.parent('a').click(function (event) { if (!event.isDefaultPrevented()) { location.href = this.href; } }); var container = list.prev().addClass('boxplus-transition').addClass(CLASS_DISABLED).click(function () { items.eq(sliderIndexPosition).parent('a').click(); // when an image is clicked, the anchor wrapping the original image (if any) should be followed }); // get maximum width and height of image slider items var itemCount = items.length; var itemWidth = items.maxWidth(); var itemHeight = items.maxHeight(); // set width and height of image container wrapper.add(container).css({ width: itemWidth, height: itemHeight }); switch (settings.navigation) { case 'horizontal': case 'vertical': var cls = 'boxplus-' + settings.navigation; container.addClass(cls); // setup overlay navigation controls function _addButton(cls) { return '<div class="boxplus-' + cls + '" />'; } container.prepend( $(_addButton('prev') + _addButton('next')).addClass(cls).addClass( (isNavigationVertical ? itemWidth : itemHeight) < 120 ? 'boxplus-small' : 'boxplus-large' ) ); // bind events for navigation controls $('.boxplus-prev', container).click(scrollPrevious); $('.boxplus-next', container).click(scrollNext); } if (!settings.contextmenu) { $(document).bind('contextmenu', function (event) { // subscribe to right-click event return !container.children().add(container).filter(event.target).size(); // prevent right-click on image }); } // add bars to container for animation var sliceDim = (isOrientationHorizontal ? itemHeight : itemWidth) / sliceCount; for (var sliceIndex = 0; sliceIndex < sliceCount; sliceIndex++) { var sliceOffset = floor(sliceIndex*sliceDim); $('<div class="boxplus-transition-bars" />').css({ left: isOrientationHorizontal ? 
0 : sliceOffset, top: isOrientationHorizontal ? sliceOffset : 0, height: isOrientationHorizontal ? sliceDim : itemHeight, width: isOrientationHorizontal ? itemWidth : sliceDim, visibility: 'hidden' }).appendTo(container); } // update visibility of navigation controls _updatePaging(); container.removeClass(CLASS_DISABLED); scrollFirst(); // slider animation if (delay > 0) { delay = max(delay, duration + 500); var intervalID = window.setInterval(scrollNext, delay); // stop animation when mouse moves over an image container.mouseover(function () { window.clearInterval(intervalID); }).mouseout(function () { intervalID = window.setInterval(scrollNext, delay); }); } // // Callback functions // function scrollFirst() { return scroll('first'); } function scrollPrevious() { return scroll('prev'); } function scrollNext() { return scroll('next'); } function scrollLast() { return scroll('last'); } /** * Sets the image shown as the background image of elements. * @param elem The element whose background-image property to set. */ function _setImage(e, x, y) { var item = items.eq(sliderIndexPosition); // item to be shown e.css({ backgroundImage: 'url("' + item.attr('src') + '")', backgroundPosition: ((itemWidth - item.safeWidth()) / 2 - x) + 'px ' + ((itemHeight - item.safeHeight()) / 2 - y) + 'px' }); } /** * Preloads an image for later display. * @param item The element to use to acquire the URL of the image. */ function _preloadImage(item) { var longdesc = item.attr('longdesc'); if (longdesc) { // higher-resolution image is available item.attr('src', longdesc).attr('longdesc', ''); } } function _preloadImages() { _preloadImage(items.eq(sliderIndexPosition)); _preloadImage(items.eq((sliderIndexPosition - 1) % itemCount)); _preloadImage(items.eq((sliderIndexPosition + 1) % itemCount)); } /** * Execute image transition. */ function scroll(dir) { var bars = $('.boxplus-transition-bars', container); if (animation) { // clear ongoing transitions _setImage(container, 0, 0); bars.clearQueue().stop().css('visibility', 'hidden'); } animation = true; // indicate an ongoing transition switch (dir) { case 'first': sliderIndexPosition = 0; break; case 'prev': sliderIndexPosition = (sliderIndexPosition - 1) % itemCount; break; case 'next': sliderIndexPosition = (sliderIndexPosition + 1) % itemCount; break; case 'last': sliderIndexPosition = itemCount - 1; break; default: return; }; _updatePaging(); _preloadImages(); bars.css({ // reset bars background image, height, width, opacity, etc. opacity: 1 }).each(function (index) { // set the image shown as the background image of bars with computing offset position var bar = $(this); var dim = ceil(index*sliceDim+sliceDim) - floor(index*sliceDim); bar.css({ height: isOrientationHorizontal ? dim : itemHeight, width: isOrientationHorizontal ? itemWidth : dim }); var position = bar.position(); _setImage(bar, position.left, position.top); }); function _transitionFade() { bars.css('opacity', 0).show(); return {opacity: 1}; } function _transitionBars() { bars.css(isOrientationHorizontal ? 'width' : 'height', 0); if (isOrientationHorizontal) { return {width: itemWidth}; } else { return {height: itemHeight}; } } function _transitionShutter() { bars.css(isOrientationHorizontal ? 
'height' : 'width', 0); if (isOrientationHorizontal) { return {height: ceil(sliceDim)};<|fim▁hole|> } else { return {width: ceil(sliceDim)}; } } var target; switch (settings.effect) { case 'fade': target = _transitionFade(); break; case 'bars': target = _transitionBars(); break; case 'bars+fade': target = $.extend(_transitionBars(), _transitionFade()); break; case 'shutter': target = _transitionShutter(); break; case 'shutter+fade': target = $.extend(_transitionShutter(), _transitionFade()); break; } bars.css('visibility', 'visible'); // function to arrange bars in a specific order var ordfun = function (index) { return index; }; switch (dir) { case 'first': case 'prev': ordfun = function (index) { return sliceCount-1-index; }; break; } // register animation events for bars bars.each(function (index) { var k = ordfun(index); var options = { duration: 500, easing: settings.easing }; if (k == sliceCount-1) { $.extend(options, { complete: function () { animation = false; _setImage(container, 0, 0); bars.css('visibility', 'hidden'); } }); } // fire animation after an initial delay $(this).delay(k * duration / sliceCount).animate(target, options); }); return false; // prevent event propagation } /** * Update which navigation links are enabled. */ function _updatePaging() { if (!settings.loop) { $('.boxplus-prev', container).toggleClass(CLASS_DISABLED, sliderIndexPosition <= 0); $('.boxplus-next', container).toggleClass(CLASS_DISABLED, sliderIndexPosition >= itemCount-1); } } }); return this; // support chaining } })(__jQuery__);<|fim▁end|>
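The transition in the boxplus example above cuts the image into `slices` bars whose offsets are floored while their sizes use ceil(i*d + d) - floor(i*d), so fractional bar widths round outward and adjacent bars overlap by up to a pixel rather than leaving hairline gaps. The computation in isolation:

    function sliceRects(total: number, slices: number): { offset: number; size: number }[] {
      const d = total / slices; // fractional bar size
      return Array.from({ length: slices }, (_, i) => ({
        offset: Math.floor(i * d),
        size: Math.ceil(i * d + d) - Math.floor(i * d), // round outward, never short
      }));
    }

    // sliceRects(100, 3) -> [{offset:0,size:34},{offset:33,size:34},{offset:66,size:34}]

Each bar also gets the full image as its background, shifted by the bar's own position in _setImage, so the bars tile back into the complete picture once the animation finishes.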
<|file_name|>mi_parse.test.ts<|end_file_name|><|fim▁begin|>import * as assert from 'assert'; import { parseMI, MINode } from '../../backend/mi_parse'; suite("MI Parse", () => { test("Simple out of band record", () => { const parsed = parseMI(`4=thread-exited,id="3",group-id="i1"`); assert.ok(parsed); assert.equal(parsed.token, 4); assert.equal(parsed.outOfBandRecord.length, 1); assert.equal(parsed.outOfBandRecord[0].isStream, false); assert.equal(parsed.outOfBandRecord[0].asyncClass, "thread-exited"); assert.equal(parsed.outOfBandRecord[0].output.length, 2); assert.deepEqual(parsed.outOfBandRecord[0].output[0], ["id", "3"]); assert.deepEqual(parsed.outOfBandRecord[0].output[1], ["group-id", "i1"]); assert.equal(parsed.resultRecords, undefined); }); test("Console stream output with new line", () => { const parsed = parseMI(`~"[Thread 0x7fffe993a700 (LWP 11002) exited]\\n"`); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 1); assert.equal(parsed.outOfBandRecord[0].isStream, true); assert.equal(parsed.outOfBandRecord[0].content, "[Thread 0x7fffe993a700 (LWP 11002) exited]\n"); assert.equal(parsed.resultRecords, undefined); }); test("Unicode", () => { let parsed = parseMI(`~"[Depuraci\\303\\263n de hilo usando libthread_db enabled]\\n"`); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 1); assert.equal(parsed.outOfBandRecord[0].isStream, true); assert.equal(parsed.outOfBandRecord[0].content, "[Depuración de hilo usando libthread_db enabled]\n"); assert.equal(parsed.resultRecords, undefined); parsed = parseMI(`~"4\\t std::cout << \\"\\345\\245\\275\\345\\245\\275\\345\\255\\246\\344\\271\\240\\357\\274\\214\\345\\244\\251\\345\\244\\251\\345\\220\\221\\344\\270\\212\\" << std::endl;\\n"`); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 1); assert.equal(parsed.outOfBandRecord[0].isStream, true); assert.equal(parsed.outOfBandRecord[0].content, `4\t std::cout << "好好学习,天天向上" << std::endl;\n`); assert.equal(parsed.resultRecords, undefined);<|fim▁hole|> const parsed = parseMI(``); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 0); assert.equal(parsed.resultRecords, undefined); }); test("'(gdb)' line", () => { const parsed = parseMI(`(gdb)`); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 0); assert.equal(parsed.resultRecords, undefined); }); test("Simple result record", () => { const parsed = parseMI(`1^running`); assert.ok(parsed); assert.equal(parsed.token, 1); assert.equal(parsed.outOfBandRecord.length, 0); assert.notEqual(parsed.resultRecords, undefined); assert.equal(parsed.resultRecords.resultClass, "running"); assert.equal(parsed.resultRecords.results.length, 0); }); test("Advanced out of band record (Breakpoint hit)", () => { const parsed = parseMI(`*stopped,reason="breakpoint-hit",disp="keep",bkptno="1",frame={addr="0x00000000004e807f",func="D main",args=[{name="args",value="..."}],file="source/app.d",fullname="/path/to/source/app.d",line="157"},thread-id="1",stopped-threads="all",core="0"`); assert.ok(parsed); assert.equal(parsed.token, undefined); assert.equal(parsed.outOfBandRecord.length, 1); assert.equal(parsed.outOfBandRecord[0].isStream, false); assert.equal(parsed.outOfBandRecord[0].asyncClass, "stopped"); assert.equal(parsed.outOfBandRecord[0].output.length, 7); 
assert.deepEqual(parsed.outOfBandRecord[0].output[0], ["reason", "breakpoint-hit"]); assert.deepEqual(parsed.outOfBandRecord[0].output[1], ["disp", "keep"]); assert.deepEqual(parsed.outOfBandRecord[0].output[2], ["bkptno", "1"]); const frame = [ ["addr", "0x00000000004e807f"], ["func", "D main"], ["args", [[["name", "args"], ["value", "..."]]]], ["file", "source/app.d"], ["fullname", "/path/to/source/app.d"], ["line", "157"] ]; assert.deepEqual(parsed.outOfBandRecord[0].output[3], ["frame", frame]); assert.deepEqual(parsed.outOfBandRecord[0].output[4], ["thread-id", "1"]); assert.deepEqual(parsed.outOfBandRecord[0].output[5], ["stopped-threads", "all"]); assert.deepEqual(parsed.outOfBandRecord[0].output[6], ["core", "0"]); assert.equal(parsed.resultRecords, undefined); }); test("Advanced result record", () => { const parsed = parseMI(`2^done,asm_insns=[src_and_asm_line={line="134",file="source/app.d",fullname="/path/to/source/app.d",line_asm_insn=[{address="0x00000000004e7da4",func-name="_Dmain",offset="0",inst="push %rbp"},{address="0x00000000004e7da5",func-name="_Dmain",offset="1",inst="mov %rsp,%rbp"}]}]`); assert.ok(parsed); assert.equal(parsed.token, 2); assert.equal(parsed.outOfBandRecord.length, 0); assert.notEqual(parsed.resultRecords, undefined); assert.equal(parsed.resultRecords.resultClass, "done"); assert.equal(parsed.resultRecords.results.length, 1); const asmInsns = [ "asm_insns", [ [ "src_and_asm_line", [ ["line", "134"], ["file", "source/app.d"], ["fullname", "/path/to/source/app.d"], [ "line_asm_insn", [ [ ["address", "0x00000000004e7da4"], ["func-name", "_Dmain"], ["offset", "0"], ["inst", "push %rbp"] ], [ ["address", "0x00000000004e7da5"], ["func-name", "_Dmain"], ["offset", "1"], ["inst", "mov %rsp,%rbp"] ] ] ] ] ] ] ]; assert.deepEqual(parsed.resultRecords.results[0], asmInsns); assert.equal(parsed.result("asm_insns.src_and_asm_line.line_asm_insn[1].address"), "0x00000000004e7da5"); }); test("valueof children", () => { const obj = [ [ "frame", [ ["level", "0"], ["addr", "0x0000000000435f70"], ["func", "D main"], ["file", "source/app.d"], ["fullname", "/path/to/source/app.d"], ["line", "5"] ] ], [ "frame", [ ["level", "1"], ["addr", "0x00000000004372d3"], ["func", "rt.dmain2._d_run_main()"] ] ], [ "frame", [ ["level", "2"], ["addr", "0x0000000000437229"], ["func", "rt.dmain2._d_run_main()"] ] ] ]; assert.equal(MINode.valueOf(obj[0], "@frame.level"), "0"); assert.equal(MINode.valueOf(obj[0], "@frame.addr"), "0x0000000000435f70"); assert.equal(MINode.valueOf(obj[0], "@frame.func"), "D main"); assert.equal(MINode.valueOf(obj[0], "@frame.file"), "source/app.d"); assert.equal(MINode.valueOf(obj[0], "@frame.fullname"), "/path/to/source/app.d"); assert.equal(MINode.valueOf(obj[0], "@frame.line"), "5"); assert.equal(MINode.valueOf(obj[1], "@frame.level"), "1"); assert.equal(MINode.valueOf(obj[1], "@frame.addr"), "0x00000000004372d3"); assert.equal(MINode.valueOf(obj[1], "@frame.func"), "rt.dmain2._d_run_main()"); assert.equal(MINode.valueOf(obj[1], "@frame.file"), undefined); assert.equal(MINode.valueOf(obj[1], "@frame.fullname"), undefined); assert.equal(MINode.valueOf(obj[1], "@frame.line"), undefined); }); test("empty string values", () => { const parsed = parseMI(`15^done,register-names=["r0","pc","","xpsr","","control"]`); const result = parsed.result('register-names'); assert.deepEqual(result, ["r0", "pc", "", "xpsr", "", "control"]); }); test("empty string value first and last", () => { const parsed = 
parseMI(`15^done,register-names=["","r0","pc","","xpsr","","control",""]`); const result = parsed.result('register-names'); assert.deepEqual(result, ["","r0","pc","","xpsr","","control", ""]); }); test("empty array values", () => { const parsed = parseMI(`15^done,foo={x=[],y="y"}`); assert.deepEqual(parsed.result('foo.x'), []); assert.equal(parsed.result('foo.y'), "y"); }); test("empty object values", () => { // GDB may send {} as an empty array const parsed = parseMI(`15^done,foo={x={},y="y"}`); assert.deepEqual(parsed.result('foo.x'), []); assert.equal(parsed.result('foo.y'), "y"); }); });<|fim▁end|>
}); test("Empty line", () => {
<|file_name|>vacuum2Runner.py<|end_file_name|><|fim▁begin|>import agents as ag import envgui as gui # change this line ONLY to refer to your project import submissions.Porter.vacuum2 as v2 # ______________________________________________________________________________ # Vacuum environment class Dirt(ag.Thing): pass class VacuumEnvironment(ag.XYEnvironment): """The environment of [Ex. 2.12]. Agent perceives dirty or clean, and bump (into obstacle) or not; 2D discrete world of unknown size; performance measure is 100 for each dirt cleaned, and -1 for each turn taken.""" def __init__(self, width=4, height=3): super(VacuumEnvironment, self).__init__(width, height) self.add_walls() def thing_classes(self): return [ag.Wall, Dirt, # ReflexVacuumAgent, RandomVacuumAgent, # TableDrivenVacuumAgent, ModelBasedVacuumAgent ] def percept(self, agent): """The percept is a tuple of ('Bump' or 'None', 'Dirty' or 'Clean'). Unlike the TrivialVacuumEnvironment, location is NOT perceived.""" status = ('Dirty' if self.some_things_at( agent.location, Dirt) else 'Clean') bump = ('Bump' if agent.bump else 'None') return (bump, status) def execute_action(self, agent, action): if action == 'Suck': dirt_list = self.list_things_at(agent.location, Dirt) if dirt_list != []: dirt = dirt_list[0] agent.performance += 100 self.delete_thing(dirt) else: super(VacuumEnvironment, self).execute_action(agent, action) if action != 'NoOp':<|fim▁hole|># # Launch a Text-Based Environment # print('Two Cells, Agent on Left:') # v = VacuumEnvironment(4, 3) # v.add_thing(Dirt(), (1, 1)) # v.add_thing(Dirt(), (2, 1)) # a = v2.HW2Agent() # a = ag.TraceAgent(a) # v.add_thing(a, (1, 1)) # t = gui.EnvTUI(v) # t.mapImageNames({ # ag.Wall: '#', # Dirt: '@', # ag.Agent: 'V', # }) # t.step(0) # t.list_things(Dirt) # t.step(4) # if len(t.env.get_things(Dirt)) > 0: # t.list_things(Dirt) # else: # print('All clean!') # # # Check to continue # if input('Do you want to continue [y/N]? ') != 'y': # exit(0) # else: # print('----------------------------------------') # # # Repeat, but put Agent on the Right # print('Two Cells, Agent on Right:') # v = VacuumEnvironment(4, 3) # v.add_thing(Dirt(), (1, 1)) # v.add_thing(Dirt(), (2, 1)) # a = v2.HW2Agent() # a = ag.TraceAgent(a) # v.add_thing(a, (2, 1)) # t = gui.EnvTUI(v) # t.mapImageNames({ # ag.Wall: '#', # Dirt: '@', # ag.Agent: 'V', # }) # t.step(0) # t.list_things(Dirt) # t.step(4) # if len(t.env.get_things(Dirt)) > 0: # t.list_things(Dirt) # else: # print('All clean!') # # # Check to continue # if input('Do you want to continue [y/N]? ') != 'y': # exit(0) # else: # print('----------------------------------------') # # # Repeat, but put Agent on the Right # print('Two Cells, Agent on Top:') # v = VacuumEnvironment(3, 4) # v.add_thing(Dirt(), (1, 1)) # v.add_thing(Dirt(), (1, 2)) # a = v2.HW2Agent() # a = ag.TraceAgent(a) # v.add_thing(a, (1, 1)) # t = gui.EnvTUI(v) # t.mapImageNames({ # ag.Wall: '#', # Dirt: '@', # ag.Agent: 'V', # }) # t.step(0) # t.list_things(Dirt) # t.step(4) # if len(t.env.get_things(Dirt)) > 0: # t.list_things(Dirt) # else: # print('All clean!') # # # Check to continue # if input('Do you want to continue [y/N]? 
') != 'y': # exit(0) # else: # print('----------------------------------------') # # # Repeat, but put Agent on the Right # print('Two Cells, Agent on Bottom:') # v = VacuumEnvironment(3, 4) # v.add_thing(Dirt(), (1, 1)) # v.add_thing(Dirt(), (1, 2)) # a = v2.HW2Agent() # a = ag.TraceAgent(a) # v.add_thing(a, (1, 2)) # t = gui.EnvTUI(v) # t.mapImageNames({ # ag.Wall: '#', # Dirt: '@', # ag.Agent: 'V', # }) # t.step(0) # t.list_things(Dirt) # t.step(4) # if len(t.env.get_things(Dirt)) > 0: # t.list_things(Dirt) # else: # print('All clean!') # # # Check to continue # if input('Do you want to continue [y/N]? ') != 'y': # exit(0) # else: # print('----------------------------------------') def testVacuum(label, w=4, h=3, dloc=[(1,1),(2,1)], vloc=(1,1), limit=6): print(label) v = VacuumEnvironment(w, h) for loc in dloc: v.add_thing(Dirt(), loc) a = v2.HW2Agent() a = ag.TraceAgent(a) v.add_thing(a, vloc) t = gui.EnvTUI(v) t.mapImageNames({ ag.Wall: '#', Dirt: '@', ag.Agent: 'V', }) t.step(0) t.list_things(Dirt) t.step(limit) if len(t.env.get_things(Dirt)) > 0: t.list_things(Dirt) else: print('All clean!') # Check to continue if input('Do you want to continue [Y/n]? ') == 'n': exit(0) else: print('----------------------------------------') testVacuum('Two Cells, Agent on Left:') testVacuum('Two Cells, Agent on Right:', vloc=(2,1)) testVacuum('Two Cells, Agent on Top:', w=3, h=4, dloc=[(1,1), (1,2)], vloc=(1,1) ) testVacuum('Two Cells, Agent on Bottom:', w=3, h=4, dloc=[(1,1), (1,2)], vloc=(1,2) ) testVacuum('Five Cells, Agent on Left:', w=7, h=3, dloc=[(2,1), (4,1)], vloc=(1,1), limit=12) testVacuum('Five Cells, Agent near Right:', w=7, h=3, dloc=[(2,1), (3,1)], vloc=(4,1), limit=12) testVacuum('Five Cells, Agent on Top:', w=3, h=7, dloc=[(1,2), (1,4)], vloc=(1,1), limit=12 ) testVacuum('Five Cells, Agent Near Bottom:', w=3, h=7, dloc=[(1,2), (1,3)], vloc=(1,4), limit=12 ) testVacuum('5x4 Grid, Agent in Top Left:', w=7, h=6, dloc=[(1,4), (2,2), (3, 3), (4,1), (5,2)], vloc=(1,1), limit=46 ) testVacuum('5x4 Grid, Agent near Bottom Right:', w=7, h=6, dloc=[(1,3), (2,2), (3, 4), (4,1), (5,2)], vloc=(4, 3), limit=46 ) v = VacuumEnvironment(6, 3) a = v2.HW2Agent() a = ag.TraceAgent(a) loc = v.random_location_inbounds() v.add_thing(a, location=loc) v.scatter_things(Dirt) g = gui.EnvGUI(v, 'Vacuum') c = g.getCanvas() c.mapImageNames({ ag.Wall: 'images/wall.jpg', # Floor: 'images/floor.png', Dirt: 'images/dirt.png', ag.Agent: 'images/vacuum.png', }) c.update() g.mainloop()<|fim▁end|>
agent.performance -= 1
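For reference, a minimal agent program consuming the (bump, status) percept returned by VacuumEnvironment.percept above could look like the sketch below; only 'Suck' is grounded in this file, and the movement action names are assumptions about the underlying XYEnvironment.

def simple_vacuum_program(percept):
    # percept arrives as (bump, status), per VacuumEnvironment.percept
    bump, status = percept
    if status == 'Dirty':
        return 'Suck'  # earns +100 in execute_action when dirt is present
    # 'TurnRight' / 'Forward' are assumed XYEnvironment-style actions
    return 'TurnRight' if bump == 'Bump' else 'Forward'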
<|file_name|>TimeSyncProxy.java<|end_file_name|><|fim▁begin|>package me.tatarka.timesync.lib; import android.content.Context; import java.util.Arrays; /** * A class for interacting with a {@link TimeSync}. You can get and set its configuration, and * force it to sync immediately. To get an instance of the class for a given {@link TimeSync}, use * {@link TimeSync#get(android.content.Context, Class)}. */ public final class TimeSyncProxy { private Context context; private String name; private TimeSync listener; TimeSyncProxy(Context context, String name) { this.context = context; this.name = name; listener = TimeSyncParser.parseListeners(context).get(name); } /** * Syncs immediately. This is useful for a response to a user action. Use this sparingly, as * frequent syncs defeat the purpose of using this library. */ public void sync() { TimeSyncService.sync(context, name); } /** * Syncs sometime in the near future, randomizing per device. This is useful in response to a * server message, using GCM for example, so that the server is not overwhelmed with all devices * trying to sync at once. */ public void syncInexact() { TimeSyncService.syncInexact(context, name); } /** * Gets the current configuration for the {@link TimeSync}. * * @return the configuration * @see TimeSync.Config */ public TimeSync.Config config() { return listener.config(); } /** * Modifies the current configuration for the {@link TimeSync}. * * @param edits the edits * @see TimeSync#edit(TimeSync.Edit...)<|fim▁hole|> } /** * Modifies the current configuration for the {@link TimeSync}. * * @param edits the edits * @see TimeSync#edit(TimeSync.Edit...) */ public void edit(TimeSync.Edit... edits) { edit(Arrays.asList(edits)); } }<|fim▁end|>
*/ public void edit(Iterable<TimeSync.Edit> edits) { listener.edit(edits); TimeSyncService.update(context, name);
<|file_name|>dialog.ts<|end_file_name|><|fim▁begin|>// SPDX-FileCopyrightText: 2018-2020 The Manyverse Authors // // SPDX-License-Identifier: MPL-2.0 import {Stream} from 'xstream'; import {Image} from 'react-native-image-crop-picker'; import {DialogSource} from '../../drivers/dialogs'; import {t} from '../../drivers/localization'; import {Palette} from '../../global-styles/palette'; export type Actions = { openContentWarning$: Stream<any>; addPicture$: Stream<Image>; };<|fim▁hole|>export default function dialog(actions: Actions, dialogSource: DialogSource) { return { updateContentWarning$: actions.openContentWarning$ .map(() => dialogSource.prompt( t('compose.dialogs.content_warning.title'), t('compose.dialogs.content_warning.description'), { ...Palette.dialogColors, positiveText: t('call_to_action.done'), negativeText: t('call_to_action.cancel'), }, ), ) .flatten() .filter((res) => res.action === 'actionPositive') .map((res) => (res as any).text as string), addPictureWithCaption$: actions.addPicture$ .map((image) => dialogSource .prompt( t('compose.dialogs.image_caption.title'), t('compose.dialogs.image_caption.description'), { ...Palette.dialogColors, positiveColor: Palette.textDialogStrong, positiveText: t('call_to_action.done'), }, ) .map((res) => ({caption: (res as any).text, image})), ) .flatten(), }; }<|fim▁end|>
<|file_name|>mattex.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys, re output = open(sys.argv[1]) output = output.read() output = re.split('thisisalinebreak =',output) <|fim▁hole|> if line == "<?ml\n": matlab = True j = 0 for oline in output[i].split('\n'): if (j > 2) & (re.match('^(\s+[^\s]+|[^=]+)$',oline) != None): if oline.strip() != '': print oline j += 1 i += 1 if line == "?>\n": matlab = False continue if not matlab: print line,<|fim▁end|>
f = open(sys.argv[2]) i = 1 matlab = False for line in f:
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ website.api ~~~~~~~~~~~<|fim▁hole|>"""<|fim▁end|>
website api blueprint.
<|file_name|>BufferedWriter.java<|end_file_name|><|fim▁begin|>// The MIT License (MIT) // Copyright © 2015 AppsLandia. All rights reserved. // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. package com.appslandia.common.base; import java.io.IOException; import java.io.Writer; /** * @see java.io.BufferedWriter * * @author <a href="mailto:[email protected]">Loc Ha</a> * */ public class BufferedWriter extends Writer { private Writer out; private char cb[]; private int nChars, nextChar; private static int defaultCharBufferSize = 8192; public BufferedWriter(Writer out) { this(out, defaultCharBufferSize); } public BufferedWriter(Writer out, int sz) { super(out); if (sz <= 0) throw new IllegalArgumentException("sz"); this.out = out; cb = new char[sz]; nChars = sz; nextChar = 0; } private void ensureOpen() throws IOException { if (out == null) throw new IOException("Stream closed"); } void flushBuffer() throws IOException { ensureOpen(); if (nextChar == 0) return; out.write(cb, 0, nextChar); nextChar = 0; } public void write(int c) throws IOException { ensureOpen(); if (nextChar >= nChars) flushBuffer(); cb[nextChar++] = (char) c; } private int min(int a, int b) { if (a < b) return a; return b; } public void write(char cbuf[], int off, int len) throws IOException { ensureOpen(); if ((off < 0) || (off > cbuf.length) || (len < 0) || ((off + len) > cbuf.length) || ((off + len) < 0)) { throw new IndexOutOfBoundsException(); } else if (len == 0) { return; } if (len >= nChars) { /* * If the request length exceeds the size of the output buffer, flush the buffer and then write the data directly. In this way buffered streams will cascade harmlessly. */ flushBuffer(); out.write(cbuf, off, len); return; } int b = off, t = off + len; while (b < t) { int d = min(nChars - nextChar, t - b); System.arraycopy(cbuf, b, cb, nextChar, d); b += d; nextChar += d; if (nextChar >= nChars) flushBuffer(); } } public void write(String s, int off, int len) throws IOException { ensureOpen(); int b = off, t = off + len; while (b < t) { int d = min(nChars - nextChar, t - b); s.getChars(b, b + d, cb, nextChar); b += d; nextChar += d; if (nextChar >= nChars) flushBuffer(); } } public void newLine() throws IOException { write(System.lineSeparator(), 0, System.lineSeparator().length()); } public void flush() throws IOException { flushBuffer(); out.flush(); } public void close() throws IOException { if (out == null) { return; }<|fim▁hole|> try (Writer w = out) { flushBuffer(); } finally { out = null; cb = null; } } }<|fim▁end|>
<|file_name|>validation.py<|end_file_name|><|fim▁begin|>import difflib import inflect import itertools import logging import netaddr import os import re import toposort import yaml import hotcidr.state def inflect_a(s, p=inflect.engine()): x = p.plural(s) if p.compare(s, x) == 'p:s': return s return p.a(s) logging.basicConfig(format='%(levelname)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') class Validator(object): logger = logging.getLogger('validation') info = logger.warn warn = logger.warn error = logger.error fatal = logger.fatal def load(self, x): if x not in self.files: try: with open(os.path.join(self.rootdir, x)) as f: try: self.files[x] = hotcidr.state.load(f) except yaml.YAMLError: self.fatal("Invalid YAML file %s" % x) except IOError: self.fatal("Could not read file %s" % x) return self.files[x] def register_check(self, f): if f not in self.checks: self.checks.append(f) else: raise Exception("Function %s is already registered" % f.__name__) def register_checks(self, *fs): for f in fs: self.register_check(f) required_map = {} def validate(self, wrap=True): # TODO: spawn multiple processes l = {f: Validator.required_map[f] if f in Validator.required_map else set() for f in self.checks} for f in toposort.toposort_flatten(l, sort=False): if wrap: try: f(self) except: self.fatal("Unexpected exception raised by %s" % f.__name__) raise else: f(self) def __init__(self, rootdir): self.rootdir = rootdir self.checks = [] self.files = {} def has_rules(g): for i in g: if isinstance(i, tuple): if len(i) > 1 and 'rules' in i[1]: yield i elif 'rules' in i: yield i def requires(*a): def decorator(f): Validator.required_map[f] = set(a) return f return decorator def load_groups(self, forced=False): if forced or not hasattr(self, 'groups'): groupsdir = os.path.join(self.rootdir, 'groups') groups = os.listdir(groupsdir) self.groups = {} for x in groups: if os.path.isfile(os.path.join(groupsdir, x)): if x.endswith('.yaml'): self.groups[x[:-5]] = self.load(os.path.join('groups', x)) def load_boxes(self, forced=False): if forced or not hasattr(self, 'boxes'): self.boxes = self.load('boxes.yaml') @requires(load_groups, load_boxes) def find_unused_groups(self): #TODO: include groups used in 'location' field used = set(itertools.chain(*(b['groups'] for b in self.boxes.values() if 'groups' in b))) for g in set(self.groups.keys()) - used: self.info("Group %s is unused" % g) @requires(load_groups, load_boxes) def validate_groups(self): used = set(itertools.chain(*(b['groups'] for b in self.boxes.values() if 'groups' in b))) valid_groups = set(self.groups.keys()) for g in used - valid_groups: guess = difflib.get_close_matches(g, valid_groups) if guess: guess = " (Did you mean %s?)" % guess[0] else: guess = "" self.fatal("%s is not defined%s" % (g, guess)) @requires(load_groups)<|fim▁hole|> valid_chars = set( 'abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789' ' ._-:/()#,@[]+=&;{}!$*' ) for name in self.groups.keys(): if any(c not in valid_chars for c in name): self.fatal("%s is not a valid group name" % name) @requires(load_boxes) def validate_aws_instance_id(self): for name in self.boxes.keys(): if not re.match(r'^i\-[0-9a-f]{8}$', name): self.fatal("Instance ID %s is not a valid AWS instance ID" % name) @requires(load_groups) def validate_aws_group_id(self): seen = {} for group_name, group in self.groups.items(): if 'id' in group: name = group['id'] if not re.match(r'^sg\-[0-9a-f]{8}$', name): self.fatal("%s has an invalid AWS group ID" % group_name) elif name in seen: if seen[name]: 
self.fatal("%s has a duplicate AWS group ID" % seen[name]) seen[name] = False self.fatal("%s has a duplicate AWS group ID" % group_name) else: seen[name] = group_name @requires(load_groups) def validate_protocols(self): for group_name, group in has_rules(self.groups.iteritems()): for rule_num, rule in enumerate(group['rules'], 1): if 'protocol' not in rule: self.error("Rule %d in %s is missing a protocol" % (rule_num, group_name)) elif rule['protocol'] == '-1': self.error("Rule %d in %s has an invalid protocol" % (rule_num, group_name)) @requires(load_groups) def validate_ports(self): #TODO: handle ICMP fromport def port(x, default=-1): try: r = int(x) if 1 <= r <= 65535: return r except ValueError: pass for group_name, group in has_rules(self.groups.iteritems()): for rule_num, rule in enumerate(group['rules'], 1): valid = True if 'fromport' not in rule: self.error("Rule %d in %s is missing a fromport" % (rule_num, group_name)) valid = False if 'toport' not in rule: self.error("Rule %d in %s is missing a toport" % (rule_num, group_name)) valid = False if valid: fromport = port(rule['fromport']) toport = port(rule['toport']) valid = True if not fromport: self.error("Rule %d in %s has an invalid fromport" % (rule_num, group_name)) valid = False if not toport: self.error("Rule %d in %s has an invalid toport" % (rule_num, group_name)) valid = False if valid: if fromport > toport: self.error("Rule %d in %s has an invalid port range" % (rule_num, group_name)) elif (toport - fromport) >= 100: self.warn("Rule %d in %s has a large port range" % (rule_num, group_name)) @requires(load_groups) def validate_rule_fields(self): for group_name, group in has_rules(self.groups.iteritems()): for rule_num, rule in enumerate(group['rules'], 1): for field in ('description',): if field not in rule: self.warn("Rule %d in %s is missing %s" % (rule_num, group_name, inflect_a(field))) @requires(load_groups) def validate_group_fields(self): for group_name, group in self.groups.iteritems(): for field in ('description', 'rules'): if field not in group: self.warn("%s is missing %s" % (group_name, inflect_a(field))) @requires(load_boxes) def validate_instance_fields(self): for box_id, box in self.boxes.iteritems(): for field in ('ip', 'domain', 'groups'): if field not in box: self.warn("Box %s is missing %s" % (box_id, inflect_a(field))) @requires(load_groups) def validate_locations(self): valid_groups = set(self.groups.keys()) for group_name, group in has_rules(self.groups.iteritems()): for rule_num, rule in enumerate(group['rules'], 1): if 'location' in rule: if rule['location'] not in valid_groups: try: ip = netaddr.IPNetwork(rule['location']) if str(ip.cidr) != rule['location']: self.warn("Location for rule %d in %s " "will be interpreted as %s" % (rule_num, group_name, ip.cidr)) except netaddr.AddrFormatError: self.error("Rule %d in %s has an invalid location" % (rule_num, group_name)) else: self.error("Rule %d in %s is missing a location" % (rule_num, group_name))<|fim▁end|>
def validate_group_names(self):
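A minimal usage sketch for the Validator class above; the repository path is illustrative, and only names defined in this file are used.

v = Validator('/path/to/rules/repo')          # hypothetical rootdir
v.register_checks(load_groups, load_boxes,
                  find_unused_groups, validate_groups)
v.validate()  # toposort runs @requires dependencies before dependent checks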
<|file_name|>lda_testing.py<|end_file_name|><|fim▁begin|>__author__ = 'fpena' import numpy as np import lda import lda.datasets def run(): # document-term matrix X = lda.datasets.load_reuters() print("type(X): {}".format(type(X))) print("shape: {}\n".format(X.shape)) # the vocab vocab = lda.datasets.load_reuters_vocab() print("type(vocab): {}".format(type(vocab))) print("len(vocab): {}\n".format(len(vocab))) # titles for each story titles = lda.datasets.load_reuters_titles() print("type(titles): {}".format(type(titles))) print("len(titles): {}\n".format(len(titles))) doc_id = 0 word_id = 3117 print("doc id: {} word id: {}".format(doc_id, word_id)) print("-- count: {}".format(X[doc_id, word_id])) print("-- word : {}".format(vocab[word_id])) print("-- doc : {}".format(titles[doc_id])) model = lda.LDA(n_topics=20, n_iter=500, random_state=1) model.fit(X) topic_word = model.topic_word_ print("type(topic_word): {}".format(type(topic_word))) print("shape: {}".format(topic_word.shape)) for n in range(5): sum_pr = sum(topic_word[n,:]) print("topic: {} sum: {}".format(n, sum_pr)) n = 5 for i, topic_dist in enumerate(topic_word): topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n+1):-1] print('*Topic {}\n- {}'.format(i, ' '.join(topic_words))) doc_topic = model.doc_topic_ print("type(doc_topic): {}".format(type(doc_topic))) print("shape: {}".format(doc_topic.shape)) for n in range(5): sum_pr = sum(doc_topic[n,:]) print("document: {} sum: {}".format(n, sum_pr)) for n in range(10): topic_most_pr = doc_topic[n].argmax() print("doc: {} topic: {}\n{}...".format(n, topic_most_pr, titles[n][:50]))<|fim▁hole|> print('Dataset shape', reuters_dataset.shape) print(reuters_dataset[0].shape) print('Vocab shape', len(vocab)) print(vocab[0]) print('Titles shape', len(titles)) print(titles[0]) print(titles[1]) print(titles[100]) for word in reuters_dataset[0]: if word > 1: print(word)<|fim▁end|>
reuters_dataset = lda.datasets.load_reuters() vocab = lda.datasets.load_reuters_vocab() titles = lda.datasets.load_reuters_titles()
<|file_name|>FormRigDisplay.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Form implementation generated from reading ui file 'UI_RigDisplay.ui' # # Created: Wed Mar 21 21:43:33 2018 # by: pyside-uic 0.2.14 running on PySide 1.2.0 # # WARNING! All changes made in this file will be lost! from PySide import QtCore, QtGui class Ui_Form(object): def setupUi(self, Form): Form.setObjectName("Form") Form.resize(236, 179) self.verticalLayout = QtGui.QVBoxLayout(Form) self.verticalLayout.setObjectName("verticalLayout") self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName("horizontalLayout") self.ChangeJointdrawStyle = QtGui.QPushButton(Form) self.ChangeJointdrawStyle.setMaximumSize(QtCore.QSize(100, 30)) self.ChangeJointdrawStyle.setObjectName("ChangeJointdrawStyle") self.horizontalLayout.addWidget(self.ChangeJointdrawStyle) self.verticalLayout.addLayout(self.horizontalLayout) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8)) self.ChangeJointdrawStyle.setText(QtGui.QApplication.translate("Form", "Joint DrawStyle", None, QtGui.QApplication.UnicodeUTF8))<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>StatefulIntersectionProps.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; export interface StatefulProps { /** myProp description */ myProp: string; } export interface StatefulMoreProps { /** moreProp description */ moreProp: number; } /** StatefulIntersectionProps description */<|fim▁hole|> StatefulProps & StatefulMoreProps > { render() { return <div />; } }<|fim▁end|>
export class StatefulIntersectionProps extends React.Component<
<|file_name|>LUIInputField.py<|end_file_name|><|fim▁begin|>import re from LUIObject import LUIObject from LUISprite import LUISprite from LUILabel import LUILabel from LUIInitialState import LUIInitialState from LUILayouts import LUIHorizontalStretchedLayout __all__ = ["LUIInputField"] class LUIInputField(LUIObject): """ Simple input field, accepting text input. This input field supports entering text and navigating. Selecting text is (currently) not supported. The input field also supports various keyboard shortcuts: [pos1] Move to the beginning of the text [end] Move to the end of the text [arrow_left] Move one character to the left [arrow_right] Move one character to the right [ctrl] + [arrow_left] Move to the left, skipping over words [ctrl] + [arrow_right] Move to the right, skipping over words [escape] Un-focus input element """ re_skip = re.compile("\W*\w+\W") def __init__(self, parent=None, width=200, placeholder=u"Enter some text ..", value=u"", **kwargs): """ Constructs a new input field. An input field always needs a width specified """ LUIObject.__init__(self, x=0, y=0, solid=True) self.set_width(width) self._layout = LUIHorizontalStretchedLayout(parent=self, prefix="InputField", width="100%") # Container for the text self._text_content = LUIObject(self) self._text_content.margin = (5, 7, 5, 7) self._text_content.clip_bounds = (0,0,0,0) self._text_content.set_size("100%", "100%") # Scroller for the text, so we can move right and left self._text_scroller = LUIObject(parent=self._text_content) self._text_scroller.center_vertical = True self._text = LUILabel(parent=self._text_scroller, text="") # Cursor for the current position self._cursor = LUISprite(self._text_scroller, "blank", "skin", x=0, y=0, w=2, h=15) self._cursor.color = (0.5, 0.5, 0.5) self._cursor.margin.top = 2 self._cursor.z_offset = 20 self._cursor_index = 0 self._cursor.hide() self._value = value # Placeholder text, shown when out of focus and no value exists self._placeholder = LUILabel(parent=self._text_content, text=placeholder, shadow=False, center_vertical=True, alpha=0.2) # Various states self._tickrate = 1.0 self._tickstart = 0.0 self._render_text() if parent is not None: self.parent = parent LUIInitialState.init(self, kwargs) @property def value(self): """ Returns the value of the input field """ return self._value @value.setter def value(self, new_value): """ Sets the value of the input field """ self._value = new_value self._render_text() self.trigger_event("changed", self._value) def clear(self): """ Clears the input value """ self.value = u"" @property def cursor_pos(self): """ Set the cursor position """ return self._cursor_index @cursor_pos.setter def cursor_pos(self, pos): """ Set the cursor position """ if pos >= 0: self._cursor_index = max(0, min(len(self._value), pos)) else: self._cursor_index = max(len(self._value) + pos + 1, 0) self._reset_cursor_tick() self._render_text() def on_tick(self, event): """ Tick handler, gets executed every frame """ frame_time = globalClock.get_frame_time() - self._tickstart show_cursor = frame_time % self._tickrate < 0.5 * self._tickrate if show_cursor: self._cursor.color = (0.5, 0.5, 0.5, 1) else: self._cursor.color = (1, 1, 1, 0) def on_click(self, event): """ Internal on click handler """ self.request_focus() def on_mousedown(self, event): """ Internal mousedown handler """ local_x_offset = self._text.text_handle.get_relative_pos(event.coordinates).x self.cursor_pos = self._text.text_handle.get_char_index(local_x_offset) def _reset_cursor_tick(self): """ 
Internal method to reset the cursor tick """ self._tickstart = globalClock.get_frame_time() def on_focus(self, event): """ Internal focus handler """ self._cursor.show() self._placeholder.hide() self._reset_cursor_tick() self._layout.color = (0.9, 0.9, 0.9, 1) def on_keydown(self, event): """ Internal keydown handler. Processes the special keys, and if none are present, redirects the event """ key_name = event.message if key_name == "backspace": self._value = self._value[:max(0, self._cursor_index - 1)] + self._value[self._cursor_index:] self.cursor_pos -= 1 self.trigger_event("changed", self._value) elif key_name == "delete": post_value = self._value[min(len(self._value), self._cursor_index + 1):] self._value = self._value[:self._cursor_index] + post_value self.cursor_pos = self._cursor_index self.trigger_event("changed", self._value) elif key_name == "arrow_left": if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"): self.cursor_skip_left() else: self.cursor_pos -= 1 elif key_name == "arrow_right": if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"): self.cursor_skip_right() else: self.cursor_pos += 1 elif key_name == "escape": self.blur() elif key_name == "home": self.cursor_pos = 0 elif key_name == "end": self.cursor_pos = len(self.value) <|fim▁hole|> """ Internal keyrepeat handler """ self.on_keydown(event) def on_textinput(self, event): """ Internal textinput handler """ self._value = self._value[:self._cursor_index] + event.message + \ self._value[self._cursor_index:] self.cursor_pos = self._cursor_index + len(event.message) self.trigger_event("changed", self._value) def on_blur(self, event): """ Internal blur handler """ self._cursor.hide() if len(self._value) < 1: self._placeholder.show() self._layout.color = (1, 1, 1, 1) def _render_text(self): """ Internal method to render the text """ self._text.set_text(self._value) self._cursor.left = self._text.left + \ self._text.text_handle.get_char_pos(self._cursor_index) + 1 max_left = self.width - 15 if self._value: self._placeholder.hide() else: if not self.focused: self._placeholder.show() # Scroll if the cursor is outside of the clip bounds rel_pos = self.get_relative_pos(self._cursor.get_abs_pos()).x if rel_pos >= max_left: self._text_scroller.left = min(0, max_left - self._cursor.left) if rel_pos <= 0: self._text_scroller.left = min(0, - self._cursor.left - rel_pos) def cursor_skip_left(self): """ Moves the cursor to the left, skipping the previous word """ left_hand_str = ''.join(reversed(self.value[0:self.cursor_pos])) match = self.re_skip.match(left_hand_str) if match is not None: self.cursor_pos -= match.end() - 1 else: self.cursor_pos = 0 def cursor_skip_right(self): """ Moves the cursor to the right, skipping the next word """ right_hand_str = self.value[self.cursor_pos:] match = self.re_skip.match(right_hand_str) if match is not None: self.cursor_pos += match.end() - 1 else: self.cursor_pos = len(self.value)<|fim▁end|>
self.trigger_event(key_name, self._value) def on_keyrepeat(self, event):
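A minimal construction sketch for LUIInputField as documented above, assuming `root` is an existing parent LUIObject.

field = LUIInputField(parent=root, width=250, placeholder=u"Your name ..")
field.value = u"hello"   # the setter re-renders and fires a "changed" event
field.cursor_pos = -1    # negative positions count back from the end of the value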
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/** * Module dependencies. */ var express = require('express') , http = require('http') , path = require('path') , mongo = require('mongodb') , format = require('util').format; var app = express(); <|fim▁hole|>app.set('view engine', 'jade'); app.use(express.favicon()); app.use(express.logger('dev')); app.use(express.bodyParser()); app.use(express.methodOverride()); app.use(app.router); app.use(express.static(path.join(__dirname, 'public'))); // development only if ('development' == app.get('env')) { app.use(express.errorHandler()); } var server = new mongo.Server("localhost", 27017, {auto_reconnect: true}); var dbManager = new mongo.Db("applique-web", server, {safe:true}); dbManager.open(function(err, db) { require('./routes/index')(app, db); app.listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); }); });<|fim▁end|>
// all environments app.set('port', process.env.PORT || 3000); app.set('views', __dirname + '/views');
<|file_name|>UIMenuControllerArrowDirection.java<|end_file_name|><|fim▁begin|>package apple.uikit; import java.io.*; import java.nio.*; import java.util.*; import com.google.j2objc.annotations.*; import com.google.j2objc.runtime.*; import com.google.j2objc.runtime.block.*; import apple.audiotoolbox.*; import apple.corefoundation.*; import apple.coregraphics.*; import apple.coreservices.*; import apple.foundation.*; import apple.coreanimation.*; import apple.coredata.*;<|fim▁hole|>import apple.corelocation.*; @Library("UIKit/UIKit.h") @Mapping("UIMenuControllerArrowDirection") public final class UIMenuControllerArrowDirection extends ObjCEnum { @GlobalConstant("UIMenuControllerArrowDefault") public static final long Default = 0L; @GlobalConstant("UIMenuControllerArrowUp") public static final long Up = 1L; @GlobalConstant("UIMenuControllerArrowDown") public static final long Down = 2L; @GlobalConstant("UIMenuControllerArrowLeft") public static final long Left = 3L; @GlobalConstant("UIMenuControllerArrowRight") public static final long Right = 4L; }<|fim▁end|>
import apple.coreimage.*; import apple.coretext.*;
<|file_name|>hdu.py<|end_file_name|><|fim▁begin|>import time import requests import answer min_question_id = 1000 max_question_id = 5604 sleep_time = 10 submit_url = 'http://acm.hdu.edu.cn/submit.php?action=submit' login_url = 'http://acm.hdu.edu.cn/userloginex.php?action=login' headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36', } class HDU: def __init__(self, username, password): # os.chdir(sys.path[0]) self.session = requests.Session() self.session.headers = headers self.username = username self.password = password self.is_login = False # self.login(username, password) def login(self): data = { "userpass": self.password, "username": self.username, 'login': 'Sign In' } res = self.session.post(login_url, data=data) # print(res.text.encode('UTF-8')) if res.status_code == 200: self.is_login = True return True return False def submit(self, problem, code): data = { "check": "0", "problemid": str(problem), "usercode": code, "language": "0" } res = self.session.post(submit_url, data=data) # print(res.text.encode('UTF-8')) # TODO if res.status_code == 200:<|fim▁hole|> return True return False def get_session(self): return self.session # TODO def get_state(self, problem): return False def solve(user, id): answers = answer.get_answer("%s%d" % ('hdu', id)) if answers is None or answers == []: print(None) return None count = 1 for ans in answers: # print(ans) user.submit(id, ans) print('submit', count) if count == 2: break count += 1 time.sleep(sleep_time) return None def hdu(): print("HDU:") # name = input("Your username: ") # password = input("Your password: ") name = 'printhello' password = '123456' my = HDU(name, password) if my.login() is False: return # return for i in range(4979, max_question_id): print('id :', i) solve(my, i) time.sleep(sleep_time) return None if __name__ == '__main__': hdu()<|fim▁end|>
<|file_name|>option.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use core::option::*; use core::marker; use core::mem; use core::clone::Clone; #[test] fn test_get_ptr() { unsafe { let x = box 0; let addr_x: *const int = mem::transmute(&*x); let opt = Some(x); let y = opt.unwrap(); let addr_y: *const int = mem::transmute(&*y); assert_eq!(addr_x, addr_y); } } #[test] fn test_get_str() { let x = "test".to_string(); let addr_x = x.as_ptr(); let opt = Some(x); let y = opt.unwrap(); let addr_y = y.as_ptr(); assert_eq!(addr_x, addr_y); } #[test] fn test_get_resource() { use std::rc::Rc; use core::cell::RefCell; struct R { i: Rc<RefCell<int>>, } #[unsafe_destructor]<|fim▁hole|> impl Drop for R { fn drop(&mut self) { let ii = &*self.i; let i = *ii.borrow(); *ii.borrow_mut() = i + 1; } } fn r(i: Rc<RefCell<int>>) -> R { R { i: i } } let i = Rc::new(RefCell::new(0)); { let x = r(i.clone()); let opt = Some(x); let _y = opt.unwrap(); } assert_eq!(*i.borrow(), 1); } #[test] fn test_option_dance() { let x = Some(()); let mut y = Some(5); let mut y2 = 0; for _x in x.iter() { y2 = y.take().unwrap(); } assert_eq!(y2, 5); assert!(y.is_none()); } #[test] #[should_fail] fn test_option_too_much_dance() { let mut y = Some(marker::NoCopy); let _y2 = y.take().unwrap(); let _y3 = y.take().unwrap(); } #[test] fn test_and() { let x: Option<int> = Some(1); assert_eq!(x.and(Some(2)), Some(2)); assert_eq!(x.and(None::<int>), None); let x: Option<int> = None; assert_eq!(x.and(Some(2)), None); assert_eq!(x.and(None::<int>), None); } #[test] fn test_and_then() { let x: Option<int> = Some(1); assert_eq!(x.and_then(|x| Some(x + 1)), Some(2)); assert_eq!(x.and_then(|_| None::<int>), None); let x: Option<int> = None; assert_eq!(x.and_then(|x| Some(x + 1)), None); assert_eq!(x.and_then(|_| None::<int>), None); } #[test] fn test_or() { let x: Option<int> = Some(1); assert_eq!(x.or(Some(2)), Some(1)); assert_eq!(x.or(None), Some(1)); let x: Option<int> = None; assert_eq!(x.or(Some(2)), Some(2)); assert_eq!(x.or(None), None); } #[test] fn test_or_else() { let x: Option<int> = Some(1); assert_eq!(x.or_else(|| Some(2)), Some(1)); assert_eq!(x.or_else(|| None), Some(1)); let x: Option<int> = None; assert_eq!(x.or_else(|| Some(2)), Some(2)); assert_eq!(x.or_else(|| None), None); } #[test] fn test_unwrap() { assert_eq!(Some(1).unwrap(), 1); let s = Some("hello".to_string()).unwrap(); assert_eq!(s, "hello"); } #[test] #[should_fail] fn test_unwrap_panic1() { let x: Option<int> = None; x.unwrap(); } #[test] #[should_fail] fn test_unwrap_panic2() { let x: Option<String> = None; x.unwrap(); } #[test] fn test_unwrap_or() { let x: Option<int> = Some(1); assert_eq!(x.unwrap_or(2), 1); let x: Option<int> = None; assert_eq!(x.unwrap_or(2), 2); } #[test] fn test_unwrap_or_else() { let x: Option<int> = Some(1); assert_eq!(x.unwrap_or_else(|| 2), 1); let x: Option<int> = None; assert_eq!(x.unwrap_or_else(|| 2), 2); } #[test] fn test_iter() { let val = 5; let x = Some(val); let mut it = x.iter(); assert_eq!(it.size_hint(), (1, Some(1))); assert_eq!(it.next(), Some(&val)); assert_eq!(it.size_hint(), (0, 
Some(0))); assert!(it.next().is_none()); } #[test] fn test_mut_iter() { let val = 5; let new_val = 11; let mut x = Some(val); { let mut it = x.iter_mut(); assert_eq!(it.size_hint(), (1, Some(1))); match it.next() { Some(interior) => { assert_eq!(*interior, val); *interior = new_val; } None => assert!(false), } assert_eq!(it.size_hint(), (0, Some(0))); assert!(it.next().is_none()); } assert_eq!(x, Some(new_val)); } #[test] fn test_ord() { let small = Some(1.0f64); let big = Some(5.0f64); let nan = Some(0.0f64/0.0); assert!(!(nan < big)); assert!(!(nan > big)); assert!(small < big); assert!(None < big); assert!(big > None); } /* FIXME(#20575) #[test] fn test_collect() { let v: Option<Vec<int>> = (0..0).map(|_| Some(0)).collect(); assert!(v == Some(vec![])); let v: Option<Vec<int>> = (0..3).map(|x| Some(x)).collect(); assert!(v == Some(vec![0, 1, 2])); let v: Option<Vec<int>> = (0..3).map(|x| { if x > 1 { None } else { Some(x) } }).collect(); assert!(v == None); // test that it does not take more elements than it needs let mut functions: [Box<Fn() -> Option<()>>; 3] = [box || Some(()), box || None, box || panic!()]; let v: Option<Vec<()>> = functions.iter_mut().map(|f| (*f)()).collect(); assert!(v == None); } */ #[test] fn test_cloned() { let val1 = 1u32; let mut val2 = 2u32; let val1_ref = &val1; let opt_none: Option<&'static u32> = None; let opt_ref = Some(&val1); let opt_ref_ref = Some(&val1_ref); let opt_mut_ref = Some(&mut val2); // None works assert_eq!(opt_none.clone(), None); assert_eq!(opt_none.cloned(), None); // Mutable refs work assert_eq!(opt_mut_ref.cloned(), Some(2u32)); // Immutable ref works assert_eq!(opt_ref.clone(), Some(&val1)); assert_eq!(opt_ref.cloned(), Some(1u32)); // Double Immutable ref works assert_eq!(opt_ref_ref.clone(), Some(&val1_ref)); assert_eq!(opt_ref_ref.clone().cloned(), Some(&val1)); assert_eq!(opt_ref_ref.cloned().cloned(), Some(1u32)); }<|fim▁end|>
<|file_name|>status_icon_linux_wrapper.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/views/status_icons/status_icon_linux_wrapper.h" #include <memory> #include "base/feature_list.h" #include "base/memory/ptr_util.h" #include "base/memory/scoped_refptr.h" #include "chrome/browser/ui/ui_features.h" #include "chrome/browser/ui/views/status_icons/status_icon_button_linux.h" #include "ui/message_center/public/cpp/notifier_id.h" #if defined(USE_DBUS) #include "chrome/browser/ui/views/status_icons/status_icon_linux_dbus.h" #endif namespace { gfx::ImageSkia GetBestImageRep(const gfx::ImageSkia& image) { float best_scale = 0.0f; SkBitmap best_rep; for (const auto& rep : image.image_reps()) { if (rep.scale() > best_scale) { best_scale = rep.scale(); best_rep = rep.GetBitmap(); } } // All status icon implementations want the image in pixel coordinates, so use // a scale factor of 1. return gfx::ImageSkia::CreateFromBitmap(best_rep, 1.0f); } } // namespace StatusIconLinuxWrapper::StatusIconLinuxWrapper( views::StatusIconLinux* status_icon, StatusIconType status_icon_type, const gfx::ImageSkia& image, const std::u16string& tool_tip) : status_icon_(status_icon), status_icon_type_(status_icon_type), image_(GetBestImageRep(image)), tool_tip_(tool_tip) { status_icon_->SetDelegate(this); } #if defined(USE_DBUS) StatusIconLinuxWrapper::StatusIconLinuxWrapper( scoped_refptr<StatusIconLinuxDbus> status_icon, const gfx::ImageSkia& image, const std::u16string& tool_tip) : StatusIconLinuxWrapper(status_icon.get(), kTypeDbus, image, tool_tip) { status_icon_dbus_ = status_icon; } #endif StatusIconLinuxWrapper::StatusIconLinuxWrapper( std::unique_ptr<views::StatusIconLinux> status_icon, StatusIconType status_icon_type, const gfx::ImageSkia& image, const std::u16string& tool_tip) : StatusIconLinuxWrapper(status_icon.get(), status_icon_type, image, tool_tip) { status_icon_linux_ = std::move(status_icon); } StatusIconLinuxWrapper::~StatusIconLinuxWrapper() { }<|fim▁hole|> if (status_icon_) status_icon_->SetIcon(image_); } void StatusIconLinuxWrapper::SetToolTip(const std::u16string& tool_tip) { tool_tip_ = tool_tip; if (status_icon_) status_icon_->SetToolTip(tool_tip); } void StatusIconLinuxWrapper::DisplayBalloon( const gfx::ImageSkia& icon, const std::u16string& title, const std::u16string& contents, const message_center::NotifierId& notifier_id) { notification_.DisplayBalloon(icon, title, contents, notifier_id); } void StatusIconLinuxWrapper::OnClick() { DispatchClickEvent(); } bool StatusIconLinuxWrapper::HasClickAction() { return HasObservers(); } const gfx::ImageSkia& StatusIconLinuxWrapper::GetImage() const { return image_; } const std::u16string& StatusIconLinuxWrapper::GetToolTip() const { return tool_tip_; } ui::MenuModel* StatusIconLinuxWrapper::GetMenuModel() const { return menu_model_; } void StatusIconLinuxWrapper::OnImplInitializationFailed() { switch (status_icon_type_) { case kTypeDbus: #if defined(USE_DBUS) status_icon_dbus_.reset(); #endif status_icon_linux_ = std::make_unique<StatusIconButtonLinux>(); status_icon_ = status_icon_linux_.get(); status_icon_type_ = kTypeWindowed; status_icon_->SetDelegate(this); return; case kTypeWindowed: status_icon_linux_.reset(); status_icon_ = nullptr; status_icon_type_ = kTypeNone; if (menu_model_) menu_model_->RemoveObserver(this); menu_model_ = nullptr; return; case kTypeNone: 
NOTREACHED(); } } void StatusIconLinuxWrapper::OnMenuStateChanged() { if (status_icon_) status_icon_->RefreshPlatformContextMenu(); } std::unique_ptr<StatusIconLinuxWrapper> StatusIconLinuxWrapper::CreateWrappedStatusIcon( const gfx::ImageSkia& image, const std::u16string& tool_tip) { #if defined(USE_DBUS) return base::WrapUnique(new StatusIconLinuxWrapper( base::MakeRefCounted<StatusIconLinuxDbus>(), image, tool_tip)); #else return base::WrapUnique( new StatusIconLinuxWrapper(std::make_unique<StatusIconButtonLinux>(), kTypeWindowed, image, tool_tip)); #endif } void StatusIconLinuxWrapper::UpdatePlatformContextMenu( ui::MenuModel* model) { if (!status_icon_) return; status_icon_->UpdatePlatformContextMenu(model); }<|fim▁end|>
void StatusIconLinuxWrapper::SetImage(const gfx::ImageSkia& image) { image_ = GetBestImageRep(image);
<|file_name|>LogisticErrorFunction.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import de.jungblut.math.DoubleMatrix; import de.jungblut.math.MathUtils; /** * Logistic error function implementation. * * @author thomas.jungblut * */ public final class LogisticErrorFunction implements ErrorFunction { @Override public double calculateError(DoubleMatrix y, DoubleMatrix hypothesis) { return (y.multiply(-1d) .multiplyElementWise(MathUtils.logMatrix(hypothesis)).subtract((y .subtractBy(1.0d)).multiplyElementWise(MathUtils.logMatrix(hypothesis .subtractBy(1d))))).sum(); } }<|fim▁end|>
package de.jungblut.math.squashing;
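The matrix expression in calculateError above is the summed cross-entropy loss; a NumPy restatement for reference, assuming DoubleMatrix.subtractBy(c) computes c minus the matrix and that h stays strictly inside (0, 1):

import numpy as np

def logistic_error(y, h):
    # sum of -y*log(h) - (1 - y)*log(1 - h), elementwise over the matrices
    return np.sum(-y * np.log(h) - (1.0 - y) * np.log(1.0 - h))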
<|file_name|>keras_saved_model.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=protected-access """Utility functions to save/load keras Model to/from SavedModel.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import six from tensorflow.python.client import session from tensorflow.python.estimator import keras as estimator_keras_util from tensorflow.python.estimator import model_fn as model_fn_lib from tensorflow.python.estimator.export import export as export_helpers from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.keras import backend as K from tensorflow.python.keras import models as models_lib from tensorflow.python.keras import optimizers from tensorflow.python.keras.engine import sequential from tensorflow.python.keras.metrics import Metric from tensorflow.python.keras.models import model_from_json from tensorflow.python.lib.io import file_io from tensorflow.python.ops import variables from tensorflow.python.platform import gfile from tensorflow.python.platform import tf_logging as logging from tensorflow.python.saved_model import builder as saved_model_builder from tensorflow.python.saved_model import constants from tensorflow.python.saved_model import utils_impl as saved_model_utils from tensorflow.python.training import saver as saver_lib from tensorflow.python.training.checkpointable import util as checkpointable_utils from tensorflow.python.util import compat def save_keras_model( model, saved_model_path, custom_objects=None, as_text=None): """Save a `tf.keras.Model` into TensorFlow SavedModel format. `save_model` generates new files/folders under the `saved_model_path` folder: 1) an asset folder containing the json string of the model's configuration (topology). 2) a checkpoint containing the model weights. 3) a saved_model.pb file containing the model's MetaGraphs. The prediction graph is always exported. The evaluation and training graphs are exported if the following conditions are met: - Evaluation: model loss is defined.<|fim▁hole|> saved to checkpoints. Model Requirements: - Model must be a sequential model or functional model. Subclassed models can not be saved via this function, unless you provide an implementation for get_config() and from_config(). - All variables must be saveable by the model. In general, this condition is met through the use of layers defined in the keras library. However, there is currently a bug with variables created in Lambda layer functions not being saved correctly (see https://github.com/keras-team/keras/issues/9740). Note that each mode is exported in separate graphs, so different modes do not share variables. To use the train graph with evaluation or prediction graphs, create a new checkpoint if variable values have been updated. 
Example: ```python import tensorflow as tf # Create a tf.keras model. model = tf.keras.Sequential() model.add(tf.keras.layers.Dense(1, input_shape=[10])) model.summary() # Save the tf.keras model in the SavedModel format. saved_to_path = tf.contrib.saved_model.save_keras_model( model, '/tmp/my_simple_tf_keras_saved_model') # Load the saved keras model back. model_prime = tf.contrib.saved_model.load_keras_model(saved_to_path) model_prime.summary() ``` Args: model: A `tf.keras.Model` to be saved. saved_model_path: a string specifying the path to the SavedModel directory. The SavedModel will be saved to a timestamped folder created within this directory. custom_objects: Optional dictionary mapping string names to custom classes or functions (e.g. custom loss functions). as_text: whether to write the `SavedModel` proto in text format. Returns: String path to the SavedModel folder, a subdirectory of `saved_model_path`. Raises: NotImplementedError: If the model is a subclassed model. ValueError: If a Sequential model does not have input shapes defined by the user, and is not built. """ if not model._is_graph_network: if isinstance(model, sequential.Sequential): # If input shape is not directly set in the model, the exported model # will assume that the inputs have the same shape as the shape the model # was built with. if not model.built: raise ValueError( 'Sequential model must be built before it can be exported.') else: raise NotImplementedError( 'Exporting subclassed models is not yet supported.') export_dir = export_helpers.get_timestamped_export_dir(saved_model_path) temp_export_dir = export_helpers.get_temp_export_dir(export_dir) builder = saved_model_builder.SavedModelBuilder(temp_export_dir) # Manually save variables to export them in an object-based checkpoint. This # skips the `builder.add_meta_graph_and_variables()` step, which saves a # name-based checkpoint. # TODO(b/113134168): Add fn to Builder to save with object-based saver. # TODO(b/113178242): This should only export the model json structure. Only # one save is needed once the weights can be copied from the model to clone. checkpoint_path = _export_model_json_and_variables(model, temp_export_dir) # Export each mode. Use ModeKeys enums defined for `Estimator` to ensure that # Keras models and `Estimator`s are exported with the same format. # Every time a mode is exported, the code checks to see if new variables have # been created (e.g. optimizer slot variables). If that is the case, the # checkpoint is re-saved to include the new variables. export_args = {'builder': builder, 'model': model, 'custom_objects': custom_objects, 'checkpoint_path': checkpoint_path} has_saved_vars = False if model.optimizer: if isinstance(model.optimizer, optimizers.TFOptimizer): _export_mode(model_fn_lib.ModeKeys.TRAIN, has_saved_vars, **export_args) has_saved_vars = True _export_mode(model_fn_lib.ModeKeys.EVAL, has_saved_vars, **export_args) else: logging.warning( 'Model was compiled with an optimizer, but the optimizer is not from ' '`tf.train` (e.g. `tf.train.AdagradOptimizer`). Only the serving ' 'graph was exported. 
The train and evaluate graphs were not added to ' 'the SavedModel.') _export_mode(model_fn_lib.ModeKeys.PREDICT, has_saved_vars, **export_args) builder.save(as_text) gfile.Rename(temp_export_dir, export_dir) return export_dir def _export_model_json_and_variables(model, saved_model_path): """Save model variables and json structure into SavedModel subdirectories.""" # Save model configuration as a json string under assets folder. model_json = model.to_json() model_json_filepath = os.path.join( saved_model_utils.get_or_create_assets_dir(saved_model_path), compat.as_text(constants.SAVED_MODEL_FILENAME_JSON)) file_io.write_string_to_file(model_json_filepath, model_json) # Save model weights in checkpoint format under variables folder. saved_model_utils.get_or_create_variables_dir(saved_model_path) checkpoint_prefix = saved_model_utils.get_variables_path(saved_model_path) model.save_weights(checkpoint_prefix, save_format='tf', overwrite=True) return checkpoint_prefix def _get_var_list(model): """Return list of all checkpointed saveable objects in the model.""" return checkpointable_utils.named_saveables(model) def _export_mode( mode, has_saved_vars, builder, model, custom_objects, checkpoint_path): """Export a model, and optionally save new vars from the clone model. Args: mode: A `tf.estimator.ModeKeys` string. has_saved_vars: A `boolean` indicating whether the SavedModel has already exported variables. builder: A `SavedModelBuilder` object. model: A `tf.keras.Model` object. custom_objects: A dictionary mapping string names to custom classes or functions. checkpoint_path: String path to checkpoint. Raises: ValueError: If the train/eval mode is being exported, but the model does not have an optimizer. """ compile_clone = (mode != model_fn_lib.ModeKeys.PREDICT) if compile_clone and not model.optimizer: raise ValueError( 'Model does not have an optimizer. Cannot export mode %s' % mode) model_graph = ops.get_default_graph() with ops.Graph().as_default() as g: K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN) # Clone the model into blank graph. This will create placeholders for inputs # and targets. clone = models_lib.clone_and_build_model( model, custom_objects=custom_objects, compile_clone=compile_clone) # Make sure that iterations variable is added to the global step collection, # to ensure that, when the SavedModel graph is loaded, the iterations # variable is returned by `tf.train.get_global_step()`. This is required for # compatibility with the SavedModelEstimator. if compile_clone: g.add_to_collection(ops.GraphKeys.GLOBAL_STEP, clone.optimizer.iterations) # Extract update and train ops from train/test/predict functions. if mode == model_fn_lib.ModeKeys.TRAIN: clone._make_train_function() builder._add_train_op(clone.train_function.updates_op) elif mode == model_fn_lib.ModeKeys.EVAL: clone._make_test_function() else: clone._make_predict_function() g.get_collection_ref(ops.GraphKeys.UPDATE_OPS).extend(clone.state_updates) clone_var_list = checkpointable_utils.named_saveables(clone) with session.Session().as_default(): if has_saved_vars: # Confirm all variables in the clone have an entry in the checkpoint. status = clone.load_weights(checkpoint_path) status.assert_existing_objects_matched() else: # Confirm that variables between the clone and model match up exactly, # not counting optimizer objects. Optimizer objects are ignored because # if the model has not trained, the slot variables will not have been # created yet. # TODO(b/113179535): Replace with checkpointable equivalence. 
_assert_same_non_optimizer_objects(model, model_graph, clone, g) # TODO(b/113178242): Use value transfer for checkpointable objects. clone.load_weights(checkpoint_path) # Add graph and variables to SavedModel. # TODO(b/113134168): Switch to add_meta_graph_and_variables. clone.save_weights(checkpoint_path, save_format='tf', overwrite=True) builder._has_saved_variables = True # Add graph to the SavedModel builder. builder.add_meta_graph( model_fn_lib.EXPORT_TAG_MAP[mode], signature_def_map=_create_signature_def_map(clone, mode), saver=saver_lib.Saver(clone_var_list), main_op=variables.local_variables_initializer()) return None def _create_signature_def_map(model, mode): """Create a SignatureDef map from a Keras model.""" inputs_dict = {name: x for name, x in zip(model.input_names, model.inputs)} if model.optimizer: targets_dict = {x.name.split(':')[0]: x for x in model.targets if x is not None} inputs_dict.update(targets_dict) outputs_dict = {name: x for name, x in zip(model.output_names, model.outputs)} metrics = estimator_keras_util._convert_keras_metrics_to_estimator(model) # Add metric variables to the `LOCAL_VARIABLES` collection. Metric variables # are by default not added to any collections. We are doing this here, so # that metric variables get initialized. local_vars = set(ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)) vars_to_add = set() if metrics is not None: for key, value in six.iteritems(metrics): if isinstance(value, Metric): vars_to_add.update(value.variables) # Convert Metric instances to (value_tensor, update_op) tuple. metrics[key] = (value.result(), value.updates[0]) # Remove variables that are in the local variables collection already. vars_to_add = vars_to_add.difference(local_vars) for v in vars_to_add: ops.add_to_collection(ops.GraphKeys.LOCAL_VARIABLES, v) export_outputs = model_fn_lib.export_outputs_for_mode( mode, predictions=outputs_dict, loss=model.total_loss if model.optimizer else None, metrics=metrics) return export_helpers.build_all_signature_defs( inputs_dict, export_outputs=export_outputs, serving_only=(mode == model_fn_lib.ModeKeys.PREDICT)) def _assert_same_non_optimizer_objects(model, model_graph, clone, clone_graph): """Assert model and clone contain the same checkpointable objects.""" def get_non_optimizer_objects(m, g): """Gather set of model and optimizer checkpointable objects.""" # Set default graph because optimizer.variables() returns optimizer # variables defined in the default graph. with g.as_default(): all_objects = set(checkpointable_utils.list_objects(m)) optimizer_and_variables = set() for obj in all_objects: if isinstance(obj, optimizers.TFOptimizer): optimizer_and_variables.update(checkpointable_utils.list_objects(obj)) optimizer_and_variables.update(set(obj.optimizer.variables())) return all_objects - optimizer_and_variables model_objects = get_non_optimizer_objects(model, model_graph) clone_objects = get_non_optimizer_objects(clone, clone_graph) if len(model_objects) != len(clone_objects): raise errors.InternalError( None, None, 'Model and clone must use the same variables.' '\n\tModel variables: %s\n\t Clone variables: %s' % (model_objects, clone_objects)) def load_keras_model(saved_model_path): """Load a keras.Model from SavedModel. load_model reinstantiates model state by: 1) loading model topology from json (this will eventually come from metagraph). 2) loading model weights from checkpoint. Example: ```python import tensorflow as tf # Create a tf.keras model. 
model = tf.keras.Sequential() model.add(tf.keras.layers.Dense(1, input_shape=[10])) model.summary() # Save the tf.keras model in the SavedModel format. saved_to_path = tf.contrib.saved_model.save_keras_model( model, '/tmp/my_simple_tf_keras_saved_model') # Load the saved keras model back. model_prime = tf.contrib.saved_model.load_keras_model(saved_to_path) model_prime.summary() ``` Args: saved_model_path: a string specifying the path to an existing SavedModel. Returns: a keras.Model instance. """ # restore model topology from json string model_json_filepath = os.path.join( compat.as_bytes(saved_model_path), compat.as_bytes(constants.ASSETS_DIRECTORY), compat.as_bytes(constants.SAVED_MODEL_FILENAME_JSON)) model_json = file_io.read_file_to_string(model_json_filepath) model = model_from_json(model_json) # restore model weights checkpoint_prefix = os.path.join( compat.as_text(saved_model_path), compat.as_text(constants.VARIABLES_DIRECTORY), compat.as_text(constants.VARIABLES_FILENAME)) model.load_weights(checkpoint_prefix) return model<|fim▁end|>
- Training: model is compiled with an optimizer defined under `tf.train`. This is because `tf.keras.optimizers.Optimizer` instances cannot be
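The TensorFlow row above documents a full save/load round trip for Keras models through `save_keras_model` and `load_keras_model`. As a quick orientation, here is a minimal usage sketch of that documented API; it assumes a TF 1.x environment where the `tf.contrib.saved_model` module from the snippet is importable, and the export path is illustrative only.

```python
# Minimal round-trip sketch of the helpers documented above (assumes TF 1.x
# with tf.contrib.saved_model available; the export path is illustrative).
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=[10])])
model.compile(optimizer='sgd', loss='mse')  # an optimizer lets train/eval graphs be exported too

export_dir = tf.contrib.saved_model.save_keras_model(model, '/tmp/example_saved_model')
restored = tf.contrib.saved_model.load_keras_model(export_dir)
restored.summary()  # same topology (from json) and weights (from checkpoint) as the original
```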
<|file_name|>repl.js<|end_file_name|><|fim▁begin|>/* */ (function(Buffer) { var crypto = require("crypto"); var sign = require("./sign"); var m = new Buffer('AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF', 'hex'); var xbuf = new Buffer('009A4D6792295A7F730FC3F2B49CBC0F62E862272F', 'hex'); var bn = require("bn.js"); var x = new bn(xbuf); var q = new Buffer('04000000000000000000020108A2E0CC0D99F8A5EF', 'hex'); var qbuf = new Buffer('04000000000000000000020108A2E0CC0D99F8A5EF', 'hex'); var q = new bn(qbuf); var kv = sign.getKay(x, q, m, 'sha256', crypto);<|fim▁hole|>})(require("buffer").Buffer);<|fim▁end|>
var k = sign.makeKey(q, kv, 'sha256', crypto); console.log('k', k);
<|file_name|>hauling_v2.js<|end_file_name|><|fim▁begin|>$( document ).ready( function () { $( "#form" ).validate( { rules: { company: { required: true }, truckType: { required: true }, materialType: { required: true }, fromSite: { required: true }, toSite: { required: true }, hourIn: { required: true }, hourOut: { required: true }, payment: { required: true }, plate: { minlength: 3, maxlength:15 } }, errorElement: "em", errorPlacement: function ( error, element ) { // Add the `help-block` class to the error element error.addClass( "help-block" ); error.insertAfter( element ); }, highlight: function ( element, errorClass, validClass ) { $( element ).parents( ".col-sm-5" ).addClass( "has-error" ).removeClass( "has-success" ); }, unhighlight: function (element, errorClass, validClass) { $( element ).parents( ".col-sm-5" ).addClass( "has-success" ).removeClass( "has-error" ); }, submitHandler: function (form) { return true; } }); $("#btnClose").click(function(){ if(window.confirm('Are you sure you want to close this Hauling Report?')) { $.ajax({ type: "POST", url: base_url + "hauling/update_hauling_state", data: $("#form").serialize(), dataType: "json", contentType: "application/x-www-form-urlencoded;charset=UTF-8", cache: false, success: function(data){ if( data.result == "error" ) { //alert(data.mensaje); $("#div_cargando").css("display", "none"); $('#btnSubmit').removeAttr('disabled'); $("#span_msj").html(data.mensaje); $("#div_msj").css("display", "inline"); return false; } if( data.result )//true { $("#div_cargando").css("display", "none"); $("#div_guardado").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); var url = base_url + "hauling/add_hauling/" + data.idHauling; $(location).attr("href", url); } else { alert('Error. Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); } }, error: function(result) { alert('Error. Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); } }); } }); $("#btnSubmit").click(function(){ if ($("#form").valid() == true){ //Activate the saving icon $('#btnSubmit').attr('disabled','-1'); $("#div_guardado").css("display", "none"); $("#div_error").css("display", "none"); $("#div_msj").css("display", "none"); $("#div_cargando").css("display", "inline"); $.ajax({ type: "POST", url: base_url + "hauling/save_hauling", data: $("#form").serialize(), dataType: "json", contentType: "application/x-www-form-urlencoded;charset=UTF-8", cache: false, success: function(data){ if( data.result == "error" ) { //alert(data.mensaje); $("#div_cargando").css("display", "none"); $('#btnSubmit').removeAttr('disabled'); $("#div_error").css("display", "inline"); $("#span_msj").html(data.mensaje); return false; } if( data.result )//true { $("#div_cargando").css("display", "none"); $("#div_guardado").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); var url = base_url + "hauling/add_hauling/" + data.idHauling; $(location).attr("href", url); } else { alert('Error. Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); } }, error: function(result) { alert('Error. 
Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); } }); }//if }); 
<|fim▁hole|>
 if ($("#form").valid() == true){ //Activate the saving icon $('#btnSubmit').attr('disabled','-1'); $('#btnEmail').attr('disabled','-1'); $("#div_guardado").css("display", "none"); $("#div_error").css("display", "none"); $("#div_msj").css("display", "none"); $("#div_cargando").css("display", "inline"); $.ajax({ type: "POST", url: base_url + "hauling/save_hauling_and_send_email", data: $("#form").serialize(), dataType: "json", contentType: "application/x-www-form-urlencoded;charset=UTF-8", cache: false, success: function(data){ if( data.result == "error" ) { //alert(data.mensaje); $("#div_cargando").css("display", "none"); $('#btnSubmit').removeAttr('disabled'); $('#btnEmail').removeAttr('disabled'); $("#div_error").css("display", "inline"); $("#span_msj").html(data.mensaje); return false; } if( data.result )//true { $("#div_cargando").css("display", "none"); $("#div_guardado").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); $('#btnEmail').removeAttr('disabled'); var url = base_url + "hauling/add_hauling/" + data.idHauling; $(location).attr("href", url); } else { alert('Error. Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); $('#btnEmail').removeAttr('disabled'); } }, error: function(result) { alert('Error. Reload the web page.'); $("#div_cargando").css("display", "none"); $("#div_error").css("display", "inline"); $('#btnSubmit').removeAttr('disabled'); $('#btnEmail').removeAttr('disabled'); } }); }//if }); });<|fim▁end|>
$("#btnEmail").click(function(){
<|file_name|>chanclient.rs<|end_file_name|><|fim▁begin|>use std::sync::mpsc::{channel, Receiver, Sender};<|fim▁hole|>
use routing::{NodeHandle, EffectMeta, EffectId}; /// Client that turns all messages into an enum variant /// and sends them across a thread-safe channel. #[derive(Debug)] pub struct MpscClient { tx: Sender<ClientMessage>, } /// Message to send across a channel, which encodes all the client /// callback variants. #[derive(Debug)] pub enum ClientMessage { /// audio_rendered(buffer, idx) call AudioRendered(Array2<f32>, u64), /// node_meta(handle, meta) call NodeMeta(NodeHandle, EffectMeta), /// node_id(handle, id) call NodeId(NodeHandle, EffectId), } impl MpscClient { pub fn new() -> (Self, Receiver<ClientMessage>) { let (tx, rx) = channel(); (Self{ tx }, rx) } fn send(&self, msg: ClientMessage) { trace!("Sending message to Client: {:?}", msg); if let Err(msg) = self.tx.send(msg) { warn!("Unable to send message to Client: {:?}", msg); } } } impl Client for MpscClient { fn audio_rendered(&mut self, buffer: Array2<f32>, idx: u64) { self.send(ClientMessage::AudioRendered(buffer, idx)); } fn node_meta(&mut self, handle: &NodeHandle, meta: &EffectMeta) { self.send(ClientMessage::NodeMeta(*handle, meta.clone())); } fn node_id(&mut self, handle: &NodeHandle, id: &EffectId) { self.send(ClientMessage::NodeId(*handle, id.clone())); } }<|fim▁end|>
use ndarray::Array2; use super::Client;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Service layer (domain model) of practice app<|fim▁hole|><|fim▁end|>
"""
<|file_name|>TransactionalFilesystemInterface.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2006-2008 Alfresco Software Limited. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * This program is distributed in the hope that it will be useful,<|fim▁hole|>
 * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * As a special exception to the terms and conditions of version 2.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * and Open Source Software ("FLOSS") applications as described in Alfresco's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * http://www.alfresco.com/legal/licensing" */ package org.alfresco.jlan.server.filesys; import org.alfresco.jlan.server.SrvSession; /** * Transactional Filesystem Interface * * <p>Optional interface that a filesystem driver can implement to add support for transactions around filesystem calls. * * @author gkspencer */ public interface TransactionalFilesystemInterface { /** * Begin a read-only transaction * * @param sess SrvSession */ public void beginReadTransaction(SrvSession sess); /** * Begin a writeable transaction * * @param sess SrvSession */ public void beginWriteTransaction(SrvSession sess); /** * End an active transaction * * @param sess SrvSession * @param tx Object */ public void endTransaction(SrvSession sess, Object tx); }<|fim▁end|>
<|file_name|>saml.go<|end_file_name|><|fim▁begin|>// Package saml contains a partial implementation of the SAML standard in golang. // SAML is a standard for identity federation, i.e. either allowing a third party to authenticate your users or allowing third parties to rely on us to authenticate their users. // // In SAML parlance an Identity Provider (IDP) is a service that knows how to authenticate users. A Service Provider (SP) is a service that delegates authentication to an IDP. If you are building a service where users log in with someone else's credentials, then you are a Service Provider. This package supports implementing both service providers and identity providers. // // The core package contains the implementation of SAML. The package samlsp provides helper middleware suitable for use in Service Provider applications. The package samlidp provides a rudimentary IDP service that is useful for testing or as a starting point for other integrations. // // Getting Started as a Service Provider // // Let us assume we have a simple web application to protect. We'll modify this application so it uses SAML to authenticate users. // // package main // // import "net/http" // // func hello(w http.ResponseWriter, r *http.Request) { // fmt.Fprintf(w, "Hello, World!") // } // // func main() { // app := http.HandlerFunc(hello) // http.Handle("/hello", app) // http.ListenAndServe(":8000", nil) // } // // Each service provider must have a self-signed X.509 key pair established. You can generate your own with something like this: // // openssl req -x509 -newkey rsa:2048 -keyout myservice.key -out myservice.cert -days 365 -nodes -subj "/CN=myservice.example.com"<|fim▁hole|>
// We will use `samlsp.Middleware` to wrap the endpoint we want to protect. Middleware provides both an `http.Handler` to serve the SAML specific URLs and a set of wrappers to require the user to be logged in. We also provide the URL where the service provider can fetch the metadata from the IDP at startup. In our case, we'll use [testshib.org](testshib.org), an identity provider designed for testing. // // package main // // import ( // "fmt" // "io/ioutil" // "net/http" // // "github.com/Validic/saml/samlsp" // ) // // func hello(w http.ResponseWriter, r *http.Request) { // fmt.Fprintf(w, "Hello, %s!", r.Header.Get("X-Saml-Cn")) // } // // func main() { // key, _ := ioutil.ReadFile("myservice.key") // cert, _ := ioutil.ReadFile("myservice.cert") // samlSP, _ := samlsp.New(samlsp.Options{ // IDPMetadataURL: "https://www.testshib.org/metadata/testshib-providers.xml", // URL: "http://localhost:8000", // Key: string(key), // Certificate: string(cert), // }) // app := http.HandlerFunc(hello) // http.Handle("/hello", samlSP.RequireAccount(app)) // http.Handle("/saml/", samlSP) // http.ListenAndServe(":8000", nil) // } // // // Next we'll have to register our service provider with the identity provider to establish trust from the service provider to the IDP. For [testshib.org](testshib.org), you can do something like: // // mdpath=saml-test-$USER-$HOST.xml // curl localhost:8000/saml/metadata > $mdpath // curl -i -F userfile=@$mdpath https://www.testshib.org/procupload.php // // Now you should be able to authenticate. The flow should look like this: // // 1. You browse to `localhost:8000/hello` // // 2. The middleware redirects you to `https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO` // // 3. testshib.org prompts you for a username and password. // // 4. 
testshib.org returns you an HTML document which contains an HTML form set up to POST to `localhost:8000/saml/acs`. The form is automatically submitted if you have JavaScript enabled. // // 5. The local service validates the response, issues a session cookie, and redirects you to the original URL, `localhost:8000/hello`. // // 6. This time when `localhost:8000/hello` is requested there is a valid session and so the main content is served. // // Getting Started as an Identity Provider // // Please see `examples/idp/` for a substantially complete example of how to use the library and helpers to be an identity provider. // // Support // // The SAML standard is huge and complex with many dark corners and strange, unused features. This package implements the most commonly used subset of these features required to provide a single sign-on experience. The package supports at least the subset of SAML known as [interoperable SAML](http://saml2int.org). // // This package supports the Web SSO profile. Message flows from the service provider to the IDP are supported using the HTTP Redirect binding and the HTTP POST binding. Message flows from the IDP to the service provider are supported via the HTTP POST binding. // // The package supports signed and encrypted SAML assertions. It does not support signed or encrypted requests. // // RelayState // // The *RelayState* parameter allows you to pass user state information across the authentication flow. The most common use for this is to allow a user to request a deep link into your site, be redirected through the SAML login flow, and upon successful completion, be directed to the originally requested link, rather than the root. // // Unfortunately, *RelayState* is less useful than it could be. Firstly, it is not authenticated, so anything you supply must be signed to avoid XSS or CSRF. Secondly, it is limited to 80 bytes in length, which precludes signing. (See section 3.6.3.1 of SAMLProfiles.) // // References // // The SAML specification is a collection of PDFs (sadly): // // - [SAMLCore](http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf) defines data types. // // - [SAMLBindings](http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf) defines the details of the HTTP requests in play. // // - [SAMLProfiles](http://docs.oasis-open.org/security/saml/v2.0/saml-profiles-2.0-os.pdf) describes data flows. // // - [SAMLConformance](http://docs.oasis-open.org/security/saml/v2.0/saml-conformance-2.0-os.pdf) includes a support matrix for various parts of the protocol. // // [TestShib](http://www.testshib.org/) is a testing ground for SAML service and identity providers. package saml<|fim▁end|>
//
<|file_name|>user.go<|end_file_name|><|fim▁begin|>/* * Spreed WebRTC. * Copyright (C) 2013-2015 struktur AG * * This file is part of Spreed WebRTC. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package main import ( "log" "sort" "sync" ) type User struct { Id string sessionTable map[string]*Session mutex sync.RWMutex } func NewUser(id string) *User { user := &User{ Id: id, sessionTable: make(map[string]*Session), } return user } // AddSession adds a session to the session table and returns true if // s is the first session. func (u *User) AddSession(s *Session) bool { first := false u.mutex.Lock() u.sessionTable[s.Id] = s if len(u.sessionTable) == 1 { log.Println("First session registered for user", u.Id) first = true } u.mutex.Unlock() return first }<|fim▁hole|>func (u *User) RemoveSession(sessionID string) bool { last := false u.mutex.Lock() delete(u.sessionTable, sessionID) if len(u.sessionTable) == 0 { log.Println("Last session unregistered for user", u.Id) last = true } u.mutex.Unlock() return last } func (u *User) Data() *DataUser { u.mutex.RLock() defer u.mutex.RUnlock() return &DataUser{ Id: u.Id, Sessions: len(u.sessionTable), } } func (u *User) SubscribeSessions(from *Session) []*DataSession { sessions := make([]*DataSession, 0, len(u.sessionTable)) u.mutex.RLock() defer u.mutex.RUnlock() for _, session := range u.sessionTable { // TODO(longsleep): This does lots of locks - check if these can be streamlined. from.Subscribe(session) sessions = append(sessions, session.Data()) } sort.Sort(ByPrioAndStamp(sessions)) return sessions } type ByPrioAndStamp []*DataSession func (a ByPrioAndStamp) Len() int { return len(a) } func (a ByPrioAndStamp) Swap(i, j int) { a[i], a[j] = a[j], a[i] } func (a ByPrioAndStamp) Less(i, j int) bool { if a[i].Prio < a[j].Prio { return true } if a[i].Prio == a[j].Prio { return a[i].stamp < a[j].stamp } return false }<|fim▁end|>
// RemoveSession removes a session from the session table and returns // true if no session is left.
<|file_name|>linux.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright (c) 2014 Robert Clipsham <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_camel_case_types)] #![allow(non_snake_case)] extern crate libc; pub const SOL_PACKET: libc::c_int = 263; pub const PACKET_ADD_MEMBERSHIP: libc::c_int = 1; pub const PACKET_MR_PROMISC: libc::c_int = 1; // man 7 packet pub struct packet_mreq { pub mr_ifindex: libc::c_int, pub mr_type: libc::c_ushort, pub mr_alen: libc::c_ushort, pub mr_address: [libc::c_uchar; 8] }<|fim▁end|>
<|file_name|>FileParser.py<|end_file_name|><|fim▁begin|># This is a separate module for parser functions to be added. # This is being created as static, so only one parser exists for the whole game. from nota import Nota from timingpoint import TimingPoint from tools import * import random import math <|fim▁hole|> if len(Line) > 0: if Line.find('Title:', 0, len(Line)) != -1: title = Line.split(':', 1) return title[1].replace("\r", "") def get_PreviewTime (osufile): Splitlines = osufile.split('\n') for Line in Splitlines: if len(Line) > 0: if Line.find('PreviewTime:', 0, len(Line)) != -1: time = Line.split(':', 1) return int(time[1]) def get_Artist (osufile): Splitlines = osufile.split('\n') for Line in Splitlines: if len(Line) > 0: if Line.find('Artist:', 0, len(Line)) != -1: artist = Line.split(':', 1) return artist[1].replace("\r", "") def get_TimingPoints(osufile): Lines = osufile.split('\n') TimingPointString = [] TimingPoints = [] save = False; for line in Lines: if len(line) > 2: if save: TimingPointString.append(line) else: save = False if line.find("[TimingPoints]") != -1: save = True for point in TimingPointString: # self, offset, mpb, meter, sampleType, sampleSet, volume, inherited, kiai params = point.split(',') #print params offset = float(params[0]) mpb = float(params[1]) meter = int(params[2]) sampleType = int(params[3]) sampleSet = int(params[4]) volume = int(params[5]) inherited = int(params[6]) kiai = int(params[7]) newPoint = TimingPoint(offset, mpb, meter, sampleType, sampleSet, volume, inherited, kiai) TimingPoints.append(newPoint) return TimingPoints def get_NoteList (osufile, sprites, screen_width, screen_height, bpm): NoteList = [] SplitLines = [] #This function returns a list of notes with all their properties to the user #Make sure you have a list to receive it SplitLines = osufile.split('[HitObjects]\r\n', 1) SplitObjects = SplitLines[1].split('\n') for Line in SplitObjects: if len(Line) > 0: params = Line.split(',') posx = int(params[0]) posy = int(params[1]) time = int(params[2]) ntype = int(params[3]) IgnoreFirstLine = True if ntype == 1 or ntype == 5: nota = Nota(posx, posy, time, sprites[random.randint(0,3)], screen_width, screen_height, 1) NoteList.append(nota) elif ntype == 2 or ntype == 6: ## THE GOD LINE ## this.sliderTime = game.getBeatLength() * (hitObject.getPixelLength() / sliderMultiplier) / 100f; curva = params[5] repeat = int(params[6]) pixellength = float(params[7]) sliderEndTime = (bpm * (pixellength/1.4) / 100.0) curveParams = curva.split('|')[1:] xCoords = [] for i in curveParams: xCoords.append(int(i.split(':')[0])) #notai = Nota(posx, posy, time, spritinhotexture, screen_width, screen_height) #NoteList.append(notai) numSteps = (int)(math.ceil(sliderEndTime * 0.01)) #print(curveParams) SpriteValue = random.randint(0,3) for k in range(numSteps+1): t = float(k) / (numSteps) mnx = int(B(xCoords, 0, len(xCoords) - 1, t)) #print("time: " + str(time)) mny = time + (float(k)/float(numSteps)) * float(sliderEndTime) #print("mnx: " + str(mnx)) #print("t: " + str(t)) if t == 0 or t==1: notam = Nota(mnx, mny, mny, sprites[SpriteValue], screen_width, screen_height, 1) else: notam = Nota((random.randint(-11, 11)+mnx), mny, mny, sprites[4], screen_width, screen_height, 2) NoteList.append(notam) elif ntype == 8 or ntype == 12: endTime = int(params[5]) for i in range(20): notasp = Nota(random.randint(0, 512), posy, random.randint(time, endTime), sprites[5], screen_width, screen_height, 3) NoteList.append(notasp) return NoteList def get_BreakPeriods(osufile): 
Lines = osufile.split('\n') BreakPString = [] BreakPoints = [] save = False; for line in Lines: if line.find("//") == -1: if save: BreakPString.append(line) else: save = False if line.find("//Break Periods") != -1: save = True for splitted in BreakPString: params = splitted.split(",") StartBreakTime = int(params[1]) EndBreakTime = int(params[2]) BreakPoints.append((StartBreakTime, EndBreakTime)) #print(BreakPoints) return BreakPoints<|fim▁end|>
def get_Name (osufile): Splitlines = osufile.split('\n') for Line in Splitlines:
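The slider handling in the row above calls a Bézier evaluator `B(xCoords, 0, len(xCoords) - 1, t)` that comes from the unshown `tools` module via `from tools import *`. Purely as an illustration of what a helper with that shape could be, here is a hypothetical de Casteljau evaluation over one coordinate; the real `tools.B` may differ.

```python
# Hypothetical sketch of a `B` helper with the signature used above: evaluate a
# 1-D Bezier curve over control values c[i..j] at parameter t (de Casteljau).
def B(c, i, j, t):
    if i == j:
        return c[i]
    return (1.0 - t) * B(c, i, j - 1, t) + t * B(c, i + 1, j, t)

print(B([0, 10, 0], 0, 2, 0.5))  # quadratic curve through 0, 10, 0 -> 5.0
```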
<|file_name|>Props.java<|end_file_name|><|fim▁begin|>package eu.monnetproject.util; import java.util.*; /** * Utility class that provides syntactic sugar for building OSGi property maps. * This allows you to create a property map as follows<|fim▁hole|>
 * <code>Props.prop("key1","value1")</code><br/> * <code> .prop("key2","value2")</code> */ public final class Props { public static PropsMap prop(String key, Object value) { PropsMap pm = new PropsMap(); pm.put(key,value); return pm; } public static class PropsMap extends Hashtable<String,Object> { public PropsMap prop(String key, Object value) { put(key,value); return this; } } }<|fim▁end|>
<|file_name|>triangle_making.py<|end_file_name|><|fim▁begin|>class TriangleMaking: def maxPerimeter(self, a, b, c): first = a second = b third = c sides = [first, second, third] for idx, side in enumerate(sides): one = (idx + 1) % 3 two = (idx + 2) % 3 total = sides[one] + sides[two] while sides[idx] >= total: sides[idx] -= 1 <|fim▁hole|><|fim▁end|>
return sum(sides)
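For reference, the completed `maxPerimeter` above enforces the triangle inequality by shrinking any side until it is strictly less than the sum of the other two; a quick usage sketch with made-up side lengths:

```python
# Usage sketch: the oversized side 10 is shrunk to 2 so that 1 + 2 > 2.
print(TriangleMaking().maxPerimeter(1, 2, 10))  # -> 5 (sides become 1, 2, 2)
```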
<|file_name|>rtree.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*- # Use UTF-8 import sys reload(sys) sys.setdefaultencoding("utf-8") import sys from heap import Heap class Rectangle(object): """docstring for Rectangle""" def __init__(self, dimension, entry=None): super(Rectangle, self).__init__() self.dimension = dimension self.min_dim = [None for _ in xrange(dimension)] self.max_dim = [None for _ in xrange(dimension)] if entry is not None: for ipos in xrange(self.dimension): self.min_dim[ipos] = entry[ipos] self.max_dim[ipos] = entry[ipos] def resize(self, rects): """ Recompute this Rectangle's MBR (Minimal Bounding Rect) from the given list of child Rectangles """ for ipos in xrange(self.dimension): self.min_dim[ipos] = min(map(lambda x: x.min_dim[ipos], rects)) self.max_dim[ipos] = max(map(lambda x: x.max_dim[ipos], rects)) def resize2(self, entry): """ Recompute this Rectangle's MBR (Minimal Bounding Rect) from the given entry; entry holds every dimension of one data record """ for ipos in xrange(self.dimension): if entry[ipos] < self.min_dim[ipos]: self.min_dim[ipos] = entry[ipos] elif entry[ipos] > self.max_dim[ipos]: self.max_dim[ipos] = entry[ipos] def expand_area(self, entry): new_area = 1.0 curr_area = 1.0 for ipos in xrange(self.dimension): max_value = self.max_dim[ipos] min_value = self.min_dim[ipos] try: curr_area *= (max_value - min_value) except TypeError as e: # Rectangle not fully initialized return -1 if entry[ipos] > self.max_dim[ipos]: max_value = entry[ipos] elif entry[ipos] < self.min_dim[ipos]: min_value = entry[ipos] try: new_area *= (max_value - min_value) except TypeError as e: # Rectangle not fully initialized return -1 return new_area - curr_area def overlap_area(self, rect): area = 1.0 for ipos in xrange(self.dimension): try: if self.max_dim[ipos] < rect.max_dim[ipos]: factor = self.max_dim[ipos] - rect.min_dim[ipos] else: factor = rect.max_dim[ipos] - self.min_dim[ipos] except TypeError as e: # Rectangle not fully initialized return -1 if factor < 0: return 0.0 area *= factor return area def __contains__(self, rect): for ipos in xrange(self.dimension): if self.max_dim[ipos] < rect.min_dim[ipos]: return False if self.min_dim[ipos] > rect.max_dim[ipos]: return False return True def __str__(self): return "Min:{0}, Max:{1}".format( self.min_dim, self.max_dim) class RNode(object): def __init__(self, degree, dimension): super(RNode, self).__init__() self.num = 0 self.isleaf = True self.degree = degree self.dimension = dimension if dimension < 2: raise Exception("Please use a B/B+ tree instead") if dimension > 6:<|fim▁hole|>
 print "WARNING: the recommended dimension range for an R-tree is [2,6]" self.mbr = Rectangle(self.dimension) self.threshold = degree*2 self.rects = [None for _ in xrange(self.threshold)] self.pnodes = [None for _ in xrange(self.threshold)] def adjust(self): self.mbr = Rectangle(self.dimension) self.mbr.resize(self.rects[:self.num]) def involve(self, entry): self.mbr.resize2(entry) def pointer(self): return self def most_overlap_pos(self, ipos): """ Find the position in self.pnodes of the node whose overlap with self.pnodes[ipos] is largest """ child = self.pnodes[ipos] ichild_pos, max_overlap, max_overlap_pos = 0, -1, 0 while ichild_pos < self.num: if ipos == ichild_pos: continue overlap = child.overlap_area(self.pnodes[ichild_pos].mbr) if max_overlap < overlap: max_overlap = overlap max_overlap_pos = ichild_pos ichild_pos += 1 return max_overlap_pos class DataNode(object): """docstring for DataNode""" def __init__(self, max_length=10): super(DataNode, self).__init__() self.num = 0 self.data = None self.max_length = max_length base, mode = divmod(self.max_length, 2) if mode > 0: base += 1 self.min_length = base self.mbr = Rectangle(self.dimension) class 
RTree(object): """docstring for RTree""" def __init__(self, degree, dimension): super(RTree, self).__init__() self.degree = degree self.dimension = dimension self.threshold = degree*2 self.root = self.allocate_namenode() def allocate_namenode(self): raise NotImplementedError() def deallocate_namenode(self, node): raise NotImplementedError() def allocate_datanode(self): raise NotImplementedError() def deallocate_datanode(self, node): raise NotImplementedError() def save_docs(self, metanode): raise NotImplementedError() def load_docs(self, metanode, ipos): raise NotImplementedError() def search(self, rect, node=None): if node is None: node = self.root indexes = [] ipos = node.num-1 while ipos >= 0: if rect in node.rects[ipos]: indexes.append(ipos) ipos -= 1 if len(indexes) == 0: return [] if node.isleaf is True: return map(lambda x: self.load_docs(node.pnodes[x]), indexes) results = [] for ipos in indexes: results.extend(self.search(rect, node.pnodes[ipos])) return results def split(self, parent, ipos, node): """ Entries inside an R-tree node are unordered, so to cut down on data movement the two nodes produced by a split are placed one in the old node's slot and one at the end. The current simple split algorithm: take the first entry as the old node's core rect; compute the overlap between the old core rect and every other rect; pick the rect with the lowest overlap as the new node's core rect; compute the overlap between the new core rect and the other rects; compare each non-core rect's overlap with the two cores; the degree-1 entries that overlap the new core more form the new node. """ if parent.isleaf is False: new_node = self.allocate_namenode() new_node.isleaf = node.isleaf ancor = node.rects[0] heap = Heap(node.pnodes, reverse=True, key=lambda x: ancor.overlap_area(x.mbr)) ipos = 0 while ipos < node.degree: new_node.pnodes[ipos] = heap.pop() new_node.rects[ipos] = new_node.pnodes[ipos].mbr ipos += 1 new_node.num = node.degree new_node.adjust() ipos = 0 length = len(heap) while ipos < length: node.pnodes[ipos] = heap.heap[ipos] node.pnodes[ipos].adjust() node.rects[ipos] = heap.heap[ipos].mbr ipos += 1 node.num = length node.adjust() parent.pnodes[parent.num-1] = new_node.pointer() parent.rects[parent.num-1] = new_node.mbr parent.num += 1 return None new_node = node.split() parent.pnodes[parent.num-1] = new_node.pointer() parent.rects[parent.num-1] = new_node.mbr parent.num += 1 return None def insert(self, entry, doc): """ entry is an array of length self.dimension; every dimension of entry must be numeric """ if self.root.num != self.threshold: return self.insert_nonfull(self.root, entry, doc) old_root = self.root new_root = self.allocate_namenode() new_root.isleaf = False new_root.pnodes[0] = old_root.pointer() new_root.rects[0] = old_root.mbr new_root.num += 1 self.root = new_root self.split(new_root, 0, old_root) return self.insert_nonfull(new_root, entry, doc) def insert_nonfull(self, node, entry, doc): ipos = 0 min_expand = sys.maxint min_expand_pos = 0 while ipos < node.num: expand_area = node.pnodes[ipos].mbr.expand_area(entry) if min_expand > expand_area: min_expand = expand_area min_expand_pos = ipos ipos += 1 ipos = min_expand_pos node.involve(entry) if node.isleaf is True: datanode = node.pnodes[ipos] if datanode is None: datanode = self.allocate_datanode() node.pnodes[ipos] = datanode node.num += 1 # No need to link the DataNode list here, since this runs only once, at initialization if datanode.isfull() is True: self.split(node, ipos, datanode) if node.pnodes[ipos].mbr.expand_area(entry) < \ node.pnodes[ipos+1].mbr.expand_area(entry): ipos += 1 datanode = node.pnodes[ipos] datanode.insert(entry, doc) node.rects[ipos] = datanode.mbr return None child = node.pnodes[ipos] if child.num == self.threshold: self.split(node, ipos, child) if node.pnodes[ipos].mbr.expand_area(entry) < \ node.pnodes[ipos+1].mbr.expand_area(entry): child = node.pnodes[ipos+1] return self.insert_nonfull(child, entry, doc) def merge(self, node, ipos): 
""" 将当前节点 位置(ipos) 对应的孩子与其重合面积最大的兄弟合并 """ child = node.pnodes[ipos] # 在 node 中寻找与 child 重合面积最大的兄弟 max_overlap_pos = node.most_overlap_pos(ipos) mchild = node.pnodes[max_overlap_pos] if node.isleaf is True: child.merge(mchild) self.deallocate_datanode(mchild) else: impos = 0 while impos < mchild.num: child.rects[child.num+impos] = mchild.rects[impos] child.pnodes[child.num+impos] = mchild.pnodes[impos] impos += 1 child.num += mchild.num child.adjust() self.deallocate_namenode(mchild) node.rects[max_overlap_pos] = node.rects[node.num-1] node.pnodes[max_overlap_pos] = node.pnodes[node.num-1] node.num -= 1 # node 的 mbr 没有变化,不用调用 adjust() return ipos def guarantee(self, node, ipos): """ 确保 node.pnodes[ipos] 拥有至少 t 个孩子 注意: node 一定是非叶子节点 """ child = node.pnodes[ipos] if child.num > self.degree: return ipos # 在 node 中寻找与 child 重合面积最大的兄弟 ichild_pos, max_overlap, max_overlap_pos = 0, -1, -1 while ichild_pos < node.num: if ipos == ichild_pos: continue candidate = node.pnodes[ichild_pos] if candidate.num <= self.degree: continue overlap = child.overlap_area(candidate.mbr) if max_overlap < overlap: max_overlap = overlap max_overlap_pos = ichild_pos ichild_pos += 1 if max_overlap_pos > 0: mchild = node.pnodes[max_overlap_pos] # 在 mchild 中找到与 child 重合度最高的点, 将其合并到 child.pnodes 中 ichild_pos, max_overlap, max_overlap_pos = 0, -1, 0 while ichild_pos < mchild.num: overlap = child.overlap_area(mchild.pnodes[ichild_pos].mbr) if max_overlap < overlap: max_overlap = overlap max_overlap_pos = ichild_pos ichild_pos += 1 child.pnodes[child.num] = mchild.pnodes[max_overlap_pos] child.rects[child.num] = mchild.rects[max_overlap_pos] child.num += 1 child.adjust() impos = max_overlap_pos while impos < mchild.num-1: mchild.rects[impos] = mchild.rects[impos+1] mchild.pnodes[impos] = mchild.pnodes[impos+1] impos += 1 mchild.num -= 1 mchild.adjust() return ipos return self.merge(node, ipos) def remove_key(self, node, entry): ipos = 0 indexes = [] min_expand, min_expand_pos = sys.maxint, 0 while ipos < node.num: expand_area = node.pnodes[ipos].mbr.expand_area(entry) if expand_area == 0: indexes.append(ipos) ipos += 1 if len(indexes) == 0: return None if node.isleaf is False: icpos = self.guarantee(node, ipos) child = node.pnodes[icpos] self.remove_key(child, entry) # TODO if len(indexes) == 0: return [] if node.isleaf is True: return map(lambda x: self.load_docs(node.pnodes[x]), indexes) results = [] for ipos in indexes: results.extend(self.search(rect, node.pnodes[ipos])) return results<|fim▁end|>
<|file_name|>Achievement.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core'; import { NavParams, ViewController } from 'ionic-angular'; @Component({ selector: 'page-Achievement', templateUrl: 'Achievement.html' }) export class Achievement { <|fim▁hole|> } dismiss() { this.viewCtrl.dismiss(); } }<|fim▁end|>
readonly achievement:{title:string,description:string, recompense:number, imageUrl: string}; constructor(public navParams: NavParams, public viewCtrl: ViewController) { this.achievement = navParams.get("achievement");
<|file_name|>editSpecification_job_parameters.js<|end_file_name|><|fim▁begin|><!--<|fim▁hole|> this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <script type="text/javascript"> <!-- function s${SEQNUM}_addFieldMapping() { if (editjob.s${SEQNUM}_fieldmapping_source.value == "") { alert("$Encoder.bodyEscape($ResourceBundle.getString('SolrIngester.NoFieldNameSpecified'))"); editjob.s${SEQNUM}_fieldmapping_source.focus(); return; } editjob.s${SEQNUM}_fieldmapping_op.value="Add"; postFormSetAnchor("s${SEQNUM}_fieldmapping"); } function s${SEQNUM}_deleteFieldMapping(i) { // Set the operation eval("editjob.s${SEQNUM}_fieldmapping_op_"+i+".value=\"Delete\""); // Submit if (editjob.s${SEQNUM}_fieldmapping_count.value==i) postFormSetAnchor("s${SEQNUM}_fieldmapping"); else postFormSetAnchor("s${SEQNUM}_fieldmapping_"+i) // Undo, so we won't get two deletes next time eval("editjob.s${SEQNUM}_fieldmapping_op_"+i+".value=\"Continue\""); } //--> </script><|fim▁end|>
Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
<|file_name|>test-kube-client.js<|end_file_name|><|fim▁begin|>/* * This file is part of Cockpit. * * Copyright (C) 2015 Red Hat, Inc. * * Cockpit is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * Cockpit is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Cockpit; If not, see <http://www.gnu.org/licenses/>. */ import { FIXTURE_BASIC } from "./fixture-basic.js"; import { FIXTURE_LARGE } from "./fixture-large.js"; import QUnit from "qunit-tests"; var angular = require("angular"); require("./kube-client"); require("./kube-client-cockpit"); require("./kube-client-mock"); (function() { /* Filled in with a function */ var inject; var module = angular.module("kubeClient.tests", [ "kubeClient", "kubeClient.mock" ]) .config([ 'KubeWatchProvider', 'KubeRequestProvider', function(KubeWatchProvider, KubeRequestProvider) { KubeWatchProvider.KubeWatchFactory = "MockKubeWatch"; KubeRequestProvider.KubeRequestFactory = "MockKubeRequest"; } ]); function injectLoadFixtures(fixtures) { inject([ "kubeLoader", "MockKubeData", function(loader, data) { if (fixtures) data.load(fixtures); loader.reset(true); } ]); } QUnit.test("loader load", function (assert) { var done = assert.async(); assert.expect(7); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", function(loader) { var promise = loader.load("nodes"); assert.ok(!!promise, "promise returned"); assert.equal(typeof promise.then, "function", "promise has then"); assert.equal(typeof promise.catch, "function", "promise has catch"); assert.equal(typeof promise.finally, "function", "promise has finally"); return promise.then(function(items) { assert.ok(angular.isArray(items), "got items array"); assert.equal(items.length, 1, "one node"); assert.equal(items[0].metadata.name, "127.0.0.1", "localhost node"); done(); }); }]); }); QUnit.test("loader load encoding", function (assert) { var done = assert.async(); assert.expect(2); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", "$q", function(loader, select, $q) { assert.equal(select().kind("Encoded").length, 0); var defer = $q.defer(); var x = loader.listen(function() { assert.equal(select().kind("Image").length, 1); x.cancel(); defer.resolve(); done(); }); loader.handle([{ "apiVersion": "v1", "kind": "Image", "metadata": { "name": "encoded:one", "resourceVersion": 10000, "uid": "11768037-ab8a-11e4-9a7c-100001001", "namespace": "default", "selfLink": "/oapi/v1/images/encoded%3Aone", }, }, { "apiVersion": "v1", "kind": "Image", "metadata": { "name": "encoded:one", "resourceVersion": 10000, "uid": "11768037-ab8a-11e4-9a7c-100001001", "namespace": "default", }, }, { "apiVersion": "v1", "kind": "Image", "metadata": { "name": "encoded:one", "resourceVersion": 10000, "uid": "11768037-ab8a-11e4-9a7c-100001001", "namespace": "default", "selfLink": "/oapi/v1/images/encoded:one", }, }]); return defer.promise; }]); }); QUnit.test("loader load fail", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", function(loader) { var promise = loader.load("nonexistant"); return 
promise.then(function(data) { assert.ok(!true, "successfully loaded"); }, function(response) { assert.equal(response.code, 404, "not found"); assert.equal(response.message, "Not found here", "not found message"); assert.ok(true, "not successfully loaded"); done(); }); }]); }); QUnit.test("loader watch", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", function(loader) { return loader.watch("nodes").then(function(response) { assert.ok("/api/v1/nodes/127.0.0.1" in loader.objects, "found node"); var node = loader.objects["/api/v1/nodes/127.0.0.1"]; assert.equal(node.metadata.name, "127.0.0.1", "localhost node"); assert.equal(typeof node.spec.capacity, "object", "node has resources"); done(); }); }]); }); QUnit.test("list nodes", function (assert) { var done = assert.async(); assert.expect(6); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("nodes").then(function() { var nodes = select().kind("Node"); assert.ok("/api/v1/nodes/127.0.0.1" in nodes, "found node"); var node = nodes["/api/v1/nodes/127.0.0.1"]; assert.equal(node.metadata.name, "127.0.0.1", "localhost node"); assert.equal(typeof node.spec.capacity, "object", "node has resources"); /* The same thing should be returned */ var nodes1 = select().kind("Node"); assert.strictEqual(nodes, nodes1, "same object returned"); /* Key should not be encoded as JSON */ var parsed = JSON.parse(JSON.stringify(node)); assert.ok(!("key" in parsed), "key should not be serialized"); assert.strictEqual(parsed.key, undefined, "key should be undefined after serialize"); done(); }); }]); }); QUnit.test("list pods", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("pods").then(function() { var pods = select().kind("Pod"); assert.equal(pods.length, 3, "found pods"); var pod = pods["/api/v1/namespaces/default/pods/apache"]; assert.equal(typeof pod, "object", "found pod"); assert.equal(pod.metadata.labels.name, "apache", "pod has label"); done(); }); }]); }); QUnit.test("set namespace", function (assert) { var done = assert.async(); assert.expect(7); injectLoadFixtures(FIXTURE_BASIC); inject(["$q", "kubeLoader", "kubeSelect", function($q, loader, select) { return loader.watch("pods").then(function() { var pods = select().kind("Pod"); assert.equal(pods.length, 3, "number of pods"); assert.strictEqual(loader.limits.namespace, null, "namespace is null"); loader.limit({ namespace: "other" }); assert.strictEqual(loader.limits.namespace, "other", "namespace is other"); pods = select().kind("Pod"); assert.equal(pods.length, 1, "pods from namespace other"); assert.ok("/api/v1/namespaces/other/pods/apache" in pods, "other pod"); loader.limit({ namespace: null }); assert.strictEqual(loader.limits.namespace, null, "namespace is null again"); var defer = $q.defer(); var listened = false; var x = loader.listen(function() { if (listened) { pods = select().kind("Pod"); assert.equal(pods.length, 3, "all pods back"); x.cancel(); defer.resolve(); done(); } listened = true; }); return defer.promise; }); }]); }); QUnit.test("add pod", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["$q", "kubeLoader", "kubeSelect", "MockKubeData", function($q, loader, select, data) { return loader.watch("pods").then(function() { var pods = select().kind("Pod"); 
assert.equal(pods.length, 3, "number of pods"); assert.equal(pods["/api/v1/namespaces/default/pods/apache"].metadata.labels.name, "apache", "pod has label"); var defer = $q.defer(); var x = loader.listen(function() { var pods = select().kind("Pod"); if (pods.length === 4) { assert.equal(pods["/api/v1/namespaces/default/pods/aardvark"].metadata.labels.name, "aardvark", "new pod present in items"); x.cancel(); defer.resolve(); done(); } }); data.update("namespaces/default/pods/aardvark", { "kind": "Pod", "metadata": { "name": "aardvark", "uid": "22768037-ab8a-11e4-9a7c-080027300d85", "namespace": "default", "labels": { "name": "aardvark" }, }, "spec": { "volumes": null, "containers": [ ], "imagePullPolicy": "IfNotPresent" } }); return defer.promise; }); }]); }); QUnit.test("update pod", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["$q", "kubeLoader", "kubeSelect", "MockKubeData", function($q, loader, select, data) { return loader.watch("pods").then(function() { var pods = select().kind("Pod"); assert.equal(pods.length, 3, "number of pods"); assert.equal(pods["/api/v1/namespaces/default/pods/apache"].metadata.labels.name, "apache", "pod has label"); var defer = $q.defer(); var listened = false; var x = loader.listen(function() { var pods; if (listened) { pods = select().kind("Pod"); assert.equal(pods["/api/v1/namespaces/default/pods/apache"].metadata.labels.name, "apachepooo", "pod has changed"); x.cancel(); defer.resolve(); done(); } listened = true; }); data.update("namespaces/default/pods/apache", { "kind": "Pod", "metadata": { "name": "apache", "uid": "11768037-ab8a-11e4-9a7c-080027300d85", "namespace": "default", "labels": { "name": "apachepooo" }, } }); return defer.promise; }); }]); }); QUnit.test("remove pod", function (assert) { var done = assert.async(); assert.expect(5); injectLoadFixtures(FIXTURE_BASIC); inject(["$q", "kubeLoader", "kubeSelect", "MockKubeData", function($q, loader, select, data) { return loader.watch("pods").then(function() { var pods = select().kind("Pod"); assert.equal(pods.length, 3, "number of pods"); assert.equal(pods["/api/v1/namespaces/default/pods/apache"].metadata.labels.name, "apache", "pod has label"); var defer = $q.defer(); var listened = false; var x = loader.listen(function() { var pods; if (listened) { pods = select().kind("Pod"); assert.equal(pods.length, 2, "removed a pod"); assert.strictEqual(pods["/api/v1/namespaces/default/pods/apache"], undefined, "removed pod"); assert.equal(pods["/api/v1/namespaces/default/pods/database-1"].metadata.labels.name, "wordpressreplica", "other pod"); x.cancel(); defer.resolve(); done(); } listened = true; }); data.update("namespaces/default/pods/apache", null); return defer.promise; }); }]); }); QUnit.test("list services", function (assert) { var done = assert.async(); assert.expect(4); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("services").then(function() { var services = select().kind("Service"); var x; var svc = null; for (x in services) { svc = services[x]; break; } assert.ok(!!svc, "got a service"); assert.equal(services.length, 2, "number of services"); assert.equal(svc.metadata.name, "kubernetes", "service id"); assert.equal(svc.spec.selector.component, "apiserver", "service has label"); done(); }); }]); }); var CREATE_ITEMS = [ { "kind": "Pod", "apiVersion": "v1", "metadata": { "name": "pod1", "uid": "d072fb85-f70e-11e4-b829-10c37bdb8410", "resourceVersion": "634203", 
"labels": { "name": "pod1" }, }, "spec": { "volumes": null, "containers": [{ "name": "database", "image": "mysql", "ports": [{ "containerPort": 3306, "protocol": "TCP" }], }], "nodeName": "127.0.0.1" } }, { "kind": "Node", "apiVersion": "v1", "metadata": { "name": "node1", "uid": "6e51438e-d161-11e4-acbc-10c37bdb8410", "resourceVersion": "634539", }, "spec": { "externalID": "172.2.3.1" } } ]; QUnit.test("create", function (assert) { var done = assert.async(); assert.expect(2); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { loader.watch("pods"); loader.watch("nodes"); loader.watch("namespaces"); return methods.create(CREATE_ITEMS, "namespace1").then(function() { assert.equal(loader.objects["/api/v1/namespaces/namespace1/pods/pod1"].metadata.name, "pod1", "pod object"); assert.equal(loader.objects["/api/v1/nodes/node1"].metadata.name, "node1", "node object"); done(); }); }]); }); QUnit.test("create namespace exists", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { loader.watch("pods"); loader.watch("nodes"); loader.watch("namespaces"); var NAMESPACE_ITEM = { "apiVersion" : "v1", "kind" : "Namespace", "metadata" : { "name": "namespace1" } }; return methods.create(NAMESPACE_ITEM).then(function() { assert.ok("/api/v1/namespaces/namespace1" in loader.objects, "namespace created"); return methods.create(CREATE_ITEMS, "namespace1").then(function() { assert.ok("/api/v1/namespaces/namespace1/pods/pod1" in loader.objects, "pod created"); assert.ok("/api/v1/nodes/node1" in loader.objects, "node created"); done(); }); }); }]); }); QUnit.test("create namespace default", function (assert) { var done = assert.async(); assert.expect(2);<|fim▁hole|> injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { loader.watch("pods"); loader.watch("nodes"); loader.watch("namespaces"); return methods.create(CREATE_ITEMS).then(function() { assert.equal(loader.objects["/api/v1/namespaces/default/pods/pod1"].metadata.name, "pod1", "pod created"); assert.equal(loader.objects["/api/v1/nodes/node1"].metadata.name, "node1", "node created"); done(); }); }]); }); QUnit.test("create object exists", function (assert) { var done = assert.async(); assert.expect(1); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { loader.watch("pods"); loader.watch("nodes"); loader.watch("namespaces"); var items = CREATE_ITEMS.slice(); items.push(items[0]); return methods.create(items).then(function(response) { assert.equal(response, false, "should have failed"); done(); }, function(response) { assert.equal(response.code, 409, "http already exists"); done(); }); }]); }); QUnit.test("delete pod", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { var watch = loader.watch("pods"); return methods.create(CREATE_ITEMS, "namespace2").then(function() { assert.ok("/api/v1/namespaces/namespace2/pods/pod1" in loader.objects, "pod created"); return methods.delete("/api/v1/namespaces/namespace2/pods/pod1").then(function() { assert.ok(true, "remove succeeded"); return watch.finally(function() { assert.ok(!("/api/v1/namespaces/namespace2/pods/pod1" in loader.objects), "pod was removed"); done(); }); }); }); }]); }); QUnit.test("patch pod", function (assert) { var done = assert.async(); 
assert.expect(4); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { var watch = loader.watch("pods"); var path = "/api/v1/namespaces/namespace2/pods/pod1"; return methods.create(CREATE_ITEMS, "namespace2").then(function() { assert.ok(path in loader.objects, "pod created"); return methods.patch(path, { "extra": "blah" }).then(function() { assert.ok(true, "patch succeeded"); return methods.patch(loader.objects[path], { "second": "test" }).then(function() { return watch.finally(function() { var pod = loader.objects[path]; assert.equal(pod.extra, "blah", "pod has changed"); assert.equal(pod.second, "test", "pod changed by own object"); done(); }); }); }); }); }]); }); QUnit.test("post", function (assert) { var done = assert.async(); assert.expect(1); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { return methods.post("/api/v1/namespaces/namespace1/pods", CREATE_ITEMS[0]).then(function(response) { assert.equal(response.metadata.name, "pod1", "pod object"); done(); }); }]); }); QUnit.test("post fail", function (assert) { var done = assert.async(); assert.expect(1); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { return methods.post("/api/v1/nodes", FIXTURE_BASIC["nodes/127.0.0.1"]).then(function() { assert.ok(false, "shouldn't succeed"); }, function(response) { assert.deepEqual(response, { "code": 409, "message": "Already exists" }, "got failure code"); done(); }); }]); }); QUnit.test("put", function (assert) { var done = assert.async(); assert.expect(1); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeMethods", function(loader, methods) { var node = { "kind": "Node", "metadata": { "name": "127.0.0.1", labels: { "test": "value" } } }; return methods.put("/api/v1/nodes/127.0.0.1", node).then(function(response) { assert.deepEqual(response.metadata.labels, { "test": "value" }, "put returned object"); done(); }); }]); }); QUnit.test("check resource ok", function (assert) { var done = assert.async(); assert.expect(0); injectLoadFixtures(null); inject(["kubeMethods", function(methods) { var data = { kind: "Blah", metadata: { name: "test" } }; done(); return methods.check(data); }]); }); QUnit.test("check resource name empty", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(null); inject(["kubeMethods", function(methods) { var data = { kind: "Blah", metadata: { name: "" } }; return methods.check(data).catch(function(ex) { assert.ok(angular.isArray(ex), "threw array of failures"); assert.equal(ex.length, 1, "number of errors"); assert.ok(ex[0] instanceof Error, "threw an error"); done(); }); }]); }); QUnit.test("check resource name missing", function (assert) { var done = assert.async(); assert.expect(1); injectLoadFixtures(null); inject(["kubeMethods", function(methods) { var data = { kind: "Blah", metadata: { } }; return methods.check(data).then(function() { assert.ok(true, "passed check"); done(); }, null); }]); }); QUnit.test("check resource name namespace bad", function (assert) { var done = assert.async(); assert.expect(6); injectLoadFixtures(null); inject(["kubeMethods", function(methods) { var data = { kind: "Blah", metadata: { name: "a#a", namespace: "" } }; var targets = { "metadata.name": "#name", "metadata.namespace": "#namespace" }; return methods.check(data, targets).catch(function(ex) { assert.ok(angular.isArray(ex), "threw array of failures"); assert.equal(ex.length, 2, "number of 
errors"); assert.ok(ex[0] instanceof Error, "threw an error"); assert.equal(ex[0].target, "#name", "correct name target"); assert.ok(ex[1] instanceof Error, "threw an error"); assert.equal(ex[1].target, "#namespace", "correct name target"); done(); }); }]); }); QUnit.test("check resource namespace bad", function (assert) { var done = assert.async(); assert.expect(4); injectLoadFixtures(null); inject(["kubeMethods", function(methods) { var data = { kind: "Blah", metadata: { name: "aa", namespace: "" } }; var targets = { "metadata.name": "#name", "metadata.namespace": "#namespace" }; return methods.check(data, targets).catch(function(ex) { assert.ok(angular.isArray(ex), "threw array of failures"); assert.equal(ex.length, 1, "number of errors"); assert.ok(ex[0] instanceof Error, "threw an error"); assert.equal(ex[0].target, "#namespace", "correct name target"); done(); }); }]); }); QUnit.test("lookup uid", function (assert) { var done = assert.async(); assert.expect(3); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("pods").then(function() { /* Get the item */ var item = select().kind("Pod") .one(); var uid = item.metadata.uid; assert.ok(uid, "Have uid"); var by_uid_item = select().uid(uid) .one(); assert.strictEqual(item, by_uid_item, "load uid"); /* Shouldn't match */ item = select().uid("bad") .one(); assert.strictEqual(item, null, "mismatch uid"); done(); }); }]); }); QUnit.test("lookup host", function (assert) { var done = assert.async(); assert.expect(2); injectLoadFixtures(FIXTURE_BASIC); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("pods").then(function() { /* Get the item */ var item = select().host("127.0.0.1") .one(); assert.deepEqual(item.metadata.selfLink, "/api/v1/namespaces/default/pods/database-1", "correct pod"); /* Shouldn't match */ item = select().host("127.0.0.2") .one(); assert.strictEqual(item, null, "mismatch host"); done(); }); }]); }); QUnit.test("lookup", function (assert) { var done = assert.async(); assert.expect(6); injectLoadFixtures(FIXTURE_LARGE); inject(["kubeLoader", "kubeSelect", function(loader, select) { var expected = { "apiVersion": "v1", "kind": "ReplicationController", "metadata": { "labels": { "example": "mock", "name": "3controller" }, "name": "3controller", "resourceVersion": 10000, "uid": "11768037-ab8a-11e4-9a7c-100001001", "namespace": "default", "selfLink": "/api/v1/namespaces/default/replicationcontrollers/3controller", }, "spec": { "replicas": 1, "selector": { "factor3": "yes" } } }; return loader.watch("replicationcontrollers").then(function() { /* Get the item */ var item = select().kind("ReplicationController") .name("3controller") .namespace("default") .one(); assert.deepEqual(item, expected, "correct item"); /* The same item, without namespace */ item = select().kind("ReplicationController") .name("3controller") .one(); assert.deepEqual(item, expected, "selected without namespace"); /* Any replication controller */ item = select().kind("ReplicationController") .one(); assert.equal(item.kind, "ReplicationController", "any replication controller"); /* Shouldn't match */ item = select().kind("BadKind") .name("3controller") .namespace("default") .one(); assert.strictEqual(item, null, "mismatch kind"); item = select().kind("ReplicationController") .name("badcontroller") .namespace("default") .one(); assert.strictEqual(item, null, "mismatch name"); item = select().kind("ReplicationController") .name("3controller") 
.namespace("baddefault") .one(); assert.strictEqual(item, null, "mismatch namespace"); done(); }); }]); }); QUnit.test("select", function (assert) { var done = assert.async(); assert.expect(12); injectLoadFixtures(FIXTURE_LARGE); inject(["kubeLoader", "kubeSelect", function(loader, select) { return loader.watch("pods").then(function() { var image = { kind: "Image" }; /* same thing twice */ var first = select(image); var second = select(image); assert.strictEqual(first, second, "identical for single object"); /* null thing twice */ first = select(null); second = select(null); assert.strictEqual(first, second, "identical for null object"); /* Select everything odd, 500 pods */ var results = select().namespace("default") .label({ "type": "odd" }); assert.equal(results.length, 500, "correct amount"); /* The same thing should be returned */ var results1 = select().namespace("default") .label({ "type": "odd" }); assert.strictEqual(results, results1, "same object returned"); /* Select everything odd, but wrong namespace, no pods */ results = select().namespace("other") .label({ "type": "odd" }); assert.equal(results.length, 0, "other namespace no pods"); /* The same ones selected even when a second (present) label */ results = select().namespace("default") .label({ "type": "odd", "tag": "silly" }); assert.equal(results.length, 500, "with additional label"); /* Nothing selected when additional invalid field */ results = select().namespace("default") .label({ "type": "odd", "tag": "billy" }); assert.equal(results.length, 0, "no objects"); /* Limit by kind */ results = select().kind("Pod") .namespace("default") .label({ "type": "odd" }); assert.equal(results.length, 500, "by kind"); /* Limit by invalid kind */ results = select().kind("Ood") .namespace("default") .label({ "type": "odd" }); assert.equal(results.length, 0, "nothing for invalid kind"); /* Everything selected when no selector */ results = select().namespace("default"); assert.equal(results.length, 1000, "all pods"); /* Nothing selected when bad namespace */ results = select().namespace("bad"); assert.equal(results.length, 0, "bad namespace no objects"); /* Nothing selected when empty selector */ results = select().label({ }); assert.equal(results.length, 0, "nothing selected"); done(); }); }]); }); angular.module('exceptionOverride', []).factory('$exceptionHandler', function() { return function(exception, cause) { exception.message += ' (caused by "' + cause + '")'; throw exception; }; }); module.run([ '$injector', function($injector) { inject = function inject(func) { return $injector.invoke(func); }; QUnit.start(); } ]); angular.bootstrap(document, ['kubeClient.tests']); }());<|fim▁end|>
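The label-selector assertions in the row above pin down two edge cases: a selector that names an extra label which is actually present still matches, while an empty selector matches nothing at all. A minimal Python sketch of that matching rule, kept outside the row itself; the pod dicts and the label_match helper are illustrative stand-ins, not part of the tested client:

# Hypothetical sketch of the matching rule that select().label({...})
# is asserted to follow in the tests above.
def label_match(obj, selector):
    # An empty selector selects nothing, mirroring the "nothing selected"
    # assertion; otherwise every requested key/value pair must be present.
    if not selector:
        return False
    labels = obj.get("metadata", {}).get("labels", {})
    return all(labels.get(k) == v for k, v in selector.items())

pods = [
    {"metadata": {"namespace": "default", "labels": {"type": "odd", "tag": "silly"}}},
    {"metadata": {"namespace": "default", "labels": {"type": "even", "tag": "silly"}}},
]
assert len([p for p in pods if label_match(p, {"type": "odd"})]) == 1
# An additional label that is present does not narrow the match.
assert len([p for p in pods if label_match(p, {"type": "odd", "tag": "silly"})]) == 1
# A label value that does not match excludes the object.
assert [p for p in pods if label_match(p, {"type": "odd", "tag": "billy"})] == []
assert [p for p in pods if label_match(p, {})] == []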
<|file_name|>fast_marker_cluster.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from folium.plugins.marker_cluster import MarkerCluster from folium.utilities import if_pandas_df_convert_to_numpy, validate_location from jinja2 import Template class FastMarkerCluster(MarkerCluster): """ Add marker clusters to a map using in-browser rendering. Using FastMarkerCluster it is possible to render 1000's of points far quicker than the MarkerCluster class. Be aware that the FastMarkerCluster class passes an empty list to the parent class' __init__ method during initialisation. This means that the add_child method is never called, and no reference to any marker data is retained. Methods such as get_bounds() are therefore not available when using it.<|fim▁hole|>
Parameters ----------
<|file_name|>test-trac-0218.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import logging if __name__ == '__main__': logging.basicConfig() _log = logging.getLogger(__name__) import pyxb.binding.generate import pyxb.utils.domutils from xml.dom import Node import os.path xst = '''<?xml version="1.0"?> <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"> <xs:element name="topLevel"> <xs:complexType> <xs:sequence> <xs:element name="item" type="xs:int" maxOccurs="unbounded"/> </xs:sequence> </xs:complexType> </xs:element> </xs:schema> ''' code = pyxb.binding.generate.GeneratePython(schema_text=xst) #print code rv = compile(code, 'test', 'exec') eval(rv) from pyxb.exceptions_ import *<|fim▁hole|> def testBasic (self): instance = topLevel() self.assertTrue(instance.item is not None) self.assertFalse(instance.item is None) self.assertTrue(instance.item != None) self.assertTrue(None != instance.item) self.assertFalse(instance.item) instance.item.extend([1,2,3,4]) self.assertTrue(instance.item is not None) self.assertFalse(instance.item is None) self.assertTrue(instance.item != None) self.assertTrue(None != instance.item) self.assertTrue(instance.item) if __name__ == '__main__': unittest.main()<|fim▁end|>
import unittest class TestTrac0218 (unittest.TestCase):
<|file_name|>alignment-gep-tup-like-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[feature(managed_boxes)]; struct Pair<A,B> { a: A, b: B } struct RecEnum<A>(Rec<A>); struct Rec<A> { val: A, rec: Option<@mut RecEnum<A>> } fn make_cycle<A:'static>(a: A) { let g: @mut RecEnum<A> = @mut RecEnum(Rec {val: a, rec: None}); g.rec = Some(g); } struct Invoker<A,B> { a: A, b: B, } <|fim▁hole|>impl<A:Clone,B:Clone> Invokable<A,B> for Invoker<A,B> { fn f(&self) -> (A, B) { (self.a.clone(), self.b.clone()) } } fn f<A:Send + Clone + 'static, B:Send + Clone + 'static>( a: A, b: B) -> @Invokable<A,B> { @Invoker { a: a, b: b, } as @Invokable<A,B> } pub fn main() { let x = 22_u8; let y = 44_u64; let z = f(~x, y); make_cycle(z); let (a, b) = z.f(); info!("a={} b={}", *a as uint, b as uint); assert_eq!(*a, x); assert_eq!(b, y); }<|fim▁end|>
trait Invokable<A,B> { fn f(&self) -> (A, B); }
<|file_name|>tensor_signature_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for learn.estimators.tensor_signature.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow.contrib.learn.python.learn.estimators import tensor_signature class TensorSignatureTest(tf.test.TestCase): def testTensorSignatureCompatible(self): placeholder_a = tf.placeholder(name='test', shape=[None, 100], dtype=tf.int32) placeholder_b = tf.placeholder(name='another', shape=[256, 100], dtype=tf.int32) placeholder_c = tf.placeholder(name='mismatch', shape=[256, 100], dtype=tf.float32) placeholder_d = tf.placeholder(name='mismatch', shape=[128, 100], dtype=tf.int32) signatures = tensor_signature.create_signatures(placeholder_a) self.assertTrue(tensor_signature.tensors_compatible(placeholder_a, signatures)) self.assertTrue(tensor_signature.tensors_compatible(placeholder_b, signatures)) self.assertFalse(tensor_signature.tensors_compatible(placeholder_c, signatures))<|fim▁hole|> inputs = {'a': placeholder_a} signatures = tensor_signature.create_signatures(inputs) self.assertTrue(tensor_signature.tensors_compatible(inputs, signatures)) self.assertFalse(tensor_signature.tensors_compatible(placeholder_a, signatures)) self.assertFalse(tensor_signature.tensors_compatible(placeholder_b, signatures)) self.assertFalse(tensor_signature.tensors_compatible( {'b': placeholder_b}, signatures)) self.assertTrue(tensor_signature.tensors_compatible( {'a': placeholder_b, 'c': placeholder_c}, signatures)) self.assertFalse(tensor_signature.tensors_compatible( {'a': placeholder_c}, signatures)) def testSparseTensorCompatible(self): t = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4]) signatures = tensor_signature.create_signatures(t) self.assertTrue(tensor_signature.tensors_compatible(t, signatures)) def testTensorSignaturePlaceholders(self): placeholder_a = tf.placeholder(name='test', shape=[None, 100], dtype=tf.int32) signatures = tensor_signature.create_signatures(placeholder_a) placeholder_out = tensor_signature.create_placeholders_from_signatures( signatures) self.assertEqual(placeholder_out.dtype, placeholder_a.dtype) self.assertTrue(placeholder_out.get_shape().is_compatible_with( placeholder_a.get_shape())) self.assertTrue(tensor_signature.tensors_compatible(placeholder_out, signatures)) inputs = {'a': placeholder_a} signatures = tensor_signature.create_signatures(inputs) placeholders_out = tensor_signature.create_placeholders_from_signatures( signatures) self.assertEqual(placeholders_out['a'].dtype, placeholder_a.dtype) self.assertTrue( placeholders_out['a'].get_shape().is_compatible_with( placeholder_a.get_shape())) self.assertTrue(tensor_signature.tensors_compatible(placeholders_out, signatures)) def testSparseTensorSignaturePlaceholders(self): 
tensor = tf.SparseTensor(values=[1.0, 2.0], indices=[[0, 2], [0, 3]], shape=[5, 5]) signature = tensor_signature.create_signatures(tensor) placeholder = tensor_signature.create_placeholders_from_signatures( signature) self.assertTrue(isinstance(placeholder, tf.SparseTensor)) self.assertEqual(placeholder.values.dtype, tensor.values.dtype) if __name__ == '__main__': tf.test.main()<|fim▁end|>
self.assertTrue(tensor_signature.tensors_compatible(placeholder_d, signatures))
<|file_name|>content_panel.js<|end_file_name|><|fim▁begin|>var $$ = React.createElement; var Substance = require("substance"); var Scrollbar = require("./scrollbar"); var _ = require("substance/helpers"); var PanelMixin = require("./panel_mixin"); var ContentPanelMixin = _.extend({}, PanelMixin, { contextTypes: { app: React.PropTypes.object.isRequired, componentFactory: React.PropTypes.object.isRequired, }, // Since component gets rendered multiple times we need to update // the scrollbar and reattach the scroll event componentDidMount: function() { var app = this.context.app; this.updateScrollbar(); $(window).on('resize', this.updateScrollbar); var doc = app.doc; doc.connect(this, { 'document:changed': this.onDocumentChange }); }, componentWillUnmount: function() { var app = this.context.app var doc = app.doc; doc.disconnect(this); $(window).off('resize'); }, onDocumentChange: function() { setTimeout(function() { this.updateScrollbar(); }.bind(this), 0); }, componentDidUpdate: function() { this.updateScrollbar(); }, updateScrollbar: function() { var scrollbar = this.refs.scrollbar; var panelContentEl = this.refs.panelContent.getDOMNode(); // We need to await next repaint, otherwise dimensions will be wrong Substance.delay(function() { scrollbar.update(panelContentEl); },0); // (Re)-Bind scroll event on new panelContentEl $(panelContentEl).off('scroll'); $(panelContentEl).on('scroll', this._onScroll); }, _onScroll: function(e) { var panelContentEl = this.refs.panelContent.getDOMNode(); this.refs.scrollbar.update(panelContentEl); }, // Rendering // ----------------- getContentEditor: function() { var app = this.context.app; var doc = app.doc; var componentFactory = this.context.componentFactory; var ContainerClass = componentFactory.get("container"); return $$(ContainerClass, { doc: doc, node: doc.get("content"), ref: "contentEditor" });<|fim▁hole|> return $$("div", {className: "panel content-panel-component"}, // usually absolutely positioned $$(Scrollbar, { id: "content-scrollbar", contextId: app.state.contextId, highlights: app.getHighlightedNodes.bind(app), ref: "scrollbar" }), $$('div', {className: "panel-content", ref: "panelContent"}, // requires absolute positioning, overflow=auto this.getContentEditor() ) ); } }); var ContentPanel = React.createClass({ mixins: [ContentPanelMixin], displayName: "ContentPanel", }); module.exports = ContentPanel;<|fim▁end|>
}, render: function() { var app = this.context.app;
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from rest_framework import viewsets from rest_framework.permissions import IsAuthenticated from .models import Submission from .serializers import SubmissionSerializer from django.views.generic import ListView, DetailView from django.views.generic.edit import CreateView from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from problem.models import Problem from django.shortcuts import get_object_or_404 from .forms import SubmissionForm from django_tables2 import RequestConfig from .tables import SubmissionTable # from guardian.shortcuts import get_objects_for_user class SubmissionViewSet(viewsets.ModelViewSet): queryset = Submission.objects.all() serializer_class = SubmissionSerializer permission_classes = (IsAuthenticated,) class SubmissionListView(ListView): model = Submission <|fim▁hole|> context = super(SubmissionListView, self).get_context_data(**kwargs) submissions_table = SubmissionTable(self.get_queryset()) RequestConfig(self.request).configure(submissions_table) # add filter here context['submissions_table'] = submissions_table return context class SubmissionDetailView(DetailView): model = Submission def get_context_data(self, **kwargs): context = super(SubmissionDetailView, self).get_context_data(**kwargs) return context class SubmissionCreateView(CreateView): model = Submission form_class = SubmissionForm template_name_suffix = '_create_form' @method_decorator(login_required) def dispatch(self, request, pid=None, *args, **kwargs): pid = self.kwargs['pid'] self.problem = get_object_or_404(Problem.objects.all(), pk=pid) return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs) def get_form_kwargs(self): kw = super(SubmissionCreateView, self).get_form_kwargs() kw['qs'] = self.problem.allowed_lang.all() return kw def get_context_data(self, **kwargs): context = super(SubmissionCreateView, self).get_context_data(**kwargs) context['problem'] = self.problem return context def form_valid(self, form): self.object = form.save(commit=False) self.object.problem = self.problem self.object.user = self.request.user return super(SubmissionCreateView, self).form_valid(form)<|fim▁end|>
def get_context_data(self, **kwargs):
<|file_name|>setup.js<|end_file_name|><|fim▁begin|>require('babel-core/register') const path = require('path') const jsdom = require('jsdom').jsdom const exposedProperties = ['window', 'navigator', 'document'] global.document = jsdom('') global.window = document.defaultView Object.keys(document.defaultView).forEach((property) => { if (typeof global[property] === 'undefined') { exposedProperties.push(property)<|fim▁hole|> global[property] = document.defaultView[property] } }) global.navigator = { userAgent: 'node.js' } global.__base = `${path.resolve()}/`<|fim▁end|>
<|file_name|>ajaxwrapper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from __future__ import print_function import os import cgi from subprocess import Popen, PIPE, STDOUT # Java SCRIPTDIR = 'javaprolog' # SCRIPT = ['/usr/bin/java', '-cp', 'json-simple-1.1.1.jar:gnuprologjava-0.2.6.jar:.', 'Shrdlite'] import platform if platform.system()=='Windows': SCRIPT = ['java', '-cp', 'json-simple-1.1.1.jar;gnuprologjava-0.2.6.jar;.', 'Shrdlite'] else: SCRIPT = ['java', '-cp', 'json-simple-1.1.1.jar:gnuprologjava-0.2.6.jar:.', 'Shrdlite'] # # SWI Prolog # SCRIPTDIR = 'javaprolog' # SCRIPT = ['/usr/local/bin/swipl', '-q', '-g', 'main,halt', '-t', 'halt(1)', '-s', 'shrdlite.pl'] # # Haskell # SCRIPTDIR = 'haskell' # SCRIPT = ['/usr/bin/runhaskell', 'Shrdlite.hs'] # Python # SCRIPTDIR = 'python' # SCRIPT = ['/usr/bin/python', 'shrdlite.py'] while not os.path.isdir(SCRIPTDIR): SCRIPTDIR = os.path.join("..", SCRIPTDIR) print('Content-type:text/plain') print() try: form = cgi.FieldStorage() data = form.getfirst('data') script = Popen(SCRIPT, cwd=SCRIPTDIR, stdin=PIPE, stdout=PIPE, stderr=PIPE) out, err = script.communicate(data) print(out)<|fim▁hole|>except: import sys, traceback print(traceback.format_exc()) sys.exit(1)<|fim▁end|>
if err: raise Exception(err)
<|file_name|>character.rs<|end_file_name|><|fim▁begin|>use datatypes::{Coords, Region}; use terminal::{CharData, CellData, UseStyles}; use terminal::interfaces::{WriteableGrid, WriteableCell}; impl CharData for char { fn write<T>(&self, coords: Coords, styles: UseStyles, grid: &mut T) -> Coords where T: WriteableGrid, T::Cell: WriteableCell { if let Some(cell) = grid.writeable(coords) { cell.write(CellData::Char(*self), styles); } coords }<|fim▁hole|> #[cfg(any(debug_assertions, test))] fn repr(&self) -> String { self.to_string() } } pub struct WideChar(pub char, pub u32); impl WideChar { pub fn new(ch: char, width: u32) -> WideChar { WideChar(ch, width) } } impl CharData for WideChar { fn write<T>(&self, coords: Coords, styles: UseStyles, grid: &mut T) -> Coords where T: WriteableGrid, T::Cell: WriteableCell { let coords = grid.best_fit_for_region(Region::new(coords.x, coords.y, coords.x + self.1, coords.y + 1)); if let Some(cell) = grid.writeable(coords) { cell.write(CellData::Char(self.0), styles); } for extension_coords in (1..self.1).map(|i| Coords { x: coords.x + i, ..coords }) { if let Some(cell) = grid.writeable(extension_coords) { cell.write(CellData::Extension(coords), styles) } } Coords { x: coords.x + self.1 - 1, y: coords.y } } #[cfg(any(debug_assertions, test))] fn repr(&self) -> String { self.0.to_string() } } pub struct CharExtender(pub char); impl CharExtender { pub fn new(ch: char) -> CharExtender { CharExtender(ch) } } impl CharData for CharExtender { fn write<T>(&self, coords: Coords, styles: UseStyles, grid: &mut T) -> Coords where T: WriteableGrid, T::Cell: WriteableCell { match grid.find_cell_to_extend(coords) { Some(coords) => { if let Some(cell) = grid.writeable(coords) { cell.extend(self.0, styles); } coords } None => { if let Some(cell) = grid.writeable(coords) { cell.write(CellData::Char(self.0), styles); } coords } } } #[cfg(any(debug_assertions, test))] fn repr(&self) -> String { self.0.to_string() } }<|fim▁end|>
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>//! Customize line editor use std::default::Default; /// User preferences #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Config { /// Maximum number of entries in History. max_history_size: usize, // history_max_entries history_duplicates: HistoryDuplicates, history_ignore_space: bool, completion_type: CompletionType, /// When listing completion alternatives, only display /// one screen of possibilities at a time. completion_prompt_limit: usize, /// Duration (milliseconds) Rustyline will wait for a character when /// reading an ambiguous key sequence. keyseq_timeout: i32, /// Emacs or Vi mode edit_mode: EditMode, /// If true, each nonblank line returned by `readline` will be /// automatically added to the history. auto_add_history: bool, /// if colors should be enabled. color_mode: ColorMode, } impl Config { pub fn builder() -> Builder { Builder::new() } /// Tell the maximum length (i.e. number of entries) for the history. pub fn max_history_size(&self) -> usize { self.max_history_size } pub(crate) fn set_max_history_size(&mut self, max_size: usize) { self.max_history_size = max_size; } /// Tell if lines which match the previous history entry are saved or not /// in the history list. /// /// By default, they are ignored. pub fn history_duplicates(&self) -> HistoryDuplicates { self.history_duplicates } pub(crate) fn set_history_ignore_dups(&mut self, yes: bool) { self.history_duplicates = if yes { HistoryDuplicates::IgnoreConsecutive } else { HistoryDuplicates::AlwaysAdd }; } /// Tell if lines which begin with a space character are saved or not in /// the history list. /// /// By default, they are saved. pub fn history_ignore_space(&self) -> bool { self.history_ignore_space } pub(crate) fn set_history_ignore_space(&mut self, yes: bool) { self.history_ignore_space = yes; } pub fn completion_type(&self) -> CompletionType { self.completion_type } pub fn completion_prompt_limit(&self) -> usize { self.completion_prompt_limit } pub fn keyseq_timeout(&self) -> i32 { self.keyseq_timeout } pub fn edit_mode(&self) -> EditMode { self.edit_mode } /// Tell if lines are automatically added to the history. /// /// By default, they are not. pub fn auto_add_history(&self) -> bool { self.auto_add_history } /// Tell if colors should be enabled. /// /// By default, they are except if stdout is not a tty. pub fn color_mode(&self) -> ColorMode { self.color_mode } pub(crate) fn set_color_mode(&mut self, color_mode: ColorMode) { self.color_mode = color_mode; } } impl Default for Config { fn default() -> Config { Config { max_history_size: 100, history_duplicates: HistoryDuplicates::IgnoreConsecutive, history_ignore_space: false, completion_type: CompletionType::Circular, // TODO Validate completion_prompt_limit: 100, keyseq_timeout: -1, edit_mode: EditMode::Emacs, auto_add_history: false, color_mode: ColorMode::Enabled, } } } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum HistoryDuplicates { AlwaysAdd, /// a line will not be added to the history if it matches the previous entry IgnoreConsecutive, } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum CompletionType { /// Complete the next full match (like in Vim by default) Circular, /// Complete till longest match. /// When more than one match, list all matches /// (like in Bash/Readline). 
List, } /// Style of editing / Standard keymaps #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EditMode { Emacs, Vi, } /// Colorization mode #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ColorMode { Enabled, Forced, Disabled, } /// Configuration builder #[derive(Debug, Default)] pub struct Builder { p: Config, } impl Builder { pub fn new() -> Builder { Builder { p: Config::default(), } } /// Set the maximum length for the history. pub fn max_history_size(mut self, max_size: usize) -> Builder { self.set_max_history_size(max_size); self } /// Tell if lines which match the previous history entry are saved or not /// in the history list. /// /// By default, they are ignored. pub fn history_ignore_dups(mut self, yes: bool) -> Builder { self.set_history_ignore_dups(yes); self } /// Tell if lines which begin with a space character are saved or not in /// the history list. /// /// By default, they are saved. pub fn history_ignore_space(mut self, yes: bool) -> Builder { self.set_history_ignore_space(yes); self } /// Set `completion_type`. pub fn completion_type(mut self, completion_type: CompletionType) -> Builder { self.set_completion_type(completion_type); self } /// The number of possible completions that determines when the user is /// asked whether the list of possibilities should be displayed. pub fn completion_prompt_limit(mut self, completion_prompt_limit: usize) -> Builder { self.set_completion_prompt_limit(completion_prompt_limit); self } /// Timeout for ambiguous key sequences in milliseconds. /// Currently, it is used only to distinguish a single ESC from an ESC /// sequence. /// After seeing an ESC key, wait at most `keyseq_timeout_ms` for another /// byte. pub fn keyseq_timeout(mut self, keyseq_timeout_ms: i32) -> Builder { self.set_keyseq_timeout(keyseq_timeout_ms); self } /// Choose between Emacs or Vi mode. pub fn edit_mode(mut self, edit_mode: EditMode) -> Builder { self.set_edit_mode(edit_mode); self } /// Tell if lines are automatically added to the history.<|fim▁hole|> /// /// By default, they are not. pub fn auto_add_history(mut self, yes: bool) -> Builder { self.set_auto_add_history(yes); self } /// Forces colorization on or off. /// /// By default, colorization is on except if stdout is not a tty. pub fn color_mode(mut self, color_mode: ColorMode) -> Builder { self.set_color_mode(color_mode); self } pub fn build(self) -> Config { self.p } } impl Configurer for Builder { fn config_mut(&mut self) -> &mut Config { &mut self.p } } pub trait Configurer { fn config_mut(&mut self) -> &mut Config; /// Set the maximum length for the history. fn set_max_history_size(&mut self, max_size: usize) { self.config_mut().set_max_history_size(max_size); } /// Tell if lines which match the previous history entry are saved or not /// in the history list. /// /// By default, they are ignored. fn set_history_ignore_dups(&mut self, yes: bool) { self.config_mut().set_history_ignore_dups(yes); } /// Tell if lines which begin with a space character are saved or not in /// the history list. /// /// By default, they are saved. fn set_history_ignore_space(&mut self, yes: bool) { self.config_mut().set_history_ignore_space(yes); } /// Set `completion_type`. fn set_completion_type(&mut self, completion_type: CompletionType) { self.config_mut().completion_type = completion_type; } /// The number of possible completions that determines when the user is /// asked whether the list of possibilities should be displayed. 
fn set_completion_prompt_limit(&mut self, completion_prompt_limit: usize) { self.config_mut().completion_prompt_limit = completion_prompt_limit; } /// Timeout for ambiguous key sequences in milliseconds. fn set_keyseq_timeout(&mut self, keyseq_timeout_ms: i32) { self.config_mut().keyseq_timeout = keyseq_timeout_ms; } /// Choose between Emacs or Vi mode. fn set_edit_mode(&mut self, edit_mode: EditMode) { self.config_mut().edit_mode = edit_mode; match edit_mode { EditMode::Emacs => self.set_keyseq_timeout(-1), // no timeout EditMode::Vi => self.set_keyseq_timeout(500), } } /// Tell if lines are automatically added to the history. /// /// By default, they are not. fn set_auto_add_history(&mut self, yes: bool) { self.config_mut().auto_add_history = yes; } /// Forces colorization on or off. /// /// By default, colorization is on except if stdout is not a tty. fn set_color_mode(&mut self, color_mode: ColorMode) { self.config_mut().set_color_mode(color_mode); } }<|fim▁end|>
<|file_name|>let-destruct-ref.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|> pub fn main() { let x = ~"hello"; let ref y = x; assert_eq!(x.slice(0, x.len()), y.slice(0, y.len())); }<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.
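Each row in this dump encodes one fill-in-the-middle example: the prompt carries a prefix, a hole marker, the suffix, and a closing end marker, with the held-out completion following. A sketch of stitching such a row back into whole source text, with the row shape inferred from the markers visible above and a toy prompt used in place of a real row:

# Toy reassembly of a FIM row; the marker strings match the rows above
# (U+2581 is the low-block character inside the markers).
HOLE = "<|fim\u2581hole|>"
END = "<|fim\u2581end|>"

def reassemble(prompt: str, completion: str) -> str:
    body, sep, _ = prompt.partition(END)
    assert sep, "prompt must be terminated by the end marker"
    prefix, sep, suffix = body.partition(HOLE)
    assert sep, "prompt must contain the hole marker"
    return prefix + completion + suffix

prompt = "let x = 1;\n" + HOLE + "\nprintln!(\"{}\", y);" + END
print(reassemble(prompt, "let y = x + 1;"))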
<|file_name|>drivercommon.cpp<|end_file_name|><|fim▁begin|>#include "drivercommon.h" #include <common/fortconf.h> #include <common/fortioctl.h> #include <common/fortlog.h> #include <common/fortprov.h> namespace DriverCommon { QString deviceName() { return QLatin1String(FORT_DEVICE_NAME); } quint32 ioctlValidate() { return FORT_IOCTL_VALIDATE; } quint32 ioctlSetConf() { return FORT_IOCTL_SETCONF; } quint32 ioctlSetFlags() { return FORT_IOCTL_SETFLAGS; } quint32 ioctlGetLog() { return FORT_IOCTL_GETLOG; } quint32 ioctlAddApp() { return FORT_IOCTL_ADDAPP; } quint32 ioctlDelApp() { return FORT_IOCTL_DELAPP; } quint32 ioctlSetZones() { return FORT_IOCTL_SETZONES; } quint32 ioctlSetZoneFlag() { return FORT_IOCTL_SETZONEFLAG; } quint32 userErrorCode() { return FORT_ERROR_USER_ERROR; } qint64 systemToUnixTime(qint64 systemTime) { return fort_system_to_unix_time(systemTime); } int bufferSize() { return FORT_BUFFER_SIZE; } quint32 confIoConfOff() { return FORT_CONF_IO_CONF_OFF; } quint32 logBlockedHeaderSize() { return FORT_LOG_BLOCKED_HEADER_SIZE; } quint32 logBlockedSize(quint32 pathLen) { return FORT_LOG_BLOCKED_SIZE(pathLen); } quint32 logBlockedIpHeaderSize() { return FORT_LOG_BLOCKED_IP_HEADER_SIZE; } quint32 logBlockedIpSize(quint32 pathLen) { return FORT_LOG_BLOCKED_IP_SIZE(pathLen); } quint32 logProcNewHeaderSize() { return FORT_LOG_PROC_NEW_HEADER_SIZE; } quint32 logProcNewSize(quint32 pathLen) { return FORT_LOG_PROC_NEW_SIZE(pathLen); } quint32 logStatHeaderSize() { return FORT_LOG_STAT_HEADER_SIZE; }<|fim▁hole|> return FORT_LOG_STAT_TRAF_SIZE(procCount); } quint32 logStatSize(quint16 procCount) { return FORT_LOG_STAT_SIZE(procCount); } quint32 logTimeSize() { return FORT_LOG_TIME_SIZE; } quint8 logType(const char *input) { return fort_log_type(input); } void logBlockedHeaderWrite(char *output, bool blocked, quint32 pid, quint32 pathLen) { fort_log_blocked_header_write(output, blocked, pid, pathLen); } void logBlockedHeaderRead(const char *input, int *blocked, quint32 *pid, quint32 *pathLen) { fort_log_blocked_header_read(input, blocked, pid, pathLen); } void logBlockedIpHeaderWrite(char *output, int inbound, int inherited, quint8 blockReason, quint8 ipProto, quint16 localPort, quint16 remotePort, quint32 localIp, quint32 remoteIp, quint32 pid, quint32 pathLen) { fort_log_blocked_ip_header_write(output, inbound, inherited, blockReason, ipProto, localPort, remotePort, localIp, remoteIp, pid, pathLen); } void logBlockedIpHeaderRead(const char *input, int *inbound, int *inherited, quint8 *blockReason, quint8 *ipProto, quint16 *localPort, quint16 *remotePort, quint32 *localIp, quint32 *remoteIp, quint32 *pid, quint32 *pathLen) { fort_log_blocked_ip_header_read(input, inbound, inherited, blockReason, ipProto, localPort, remotePort, localIp, remoteIp, pid, pathLen); } void logProcNewHeaderWrite(char *output, quint32 pid, quint32 pathLen) { fort_log_proc_new_header_write(output, pid, pathLen); } void logProcNewHeaderRead(const char *input, quint32 *pid, quint32 *pathLen) { fort_log_proc_new_header_read(input, pid, pathLen); } void logStatTrafHeaderRead(const char *input, quint16 *procCount) { fort_log_stat_traf_header_read(input, procCount); } void logTimeWrite(char *output, int timeChanged, qint64 unixTime) { fort_log_time_write(output, timeChanged, unixTime); } void logTimeRead(const char *input, int *timeChanged, qint64 *unixTime) { fort_log_time_read(input, timeChanged, unixTime); } void confAppPermsMaskInit(void *drvConf) { PFORT_CONF conf = (PFORT_CONF) drvConf; 
fort_conf_app_perms_mask_init(conf, conf->flags.group_bits); } bool confIpInRange(const void *drvConf, quint32 ip, bool included, int addrGroupIndex) { const PFORT_CONF conf = (const PFORT_CONF) drvConf; const PFORT_CONF_ADDR_GROUP addr_group = fort_conf_addr_group_ref(conf, addrGroupIndex); const bool is_empty = included ? addr_group->include_is_empty : addr_group->exclude_is_empty; if (is_empty) return false; const PFORT_CONF_ADDR_LIST addr_list = included ? fort_conf_addr_group_include_list_ref(addr_group) : fort_conf_addr_group_exclude_list_ref(addr_group); return fort_conf_ip_inlist(ip, addr_list); } quint16 confAppFind(const void *drvConf, const QString &kernelPath) { const PFORT_CONF conf = (const PFORT_CONF) drvConf; const QString kernelPathLower = kernelPath.toLower(); const quint32 len = quint32(kernelPathLower.size()) * sizeof(WCHAR); const WCHAR *p = (PCWCHAR) kernelPathLower.utf16(); const FORT_APP_FLAGS app_flags = fort_conf_app_find(conf, (const PVOID) p, len, fort_conf_app_exe_find); return app_flags.v; } quint8 confAppGroupIndex(quint16 appFlags) { const FORT_APP_FLAGS app_flags = { appFlags }; return app_flags.group_index; } bool confAppBlocked(const void *drvConf, quint16 appFlags, qint8 *blockReason) { const PFORT_CONF conf = (const PFORT_CONF) drvConf; return fort_conf_app_blocked(conf, { appFlags }, blockReason); } quint16 confAppPeriodBits(const void *drvConf, quint8 hour, quint8 minute) { const PFORT_CONF conf = (const PFORT_CONF) drvConf; FORT_TIME time; time.hour = hour; time.minute = minute; return fort_conf_app_period_bits(conf, time, nullptr); } bool isTimeInPeriod(quint8 hour, quint8 minute, quint8 fromHour, quint8 fromMinute, quint8 toHour, quint8 toMinute) { FORT_TIME time; time.hour = hour; time.minute = minute; FORT_PERIOD period; period.from.hour = fromHour; period.from.minute = fromMinute; period.to.hour = toHour; period.to.minute = toMinute; return is_time_in_period(time, period); } int bitScanForward(quint32 mask) { return bit_scan_forward(mask); } void provUnregister() { fort_prov_unregister(nullptr); } }<|fim▁end|>
quint32 logStatTrafSize(quint16 procCount) {
<|file_name|>handler.go<|end_file_name|><|fim▁begin|>package httpd import ( "bytes" "compress/gzip" "encoding/json" "errors" "expvar" "fmt" "io" "io/ioutil" "log" "math" "net/http" "os" "runtime/debug" "strconv" "strings" "sync/atomic" "time" "github.com/bmizerany/pat" "github.com/dgrijalva/jwt-go" "github.com/gogo/protobuf/proto" "github.com/golang/snappy" "github.com/influxdata/influxdb" "github.com/influxdata/influxdb/models" "github.com/influxdata/influxdb/monitor" "github.com/influxdata/influxdb/monitor/diagnostics" "github.com/influxdata/influxdb/prometheus" "github.com/influxdata/influxdb/prometheus/remote" "github.com/influxdata/influxdb/query" "github.com/influxdata/influxdb/services/meta" "github.com/influxdata/influxdb/tsdb" "github.com/influxdata/influxdb/uuid" "github.com/influxdata/influxql" "github.com/prometheus/client_golang/prometheus/promhttp" "go.uber.org/zap" ) const ( // DefaultChunkSize specifies the maximum number of points that will // be read before sending results back to the engine. // // This has no relation to the number of bytes that are returned. DefaultChunkSize = 10000 DefaultDebugRequestsInterval = 10 * time.Second MaxDebugRequestsInterval = 6 * time.Hour ) // AuthenticationMethod defines the type of authentication used. type AuthenticationMethod int // Supported authentication methods. const ( // Authenticate using basic authentication. UserAuthentication AuthenticationMethod = iota // Authenticate with jwt. BearerAuthentication ) // TODO: Check HTTP response codes: 400, 401, 403, 409. // Route specifies how to handle a HTTP verb for a given endpoint. type Route struct { Name string Method string Pattern string Gzipped bool LoggingEnabled bool HandlerFunc interface{} } // Handler represents an HTTP handler for the InfluxDB server. type Handler struct { mux *pat.PatternServeMux Version string BuildType string MetaClient interface { Database(name string) *meta.DatabaseInfo Databases() []meta.DatabaseInfo Authenticate(username, password string) (ui meta.User, err error) User(username string) (meta.User, error) AdminUserExists() bool } QueryAuthorizer interface { AuthorizeQuery(u meta.User, query *influxql.Query, database string) error } WriteAuthorizer interface { AuthorizeWrite(username, database string) error } QueryExecutor *query.QueryExecutor Monitor interface { Statistics(tags map[string]string) ([]*monitor.Statistic, error) Diagnostics() (map[string]*diagnostics.Diagnostics, error) } PointsWriter interface { WritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error } Config *Config Logger *zap.Logger CLFLogger *log.Logger stats *Statistics requestTracker *RequestTracker } // NewHandler returns a new instance of handler with routes. func NewHandler(c Config) *Handler { h := &Handler{ mux: pat.New(), Config: &c, Logger: zap.NewNop(), CLFLogger: log.New(os.Stderr, "[httpd] ", 0), stats: &Statistics{}, requestTracker: NewRequestTracker(), } h.AddRoutes([]Route{ Route{ "query-options", // Satisfy CORS checks. "OPTIONS", "/query", false, true, h.serveOptions, }, Route{ "query", // Query serving route. "GET", "/query", true, true, h.serveQuery, }, Route{ "query", // Query serving route. "POST", "/query", true, true, h.serveQuery, }, Route{ "write-options", // Satisfy CORS checks. "OPTIONS", "/write", false, true, h.serveOptions, }, Route{ "write", // Data-ingest route. 
"POST", "/write", true, true, h.serveWrite, }, Route{ "prometheus-write", // Prometheus remote write "POST", "/api/v1/prom/write", false, true, h.servePromWrite, }, Route{ "prometheus-read", // Prometheus remote read "POST", "/api/v1/prom/read", true, true, h.servePromRead, }, Route{ // Ping "ping", "GET", "/ping", false, true, h.servePing, }, Route{ // Ping "ping-head", "HEAD", "/ping", false, true, h.servePing, }, Route{ // Ping w/ status "status", "GET", "/status", false, true, h.serveStatus, }, Route{ // Ping w/ status "status-head", "HEAD", "/status", false, true, h.serveStatus, }, Route{ "prometheus-metrics", "GET", "/metrics", false, true, promhttp.Handler().ServeHTTP, }, }...) return h } // Statistics maintains statistics for the httpd service. type Statistics struct { Requests int64 CQRequests int64 QueryRequests int64 WriteRequests int64 PingRequests int64 StatusRequests int64 WriteRequestBytesReceived int64 QueryRequestBytesTransmitted int64 PointsWrittenOK int64 PointsWrittenDropped int64 PointsWrittenFail int64 AuthenticationFailures int64 RequestDuration int64 QueryRequestDuration int64 WriteRequestDuration int64 ActiveRequests int64 ActiveWriteRequests int64 ClientErrors int64 ServerErrors int64 RecoveredPanics int64 PromWriteRequests int64 PromReadRequests int64 } // Statistics returns statistics for periodic monitoring. func (h *Handler) Statistics(tags map[string]string) []models.Statistic { return []models.Statistic{{ Name: "httpd", Tags: tags, Values: map[string]interface{}{ statRequest: atomic.LoadInt64(&h.stats.Requests), statQueryRequest: atomic.LoadInt64(&h.stats.QueryRequests), statWriteRequest: atomic.LoadInt64(&h.stats.WriteRequests), statPingRequest: atomic.LoadInt64(&h.stats.PingRequests), statStatusRequest: atomic.LoadInt64(&h.stats.StatusRequests), statWriteRequestBytesReceived: atomic.LoadInt64(&h.stats.WriteRequestBytesReceived), statQueryRequestBytesTransmitted: atomic.LoadInt64(&h.stats.QueryRequestBytesTransmitted), statPointsWrittenOK: atomic.LoadInt64(&h.stats.PointsWrittenOK), statPointsWrittenDropped: atomic.LoadInt64(&h.stats.PointsWrittenDropped), statPointsWrittenFail: atomic.LoadInt64(&h.stats.PointsWrittenFail), statAuthFail: atomic.LoadInt64(&h.stats.AuthenticationFailures), statRequestDuration: atomic.LoadInt64(&h.stats.RequestDuration), statQueryRequestDuration: atomic.LoadInt64(&h.stats.QueryRequestDuration), statWriteRequestDuration: atomic.LoadInt64(&h.stats.WriteRequestDuration), statRequestsActive: atomic.LoadInt64(&h.stats.ActiveRequests), statWriteRequestsActive: atomic.LoadInt64(&h.stats.ActiveWriteRequests), statClientError: atomic.LoadInt64(&h.stats.ClientErrors), statServerError: atomic.LoadInt64(&h.stats.ServerErrors), statRecoveredPanics: atomic.LoadInt64(&h.stats.RecoveredPanics), statPromWriteRequest: atomic.LoadInt64(&h.stats.PromWriteRequests), statPromReadRequest: atomic.LoadInt64(&h.stats.PromReadRequests), }, }} } // AddRoutes sets the provided routes on the handler. 
func (h *Handler) AddRoutes(routes ...Route) { for _, r := range routes { var handler http.Handler // If it's a handler func that requires authorization, wrap it in authentication if hf, ok := r.HandlerFunc.(func(http.ResponseWriter, *http.Request, meta.User)); ok { handler = authenticate(hf, h, h.Config.AuthEnabled) } // This is a normal handler signature and does not require authentication if hf, ok := r.HandlerFunc.(func(http.ResponseWriter, *http.Request)); ok { handler = http.HandlerFunc(hf) } handler = h.responseWriter(handler) if r.Gzipped { handler = gzipFilter(handler) } handler = cors(handler) handler = requestID(handler) if h.Config.LogEnabled && r.LoggingEnabled { handler = h.logging(handler, r.Name) } handler = h.recovery(handler, r.Name) // make sure recovery is always last h.mux.Add(r.Method, r.Pattern, handler) } } // ServeHTTP responds to HTTP request to the handler. func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { atomic.AddInt64(&h.stats.Requests, 1) atomic.AddInt64(&h.stats.ActiveRequests, 1) defer atomic.AddInt64(&h.stats.ActiveRequests, -1) start := time.Now() // Add version and build header to all InfluxDB requests. w.Header().Add("X-Influxdb-Version", h.Version) w.Header().Add("X-Influxdb-Build", h.BuildType) if strings.HasPrefix(r.URL.Path, "/debug/pprof") && h.Config.PprofEnabled { h.handleProfiles(w, r) } else if strings.HasPrefix(r.URL.Path, "/debug/vars") { h.serveExpvar(w, r) } else if strings.HasPrefix(r.URL.Path, "/debug/requests") { h.serveDebugRequests(w, r) } else { h.mux.ServeHTTP(w, r) } atomic.AddInt64(&h.stats.RequestDuration, time.Since(start).Nanoseconds()) } // writeHeader writes the provided status code in the response, and // updates relevant http error statistics. func (h *Handler) writeHeader(w http.ResponseWriter, code int) { switch code / 100 { case 4: atomic.AddInt64(&h.stats.ClientErrors, 1) case 5: atomic.AddInt64(&h.stats.ServerErrors, 1) } w.WriteHeader(code) } // serveQuery parses an incoming query and, if valid, executes the query. func (h *Handler) serveQuery(w http.ResponseWriter, r *http.Request, user meta.User) { atomic.AddInt64(&h.stats.QueryRequests, 1) defer func(start time.Time) { atomic.AddInt64(&h.stats.QueryRequestDuration, time.Since(start).Nanoseconds()) }(time.Now()) h.requestTracker.Add(r, user) // Retrieve the underlying ResponseWriter or initialize our own. rw, ok := w.(ResponseWriter) if !ok { rw = NewResponseWriter(w, r) } // Retrieve the node id the query should be executed on. nodeID, _ := strconv.ParseUint(r.FormValue("node_id"), 10, 64) var qr io.Reader // Attempt to read the form value from the "q" form value. if qp := strings.TrimSpace(r.FormValue("q")); qp != "" { qr = strings.NewReader(qp) } else if r.MultipartForm != nil && r.MultipartForm.File != nil { // If we have a multipart/form-data, try to retrieve a file from 'q'. if fhs := r.MultipartForm.File["q"]; len(fhs) > 0 { f, err := fhs[0].Open() if err != nil { h.httpError(rw, err.Error(), http.StatusBadRequest) return } defer f.Close() qr = f } } if qr == nil { h.httpError(rw, `missing required parameter "q"`, http.StatusBadRequest) return } epoch := strings.TrimSpace(r.FormValue("epoch")) p := influxql.NewParser(qr) db := r.FormValue("db") // Sanitize the request query params so it doesn't show up in the response logger. // Do this before anything else so a parsing error doesn't leak passwords. 
sanitize(r) // Parse the parameters rawParams := r.FormValue("params") if rawParams != "" { var params map[string]interface{} decoder := json.NewDecoder(strings.NewReader(rawParams)) decoder.UseNumber() if err := decoder.Decode(&params); err != nil { h.httpError(rw, "error parsing query parameters: "+err.Error(), http.StatusBadRequest) return } // Convert json.Number into int64 and float64 values for k, v := range params { if v, ok := v.(json.Number); ok { var err error if strings.Contains(string(v), ".") { params[k], err = v.Float64() } else { params[k], err = v.Int64() } if err != nil { h.httpError(rw, "error parsing json value: "+err.Error(), http.StatusBadRequest) return } } } p.SetParams(params) } // Parse query from query string. q, err := p.ParseQuery() if err != nil { h.httpError(rw, "error parsing query: "+err.Error(), http.StatusBadRequest) return } // Check authorization. if h.Config.AuthEnabled { if err := h.QueryAuthorizer.AuthorizeQuery(user, q, db); err != nil { if err, ok := err.(meta.ErrAuthorize); ok { h.Logger.Info(fmt.Sprintf("Unauthorized request | user: %q | query: %q | database %q", err.User, err.Query.String(), err.Database)) } h.httpError(rw, "error authorizing query: "+err.Error(), http.StatusForbidden) return } } // Parse chunk size. Use default if not provided or unparsable. chunked := r.FormValue("chunked") == "true" chunkSize := DefaultChunkSize if chunked { if n, err := strconv.ParseInt(r.FormValue("chunk_size"), 10, 64); err == nil && int(n) > 0 { chunkSize = int(n) } } // Parse whether this is an async command. async := r.FormValue("async") == "true" opts := query.ExecutionOptions{ Database: db, ChunkSize: chunkSize, ReadOnly: r.Method == "GET", NodeID: nodeID, } if h.Config.AuthEnabled { // The current user determines the authorized actions. opts.Authorizer = user } else { // Auth is disabled, so allow everything. opts.Authorizer = query.OpenAuthorizer } // Make sure if the client disconnects we signal the query to abort var closing chan struct{} if !async { closing = make(chan struct{}) if notifier, ok := w.(http.CloseNotifier); ok { // CloseNotify() is not guaranteed to send a notification when the query // is closed. Use this channel to signal that the query is finished to // prevent lingering goroutines that may be stuck. done := make(chan struct{}) defer close(done) notify := notifier.CloseNotify() go func() { // Wait for either the request to finish // or for the client to disconnect select { case <-done: case <-notify: close(closing) } }() opts.AbortCh = done } else { defer close(closing) } } // Execute query. results := h.QueryExecutor.ExecuteQuery(q, opts, closing) // If we are running in async mode, open a goroutine to drain the results // and return with a StatusNoContent. if async { go h.async(q, results) h.writeHeader(w, http.StatusNoContent) return } // if we're not chunking, this will be the in memory buffer for all results before sending to client resp := Response{Results: make([]*query.Result, 0)} // Status header is OK once this point is reached. // Attempt to flush the header immediately so the client gets the header information // and knows the query was accepted. h.writeHeader(rw, http.StatusOK) if w, ok := w.(http.Flusher); ok { w.Flush() } // pull all results from the channel rows := 0 for r := range results { // Ignore nil results. if r == nil { continue } // if requested, convert result timestamps to epoch if epoch != "" { convertToEpoch(r, epoch) } // Write out result immediately if chunked. 
if chunked { n, _ := rw.WriteResponse(Response{ Results: []*query.Result{r}, }) atomic.AddInt64(&h.stats.QueryRequestBytesTransmitted, int64(n)) w.(http.Flusher).Flush() continue } // Limit the number of rows that can be returned in a non-chunked // response. This is to prevent the server from going OOM when // returning a large response. If you want to return more than the // default chunk size, then use chunking to process multiple blobs. // Iterate through the series in this result to count the rows and // truncate any rows we shouldn't return. if h.Config.MaxRowLimit > 0 { for i, series := range r.Series { n := h.Config.MaxRowLimit - rows if n < len(series.Values) { // We have reached the maximum number of values. Truncate // the values within this row. series.Values = series.Values[:n] // Since this was truncated, it will always be a partial return. // Add this so the client knows we truncated the response. series.Partial = true } rows += len(series.Values) if rows >= h.Config.MaxRowLimit { // Drop any remaining series since we have already reached the row limit. if i < len(r.Series) { r.Series = r.Series[:i+1] } break } } } // It's not chunked so buffer results in memory. // Results for statements need to be combined together. // We need to check if this new result is for the same statement as // the last result, or for the next statement l := len(resp.Results) if l == 0 { resp.Results = append(resp.Results, r) } else if resp.Results[l-1].StatementID == r.StatementID { if r.Err != nil { resp.Results[l-1] = r continue } cr := resp.Results[l-1] rowsMerged := 0 if len(cr.Series) > 0 { lastSeries := cr.Series[len(cr.Series)-1] for _, row := range r.Series { if !lastSeries.SameSeries(row) { // Next row is for a different series than last. break } // Values are for the same series, so append them. lastSeries.Values = append(lastSeries.Values, row.Values...) rowsMerged++ } } // Append remaining rows as new rows. r.Series = r.Series[rowsMerged:] cr.Series = append(cr.Series, r.Series...) cr.Messages = append(cr.Messages, r.Messages...) cr.Partial = r.Partial } else { resp.Results = append(resp.Results, r) } // Drop out of this loop and do not process further results when we hit the row limit. if h.Config.MaxRowLimit > 0 && rows >= h.Config.MaxRowLimit { // If the result is marked as partial, remove that partial marking // here. While the series is partial and we would normally have // tried to return the rest in the next chunk, we are not using // chunking and are truncating the series so we don't want to // signal to the client that we plan on sending another JSON blob // with another result. The series, on the other hand, still // returns partial true if it was truncated or had more data to // send in a future chunk. r.Partial = false break } } // If it's not chunked we buffered everything in memory, so write it out if !chunked { n, _ := rw.WriteResponse(resp) atomic.AddInt64(&h.stats.QueryRequestBytesTransmitted, int64(n)) } } // async drains the results from an async query and logs a message if it fails. func (h *Handler) async(q *influxql.Query, results <-chan *query.Result) { for r := range results { // Drain the results and do nothing with them. // If it fails, log the failure so there is at least a record of it. if r.Err != nil { // Do not log when a statement was not executed since there would // have been an earlier error that was already logged. 
if r.Err == query.ErrNotExecuted { continue } h.Logger.Info(fmt.Sprintf("error while running async query: %s: %s", q, r.Err)) } } } // serveWrite receives incoming series data in line protocol format and writes it to the database. func (h *Handler) serveWrite(w http.ResponseWriter, r *http.Request, user meta.User) { atomic.AddInt64(&h.stats.WriteRequests, 1) atomic.AddInt64(&h.stats.ActiveWriteRequests, 1) defer func(start time.Time) { atomic.AddInt64(&h.stats.ActiveWriteRequests, -1) atomic.AddInt64(&h.stats.WriteRequestDuration, time.Since(start).Nanoseconds()) }(time.Now()) h.requestTracker.Add(r, user) database := r.URL.Query().Get("db") if database == "" { h.httpError(w, "database is required", http.StatusBadRequest) return } if di := h.MetaClient.Database(database); di == nil { h.httpError(w, fmt.Sprintf("database not found: %q", database), http.StatusNotFound) return } if h.Config.AuthEnabled { if user == nil { h.httpError(w, fmt.Sprintf("user is required to write to database %q", database), http.StatusForbidden) return } if err := h.WriteAuthorizer.AuthorizeWrite(user.ID(), database); err != nil { h.httpError(w, fmt.Sprintf("%q user is not authorized to write to database %q", user.ID(), database), http.StatusForbidden) return } } body := r.Body if h.Config.MaxBodySize > 0 { body = truncateReader(body, int64(h.Config.MaxBodySize)) } // Handle gzip decoding of the body if r.Header.Get("Content-Encoding") == "gzip" { b, err := gzip.NewReader(r.Body) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } defer b.Close() body = b } var bs []byte if r.ContentLength > 0 { if h.Config.MaxBodySize > 0 && r.ContentLength > int64(h.Config.MaxBodySize) { h.httpError(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge) return } // This will just be an initial hint for the gzip reader, as the // bytes.Buffer will grow as needed when ReadFrom is called bs = make([]byte, 0, r.ContentLength) } buf := bytes.NewBuffer(bs) _, err := buf.ReadFrom(body) if err != nil { if err == errTruncated { h.httpError(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge) return } if h.Config.WriteTracing { h.Logger.Info("Write handler unable to read bytes from request body") } h.httpError(w, err.Error(), http.StatusBadRequest) return } atomic.AddInt64(&h.stats.WriteRequestBytesReceived, int64(buf.Len())) if h.Config.WriteTracing { h.Logger.Info(fmt.Sprintf("Write body received by handler: %s", buf.Bytes())) } points, parseError := models.ParsePointsWithPrecision(buf.Bytes(), time.Now().UTC(), r.URL.Query().Get("precision")) // Not points parsed correctly so return the error now if parseError != nil && len(points) == 0 { if parseError.Error() == "EOF" { h.writeHeader(w, http.StatusOK) return } h.httpError(w, parseError.Error(), http.StatusBadRequest) return } // Determine required consistency level.<|fim▁hole|> if level != "" { var err error consistency, err = models.ParseConsistencyLevel(level) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } } // Write points. 
if err := h.PointsWriter.WritePoints(database, r.URL.Query().Get("rp"), consistency, user, points); influxdb.IsClientError(err) { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusBadRequest) return } else if influxdb.IsAuthorizationError(err) { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusForbidden) return } else if werr, ok := err.(tsdb.PartialWriteError); ok { atomic.AddInt64(&h.stats.PointsWrittenOK, int64(len(points)-werr.Dropped)) atomic.AddInt64(&h.stats.PointsWrittenDropped, int64(werr.Dropped)) h.httpError(w, werr.Error(), http.StatusBadRequest) return } else if err != nil { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusInternalServerError) return } else if parseError != nil { // We wrote some of the points atomic.AddInt64(&h.stats.PointsWrittenOK, int64(len(points))) // The other points failed to parse which means the client sent invalid line protocol. We return a 400 // response code as well as the lines that failed to parse. h.httpError(w, tsdb.PartialWriteError{Reason: parseError.Error()}.Error(), http.StatusBadRequest) return } atomic.AddInt64(&h.stats.PointsWrittenOK, int64(len(points))) h.writeHeader(w, http.StatusNoContent) } // serveOptions returns an empty response to comply with OPTIONS pre-flight requests func (h *Handler) serveOptions(w http.ResponseWriter, r *http.Request) { h.writeHeader(w, http.StatusNoContent) } // servePing returns a simple response to let the client know the server is running. func (h *Handler) servePing(w http.ResponseWriter, r *http.Request) { atomic.AddInt64(&h.stats.PingRequests, 1) h.writeHeader(w, http.StatusNoContent) } // serveStatus has been deprecated. func (h *Handler) serveStatus(w http.ResponseWriter, r *http.Request) { h.Logger.Info("WARNING: /status has been deprecated. Use /ping instead.") atomic.AddInt64(&h.stats.StatusRequests, 1) h.writeHeader(w, http.StatusNoContent) } // convertToEpoch converts result timestamps from time.Time to the specified epoch. 
func convertToEpoch(r *query.Result, epoch string) { divisor := int64(1) switch epoch { case "u": divisor = int64(time.Microsecond) case "ms": divisor = int64(time.Millisecond) case "s": divisor = int64(time.Second) case "m": divisor = int64(time.Minute) case "h": divisor = int64(time.Hour) } for _, s := range r.Series { for _, v := range s.Values { if ts, ok := v[0].(time.Time); ok { v[0] = ts.UnixNano() / divisor } } } } // servePromWrite receives data in the Prometheus remote write protocol and writes it // to the database func (h *Handler) servePromWrite(w http.ResponseWriter, r *http.Request, user meta.User) { atomic.AddInt64(&h.stats.WriteRequests, 1) atomic.AddInt64(&h.stats.ActiveWriteRequests, 1) atomic.AddInt64(&h.stats.PromWriteRequests, 1) defer func(start time.Time) { atomic.AddInt64(&h.stats.ActiveWriteRequests, -1) atomic.AddInt64(&h.stats.WriteRequestDuration, time.Since(start).Nanoseconds()) }(time.Now()) h.requestTracker.Add(r, user) database := r.URL.Query().Get("db") if database == "" { h.httpError(w, "database is required", http.StatusBadRequest) return } if di := h.MetaClient.Database(database); di == nil { h.httpError(w, fmt.Sprintf("database not found: %q", database), http.StatusNotFound) return } if h.Config.AuthEnabled { if user == nil { h.httpError(w, fmt.Sprintf("user is required to write to database %q", database), http.StatusForbidden) return } if err := h.WriteAuthorizer.AuthorizeWrite(user.ID(), database); err != nil { h.httpError(w, fmt.Sprintf("%q user is not authorized to write to database %q", user.ID(), database), http.StatusForbidden) return } } body := r.Body if h.Config.MaxBodySize > 0 { body = truncateReader(body, int64(h.Config.MaxBodySize)) } var bs []byte if r.ContentLength > 0 { if h.Config.MaxBodySize > 0 && r.ContentLength > int64(h.Config.MaxBodySize) { h.httpError(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge) return } // This will just be an initial hint for the reader, as the // bytes.Buffer will grow as needed when ReadFrom is called bs = make([]byte, 0, r.ContentLength) } buf := bytes.NewBuffer(bs) _, err := buf.ReadFrom(body) if err != nil { if err == errTruncated { h.httpError(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge) return } if h.Config.WriteTracing { h.Logger.Info("Prom write handler unable to read bytes from request body") } h.httpError(w, err.Error(), http.StatusBadRequest) return } atomic.AddInt64(&h.stats.WriteRequestBytesReceived, int64(buf.Len())) if h.Config.WriteTracing { h.Logger.Info(fmt.Sprintf("Prom write body received by handler: %s", buf.Bytes())) } reqBuf, err := snappy.Decode(nil, buf.Bytes()) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } // Convert the Prometheus remote write request to Influx Points var req remote.WriteRequest if err := proto.Unmarshal(reqBuf, &req); err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } points, err := prometheus.WriteRequestToPoints(&req) if err != nil { if h.Config.WriteTracing { h.Logger.Info(fmt.Sprintf("Prom write handler: %s", err.Error())) } if err != prometheus.ErrNaNDropped { h.httpError(w, err.Error(), http.StatusBadRequest) return } } // Determine required consistency level. level := r.URL.Query().Get("consistency") consistency := models.ConsistencyLevelOne if level != "" { consistency, err = models.ParseConsistencyLevel(level) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } } // Write points. 
if err := h.PointsWriter.WritePoints(database, r.URL.Query().Get("rp"), consistency, user, points); influxdb.IsClientError(err) { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusBadRequest) return } else if influxdb.IsAuthorizationError(err) { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusForbidden) return } else if werr, ok := err.(tsdb.PartialWriteError); ok { atomic.AddInt64(&h.stats.PointsWrittenOK, int64(len(points)-werr.Dropped)) atomic.AddInt64(&h.stats.PointsWrittenDropped, int64(werr.Dropped)) h.httpError(w, werr.Error(), http.StatusBadRequest) return } else if err != nil { atomic.AddInt64(&h.stats.PointsWrittenFail, int64(len(points))) h.httpError(w, err.Error(), http.StatusInternalServerError) return } atomic.AddInt64(&h.stats.PointsWrittenOK, int64(len(points))) h.writeHeader(w, http.StatusNoContent) } // servePromRead will convert a Prometheus remote read request into an InfluxQL query and // return data in Prometheus remote read protobuf format. func (h *Handler) servePromRead(w http.ResponseWriter, r *http.Request, user meta.User) { compressed, err := ioutil.ReadAll(r.Body) if err != nil { h.httpError(w, err.Error(), http.StatusInternalServerError) return } reqBuf, err := snappy.Decode(nil, compressed) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } var req remote.ReadRequest if err := proto.Unmarshal(reqBuf, &req); err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } // Query the DB and create a ReadResponse for Prometheus db := r.FormValue("db") q, err := prometheus.ReadRequestToInfluxQLQuery(&req, db, r.FormValue("rp")) if err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } // Check authorization. if h.Config.AuthEnabled { if err := h.QueryAuthorizer.AuthorizeQuery(user, q, db); err != nil { if err, ok := err.(meta.ErrAuthorize); ok { h.Logger.Info(fmt.Sprintf("Unauthorized request | user: %q | query: %q | database %q", err.User, err.Query.String(), err.Database)) } h.httpError(w, "error authorizing query: "+err.Error(), http.StatusForbidden) return } } opts := query.ExecutionOptions{ Database: db, ChunkSize: DefaultChunkSize, ReadOnly: true, } if h.Config.AuthEnabled { // The current user determines the authorized actions. opts.Authorizer = user } else { // Auth is disabled, so allow everything. opts.Authorizer = query.OpenAuthorizer } // Make sure if the client disconnects we signal the query to abort var closing chan struct{} closing = make(chan struct{}) if notifier, ok := w.(http.CloseNotifier); ok { // CloseNotify() is not guaranteed to send a notification when the query // is closed. Use this channel to signal that the query is finished to // prevent lingering goroutines that may be stuck. done := make(chan struct{}) defer close(done) notify := notifier.CloseNotify() go func() { // Wait for either the request to finish // or for the client to disconnect select { case <-done: case <-notify: close(closing) } }() opts.AbortCh = done } else { defer close(closing) } // Execute query. results := h.QueryExecutor.ExecuteQuery(q, opts, closing) resp := &remote.ReadResponse{ Results: []*remote.QueryResult{{}}, } // pull all results from the channel for r := range results { // Ignore nil results. 
if r == nil {
			continue
		}

		// read the series data and convert into Prometheus samples
		for _, s := range r.Series {
			ts := &remote.TimeSeries{
				Labels: prometheus.TagsToLabelPairs(s.Tags),
			}

			for _, v := range s.Values {
				t, ok := v[0].(time.Time)
				if !ok {
					h.httpError(w, fmt.Sprintf("value %v wasn't a time", v[0]), http.StatusBadRequest)
					return
				}
				val, ok := v[1].(float64)
				if !ok {
					h.httpError(w, fmt.Sprintf("value %v wasn't a float64", v[1]), http.StatusBadRequest)
					return
				}
				timestamp := t.UnixNano() / int64(time.Millisecond)
				ts.Samples = append(ts.Samples, &remote.Sample{
					TimestampMs: timestamp,
					Value:       val,
				})
			}

			resp.Results[0].Timeseries = append(resp.Results[0].Timeseries, ts)
		}
	}

	data, err := proto.Marshal(resp)
	if err != nil {
		h.httpError(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/x-protobuf")
	w.Header().Set("Content-Encoding", "snappy")

	compressed = snappy.Encode(nil, data)
	if _, err := w.Write(compressed); err != nil {
		h.httpError(w, err.Error(), http.StatusInternalServerError)
		return
	}

	atomic.AddInt64(&h.stats.QueryRequestBytesTransmitted, int64(len(compressed)))
}

// serveExpvar serves internal metrics in /debug/vars format over HTTP.
func (h *Handler) serveExpvar(w http.ResponseWriter, r *http.Request) {
	// Retrieve statistics from the monitor.
	stats, err := h.Monitor.Statistics(nil)
	if err != nil {
		h.httpError(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Retrieve diagnostics from the monitor.
	diags, err := h.Monitor.Diagnostics()
	if err != nil {
		h.httpError(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json; charset=utf-8")

	first := true
	if val := diags["system"]; val != nil {
		jv, err := parseSystemDiagnostics(val)
		if err != nil {
			h.httpError(w, err.Error(), http.StatusInternalServerError)
			return
		}

		data, err := json.Marshal(jv)
		if err != nil {
			h.httpError(w, err.Error(), http.StatusInternalServerError)
			return
		}

		first = false
		fmt.Fprintln(w, "{")
		fmt.Fprintf(w, "\"system\": %s", data)
	} else {
		fmt.Fprintln(w, "{")
	}

	if val := expvar.Get("cmdline"); val != nil {
		if !first {
			fmt.Fprintln(w, ",")
		}
		first = false
		fmt.Fprintf(w, "\"cmdline\": %s", val)
	}
	if val := expvar.Get("memstats"); val != nil {
		if !first {
			fmt.Fprintln(w, ",")
		}
		first = false
		fmt.Fprintf(w, "\"memstats\": %s", val)
	}

	for _, s := range stats {
		val, err := json.Marshal(s)
		if err != nil {
			continue
		}

		// Very hackily create a unique key.
		buf := bytes.NewBufferString(s.Name)
		if path, ok := s.Tags["path"]; ok {
			fmt.Fprintf(buf, ":%s", path)
			if id, ok := s.Tags["id"]; ok {
				fmt.Fprintf(buf, ":%s", id)
			}
		} else if bind, ok := s.Tags["bind"]; ok {
			if proto, ok := s.Tags["proto"]; ok {
				fmt.Fprintf(buf, ":%s", proto)
			}
			fmt.Fprintf(buf, ":%s", bind)
		} else if database, ok := s.Tags["database"]; ok {
			fmt.Fprintf(buf, ":%s", database)
			if rp, ok := s.Tags["retention_policy"]; ok {
				fmt.Fprintf(buf, ":%s", rp)
				if name, ok := s.Tags["name"]; ok {
					fmt.Fprintf(buf, ":%s", name)
				}
				if dest, ok := s.Tags["destination"]; ok {
					fmt.Fprintf(buf, ":%s", dest)
				}
			}
		}
		key := buf.String()

		if !first {
			fmt.Fprintln(w, ",")
		}
		first = false
		fmt.Fprintf(w, "%q: ", key)
		w.Write(bytes.TrimSpace(val))
	}
	fmt.Fprintln(w, "\n}")
}

// serveDebugRequests will track requests for a period of time.
func (h *Handler) serveDebugRequests(w http.ResponseWriter, r *http.Request) { var d time.Duration if s := r.URL.Query().Get("seconds"); s == "" { d = DefaultDebugRequestsInterval } else if seconds, err := strconv.ParseInt(s, 10, 64); err != nil { h.httpError(w, err.Error(), http.StatusBadRequest) return } else { d = time.Duration(seconds) * time.Second if d > MaxDebugRequestsInterval { h.httpError(w, fmt.Sprintf("exceeded maximum interval time: %s > %s", influxql.FormatDuration(d), influxql.FormatDuration(MaxDebugRequestsInterval)), http.StatusBadRequest) return } } var closing <-chan bool if notifier, ok := w.(http.CloseNotifier); ok { closing = notifier.CloseNotify() } profile := h.requestTracker.TrackRequests() timer := time.NewTimer(d) select { case <-timer.C: profile.Stop() case <-closing: // Connection was closed early. profile.Stop() timer.Stop() return } w.Header().Set("Content-Type", "application/json; charset=utf-8") w.Header().Add("Connection", "close") fmt.Fprintln(w, "{") first := true for req, st := range profile.Requests { val, err := json.Marshal(st) if err != nil { continue } if !first { fmt.Fprintln(w, ",") } first = false fmt.Fprintf(w, "%q: ", req.String()) w.Write(bytes.TrimSpace(val)) } fmt.Fprintln(w, "\n}") } // parseSystemDiagnostics converts the system diagnostics into an appropriate // format for marshaling to JSON in the /debug/vars format. func parseSystemDiagnostics(d *diagnostics.Diagnostics) (map[string]interface{}, error) { // We don't need PID in this case. m := map[string]interface{}{"currentTime": nil, "started": nil, "uptime": nil} for key := range m { // Find the associated column. ci := -1 for i, col := range d.Columns { if col == key { ci = i break } } if ci == -1 { return nil, fmt.Errorf("unable to find column %q", key) } if len(d.Rows) < 1 || len(d.Rows[0]) <= ci { return nil, fmt.Errorf("no data for column %q", key) } var res interface{} switch v := d.Rows[0][ci].(type) { case time.Time: res = v case string: // Should be a string representation of a time.Duration d, err := time.ParseDuration(v) if err != nil { return nil, err } res = int64(d.Seconds()) default: return nil, fmt.Errorf("value for column %q is not parsable (got %T)", key, v) } m[key] = res } return m, nil } // httpError writes an error to the client in a standard format. func (h *Handler) httpError(w http.ResponseWriter, errmsg string, code int) { if code == http.StatusUnauthorized { // If an unauthorized header will be sent back, add a WWW-Authenticate header // as an authorization challenge. w.Header().Set("WWW-Authenticate", fmt.Sprintf("Basic realm=\"%s\"", h.Config.Realm)) } else if code/100 != 2 { sz := math.Min(float64(len(errmsg)), 1024.0) w.Header().Set("X-InfluxDB-Error", errmsg[:int(sz)]) } response := Response{Err: errors.New(errmsg)} if rw, ok := w.(ResponseWriter); ok { h.writeHeader(w, code) rw.WriteResponse(response) return } // Default implementation if the response writer hasn't been replaced // with our special response writer type. w.Header().Add("Content-Type", "application/json") h.writeHeader(w, code) b, _ := json.Marshal(response) w.Write(b) } // Filters and filter helpers type credentials struct { Method AuthenticationMethod Username string Password string Token string } // parseCredentials parses a request and returns the authentication credentials. // The credentials may be present as URL query params, or as a Basic // Authentication header. 
// As params: http://127.0.0.1/query?u=username&p=password // As basic auth: http://username:[email protected] // As Bearer token in Authorization header: Bearer <JWT_TOKEN_BLOB> func parseCredentials(r *http.Request) (*credentials, error) { q := r.URL.Query() // Check for username and password in URL params. if u, p := q.Get("u"), q.Get("p"); u != "" && p != "" { return &credentials{ Method: UserAuthentication, Username: u, Password: p, }, nil } // Check for the HTTP Authorization header. if s := r.Header.Get("Authorization"); s != "" { // Check for Bearer token. strs := strings.Split(s, " ") if len(strs) == 2 && strs[0] == "Bearer" { return &credentials{ Method: BearerAuthentication, Token: strs[1], }, nil } // Check for basic auth. if u, p, ok := r.BasicAuth(); ok { return &credentials{ Method: UserAuthentication, Username: u, Password: p, }, nil } } return nil, fmt.Errorf("unable to parse authentication credentials") } // authenticate wraps a handler and ensures that if user credentials are passed in // an attempt is made to authenticate that user. If authentication fails, an error is returned. // // There is one exception: if there are no users in the system, authentication is not required. This // is to facilitate bootstrapping of a system with authentication enabled. func authenticate(inner func(http.ResponseWriter, *http.Request, meta.User), h *Handler, requireAuthentication bool) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Return early if we are not authenticating if !requireAuthentication { inner(w, r, nil) return } var user meta.User // TODO corylanou: never allow this in the future without users if requireAuthentication && h.MetaClient.AdminUserExists() { creds, err := parseCredentials(r) if err != nil { atomic.AddInt64(&h.stats.AuthenticationFailures, 1) h.httpError(w, err.Error(), http.StatusUnauthorized) return } switch creds.Method { case UserAuthentication: if creds.Username == "" { atomic.AddInt64(&h.stats.AuthenticationFailures, 1) h.httpError(w, "username required", http.StatusUnauthorized) return } user, err = h.MetaClient.Authenticate(creds.Username, creds.Password) if err != nil { atomic.AddInt64(&h.stats.AuthenticationFailures, 1) h.httpError(w, "authorization failed", http.StatusUnauthorized) return } case BearerAuthentication: keyLookupFn := func(token *jwt.Token) (interface{}, error) { // Check for expected signing method. if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) } return []byte(h.Config.SharedSecret), nil } // Parse and validate the token. token, err := jwt.Parse(creds.Token, keyLookupFn) if err != nil { h.httpError(w, err.Error(), http.StatusUnauthorized) return } else if !token.Valid { h.httpError(w, "invalid token", http.StatusUnauthorized) return } claims, ok := token.Claims.(jwt.MapClaims) if !ok { h.httpError(w, "problem authenticating token", http.StatusInternalServerError) h.Logger.Info("Could not assert JWT token claims as jwt.MapClaims") return } // Make sure an expiration was set on the token. if exp, ok := claims["exp"].(float64); !ok || exp <= 0.0 { h.httpError(w, "token expiration required", http.StatusUnauthorized) return } // Get the username from the token. 
username, ok := claims["username"].(string) if !ok { h.httpError(w, "username in token must be a string", http.StatusUnauthorized) return } else if username == "" { h.httpError(w, "token must contain a username", http.StatusUnauthorized) return } // Lookup user in the metastore. if user, err = h.MetaClient.User(username); err != nil { h.httpError(w, err.Error(), http.StatusUnauthorized) return } else if user == nil { h.httpError(w, meta.ErrUserNotFound.Error(), http.StatusUnauthorized) return } default: h.httpError(w, "unsupported authentication", http.StatusUnauthorized) } } inner(w, r, user) }) } // cors responds to incoming requests and adds the appropriate cors headers // TODO: corylanou: add the ability to configure this in our config func cors(inner http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if origin := r.Header.Get("Origin"); origin != "" { w.Header().Set(`Access-Control-Allow-Origin`, origin) w.Header().Set(`Access-Control-Allow-Methods`, strings.Join([]string{ `DELETE`, `GET`, `OPTIONS`, `POST`, `PUT`, }, ", ")) w.Header().Set(`Access-Control-Allow-Headers`, strings.Join([]string{ `Accept`, `Accept-Encoding`, `Authorization`, `Content-Length`, `Content-Type`, `X-CSRF-Token`, `X-HTTP-Method-Override`, }, ", ")) w.Header().Set(`Access-Control-Expose-Headers`, strings.Join([]string{ `Date`, `X-InfluxDB-Version`, `X-InfluxDB-Build`, }, ", ")) } if r.Method == "OPTIONS" { return } inner.ServeHTTP(w, r) }) } func requestID(inner http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // X-Request-Id takes priority. rid := r.Header.Get("X-Request-Id") // If X-Request-Id is empty, then check Request-Id if rid == "" { rid = r.Header.Get("Request-Id") } // If Request-Id is empty then generate a v1 UUID. if rid == "" { rid = uuid.TimeUUID().String() } // We read Request-Id in other handler code so we'll use that naming // convention from this point in the request cycle. r.Header.Set("Request-Id", rid) // Set the request ID on the response headers. // X-Request-Id is the most common name for a request ID header. w.Header().Set("X-Request-Id", rid) // We will also set Request-Id for backwards compatibility with previous // versions of InfluxDB. w.Header().Set("Request-Id", rid) inner.ServeHTTP(w, r) }) } func (h *Handler) logging(inner http.Handler, name string) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { start := time.Now() l := &responseLogger{w: w} inner.ServeHTTP(l, r) h.CLFLogger.Println(buildLogLine(l, r, start)) // Log server errors. if l.Status()/100 == 5 { errStr := l.Header().Get("X-InfluxDB-Error") if errStr != "" { h.Logger.Error(fmt.Sprintf("[%d] - %q", l.Status(), errStr)) } } }) } func (h *Handler) responseWriter(inner http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w = NewResponseWriter(w, r) inner.ServeHTTP(w, r) }) } // if the env var is set, and the value is truthy, then we will *not* // recover from a panic. 
var willCrash bool func init() { var err error if willCrash, err = strconv.ParseBool(os.Getenv(query.PanicCrashEnv)); err != nil { willCrash = false } } func (h *Handler) recovery(inner http.Handler, name string) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { start := time.Now() l := &responseLogger{w: w} defer func() { if err := recover(); err != nil { logLine := buildLogLine(l, r, start) logLine = fmt.Sprintf("%s [panic:%s] %s", logLine, err, debug.Stack()) h.CLFLogger.Println(logLine) http.Error(w, http.StatusText(http.StatusInternalServerError), 500) atomic.AddInt64(&h.stats.RecoveredPanics, 1) // Capture the panic in _internal stats. if willCrash { h.CLFLogger.Println("\n\n=====\nAll goroutines now follow:") buf := debug.Stack() h.CLFLogger.Printf("%s\n", buf) os.Exit(1) // If we panic then the Go server will recover. } } }() inner.ServeHTTP(l, r) }) } // Response represents a list of statement results. type Response struct { Results []*query.Result Err error } // MarshalJSON encodes a Response struct into JSON. func (r Response) MarshalJSON() ([]byte, error) { // Define a struct that outputs "error" as a string. var o struct { Results []*query.Result `json:"results,omitempty"` Err string `json:"error,omitempty"` } // Copy fields to output struct. o.Results = r.Results if r.Err != nil { o.Err = r.Err.Error() } return json.Marshal(&o) } // UnmarshalJSON decodes the data into the Response struct. func (r *Response) UnmarshalJSON(b []byte) error { var o struct { Results []*query.Result `json:"results,omitempty"` Err string `json:"error,omitempty"` } err := json.Unmarshal(b, &o) if err != nil { return err } r.Results = o.Results if o.Err != "" { r.Err = errors.New(o.Err) } return nil } // Error returns the first error from any statement. // Returns nil if no errors occurred on any statements. func (r *Response) Error() error { if r.Err != nil { return r.Err } for _, rr := range r.Results { if rr.Err != nil { return rr.Err } } return nil }<|fim▁end|>
level := r.URL.Query().Get("consistency") consistency := models.ConsistencyLevelOne
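For illustration, the write path above is driven entirely by query-string parameters and a line-protocol body. A minimal Python client sketch of that contract, assuming the handler is mounted at /write (as in stock InfluxDB) and a local server on port 8086; the measurement in the usage line is a made-up example:

import gzip
import urllib.parse
import urllib.request

def write_lines(lines, db, rp=None, precision="ns", consistency="one",
                host="http://127.0.0.1:8086"):
    # db, rp, precision and consistency mirror the query parameters the
    # handler reads above; db is the only required one.
    params = {"db": db, "precision": precision, "consistency": consistency}
    if rp is not None:
        params["rp"] = rp
    url = "%s/write?%s" % (host, urllib.parse.urlencode(params))
    # The handler transparently decompresses the body when the
    # Content-Encoding header is gzip.
    data = gzip.compress("\n".join(lines).encode("utf-8"))
    req = urllib.request.Request(url, data=data, method="POST")
    req.add_header("Content-Encoding", "gzip")
    return urllib.request.urlopen(req)  # 204 No Content on success

# write_lines(["cpu,host=server01 value=0.64"], db="mydb")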
<|file_name|>auth.go<|end_file_name|><|fim▁begin|>// Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package smtp import ( "crypto/hmac" "crypto/md5" "errors" "fmt" ) // Auth is implemented by an SMTP authentication mechanism. type Auth interface { // Start begins an authentication with a server. // It returns the name of the authentication protocol // and optionally data to include in the initial AUTH message // sent to the server. It can return proto == "" to indicate // that the authentication should be skipped. // If it returns a non-nil error, the SMTP client aborts // the authentication attempt and closes the connection. Start(server *ServerInfo) (proto string, toServer []byte, err error) <|fim▁hole|> // Next continues the authentication. The server has just sent // the fromServer data. If more is true, the server expects a // response, which Next should return as toServer; otherwise // Next should return toServer == nil. // If Next returns a non-nil error, the SMTP client aborts // the authentication attempt and closes the connection. Next(fromServer []byte, more bool) (toServer []byte, err error) } // ServerInfo records information about an SMTP server. type ServerInfo struct { Name string // SMTP server name TLS bool // using TLS, with valid certificate for Name Auth []string // advertised authentication mechanisms } type plainAuth struct { identity, username, password string host string } // PlainAuth returns an Auth that implements the PLAIN authentication // mechanism as defined in RFC 4616. The returned Auth uses the given // username and password to authenticate to host and act as identity. // Usually identity should be the empty string, to act as username. // // PlainAuth will only send the credentials if the connection is using TLS // or is connected to localhost. Otherwise authentication will fail with an // error, without sending the credentials. func PlainAuth(identity, username, password, host string) Auth { return &plainAuth{identity, username, password, host} } func isLocalhost(name string) bool { return name == "localhost" || name == "127.0.0.1" || name == "::1" } func (a *plainAuth) Start(server *ServerInfo) (string, []byte, error) { // Must have TLS, or else localhost server. // Note: If TLS is not true, then we can't trust ANYTHING in ServerInfo. // In particular, it doesn't matter if the server advertises PLAIN auth. // That might just be the attacker saying // "it's ok, you can trust me with your password." if !server.TLS && !isLocalhost(server.Name) { return "", nil, errors.New("unencrypted connection") } if server.Name != a.host { return "", nil, errors.New("wrong host name") } resp := []byte(a.identity + "\x00" + a.username + "\x00" + a.password) return "PLAIN", resp, nil } func (a *plainAuth) Next(fromServer []byte, more bool) ([]byte, error) { if more { // We've already sent everything. return nil, errors.New("unexpected server challenge") } return nil, nil } type cramMD5Auth struct { username, secret string } // CRAMMD5Auth returns an Auth that implements the CRAM-MD5 authentication // mechanism as defined in RFC 2195. // The returned Auth uses the given username and secret to authenticate // to the server using the challenge-response mechanism. 
func CRAMMD5Auth(username, secret string) Auth { return &cramMD5Auth{username, secret} } func (a *cramMD5Auth) Start(server *ServerInfo) (string, []byte, error) { return "CRAM-MD5", nil, nil } func (a *cramMD5Auth) Next(fromServer []byte, more bool) ([]byte, error) { if more { d := hmac.New(md5.New, []byte(a.secret)) d.Write(fromServer) s := make([]byte, 0, d.Size()) return []byte(fmt.Sprintf("%s %x", a.username, d.Sum(s))), nil } return nil, nil }<|fim▁end|>
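The CRAM-MD5 exchange above reduces to a single keyed hash. A sketch of the same response computation in Python, with made-up credentials and challenge, to make the behavior of Next concrete:

import hashlib
import hmac

def cram_md5_response(username, secret, challenge):
    # HMAC-MD5 of the server challenge keyed by the shared secret,
    # prefixed with the username, exactly as Next does above.
    digest = hmac.new(secret, challenge, hashlib.md5).hexdigest()
    return username + b" " + digest.encode("ascii")

# Hypothetical values for illustration only.
print(cram_md5_response(b"user", b"secret",
                        b"<1896.697170952@postoffice.example>"))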
<|file_name|>ScoreCaculating.py<|end_file_name|><|fim▁begin|>import math
import sys
sys.path.append('..')
import Analyse.AFX as AFX

class State:
    def __init__(self):
        self.SenShifterState = True
        self.MoodStrength = 1.0
        self.positive = 0.0
        self.negative = 0.0

    def Process(self, score):
        if self.SenShifterState:
            self.positive += score
        else:
            self.negative += score

    def Clear(self):
        self.SenShifterState = True
        self.MoodStrength = 1.0
        self.positive = 0.0
        self.negative = 0.0

    def ChangeMood(self, mood):
        if mood.startswith('I'):
            self.MoodStrength *= 2
        if mood.startswith('D'):
            self.MoodStrength /= 2

    def returnScore(self):
        score = self.positive - self.negative
        score *= self.MoodStrength
        return score

# calculating the score of a specific sentence
def CaculateASentence(Sentence):
    S = State()
    for word in Sentence:
        tag = AFX.GetWord(word, 'Tag')
        # if the word has no orientation or it is a boring word, just ignore it
        if tag == 0.0 or tag == "Bor":
            continue
        if tag == "Con":
            S.Clear()
        elif tag == "Neg":
            # a negation word here flips the state of the sentiment shifter
            S.SenShifterState = not S.SenShifterState
        elif tag == "Inc" or tag == "Dow":
            S.ChangeMood(tag)
        else:
            S.Process(tag)
    return S.returnScore()

# calculating the score of the Document with specific rules
def Run(Data):
    ScoreList = []
    counter = 0
    for Sen in Data:
        if Sen != []:
            if AFX.GetWord(Sen[0], 'Tag') == "Con":
                word = AFX.GetWord(Sen[0], 'Word')<|fim▁hole|>
            counter += 1
    pass

# Most people don't like rainy weather, even though I like it quite a lot.<|fim▁end|>
print Sen print CaculateASentence(Sen)
<|file_name|>ReplayMessageHandler.java<|end_file_name|><|fim▁begin|>package tracker.message.handlers; import elasta.composer.message.handlers.MessageHandler; import io.vertx.core.eventbus.Message;<|fim▁hole|> */ public interface ReplayMessageHandler extends MessageHandler<JsonObject> { @Override void handle(Message<JsonObject> event); }<|fim▁end|>
import io.vertx.core.json.JsonObject; /** * Created by sohan on 2017-07-26.
<|file_name|>thrifttest_handler.go<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * 'License'); you may not use this file except in compliance * with the License. You may obtain a copy of the License at *<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package tests import ( "errors" "thrift" "thrifttest" "time" ) type SecondServiceHandler struct { } func NewSecondServiceHandler() *SecondServiceHandler { return &SecondServiceHandler{} } func (p *SecondServiceHandler) BlahBlah() (err error) { return nil } func (p *SecondServiceHandler) SecondtestString(thing string) (r string, err error) { return thing, nil } type ThriftTestHandler struct { } func NewThriftTestHandler() *ThriftTestHandler { return &ThriftTestHandler{} } func (p *ThriftTestHandler) TestVoid() (err error) { return nil } func (p *ThriftTestHandler) TestString(thing string) (r string, err error) { return thing, nil } func (p *ThriftTestHandler) TestBool(thing bool) (r bool, err error) { return thing, nil } func (p *ThriftTestHandler) TestByte(thing int8) (r int8, err error) { return thing, nil } func (p *ThriftTestHandler) TestI32(thing int32) (r int32, err error) { return thing, nil } func (p *ThriftTestHandler) TestI64(thing int64) (r int64, err error) { return thing, nil } func (p *ThriftTestHandler) TestDouble(thing float64) (r float64, err error) { return thing, nil } func (p *ThriftTestHandler) TestBinary(thing []byte) (r []byte, err error) { return thing, nil } func (p *ThriftTestHandler) TestStruct(thing *thrifttest.Xtruct) (r *thrifttest.Xtruct, err error) { return thing, nil } func (p *ThriftTestHandler) TestNest(thing *thrifttest.Xtruct2) (r *thrifttest.Xtruct2, err error) { return thing, nil } func (p *ThriftTestHandler) TestMap(thing map[int32]int32) (r map[int32]int32, err error) { return thing, nil } func (p *ThriftTestHandler) TestStringMap(thing map[string]string) (r map[string]string, err error) { return thing, nil } func (p *ThriftTestHandler) TestSet(thing map[int32]bool) (r map[int32]bool, err error) { return thing, nil } func (p *ThriftTestHandler) TestList(thing []int32) (r []int32, err error) { return thing, nil } func (p *ThriftTestHandler) TestEnum(thing thrifttest.Numberz) (r thrifttest.Numberz, err error) { return thing, nil } func (p *ThriftTestHandler) TestTypedef(thing thrifttest.UserId) (r thrifttest.UserId, err error) { return thing, nil } func (p *ThriftTestHandler) TestMapMap(hello int32) (r map[int32]map[int32]int32, err error) { r = make(map[int32]map[int32]int32) pos := make(map[int32]int32) neg := make(map[int32]int32) for i := int32(1); i < 5; i++ { pos[i] = i neg[-i] = -i } r[4] = pos r[-4] = neg return r, nil } func (p *ThriftTestHandler) TestInsanity(argument *thrifttest.Insanity) (r map[thrifttest.UserId]map[thrifttest.Numberz]*thrifttest.Insanity, err error) { hello := thrifttest.NewXtruct() hello.StringThing = "Hello2" hello.ByteThing = 2 hello.I32Thing = 2 hello.I64Thing = 2 goodbye := 
thrifttest.NewXtruct() goodbye.StringThing = "Goodbye4" goodbye.ByteThing = 4 goodbye.I32Thing = 4 goodbye.I64Thing = 4 crazy := thrifttest.NewInsanity() crazy.UserMap = make(map[thrifttest.Numberz]thrifttest.UserId) crazy.UserMap[thrifttest.Numberz_EIGHT] = 8 crazy.UserMap[thrifttest.Numberz_FIVE] = 5 crazy.Xtructs = []*thrifttest.Xtruct{goodbye, hello} first_map := make(map[thrifttest.Numberz]*thrifttest.Insanity) second_map := make(map[thrifttest.Numberz]*thrifttest.Insanity) first_map[thrifttest.Numberz_TWO] = crazy first_map[thrifttest.Numberz_THREE] = crazy looney := thrifttest.NewInsanity() second_map[thrifttest.Numberz_SIX] = looney var insane = make(map[thrifttest.UserId]map[thrifttest.Numberz]*thrifttest.Insanity) insane[1] = first_map insane[2] = second_map return insane, nil } func (p *ThriftTestHandler) TestMulti(arg0 int8, arg1 int32, arg2 int64, arg3 map[int16]string, arg4 thrifttest.Numberz, arg5 thrifttest.UserId) (r *thrifttest.Xtruct, err error) { r = thrifttest.NewXtruct() r.StringThing = "Hello2" r.ByteThing = arg0 r.I32Thing = arg1 r.I64Thing = arg2 return r, nil } func (p *ThriftTestHandler) TestException(arg string) (err error) { if arg == "Xception" { x := thrifttest.NewXception() x.ErrorCode = 1001 x.Message = arg return x } else if arg == "TException" { return thrift.TException(errors.New(arg)) } else { return nil } } func (p *ThriftTestHandler) TestMultiException(arg0 string, arg1 string) (r *thrifttest.Xtruct, err error) { if arg0 == "Xception" { x := thrifttest.NewXception() x.ErrorCode = 1001 x.Message = "This is an Xception" return nil, x } else if arg0 == "Xception2" { x2 := thrifttest.NewXception2() x2.ErrorCode = 2002 x2.StructThing = thrifttest.NewXtruct() x2.StructThing.StringThing = "This is an Xception2" return nil, x2 } res := thrifttest.NewXtruct() res.StringThing = arg1 return res, nil } func (p *ThriftTestHandler) TestOneway(secondsToSleep int32) (err error) { time.Sleep(time.Second * time.Duration(secondsToSleep)) return nil }<|fim▁end|>
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import jsonfield.fields import django.utils.timezone from django.conf import settings class Migration(migrations.Migration): dependencies = [ ("contenttypes", "0002_remove_content_type_name"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name="Action", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True,<|fim▁hole|> ), ), ("actor_object_id", models.CharField(max_length=255)), ("verb", models.CharField(max_length=255)), ("description", models.TextField(null=True, blank=True)), ( "target_object_id", models.CharField(max_length=255, null=True, blank=True), ), ( "action_object_object_id", models.CharField(max_length=255, null=True, blank=True), ), ("timestamp", models.DateTimeField(default=django.utils.timezone.now)), ("public", models.BooleanField(default=True)), ("data", jsonfield.fields.JSONField(null=True, blank=True)), ( "action_object_content_type", models.ForeignKey( related_name="action_object", blank=True, to="contenttypes.ContentType", null=True, ), ), ( "actor_content_type", models.ForeignKey( related_name="actor", to="contenttypes.ContentType" ), ), ( "target_content_type", models.ForeignKey( related_name="target", blank=True, to="contenttypes.ContentType", null=True, ), ), ], options={"ordering": ("-timestamp",)}, ), migrations.CreateModel( name="Follow", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("object_id", models.CharField(max_length=255)), ( "actor_only", models.BooleanField( default=True, verbose_name=b"Only follow actions where the object is the target.", ), ), ("started", models.DateTimeField(default=django.utils.timezone.now)), ("content_type", models.ForeignKey(to="contenttypes.ContentType")), ("user", models.ForeignKey(to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterUniqueTogether( name="follow", unique_together=set([("user", "content_type", "object_id")]) ), ]<|fim▁end|>
<|file_name|>music_box.py<|end_file_name|><|fim▁begin|>from gpiozero import Button import pygame.mixer<|fim▁hole|>pygame.mixer.init() button_sounds = { Button(2): Sound("samples/drum_tom_mid_hard.wav"), Button(3): Sound("samples/drum_cymbal_open.wav"), } for button, sound in button_sounds.items(): button.when_pressed = sound.play pause()<|fim▁end|>
from pygame.mixer import Sound from signal import pause
<|file_name|>test_json.py<|end_file_name|><|fim▁begin|>from unittest import TestCase from firstinbattle.deck import Card from firstinbattle.json_util import js class TestJson(TestCase): def test_encode_loads(self): cards = { Card(5, 'diamond'), Card(9, 'heart'), } encoded_str = js.encode({ 'message': 'test_msg', 'cards': cards, }) <|fim▁hole|> self.assertEqual(decoded_obj['message'], 'test_msg') for card in cards: self.assertIn( {'number': card.number, 'suit': card.suit}, decoded_obj['cards'] )<|fim▁end|>
decoded_obj = js.loads(encoded_str)
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import styled from "styled-components" export const Code = styled.code` padding: 3px 8px; background-color: ${({ theme }) => theme.posts.inlineCodeBackground}; color: ${({ theme }) => theme.posts.inlineCodeColor}; border-radius: 6px;<|fim▁hole|><|fim▁end|>
`
<|file_name|>ordered-dict.py<|end_file_name|><|fim▁begin|>""" Task You are the manager of a supermarket. You have a list of N items together with their prices that consumers bought on a particular day. Your task is to print each item_name and net_price in order of its first occurrence. item_name = Name of the item. net_price = Quantity of the item sold multiplied by the price of each item. Input Format The first line contains the number of items, N. The next N lines contains the item's name and price, separated by a space. Constraints 0<N<=00 Output Format Print the item_name and net_price in order of its first occurrence. Sample Input 9 BANANA FRIES 12 POTATO CHIPS 30 APPLE JUICE 10 CANDY 5 APPLE JUICE 10 CANDY 5 CANDY 5 CANDY 5 POTATO CHIPS 30 Sample Output BANANA FRIES 12 POTATO CHIPS 60 APPLE JUICE 20 CANDY 20 Explanation BANANA FRIES: Quantity bought: 1, Price: 12 Net Price: 12 POTATO CHIPS: Quantity bought: 2, Price: 30 Net Price: 60 APPLE JUICE: Quantity bought: 2, Price: 10 Net Price: 20 CANDY: Quantity bought: 4, Price: 5 Net Price: 20 """ from collections import OrderedDict N = int(raw_input()) ordered_dict = OrderedDict() for i in range(N): l = raw_input().split() item = " ".join(l[0:-1]) price = int("".join(l[-1:])) if ordered_dict.has_key(item):<|fim▁hole|> ordered_dict[item] = total_price else: ordered_dict[item] = price for k, v in ordered_dict.items(): print k, v<|fim▁end|>
total_price = ordered_dict.get(item) total_price += price
<|file_name|>make_mo_files.py<|end_file_name|><|fim▁begin|># (C) Copyright 2014 Voyager Search # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os<|fim▁hole|>import glob def make_mo_files(): """Utility function to generate MO files.""" po_files = glob.glob(os.path.join(os.path.dirname(__file__), 'LC_MESSAGES', '*.po')) try: sys.path.append(os.path.join(os.path.dirname(sys.executable), "tools", "i18n")) import msgfmt for po_file in po_files: msgfmt.make(po_file, po_file.replace('.po', '.mo')) except (IOError, ImportError): pass if __name__ == '__main__': make_mo_files()<|fim▁end|>
import sys
<|file_name|>ByteToCharISO8859_7.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Sun designates this * particular file as subject to the "Classpath" exception as provided * by Sun in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, * CA 95054 USA or visit www.sun.com if you need additional information or * have any questions. */ package sun.io; import sun.nio.cs.ISO_8859_7; /** * A table to convert ISO8859_7 to Unicode * * @author ConverterGenerator tool */ public class ByteToCharISO8859_7 extends ByteToCharSingleByte { private final static ISO_8859_7 nioCoder = new ISO_8859_7(); public String getCharacterEncoding() { return "ISO8859_7"; } public ByteToCharISO8859_7() { super.byteToCharTable = nioCoder.getDecoderSingleByteMappings(); } }<|fim▁end|>
* Copyright 1996-2003 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
<|file_name|>app-routing.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { Routes, RouterModule } from '@angular/router'; <|fim▁hole|>const routes: Routes = [ { path: 'members', component: MembersComponent }, { path: 'seating-map', component: SeatingMapComponent }, { path: '', redirectTo: '/members', pathMatch: 'full' } ]; @NgModule({ imports: [RouterModule.forRoot(routes)], exports: [RouterModule] }) export class AppRoutingModule { }<|fim▁end|>
import { MembersComponent } from './members/members.component'; import { SeatingMapComponent } from './seating-map/seating-map.component';
<|file_name|>HorizontalRule.js<|end_file_name|><|fim▁begin|>dojo.provide("dijit.form.HorizontalRule"); dojo.require("dijit._Widget"); dojo.require("dijit._Templated"); dojo.declare("dijit.form.HorizontalRule", [dijit._Widget, dijit._Templated], { // summary: // Hash marks for `dijit.form.HorizontalSlider` templateString: '<div class="dijitRuleContainer dijitRuleContainerH"></div>', // count: Integer // Number of hash marks to generate count: 3, // container: String // For HorizontalSlider, this is either "topDecoration" or "bottomDecoration", // and indicates whether this rule goes above or below the slider. container: "containerNode", // ruleStyle: String // CSS style to apply to individual hash marks ruleStyle: "", _positionPrefix: '<div class="dijitRuleMark dijitRuleMarkH" style="left:', _positionSuffix: '%;', _suffix: '"></div>', _genHTML: function(pos, ndx){ return this._positionPrefix + pos + this._positionSuffix + this.ruleStyle + this._suffix; }, // _isHorizontal: [protected extension] Boolean // VerticalRule will override this... _isHorizontal: true, postCreate: function(){ var innerHTML; if(this.count==1){ innerHTML = this._genHTML(50, 0); }else{ var i; var interval = 100 / (this.count-1); if(!this._isHorizontal || this.isLeftToRight()){ innerHTML = this._genHTML(0, 0); for(i=1; i < this.count-1; i++){ innerHTML += this._genHTML(interval*i, i);<|fim▁hole|> for(i=1; i < this.count-1; i++){ innerHTML += this._genHTML(100-interval*i, i); } innerHTML += this._genHTML(0, this.count-1); } } this.domNode.innerHTML = innerHTML; } });<|fim▁end|>
} innerHTML += this._genHTML(100, this.count-1); }else{ innerHTML = this._genHTML(100, 0);
<|file_name|>subrepo.py<|end_file_name|><|fim▁begin|># subrepo.py - sub-repository handling for Mercurial # # Copyright 2009-2010 Matt Mackall <[email protected]> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. import errno, os, re, shutil, posixpath, sys import xml.dom.minidom import stat, subprocess, tarfile from i18n import _ import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod hg = None propertycache = util.propertycache nullstate = ('', '', 'empty') def _expandedabspath(path): ''' get a path or url and if it is a path expand it and return an absolute path ''' expandedpath = util.urllocalpath(util.expandpath(path)) u = util.url(expandedpath) if not u.scheme: path = util.normpath(os.path.abspath(u.path)) return path def _getstorehashcachename(remotepath): '''get a unique filename for the store hash cache of a remote repository''' return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12] def _calcfilehash(filename): data = '' if os.path.exists(filename): fd = open(filename, 'rb') data = fd.read() fd.close() return util.sha1(data).hexdigest() class SubrepoAbort(error.Abort): """Exception class used to avoid handling a subrepo error more than once""" def __init__(self, *args, **kw): error.Abort.__init__(self, *args, **kw) self.subrepo = kw.get('subrepo') self.cause = kw.get('cause') def annotatesubrepoerror(func): def decoratedmethod(self, *args, **kargs): try: res = func(self, *args, **kargs) except SubrepoAbort, ex: # This exception has already been handled raise ex except error.Abort, ex: subrepo = subrelpath(self) errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo # avoid handling this exception by raising a SubrepoAbort exception raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo, cause=sys.exc_info()) return res return decoratedmethod def state(ctx, ui): """return a state dict, mapping subrepo paths configured in .hgsub to tuple: (source from .hgsub, revision from .hgsubstate, kind (key in types dict)) """ p = config.config() def read(f, sections=None, remap=None): if f in ctx: try: data = ctx[f].data() except IOError, err: if err.errno != errno.ENOENT: raise # handle missing subrepo spec files as removed ui.warn(_("warning: subrepo spec file %s not found\n") % f) return p.parse(f, data, sections, remap, read) else: raise util.Abort(_("subrepo spec file %s not found") % f) if '.hgsub' in ctx: read('.hgsub') for path, src in ui.configitems('subpaths'): p.set('subpaths', path, src, ui.configsource('subpaths', path)) rev = {} if '.hgsubstate' in ctx: try: for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()): l = l.lstrip() if not l: continue try: revision, path = l.split(" ", 1) except ValueError: raise util.Abort(_("invalid subrepository revision " "specifier in .hgsubstate line %d") % (i + 1)) rev[path] = revision except IOError, err: if err.errno != errno.ENOENT: raise def remap(src): for pattern, repl in p.items('subpaths'): # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub # does a string decode. repl = repl.encode('string-escape') # However, we still want to allow back references to go # through unharmed, so we turn r'\\1' into r'\1'. Again, # extra escapes are needed because re.sub string decodes. 
repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl) try: src = re.sub(pattern, repl, src, 1) except re.error, e: raise util.Abort(_("bad subrepository pattern in %s: %s") % (p.source('subpaths', pattern), e)) return src state = {} for path, src in p[''].items(): kind = 'hg' if src.startswith('['): if ']' not in src: raise util.Abort(_('missing ] in subrepo source')) kind, src = src.split(']', 1) kind = kind[1:] src = src.lstrip() # strip any extra whitespace after ']' if not util.url(src).isabs(): parent = _abssource(ctx._repo, abort=False) if parent: parent = util.url(parent) parent.path = posixpath.join(parent.path or '', src) parent.path = posixpath.normpath(parent.path) joined = str(parent) # Remap the full joined path and use it if it changes, # else remap the original source. remapped = remap(joined) if remapped == joined: src = remap(src) else: src = remapped src = remap(src) state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind) return state def writestate(repo, state): """rewrite .hgsubstate in (outer) repo with these subrepo states""" lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)] repo.wwrite('.hgsubstate', ''.join(lines), '') def submerge(repo, wctx, mctx, actx, overwrite): """delegated from merge.applyupdates: merging of .hgsubstate file in working context, merging context and ancestor context""" if mctx == actx: # backwards? actx = wctx.p1() s1 = wctx.substate s2 = mctx.substate sa = actx.substate sm = {} repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx)) def debug(s, msg, r=""): if r: r = "%s:%s:%s" % r repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r)) for s, l in sorted(s1.iteritems()): a = sa.get(s, nullstate) ld = l # local state with possible dirty flag for compares if wctx.sub(s).dirty(): ld = (l[0], l[1] + "+") if wctx == actx: # overwrite a = ld if s in s2: r = s2[s] if ld == r or r == a: # no change or local is newer sm[s] = l continue elif ld == a: # other side changed debug(s, "other changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r elif ld[0] != r[0]: # sources differ if repo.ui.promptchoice( _(' subrepository sources for %s differ\n' 'use (l)ocal source (%s) or (r)emote source (%s)?' '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0): debug(s, "prompt changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r elif ld[1] == a[1]: # local side is unchanged debug(s, "other side changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r else: debug(s, "both sides changed") option = repo.ui.promptchoice( _(' subrepository %s diverged (local revision: %s, ' 'remote revision: %s)\n' '(M)erge, keep (l)ocal or keep (r)emote?' '$$ &Merge $$ &Local $$ &Remote') % (s, l[1][:12], r[1][:12]), 0) if option == 0: wctx.sub(s).merge(r) sm[s] = l debug(s, "merge with", r) elif option == 1: sm[s] = l debug(s, "keep local subrepo revision", l) else: wctx.sub(s).get(r, overwrite) sm[s] = r debug(s, "get remote subrepo revision", r) elif ld == a: # remote removed, local unchanged debug(s, "remote removed, remove") wctx.sub(s).remove() elif a == nullstate: # not present in remote or ancestor debug(s, "local added, keep") sm[s] = l continue else: if repo.ui.promptchoice( _(' local changed subrepository %s which remote removed\n' 'use (c)hanged version or (d)elete?' 
'$$ &Changed $$ &Delete') % s, 0): debug(s, "prompt remove") wctx.sub(s).remove() for s, r in sorted(s2.items()): if s in s1: continue elif s not in sa: debug(s, "remote added, get", r) mctx.sub(s).get(r) sm[s] = r elif r != sa[s]: if repo.ui.promptchoice( _(' remote changed subrepository %s which local removed\n' 'use (c)hanged version or (d)elete?' '$$ &Changed $$ &Delete') % s, 0) == 0: debug(s, "prompt recreate", r) wctx.sub(s).get(r) sm[s] = r # record merged .hgsubstate writestate(repo, sm) return sm def _updateprompt(ui, sub, dirty, local, remote): if dirty: msg = (_(' subrepository sources for %s differ\n' 'use (l)ocal source (%s) or (r)emote source (%s)?\n' '$$ &Local $$ &Remote') % (subrelpath(sub), local, remote)) else: msg = (_(' subrepository sources for %s differ (in checked out ' 'version)\n' 'use (l)ocal source (%s) or (r)emote source (%s)?\n' '$$ &Local $$ &Remote') % (subrelpath(sub), local, remote)) return ui.promptchoice(msg, 0) def reporelpath(repo): """return path to this (sub)repo as seen from outermost repo""" parent = repo while util.safehasattr(parent, '_subparent'): parent = parent._subparent p = parent.root.rstrip(os.sep) return repo.root[len(p) + 1:] def subrelpath(sub): """return path to this subrepo as seen from outermost repo""" if util.safehasattr(sub, '_relpath'): return sub._relpath if not util.safehasattr(sub, '_repo'): return sub._path return reporelpath(sub._repo) def _abssource(repo, push=False, abort=True): """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. Abort or return None if no source found.""" if util.safehasattr(repo, '_subparent'): source = util.url(repo._subsource) if source.isabs(): return str(source) source.path = posixpath.normpath(source.path) parent = _abssource(repo._subparent, push, abort=False) if parent: parent = util.url(util.pconvert(parent)) parent.path = posixpath.join(parent.path or '', source.path) parent.path = posixpath.normpath(parent.path) return str(parent) else: # recursion reached top repo if util.safehasattr(repo, '_subtoppath'): return repo._subtoppath if push and repo.ui.config('paths', 'default-push'): return repo.ui.config('paths', 'default-push') if repo.ui.config('paths', 'default'): return repo.ui.config('paths', 'default') if repo.sharedpath != repo.path: # chop off the .hg component to get the default path form return os.path.dirname(repo.sharedpath) if abort: raise util.Abort(_("default path for subrepository not found")) def itersubrepos(ctx1, ctx2): """find subrepos in ctx1 or ctx2""" # Create a (subpath, ctx) mapping where we prefer subpaths from # ctx1. The subpaths from ctx2 are important when the .hgsub file # has been modified (in ctx2) but not yet committed (in ctx1). 
subpaths = dict.fromkeys(ctx2.substate, ctx2) subpaths.update(dict.fromkeys(ctx1.substate, ctx1)) for subpath, ctx in sorted(subpaths.iteritems()): yield subpath, ctx.sub(subpath) def subrepo(ctx, path): """return instance of the right subrepo class for subrepo in path""" # subrepo inherently violates our import layering rules # because it wants to make repo objects from deep inside the stack # so we manually delay the circular imports to not break # scripts that don't use our demand-loading global hg import hg as h hg = h scmutil.pathauditor(ctx._repo.root)(path) state = ctx.substate[path] if state[2] not in types: raise util.Abort(_('unknown subrepo type %s') % state[2]) return types[state[2]](ctx, path, state[:2]) # subrepo classes need to implement the following abstract class: class abstractsubrepo(object): def storeclean(self, path): """ returns true if the repository has not changed since it was last cloned from or pushed to a given repository. """ return False def dirty(self, ignoreupdate=False): """returns true if the dirstate of the subrepo is dirty or does not match current stored state. If ignoreupdate is true, only check whether the subrepo has uncommitted changes in its dirstate. """ raise NotImplementedError def basestate(self): """current working directory base state, disregarding .hgsubstate state and working directory modifications""" raise NotImplementedError def checknested(self, path): """check if path is a subrepository within this repository""" return False def commit(self, text, user, date): """commit the current changes to the subrepo with the given log message. Use given user and date if possible. Return the new state of the subrepo. """ raise NotImplementedError def remove(self): """remove the subrepo (should verify the dirstate is not dirty first) """ raise NotImplementedError def get(self, state, overwrite=False): """run whatever commands are needed to put the subrepo into this state """ raise NotImplementedError def merge(self, state): """merge currently-saved state with the new state.""" raise NotImplementedError def push(self, opts): """perform whatever action is analogous to 'hg push' This may be a no-op on some systems. 
""" raise NotImplementedError def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly): return [] def status(self, rev2, **opts): return [], [], [], [], [], [], [] def diff(self, ui, diffopts, node2, match, prefix, **opts): pass def outgoing(self, ui, dest, opts): return 1 def incoming(self, ui, source, opts): return 1 def files(self): """return filename iterator""" raise NotImplementedError def filedata(self, name): """return file data""" raise NotImplementedError def fileflags(self, name): """return file flags""" return '' def archive(self, ui, archiver, prefix, match=None): if match is not None: files = [f for f in self.files() if match(f)] else: files = self.files() total = len(files) relpath = subrelpath(self) ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'), total=total) for i, name in enumerate(files): flags = self.fileflags(name) mode = 'x' in flags and 0755 or 0644 symlink = 'l' in flags archiver.addfile(os.path.join(prefix, self._path, name), mode, symlink, self.filedata(name)) ui.progress(_('archiving (%s)') % relpath, i + 1, unit=_('files'), total=total) ui.progress(_('archiving (%s)') % relpath, None) return total def walk(self, match): ''' walk recursively through the directory tree, finding all files matched by the match function ''' pass def forget(self, ui, match, prefix): return ([], []) def revert(self, ui, substate, *pats, **opts): ui.warn('%s: reverting %s subrepos is unsupported\n' \ % (substate[0], substate[2])) return [] class hgsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): self._path = path self._state = state r = ctx._repo root = r.wjoin(path) create = False if not os.path.exists(os.path.join(root, '.hg')): create = True util.makedirs(root) self._repo = hg.repository(r.baseui, root, create=create) for s, k in [('ui', 'commitsubrepos')]: v = r.ui.config(s, k) if v: self._repo.ui.setconfig(s, k, v) self._repo.ui.setconfig('ui', '_usedassubrepo', 'True') self._initrepo(r, state[0], create) def storeclean(self, path): clean = True lock = self._repo.lock() itercache = self._calcstorehash(path) try: for filehash in self._readstorehashcache(path): if filehash != itercache.next(): clean = False break except StopIteration: # the cached and current pull states have a different size clean = False if clean: try: itercache.next() # the cached and current pull states have a different size clean = False except StopIteration: pass lock.release() return clean def _calcstorehash(self, remotepath): '''calculate a unique "store hash" This method is used to to detect when there are changes that may require a push to a given remote path.''' # sort the files that will be hashed in increasing (likely) file size filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i') yield '# %s\n' % _expandedabspath(remotepath) for relname in filelist: absname = os.path.normpath(self._repo.join(relname)) yield '%s = %s\n' % (relname, _calcfilehash(absname)) def _getstorehashcachepath(self, remotepath): '''get a unique path for the store hash cache''' return self._repo.join(os.path.join( 'cache', 'storehash', _getstorehashcachename(remotepath))) def _readstorehashcache(self, remotepath): '''read the store hash cache for a given remote repository''' cachefile = self._getstorehashcachepath(remotepath) if not os.path.exists(cachefile): return '' fd = open(cachefile, 'r') pullstate = fd.readlines() fd.close() return pullstate def _cachestorehash(self, remotepath): '''cache the current store hash Each remote repo requires its own store hash cache, because 
a subrepo store may be "clean" versus a given remote repo, but not versus another ''' cachefile = self._getstorehashcachepath(remotepath) lock = self._repo.lock() storehash = list(self._calcstorehash(remotepath)) cachedir = os.path.dirname(cachefile) if not os.path.exists(cachedir): util.makedirs(cachedir, notindexed=True) fd = open(cachefile, 'w') fd.writelines(storehash) fd.close() lock.release() @annotatesubrepoerror def _initrepo(self, parentrepo, source, create): self._repo._subparent = parentrepo self._repo._subsource = source if create: fp = self._repo.opener("hgrc", "w", text=True) fp.write('[paths]\n') def addpathconfig(key, value): if value: fp.write('%s = %s\n' % (key, value)) self._repo.ui.setconfig('paths', key, value) defpath = _abssource(self._repo, abort=False) defpushpath = _abssource(self._repo, True, abort=False) addpathconfig('default', defpath) if defpath != defpushpath: addpathconfig('default-push', defpushpath) fp.close() @annotatesubrepoerror def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly): return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos, os.path.join(prefix, self._path), explicitonly) @annotatesubrepoerror def status(self, rev2, **opts): try: rev1 = self._state[1] ctx1 = self._repo[rev1] ctx2 = self._repo[rev2] return self._repo.status(ctx1, ctx2, **opts) except error.RepoLookupError, inst: self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) return [], [], [], [], [], [], [] @annotatesubrepoerror def diff(self, ui, diffopts, node2, match, prefix, **opts): try: node1 = node.bin(self._state[1]) # We currently expect node2 to come from substate and be # in hex format if node2 is not None: node2 = node.bin(node2) cmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2, match, prefix=posixpath.join(prefix, self._path), listsubrepos=True, **opts) except error.RepoLookupError, inst: self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) @annotatesubrepoerror def archive(self, ui, archiver, prefix, match=None): self._get(self._state + ('hg',)) total = abstractsubrepo.archive(self, ui, archiver, prefix, match) rev = self._state[1] ctx = self._repo[rev] for subpath in ctx.substate: s = subrepo(ctx, subpath) submatch = matchmod.narrowmatcher(subpath, match) total += s.archive( ui, archiver, os.path.join(prefix, self._path), submatch) return total @annotatesubrepoerror def dirty(self, ignoreupdate=False): r = self._state[1] if r == '' and not ignoreupdate: # no state recorded return True w = self._repo[None] if r != w.p1().hex() and not ignoreupdate: # different version checked out return True return w.dirty() # working directory changed def basestate(self): return self._repo['.'].hex() def checknested(self, path): return self._repo._checknested(self._repo.wjoin(path)) @annotatesubrepoerror def commit(self, text, user, date): # don't bother committing in the subrepo if it's only been # updated if not self.dirty(True): return self._repo['.'].hex() self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self)) n = self._repo.commit(text, user, date) if not n: return self._repo['.'].hex() # different version checked out return node.hex(n) @annotatesubrepoerror def remove(self): # we can't fully delete the repository as it may contain # local-only history self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self)) hg.clean(self._repo, node.nullid, False) def _get(self, state): source, revision, kind = state if revision not in self._repo: 
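# Illustration of the source resolution that follows (hypothetical paths):
# with a parent repository whose 'default' path is http://hg.example.com/proj
# and an .hgsub entry 'libs/foo = libs/foo', _abssource() resolves the
# subrepo source to http://hg.example.com/proj/libs/foo. The clone branch
# below is taken only while the local subrepo is still empty; otherwise the
# missing revision is pulled from that resolved URL.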
self._repo._subsource = source srcurl = _abssource(self._repo) other = hg.peer(self._repo, {}, srcurl) if len(self._repo) == 0: self._repo.ui.status(_('cloning subrepo %s from %s\n') % (subrelpath(self), srcurl)) parentrepo = self._repo._subparent shutil.rmtree(self._repo.path) other, cloned = hg.clone(self._repo._subparent.baseui, {}, other, self._repo.root, update=False) self._repo = cloned.local() self._initrepo(parentrepo, source, create=True) self._cachestorehash(srcurl) else: self._repo.ui.status(_('pulling subrepo %s from %s\n') % (subrelpath(self), srcurl)) cleansub = self.storeclean(srcurl) remotebookmarks = other.listkeys('bookmarks') self._repo.pull(other) bookmarks.updatefromremote(self._repo.ui, self._repo, remotebookmarks, srcurl) if cleansub: # keep the repo clean after pull self._cachestorehash(srcurl) @annotatesubrepoerror def get(self, state, overwrite=False): self._get(state) source, revision, kind = state self._repo.ui.debug("getting subrepo %s\n" % self._path) hg.updaterepo(self._repo, revision, overwrite) @annotatesubrepoerror def merge(self, state): self._get(state) cur = self._repo['.'] dst = self._repo[state[1]] anc = dst.ancestor(cur) def mergefunc(): if anc == cur and dst.branch() == cur.branch(): self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) hg.update(self._repo, state[1]) elif anc == dst: self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) else: self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) hg.merge(self._repo, state[1], remind=False) wctx = self._repo[None] if self.dirty(): if anc != dst: if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst): mergefunc() else: mergefunc() else: mergefunc() @annotatesubrepoerror def push(self, opts): force = opts.get('force') newbranch = opts.get('new_branch') ssh = opts.get('ssh') # push subrepos depth-first for coherent ordering c = self._repo[''] subs = c.substate # only repos that are committed for s in sorted(subs): if c.sub(s).push(opts) == 0: return False dsturl = _abssource(self._repo, True) if not force: if self.storeclean(dsturl): self._repo.ui.status( _('no changes made to subrepo %s since last push to %s\n') % (subrelpath(self), dsturl)) return None self._repo.ui.status(_('pushing subrepo %s to %s\n') % (subrelpath(self), dsturl)) other = hg.peer(self._repo, {'ssh': ssh}, dsturl) res = self._repo.push(other, force, newbranch=newbranch) # the repo is now clean self._cachestorehash(dsturl) return res @annotatesubrepoerror def outgoing(self, ui, dest, opts): return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) @annotatesubrepoerror def incoming(self, ui, source, opts): return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) @annotatesubrepoerror def files(self): rev = self._state[1] ctx = self._repo[rev] return ctx.manifest() def filedata(self, name): rev = self._state[1] return self._repo[rev][name].data() def fileflags(self, name): rev = self._state[1] ctx = self._repo[rev] return ctx.flags(name) def walk(self, match): ctx = self._repo[None] return ctx.walk(match) @annotatesubrepoerror def forget(self, ui, match, prefix): return cmdutil.forget(ui, self._repo, match, os.path.join(prefix, self._path), True) @annotatesubrepoerror def revert(self, ui, substate, *pats, **opts): # reverting a subrepo is a 2 step process: # 1. if the no_backup is not set, revert all modified # files inside the subrepo # 2. 
update the subrepo to the revision specified in # the corresponding substate dictionary ui.status(_('reverting subrepo %s\n') % substate[0]) if not opts.get('no_backup'): # Revert all files on the subrepo, creating backups # Note that this will not recursively revert subrepos # We could do it if there was a set:subrepos() predicate opts = opts.copy() opts['date'] = None opts['rev'] = substate[1] pats = [] if not opts.get('all'): pats = ['set:modified()'] self.filerevert(ui, *pats, **opts) # Update the repo to the revision specified in the given substate self.get(substate, overwrite=True) def filerevert(self, ui, *pats, **opts): ctx = self._repo[opts['rev']] parents = self._repo.dirstate.parents() if opts.get('all'): pats = ['set:modified()'] else: pats = [] cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts) class svnsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): self._path = path self._state = state self._ctx = ctx self._ui = ctx._repo.ui self._exe = util.findexe('svn') if not self._exe: raise util.Abort(_("'svn' executable not found for subrepo '%s'") % self._path) def _svncommand(self, commands, filename='', failok=False): cmd = [self._exe] extrakw = {} if not self._ui.interactive(): # Making stdin be a pipe should prevent svn from behaving # interactively even if we can't pass --non-interactive. extrakw['stdin'] = subprocess.PIPE # Starting in svn 1.5 --non-interactive is a global flag # instead of being per-command, but we need to support 1.4 so # we have to be intelligent about what commands take # --non-interactive. if commands[0] in ('update', 'checkout', 'commit'): cmd.append('--non-interactive') cmd.extend(commands) if filename is not None: path = os.path.join(self._ctx._repo.origroot, self._path, filename) cmd.append(path) env = dict(os.environ) # Avoid localized output, preserve current locale for everything else. lc_all = env.get('LC_ALL') if lc_all: env['LANG'] = lc_all del env['LC_ALL'] env['LC_MESSAGES'] = 'C' p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env, **extrakw) stdout, stderr = p.communicate() stderr = stderr.strip() if not failok: if p.returncode: raise util.Abort(stderr or 'exited with code %d' % p.returncode) if stderr: self._ui.warn(stderr + '\n') return stdout, stderr @propertycache def _svnversion(self): output, err = self._svncommand(['--version', '--quiet'], filename=None) m = re.search(r'^(\d+)\.(\d+)', output) if not m: raise util.Abort(_('cannot retrieve svn tool version')) return (int(m.group(1)), int(m.group(2))) def _wcrevs(self): # Get the working directory revision as well as the last # commit revision so we can compare the subrepo state with # both. We used to store the working directory one. output, err = self._svncommand(['info', '--xml']) doc = xml.dom.minidom.parseString(output) entries = doc.getElementsByTagName('entry') lastrev, rev = '0', '0' if entries: rev = str(entries[0].getAttribute('revision')) or '0' commits = entries[0].getElementsByTagName('commit') if commits: lastrev = str(commits[0].getAttribute('revision')) or '0' return (lastrev, rev) def _wcrev(self): return self._wcrevs()[0] def _wcchanged(self): """Return (changes, extchanges, missing) where changes is True if the working directory was changed, extchanges is True if any of these changes concern an external entry and missing is True if any change is a missing entry. 
""" output, err = self._svncommand(['status', '--xml']) externals, changes, missing = [], [], [] doc = xml.dom.minidom.parseString(output) for e in doc.getElementsByTagName('entry'): s = e.getElementsByTagName('wc-status') if not s: continue item = s[0].getAttribute('item') props = s[0].getAttribute('props') path = e.getAttribute('path') if item == 'external': externals.append(path) elif item == 'missing': missing.append(path) if (item not in ('', 'normal', 'unversioned', 'external') or props not in ('', 'none', 'normal')): changes.append(path) for path in changes: for ext in externals: if path == ext or path.startswith(ext + os.sep): return True, True, bool(missing) return bool(changes), False, bool(missing) def dirty(self, ignoreupdate=False): if not self._wcchanged()[0]: if self._state[1] in self._wcrevs() or ignoreupdate: return False return True def basestate(self): lastrev, rev = self._wcrevs() if lastrev != rev: # Last committed rev is not the same than rev. We would # like to take lastrev but we do not know if the subrepo # URL exists at lastrev. Test it and fallback to rev it # is not there. try: self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)]) return lastrev except error.Abort: pass return rev @annotatesubrepoerror def commit(self, text, user, date): # user and date are out of our hands since svn is centralized changed, extchanged, missing = self._wcchanged() if not changed: return self.basestate() if extchanged: # Do not try to commit externals raise util.Abort(_('cannot commit svn externals')) if missing: # svn can commit with missing entries but aborting like hg # seems a better approach. raise util.Abort(_('cannot commit missing svn entries')) commitinfo, err = self._svncommand(['commit', '-m', text]) self._ui.status(commitinfo) newrev = re.search('Committed revision ([0-9]+).', commitinfo) if not newrev: if not commitinfo.strip(): # Sometimes, our definition of "changed" differs from # svn one. For instance, svn ignores missing files # when committing. If there are only missing files, no # commit is made, no output and no error code. raise util.Abort(_('failed to commit svn changes')) raise util.Abort(commitinfo.splitlines()[-1]) newrev = newrev.groups()[0] self._ui.status(self._svncommand(['update', '-r', newrev])[0]) return newrev @annotatesubrepoerror def remove(self): if self.dirty(): self._ui.warn(_('not removing repo %s because ' 'it has changes.\n' % self._path)) return self._ui.note(_('removing subrepo %s\n') % self._path) def onerror(function, path, excinfo): if function is not os.remove: raise # read-only files cannot be unlinked under Windows s = os.stat(path) if (s.st_mode & stat.S_IWRITE) != 0: raise os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE) os.remove(path) path = self._ctx._repo.wjoin(self._path) shutil.rmtree(path, onerror=onerror) try: os.removedirs(os.path.dirname(path)) except OSError: pass @annotatesubrepoerror def get(self, state, overwrite=False): if overwrite: self._svncommand(['revert', '--recursive']) args = ['checkout'] if self._svnversion >= (1, 5): args.append('--force') # The revision must be specified at the end of the URL to properly # update to a directory which has since been deleted and recreated. args.append('%s@%s' % (state[0], state[1])) status, err = self._svncommand(args, failok=True) if not re.search('Checked out revision [0-9]+.', status): if ('is already a working copy for a different URL' in err and (self._wcchanged()[:2] == (False, False))): # obstructed but clean working copy, so just blow it away. 
self.remove() self.get(state, overwrite=False) return raise util.Abort((status or err).splitlines()[-1]) self._ui.status(status) @annotatesubrepoerror def merge(self, state): old = self._state[1] new = state[1] wcrev = self._wcrev() if new != wcrev: dirty = old == wcrev or self._wcchanged()[0] if _updateprompt(self._ui, self, dirty, wcrev, new): self.get(state, False) def push(self, opts): # push is a no-op for SVN return True @annotatesubrepoerror def files(self): output = self._svncommand(['list', '--recursive', '--xml'])[0] doc = xml.dom.minidom.parseString(output) paths = [] for e in doc.getElementsByTagName('entry'): kind = str(e.getAttribute('kind')) if kind != 'file': continue name = ''.join(c.data for c in e.getElementsByTagName('name')[0].childNodes if c.nodeType == c.TEXT_NODE) paths.append(name.encode('utf-8')) return paths def filedata(self, name): return self._svncommand(['cat'], name)[0] class gitsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): self._state = state self._ctx = ctx self._path = path self._relpath = os.path.join(reporelpath(ctx._repo), path) self._abspath = ctx._repo.wjoin(path) self._subparent = ctx._repo self._ui = ctx._repo.ui self._ensuregit() def _ensuregit(self): try: self._gitexecutable = 'git' out, err = self._gitnodir(['--version']) except OSError, e: if e.errno != 2 or os.name != 'nt': raise self._gitexecutable = 'git.cmd' out, err = self._gitnodir(['--version']) m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out) if not m: self._ui.warn(_('cannot retrieve git version')) return version = (int(m.group(1)), int(m.group(2)), int(m.group(3))) # git 1.4.0 can't work at all, but 1.5.X can in at least some cases, # despite the docstring comment. For now, error on 1.4.0, warn on # 1.5.0 but attempt to continue. if version < (1, 5, 0): raise util.Abort(_('git subrepo requires git 1.6.0 or later')) elif version < (1, 6, 0): self._ui.warn(_('git subrepo requires git 1.6.0 or later')) def _gitcommand(self, commands, env=None, stream=False): return self._gitdir(commands, env=env, stream=stream)[0] def _gitdir(self, commands, env=None, stream=False): return self._gitnodir(commands, env=env, stream=stream, cwd=self._abspath) def _gitnodir(self, commands, env=None, stream=False, cwd=None): """Calls the git command The method tries to call the git command. Versions prior to 1.6.0 are not supported and will very probably fail. """ self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) # unless ui.quiet is set, print git's stderr, # which is mostly progress and useful info errpipe = None if self._ui.quiet: errpipe = open(os.devnull, 'w') p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1, cwd=cwd, env=env, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=errpipe) if stream: return p.stdout, None retdata = p.stdout.read().strip() # wait for the child to exit to avoid race condition.
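# In outline, the non-streaming path used here is:
#   retdata = p.stdout.read()   # drain the pipe first, avoiding a deadlock
#                               # when the child fills the stdout buffer
#   p.wait()                    # then reap the child so returncode is set
# Popen.returncode stays None until wait() or poll() has been called.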
p.wait() if p.returncode != 0 and p.returncode != 1: # there are certain error codes that are ok command = commands[0] if command in ('cat-file', 'symbolic-ref'): return retdata, p.returncode # for all others, abort raise util.Abort('git %s error %d in %s' % (command, p.returncode, self._relpath)) return retdata, p.returncode def _gitmissing(self): return not os.path.exists(os.path.join(self._abspath, '.git')) def _gitstate(self): return self._gitcommand(['rev-parse', 'HEAD']) def _gitcurrentbranch(self): current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet']) if err: current = None return current def _gitremote(self, remote): out = self._gitcommand(['remote', 'show', '-n', remote]) line = out.split('\n')[1] i = line.index('URL: ') + len('URL: ') return line[i:] def _githavelocally(self, revision): out, code = self._gitdir(['cat-file', '-e', revision]) return code == 0 def _gitisancestor(self, r1, r2): base = self._gitcommand(['merge-base', r1, r2]) return base == r1 def _gitisbare(self): return self._gitcommand(['config', '--bool', 'core.bare']) == 'true' def _gitupdatestat(self): """This must be run before git diff-index. diff-index only looks at changes to file stat; this command looks at file contents and updates the stat.""" self._gitcommand(['update-index', '-q', '--refresh']) def _gitbranchmap(self): '''returns 2 things: a map from git branch to revision a map from revision to branches''' branch2rev = {} rev2branch = {} out = self._gitcommand(['for-each-ref', '--format', '%(objectname) %(refname)']) for line in out.split('\n'): revision, ref = line.split(' ') if (not ref.startswith('refs/heads/') and not ref.startswith('refs/remotes/')): continue if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'): continue # ignore remote/HEAD redirects branch2rev[ref] = revision rev2branch.setdefault(revision, []).append(ref) return branch2rev, rev2branch def _gittracking(self, branches): 'return map of remote branch to local tracking branch' # assumes no more than one local tracking branch for each remote tracking = {} for b in branches: if b.startswith('refs/remotes/'): continue bname = b.split('/', 2)[2] remote = self._gitcommand(['config', 'branch.%s.remote' % bname]) if remote: ref = self._gitcommand(['config', 'branch.%s.merge' % bname]) tracking['refs/remotes/%s/%s' % (remote, ref.split('/', 2)[2])] = b return tracking def _abssource(self, source): if '://' not in source: # recognize the scp syntax as an absolute source colon = source.find(':') if colon != -1 and '/' not in source[:colon]: return source self._subsource = source return _abssource(self) def _fetch(self, source, revision): if self._gitmissing(): source = self._abssource(source) self._ui.status(_('cloning subrepo %s from %s\n') % (self._relpath, source)) self._gitnodir(['clone', source, self._abspath]) if self._githavelocally(revision): return self._ui.status(_('pulling subrepo %s from %s\n') % (self._relpath, self._gitremote('origin'))) # try only origin: the originally cloned repo self._gitcommand(['fetch']) if not self._githavelocally(revision): raise util.Abort(_("revision %s does not exist in subrepo %s\n") % (revision, self._relpath)) @annotatesubrepoerror def dirty(self, ignoreupdate=False): if self._gitmissing(): return self._state[1] != '' if self._gitisbare(): return True if not ignoreupdate and self._state[1] != self._gitstate(): # different version checked out return True # check for staged changes or modified files; ignore untracked files self._gitupdatestat() out, code = 
self._gitdir(['diff-index', '--quiet', 'HEAD']) return code == 1 def basestate(self): return self._gitstate() @annotatesubrepoerror def get(self, state, overwrite=False): source, revision, kind = state if not revision: self.remove() return self._fetch(source, revision) # if the repo was set to be bare, unbare it if self._gitisbare(): self._gitcommand(['config', 'core.bare', 'false']) if self._gitstate() == revision: self._gitcommand(['reset', '--hard', 'HEAD']) return elif self._gitstate() == revision: if overwrite: # first reset the index to unmark new files for commit, because # reset --hard will otherwise throw away files added for commit, # not just unmark them. self._gitcommand(['reset', 'HEAD']) self._gitcommand(['reset', '--hard', 'HEAD']) return branch2rev, rev2branch = self._gitbranchmap() def checkout(args): cmd = ['checkout'] if overwrite: # first reset the index to unmark new files for commit, because # the -f option will otherwise throw away files added for # commit, not just unmark them. self._gitcommand(['reset', 'HEAD']) cmd.append('-f') self._gitcommand(cmd + args) def rawcheckout(): # no branch to checkout, check it out with no branch self._ui.warn(_('checking out detached HEAD in subrepo %s\n') % self._relpath) self._ui.warn(_('check out a git branch if you intend ' 'to make changes\n')) checkout(['-q', revision]) if revision not in rev2branch: rawcheckout() return branches = rev2branch[revision] firstlocalbranch = None for b in branches: if b == 'refs/heads/master': # master trumps all other branches checkout(['refs/heads/master']) return if not firstlocalbranch and not b.startswith('refs/remotes/'): firstlocalbranch = b if firstlocalbranch: checkout([firstlocalbranch]) return tracking = self._gittracking(branch2rev.keys()) # choose a remote branch already tracked if possible remote = branches[0] if remote not in tracking: for b in branches: if b in tracking: remote = b break if remote not in tracking: # create a new local tracking branch local = remote.split('/', 3)[3] checkout(['-b', local, remote]) elif self._gitisancestor(branch2rev[tracking[remote]], remote): # When updating to a tracked remote branch, # if the local tracking branch is downstream of it, # a normal `git pull` would have performed a "fast-forward merge" # which is equivalent to updating the local branch to the remote. # Since we are only looking at branching at update, we need to # detect this situation and perform this action lazily. 
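# The ancestry test behind _gitisancestor() above is plain 'git merge-base':
#   git merge-base <local> <remote>
# prints <local> exactly when <local> is an ancestor of <remote>, which is
# the precondition for a fast-forward. The revision names are placeholders.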
if tracking[remote] != self._gitcurrentbranch(): checkout([tracking[remote]]) self._gitcommand(['merge', '--ff', remote]) else: # a real merge would be required, just checkout the revision rawcheckout() @annotatesubrepoerror def commit(self, text, user, date): if self._gitmissing(): raise util.Abort(_("subrepo %s is missing") % self._relpath) cmd = ['commit', '-a', '-m', text] env = os.environ.copy() if user: cmd += ['--author', user] if date: # git's date parser silently ignores when seconds < 1e9 # convert to ISO8601 env['GIT_AUTHOR_DATE'] = util.datestr(date, '%Y-%m-%dT%H:%M:%S %1%2') self._gitcommand(cmd, env=env) # make sure commit works otherwise HEAD might not exist under certain # circumstances return self._gitstate() <|fim▁hole|> def merge(self, state): source, revision, kind = state self._fetch(source, revision) base = self._gitcommand(['merge-base', revision, self._state[1]]) self._gitupdatestat() out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) def mergefunc(): if base == revision: self.get(state) # fast forward merge elif base != self._state[1]: self._gitcommand(['merge', '--no-commit', revision]) if self.dirty(): if self._gitstate() != revision: dirty = self._gitstate() == self._state[1] or code != 0 if _updateprompt(self._ui, self, dirty, self._state[1][:7], revision[:7]): mergefunc() else: mergefunc() @annotatesubrepoerror def push(self, opts): force = opts.get('force') if not self._state[1]: return True if self._gitmissing(): raise util.Abort(_("subrepo %s is missing") % self._relpath) # if a branch in origin contains the revision, nothing to do branch2rev, rev2branch = self._gitbranchmap() if self._state[1] in rev2branch: for b in rev2branch[self._state[1]]: if b.startswith('refs/remotes/origin/'): return True for b, revision in branch2rev.iteritems(): if b.startswith('refs/remotes/origin/'): if self._gitisancestor(self._state[1], revision): return True # otherwise, try to push the currently checked out branch cmd = ['push'] if force: cmd.append('--force') current = self._gitcurrentbranch() if current: # determine if the current branch is even useful if not self._gitisancestor(self._state[1], current): self._ui.warn(_('unrelated git branch checked out ' 'in subrepo %s\n') % self._relpath) return False self._ui.status(_('pushing branch %s of subrepo %s\n') % (current.split('/', 2)[2], self._relpath)) self._gitcommand(cmd + ['origin', current]) return True else: self._ui.warn(_('no branch checked out in subrepo %s\n' 'cannot push revision %s\n') % (self._relpath, self._state[1])) return False @annotatesubrepoerror def remove(self): if self._gitmissing(): return if self.dirty(): self._ui.warn(_('not removing repo %s because ' 'it has changes.\n') % self._relpath) return # we can't fully delete the repository as it may contain # local-only history self._ui.note(_('removing subrepo %s\n') % self._relpath) self._gitcommand(['config', 'core.bare', 'true']) for f in os.listdir(self._abspath): if f == '.git': continue path = os.path.join(self._abspath, f) if os.path.isdir(path) and not os.path.islink(path): shutil.rmtree(path) else: os.remove(path) def archive(self, ui, archiver, prefix, match=None): total = 0 source, revision = self._state if not revision: return total self._fetch(source, revision) # Parse git's native archive command. # This should be much faster than manually traversing the trees # and objects with many subprocess calls. 
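# 'git archive <rev>' emits a tar stream of the tree at <rev> on stdout;
# for example 'git archive HEAD | tar -t' lists the tracked files. The
# stream is consumed below with tarfile in streaming ('r|') mode, so no
# temporary file is needed.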
tarstream = self._gitcommand(['archive', revision], stream=True) tar = tarfile.open(fileobj=tarstream, mode='r|') relpath = subrelpath(self) ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files')) for i, info in enumerate(tar): if info.isdir(): continue if match and not match(info.name): continue if info.issym(): data = info.linkname else: data = tar.extractfile(info).read() archiver.addfile(os.path.join(prefix, self._path, info.name), info.mode, info.issym(), data) total += 1 ui.progress(_('archiving (%s)') % relpath, i + 1, unit=_('files')) ui.progress(_('archiving (%s)') % relpath, None) return total @annotatesubrepoerror def status(self, rev2, **opts): rev1 = self._state[1] if self._gitmissing() or not rev1: # if the repo is missing, return no results return [], [], [], [], [], [], [] modified, added, removed = [], [], [] self._gitupdatestat() if rev2: command = ['diff-tree', rev1, rev2] else: command = ['diff-index', rev1] out = self._gitcommand(command) for line in out.split('\n'): tab = line.find('\t') if tab == -1: continue status, f = line[tab - 1], line[tab + 1:] if status == 'M': modified.append(f) elif status == 'A': added.append(f) elif status == 'D': removed.append(f) deleted = unknown = ignored = clean = [] return modified, added, removed, deleted, unknown, ignored, clean types = { 'hg': hgsubrepo, 'svn': svnsubrepo, 'git': gitsubrepo, }<|fim▁end|>
@annotatesubrepoerror
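The store-hash machinery above (_calcstorehash, _cachestorehash and storeclean in hgsubrepo) reduces to a small, reusable pattern: hash a few store files, cache one copy of the result per remote path, and compare the cache against a fresh computation before the next push or pull. The sketch below restates that pattern as standalone Python; the file list, cache handling and function names are illustrative stand-ins rather than Mercurial's internal API, and the repository locking done by the real code is omitted.

import hashlib
import os

def calcstorehash(root, filelist, remotepath):
    """Yield one hash line per store file, mirroring _calcstorehash above."""
    yield '# %s\n' % remotepath
    for relname in filelist:
        absname = os.path.join(root, relname)
        if os.path.exists(absname):
            fh = open(absname, 'rb')
            try:
                digest = hashlib.sha1(fh.read()).hexdigest()
            finally:
                fh.close()
        else:
            digest = ''
        yield '%s = %s\n' % (relname, digest)

def cachestorehash(root, filelist, remotepath, cachefile):
    """Remember the current store state for one remote (cf. _cachestorehash)."""
    fh = open(cachefile, 'w')
    try:
        fh.writelines(calcstorehash(root, filelist, remotepath))
    finally:
        fh.close()

def storeclean(root, filelist, remotepath, cachefile):
    """Return True if the store is unchanged since cachestorehash() last ran."""
    if not os.path.exists(cachefile):
        return False
    fh = open(cachefile, 'r')
    try:
        cached = fh.readlines()
    finally:
        fh.close()
    return cached == list(calcstorehash(root, filelist, remotepath))

# Usage, with hypothetical paths:
#   files = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
#   cachestorehash('/repo/.hg', files, 'http://hg.example.com/proj', '/tmp/hash')
#   storeclean('/repo/.hg', files, 'http://hg.example.com/proj', '/tmp/hash')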