prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k)
---|---
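Each row below pairs a fill-in-the-middle (FIM) prompt with its completion. The prompt is a source file wrapped in `<|file_name|>`/`<|fim▁begin|>` markers, with a `<|fim▁hole|>` marking where code was removed; the completion, shown between the `|` separators after `<|fim▁end|>`, is the removed code. A minimal sketch of how one row could be spliced back into a complete file (an illustration, not an official loader for this dataset):

```python
# Reassemble a complete source file from one (prompt, completion) row,
# assuming exactly the marker strings used in the rows below.
FIM_BEGIN = "<|fim▁begin|>"
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def reassemble(prompt, completion):
    """Splice the completion back into the hole of a FIM-style prompt."""
    body = prompt.split(FIM_BEGIN, 1)[1]      # drop the file-name preamble
    body = body.rsplit(FIM_END, 1)[0]         # drop the trailing end marker
    prefix, suffix = body.split(FIM_HOLE, 1)  # split at the removed span
    return prefix + completion + suffix
```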
<|file_name|>hw6_starter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
u"""
Main homework launcher script.
This script runs homework assignment #6.
"""
__author__ = "Elena Sharovar"
__date__ = "2014-11-23"
from hw6_solution1 import modifier
def runner():
u"""Запускает выполнение всех задач"""
print "Modifying file..."
modifier("data.csv")
print "Modified successfully!"
if __name__ == '__main__':<|fim▁hole|><|fim▁end|> | runner() |
<|file_name|>services.ts<|end_file_name|><|fim▁begin|>// Rethink Services
import { UserAvailabilityService } from './rethink/userAvailability.service';<|fim▁hole|>import { ConnectorService } from './rethink/connector.service';
import { RethinkService } from './rethink/rethink.service';
import { ChatService } from './rethink/chat.service';
// Services
import { ContextualCommTriggerService } from './contextualCommTrigger.service';
import { ContextualCommDataService } from './contextualCommData.service';
import { ContextualCommDataResolver } from './contextualCommData.resolver';
import { ContextualCommService } from './contextualComm.service';
import { TriggerActionService } from './triggerAction.service';
import { ActivateTaskGuard } from './activateTask.service';
import { ActivateUserGuard } from './activateUser.service';
import { BreadcrumbService } from './breadcrumb.service';
import { ContactService } from './contact.service';
import { RoutingService } from './routing.service';
import { LocalStorage } from './storage.service';
import { AuthGuard } from './authGuard.service';
import { UserResolver } from './user.resolver';
export {
AuthGuard,
ChatService,
UserResolver,
LocalStorage,
RoutingService,
RethinkService,
ContactService,
ConnectorService,
BreadcrumbService,
ActivateTaskGuard,
ActivateUserGuard,
TriggerActionService,
ContextualCommService,
UserAvailabilityService,
ContextualCommDataService,
ContextualCommDataResolver,
ContextualCommTriggerService
};
export let servicesInjectables: Array<any> = [
ChatService,
LocalStorage,
RoutingService,
RethinkService,
ContactService,
ConnectorService,
BreadcrumbService,
ActivateTaskGuard,
ActivateUserGuard,
TriggerActionService,
UserAvailabilityService,
ContextualCommTriggerService
];<|fim▁end|> | |
<|file_name|>buck_test.py<|end_file_name|><|fim▁begin|>from buck import format_watchman_query_params, glob_internal, LazyBuildEnvPartial
from buck import subdir_glob, BuildFileContext
from pathlib import Path, PurePosixPath, PureWindowsPath
import os
import shutil
import tempfile
import unittest
class FakePathMixin(object):
def glob(self, pattern):
return self.glob_results.get(pattern)
def is_file(self):
return True
class FakePosixPath(FakePathMixin, PurePosixPath):
pass
class FakeWindowsPath(FakePathMixin, PureWindowsPath):
pass
def fake_path(fake_path_class, path, glob_results={}):
# Path does magic in __new__ with its args; it's hard to add more without
# changing that class. So we use a wrapper function to diddle with
# FakePath's members.
result = fake_path_class(path)
result.glob_results = {}
for pattern, paths in glob_results.iteritems():
result.glob_results[pattern] = [result / fake_path_class(p) for p in paths]
return result
class TestBuckPlatformBase(object):
def test_glob_includes_simple(self):
search_base = self.fake_path(
'foo',
glob_results={'*.java': ['A.java', 'B.java']})
self.assertGlobMatches(
['A.java', 'B.java'],
glob_internal(
includes=['*.java'],
excludes=[],
include_dotfiles=False,
search_base=search_base))
def test_glob_includes_sort(self):
search_base = self.fake_path(
'foo',
glob_results={'*.java': ['A.java', 'E.java', 'D.java', 'C.java', 'B.java']})
self.assertGlobMatches(
['A.java', 'B.java', 'C.java', 'D.java', 'E.java'],
glob_internal(
includes=['*.java'],
excludes=[],
include_dotfiles=False,
search_base=search_base))
def test_glob_includes_multi(self):
search_base = self.fake_path(
'foo',
glob_results={
'bar/*.java': ['bar/A.java', 'bar/B.java'],
'baz/*.java': ['baz/C.java', 'baz/D.java'],
})
self.assertGlobMatches(
['bar/A.java', 'bar/B.java', 'baz/C.java', 'baz/D.java'],
glob_internal(
includes=['bar/*.java', 'baz/*.java'],
excludes=[],
include_dotfiles=False,
search_base=search_base))
def test_glob_excludes_double_star(self):
search_base = self.fake_path(
'foo',
glob_results={
'**/*.java': ['A.java', 'B.java', 'Test.java'],
})
self.assertGlobMatches(
['A.java', 'B.java'],
glob_internal(
includes=['**/*.java'],
excludes=['**/*Test.java'],
include_dotfiles=False,
search_base=search_base))
def test_glob_excludes_multi(self):
search_base = self.fake_path(
'foo',
glob_results={
'bar/*.java': ['bar/A.java', 'bar/B.java'],
'baz/*.java': ['baz/C.java', 'baz/D.java'],
})
self.assertGlobMatches(
['bar/B.java', 'baz/D.java'],
glob_internal(
includes=['bar/*.java', 'baz/*.java'],
excludes=['*/[AC].java'],
include_dotfiles=False,
search_base=search_base))
def test_subdir_glob(self):
build_env = BuildFileContext(None, None, None, None, None, None, None, None)
search_base = self.fake_path(
'foo',
glob_results={
'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'],
'lib/baz/*.h': ['lib/baz/C.h', 'lib/baz/D.h'],
})
self.assertGlobMatches(
{
'bar/B.h': 'lib/bar/B.h',
'bar/A.h': 'lib/bar/A.h',
'baz/D.h': 'lib/baz/D.h',
'baz/C.h': 'lib/baz/C.h',
},
subdir_glob([
('lib', 'bar/*.h'),
('lib', 'baz/*.h')],
build_env=build_env,
search_base=search_base))
def test_subdir_glob_with_prefix(self):
build_env = BuildFileContext(None, None, None, None, None, None, None, None)
search_base = self.fake_path(
'foo',
glob_results={
'lib/bar/*.h': ['lib/bar/A.h', 'lib/bar/B.h'],
})
self.assertGlobMatches(
{
'Prefix/bar/B.h': 'lib/bar/B.h',
'Prefix/bar/A.h': 'lib/bar/A.h',
},
subdir_glob([('lib', 'bar/*.h')],
prefix='Prefix',
build_env=build_env,
search_base=search_base))
def test_glob_excludes_relative(self):
search_base = self.fake_path(
'foo',
glob_results={
'**/*.java': ['foo/A.java', 'foo/bar/B.java', 'bar/C.java'],
})
self.assertGlobMatches(
['foo/A.java', 'foo/bar/B.java'],
glob_internal(
includes=['**/*.java'],
excludes=['bar/*.java'],
include_dotfiles=False,
search_base=search_base))
def test_glob_includes_skips_dotfiles(self):
search_base = self.fake_path(
'foo',
glob_results={'*.java': ['A.java', '.B.java']})
self.assertGlobMatches(
['A.java'],
glob_internal(
includes=['*.java'],
excludes=[],
include_dotfiles=False,
search_base=search_base))
def test_glob_includes_does_not_skip_dotfiles_if_include_dotfiles(self):
search_base = self.fake_path(
'foo',
glob_results={'*.java': ['A.java', '.B.java']})
self.assertGlobMatches(
['.B.java', 'A.java'],
glob_internal(
includes=['*.java'],
excludes=[],
include_dotfiles=True,
search_base=search_base))
def test_lazy_build_env_partial(self):
def cobol_binary(
name,
deps=[],
build_env=None):
return (name, deps, build_env)
testLazy = LazyBuildEnvPartial(cobol_binary)
testLazy.build_env = {}
self.assertEqual(
('HAL', [1, 2, 3], {}),
testLazy.invoke(name='HAL', deps=[1, 2, 3]))
testLazy.build_env = {'abc': 789}
self.assertEqual(
('HAL', [1, 2, 3], {'abc': 789}),
testLazy.invoke(name='HAL', deps=[1, 2, 3]))
def test_explicit_exclude_with_file_separator_excludes(self):
search_base = self.fake_path(
'foo',
glob_results={'java/**/*.java': ['java/Include.java', 'java/Exclude.java']})
self.assertGlobMatches(
['java/Include.java'],
glob_internal(
includes=['java/**/*.java'],
excludes=['java/Exclude.java'],
include_dotfiles=False,
search_base=search_base))
class TestBuckPosix(TestBuckPlatformBase, unittest.TestCase):
@staticmethod
def fake_path(*args, **kwargs):
return fake_path(FakePosixPath, *args, **kwargs)
def assertGlobMatches(self, expected, actual):
self.assertEqual(expected, actual)
class TestBuckWindows(TestBuckPlatformBase, unittest.TestCase):
@staticmethod
def fake_path(*args, **kwargs):
return fake_path(FakeWindowsPath, *args, **kwargs)
def assertGlobMatches(self, expected, actual):
# Fix the path separator to make test writing easier
fixed_expected = None
if isinstance(expected, list):
fixed_expected = []
for path in expected:
fixed_expected.append(path.replace('/', '\\'))
else:
fixed_expected = {}
for key, value in expected.items():
fixed_expected.update({key.replace('/', '\\'): value.replace('/', '\\')})
self.assertEqual(fixed_expected, actual)
class TestBuck(unittest.TestCase):
def test_glob_double_star_integration(self):
d = tempfile.mkdtemp()
try:
subdir = os.path.join(d, 'b', 'a', 'c', 'a')
os.makedirs(subdir)
f = open(os.path.join(subdir, 'A.java'), 'w')
f.close()
f = open(os.path.join(subdir, 'B.java'), 'w')
f.close()
f = open(os.path.join(subdir, 'Test.java'), 'w')
f.close()
f = open(os.path.join(subdir, '.tmp.java'), 'w')
f.close()
os.makedirs(os.path.join(subdir, 'NotAFile.java'))
self.assertEquals(
[
os.path.join('b', 'a', 'c', 'a', 'A.java'),
os.path.join('b', 'a', 'c', 'a', 'B.java'),
],
glob_internal(
includes=['b/a/**/*.java'],
excludes=['**/*Test.java'],
include_dotfiles=False,
search_base=Path(d)))
finally:
shutil.rmtree(d)
def test_case_preserved(self):
d = tempfile.mkdtemp()
try:
subdir = os.path.join(d, 'java')
os.makedirs(subdir)
open(os.path.join(subdir, 'Main.java'), 'w').close()
self.assertEquals(
[
os.path.join('java', 'Main.java'),
],
glob_internal(
includes=['java/Main.java'],
excludes=[],
include_dotfiles=False,
search_base=Path(d)))
finally:
shutil.rmtree(d)
def test_watchman_query_params_includes(self):
query_params = format_watchman_query_params(
['**/*.java'],
[],
False,
'/path/to/glob')
self.assertEquals(
{
'relative_root': '/path/to/glob',
'path': [''],
'fields': ['name'],
'expression': [
'allof',<|fim▁hole|> 'exists',
['anyof', ['type', 'f'], ['type', 'l']],
['anyof', ['match', '**/*.java', 'wholename', {}]],
]
},
query_params)
def test_watchman_query_params_includes_and_excludes(self):
query_params = format_watchman_query_params(
['**/*.java'],
['**/*Test.java'],
False,
'/path/to/glob')
self.assertEquals(
{
'relative_root': '/path/to/glob',
'path': [''],
'fields': ['name'],
'expression': [
'allof',
'exists',
['anyof', ['type', 'f'], ['type', 'l']],
['anyof', ['match', '**/*.java', 'wholename', {}]],
['not', ['anyof', ['match', '**/*Test.java', 'wholename', {}]]],
]
},
query_params)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
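The tests above pin down `glob_internal`'s include/exclude/dotfile semantics. As a rough mental model only (not buck's actual implementation; note that `fnmatch`'s `*` also crosses `/`, unlike a real glob), the filtering works like this:

```python
import fnmatch

def glob_filter(candidates, includes, excludes, include_dotfiles=False):
    """Toy model of the include/exclude/dotfile filtering exercised above."""
    out = []
    for path in candidates:
        name = path.rsplit('/', 1)[-1]
        if not include_dotfiles and name.startswith('.'):
            continue  # dotfiles are skipped unless explicitly requested
        if not any(fnmatch.fnmatch(path, pat) for pat in includes):
            continue  # a path must match at least one include pattern
        if any(fnmatch.fnmatch(path, pat) for pat in excludes):
            continue  # and no exclude pattern
        out.append(path)
    return sorted(out)

# glob_filter(['A.java', '.B.java'], ['*.java'], []) == ['A.java']
```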
<|file_name|>DBNNodeExtendable.java<|end_file_name|><|fim▁begin|>/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.navigator;
import org.jkiss.code.NotNull;<|fim▁hole|>
/**
* DBNNodeExtendable
*/
public interface DBNNodeExtendable
{
@NotNull
List<DBNNode> getExtraNodes();
void addExtraNode(@NotNull DBNNode node, boolean reflect);
void removeExtraNode(@NotNull DBNNode node);
}<|fim▁end|> |
import java.util.List; |
<|file_name|>box.rs<|end_file_name|><|fim▁begin|>// edition:2021
// run-pass
// Test precise capture when using boxes
struct MetaData { x: String, name: String }
struct Data { m: MetaData }
struct BoxedData(Box<Data>);
struct EvenMoreBoxedData(Box<BoxedData>);
// Mutate disjoint paths, one inside one outside the closure
fn box_1() {
let m = MetaData { x: format!("x"), name: format!("name") };
let d = Data { m };
let b = BoxedData(Box::new(d));
let mut e = EvenMoreBoxedData(Box::new(b));
<|fim▁hole|> e.0.0.m.x = format!("not-x");
};
e.0.0.m.name = format!("not-name");
c();
}
// Mutate a path inside the closure and read a disjoint path outside the closure
fn box_2() {
let m = MetaData { x: format!("x"), name: format!("name") };
let d = Data { m };
let b = BoxedData(Box::new(d));
let mut e = EvenMoreBoxedData(Box::new(b));
let mut c = || {
e.0.0.m.x = format!("not-x");
};
println!("{}", e.0.0.m.name);
c();
}
// Read a path inside the closure and mutate a disjoint path outside the closure
fn box_3() {
let m = MetaData { x: format!("x"), name: format!("name") };
let d = Data { m };
let b = BoxedData(Box::new(d));
let mut e = EvenMoreBoxedData(Box::new(b));
let c = || {
println!("{}", e.0.0.m.name);
};
e.0.0.m.x = format!("not-x");
c();
}
// Read disjoint paths, one inside the closure and one outside the closure.
fn box_4() {
let m = MetaData { x: format!("x"), name: format!("name") };
let d = Data { m };
let b = BoxedData(Box::new(d));
let e = EvenMoreBoxedData(Box::new(b));
let c = || {
println!("{}", e.0.0.m.name);
};
println!("{}", e.0.0.m.x);
c();
}
// Read the same path, once inside the closure and once outside the closure.
fn box_5() {
let m = MetaData { x: format!("x"), name: format!("name") };
let d = Data { m };
let b = BoxedData(Box::new(d));
let e = EvenMoreBoxedData(Box::new(b));
let c = || {
println!("{}", e.0.0.m.name);
};
println!("{}", e.0.0.m.name);
c();
}
fn main() {
box_1();
box_2();
box_3();
box_4();
box_5();
}<|fim▁end|> | let mut c = || { |
<|file_name|>rust_completer.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from ycmd.utils import ToUtf8IfNeeded
from ycmd.completers.completer import Completer
from ycmd import responses, utils, hmac_utils
import logging
import urlparse
import requests
import httplib
import json
import tempfile
import base64
import binascii
import threading
import os
from os import path as p
_logger = logging.getLogger( __name__ )
DIR_OF_THIS_SCRIPT = p.dirname( p.abspath( __file__ ) )
DIR_OF_THIRD_PARTY = utils.PathToNearestThirdPartyFolder( DIR_OF_THIS_SCRIPT )
RACERD_BINARY_NAME = 'racerd' + ( '.exe' if utils.OnWindows() else '' )
RACERD_BINARY = p.join( DIR_OF_THIRD_PARTY,
'racerd', 'target', 'release', RACERD_BINARY_NAME )
RACERD_HMAC_HEADER = 'x-racerd-hmac'
HMAC_SECRET_LENGTH = 16
BINARY_NOT_FOUND_MESSAGE = ( 'racerd binary not found. Did you build it? ' +
'You can do so by running ' +
'"./build.py --racer-completer".' )
ERROR_FROM_RACERD_MESSAGE = (
'Received error from racerd while retrieving completions. You did not '
'set the rust_src_path option, which is probably causing this issue. '
'See YCM docs for details.'
)
def FindRacerdBinary( user_options ):
"""
Find path to racerd binary
This function prefers the 'racerd_binary_path' value as provided in
user_options if available. It then falls back to ycmd's racerd build. If
that's not found, it attempts to use a racerd executable from the system PATH.
"""
racerd_user_binary = user_options.get( 'racerd_binary_path' )
if racerd_user_binary:
# The user has explicitly specified a path.
if os.path.isfile( racerd_user_binary ):
return racerd_user_binary
else:
_logger.warn( 'user-provided racerd_binary_path is not a file' )
if os.path.isfile( RACERD_BINARY ):
return RACERD_BINARY
return utils.PathToFirstExistingExecutable( [ 'racerd' ] )
class RustCompleter( Completer ):
"""
A completer for the rust programming language backed by racerd.
https://github.com/jwilm/racerd
"""
def __init__( self, user_options ):
super( RustCompleter, self ).__init__( user_options )
self._racerd = FindRacerdBinary( user_options )
self._racerd_host = None
self._server_state_lock = threading.RLock()
self._keep_logfiles = user_options[ 'server_keep_logfiles' ]
self._hmac_secret = ''
self._rust_source_path = self._GetRustSrcPath()
if not self._rust_source_path:
_logger.warn( 'No path provided for the rustc source. Please set the '
'rust_src_path option' )
if not self._racerd:
_logger.error( BINARY_NOT_FOUND_MESSAGE )
raise RuntimeError( BINARY_NOT_FOUND_MESSAGE )
self._StartServer()
def _GetRustSrcPath( self ):
"""
Attempt to read user option for rust_src_path. Fallback to environment
variable if it's not provided.
"""
rust_src_path = self.user_options[ 'rust_src_path' ]
# Early return if user provided config
if rust_src_path:
return rust_src_path
# Fall back to environment variable
env_key = 'RUST_SRC_PATH'
if env_key in os.environ:
return os.environ[ env_key ]
return None
def SupportedFiletypes( self ):
return [ 'rust' ]
def _ComputeRequestHmac( self, method, path, body ):
if not body:
body = ''
hmac = hmac_utils.CreateRequestHmac( method, path, body, self._hmac_secret )
return binascii.hexlify( hmac )
def _GetResponse( self, handler, request_data = None, method = 'POST' ):
"""
Query racerd via HTTP
racerd returns JSON with 200 OK responses. A 204 No Content response occurs
when the request itself succeeded but no completions, definitions, or
diagnostics were found.
"""
_logger.info( 'RustCompleter._GetResponse' )
url = urlparse.urljoin( self._racerd_host, handler )
parameters = self._TranslateRequest( request_data )
body = json.dumps( parameters ) if parameters else None
request_hmac = self._ComputeRequestHmac( method, handler, body )
extra_headers = { 'content-type': 'application/json' }
extra_headers[ RACERD_HMAC_HEADER ] = request_hmac
response = requests.request( method,
url,
data = body,
headers = extra_headers )<|fim▁hole|>
if response.status_code is httplib.NO_CONTENT:
return None
return response.json()
def _TranslateRequest( self, request_data ):
"""
Transform ycm request into racerd request
"""
if not request_data:
return None
file_path = request_data[ 'filepath' ]
buffers = []
for path, obj in request_data[ 'file_data' ].items():
buffers.append( {
'contents': obj[ 'contents' ],
'file_path': path
} )
line = request_data[ 'line_num' ]
col = request_data[ 'column_num' ] - 1
return {
'buffers': buffers,
'line': line,
'column': col,
'file_path': file_path
}
def _GetExtraData( self, completion ):
location = {}
if completion[ 'file_path' ]:
location[ 'filepath' ] = ToUtf8IfNeeded( completion[ 'file_path' ] )
if completion[ 'line' ]:
location[ 'line_num' ] = completion[ 'line' ]
if completion[ 'column' ]:
location[ 'column_num' ] = completion[ 'column' ] + 1
if location:
return { 'location': location }
return None
def ComputeCandidatesInner( self, request_data ):
try:
completions = self._FetchCompletions( request_data )
except requests.HTTPError:
if not self._rust_source_path:
raise RuntimeError( ERROR_FROM_RACERD_MESSAGE )
raise
if not completions:
return []
return [ responses.BuildCompletionData(
insertion_text = ToUtf8IfNeeded( completion[ 'text' ] ),
kind = ToUtf8IfNeeded( completion[ 'kind' ] ),
extra_menu_info = ToUtf8IfNeeded( completion[ 'context' ] ),
extra_data = self._GetExtraData( completion ) )
for completion in completions ]
def _FetchCompletions( self, request_data ):
return self._GetResponse( '/list_completions', request_data )
def _WriteSecretFile( self, secret ):
"""
Write a file containing the `secret` argument. The path to this file is
returned.
Note that racerd consumes the file upon reading; removal of the temp file is
intentionally not handled here.
"""
# Make temp file
secret_fd, secret_path = tempfile.mkstemp( text=True )
# Write secret
with os.fdopen( secret_fd, 'w' ) as secret_file:
secret_file.write( secret )
return secret_path
def _StartServer( self ):
"""
Start racerd.
"""
with self._server_state_lock:
self._hmac_secret = self._CreateHmacSecret()
secret_file_path = self._WriteSecretFile( self._hmac_secret )
port = utils.GetUnusedLocalhostPort()
args = [ self._racerd, 'serve',
'--port', str(port),
'-l',
'--secret-file', secret_file_path ]
# Enable logging of crashes
env = os.environ.copy()
env[ 'RUST_BACKTRACE' ] = '1'
if self._rust_source_path:
args.extend( [ '--rust-src-path', self._rust_source_path ] )
filename_format = p.join( utils.PathToTempDir(),
'racerd_{port}_{std}.log' )
self._server_stdout = filename_format.format( port = port,
std = 'stdout' )
self._server_stderr = filename_format.format( port = port,
std = 'stderr' )
with open( self._server_stderr, 'w' ) as fstderr:
with open( self._server_stdout, 'w' ) as fstdout:
self._racerd_phandle = utils.SafePopen( args,
stdout = fstdout,
stderr = fstderr,
env = env )
self._racerd_host = 'http://127.0.0.1:{0}'.format( port )
_logger.info( 'RustCompleter using host = ' + self._racerd_host )
def ServerIsRunning( self ):
"""
Check racerd status.
"""
with self._server_state_lock:
if not self._racerd_host or not self._racerd_phandle:
return False
try:
self._GetResponse( '/ping', method = 'GET' )
return True
except requests.HTTPError:
self._StopServer()
return False
def ServerIsReady( self ):
try:
self._GetResponse( '/ping', method = 'GET' )
return True
except Exception:
return False
def _StopServer( self ):
"""
Stop racerd.
"""
with self._server_state_lock:
if self._racerd_phandle:
self._racerd_phandle.terminate()
self._racerd_phandle.wait()
self._racerd_phandle = None
self._racerd_host = None
if not self._keep_logfiles:
# Remove stdout log
if self._server_stdout and p.exists( self._server_stdout ):
os.unlink( self._server_stdout )
self._server_stdout = None
# Remove stderr log
if self._server_stderr and p.exists( self._server_stderr ):
os.unlink( self._server_stderr )
self._server_stderr = None
def _RestartServer( self ):
"""
Restart racerd
"""
_logger.debug( 'RustCompleter restarting racerd' )
with self._server_state_lock:
if self.ServerIsRunning():
self._StopServer()
self._StartServer()
_logger.debug( 'RustCompleter has restarted racerd' )
def GetSubcommandsMap( self ):
return {
'GoTo' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'GoToDefinition' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'GoToDeclaration' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'StopServer' : ( lambda self, request_data, args:
self._StopServer() ),
'RestartServer' : ( lambda self, request_data, args:
self._RestartServer() ),
}
def _GoToDefinition( self, request_data ):
try:
definition = self._GetResponse( '/find_definition', request_data )
return responses.BuildGoToResponse( definition[ 'file_path' ],
definition[ 'line' ],
definition[ 'column' ] + 1 )
except Exception:
raise RuntimeError( 'Can\'t jump to definition.' )
def Shutdown( self ):
self._StopServer()
def _CreateHmacSecret( self ):
return base64.b64encode( os.urandom( HMAC_SECRET_LENGTH ) )
def DebugInfo( self, request_data ):
with self._server_state_lock:
if self.ServerIsRunning():
return ( 'racerd\n'
' listening at: {0}\n'
' racerd path: {1}\n'
' stdout log: {2}\n'
' stderr log: {3}').format( self._racerd_host,
self._racerd,
self._server_stdout,
self._server_stderr )
if self._server_stdout and self._server_stderr:
return ( 'racerd is no longer running\n'
' racerd path: {0}\n'
' stdout log: {1}\n'
' stderr log: {2}').format( self._racerd,
self._server_stdout,
self._server_stderr )
return 'racerd is not running'<|fim▁end|> |
response.raise_for_status() |
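`_ComputeRequestHmac` above delegates to ycmd's `hmac_utils.CreateRequestHmac`, whose internals are not shown in this file. As a generic illustration of per-request HMAC signing (a sketch under assumed conventions, not ycmd's exact byte layout):

```python
import hashlib
import hmac

def sign_request(method, path, body, secret):
    # Generic request-signing sketch: HMAC-SHA256 over method + path + body.
    # Illustrative only; ycmd's hmac_utils may combine the parts differently.
    message = method.encode() + path.encode() + (body or '').encode()
    return hmac.new(secret.encode(), message, hashlib.sha256).hexdigest()

# The resulting digest travels in a request header such as 'x-racerd-hmac'.
```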
<|file_name|>fill-in.js<|end_file_name|><|fim▁begin|>import getElement from './-get-element';
import isFormControl from './-is-form-control';
import { __focus__ } from './focus';
import settled from '../settled';
import fireEvent from './fire-event';
import { nextTickPromise } from '../-utils';
/**
Fill the provided text into the `value` property (or set `.innerHTML` when
the target is a content editable element) then trigger `change` and `input`
events on the specified target.
@public
@param {string|Element} target the element or selector to enter text into
@param {string} text the text to fill into the target element
@return {Promise<void>} resolves when the application is settled
*/
export default function fillIn(target, text) {
return nextTickPromise().then(() => {
if (!target) {
throw new Error('Must pass an element or selector to `fillIn`.');
}
let element = getElement(target);
if (!element) {
throw new Error(`Element not found when calling \`fillIn('${target}')\`.`);
}
let isControl = isFormControl(element);
if (!isControl && !element.isContentEditable) {
throw new Error('`fillIn` is only usable on form controls or contenteditable elements.');
}
if (typeof text === 'undefined' || text === null) {
throw new Error('Must provide `text` when calling `fillIn`.');
}
__focus__(element);
if (isControl) {
element.value = text;
} else {
element.innerHTML = text;
}
fireEvent(element, 'input');
fireEvent(element, 'change');
return settled();
});<|fim▁hole|><|fim▁end|> | } |
<|file_name|>TestPlantComment.java<|end_file_name|><|fim▁begin|>package umm3601.plant;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.Before;
import org.junit.Test;
import umm3601.digitalDisplayGarden.PlantController;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import static org.junit.Assert.*;
public class TestPlantComment {
private final static String databaseName = "data-for-testing-only";
private PlantController plantController;
@Before
public void populateDB() throws IOException {
PopulateMockDatabase db = new PopulateMockDatabase();
db.clearAndPopulateDBAgain();
plantController = new PlantController(databaseName);
}
@Test
public void successfulInputOfComment() throws IOException {
String json = "{ plantId: \"58d1c36efb0cac4e15afd278\", comment : \"Here is our comment for this test\" }";
assertTrue(plantController.addComment(json, "second uploadId"));
MongoClient mongoClient = new MongoClient();
MongoDatabase db = mongoClient.getDatabase(databaseName);
MongoCollection<Document> plants = db.getCollection("plants");
Document filterDoc = new Document();
<|fim▁hole|> filterDoc.append("uploadId", "second uploadId");
Iterator<Document> iter = plants.find(filterDoc).iterator();
Document plant = iter.next();
List<Document> plantComments = (List<Document>) ((Document) plant.get("metadata")).get("comments");
long comments = plantComments.size();
assertEquals(1, comments);
assertEquals("Here is our comment for this test", plantComments.get(0).getString("comment"));
assertNotNull(plantComments.get(0).getObjectId("_id"));
}
@Test
public void failedInputOfComment() throws IOException {
String json = "{ plantId: \"58d1c36efb0cac4e15afd27\", comment : \"Here is our comment for this test\" }";
assertFalse(plantController.addComment(json, "second uploadId"));
MongoClient mongoClient = new MongoClient();
MongoDatabase db = mongoClient.getDatabase(databaseName);
MongoCollection<Document> plants = db.getCollection("plants");
FindIterable findIterable = plants.find();
Iterator iterator = findIterable.iterator();
while(iterator.hasNext()){
Document plant = (Document) iterator.next();
List<Document> plantComments = (List<Document>) ((Document) plant.get("metadata")).get("comments");
assertEquals(0,plantComments.size());
}
}
}<|fim▁end|> | filterDoc.append("_id", new ObjectId("58d1c36efb0cac4e15afd278")); |
<|file_name|>_pathfix.py<|end_file_name|><|fim▁begin|>"""
Hack to get scripts to run from source checkout without having to set
PYTHONPATH.
"""
import sys
from os.path import dirname, join, abspath
<|fim▁hole|>project_path = abspath(join(db_path, ".."))
sys.path.insert(0, project_path)<|fim▁end|> | db_path = dirname(__file__) |
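The module above works purely by side effect. A sibling script would import it before any project imports; a sketch (the script and `myproject` package names are placeholders, not from the source):

```python
#!/usr/bin/env python
# Hypothetical script living next to _pathfix.py. Importing _pathfix first
# prepends the project root to sys.path, so the package import below works
# from a source checkout without setting PYTHONPATH.
import _pathfix  # noqa: F401  (imported only for its sys.path side effect)

from myproject import models  # placeholder package, now importable

if __name__ == "__main__":
    print(models)
```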
<|file_name|>app.js<|end_file_name|><|fim▁begin|>MapSearchApp.init = function () {
var realEstates = MapSearchApp.Repositories.realEstateRepository.getAll();
var pageView = new MapSearchApp.Views.PageView({
collection: realEstates
});<|fim▁hole|> MapSearchApp.trigger("afterRender");
};
$(function () {
MapSearchApp.init();
});<|fim▁end|> | pageView.render();
$(".js-app").html(pageView.$el); |
<|file_name|>SupplementalInfoRetriever.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.result.supplement;
import android.content.Context;
import android.os.AsyncTask;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.method.LinkMovementMethod;
import android.text.style.URLSpan;
import android.util.Log;
import android.widget.TextView;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.google.zxing.client.result.ISBNParsedResult;
import com.google.zxing.client.result.ParsedResult;
import com.google.zxing.client.result.ProductParsedResult;
import com.google.zxing.client.result.URIParsedResult;
import com.google.zxing.client.android.common.executor.AsyncTaskExecInterface;
import com.google.zxing.client.android.common.executor.AsyncTaskExecManager;
import com.google.zxing.client.android.history.HistoryManager;
public abstract class SupplementalInfoRetriever extends AsyncTask<Object,Object,Object> {
private static final String TAG = "SupplementalInfo";
public static void maybeInvokeRetrieval(TextView textView,
ParsedResult result,
HistoryManager historyManager,
Context context) {
AsyncTaskExecInterface taskExec = new AsyncTaskExecManager().build();
if (result instanceof URIParsedResult) {
taskExec.execute(new URIResultInfoRetriever(textView, (URIParsedResult) result, historyManager, context));
taskExec.execute(new TitleRetriever(textView, (URIParsedResult) result, historyManager));
} else if (result instanceof ProductParsedResult) {
String productID = ((ProductParsedResult) result).getProductID();
taskExec.execute(new ProductResultInfoRetriever(textView, productID, historyManager, context));
switch (productID.length()) {
case 12:
taskExec.execute(new AmazonInfoRetriever(textView, "UPC", productID, historyManager, context));
break;
case 13:
taskExec.execute(new AmazonInfoRetriever(textView, "EAN", productID, historyManager, context));
break;
}
} else if (result instanceof ISBNParsedResult) {
String isbn = ((ISBNParsedResult) result).getISBN();
taskExec.execute(new ProductResultInfoRetriever(textView, isbn, historyManager, context));
taskExec.execute(new BookResultInfoRetriever(textView, isbn, historyManager, context));
taskExec.execute(new AmazonInfoRetriever(textView, "ISBN", isbn, historyManager, context));
}
}
private final WeakReference<TextView> textViewRef;
private final WeakReference<HistoryManager> historyManagerRef;
private final List<Spannable> newContents;
private final List<String[]> newHistories;
SupplementalInfoRetriever(TextView textView, HistoryManager historyManager) {
textViewRef = new WeakReference<TextView>(textView);
historyManagerRef = new WeakReference<HistoryManager>(historyManager);
newContents = new ArrayList<Spannable>();
newHistories = new ArrayList<String[]>();
}
@Override
public final Object doInBackground(Object... args) {
try {
retrieveSupplementalInfo();
} catch (IOException e) {
Log.w(TAG, e);
}
return null;
}
@Override
protected final void onPostExecute(Object arg) {
TextView textView = textViewRef.get();
if (textView != null) {
for (Spannable content : newContents) {
textView.append(content);
}
textView.setMovementMethod(LinkMovementMethod.getInstance());
}
HistoryManager historyManager = historyManagerRef.get();
if (historyManager != null) {
for (String[] text : newHistories) {
historyManager.addHistoryItemDetails(text[0], text[1]);
}
}
}
abstract void retrieveSupplementalInfo() throws IOException;
<|fim▁hole|>
if (source != null) {
newTextCombined.append(source).append(' ');
}
int linkStart = newTextCombined.length();
boolean first = true;
for (String newText : newTexts) {
if (first) {
newTextCombined.append(newText);
first = false;
} else {
newTextCombined.append(" [");
newTextCombined.append(newText);
newTextCombined.append(']');
}
}
int linkEnd = newTextCombined.length();
String newText = newTextCombined.toString();
Spannable content = new SpannableString(newText + "\n\n");
if (linkURL != null) {
// Strangely, some Android browsers don't seem to register to handle HTTP:// or HTTPS://.
// Lower-case these as it should always be OK to lower-case these schemes.
if (linkURL.startsWith("HTTP://")) {
linkURL = "http" + linkURL.substring(4);
} else if (linkURL.startsWith("HTTPS://")) {
linkURL = "https" + linkURL.substring(5);
}
content.setSpan(new URLSpan(linkURL), linkStart, linkEnd, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
newContents.add(content);
newHistories.add(new String[] {itemID, newText});
}
static void maybeAddText(String text, Collection<String> texts) {
if (text != null && text.length() > 0) {
texts.add(text);
}
}
static void maybeAddTextSeries(Collection<String> textSeries, Collection<String> texts) {
if (textSeries != null && !textSeries.isEmpty()) {
boolean first = true;
StringBuilder authorsText = new StringBuilder();
for (String author : textSeries) {
if (first) {
first = false;
} else {
authorsText.append(", ");
}
authorsText.append(author);
}
texts.add(authorsText.toString());
}
}
}<|fim▁end|> | final void append(String itemID, String source, String[] newTexts, String linkURL) {
StringBuilder newTextCombined = new StringBuilder(); |
<|file_name|>api_demo_pdos_broadening.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
plt.style.use("seaborn-colorblind")
import galore
import galore.plot
vasprun = './test/MgO/vasprun.xml.gz'
xmin, xmax = (-10, 2)
fig = plt.figure()
for i, l in enumerate(np.arange(0.05, 0.50, 0.05)):
ax = fig.add_subplot(3, 3, i + 1)
ax.set_title("$\gamma = {0:4.2f}$".format(l))<|fim▁hole|>
fig.tight_layout()
plt.show()<|fim▁end|> | plotting_data = galore.process_pdos(input=[vasprun], lorentzian=l,
xmin=xmin, xmax=xmax)
galore.plot.plot_pdos(plotting_data, ax=ax)
ax.legend().set_visible(False) |
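The loop sweeps the Lorentzian half-width gamma passed to `galore.process_pdos`. Conceptually, broadening convolves each discrete DOS contribution with a unit-area Lorentzian L(x; gamma) = (gamma/pi) / (x^2 + gamma^2); a small standalone sketch of that step (not galore's implementation):

```python
import numpy as np

def lorentzian_broaden(energies, weights, grid, gamma):
    """Broaden a stick spectrum onto `grid` with half-width `gamma`."""
    x = grid[:, None] - np.asarray(energies)[None, :]
    kernel = (gamma / np.pi) / (x ** 2 + gamma ** 2)  # unit-area Lorentzian
    return kernel @ np.asarray(weights)

# grid = np.linspace(-10, 2, 500)
# broadened = lorentzian_broaden([-5.0, -1.2], [1.0, 0.5], grid, 0.25)
```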
<|file_name|>ControllerMain.py<|end_file_name|><|fim▁begin|>"""
Main controller.
"""
import json
from Server.Importer import ImportFromModule
class ControllerMain(ImportFromModule("Server.ControllerBase", "ControllerBase")):
"""
Main controller.
"""
def ShowPage(self, uriParameters, postedParameters):
"""
Shows the home page.
"""
webPage = ImportFromModule("WebApplication.Views.PageView", "PageView")("Main")
self.SetOutput(webPage.GetContent())
def EchoText(self, uriParameters, postedParameters):
"""<|fim▁hole|><|fim▁end|> | Echo the incomming text.
"""
self.SetOutput(json.dumps(uriParameters, indent=4)) |
<|file_name|>31.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from typing import List
"""
31. Next Permutation
https://leetcode.com/problems/next-permutation/
"""
class Solution:
def nextPermutation(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
best = None
for i in range(len(nums) - 1):
smallest = 9999999
for j in range(i + 1, len(nums)):
if (nums[i] < nums[j] and nums[j] < smallest):
smallest = nums[j]
best = i, j
if not best:
nums[:] = sorted(nums)
else:
i, j = best
nums[i], nums[j] = nums[j], nums[i]
nums[:] = nums[:i+1] + sorted(nums[i+1:])<|fim▁hole|> # [4, 2, 2, 0, 0, 2, 3]
# [4, 2, 0, 2, 3, 0, 2]
# [4, 2, 0, 3, 0, 2, 2]
a = [4, 2, 0, 2, 3, 2, 0]
sol.nextPermutation(a)
print(a)
return 0
if __name__ == '__main__':
raise SystemExit(main())<|fim▁end|> |
def main():
sol = Solution() |
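The `Solution` above compares every pair, which is O(n^2). For contrast, the textbook O(n) next-permutation (find the rightmost ascent, swap, reverse the suffix) looks like this; an alternative sketch, not the code under test:

```python
def next_permutation_linear(nums):
    """O(n) next permutation: locate the rightmost ascent, swap its left
    element with the smallest larger element to its right, then reverse
    the descending suffix."""
    i = len(nums) - 2
    while i >= 0 and nums[i] >= nums[i + 1]:
        i -= 1
    if i >= 0:
        j = len(nums) - 1
        while nums[j] <= nums[i]:
            j -= 1
        nums[i], nums[j] = nums[j], nums[i]
    nums[i + 1:] = reversed(nums[i + 1:])

a = [4, 2, 0, 2, 3, 2, 0]
next_permutation_linear(a)
assert a == [4, 2, 0, 3, 0, 2, 2]  # matches the expected output noted above
```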
<|file_name|>TheSpidersWeb.js<|end_file_name|><|fim▁begin|>const PlotCard = require('../../plotcard.js');
class TheSpidersWeb extends PlotCard {
setupCardAbilities(ability) {
this.reaction({
limit: ability.limit.perPhase(1),
when: {
onClaimApplied: event => event.player === this.controller && event.challenge.challengeType === 'intrigue'
},
handler: () => {
this.game.addMessage('{0} uses {1} to be able to initiate an additional {2} challenge with claim raised by 1', this.controller, this, 'intrigue');<|fim▁hole|> }));
this.untilEndOfPhase(ability =>({
condition: () => this.game.isDuringChallenge({ challengeType: 'intrigue' }),
match: card => card === this.controller.activePlot,
effect: ability.effects.modifyClaim(1)
}));
}
});
}
}
TheSpidersWeb.code = '09049';
module.exports = TheSpidersWeb;<|fim▁end|> | this.untilEndOfPhase(ability => ({
targetController: 'current',
effect: ability.effects.mayInitiateAdditionalChallenge('intrigue') |
<|file_name|>k8s_openstack_monitor_manager.py<|end_file_name|><|fim▁begin|># coding: utf-8
import threading
import time
from datetime import datetime
from flask import Flask
from influxdb import InfluxDBClient
from oslo_service import periodic_task
from oslo_config import cfg
from oslo_log import log
from oslo_service import service
from keystoneauth1.identity import v3
from keystoneauth1 import session
from keystoneclient.v3 import client as keystone_client
from neutronclient.v2_0 import client as neutron_client
from novaclient import client as nova_client
import glanceclient as glance_client
import config
wsgi_app = Flask(__name__)
CONF = cfg.CONF
LOG = log.getLogger(__name__)
metrics_map = {}
class ServiceManager(service.Service):
def __init__(self):
super(ServiceManager, self).__init__()
def start(self):
LOG.info('start')
if CONF.influxdb.enable:
self.influxdb_periodic_tasks = InfluxdbPeriodicTasks()
self.tg.add_dynamic_timer(self._get_influxdb_periodic_tasks,
initial_delay=0,
periodic_interval_max=120)
if not CONF.rabbitmq_manager.enable_prometheus_exporter:
self.prometheus_exporter_thread = self._spawn_prometheus_exporter()
else:
self.prometheus_exporter_thread = None
self.periodic_tasks = ServicePeriodicTasks()
self.tg.add_dynamic_timer(self._get_periodic_tasks,
initial_delay=0,
periodic_interval_max=120)
def wait(self):
LOG.info('wait')
def stop(self):
LOG.info('stop')
if self.prometheus_exporter_thread is not None:
self.prometheus_exporter_thread.join()
super(ServiceManager, self).stop()
def _get_periodic_tasks(self, raise_on_error=False):
ctxt = {}
return self.periodic_tasks.periodic_tasks(ctxt, raise_on_error=raise_on_error)
def _get_influxdb_periodic_tasks(self, raise_on_error=False):
ctxt = {}
return self.influxdb_periodic_tasks.periodic_tasks(ctxt, raise_on_error=raise_on_error)
def _spawn_prometheus_exporter(self):
t = threading.Thread(target=wsgi_app.run, kwargs={
'host': CONF.openstack_deploy_manager.bind_host,
'port': CONF.openstack_deploy_manager.bind_port
})
t.daemon = True
t.start()
return t
#
# influxdb reporter
#
class InfluxdbPeriodicTasks(periodic_task.PeriodicTasks):
def __init__(self):
super(InfluxdbPeriodicTasks, self).__init__(CONF)
self.influxdb = InfluxDBClient(
CONF.influxdb.host,
CONF.influxdb.port,
CONF.influxdb.user,
CONF.influxdb.password,
CONF.influxdb.database,
)
def periodic_tasks(self, context, raise_on_error=False):
return self.run_periodic_tasks(context, raise_on_error=raise_on_error)
@periodic_task.periodic_task(spacing=60)
def report(self, context):
LOG.info('Report metrics to influxdb')
json_body = []
for measurement, metrics in metrics_map.items():
json_body.append({
"measurement": measurement.split(':')[0],
"tags": metrics["tags"],<|fim▁hole|> })
if len(json_body) > 0:
self.influxdb.write_points(json_body)
#
# prometheus exporter
#
@wsgi_app.route("/")
def status():
return "OK"
@wsgi_app.route("/metrics")
def metrics():
pmetrics = ''
for measurement, metrics in metrics_map.items():
labels = ''
for k, v in metrics['tags'].items():
labels += '{0}="{1}",'.format(k, v)
labels = labels[:-1]
pmetrics += '{0}{{{1}}} {2}\n'.format(measurement.split(':')[0], labels, metrics['value'])
return pmetrics
#
# service tasks
#
class ServicePeriodicTasks(periodic_task.PeriodicTasks):
def __init__(self):
super(ServicePeriodicTasks, self).__init__(CONF)
auth = v3.Password(auth_url=CONF.openstack_auth.auth_url,
username=CONF.openstack_auth.username,
password=CONF.openstack_auth.password,
project_name=CONF.openstack_auth.project_name,
user_domain_id=CONF.openstack_auth.user_domain_id,
project_domain_id=CONF.openstack_auth.project_domain_id,
)
sess = session.Session(auth=auth, verify=False)
self.keystone = keystone_client.Client(session=sess)
self.neutron = neutron_client.Client(session=sess)
self.nova = nova_client.Client('2.1', session=sess)
self.glance = glance_client.Client('2', session=sess)
def periodic_tasks(self, context, raise_on_error=False):
return self.run_periodic_tasks(context, raise_on_error=raise_on_error)
@periodic_task.periodic_task(spacing=30)
def check(self, context):
LOG.info('Start check openstack')
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
start_time = time.time()
self.keystone.services.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_keystone_service_list_latency'] = {
'tags': {"svc": "keystone"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.neutron.list_networks()
elapsed_time = time.time() - start_time
metrics_map['openstack_neutron_network_list_latency'] = {
'tags': {"svc": "neutron"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.nova.flavors.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_nova_flavor_list_latency'] = {
'tags': {"svc": "nova"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.glance.images.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_glance_image_list_latency'] = {
'tags': {"svc": "glance"},
'value': elapsed_time,
'time': timestamp,
}
LOG.info(metrics_map)
@periodic_task.periodic_task(spacing=30)
def check_k8s(self, context):
LOG.info('Start check k8s')
# TODO
def main():
config.init()
launcher = service.launch(CONF, ServiceManager())
launcher.wait()
if __name__ == '__main__':
main()<|fim▁end|> | "fields": {
"value": metrics["value"],
} |
<|file_name|>test_container_acl_negative.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ObjectACLsNegativeTest(base.BaseObjectTest):
"""Negative tests of object ACLs"""
credentials = [['operator', CONF.object_storage.operator_role],
['operator_alt', CONF.object_storage.operator_role]]
@classmethod
def setup_credentials(cls):
super(ObjectACLsNegativeTest, cls).setup_credentials()
cls.os_operator = cls.os_roles_operator_alt
<|fim▁hole|>
def setUp(self):
super(ObjectACLsNegativeTest, self).setUp()
self.container_name = data_utils.rand_name(name='TestContainer')
self.container_client.update_container(self.container_name)
@classmethod
def resource_cleanup(cls):
cls.delete_containers()
super(ObjectACLsNegativeTest, cls).resource_cleanup()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af587587-0c24-4e15-9822-8352ce711013')
def test_write_object_without_using_creds(self):
"""Test writing object without using credentials"""
# trying to create object with empty headers
# X-Auth-Token is not provided
object_name = data_utils.rand_name(name='Object')
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af85af0b-a025-4e72-a90e-121babf55720')
def test_delete_object_without_using_creds(self):
"""Test deleting object without using credentials"""
# create object
object_name = data_utils.rand_name(name='Object')
self.object_client.create_object(self.container_name, object_name,
'data')
# trying to delete object with empty headers
# X-Auth-Token is not provided
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('63d84e37-55a6-42e2-9e5f-276e60e26a00')
def test_write_object_with_non_authorized_user(self):
"""Test writing object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
# trying to create object with non-authorized user
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('abf63359-be52-4feb-87dd-447689fc77fd')
def test_read_object_with_non_authorized_user(self):
"""Test reading object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to get object with non authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('7343ac3d-cfed-4198-9bb0-00149741a492')
def test_delete_object_with_non_authorized_user(self):
"""Test deleting object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to delete object with non-authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('9ed01334-01e9-41ea-87ea-e6f465582823')
def test_read_object_without_rights(self):
"""Test reading object without rights"""
# update X-Container-Read metadata ACL
cont_headers = {'X-Container-Read': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to read the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('a3a585a7-d8cf-4b65-a1a0-edc2b1204f85')
def test_write_object_without_rights(self):
"""Test writing object without rights"""
# update X-Container-Write metadata ACL
cont_headers = {'X-Container-Write': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name,
object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('8ba512ad-aa6e-444e-b882-2906a0ea2052')
def test_write_object_without_write_rights(self):
"""Test writing object without write rights"""
# update X-Container-Read and X-Container-Write metadata ACL
tenant_name = self.os_operator.credentials.tenant_name
username = self.os_operator.credentials.username
cont_headers = {'X-Container-Read':
tenant_name + ':' + username,
'X-Container-Write': ''}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object without write rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name,
object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('b4e366f8-f185-47ab-b789-df4416f9ecdb')
def test_delete_object_without_write_rights(self):
"""Test deleting object without write rights"""
# update X-Container-Read and X-Container-Write metadata ACL
tenant_name = self.os_operator.credentials.tenant_name
username = self.os_operator.credentials.username
cont_headers = {'X-Container-Read':
tenant_name + ':' + username,
'X-Container-Write': ''}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to delete the object without write rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.delete_object,
self.container_name,
object_name)<|fim▁end|> | @classmethod
def resource_setup(cls):
super(ObjectACLsNegativeTest, cls).resource_setup()
cls.test_auth_data = cls.os_operator.auth_provider.auth_data |
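These negative tests exercise Swift's container ACL headers (`X-Container-Read` and `X-Container-Write`). A plain-`requests` sketch of the grant the tests configure (the endpoint and token values are placeholders):

```python
import requests

def grant_read_acl(storage_url, token, container, tenant, user):
    """POST a 'tenant:user' read ACL onto a Swift container (sketch)."""
    resp = requests.post(
        "%s/%s" % (storage_url, container),
        headers={
            "X-Auth-Token": token,                        # placeholder token
            "X-Container-Read": "%s:%s" % (tenant, user),
        },
    )
    resp.raise_for_status()
    return resp.status_code  # Swift normally answers 204 No Content
```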
<|file_name|>deeplearning_examplerun.py<|end_file_name|><|fim▁begin|># Specify the response and predictor columns
y = "C785"
x = train.names[0:784]
# We encode the response column as categorical for multinomial classification
train[y] = train[y].asfactor()
test[y] = test[y].asfactor()
# Train a Deep Learning model and validate on a test set
model = h2o.deeplearning(x=x,
y=y,
training_frame=train,
validation_frame=test,
distribution="multinomial",
activation="RectifierWithDropout",
hidden=[200,200,200],
input_dropout_ratio=0.2,
l1=1e-5, <|fim▁hole|><|fim▁end|> | epochs=10) |
<|file_name|>videoframe.rs<|end_file_name|><|fim▁begin|>use buffer::Buffer;
use ffi::*;
use std::mem;
use std::slice::from_raw_parts;
pub struct VideoFrame{
vf: GstVideoFrame,
buffer: Buffer
}
pub struct VideoPlane<'a>{
vf: &'a GstVideoFrame,
p: usize
}
pub struct VideoComponent<'a>{
vf: &'a GstVideoFrame,
c: usize
}
macro_rules! GST_VIDEO_FRAME_COMP_WIDTH(
($video_frame:expr,$c:expr) => ( -((-$video_frame.info.width) >> ((*$video_frame.info.finfo).w_sub[$c]) as usize) ) ;
);
macro_rules! GST_VIDEO_FRAME_COMP_HEIGHT(
($video_frame:expr,$c:expr) => ( -((-$video_frame.info.height) >> ((*$video_frame.info.finfo).h_sub[$c]) as usize) );
);
<|fim▁hole|>macro_rules! GST_VIDEO_FRAME_COMP_OFFSET(
($video_frame:expr,$c:expr) => ((($video_frame.info.offset)[(*$video_frame.info.finfo).plane[($c)] as usize]) as u64 + (*$video_frame.info.finfo).poffset[($c)] as u64)
);
macro_rules! GST_VIDEO_FRAME_COMP_STRIDE(
($video_frame:expr,$c:expr) => (($video_frame.info.stride)[(*$video_frame.info.finfo).plane[($c)] as usize])
);
macro_rules! GST_VIDEO_FRAME_COMP_DATA(
($video_frame:expr,$c:expr) => {
($video_frame.data[(*$video_frame.info.finfo).plane[$c] as usize] as *mut u8).offset((*$video_frame.info.finfo).poffset[$c] as isize)
}
);
impl<'a> VideoPlane<'a>{
pub fn stride(&self) -> i32{
self.info().stride[self.p]
}
pub fn offset(&self) -> u64{
self.info().offset[self.p] as u64
}
pub fn width(&self) -> i32{
unsafe{ GST_VIDEO_FRAME_COMP_WIDTH!(self.vf,self.p) }
}
pub fn height(&self) -> i32{
unsafe{ GST_VIDEO_FRAME_COMP_HEIGHT!(self.vf,self.p) }
}
pub fn size(&self) -> usize{
(self.stride()*self.height()) as usize
}
pub fn len<T>(&self) -> usize{
self.size()/mem::size_of::<T>()
}
pub fn depth(&self) -> u32{
self.format_info().depth[self.p]
}
pub fn data<T:'a>(&self) -> &'a[T]{
unsafe{
from_raw_parts( mem::transmute(self.vf.data[self.p]), self.len::<T>())
}
}
fn info(&self) -> &::VideoInfo{
&self.vf.info
}
fn format_info(&self) -> &GstVideoFormatInfo{
unsafe{ &(*self.vf.info.finfo) }
}
}
impl<'a> VideoComponent<'a>{
pub fn stride(&self) -> i32{
unsafe{ GST_VIDEO_FRAME_COMP_STRIDE!(self.vf,self.c) }
}
pub fn offset(&self) -> u64{
unsafe{ GST_VIDEO_FRAME_COMP_OFFSET!(self.vf,self.c) }
}
pub fn width(&self) -> i32{
unsafe{ GST_VIDEO_FRAME_COMP_WIDTH!(self.vf,self.c) }
}
pub fn height(&self) -> i32{
unsafe{ GST_VIDEO_FRAME_COMP_HEIGHT!(self.vf,self.c) }
}
pub fn size(&self) -> usize{
(self.stride()*self.height()) as usize
}
pub fn len<T>(&self) -> usize{
self.size()/mem::size_of::<T>()
}
pub fn depth(&self) -> u32{
self.format_info().depth[self.c]
}
pub fn data<T:'a>(&self) -> &'a[T]{
unsafe{
let data = GST_VIDEO_FRAME_COMP_DATA!(self.vf,self.c);
from_raw_parts( mem::transmute(data), self.len::<T>())
}
}
fn format_info(&self) -> &GstVideoFormatInfo{
unsafe{ &(*self.vf.info.finfo) }
}
}
impl Drop for VideoFrame{
fn drop(&mut self){
unsafe{ gst_video_frame_unmap(&mut self.vf) };
}
}
impl VideoFrame{
pub unsafe fn new(mut vi: GstVideoInfo, mut buffer: Buffer) -> Option<VideoFrame>{
let mut gstframe = mem::zeroed();
if gst_video_frame_map(&mut gstframe, &mut vi, buffer.gst_buffer_mut(), GST_MAP_READ) != 0{
Some(VideoFrame{ vf: gstframe, buffer: buffer })
}else{
None
}
}
#[inline]
pub fn info(&self) -> &::VideoInfo{
&self.vf.info
}
#[inline]
pub fn flags(&self) -> &GstVideoFlags{
&self.vf.flags
}
#[inline]
pub fn buffer(&self) -> &Buffer{
&self.buffer
}
#[inline]
pub fn format_info(&self) -> &GstVideoFormatInfo{
unsafe{ &(*self.vf.info.finfo) }
}
#[inline]
pub fn format(&self) -> &GstVideoFormat{
&self.format_info().format
}
#[inline]
pub fn width(&self) -> i32{
self.info().width
}
#[inline]
pub fn height(&self) -> i32{
self.info().height
}
#[inline]
pub fn size(&self) -> u64{
self.info().size as u64
}
#[inline]
pub fn len<T>(&self) -> usize{
(self.size() / mem::size_of::<T>() as u64) as usize
}
#[inline]
pub fn is_interlaced(&self) -> bool{
self.flags() & GST_VIDEO_FRAME_FLAG_INTERLACED == GST_VIDEO_FRAME_FLAG_INTERLACED
}
#[inline]
pub fn is_tff(&self) -> bool{
self.flags() & GST_VIDEO_FRAME_FLAG_TFF == GST_VIDEO_FRAME_FLAG_TFF
}
#[inline]
pub fn is_rff(&self) -> bool{
self.flags() & GST_VIDEO_FRAME_FLAG_RFF == GST_VIDEO_FRAME_FLAG_RFF
}
#[inline]
pub fn is_onefield(&self) -> bool{
self.flags() & GST_VIDEO_FRAME_FLAG_ONEFIELD == GST_VIDEO_FRAME_FLAG_ONEFIELD
}
#[inline]
pub fn n_planes(&self) -> u32{
self.format_info().n_planes
}
#[inline]
pub fn plane<'a>(&'a self, p: u32) -> Option<VideoPlane<'a>>{
if p < self.n_planes(){
Some(VideoPlane{
vf: &self.vf,
p: p as usize
})
}else{
None
}
}
#[inline]
pub fn n_components(&self) -> u32{
self.format_info().n_components
}
#[inline]
pub fn component<'a>(&'a self, c: u32) -> Option<VideoComponent<'a>>{
if c < self.n_components(){
Some(VideoComponent{
vf: &self.vf,
c: c as usize
})
}else{
None
}
}
}<|fim▁end|> | |
<|file_name|>json.rs<|end_file_name|><|fim▁begin|>#[derive(Debug, Deserialize)]
/// A user object returned from the API
pub struct User{
/// The username of the user
pub username: String
}
#[derive(Debug, Deserialize)]
/// An object containing the ID of something newly created
pub struct Id<T>
{
/// The ID
pub id: T
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
/// A response that either is an error or a success
pub enum HueResponse<T> {
/// The result from the bridge if it didn't fail
Success(T),
/// The error that was returned from the bridge
Error(Error)
}
<|fim▁hole|>impl<T> HueResponse<T> {
pub fn into_result(self) -> Result<T, HueError> {
match self {
HueResponse::Success(s) => Ok(s),
HueResponse::Error(e) => Err(e.into()),
}
}
}
#[derive(Debug, Clone, Serialize)]
pub struct SceneRecall<'a> {
pub scene: &'a str
}
#[derive(Debug, Deserialize)]
/// An error object returned from the API
pub struct Error {
/// The URI the error happened on
pub address: String,
/// A short description of the error
pub description: String,
/// Its errorcode
#[serde(rename="type")]
pub code: u16,
}<|fim▁end|> | use ::errors::HueError;
|
<|file_name|>network_benches.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate test;
#[macro_use]
extern crate timeit;
extern crate collenchyma as co;
extern crate leaf;
#[cfg(feature = "cuda")]
mod cuda {
use test::Bencher;
use co::prelude::*;
use std::sync::{Arc, RwLock};
use leaf::layers::*;
use leaf::layer::*;
use std::rc::Rc;
#[cfg(feature = "cuda")]
fn cuda_backend() -> Rc<Backend<Cuda>> {
Rc::new(Backend::<Cuda>::default().unwrap())
}
#[cfg(feature = "opencl")]
#[allow(dead_code)]
fn opencl_backend() -> Rc<Backend<OpenCL>> {
Rc::new(Backend::<OpenCL>::default().unwrap())
}
#[inline(never)]
#[allow(unused_variables)]
fn bench_profile<F: FnMut() -> ()>(
b: &mut Bencher,
mut bench_func: F,
times: usize) {
timeit_loops!(times, {
bench_func();
});
}
// #[inline(never)]
// fn sync_back_and_forth(
// b: &mut Bencher,
// n: usize,
// nt_device: &DeviceType,
// cl_device: &DeviceType,
// mem: &mut SharedTensor<u8>
// ) {
// b.iter(|| {
// for _ in 0..n {
// match mem.sync(&cl_device) {
// Ok(_) => assert!(true),
// Err(err) => {
// println!("{:?}", err);
// assert!(false);
// }
// }
// match mem.sync(&nt_device) {
// Ok(_) => assert!(true),
// Err(err) => {
// println!("{:?}", err);
// assert!(false);
// }
// }
// }
// });
// }
#[bench]
#[ignore]
#[cfg(feature = "cuda")]
fn bench_mnsit_forward_1(b: &mut Bencher) {
let mut cfg = SequentialConfig::default();
// set up input
cfg.add_input("in", &vec![1, 30, 30]);
cfg.add_input("label", &vec![1, 1, 10]);
// set up sigmoid
let mut sig_cfg = LayerConfig::new("sig", LayerType::Sigmoid);
sig_cfg.add_input("in");
sig_cfg.add_output("sig_out");
cfg.add_layer(sig_cfg);
let fc_layer_cfg = LinearConfig { output_size: 10 };
let mut fc_cfg = LayerConfig::new("fully_connected", LayerType::Linear(fc_layer_cfg));
fc_cfg.add_input("sig_out");
fc_cfg.add_output("fc_out");
cfg.add_layer(fc_cfg);
// set up softmax_loss
// let mut loss_cfg = LayerConfig::new("loss", LayerType::SoftmaxLoss);
// loss_cfg.add_input("fc_out");
// loss_cfg.add_input("label");
// cfg.add_layer(loss_cfg);
let backend = cuda_backend();
let mut network = Layer::from_config(
backend.clone(), &LayerConfig::new("network", LayerType::Sequential(cfg)));
let _ = timeit_loops!(10, {
let inp = SharedTensor::<f32>::new(backend.device(), &vec![1, 30, 30]).unwrap();
let inp_lock = Arc::new(RwLock::new(inp));
network.forward(&[inp_lock]);
});
// b.iter(|| {
// for _ in 0..1 {
// let inp = Blob::from_data(SharedTensor::<f32>::new(backend.device(), &vec![1, 30, 30]).unwrap());
// let label = Blob::from_data(SharedTensor::<f32>::new(native_backend.device(), &vec![1, 1, 10]).unwrap());
//
// let inp_lock = Arc::new(RwLock::new(inp));
// let label_lock = Arc::new(RwLock::new(label));
//
// network.forward(&[inp_lock, label_lock], loss);
// }
// });
}
#[bench]
// #[ignore]
#[cfg(feature = "cuda")]
fn alexnet_forward(b: &mut Bencher) {
let mut cfg = SequentialConfig::default();
// Layer: data
cfg.add_input("data", &vec![128, 3, 224, 224]);
// Layer: conv1
let conv1_layer_cfg = ConvolutionConfig {
num_output: 64,
filter_shape: vec![11],
padding: vec![2],
stride: vec![4]
};
let mut conv1_cfg = LayerConfig::new("conv1", LayerType::Convolution(conv1_layer_cfg));
conv1_cfg.add_input("data");
conv1_cfg.add_output("conv1_preac");
cfg.add_layer(conv1_cfg);
// Layer: conv1/relu
let mut conv1_relu_cfg = LayerConfig::new("conv1/relu", LayerType::ReLU);
conv1_relu_cfg.add_input("conv1_preac");
conv1_relu_cfg.add_output("conv1_out");
cfg.add_layer(conv1_relu_cfg);
// Layer: pool1
let pool1_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool1_cfg = LayerConfig::new("pool1", LayerType::Pooling(pool1_layer_cfg));
pool1_cfg.add_input("conv1_out");
pool1_cfg.add_output("pool1_out");
cfg.add_layer(pool1_cfg);
// Layer: conv2
let conv2_layer_cfg = ConvolutionConfig {
num_output: 192,
filter_shape: vec![5],
padding: vec![2],
stride: vec![1]
};
let mut conv2_cfg = LayerConfig::new("conv2", LayerType::Convolution(conv2_layer_cfg));
conv2_cfg.add_input("pool1_out");
conv2_cfg.add_output("conv2_preac");
cfg.add_layer(conv2_cfg);
// Layer: conv2/relu
let mut conv2_relu_cfg = LayerConfig::new("conv2/relu", LayerType::ReLU);
conv2_relu_cfg.add_input("conv2_preac");
conv2_relu_cfg.add_output("conv2_out");
cfg.add_layer(conv2_relu_cfg);
// Layer: pool2
let pool2_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool2_cfg = LayerConfig::new("pool2", LayerType::Pooling(pool2_layer_cfg));
pool2_cfg.add_input("conv2_out");
pool2_cfg.add_output("pool2_out");
cfg.add_layer(pool2_cfg);
// Layer: conv3
let conv3_layer_cfg = ConvolutionConfig {
num_output: 384,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv3_cfg = LayerConfig::new("conv3", LayerType::Convolution(conv3_layer_cfg));
conv3_cfg.add_input("pool2_out");
conv3_cfg.add_output("conv3_preac");
cfg.add_layer(conv3_cfg);
// Layer: conv3/relu
let mut conv3_relu_cfg = LayerConfig::new("conv3/relu", LayerType::ReLU);
conv3_relu_cfg.add_input("conv3_preac");
conv3_relu_cfg.add_output("conv3_out");
cfg.add_layer(conv3_relu_cfg);
// Layer: conv4
let conv4_layer_cfg = ConvolutionConfig {
num_output: 256,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv4_cfg = LayerConfig::new("conv4", LayerType::Convolution(conv4_layer_cfg));
conv4_cfg.add_input("conv3_out");
conv4_cfg.add_output("conv4_preac");
cfg.add_layer(conv4_cfg);
// Layer: conv4/relu
let mut conv4_relu_cfg = LayerConfig::new("conv4/relu", LayerType::ReLU);
conv4_relu_cfg.add_input("conv4_preac");
conv4_relu_cfg.add_output("conv4_out");
cfg.add_layer(conv4_relu_cfg);
// Layer: conv5
let conv5_layer_cfg = ConvolutionConfig {
num_output: 256,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv5_cfg = LayerConfig::new("conv5", LayerType::Convolution(conv5_layer_cfg));
conv5_cfg.add_input("conv4_out");
conv5_cfg.add_output("conv5_preac");
cfg.add_layer(conv5_cfg);
// Layer: conv5/relu
let mut conv5_relu_cfg = LayerConfig::new("conv5/relu", LayerType::ReLU);
conv5_relu_cfg.add_input("conv5_preac");
conv5_relu_cfg.add_output("conv5_out");
cfg.add_layer(conv5_relu_cfg);
// Layer: pool3
let pool3_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool3_cfg = LayerConfig::new("pool3", LayerType::Pooling(pool3_layer_cfg));
pool3_cfg.add_input("conv5_out");
pool3_cfg.add_output("pool3_out");
cfg.add_layer(pool3_cfg);
// Layer: fc1
let fc1_layer_cfg = LinearConfig { output_size: 4096 };
let mut fc1_cfg = LayerConfig::new("fc1", LayerType::Linear(fc1_layer_cfg));
fc1_cfg.add_input("pool3_out");
fc1_cfg.add_output("fc1_out");
cfg.add_layer(fc1_cfg);
// Layer: fc2
let fc2_layer_cfg = LinearConfig { output_size: 4096 };
let mut fc2_cfg = LayerConfig::new("fc2", LayerType::Linear(fc2_layer_cfg));
fc2_cfg.add_input("fc1_out");
fc2_cfg.add_output("fc2_out");
cfg.add_layer(fc2_cfg);
// Layer: fc3
let fc3_layer_cfg = LinearConfig { output_size: 1000 };
let mut fc3_cfg = LayerConfig::new("fc3", LayerType::Linear(fc3_layer_cfg));
fc3_cfg.add_input("fc2_out");
fc3_cfg.add_output("fc3_out");
cfg.add_layer(fc3_cfg);
let backend = cuda_backend();
// let native_backend = native_backend();
let mut network = Layer::from_config(
backend.clone(), &LayerConfig::new("network", LayerType::Sequential(cfg)));
let func = || {
let forward_time = timeit_loops!(1, {
                let inp = SharedTensor::<f32>::new(backend.device(), &vec![128, 3, 224, 224]).unwrap(); // match the declared "data" input shape
let inp_lock = Arc::new(RwLock::new(inp));
network.forward(&[inp_lock]);
});
println!("Forward step: {}", forward_time);
};
{ bench_profile(b, func, 10); }
}
#[bench]
#[ignore]
#[cfg(feature = "cuda")]
fn small_alexnet_forward(b: &mut Bencher) {
// let _ = env_logger::init();
let mut cfg = SequentialConfig::default();
// Layer: data
cfg.add_input("data", &vec![128, 3, 112, 112]);
// Layer: conv1
let conv1_layer_cfg = ConvolutionConfig {
num_output: 32,
filter_shape: vec![11],
padding: vec![2],
stride: vec![4]
};
let mut conv1_cfg = LayerConfig::new("conv1", LayerType::Convolution(conv1_layer_cfg));
conv1_cfg.add_input("data");
conv1_cfg.add_output("conv1_preac");
cfg.add_layer(conv1_cfg);
// Layer: conv1/relu
let mut conv1_relu_cfg = LayerConfig::new("conv1/relu", LayerType::ReLU);
conv1_relu_cfg.add_input("conv1_preac");
conv1_relu_cfg.add_output("conv1_out");
cfg.add_layer(conv1_relu_cfg);
// Layer: pool1
let pool1_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool1_cfg = LayerConfig::new("pool1", LayerType::Pooling(pool1_layer_cfg));
pool1_cfg.add_input("conv1_out");
pool1_cfg.add_output("pool1_out");
cfg.add_layer(pool1_cfg);
// Layer: conv2
let conv2_layer_cfg = ConvolutionConfig {
num_output: 96,
filter_shape: vec![5],
padding: vec![2],
stride: vec![1]
};
let mut conv2_cfg = LayerConfig::new("conv2", LayerType::Convolution(conv2_layer_cfg));
conv2_cfg.add_input("pool1_out");
conv2_cfg.add_output("conv2_preac");
cfg.add_layer(conv2_cfg);
// Layer: conv2/relu
let mut conv2_relu_cfg = LayerConfig::new("conv2/relu", LayerType::ReLU);
conv2_relu_cfg.add_input("conv2_preac");
conv2_relu_cfg.add_output("conv2_out");
cfg.add_layer(conv2_relu_cfg);
// Layer: pool2
let pool2_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool2_cfg = LayerConfig::new("pool2", LayerType::Pooling(pool2_layer_cfg));
pool2_cfg.add_input("conv2_out");
pool2_cfg.add_output("pool2_out");
cfg.add_layer(pool2_cfg);
// Layer: conv3
let conv3_layer_cfg = ConvolutionConfig {
num_output: 142,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv3_cfg = LayerConfig::new("conv3", LayerType::Convolution(conv3_layer_cfg));
conv3_cfg.add_input("pool2_out");
conv3_cfg.add_output("conv3_preac");
cfg.add_layer(conv3_cfg);
// Layer: conv3/relu
let mut conv3_relu_cfg = LayerConfig::new("conv3/relu", LayerType::ReLU);
conv3_relu_cfg.add_input("conv3_preac");
conv3_relu_cfg.add_output("conv3_out");
cfg.add_layer(conv3_relu_cfg);
// Layer: conv4
let conv4_layer_cfg = ConvolutionConfig {
num_output: 128,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv4_cfg = LayerConfig::new("conv4", LayerType::Convolution(conv4_layer_cfg));
conv4_cfg.add_input("conv3_out");
conv4_cfg.add_output("conv4_preac");
cfg.add_layer(conv4_cfg);<|fim▁hole|> conv4_relu_cfg.add_output("conv4_out");
cfg.add_layer(conv4_relu_cfg);
// Layer: conv5
let conv5_layer_cfg = ConvolutionConfig {
num_output: 128,
filter_shape: vec![3],
padding: vec![1],
stride: vec![1]
};
let mut conv5_cfg = LayerConfig::new("conv5", LayerType::Convolution(conv5_layer_cfg));
conv5_cfg.add_input("conv4_out");
conv5_cfg.add_output("conv5_preac");
cfg.add_layer(conv5_cfg);
// Layer: conv5/relu
let mut conv5_relu_cfg = LayerConfig::new("conv5/relu", LayerType::ReLU);
conv5_relu_cfg.add_input("conv5_preac");
conv5_relu_cfg.add_output("conv5_out");
cfg.add_layer(conv5_relu_cfg);
// Layer: pool3
let pool3_layer_cfg = PoolingConfig {
mode: PoolingMode::Max,
filter_shape: vec![3],
stride: vec![2],
padding: vec![0], // TODO: make optional
};
let mut pool3_cfg = LayerConfig::new("pool3", LayerType::Pooling(pool3_layer_cfg));
pool3_cfg.add_input("conv5_out");
pool3_cfg.add_output("pool3_out");
cfg.add_layer(pool3_cfg);
// Layer: fc1
let fc1_layer_cfg = LinearConfig { output_size: 2048 };
let mut fc1_cfg = LayerConfig::new("fc1", LayerType::Linear(fc1_layer_cfg));
fc1_cfg.add_input("pool3_out");
fc1_cfg.add_output("fc1_out");
cfg.add_layer(fc1_cfg);
// Layer: fc2
let fc2_layer_cfg = LinearConfig { output_size: 2048 };
let mut fc2_cfg = LayerConfig::new("fc2", LayerType::Linear(fc2_layer_cfg));
fc2_cfg.add_input("fc1_out");
fc2_cfg.add_output("fc2_out");
cfg.add_layer(fc2_cfg);
// Layer: fc3
let fc3_layer_cfg = LinearConfig { output_size: 500 };
let mut fc3_cfg = LayerConfig::new("fc3", LayerType::Linear(fc3_layer_cfg));
fc3_cfg.add_input("fc2_out");
fc3_cfg.add_output("fc3_out");
cfg.add_layer(fc3_cfg);
let backend = cuda_backend();
// let native_backend = native_backend();
let mut network = Layer::from_config(
backend.clone(), &LayerConfig::new("network", LayerType::Sequential(cfg)));
let mut func = || {
let inp = SharedTensor::<f32>::new(backend.device(), &vec![128, 3, 112, 112]).unwrap();
let inp_lock = Arc::new(RwLock::new(inp));
network.forward(&[inp_lock]);
};
{ func(); bench_profile(b, func, 10); }
}
}<|fim▁end|> | // Layer: conv4/relu
let mut conv4_relu_cfg = LayerConfig::new("conv4/relu", LayerType::ReLU);
conv4_relu_cfg.add_input("conv4_preac"); |
<|file_name|>junos_config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage configuration on devices running Juniper JUNOS
description:
- This module provides an implementation for working with the active
configuration running on Juniper JUNOS devices. It provides a set
of arguments for loading configuration, performing rollback operations
and zeroing the active configuration on the device.
extends_documentation_fragment: junos
options:
lines:
description:
- This argument takes a list of C(set) or C(delete) configuration
lines to push into the remote device. Each line must start with
either C(set) or C(delete). This argument is mutually exclusive
with the I(src) argument.
src:
description:
- The I(src) argument provides a path to the configuration file
to load into the remote system. The path can either be a full
system path to the configuration file if the value starts with /
or relative to the root of the implemented role or playbook.
This argument is mutually exclusive with the I(lines) argument.
version_added: "2.2"
src_format:
description:
- The I(src_format) argument specifies the format of the configuration
      found in I(src). If the I(src_format) argument is not provided,
the module will attempt to determine the format of the configuration
file specified in I(src).
choices: ['xml', 'set', 'text', 'json']
version_added: "2.2"
rollback:
description:
- The C(rollback) argument instructs the module to rollback the
current configuration to the identifier specified in the
argument. If the specified rollback identifier does not
exist on the remote device, the module will fail. To rollback
to the most recent commit, set the C(rollback) argument to 0.
zeroize:
description:
- The C(zeroize) argument is used to completely sanitize the
remote device configuration back to initial defaults. This
argument will effectively remove all current configuration
statements on the remote device.
confirm:
description:
- The C(confirm) argument will configure a time out value for
the commit to be confirmed before it is automatically
rolled back. If the C(confirm) argument is set to False, this
argument is silently ignored. If the value for this argument
is set to 0, the commit is confirmed immediately.
default: 0
comment:
description:
- The C(comment) argument specifies a text string to be used
when committing the configuration. If the C(confirm) argument
is set to False, this argument is silently ignored.
default: configured by junos_config
replace:
description:
- The C(replace) argument will instruct the remote device to
replace the current configuration hierarchy with the one specified
in the corresponding hierarchy of the source configuration loaded
from this module.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the I(update) argument to C(replace). This argument
      will be removed in a future release. The C(replace) and C(update)
      arguments are mutually exclusive.
type: bool
default: 'no'
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory or role root directory, if
playbook is part of an ansible role. If the directory does not exist,
it is created.
type: bool
default: 'no'
version_added: "2.2"
update:
description:
- This argument will decide how to load the configuration
      data, particularly when the candidate configuration and loaded
configuration contain conflicting statements. Following are
accepted values.
C(merge) combines the data in the loaded configuration with the
candidate configuration. If statements in the loaded configuration
conflict with statements in the candidate configuration, the loaded
statements replace the candidate ones.
C(override) discards the entire candidate configuration and replaces
it with the loaded configuration.
C(replace) substitutes each hierarchy level in the loaded configuration
for the corresponding level.
default: merge
choices: ['merge', 'override', 'replace']
version_added: "2.3"
confirm_commit:
description:
      - This argument will execute a commit operation on the remote device.
It can be used to confirm a previous commit.
type: bool
default: 'no'
version_added: "2.4"
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Abbreviated commands are NOT idempotent, see
L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
- Loading JSON-formatted configuration I(json) is supported
starting in Junos OS Release 16.1 onwards.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
"""
EXAMPLES = """
- name: load configure file into device
junos_config:
src: srx.cfg
comment: update config
- name: load configure lines into device
junos_config:
lines:
- set interfaces ge-0/0/1 unit 0 description "Test interface"
- set vlans vlan01 description "Test vlan"
comment: update config
- name: rollback the configuration to id 10
junos_config:
rollback: 10
- name: zero out the current configuration
junos_config:
zeroize: yes
- name: confirm a previous commit
junos_config:
confirm_commit: yes
- name: for idempotency, use full-form commands
junos_config:
lines:
# - set int ge-0/0/1 unit 0 desc "Test interface"
- set interfaces ge-0/0/1 unit 0 description "Test interface"
"""
RETURN = """
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: string
sample: /playbooks/ansible/backup/config.2016-07-16@22:28:34
"""
import re
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.netconf import exec_rpc
from ansible.module_utils.network.junos.junos import get_diff, load_config, get_configuration
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
from ansible.module_utils.network.junos.junos import junos_argument_spec, load_configuration, tostring
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native, to_text
try:
from lxml.etree import Element, fromstring
except ImportError:
from xml.etree.ElementTree import Element, fromstring
try:
from lxml.etree import ParseError
except ImportError:
try:
from xml.etree.ElementTree import ParseError
except ImportError:
# for Python < 2.7
from xml.parsers.expat import ExpatError
ParseError = ExpatError
USE_PERSISTENT_CONNECTION = True
DEFAULT_COMMENT = 'configured by junos_config'
def check_args(module, warnings):
if module.params['replace'] is not None:
module.fail_json(msg='argument replace is deprecated, use update')
def zeroize(module):
return exec_rpc(module, tostring(Element('request-system-zeroize')), ignore_warning=False)
def rollback(ele, id='0'):
return get_diff(ele, id)
def guess_format(config):
try:
json.loads(config)
return 'json'
except ValueError:
pass
try:
fromstring(config)
return 'xml'
except ParseError:
pass
if config.startswith('set') or config.startswith('delete'):
return 'set'
return 'text'
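# Drop candidate `delete ...` lines whose matching `set ...` statement is not
# present in the device's running configuration; loading such deletes would
# raise an exception on the device.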
def filter_delete_statements(module, candidate):
reply = get_configuration(module, format='set')
match = reply.find('.//configuration-set')
if match is None:
# Could not find configuration-set in reply, perhaps device does not support it?
return candidate
config = to_native(match.text, encoding='latin-1')
modified_candidate = candidate[:]
for index, line in reversed(list(enumerate(candidate))):
if line.startswith('delete'):
newline = re.sub('^delete', 'set', line)
if newline not in config:
del modified_candidate[index]
return modified_candidate
def configure_device(module, warnings, candidate):
kwargs = {}
config_format = None
if module.params['src']:
config_format = module.params['src_format'] or guess_format(str(candidate))
if config_format == 'set':
kwargs.update({'format': 'text', 'action': 'set'})
else:
kwargs.update({'format': config_format, 'action': module.params['update']})
if isinstance(candidate, string_types):
candidate = candidate.split('\n')
# this is done to filter out `delete ...` statements which map to
# nothing in the config as that will cause an exception to be raised
if any((module.params['lines'], config_format == 'set')):
candidate = filter_delete_statements(module, candidate)
kwargs['format'] = 'text'
kwargs['action'] = 'set'
return load_config(module, candidate, warnings, **kwargs)
def main():
""" main entry point for module execution
"""
argument_spec = dict(
lines=dict(type='list'),
src=dict(type='path'),
src_format=dict(choices=['xml', 'text', 'set', 'json']),
# update operations
update=dict(default='merge', choices=['merge', 'override', 'replace', 'update']),
# deprecated replace in Ansible 2.3
replace=dict(type='bool'),
confirm=dict(default=0, type='int'),
comment=dict(default=DEFAULT_COMMENT),
confirm_commit=dict(type='bool', default=False),
# config operations
backup=dict(type='bool', default=False),
rollback=dict(type='int'),
zeroize=dict(default=False, type='bool'),
)
argument_spec.update(junos_argument_spec)
mutually_exclusive = [('lines', 'src', 'rollback', 'zeroize')]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
candidate = module.params['lines'] or module.params['src']
commit = not module.check_mode
result = {'changed': False, 'warnings': warnings}
if module.params['backup']:
for conf_format in ['set', 'text']:
reply = get_configuration(module, format=conf_format)
match = reply.find('.//configuration-%s' % conf_format)
if match is not None:
break
else:
module.fail_json(msg='unable to retrieve device configuration')
result['__backup__'] = match.text.strip()
rollback_id = module.params['rollback']
if rollback_id:
diff = rollback(module, rollback_id)
if commit:
kwargs = {
'comment': module.params['comment']
}
with locked_config(module):
load_configuration(module, rollback=rollback_id)
commit_configuration(module, **kwargs)
if module._diff:
result['diff'] = {'prepared': diff}
result['changed'] = True
elif module.params['zeroize']:
if commit:
zeroize(module)
result['changed'] = True
<|fim▁hole|> if candidate:
with locked_config(module):
diff = configure_device(module, warnings, candidate)
if diff:
if commit:
kwargs = {
'comment': module.params['comment']
}
confirm = module.params['confirm']
if confirm > 0:
kwargs.update({
'confirm': True,
'confirm_timeout': to_text(confirm, errors='surrogate_then_replace')
})
commit_configuration(module, **kwargs)
else:
discard_changes(module)
result['changed'] = True
if module._diff:
result['diff'] = {'prepared': diff}
elif module.params['confirm_commit']:
with locked_config(module):
# confirm a previous commit
commit_configuration(module)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | else: |
<|file_name|>cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py<|end_file_name|><|fim▁begin|>_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py'
model = dict(
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=8,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=False),
style='pytorch',
init_cfg=dict(
type='Pretrained',
checkpoint='open-mmlab://detectron2/resnext101_32x8d')))
# ResNeXt-101-32x8d model trained with Caffe2 at FB,
# so the mean and std need to be changed.
img_norm_cfg = dict(
mean=[103.530, 116.280, 123.675],
std=[57.375, 57.120, 58.395],
to_rgb=False)
# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)],
# multiscale_mode='range'
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(
type='Resize',
img_scale=[(1333, 640), (1333, 800)],
multiscale_mode='range',
keep_ratio=True),<|fim▁hole|> dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
train=dict(dataset=dict(pipeline=train_pipeline)),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))<|fim▁end|> | dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32), |
<|file_name|>issue5358.go<|end_file_name|><|fim▁begin|>// errorcheck
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// issue 5358: incorrect error message when using f(g()) form on ... args.
package main
func f(x int, y ...int) {}
func g() (int, []int)
func main() {
f(g()) // ERROR "as type int in|incompatible type"<|fim▁hole|><|fim▁end|> | } |
<|file_name|>ambiguity.rs<|end_file_name|><|fim▁begin|>// edition:2018
#![allow(non_camel_case_types)]
use std::io;
//~^ ERROR `std` is ambiguous
<|fim▁hole|> pub struct io;
}
fn main() {}<|fim▁end|> | mod std { |
<|file_name|>PluginException.java<|end_file_name|><|fim▁begin|>package org.eso.ias.plugin;
/**
* The exception returned by the Plugin
* @author acaproni
*
*/
public class PluginException extends Exception {
public PluginException() {
}
public PluginException(String message) {
super(message);
}
public PluginException(Throwable cause) {
super(cause);<|fim▁hole|> public PluginException(String message, Throwable cause) {
super(message, cause);
}
public PluginException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}<|fim▁end|> | }
|
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>//jshint strict: false
module.exports = function(config) {
config.set({
basePath: './app',
files: [
'bower_components/angular/angular.js',
'bower_components/angular-route/angular-route.js',
'bower_components/angular-mocks/angular-mocks.js',
'*.js'<|fim▁hole|>
frameworks: ['jasmine'],
browsers: ['Chrome'],
plugins: [
'karma-chrome-launcher',
'karma-firefox-launcher',
'karma-jasmine',
'karma-junit-reporter'
],
junitReporter: {
outputFile: 'test_out/unit.xml',
suite: 'unit'
}
});
};<|fim▁end|> | ],
autoWatch: true, |
<|file_name|>sti_incremental.go<|end_file_name|><|fim▁begin|>package builds
import (
"fmt"
"path/filepath"
"strings"
"time"
g "github.com/onsi/ginkgo"
o "github.com/onsi/gomega"
"k8s.io/kubernetes/test/e2e"
exutil "github.com/openshift/origin/test/extended/util"
)
var _ = g.Describe("default: S2I incremental build with push and pull to authenticated registry", func() {
defer g.GinkgoRecover()
var (
templateFixture = exutil.FixturePath("fixtures", "incremental-auth-build.json")
oc = exutil.NewCLI("build-sti-env", exutil.KubeConfigPath())
)
g.JustBeforeEach(func() {
g.By("waiting for builder service account")
err := exutil.WaitForBuilderAccount(oc.KubeREST().ServiceAccounts(oc.Namespace()))
o.Expect(err).NotTo(o.HaveOccurred())
})
g.Describe("Building from a template", func() {
g.It(fmt.Sprintf("should create a build from %q template and run it", templateFixture), func() {
oc.SetOutputDir(exutil.TestContext.OutputDir)
g.By(fmt.Sprintf("calling oc new-app -f %q", templateFixture))
err := oc.Run("new-app").Args("-f", templateFixture).Execute()
o.Expect(err).NotTo(o.HaveOccurred())
g.By("starting a test build")
buildName, err := oc.Run("start-build").Args("initial-build").Output()
o.Expect(err).NotTo(o.HaveOccurred())
g.By("expecting the build is in Complete phase")
err = exutil.WaitForABuild(oc.REST().Builds(oc.Namespace()), buildName, exutil.CheckBuildSuccessFunc, exutil.CheckBuildFailedFunc)<|fim▁hole|> buildName, err = oc.Run("start-build").Args("internal-build").Output()
o.Expect(err).NotTo(o.HaveOccurred())
g.By("expecting the build is in Complete phase")
err = exutil.WaitForABuild(oc.REST().Builds(oc.Namespace()), buildName, exutil.CheckBuildSuccessFunc, exutil.CheckBuildFailedFunc)
o.Expect(err).NotTo(o.HaveOccurred())
g.By("getting the Docker image reference from ImageStream")
imageName, err := exutil.GetDockerImageReference(oc.REST().ImageStreams(oc.Namespace()), "internal-image", "latest")
o.Expect(err).NotTo(o.HaveOccurred())
g.By("writing the pod definition to a file")
outputPath := filepath.Join(exutil.TestContext.OutputDir, oc.Namespace()+"-sample-pod.json")
pod := exutil.CreatePodForImage(imageName)
err = exutil.WriteObjectToFile(pod, outputPath)
o.Expect(err).NotTo(o.HaveOccurred())
g.By(fmt.Sprintf("calling oc create -f %q", outputPath))
err = oc.Run("create").Args("-f", outputPath).Execute()
o.Expect(err).NotTo(o.HaveOccurred())
g.By("expecting the pod to be running")
err = oc.KubeFramework().WaitForPodRunning(pod.Name)
o.Expect(err).NotTo(o.HaveOccurred())
// even though the pod is running, the app isn't always started
// so wait until webrick output is complete before curling.
logs := ""
count := 0
			for !strings.Contains(logs, "8080") && count < 10 {
logs, _ = oc.Run("logs").Args(pod.Name).Output()
time.Sleep(time.Second)
count++
}
g.By("expecting the pod container has saved artifacts")
out, err := oc.Run("exec").Args("-p", pod.Name, "--", "curl", "http://0.0.0.0:8080").Output()
o.Expect(err).NotTo(o.HaveOccurred())
if !strings.Contains(out, "artifacts exist") {
logs, _ = oc.Run("logs").Args(pod.Name).Output()
e2e.Failf("Pod %q does not contain expected artifacts: %q\n%q", pod.Name, out, logs)
}
})
})
})<|fim▁end|> | o.Expect(err).NotTo(o.HaveOccurred())
g.By("starting a test build using the image produced by the last build") |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess<|fim▁hole|>from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
comand substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)<|fim▁end|> | |
<|file_name|>selection.rs<|end_file_name|><|fim▁begin|>use crate::session::{Session, State, AES_IV_LEN};
use log::{debug, error};
use protocol::messages::connection::ServerSelectionMessage;
impl Session {
pub async fn select_server(&mut self, server_id: i16) -> std::io::Result<u8> {
use protocol::constants::server_connection_error;
use protocol::constants::server_status;
use protocol::messages::connection::SelectedServerDataMessage;
let (aes_key, ticket) = match &self.state {
State::Logged { aes_key, ticket } => (aes_key, ticket),
_ => return Ok(0),
};
let gs = match self.server.game_servers.get(&server_id) {
Some(gs) => gs,
None => return Ok(server_connection_error::NO_REASON),
};
if gs.status() != server_status::ONLINE {
return Ok(server_connection_error::DUE_TO_STATUS);
}
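        // Encrypt the login ticket with AES-256-CBC, using the first
        // AES_IV_LEN bytes of the shared key as the IV.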
let encrypted = match openssl::symm::encrypt(
openssl::symm::Cipher::aes_256_cbc(),
aes_key,
Some(&aes_key[..AES_IV_LEN]),
ticket.as_bytes(),
) {
Ok(encrypted) => encrypted,
Err(err) => {
error!("encryption error: {}", err);
return Ok(server_connection_error::NO_REASON);
}
};
debug!("server selected: {}, ticket = {}", gs.id(), ticket);
let ports = &[gs.port() as u32];
self.stream.write(SelectedServerDataMessage {
server_id: gs.id() as _,
address: gs.host(),
ports: std::borrow::Cow::Borrowed(ports),
can_create_new_character: true,
// Just convert from an `&[u8]` to an `&[i8]`.
ticket: unsafe {
std::slice::from_raw_parts(encrypted.as_ptr() as *const i8, encrypted.len())
},
})?;
self.stream.flush().await?;
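        // The selection data has been delivered, so the login connection can
        // be closed; the client is expected to reconnect to the game server.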
self.stream.get_ref().shutdown(std::net::Shutdown::Both)?;
Ok(0)
}
pub async fn handle_server_selection<'a>(
&'a mut self,
msg: ServerSelectionMessage<'a>,
) -> std::io::Result<()> {
use protocol::messages::connection::SelectedServerRefusedMessage;
let reason = self.select_server(msg.server_id as _).await?;
if reason != 0 {
self.stream
.send(SelectedServerRefusedMessage {
server_id: msg.server_id,
error: reason,
server_status: self
.server
.game_servers
.get(&(msg.server_id as _))<|fim▁hole|> .unwrap_or(0),
_phantom: std::marker::PhantomData,
})
.await?;
}
Ok(())
}
}<|fim▁end|> | .map(|gs| gs.status()) |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Authors of Cilium
//<|fim▁hole|>// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +groupName=maps
package ipcache<|fim▁end|> | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. |
<|file_name|>CallbackGenerator.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2003,2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> * limitations under the License.
*/
package net.sf.cglib.proxy;
import java.util.List;
import net.sf.cglib.core.*;
interface CallbackGenerator
{
void generate(ClassEmitter ce, Context context, List methods) throws Exception;
void generateStatic(CodeEmitter e, Context context, List methods) throws Exception;
interface Context
{
ClassLoader getClassLoader();
CodeEmitter beginMethod(ClassEmitter ce, MethodInfo method);
int getOriginalModifiers(MethodInfo method);
int getIndex(MethodInfo method);
void emitCallback(CodeEmitter ce, int index);
Signature getImplSignature(MethodInfo method);
void emitInvoke(CodeEmitter e, MethodInfo method);
}
}<|fim▁end|> | * See the License for the specific language governing permissions and |
<|file_name|>jsx.js<|end_file_name|><|fim▁begin|>function foo() {
throw <Bar />;
}
function foo() {<|fim▁hole|>
function foo() {
throw <Bar baz={baz} />;
}
function foo() {
throw <Bar baz={baz}>foo</Bar>;
}
function foo() {
throw <></>;
}
function foo() {
throw <>foo</>;
}<|fim▁end|> | throw <Bar>baz</Bar>;
} |
<|file_name|>populate.py<|end_file_name|><|fim▁begin|># The MIT License (MIT)
# Copyright (c) 2014-2015 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# === EDIT ====================================================================
SERVER = 'http://localhost:12345'
ROOT_PASSWORD = 'password'
# admin usernname and password
ADMIN = 'administrator'
ADMIN_PASSWORD = 'password'
# template of path to video files (relative to /media)
URL = 'REPERE/phase2/test/{name}'
# =============================================================================
from camomile import Camomile
client = Camomile(SERVER)
# login as root
client.login('root', ROOT_PASSWORD)
# create new admin user
admin = client.createUser(ADMIN, ADMIN_PASSWORD, role='admin', returns_id=True)
# login as admin
client.login(ADMIN, ADMIN_PASSWORD)<|fim▁hole|>corpus = client.createCorpus('REPERE', returns_id=True)
# add media to corpus and keep track of their IDs
mediaID = {}
with open('media.lst', 'r') as f:
for medium in f:
# remove trailing "\n"
name = medium.strip()
# create medium
mediaID[name] = client.createMedium(
corpus, name, url=URL.format(name=name), returns_id=True)
# parse sample annotation files
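# (each line: <medium name> <start time> <end time> <skipped field> <label>)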
def parse(path, mediaID):
annotations = []
with open(path, 'r') as f:
for line in f:
# remove trailing "\n" and split on spaces
tokens = line.strip().split()
# get medium ID
mediumName = tokens[0]
id_medium = mediaID[mediumName]
# get fragment start and end times
startTime = float(tokens[1])
endTime = float(tokens[2])
# get data
label = tokens[4]
annotation = {'fragment': {'start': startTime, 'end': endTime},
'data': label,
'id_medium': id_medium}
# append annotations to the list
annotations.append(annotation)
return annotations
# create reference layer
annotations = parse('reference.repere', mediaID)
reference = client.createLayer(
corpus, 'reference',
fragment_type='segment',
data_type='label',
annotations=annotations,
returns_id=True)
# create hypothesis layers
for i in [2]:
path = 'hypothesis{i}.repere'.format(i=i)
annotations = parse(path, mediaID)
hypothesis = client.createLayer(
corpus,
'hypothesis {i}'.format(i=i),
fragment_type='segment',
data_type='label',
annotations=annotations,
returns_id=True)<|fim▁end|> |
# create new corpus |
<|file_name|>codegen.rs<|end_file_name|><|fim▁begin|>#![feature(plugin_registrar, rustc_private, slice_patterns, plugin)]
#![plugin(quasi_macros)]
extern crate rustc_plugin;
extern crate syntax;
#[macro_use] extern crate lazy_static;
extern crate quasi;
use std::collections::HashMap;
use std::sync::Mutex;
use syntax::ast::{MetaItem, Ident, ItemImpl, ImplItemKind, TokenTree, Arm, Ty_};
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::Token;
use syntax::ast_util::path_to_ident;
lazy_static! {
static ref HANDLERS: Mutex<HashMap<String, Vec<(Ident, Ident)>>> = Mutex::new(HashMap::new());
}
pub fn register_handlers(cx: &mut ExtCtxt, _: Span, _: &MetaItem, annotatable: &Annotatable,
_: &mut FnMut(Annotatable)) {
if let Annotatable::Item(ref item) = *annotatable {
if let ItemImpl(_, _, _, _, _, ref list) = item.node {
for impl_item in list {
if let ImplItemKind::Method(ref sig, _) = impl_item.node {
let mut table = HANDLERS.lock().unwrap();
if let Ty_::TyPath(_, ref path) = sig.decl.inputs[2].ty.node {
let base_mod = format!("{:?}", cx.mod_path()[2]);
let handlers = table.entry(base_mod).or_insert(Vec::new());
handlers.push((path_to_ident(path).unwrap(), impl_item.ident));
}
}
}
}
}<|fim▁hole|>fn handle(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) -> Box<MacResult + 'static> {
let (session, ch_ref, id, buffer) = match args {
[TokenTree::Token(_, Token::Ident(session, _)),
TokenTree::Token(_, Token::Comma),
TokenTree::Token(_, Token::Ident(ch_ref, _)),
TokenTree::Token(_, Token::Comma),
TokenTree::Token(_, Token::Ident(id, _)),
TokenTree::Token(_, Token::Comma),
TokenTree::Token(_, Token::Ident(buffer, _))] => (session, ch_ref, id, buffer),
_ => {
cx.span_err(sp, "arguments should be four comma separated identifiers");
return DummyResult::any(sp);
}
};
let base_mod = format!("{:?}", cx.mod_path()[2]);
let handlers = HANDLERS.lock().unwrap();
let mut arms = Vec::new();
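    // One match arm per registered handler: guard on the message id,
    // deserialize the payload from the buffer, then dispatch to the
    // corresponding session method.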
for hdl in &*handlers.get(&base_mod).unwrap() {
let name = hdl.0;
let handler = hdl.1;
let guard = quote_expr!(cx, $id == $name::id());
let body = quote_expr!(
cx,
$session.$handler($ch_ref, try!($name::deserialize(&mut $buffer)))
);
arms.push(Arm {
attrs: Vec::new(),
pats: vec![cx.pat_wild(sp)],
guard: Some(guard),
body: body,
});
}
let unit = quote_expr!(cx, Ok(()));
arms.push(Arm {
attrs: Vec::new(),
pats: vec![cx.pat_wild(sp)],
guard: None,
body: unit,
});
MacEager::expr(cx.expr_match(sp, cx.expr_ident(sp, id), arms))
}
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut rustc_plugin::Registry) {
use syntax::parse::token::intern;
use syntax::ext::base::MultiDecorator;
reg.register_syntax_extension(
intern("register_handlers"),
MultiDecorator(Box::new(register_handlers))
);
reg.register_macro("handle", handle);
}<|fim▁end|> | }
|
<|file_name|>Database.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
__author__ = 'Sean Yu'
__mail__ = '[email protected]'
import sqlite3
def CreateTable(dbname, table,table_define):
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute("""create table %s ( %s )"""%(table,table_define))
db.commit()
cu.close()
db.close()
def InsertRecord(dbname, table,record):
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''insert into %s values(%s)'''%(table,record))
db.commit()
cu.close()
db.close()
def UpdateRecord(dbname,table, action, condition ):
#cu.execute("update tasks set status='compleded' where id = 0")
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''update %s set %s where %s'''%(table,action,condition))
db.commit()
cu.close()
db.close()
def RemoveRecord(dbname,table, condition ):
#cu.execute("update tasks set status='compleded' where id = 0")
db = sqlite3.connect(dbname)
cu=db.cursor()
cu.execute('''delete from %s where %s'''%(table,condition))
db.commit()
cu.close()
db.close()
def FetchRecord(dbname,table, condition=''):
db = sqlite3.connect(dbname)
cu=db.cursor()
if condition!='':
condition="where %s"%condition
records =cu.execute('''select * from %s %s'''%(table,condition))
result =[]
for i in records:
i= list(i)
result.append(i)
db.commit()
cu.close()
db.close()
return result
def FetchOne(dbname,table, condition=''):
db = sqlite3.connect(dbname)
cu=db.cursor()
if condition!='':
condition="where %s"%condition
records =cu.execute('''select * from %s %s'''%(table,condition))
records =cu.fetchone()
if records:
result =list(records)
else:<|fim▁hole|> db.commit()
cu.close()
db.close()
return result<|fim▁end|> | result=None |
<|file_name|>make_interpolator.cc<|end_file_name|><|fim▁begin|>#include "estimation/sensors/make_interpolator.hh"
namespace estimation {
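// Build time interpolators over timed IMU samples: accelerometer readings are
// corrected through the IMU model before becoming control points, while gyro
// readings are used as-is.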
geometry::spatial::TimeInterpolator make_accel_interpolator(
const std::vector<TimedMeasurement<jet_filter::AccelMeasurement>>&
accel_meas,
const ImuModel& imu_model) {
std::vector<geometry::spatial::TimeControlPoint> points;
for (const auto& accel : accel_meas) {
const jcc::Vec3 corrected_accel =
imu_model.correct_measured_accel(accel.measurement.observed_acceleration);
points.push_back({accel.timestamp, corrected_accel});
}
const geometry::spatial::TimeInterpolator interp(points);
return interp;
}
geometry::spatial::TimeInterpolator make_gyro_interpolator(
const std::vector<TimedMeasurement<jet_filter::GyroMeasurement>>&
gyro_meas) {
std::vector<geometry::spatial::TimeControlPoint> points;
for (const auto& gyro : gyro_meas) {
points.push_back({gyro.timestamp, gyro.measurement.observed_w});
}
const geometry::spatial::TimeInterpolator interp(points);
return interp;<|fim▁hole|>}
} // namespace estimation<|fim▁end|> | |
<|file_name|>console.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::io::{self, Write};
use std::thread::{self, JoinHandle};
use std::time::Duration;
use console::style;
use humantime::format_duration;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use super::{
BeginTaskEvent, ChecksumErrorEvent, DeleteEvent, EndBuildEvent,
EndTaskEvent, Event, EventHandler, Timestamp,
};
#[derive(Clone)]
struct TaskState {
/// Time the task started.
start: Timestamp,
/// Progress bar associated with this task.
pb: ProgressBar,
/// String of the task being executed.
name: String,
/// Buffer of output for the task.
buf: Vec<u8>,
}
impl TaskState {
pub fn new(start: Timestamp, pb: ProgressBar) -> Self {
TaskState {
start,
pb,
name: String::new(),
buf: Vec::new(),
}
}
}
/// Calculates the number of spaces a number takes up. Useful for padding
/// decimal numbers.
fn num_width(mut max_value: usize) -> usize {
let mut count = 0;
while max_value > 0 {
max_value /= 10;
count += 1;
}
count
}
/// "Inner" that lives as long as a single build. This is created and destroyed
/// for `BeginBuildEvent`s and `EndBuildEvent`s respectively.
struct Inner {
// Vector of in-flight tasks.
tasks: Vec<TaskState>,
// Time at which the build started. This is used to calculate the duration
// of the build when it finishes.
start_time: Timestamp,
// Progress bar thread.
pb_thread: JoinHandle<Result<(), io::Error>>,
// Continuously updates each of the progress bars.
tick_thread: JoinHandle<()>,
// Name of the build.
name: String,
}
impl Inner {
pub fn new(threads: usize, name: String, timestamp: Timestamp) -> Self {
// Give a bogus start time. This will be changed as we receive events.
let mut tasks = Vec::with_capacity(threads);
let mut bars = Vec::with_capacity(threads);
let progress = MultiProgress::new();
for i in 0..threads {
let pb = progress.add(ProgressBar::new_spinner());
pb.set_style(Console::style_idle());
pb.set_prefix(&format!(
"[{:>width$}]",
i + 1,
width = num_width(threads)
));
pb.set_message(&style("Idle").dim().to_string());
// Clone the progress bar handle so we can update them later.
bars.push(pb.clone());
tasks.push(TaskState::new(timestamp, pb));
}
let pb_thread = thread::spawn(move || progress.join_and_clear());
let tick_thread = thread::spawn(move || loop {
thread::sleep(Duration::from_millis(200));
for pb in &bars {
if pb.is_finished() {
return;
}
pb.tick();
}
});
Inner {
tasks,
start_time: timestamp,
pb_thread,
tick_thread,
name,
}
}
pub fn finish(mut self) -> Result<(), io::Error> {
for task in self.tasks.iter_mut() {
task.pb
.finish_with_message(&style("Done").dim().to_string());
}
self.tick_thread.join().unwrap();
self.pb_thread.join().unwrap()?;
Ok(())
}
pub fn end_build(
self,
timestamp: Timestamp,
event: EndBuildEvent,
) -> Result<(), io::Error> {
let duration = (timestamp - self.start_time).to_std().unwrap();
let duration = format_duration(duration);
let msg = match event.result {
Ok(()) => format!(
"{} {} in {}",
style("Finished").bold().green(),
style(&self.name).yellow(),
style(duration).cyan(),
),
Err(err) => format!(
"{} {} after {}: {}",
style("Failed").bold().red(),
style(&self.name).yellow(),
style(duration).cyan(),
err
),
};
for task in &self.tasks {
task.pb.set_style(Console::style_idle());
}
self.tasks[0].pb.println(&msg);
self.finish()
}
pub fn begin_task(
&mut self,
timestamp: Timestamp,
event: BeginTaskEvent,
) -> Result<(), io::Error> {
let mut task = &mut self.tasks[event.id];
task.start = timestamp;
let name = event.task.to_string();
task.pb.reset_elapsed();
task.pb.set_style(Console::style_running());
task.pb.set_message(&name);
task.name = name;
Ok(())
}
pub fn end_task(
&mut self,
timestamp: Timestamp,
event: EndTaskEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
let duration = (timestamp - task.start).to_std().unwrap();
let duration = format_duration(duration);
if let Err(err) = event.result {
writeln!(
&mut task.buf,
"{} after {}: {}",
style("Task failed").bold().red(),
style(duration).cyan(),
style(err).red(),
)?;
task.pb.println(format!(
"> {}\n{}",
style(&task.name).bold().red(),
String::from_utf8_lossy(&task.buf),
));
}
task.buf.clear();
task.pb.set_style(Console::style_idle());
Ok(())
}
pub fn delete(
&mut self,
_timestamp: Timestamp,
event: DeleteEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.set_style(Console::style_running());
task.pb.set_message(&format!("Deleted {}", event.resource));
if let Err(err) = event.result {
task.pb.println(format!(
"{} to delete `{}`: {}",
style("Failed").bold().red(),
style(event.resource).yellow(),
err
));
}
Ok(())
}
pub fn checksum_error(
&mut self,
_timestamp: Timestamp,
event: ChecksumErrorEvent,
) -> Result<(), io::Error> {
let task = &mut self.tasks[event.id];
task.pb.println(format!(
"Failed to compute checksum for {} ({})",
event.resource, event.error
));
Ok(())
}
}
/// Logs events to a console.
#[derive(Default)]
pub struct Console {
// Delay creation of the inner state until we receive our first BeginBuild
// event. This lets us handle any number of threads.
inner: Option<Inner>,
}
impl Console {
fn style_idle() -> ProgressStyle {
ProgressStyle::default_spinner().template("{prefix:.bold.dim} 🚶")
}
fn style_running() -> ProgressStyle {
ProgressStyle::default_spinner().template(&format!(
"{{prefix:.bold.dim}} 🏃 {} {{wide_msg}}",
style("{elapsed}").dim()
))
}
pub fn new() -> Self {
// Delay initialization until we receive a BeginBuild event.
Self::default()
}
}
impl EventHandler for Console {
type Error = io::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(event) => {
if self.inner.is_none() {
self.inner =
Some(Inner::new(event.threads, event.name, timestamp));
}
}
Event::EndBuild(event) => {
if let Some(inner) = self.inner.take() {
inner.end_build(timestamp, event)?;
}
}
Event::BeginTask(event) => {
if let Some(inner) = &mut self.inner {
inner.begin_task(timestamp, event)?;
}
}
Event::TaskOutput(event) => {
if let Some(inner) = &mut self.inner {
inner.tasks[event.id].buf.extend(event.chunk);
}
}
Event::EndTask(event) => {
if let Some(inner) = &mut self.inner {
inner.end_task(timestamp, event)?;
}
}
Event::Delete(event) => {
if let Some(inner) = &mut self.inner {
inner.delete(timestamp, event)?;
}
}
Event::ChecksumError(event) => {
if let Some(inner) = &mut self.inner {
inner.checksum_error(timestamp, event)?;
}
}
}<|fim▁hole|> }
fn finish(&mut self) -> Result<(), Self::Error> {
if let Some(inner) = self.inner.take() {
inner.finish()?;
}
Ok(())
}
}<|fim▁end|> |
Ok(()) |
<|file_name|>test_security_group.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># License for the specific language governing permissions and limitations
# under the License.
import uuid
from mock import Mock
from mock import patch
from novaclient import exceptions as nova_exceptions
from trove.common import cfg
from trove.common import exception
import trove.common.remote
from trove.extensions.security_group import models as sec_mod
from trove.instance import models as inst_model
from trove.tests.fakes import nova
from trove.tests.unittests import trove_testtools
CONF = cfg.CONF
"""
Unit tests for testing the exceptions raised by Security Groups
"""
class Security_Group_Exceptions_Test(trove_testtools.TestCase):
def setUp(self):
super(Security_Group_Exceptions_Test, self).setUp()
self.createNovaClient = trove.common.remote.create_nova_client
self.context = trove_testtools.TroveTestContext(self)
self.FakeClient = nova.fake_create_nova_client(self.context)
fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
nova_exceptions.ClientException("Test")))
self.FakeClient.security_groups.create = fException
self.FakeClient.security_groups.delete = fException
self.FakeClient.security_group_rules.create = fException
self.FakeClient.security_group_rules.delete = fException
trove.common.remote.create_nova_client = (
lambda c, r: self._return_mocked_nova_client(c))
def tearDown(self):
super(Security_Group_Exceptions_Test, self).tearDown()
trove.common.remote.create_nova_client = self.createNovaClient
def _return_mocked_nova_client(self, context):
return self.FakeClient
def _raise(self, ex):
raise ex
@patch('trove.network.nova.LOG')
def test_failed_to_create_security_group(self, mock_logging):
self.assertRaises(exception.SecurityGroupCreationError,
sec_mod.RemoteSecurityGroup.create,
"TestName",
"TestDescription",
self.context,
region_name=CONF.os_region_name)
@patch('trove.network.nova.LOG')
def test_failed_to_delete_security_group(self, mock_logging):
self.assertRaises(exception.SecurityGroupDeletionError,
sec_mod.RemoteSecurityGroup.delete,
1, self.context,
region_name=CONF.os_region_name)
@patch('trove.network.nova.LOG')
def test_failed_to_create_security_group_rule(self, mock_logging):
self.assertRaises(exception.SecurityGroupRuleCreationError,
sec_mod.RemoteSecurityGroup.add_rule,
1, "tcp", 3306, 3306, "0.0.0.0/0", self.context,
region_name=CONF.os_region_name)
@patch('trove.network.nova.LOG')
def test_failed_to_delete_security_group_rule(self, mock_logging):
self.assertRaises(exception.SecurityGroupRuleDeletionError,
sec_mod.RemoteSecurityGroup.delete_rule,
1, self.context,
region_name=CONF.os_region_name)
class fake_RemoteSecGr(object):
def data(self):
self.id = uuid.uuid4()
return {'id': self.id}
def delete(self, context, region_name):
pass
class fake_SecGr_Association(object):
def get_security_group(self):
return fake_RemoteSecGr()
def delete(self):
pass
class SecurityGroupDeleteTest(trove_testtools.TestCase):
def setUp(self):
super(SecurityGroupDeleteTest, self).setUp()
self.inst_model_conf_patch = patch.object(inst_model, 'CONF')
self.inst_model_conf_mock = self.inst_model_conf_patch.start()
self.addCleanup(self.inst_model_conf_patch.stop)
self.context = trove_testtools.TroveTestContext(self)
self.original_find_by = (
sec_mod.SecurityGroupInstanceAssociation.find_by)
self.original_delete = sec_mod.SecurityGroupInstanceAssociation.delete
self.fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
exception.ModelNotFoundError()))
def tearDown(self):
super(SecurityGroupDeleteTest, self).tearDown()
(sec_mod.SecurityGroupInstanceAssociation.
find_by) = self.original_find_by
(sec_mod.SecurityGroupInstanceAssociation.
delete) = self.original_delete
def _raise(self, ex):
raise ex
def test_failed_to_get_assoc_on_delete(self):
sec_mod.SecurityGroupInstanceAssociation.find_by = self.fException
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
uuid.uuid4(), self.context, CONF.os_region_name))
def test_get_security_group_from_assoc_with_db_exception(self):
fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
nova_exceptions.ClientException('TEST')))
i_id = uuid.uuid4()
class new_fake_RemoteSecGrAssoc(object):
def get_security_group(self):
return None
def delete(self):
return fException
sec_mod.SecurityGroupInstanceAssociation.find_by = Mock(
return_value=new_fake_RemoteSecGrAssoc())
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
i_id, self.context, CONF.os_region_name))
def test_delete_secgr_assoc_with_db_exception(self):
i_id = uuid.uuid4()
sec_mod.SecurityGroupInstanceAssociation.find_by = Mock(
return_value=fake_SecGr_Association())
sec_mod.SecurityGroupInstanceAssociation.delete = self.fException
self.assertIsNotNone(sec_mod.SecurityGroupInstanceAssociation.find_by(
i_id, deleted=False).get_security_group())
self.assertTrue(hasattr(sec_mod.SecurityGroupInstanceAssociation.
find_by(i_id, deleted=False).
get_security_group(), 'delete'))
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
i_id, self.context, CONF.os_region_name))<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
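# Reviewer note (illustrative, not part of the original test): `Mock` also
# accepts an exception class or instance directly as `side_effect`, so the
# `lambda ... self._raise(...)` indirection used above can be written as:
#
#     fException = Mock(side_effect=nova_exceptions.ClientException("Test"))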
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::fmt;
use std::error::Error;
/// Result type for use with [`EmuleadError`].
pub type EmuleadResult<T> = Result<T, EmuleadError>;
/// Error type using for the project errors.
#[derive(Debug)]
pub enum EmuleadError {
/// IO Error
Io(io::Error),
/// Rotate bytes error used in [`network::rotate_bytes_right`]
RotateBytes(usize)
}
impl fmt::Display for EmuleadError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&EmuleadError::Io(ref err) => write!(f, "IO error: {}", err),
err => write!(f, "Error: {}", err.description())
}
}
}
impl Error for EmuleadError {
fn description(&self) -> &str
{
match *self {
EmuleadError::Io(ref err) => err.description(),
EmuleadError::RotateBytes(_) => "Rotate shift must be in 0-8 bits."
}
}
fn cause(&self) -> Option<&Error> {
match *self {
EmuleadError::Io(ref err) => Some(err),
_ => None
}
}
}
impl From<io::Error> for EmuleadError {
fn from(err: io::Error) -> EmuleadError {<|fim▁hole|> EmuleadError::Io(err)
}
}<|fim▁end|> | |
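// Illustrative sketch (not part of the original module): because of the
// `From<io::Error>` impl above, the `?` operator converts `io::Error` into
// `EmuleadError` automatically in functions returning `EmuleadResult`:
//
//     fn read_all(path: &str) -> EmuleadResult<Vec<u8>> {
//         use std::fs::File;
//         use std::io::Read;
//         let mut buf = Vec::new();
//         File::open(path)?.read_to_end(&mut buf)?; // io::Error -> EmuleadError
//         Ok(buf)
//     }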
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate plugin_system;
use plugin_system::{M, Pluggable, Plugin1Options, Plugins};
use std::thread;
use std::time::Duration;
fn main() {
let m = M::new();
let mut pl_app = Pluggable::new(m);
pl_app.load_plugin(Plugins::Plugin1(Plugin1Options::new(1)));
pl_app.load_plugin(Plugins::Plugin2);
pl_app.run_app();
pl_app.run_plugins();
thread::sleep(Duration::from_millis(5));<|fim▁hole|><|fim▁end|> | pl_app.stop();
} |
<|file_name|>bg.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'iframe', 'bg', {
border: 'Показва рамка на карето',
noUrl: 'Моля въведете URL за iFrame',
scrolling: 'Активира прелистване',
title: 'IFrame настройки',
<|fim▁hole|><|fim▁end|> | toolbar: 'IFrame'
} ); |
<|file_name|>allocator_stats.hpp<|end_file_name|><|fim▁begin|>// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_CORE_ALLOCATOR_STATS_HPP
#define OPENCV_CORE_ALLOCATOR_STATS_HPP
#include "../cvdef.h"
namespace cv { namespace utils {
class AllocatorStatisticsInterface
{
protected:
AllocatorStatisticsInterface() {}
virtual ~AllocatorStatisticsInterface() {}
public:<|fim▁hole|> virtual uint64_t getCurrentUsage() const = 0;
virtual uint64_t getTotalUsage() const = 0;
virtual uint64_t getNumberOfAllocations() const = 0;
virtual uint64_t getPeakUsage() const = 0;
/** set peak usage = current usage */
virtual void resetPeakUsage() = 0;
};
}} // namespace
#endif // OPENCV_CORE_ALLOCATOR_STATS_HPP<|fim▁end|> | |
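// Illustrative sketch (not part of OpenCV; needs C++11 and is not
// thread-safe -- a real allocator would keep these counters atomic):
// a minimal concrete implementation of the interface above.
class SimpleAllocatorStatistics : public cv::utils::AllocatorStatisticsInterface
{
public:
    // Hypothetical hooks an allocator would call on each alloc/free.
    void onAllocate(uint64_t sz)
    {
        curr_ += sz; total_ += sz; ++allocations_;
        if (curr_ > peak_) peak_ = curr_;
    }
    void onFree(uint64_t sz) { curr_ -= sz; }

    uint64_t getCurrentUsage() const override { return curr_; }
    uint64_t getTotalUsage() const override { return total_; }
    uint64_t getNumberOfAllocations() const override { return allocations_; }
    uint64_t getPeakUsage() const override { return peak_; }
    void resetPeakUsage() override { peak_ = curr_; }

private:
    uint64_t curr_ = 0, total_ = 0, allocations_ = 0, peak_ = 0;
};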
<|file_name|>Recognizer.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
# need forward declaration
RecognitionException = None
class Recognizer(object):
tokenTypeMapCache = dict()
ruleIndexMapCache = dict()
def __init__(self):
self._listeners = [ ConsoleErrorListener.INSTANCE ]
self._interp = None
self._stateNumber = -1
def extractVersion(self, version):
pos = version.find(".")
major = version[0:pos]
version = version[pos+1:]
pos = version.find(".")
if pos==-1:
pos = version.find("-")
if pos==-1:
pos = len(version)
minor = version[0:pos]
return major, minor
def checkVersion(self, toolVersion):
runtimeVersion = "4.6.1"
rvmajor, rvminor = self.extractVersion(runtimeVersion)
tvmajor, tvminor = self.extractVersion(toolVersion)
if rvmajor!=tvmajor or rvminor!=tvminor:
print("ANTLR runtime and generated code versions disagree: "+runtimeVersion+"!="+toolVersion)
def addErrorListener(self, listener):
self._listeners.append(listener)
def removeErrorListener(self, listener):
self._listeners.remove(listener)
def removeErrorListeners(self):
self._listeners = []
def getTokenTypeMap(self):
tokenNames = self.getTokenNames()
if tokenNames is None:
from antlr4.error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of token names.")
        key = tuple(tokenNames)  # a list would be unhashable as a cache key
        result = self.tokenTypeMapCache.get(key, None)
        if result is None:
            # zip() alone is not a dict, so the "EOF" entry below would fail
            result = dict(zip(tokenNames, range(0, len(tokenNames))))
            result["EOF"] = Token.EOF
            self.tokenTypeMapCache[key] = result
return result
# Get a map from rule names to rule indexes.
#
# <p>Used for XPath and tree pattern compilation.</p>
#
def getRuleIndexMap(self):
ruleNames = self.getRuleNames()
if ruleNames is None:
from antlr4.error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of rule names.")
        key = tuple(ruleNames)  # a list would be unhashable as a cache key
        result = self.ruleIndexMapCache.get(key, None)
        if result is None:
            result = dict(zip(ruleNames, range(0, len(ruleNames))))
            self.ruleIndexMapCache[key] = result
return result
def getTokenType(self, tokenName:str):
ttype = self.getTokenTypeMap().get(tokenName, None)
if ttype is not None:
return ttype
else:
return Token.INVALID_TYPE
    # What is the error header, normally line/character position information?
def getErrorHeader(self, e:RecognitionException):
line = e.getOffendingToken().line
column = e.getOffendingToken().column
return "line "+line+":"+column
# How should a token be displayed in an error message? The default
# is to display just the text, but during development you might
# want to have a lot of information spit out. Override in that case
# to use t.toString() (which, for CommonToken, dumps everything about
# the token). This is better than forcing you to override a method in
# your token objects because you don't have to go modify your lexer
# so that it creates a new Java type.
#
# @deprecated This method is not called by the ANTLR 4 Runtime. Specific
# implementations of {@link ANTLRErrorStrategy} may provide a similar
# feature when necessary. For example, see
# {@link DefaultErrorStrategy#getTokenErrorDisplay}.
#
def getTokenErrorDisplay(self, t:Token):
if t is None:
return "<no token>"
s = t.text
if s is None:
if t.type==Token.EOF:
s = "<EOF>"
else:
s = "<" + str(t.type) + ">"
s = s.replace("\n","\\n")
s = s.replace("\r","\\r")
s = s.replace("\t","\\t")
return "'" + s + "'"
def getErrorListenerDispatch(self):
return ProxyErrorListener(self._listeners)
# subclass needs to override these if there are sempreds or actions
# that the ATN interp needs to execute
def sempred(self, localctx:RuleContext, ruleIndex:int, actionIndex:int):
return True
def precpred(self, localctx:RuleContext , precedence:int):
return True
@property
def state(self):
return self._stateNumber
# Indicate that the recognizer has changed internal state that is<|fim▁hole|> # context objects form a stack that lets us see the stack of
# invoking rules. Combine this and we have complete ATN
# configuration information.
@state.setter
def state(self, atnState:int):
self._stateNumber = atnState
del RecognitionException
import unittest
class Test(unittest.TestCase):
def testVersion(self):
major, minor = Recognizer().extractVersion("1.2")
self.assertEqual("1", major)
self.assertEqual("2", minor)
major, minor = Recognizer().extractVersion("1.2.3")
self.assertEqual("1", major)
self.assertEqual("2", minor)
major, minor = Recognizer().extractVersion("1.2-snapshot")
self.assertEqual("1", major)
self.assertEqual("2", minor)<|fim▁end|> | # consistent with the ATN state passed in. This way we always know
# where we are in the ATN as the parser goes along. The rule |
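# Illustrative usage sketch (hypothetical generated-lexer name): both maps
# above resolve symbolic names for tooling such as XPath and tree patterns:
#
#     lexer = MyGrammarLexer(InputStream("..."))   # some Recognizer subclass
#     lexer.getTokenTypeMap()["ID"]                # symbolic name -> token type
#     lexer.getRuleIndexMap()["expr"]              # rule name -> rule index
#     lexer.getTokenType("NO_SUCH_NAME")           # -> Token.INVALID_TYPE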
<|file_name|>product.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from odoo.exceptions import ValidationError
from odoo import models, api, _
<|fim▁hole|> """
Copy the function from product_configurator to show price using price list.
To Fix :
- Extra price For Attribute value
- Extra price For Custom value.
"""
@api.multi
def _compute_product_price_extra(self):
"""Compute price of configurable products as sum
of products related to attribute values picked"""
products = self.filtered(lambda x: not x.config_ok)
pricelist = self.env.user.partner_id.property_product_pricelist
configurable_products = self - products
if products:
prices = super(ProductProduct, self)._compute_product_price_extra()
conversions = self._get_conversions_dict()
for product in configurable_products:
lst_price = product.product_tmpl_id.lst_price
value_ids = product.attribute_value_ids.ids
# TODO: Merge custom values from products with cfg session
# and use same method to retrieve parsed custom val dict
custom_vals = {}
for val in product.value_custom_ids:
custom_type = val.attribute_id.custom_type
if custom_type in conversions:
try:
custom_vals[val.attribute_id.id] = conversions[
custom_type](val.value)
except:
raise ValidationError(
_("Could not convert custom value '%s' to '%s' on "
"product variant: '%s'" % (val.value,
custom_type,
product.display_name))
)
else:
custom_vals[val.attribute_id.id] = val.value
#
# prices = product.product_tmpl_id.get_cfg_price(
# value_ids, custom_vals)
product_price = pricelist.get_product_price(product, 1, 1)
# product.price_extra = prices['total'] - prices['taxes'] - lst_price
product.price_extra = product_price - lst_price<|fim▁end|> | class ProductProduct(models.Model):
_inherit = 'product.product'
_rec_name = 'config_name'
|
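# Illustrative sketch (generic pattern, not Odoo-specific): the `conversions`
# lookup above reduces to "apply a registered converter, else keep the raw
# value", with conversion failures surfaced as a validation error:
#
#     def coerce(value, custom_type, conversions):
#         converter = conversions.get(custom_type)
#         return converter(value) if converter else value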
<|file_name|>day_01.rs<|end_file_name|><|fim▁begin|>use std::iter::FromIterator;
type Link<T> = Option<Box<Node<T>>>;
struct Node<T> {
item: T,
next: Link<T>,
}
impl <T> Node<T> {
fn new(item: T, next: Link<T>) -> Link<T> {
Some(Box::new(Node { item, next }))
}
}
pub struct Stack<T> {
head: Link<T>,
}
impl <T> Stack<T> {
pub fn pop(&mut self) -> Option<T> {
self.head.take().map(|node| {
self.head = node.next;
node.item
})
}
pub fn push(&mut self, item: T) {
match self.head.take() {
None => self.head = Node::new(item, None),
Some(node) => self.head = Node::new(item, Some(node))
}
}
}
impl <T> Default for Stack<T> {
fn default() -> Self {
Self { head: None }
}
}
impl <T> IntoIterator for Stack<T> {
type Item = T;
type IntoIter = StackIter<T>;
fn into_iter(self) -> Self::IntoIter {
StackIter{ stack: self }
}
}
pub struct StackIter<T> {
stack: Stack<T>
}
impl <T> Iterator for StackIter<T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
self.stack.pop()
}
}
impl <T> AsRef<Stack<T>> for Stack<T> {
fn as_ref(&self) -> &Stack<T> {
self
}
}
impl <'s, T> IntoIterator for &'s Stack<T> {
type Item = &'s T;
type IntoIter = StackRefIter<'s, T>;
fn into_iter(self) -> Self::IntoIter {
StackRefIter { node: self.head.as_ref().map(|node| &**node) }
}
}
pub struct StackRefIter<'s, T> {
node: Option<&'s Node<T>>
}
impl <'s, T> Iterator for StackRefIter<'s, T> {
type Item = &'s T;
fn next(&mut self) -> Option<Self::Item> {
self.node.take().map(|node| {
self.node = node.next.as_ref().map(|node| &**node);
&node.item<|fim▁hole|>}
impl <T> AsMut<Stack<T>> for Stack<T> {
fn as_mut(&mut self) -> &mut Self {
self
}
}
impl <'s, T> IntoIterator for &'s mut Stack<T> {
type Item = &'s mut T;
type IntoIter = StackMutRefIterator<'s, T>;
fn into_iter(self) -> Self::IntoIter {
StackMutRefIterator { node: self.head.as_mut().map(|node| &mut **node)}
}
}
pub struct StackMutRefIterator<'s, T> {
node: Option<&'s mut Node<T>>
}
impl <'s, T> Iterator for StackMutRefIterator<'s, T> {
type Item = &'s mut T;
fn next(&mut self) -> Option<Self::Item> {
self.node.take().map(|node| {
self.node = node.next.as_mut().map(|node| &mut **node);
&mut node.item
})
}
}
impl <T> FromIterator<T> for Stack<T> {
fn from_iter<II: IntoIterator<Item=T>>(iter: II) -> Self {
let mut stack: Stack<T> = Stack::default();
for item in iter {
stack.push(item);
}
stack
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn pop_from_empty_stack() {
let mut stack: Stack<i32> = Stack::default();
assert_eq!(stack.pop(), None);
}
#[test]
fn push_pop_one_item() {
let mut stack = Stack::default();
stack.push(1);
assert_eq!(stack.pop(), Some(1));
assert_eq!(stack.pop(), None)
}
#[test]
fn push_pop_many_items() {
let mut stack = Stack::default();
stack.push(1);
stack.push(2);
stack.push(3);
assert_eq!(stack.pop(), Some(3));
assert_eq!(stack.pop(), Some(2));
assert_eq!(stack.pop(), Some(1));
assert_eq!(stack.pop(), None);
}
#[test]
fn iterator() {
let stack = Stack::from_iter(1..=3);
let mut iter = stack.into_iter();
assert_eq!(iter.next(), Some(3));
assert_eq!(iter.next(), Some(2));
assert_eq!(iter.next(), Some(1));
assert_eq!(iter.next(), None);
}
#[test]
fn ref_iterator() {
let stack = Stack::from_iter(1..=3);
let mut iter = stack.as_ref().into_iter();
assert_eq!(iter.next(), Some(&3));
assert_eq!(iter.next(), Some(&2));
assert_eq!(iter.next(), Some(&1));
assert_eq!(iter.next(), None);
}
#[test]
fn mut_ref_iterator() {
let mut stack = Stack::from_iter(1..=3);
let mut iter = stack.as_mut().into_iter();
assert_eq!(iter.next(), Some(&mut 3));
assert_eq!(iter.next(), Some(&mut 2));
assert_eq!(iter.next(), Some(&mut 1));
assert_eq!(iter.next(), None);
}
}<|fim▁end|> | })
} |
<|file_name|>store_test.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os.path
import sqlite3
import mock
import pytest
import six
from pre_commit.store import _get_default_directory
from pre_commit.store import Store
from pre_commit.util import cmd_output
from pre_commit.util import cwd
from pre_commit.util import rmtree
from testing.fixtures import git_dir
from testing.util import get_head_sha
def test_our_session_fixture_works():
"""There's a session fixture which makes `Store` invariantly raise to
prevent writing to the home directory.
"""
with pytest.raises(AssertionError):
Store()
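# Illustrative sketch (assumption -- the real fixture lives in the test
# suite's conftest.py and may differ; this one is deliberately not autouse,
# so it has no effect on this module's tests):
@pytest.fixture(scope='session')
def store_constructor_guard():
    with mock.patch.object(
            Store, '__init__',
            side_effect=AssertionError('use a store fixture instead'),
    ):
        yield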
def test_get_default_directory_defaults_to_home():
# Not we use the module level one which is not mocked<|fim▁hole|>
def test_adheres_to_xdg_specification():
with mock.patch.dict(
os.environ, {'XDG_CACHE_HOME': '/tmp/fakehome'},
):
ret = _get_default_directory()
assert ret == os.path.join('/tmp/fakehome', 'pre-commit')
def test_uses_environment_variable_when_present():
with mock.patch.dict(
os.environ, {'PRE_COMMIT_HOME': '/tmp/pre_commit_home'},
):
ret = _get_default_directory()
assert ret == '/tmp/pre_commit_home'
def test_store_require_created(store):
assert not os.path.exists(store.directory)
store.require_created()
# Should create the store directory
assert os.path.exists(store.directory)
# Should create a README file indicating what the directory is about
with io.open(os.path.join(store.directory, 'README')) as readme_file:
readme_contents = readme_file.read()
for text_line in (
'This directory is maintained by the pre-commit project.',
'Learn more: https://github.com/pre-commit/pre-commit',
):
assert text_line in readme_contents
def test_store_require_created_does_not_create_twice(store):
assert not os.path.exists(store.directory)
store.require_created()
# We intentionally delete the directory here so we can figure out if it
# calls it again.
rmtree(store.directory)
assert not os.path.exists(store.directory)
# Call require_created, this should not trigger a call to create
store.require_created()
assert not os.path.exists(store.directory)
def test_does_not_recreate_if_directory_already_exists(store):
assert not os.path.exists(store.directory)
# We manually create the directory.
# Note: we're intentionally leaving out the README file. This is so we can
# know that `Store` didn't call create
os.mkdir(store.directory)
open(store.db_path, 'a').close()
# Call require_created, this should not call create
store.require_created()
assert not os.path.exists(os.path.join(store.directory, 'README'))
def test_clone(store, tempdir_factory, log_info_mock):
path = git_dir(tempdir_factory)
with cwd(path):
cmd_output('git', 'commit', '--allow-empty', '-m', 'foo')
sha = get_head_sha(path)
cmd_output('git', 'commit', '--allow-empty', '-m', 'bar')
ret = store.clone(path, sha)
# Should have printed some stuff
assert log_info_mock.call_args_list[0][0][0].startswith(
'Initializing environment for ',
)
# Should return a directory inside of the store
assert os.path.exists(ret)
assert ret.startswith(store.directory)
# Directory should start with `repo`
_, dirname = os.path.split(ret)
assert dirname.startswith('repo')
# Should be checked out to the sha we specified
assert get_head_sha(ret) == sha
# Assert there's an entry in the sqlite db for this
with sqlite3.connect(store.db_path) as db:
path, = db.execute(
'SELECT path from repos WHERE repo = ? and ref = ?',
[path, sha],
).fetchone()
assert path == ret
def test_clone_cleans_up_on_checkout_failure(store):
try:
# This raises an exception because you can't clone something that
# doesn't exist!
store.clone('/i_dont_exist_lol', 'fake_sha')
except Exception as e:
assert '/i_dont_exist_lol' in six.text_type(e)
things_starting_with_repo = [
thing for thing in os.listdir(store.directory)
if thing.startswith('repo')
]
assert things_starting_with_repo == []
def test_clone_when_repo_already_exists(store):
# Create an entry in the sqlite db that makes it look like the repo has
# been cloned.
store.require_created()
with sqlite3.connect(store.db_path) as db:
db.execute(
'INSERT INTO repos (repo, ref, path) '
'VALUES ("fake_repo", "fake_ref", "fake_path")',
)
assert store.clone('fake_repo', 'fake_ref') == 'fake_path'
def test_require_created_when_directory_exists_but_not_db(store):
# In versions <= 0.3.5, there was no sqlite db causing a need for
# backward compatibility
os.makedirs(store.directory)
store.require_created()
assert os.path.exists(store.db_path)<|fim▁end|> | ret = _get_default_directory()
assert ret == os.path.join(os.path.expanduser('~/.cache'), 'pre-commit') |
<|file_name|>charity_repository_test.py<|end_file_name|><|fim▁begin|>from givabit.backend.charity import Charity
from givabit.backend.errors import MissingValueException, MultipleValueException
from givabit.test_common import test_data
from givabit.test_common import test_utils
class CharityRepositoryTest(test_utils.TestCase):
def setUp(self):
super(CharityRepositoryTest, self).setUp()<|fim▁hole|> for charity in self.all_charities:
self.charity_repo.add_or_update_charity(charity)
def test_lists_charities(self):
self.assertSequenceEqual(self.charity_repo.list_charities(), self.all_charities)
def test_gets_single_charity(self):
self.assertEqual(self.charity_repo.get_charity('Shelter'), test_data.c1)
self.assertEqual(self.charity_repo.get_charity('Oxfam'), test_data.c2)
with self.assertRaises(MissingValueException):
self.charity_repo.get_charity('Does not exist')
try:
self.charity_repo.get_charity('BHF')
except MultipleValueException, e:
self.assertSequenceEqual(e.values, [test_data.c3, test_data.c4])
def test_gets_charity_by_id(self):
self.assertEquals(self.charity_repo.get_charity(id=test_data.c1.key().id()), test_data.c1)
def test_getting_missing_charity_by_id_throws(self):
missing_id = 0
while missing_id in map(lambda charity: charity.key().id(), self.all_charities):
missing_id += 1
with self.assertRaises(MissingValueException):
self.charity_repo.get_charity(id=missing_id)<|fim▁end|> | self.all_charities = [test_data.c1, test_data.c2, test_data.c3, test_data.c4] |
<|file_name|>prefixes.js<|end_file_name|><|fim▁begin|>(function() {
var add, crispedges, feature, flexbox, fullscreen, gradients, logicalProps, prefix, readOnly, resolution, result, sort, writingMode,
slice = [].slice;
sort = function(array) {
return array.sort(function(a, b) {
var d;
a = a.split(' ');
b = b.split(' ');
if (a[0] > b[0]) {
return 1;
} else if (a[0] < b[0]) {
return -1;
} else {
d = parseFloat(a[1]) - parseFloat(b[1]);
if (d > 0) {
return 1;
} else if (d < 0) {
return -1;
} else {
return 0;
}
}
});
};
feature = function(data, opts, callback) {
var browser, match, need, ref, ref1, support, version, versions;
if (!callback) {
ref = [opts, {}], callback = ref[0], opts = ref[1];
}
match = opts.match || /\sx($|\s)/;
need = [];
ref1 = data.stats;
for (browser in ref1) {
versions = ref1[browser];<|fim▁hole|> if (support.match(match)) {
need.push(browser + ' ' + version);
}
}
}
return callback(sort(need));
};
result = {};
prefix = function() {
var data, i, j, k, len, name, names, results;
names = 2 <= arguments.length ? slice.call(arguments, 0, j = arguments.length - 1) : (j = 0, []), data = arguments[j++];
results = [];
for (k = 0, len = names.length; k < len; k++) {
name = names[k];
result[name] = {};
results.push((function() {
var results1;
results1 = [];
for (i in data) {
results1.push(result[name][i] = data[i]);
}
return results1;
})());
}
return results;
};
add = function() {
var data, j, k, len, name, names, results;
names = 2 <= arguments.length ? slice.call(arguments, 0, j = arguments.length - 1) : (j = 0, []), data = arguments[j++];
results = [];
for (k = 0, len = names.length; k < len; k++) {
name = names[k];
results.push(result[name].browsers = sort(result[name].browsers.concat(data.browsers)));
}
return results;
};
module.exports = result;
feature(require('caniuse-db/features-json/border-radius'), function(browsers) {
return prefix('border-radius', 'border-top-left-radius', 'border-top-right-radius', 'border-bottom-right-radius', 'border-bottom-left-radius', {
mistakes: ['-khtml-', '-ms-', '-o-'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-boxshadow'), function(browsers) {
return prefix('box-shadow', {
mistakes: ['-khtml-'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-animation'), function(browsers) {
return prefix('animation', 'animation-name', 'animation-duration', 'animation-delay', 'animation-direction', 'animation-fill-mode', 'animation-iteration-count', 'animation-play-state', 'animation-timing-function', '@keyframes', {
mistakes: ['-khtml-', '-ms-'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-transitions'), function(browsers) {
return prefix('transition', 'transition-property', 'transition-duration', 'transition-delay', 'transition-timing-function', {
mistakes: ['-khtml-', '-ms-'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/transforms2d'), function(browsers) {
return prefix('transform', 'transform-origin', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/transforms3d'), function(browsers) {
prefix('perspective', 'perspective-origin', {
browsers: browsers
});
return prefix('transform-style', 'backface-visibility', {
mistakes: ['-ms-', '-o-'],
browsers: browsers
});
});
gradients = require('caniuse-db/features-json/css-gradients');
feature(gradients, {
match: /y\sx/
}, function(browsers) {
return prefix('linear-gradient', 'repeating-linear-gradient', 'radial-gradient', 'repeating-radial-gradient', {
props: ['background', 'background-image', 'border-image', 'mask', 'list-style', 'list-style-image', 'content', 'mask-image'],
mistakes: ['-ms-'],
browsers: browsers
});
});
feature(gradients, {
match: /a\sx/
}, function(browsers) {
browsers = browsers.map(function(i) {
if (/op/.test(i)) {
return i;
} else {
return i + " old";
}
});
return add('linear-gradient', 'repeating-linear-gradient', 'radial-gradient', 'repeating-radial-gradient', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css3-boxsizing'), function(browsers) {
return prefix('box-sizing', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-filters'), function(browsers) {
return prefix('filter', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-filter-function'), function(browsers) {
return prefix('filter-function', {
props: ['background', 'background-image', 'border-image', 'mask', 'list-style', 'list-style-image', 'content', 'mask-image'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-backdrop-filter'), function(browsers) {
return prefix('backdrop-filter', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-element-function'), function(browsers) {
return prefix('element', {
props: ['background', 'background-image', 'border-image', 'mask', 'list-style', 'list-style-image', 'content', 'mask-image'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/multicolumn'), function(browsers) {
prefix('columns', 'column-width', 'column-gap', 'column-rule', 'column-rule-color', 'column-rule-width', {
browsers: browsers
});
return prefix('column-count', 'column-rule-style', 'column-span', 'column-fill', 'break-before', 'break-after', 'break-inside', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/user-select-none'), function(browsers) {
return prefix('user-select', {
mistakes: ['-khtml-'],
browsers: browsers
});
});
flexbox = require('caniuse-db/features-json/flexbox');
feature(flexbox, {
match: /a\sx/
}, function(browsers) {
browsers = browsers.map(function(i) {
if (/ie|firefox/.test(i)) {
return i;
} else {
return i + " 2009";
}
});
prefix('display-flex', 'inline-flex', {
props: ['display'],
browsers: browsers
});
prefix('flex', 'flex-grow', 'flex-shrink', 'flex-basis', {
browsers: browsers
});
return prefix('flex-direction', 'flex-wrap', 'flex-flow', 'justify-content', 'order', 'align-items', 'align-self', 'align-content', {
browsers: browsers
});
});
feature(flexbox, {
match: /y\sx/
}, function(browsers) {
add('display-flex', 'inline-flex', {
browsers: browsers
});
add('flex', 'flex-grow', 'flex-shrink', 'flex-basis', {
browsers: browsers
});
return add('flex-direction', 'flex-wrap', 'flex-flow', 'justify-content', 'order', 'align-items', 'align-self', 'align-content', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/calc'), function(browsers) {
return prefix('calc', {
props: ['*'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/background-img-opts'), function(browsers) {
return prefix('background-clip', 'background-origin', 'background-size', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/font-feature'), function(browsers) {
return prefix('font-feature-settings', 'font-variant-ligatures', 'font-language-override', 'font-kerning', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/border-image'), function(browsers) {
return prefix('border-image', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-selection'), function(browsers) {
return prefix('::selection', {
selector: true,
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-placeholder'), function(browsers) {
browsers = browsers.map(function(i) {
var name, ref, version;
ref = i.split(' '), name = ref[0], version = ref[1];
if (name === 'firefox' && parseFloat(version) <= 18) {
return i + ' old';
} else {
return i;
}
});
return prefix('::placeholder', {
selector: true,
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-hyphens'), function(browsers) {
return prefix('hyphens', {
browsers: browsers
});
});
fullscreen = require('caniuse-db/features-json/fullscreen');
feature(fullscreen, function(browsers) {
return prefix(':fullscreen', {
selector: true,
browsers: browsers
});
});
feature(fullscreen, {
match: /x(\s#2|$)/
}, function(browsers) {
return prefix('::backdrop', {
selector: true,
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css3-tabsize'), function(browsers) {
return prefix('tab-size', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/intrinsic-width'), function(browsers) {
return prefix('max-content', 'min-content', 'fit-content', 'fill-available', {
props: ['width', 'min-width', 'max-width', 'height', 'min-height', 'max-height'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css3-cursors-newer'), function(browsers) {
return prefix('zoom-in', 'zoom-out', {
props: ['cursor'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css3-cursors-grab'), function(browsers) {
return prefix('grab', 'grabbing', {
props: ['cursor'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-sticky'), function(browsers) {
return prefix('sticky', {
props: ['position'],
browsers: browsers
});
});
feature(require('caniuse-db/features-json/pointer'), function(browsers) {
return prefix('touch-action', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/text-decoration'), function(browsers) {
return prefix('text-decoration-style', 'text-decoration-line', 'text-decoration-color', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/text-size-adjust'), function(browsers) {
return prefix('text-size-adjust', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-masks'), function(browsers) {
prefix('mask-clip', 'mask-composite', 'mask-image', 'mask-origin', 'mask-repeat', 'mask-border-repeat', 'mask-border-source', {
browsers: browsers
});
return prefix('clip-path', 'mask', 'mask-position', 'mask-size', 'mask-border', 'mask-border-outset', 'mask-border-width', 'mask-border-slice', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-boxdecorationbreak'), function(brwsrs) {
return prefix('box-decoration-break', {
browsers: brwsrs
});
});
feature(require('caniuse-db/features-json/object-fit'), function(browsers) {
return prefix('object-fit', 'object-position', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-shapes'), function(browsers) {
return prefix('shape-margin', 'shape-outside', 'shape-image-threshold', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/text-overflow'), function(browsers) {
return prefix('text-overflow', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/text-emphasis'), function(browsers) {
return prefix('text-emphasis', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-deviceadaptation'), function(browsers) {
return prefix('@viewport', {
browsers: browsers
});
});
resolution = require('caniuse-db/features-json/css-media-resolution');
feature(resolution, {
match: /( x($| )|a #3)/
}, function(browsers) {
return prefix('@resolution', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-text-align-last'), function(browsers) {
return prefix('text-align-last', {
browsers: browsers
});
});
crispedges = require('caniuse-db/features-json/css-crisp-edges');
feature(crispedges, {
match: /y x/
}, function(browsers) {
return prefix('pixelated', {
props: ['image-rendering'],
browsers: browsers
});
});
feature(crispedges, {
match: /a x #2/
}, function(browsers) {
return prefix('image-rendering', {
browsers: browsers
});
});
logicalProps = require('caniuse-db/features-json/css-logical-props');
feature(logicalProps, function(browsers) {
return prefix('border-inline-start', 'border-inline-end', 'margin-inline-start', 'margin-inline-end', 'padding-inline-start', 'padding-inline-end', {
browsers: browsers
});
});
feature(logicalProps, {
match: /x\s#2/
}, function(browsers) {
return prefix('border-block-start', 'border-block-end', 'margin-block-start', 'margin-block-end', 'padding-block-start', 'padding-block-end', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-appearance'), function(browsers) {
return prefix('appearance', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-snappoints'), function(browsers) {
return prefix('scroll-snap-type', 'scroll-snap-coordinate', 'scroll-snap-destination', 'scroll-snap-points-x', 'scroll-snap-points-y', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-regions'), function(browsers) {
return prefix('flow-into', 'flow-from', 'region-fragment', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-image-set'), function(browsers) {
return prefix('image-set', {
props: ['background', 'background-image', 'border-image', 'mask', 'list-style', 'list-style-image', 'content', 'mask-image'],
browsers: browsers
});
});
writingMode = require('caniuse-db/features-json/css-writing-mode');
feature(writingMode, {
match: /a|x/
}, function(browsers) {
return prefix('writing-mode', {
browsers: browsers
});
});
feature(require('caniuse-db/features-json/css-cross-fade.json'), function(browsers) {
return prefix('cross-fade', {
props: ['background', 'background-image', 'border-image', 'mask', 'list-style', 'list-style-image', 'content', 'mask-image'],
browsers: browsers
});
});
readOnly = require('caniuse-db/features-json/css-read-only-write.json');
feature(readOnly, function(browsers) {
return prefix(':read-only', ':read-write', {
selector: true,
browsers: browsers
});
});
}).call(this);<|fim▁end|> | for (version in versions) {
support = versions[version]; |
<|file_name|>Shapes.java<|end_file_name|><|fim▁begin|>/*
* www.javagl.de - Flow
*
* Copyright (c) 2012-2017 Marco Hutter - http://www.javagl.de
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
<|fim▁hole|> * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package de.javagl.flow.gui;
import java.awt.Shape;
import java.awt.geom.Line2D;
import java.awt.geom.PathIterator;
import java.util.ArrayList;
import java.util.List;
/**
* Utility methods related to shapes
*/
class Shapes
{
/**
* Create a list containing line segments that approximate the given
* shape.
*
* NOTE: Copied from https://github.com/javagl/Geom/blob/master/
* src/main/java/de/javagl/geom/Shapes.java
*
* @param shape The shape
* @param flatness The allowed flatness
* @return The list of line segments
*/
public static List<Line2D> computeLineSegments(
Shape shape, double flatness)
{
List<Line2D> result = new ArrayList<Line2D>();
PathIterator pi = shape.getPathIterator(null, flatness);
double[] coords = new double[6];
double previous[] = new double[2];
double first[] = new double[2];
while (!pi.isDone())
{
int segment = pi.currentSegment(coords);
switch (segment)
{
case PathIterator.SEG_MOVETO:
previous[0] = coords[0];
previous[1] = coords[1];
first[0] = coords[0];
first[1] = coords[1];
break;
case PathIterator.SEG_CLOSE:
result.add(new Line2D.Double(
previous[0], previous[1],
first[0], first[1]));
previous[0] = first[0];
previous[1] = first[1];
break;
case PathIterator.SEG_LINETO:
result.add(new Line2D.Double(
previous[0], previous[1],
coords[0], coords[1]));
previous[0] = coords[0];
previous[1] = coords[1];
break;
case PathIterator.SEG_QUADTO:
// Should never occur
throw new AssertionError(
"SEG_QUADTO in flattened path!");
case PathIterator.SEG_CUBICTO:
// Should never occur
throw new AssertionError(
"SEG_CUBICTO in flattened path!");
default:
// Should never occur
throw new AssertionError(
"Invalid segment in flattened path!");
}
pi.next();
}
return result;
}
/**
* Private constructor to prevent instantiation
*/
private Shapes()
{
// Private constructor to prevent instantiation
}
}<|fim▁end|> | |
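// Illustrative usage sketch (not part of the original source): flattening an
// ellipse into line segments; a smaller flatness yields more, shorter
// segments.
//
//     Shape ellipse = new java.awt.geom.Ellipse2D.Double(0, 0, 100, 50);
//     List<Line2D> segments = Shapes.computeLineSegments(ellipse, 0.5);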
<|file_name|>multichecksum.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__author__ = "Andrew Hankinson ([email protected])"
__version__ = "1.5"
__date__ = "2011"
__copyright__ = "Creative Commons Attribution"
__license__ = """The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
import multiprocessing
from optparse import OptionParser
import os
import sys
import hashlib
import codecs
import re
from pybagit.exceptions import *
# declare a default hashalgorithm
HASHALG = 'sha1'
ENCODING = "utf-8"
def write_manifest(datadir, encoding, update=False):
bag_root = os.path.split(os.path.abspath(datadir))[0]
manifest_file = os.path.join(bag_root, "manifest-{0}.txt".format(HASHALG))
checksums = dict()
files_to_checksum = set(dirwalk(datadir))
if update and os.path.isfile(manifest_file):
for line in codecs.open(manifest_file, 'rb', encoding):
checksum, file_ = line.strip().split(' ', 1)
full_file = os.path.join(bag_root, file_)
if full_file in files_to_checksum:
files_to_checksum.remove(full_file)
checksums[os.path.join(bag_root, file_)] = checksum
p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
result = p.map_async(csumfile, files_to_checksum)
checksums.update((k, v) for v, k in result.get())
p.close()
p.join()
mfile = codecs.open(manifest_file, 'wb', encoding)
for file_, checksum in sorted(checksums.iteritems()):
rp = os.path.relpath(file_, bag_root)
fl = ensure_unix_pathname(rp)
mfile.write(u"{0} {1}\n".format(checksum, fl))
mfile.close()
def dirwalk(datadir):
datafiles = []
for dirpath, dirnames, filenames in os.walk(u"{0}".format(datadir)):
for fn in filenames:
datafiles.append(os.path.join(dirpath, fn))
return datafiles
def csumfile(filename):
""" Based on
http://abstracthack.wordpress.com/2007/10/19/calculating-md5-checksum/
"""
hashalg = getattr(hashlib, HASHALG)() # == 'hashlib.md5' or 'hashlib.sha1'
blocksize = 0x10000
def __upd(m, data):
m.update(data)
return m
fd = open(filename, 'rb')
try:
contents = iter(lambda: fd.read(blocksize), "")
m = reduce(__upd, contents, hashalg)
finally:
fd.close()
return (m.hexdigest(), filename)
def ensure_unix_pathname(pathname):
# it's only windows we have to worry about
if sys.platform != "win32":
return pathname
replace = re.compile(r"\\", re.UNICODE)
fnm = re.sub(replace, "/", pathname)
return fnm
if __name__ == "__main__":
parser = OptionParser()<|fim▁hole|> (options, args) = parser.parse_args()
if options.algorithm:
if not options.algorithm in ('md5', 'sha1'):
raise BagCheckSumNotValid('You must specify either "md5" or "sha1" as the checksum algorithm')
HASHALG = options.algorithm
if options.encoding:
ENCODING = options.encoding
if len(args) < 1:
parser.error("You must specify a data directory")
write_manifest(args[0], ENCODING, update=options.update)<|fim▁end|> | usage = "%prog [options] arg1 arg2"
parser.add_option("-a", "--algorithm", action="store", help="checksum algorithm to use (sha1|md5)")
parser.add_option("-c", "--encoding", action="store", help="File encoding to write manifest")
parser.add_option("-u", "--update", action="store_true", help="Only update new/removed files") |
<|file_name|>gear.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('myApp.gear', ['ngRoute'])
<|fim▁hole|>.config(['$routeProvider', function($routeProvider) {
$routeProvider.when('/gear', {
templateUrl: 'gear/gear.html',
controller: 'GearCtrl'
});
}])
.controller('GearCtrl', ['$scope','$http', function($scope, $http) {
$http.get('resources/data/gear.json').
success(function(data, status, headers, config) {
$scope.content = data;
}).
error(function(data, status, headers, config) {
// log error
});
}]);<|fim▁end|> | |
<|file_name|>chrome_version_info.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/common/chrome_version_info.h"
#include "base/basictypes.h"
#include "base/file_version_info.h"
#include "base/string_util.h"
#include "base/threading/thread_restrictions.h"
#include "base/utf_string_conversions.h"
#include "build/build_config.h"
#include "grit/chromium_strings.h"
#include "grit/generated_resources.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
// Generated header
#include "chrome/common/chrome_release_version_info.h"
namespace chrome {
#if defined(OS_WIN) || defined(OS_MACOSX)
// On Windows and Mac, we get the Chrome version info by querying
// FileVersionInfo for the current module.
VersionInfo::VersionInfo() {
// The current module is already loaded in memory, so this will be cheap.
base::ThreadRestrictions::ScopedAllowIO allow_io;
version_info_.reset(FileVersionInfo::CreateFileVersionInfoForCurrentModule());
}
VersionInfo::~VersionInfo() {
}
bool VersionInfo::is_valid() const {
return version_info_.get() != NULL;
}
std::string VersionInfo::Name() const {
if (!is_valid())
return std::string();
return UTF16ToUTF8(version_info_->product_name());
}
std::string VersionInfo::Version() const {
if (!is_valid())
return std::string();
return UTF16ToUTF8(version_info_->product_version());
}
std::string VersionInfo::LastChange() const {
if (!is_valid())
return std::string();
return UTF16ToUTF8(version_info_->last_change());
}
bool VersionInfo::IsOfficialBuild() const {
if (!is_valid())
return false;
return version_info_->is_official_build();
}
#elif defined(OS_POSIX)
// We get chrome version information from chrome_version_info_posix.h,
// a generated header.
#include "chrome/common/chrome_version_info_posix.h"
VersionInfo::VersionInfo() {
}
VersionInfo::~VersionInfo() {
}
bool VersionInfo::is_valid() const {
return true;
}
std::string VersionInfo::Name() const {
return PRODUCT_NAME;
}
std::string VersionInfo::Version() const {
return PRODUCT_VERSION;
}
std::string VersionInfo::LastChange() const {
return LAST_CHANGE;
}
bool VersionInfo::IsOfficialBuild() const {
return IS_OFFICIAL_BUILD;
}
#endif
std::string VersionInfo::CreateVersionString() const {
std::string current_version;
if (is_valid()) {
current_version += Version();
#if 0
current_version += " (";
current_version += l10n_util::GetStringUTF8(IDS_ABOUT_VERSION_UNOFFICIAL);
current_version += " ";
current_version += LastChange();
current_version += " ";
current_version += OSType();
current_version += ")";
#endif
std::string modifier = GetVersionStringModifier();
if (!modifier.empty())
current_version += " " + modifier;
}
return current_version;
}
std::string VersionInfo::OSType() const {
#if defined(OS_WIN)
return "Windows";
#elif defined(OS_MACOSX)
return "Mac OS X";
#elif defined(OS_CHROMEOS)
if (ui::ResourceBundle::HasSharedInstance())
return UTF16ToASCII(l10n_util::GetStringUTF16(IDS_SHORT_PRODUCT_OS_NAME));
else
return "Chromium OS";
#elif defined(OS_ANDROID)
return "Android";
#elif defined(OS_LINUX)
return "Linux";
#elif defined(OS_FREEBSD)
return "FreeBSD";
#elif defined(OS_OPENBSD)
return "OpenBSD";
#elif defined(OS_SOLARIS)
return "Solaris";
#else
return "Unknown";
#endif
}
<|fim▁hole|>
} // namespace chrome<|fim▁end|> | std::string VersionInfo::ChromiumReleaseVersion() const {
return CHROMIUM_RELEASE_VERSION;
} |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import print_function
import datetime
import glob
import os
import re
import sys
import subprocess
from picard import __version__, compat
if sys.version_info < (2, 6):
print("*** You need Python 2.6 or higher to use Picard.")
args = {}
try:
from py2app.build_app import py2app
do_py2app = True
except ImportError:
do_py2app = False
# this must be imported *after* py2app, because py2app imports setuptools
# which "patches" (read: screws up) the Extension class
from distutils import log
from distutils.command.build import build
from distutils.command.install import install as install
from distutils.core import setup, Command, Extension
from distutils.dep_util import newer
from distutils.dist import Distribution
from distutils.spawn import find_executable
ext_modules = [
Extension('picard.util.astrcmp', sources=['picard/util/astrcmp.c']),
]
py2app_exclude_modules = [
'pydoc',
'PyQt4.QtDeclarative', 'PyQt4.QtDesigner', 'PyQt4.QtHelp', 'PyQt4.QtMultimedia',
'PyQt4.QtOpenGL', 'PyQt4.QtScript', 'PyQt4.QtScriptTools', 'PyQt4.QtSql', 'PyQt4.QtSvg',
'PyQt4.QtTest', 'PyQt4.QtWebKit', 'PyQt4.QtXml', 'PyQt4.QtXmlPatterns', 'PyQt4.phonon'
]
py2exe_exclude_modules = [
'socket', 'select',
]
exclude_modules = [
'ssl', 'bz2',
'distutils', 'unittest',
'bdb', 'calendar', 'difflib', 'doctest', 'dummy_thread', 'gzip',
'optparse', 'pdb', 'plistlib', 'pyexpat', 'quopri', 'repr',
'stringio', 'tarfile', 'uu', 'zipfile'
]
if do_py2app:
args['app'] = ['tagger.py']
args['name'] = 'Picard'
args['options'] = { 'py2app' :
{
'optimize' : 2,
'argv_emulation' : True,
'iconfile' : 'picard.icns',
'frameworks' : ['libiconv.2.dylib', 'libdiscid.0.dylib'],
'resources' : ['locale'],
'includes' : ['json', 'sip', 'PyQt4', 'ntpath'] + [e.name for e in ext_modules],
'excludes' : exclude_modules + py2app_exclude_modules,
'plist' : { 'CFBundleName' : 'MusicBrainz Picard',
'CFBundleGetInfoString' : 'Picard, the next generation MusicBrainz tagger (see http://musicbrainz.org/doc/MusicBrainz_Picard)',
'CFBundleIdentifier':'org.musicbrainz.picard',
'CFBundleShortVersionString':__version__,
'CFBundleVersion': 'Picard ' + __version__,
'LSMinimumSystemVersion':'10.4.3',
'LSMultipleInstancesProhibited':'true',
# RAK: It biffed when I tried to include your accented characters, luks. :-(
'NSHumanReadableCopyright':'Copyright 2008 Lukas Lalinsky, Robert Kaye',
},
'qt_plugins': ['imageformats/libqgif.dylib',
'imageformats/libqjpeg.dylib',
'imageformats/libqtiff.dylib',
'accessible/libqtaccessiblewidgets.dylib']
},
}
tx_executable = find_executable('tx')
class picard_test(Command):
description = "run automated tests"
user_options = [
("tests=", None, "list of tests to run (default all)"),
("verbosity=", "v", "verbosity"),
]
def initialize_options(self):
self.tests = []
self.verbosity = 1
def finalize_options(self):
if self.tests:
self.tests = self.tests.split(",")
if self.verbosity:
self.verbosity = int(self.verbosity)
def run(self):
import unittest
import sip
sip.setapi("QString", 2)
sip.setapi("QVariant", 2)
names = []
for filename in glob.glob("test/test_*.py"):
name = os.path.splitext(os.path.basename(filename))[0]
if not self.tests or name in self.tests:
names.append("test." + name)
tests = unittest.defaultTestLoader.loadTestsFromNames(names)
t = unittest.TextTestRunner(verbosity=self.verbosity)
testresult = t.run(tests)
if not testresult.wasSuccessful():
sys.exit("At least one test failed.")
class picard_build_locales(Command):
description = 'build locale files'
user_options = [
('build-dir=', 'd', "directory to build to"),
('inplace', 'i', "ignore build-lib and put compiled locales into the 'locale' directory"),
]
def initialize_options(self):
self.build_dir = None
self.inplace = 0
def finalize_options(self):
self.set_undefined_options('build', ('build_locales', 'build_dir'))
self.locales = self.distribution.locales
def run(self):
for domain, locale, po in self.locales:
if self.inplace:
path = os.path.join('locale', locale, 'LC_MESSAGES')
else:
path = os.path.join(self.build_dir, locale, 'LC_MESSAGES')
mo = os.path.join(path, '%s.mo' % domain)
self.mkpath(path)
self.spawn(['msgfmt', '-o', mo, po])
Distribution.locales = None
class picard_install_locales(Command):
description = "install locale files"
user_options = [
('install-dir=', 'd', "directory to install locale files to"),
('build-dir=', 'b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'skip-build']
def initialize_options(self):
self.install_dir = None
self.build_dir = None
self.force = 0
self.skip_build = None
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('build', ('build_locales', 'build_dir'))
self.set_undefined_options('install',
('install_locales', 'install_dir'),
('force', 'force'),
('skip_build', 'skip_build'),
)
def run(self):
if not self.skip_build:
self.run_command('build_locales')
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
def get_inputs(self):
return self.locales or []
def get_outputs(self):
return self.outfiles
class picard_install(install):
user_options = install.user_options + [
('install-locales=', None,
"installation directory for locales"),
('localedir=', None, ''),
('disable-autoupdate', None, ''),
('disable-locales', None, ''),
]
sub_commands = install.sub_commands
def initialize_options(self):
install.initialize_options(self)
self.install_locales = None
self.localedir = None
self.disable_autoupdate = None
self.disable_locales = None
def finalize_options(self):
install.finalize_options(self)
if self.install_locales is None:
self.install_locales = '$base/share/locale'
self._expand_attrs(['install_locales'])
self.install_locales = os.path.normpath(self.install_locales)
self.localedir = self.install_locales
# can't use set_undefined_options :/
self.distribution.get_command_obj('build').localedir = self.localedir
self.distribution.get_command_obj('build').disable_autoupdate = self.disable_autoupdate
if self.root is not None:
self.change_roots('locales')
if self.disable_locales is None:
self.sub_commands.append(('install_locales', None))
def run(self):
install.run(self)
class picard_build(build):
user_options = build.user_options + [
('build-locales=', 'd', "build directory for locale files"),
('localedir=', None, ''),
('disable-autoupdate', None, ''),
('disable-locales', None, ''),
]
sub_commands = build.sub_commands
def initialize_options(self):
build.initialize_options(self)
self.build_locales = None
self.localedir = None
self.disable_autoupdate = None
self.disable_locales = None
def finalize_options(self):
build.finalize_options(self)
if self.build_locales is None:
self.build_locales = os.path.join(self.build_base, 'locale')
if self.localedir is None:
self.localedir = '/usr/share/locale'
if self.disable_autoupdate is None:
self.disable_autoupdate = False
if self.disable_locales is None:
self.sub_commands.append(('build_locales', None))
def run(self):
if 'bdist_nsis' not in sys.argv: # somebody shoot me please
log.info('generating scripts/picard from scripts/picard.in')
generate_file('scripts/picard.in', 'scripts/picard', {'localedir': self.localedir, 'autoupdate': not self.disable_autoupdate})
build.run(self)
def py_from_ui(uifile):
return "ui_%s.py" % os.path.splitext(os.path.basename(uifile))[0]
def py_from_ui_with_defaultdir(uifile):
return os.path.join("picard", "ui", py_from_ui(uifile))
def ui_files():
for uifile in glob.glob("ui/*.ui"):
yield (uifile, py_from_ui_with_defaultdir(uifile))
class picard_build_ui(Command):
description = "build Qt UI files and resources"
user_options = [
("files=", None, "comma-separated list of files to rebuild"),
]
def initialize_options(self):
self.files = []
def finalize_options(self):
if self.files:
files = []
for f in self.files.split(","):
head, tail = os.path.split(f)
m = re.match(r'(?:ui_)?([^.]+)', tail)
if m:
name = m.group(1)
else:
log.warn('ignoring %r (cannot extract base name)' % f)
continue
uiname = name + '.ui'
uifile = os.path.join(head, uiname)
if os.path.isfile(uifile):
pyfile = os.path.join(os.path.dirname(uifile),
py_from_ui(uifile))
files.append((uifile, pyfile))
else:
uifile = os.path.join('ui', uiname)
if os.path.isfile(uifile):
files.append((uifile,
py_from_ui_with_defaultdir(uifile)))
else:
log.warn('ignoring %r' % f)
self.files = files
def run(self):
from PyQt4 import uic
_translate_re = (
re.compile(
r'QtGui\.QApplication.translate\(.*?, (.*?), None, '
r'QtGui\.QApplication\.UnicodeUTF8\)'),
re.compile(
r'\b_translate\(.*?, (.*?), None\)')
)
def compile_ui(uifile, pyfile):
log.info("compiling %s -> %s", uifile, pyfile)
tmp = compat.StringIO()
uic.compileUi(uifile, tmp)
source = tmp.getvalue()
rc = re.compile(r'\n\n#.*?(?=\n\n)', re.MULTILINE|re.DOTALL)
comment = (u"\n\n# Automatically generated - don't edit.\n"
u"# Use `python setup.py %s` to update it."
% _get_option_name(self))
for r in list(_translate_re):
source = r.sub(r'_(\1)', source)
source = rc.sub(comment, source)
f = open(pyfile, "w")
f.write(source)
f.close()
if self.files:
for uifile, pyfile in self.files:
compile_ui(uifile, pyfile)
else:
for uifile, pyfile in ui_files():
if newer(uifile, pyfile):
compile_ui(uifile, pyfile)
from resources import compile, makeqrc
makeqrc.main()
compile.main()
class picard_clean_ui(Command):
description = "clean up compiled Qt UI files and resources"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from PyQt4 import uic
for uifile, pyfile in ui_files():
try:
os.unlink(pyfile)
log.info("removing %s", pyfile)
except OSError:
log.warn("'%s' does not exist -- can't clean it", pyfile)
pyfile = os.path.join("picard", "resources.py")
try:
os.unlink(pyfile)
log.info("removing %s", pyfile)
except OSError:
log.warn("'%s' does not exist -- can't clean it", pyfile)
class picard_get_po_files(Command):
description = "Retrieve po files from transifex"
minimum_perc_default = 5
user_options = [
('minimum-perc=', 'm',
"Specify the minimum acceptable percentage of a translation (default: %d)" % minimum_perc_default)
]
def initialize_options(self):
self.minimum_perc = self.minimum_perc_default
def finalize_options(self):
self.minimum_perc = int(self.minimum_perc)
def run(self):
if tx_executable is None:
sys.exit('Transifex client executable (tx) not found.')
txpull_cmd = [
tx_executable,
'pull',
'--force',
'--all',
'--minimum-perc=%d' % self.minimum_perc
]
self.spawn(txpull_cmd)
_regen_pot_description = "Regenerate po/picard.pot, parsing source tree for new or updated strings"
try:
from babel import __version__ as babel_version
from babel.messages import frontend as babel
def versiontuple(v):
return tuple(map(int, (v.split("."))))
# input_dirs are incorrectly handled in babel versions < 1.0
# http://babel.edgewall.org/ticket/232
input_dirs_workaround = versiontuple(babel_version) < (1, 0, 0)
class picard_regen_pot_file(babel.extract_messages):
description = _regen_pot_description
def initialize_options(self):
# cannot use super() with old-style parent class
babel.extract_messages.initialize_options(self)
self.output_file = 'po/picard.pot'
self.input_dirs = 'contrib, picard'
if self.input_dirs and input_dirs_workaround:
self._input_dirs = self.input_dirs
def finalize_options(self):
babel.extract_messages.finalize_options(self)
if input_dirs_workaround and self._input_dirs:
                self.input_dirs = re.split(r',\s*', self._input_dirs)
except ImportError:
class picard_regen_pot_file(Command):
description = _regen_pot_description
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
sys.exit("Babel is required to use this command (see po/README.md)")
def _get_option_name(obj):
"""Returns the name of the option for specified Command object"""
for name, klass in obj.distribution.cmdclass.iteritems():
if obj.__class__ == klass:
return name
raise Exception("No such command class")
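# e.g. when called from within picard_build_ui, this returns "build_ui" --
# the key under which that class is registered in the cmdclass dict below.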
class picard_update_constants(Command):
description = "Regenerate attributes.py and countries.py"
user_options = [
('skip-pull', None, "skip the tx pull steps"),
]
boolean_options = ['skip-pull']
def initialize_options(self):
self.skip_pull = None
def finalize_options(self):
self.locales = self.distribution.locales
def run(self):
if tx_executable is None:
sys.exit('Transifex client executable (tx) not found.')
from babel.messages import pofile
if not self.skip_pull:
txpull_cmd = [
tx_executable,
'pull',
'--force',
'--resource=musicbrainz.attributes,musicbrainz.countries',
'--source',
'--language=none',
]
self.spawn(txpull_cmd)
countries = dict()
countries_potfile = os.path.join('po', 'countries', 'countries.pot')
isocode_comment = u'iso.code:'
with open(countries_potfile, 'rb') as f:
log.info('Parsing %s' % countries_potfile)
po = pofile.read_po(f)
for message in po:
if not message.id or not isinstance(message.id, unicode):
continue
for comment in message.auto_comments:
if comment.startswith(isocode_comment):
code = comment.replace(isocode_comment, u'')
countries[code] = message.id
if countries:
self.countries_py_file(countries)
else:
            sys.exit('Failed to extract any country code/name!')
attributes = dict()
attributes_potfile = os.path.join('po', 'attributes', 'attributes.pot')
extract_attributes = (
u'DB:cover_art_archive.art_type/name',
u'DB:medium_format/name',
u'DB:release_group_primary_type/name',
u'DB:release_group_secondary_type/name',
)
with open(attributes_potfile, 'rb') as f:
log.info('Parsing %s' % attributes_potfile)
po = pofile.read_po(f)
for message in po:<|fim▁hole|> for loc, pos in message.locations:
if loc in extract_attributes:
attributes[u"%s:%03d" % (loc, pos)] = message.id
if attributes:
self.attributes_py_file(attributes)
else:
            sys.exit('Failed to extract any attribute!')
def countries_py_file(self, countries):
header = (u"# -*- coding: utf-8 -*-\n"
u"# Automatically generated - don't edit.\n"
u"# Use `python setup.py {option}` to update it.\n"
u"\n"
u"RELEASE_COUNTRIES = {{\n")
line = u" u'{code}': u'{name}',\n"
footer = u"}}\n"
filename = os.path.join('picard', 'const', 'countries.py')
with open(filename, 'w') as countries_py:
def write_utf8(s, **kwargs):
countries_py.write(s.format(**kwargs).encode('utf-8'))
write_utf8(header, option=_get_option_name(self))
for code, name in sorted(countries.items(), key=lambda t: t[0]):
write_utf8(line, code=code, name=name.replace("'", "\\'"))
write_utf8(footer)
log.info("%s was rewritten (%d countries)" % (filename,
len(countries)))
def attributes_py_file(self, attributes):
header = (u"# -*- coding: utf-8 -*-\n"
u"# Automatically generated - don't edit.\n"
u"# Use `python setup.py {option}` to update it.\n"
u"\n"
u"MB_ATTRIBUTES = {{\n")
line = u" u'{key}': u'{value}',\n"
footer = u"}}\n"
filename = os.path.join('picard', 'const', 'attributes.py')
with open(filename, 'w') as attributes_py:
def write_utf8(s, **kwargs):
attributes_py.write(s.format(**kwargs).encode('utf-8'))
write_utf8(header, option=_get_option_name(self))
for key, value in sorted(attributes.items(), key=lambda i: i[0]):
write_utf8(line, key=key, value=value.replace("'", "\\'"))
write_utf8(footer)
log.info("%s was rewritten (%d attributes)" % (filename,
len(attributes)))
class picard_patch_version(Command):
description = "Update PICARD_BUILD_VERSION_STR for daily builds"
user_options = [
('platform=', 'p', "platform for the build version, ie. osx or win"),
]
def initialize_options(self):
self.platform = 'unknown'
def finalize_options(self):
pass
def run(self):
self.patch_version('picard/__init__.py')
def patch_version(self, filename):
regex = re.compile(r'^PICARD_BUILD_VERSION_STR\s*=.*$', re.MULTILINE)
with open(filename, 'r+b') as f:
source = f.read()
build = self.platform + '_' + datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
patched_source = regex.sub('PICARD_BUILD_VERSION_STR = "%s"' % build, source)
f.seek(0)
f.write(patched_source)
f.truncate()
def cflags_to_include_dirs(cflags):
cflags = cflags.split()
include_dirs = []
for cflag in cflags:
if cflag.startswith('-I'):
include_dirs.append(cflag[2:])
return include_dirs
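# e.g. cflags_to_include_dirs("-I/usr/include/foo -O2 -I/usr/include/bar")
# -> ['/usr/include/foo', '/usr/include/bar']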
def _picard_get_locale_files():
locales = []
path_domain = {
'po': 'picard',
os.path.join('po', 'countries'): 'picard-countries',
os.path.join('po', 'attributes'): 'picard-attributes',
}
for path, domain in path_domain.items():
for filepath in glob.glob(os.path.join(path, '*.po')):
filename = os.path.basename(filepath)
locale = os.path.splitext(filename)[0]
locales.append((domain, locale, filepath))
return locales
def _explode_path(path):
"""Return a list of components of the path (ie. "/a/b" -> ["a", "b"])"""
components = []
while True:
        (path, tail) = os.path.split(path)
if tail == "":
components.reverse()
return components
components.append(tail)
def _picard_packages():
"Build a tuple containing each module under picard/"
packages = []
for subdir, dirs, files in os.walk("picard"):
packages.append(".".join(_explode_path(subdir)))
return tuple(sorted(packages))
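# e.g. ('picard', 'picard.const', 'picard.ui', ...) for a typical source tree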
args2 = {
'name': 'picard',
'version': __version__,
'description': 'The next generation MusicBrainz tagger',
'url': 'http://musicbrainz.org/doc/MusicBrainz_Picard',
'package_dir': {'picard': 'picard'},
'packages': _picard_packages(),
'locales': _picard_get_locale_files(),
'ext_modules': ext_modules,
'data_files': [],
'cmdclass': {
'test': picard_test,
'build': picard_build,
'build_locales': picard_build_locales,
'build_ui': picard_build_ui,
'clean_ui': picard_clean_ui,
'install': picard_install,
'install_locales': picard_install_locales,
'update_constants': picard_update_constants,
'get_po_files': picard_get_po_files,
'regen_pot_file': picard_regen_pot_file,
'patch_version': picard_patch_version,
},
'scripts': ['scripts/picard'],
}
args.update(args2)
def generate_file(infilename, outfilename, variables):
with open(infilename, "rt") as f_in:
with open(outfilename, "wt") as f_out:
f_out.write(f_in.read() % variables)
def contrib_plugin_files():
plugin_files = {}
dist_root = os.path.join("contrib", "plugins")
for root, dirs, files in os.walk(dist_root):
file_root = os.path.join('plugins', os.path.relpath(root, dist_root)) \
if root != dist_root else 'plugins'
for file in files:
if file.endswith(".py"):
if file_root in plugin_files:
plugin_files[file_root].append(os.path.join(root, file))
else:
plugin_files[file_root] = [os.path.join(root, file)]
data_files = [(x, sorted(y)) for x, y in plugin_files.iteritems()]
return sorted(data_files, key=lambda x: x[0])
try:
from py2exe.build_exe import py2exe
class bdist_nsis(py2exe):
def run(self):
generate_file('scripts/picard.py2exe.in', 'scripts/picard', {})
self.distribution.data_files.append(
("", ["discid.dll", "fpcalc.exe", "msvcr90.dll", "msvcp90.dll"]))
for locale in self.distribution.locales:
self.distribution.data_files.append(
("locale/" + locale[1] + "/LC_MESSAGES",
["build/locale/" + locale[1] + "/LC_MESSAGES/" + locale[0] + ".mo"]))
self.distribution.data_files.append(
("imageformats", [find_file_in_path("PyQt4/plugins/imageformats/qgif4.dll"),
find_file_in_path("PyQt4/plugins/imageformats/qjpeg4.dll"),
find_file_in_path("PyQt4/plugins/imageformats/qtiff4.dll")]))
self.distribution.data_files.append(
("accessible", [find_file_in_path("PyQt4/plugins/accessible/qtaccessiblewidgets4.dll")]))
self.distribution.data_files += contrib_plugin_files()
py2exe.run(self)
print("*** creating the NSIS setup script ***")
            pathname = "installer\\picard-setup.nsi"
generate_file(pathname + ".in", pathname,
{'name': 'MusicBrainz Picard',
'version': __version__,
'description': 'The next generation MusicBrainz tagger.',
'url': 'http://musicbrainz.org/doc/MusicBrainz_Picard', })
print("*** compiling the NSIS setup script ***")
subprocess.call([self.find_nsis(), pathname])
def find_nsis(self):
import _winreg
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "Software\\NSIS") as reg_key:
nsis_path = _winreg.QueryValueEx(reg_key, "")[0]
return os.path.join(nsis_path, "makensis.exe")
args['cmdclass']['bdist_nsis'] = bdist_nsis
args['windows'] = [{
'script': 'scripts/picard',
'icon_resources': [(1, 'picard.ico')],
}]
args['options'] = {
'bdist_nsis': {
'includes': ['json', 'sip'] + [e.name for e in ext_modules],
'excludes': exclude_modules + py2exe_exclude_modules,
'optimize': 2,
},
}
except ImportError:
py2exe = None
def find_file_in_path(filename):
for include_path in sys.path:
file_path = os.path.join(include_path, filename)
if os.path.exists(file_path):
return file_path
if do_py2app:
from py2app.util import copy_file, find_app
from PyQt4 import QtCore
class BuildAPP(py2app):
def run(self):
py2app.run(self)
# XXX Find and bundle fpcalc, since py2app can't.
fpcalc = find_app("fpcalc")
if fpcalc:
dest_fpcalc = os.path.abspath("dist/MusicBrainz Picard.app/Contents/MacOS/fpcalc")
copy_file(fpcalc, dest_fpcalc)
os.chmod(dest_fpcalc, 0o755)
args['scripts'] = ['tagger.py']
args['cmdclass']['py2app'] = BuildAPP
# FIXME: this should check for the actual command ('install' vs. 'bdist_nsis', 'py2app', ...), not installed libraries
if py2exe is None and do_py2app is False:
args['data_files'].append(('share/icons/hicolor/16x16/apps', ['resources/images/16x16/picard.png']))
args['data_files'].append(('share/icons/hicolor/24x24/apps', ['resources/images/24x24/picard.png']))
args['data_files'].append(('share/icons/hicolor/32x32/apps', ['resources/images/32x32/picard.png']))
args['data_files'].append(('share/icons/hicolor/48x48/apps', ['resources/images/48x48/picard.png']))
args['data_files'].append(('share/icons/hicolor/128x128/apps', ['resources/images/128x128/picard.png']))
args['data_files'].append(('share/icons/hicolor/256x256/apps', ['resources/images/256x256/picard.png']))
args['data_files'].append(('share/applications', ('picard.desktop',)))
setup(**args)<|fim▁end|> | if not message.id or not isinstance(message.id, unicode):
continue |
<|file_name|>interfaces.go<|end_file_name|><|fim▁begin|>package batchaiapi
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/azure-sdk-for-go/services/batchai/mgmt/2018-05-01/batchai"
)
// OperationsClientAPI contains the set of methods on the OperationsClient type.
type OperationsClientAPI interface {
List(ctx context.Context) (result batchai.OperationListResultPage, err error)
}
var _ OperationsClientAPI = (*batchai.OperationsClient)(nil)
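// The blank-identifier assignment above (repeated for each interface below)
// is a compile-time assertion that the concrete batchai client satisfies
// the interface.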
// UsagesClientAPI contains the set of methods on the UsagesClient type.
type UsagesClientAPI interface {
List(ctx context.Context, location string) (result batchai.ListUsagesResultPage, err error)
}
var _ UsagesClientAPI = (*batchai.UsagesClient)(nil)
// WorkspacesClientAPI contains the set of methods on the WorkspacesClient type.
type WorkspacesClientAPI interface {
Create(ctx context.Context, resourceGroupName string, workspaceName string, parameters batchai.WorkspaceCreateParameters) (result batchai.WorkspacesCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, workspaceName string) (result batchai.WorkspacesDeleteFuture, err error)
Get(ctx context.Context, resourceGroupName string, workspaceName string) (result batchai.Workspace, err error)
List(ctx context.Context, maxResults *int32) (result batchai.WorkspaceListResultPage, err error)
ListByResourceGroup(ctx context.Context, resourceGroupName string, maxResults *int32) (result batchai.WorkspaceListResultPage, err error)
Update(ctx context.Context, resourceGroupName string, workspaceName string, parameters batchai.WorkspaceUpdateParameters) (result batchai.Workspace, err error)
}
var _ WorkspacesClientAPI = (*batchai.WorkspacesClient)(nil)
// ExperimentsClientAPI contains the set of methods on the ExperimentsClient type.
type ExperimentsClientAPI interface {
Create(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string) (result batchai.ExperimentsCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string) (result batchai.ExperimentsDeleteFuture, err error)
Get(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string) (result batchai.Experiment, err error)
ListByWorkspace(ctx context.Context, resourceGroupName string, workspaceName string, maxResults *int32) (result batchai.ExperimentListResultPage, err error)
}<|fim▁hole|>
var _ ExperimentsClientAPI = (*batchai.ExperimentsClient)(nil)
// JobsClientAPI contains the set of methods on the JobsClient type.
type JobsClientAPI interface {
Create(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string, parameters batchai.JobCreateParameters) (result batchai.JobsCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string) (result batchai.JobsDeleteFuture, err error)
Get(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string) (result batchai.Job, err error)
ListByExperiment(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, maxResults *int32) (result batchai.JobListResultPage, err error)
ListOutputFiles(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string, outputdirectoryid string, directory string, linkexpiryinminutes *int32, maxResults *int32) (result batchai.FileListResultPage, err error)
ListRemoteLoginInformation(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string) (result batchai.RemoteLoginInformationListResultPage, err error)
Terminate(ctx context.Context, resourceGroupName string, workspaceName string, experimentName string, jobName string) (result batchai.JobsTerminateFuture, err error)
}
var _ JobsClientAPI = (*batchai.JobsClient)(nil)
// FileServersClientAPI contains the set of methods on the FileServersClient type.
type FileServersClientAPI interface {
Create(ctx context.Context, resourceGroupName string, workspaceName string, fileServerName string, parameters batchai.FileServerCreateParameters) (result batchai.FileServersCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, workspaceName string, fileServerName string) (result batchai.FileServersDeleteFuture, err error)
Get(ctx context.Context, resourceGroupName string, workspaceName string, fileServerName string) (result batchai.FileServer, err error)
ListByWorkspace(ctx context.Context, resourceGroupName string, workspaceName string, maxResults *int32) (result batchai.FileServerListResultPage, err error)
}
var _ FileServersClientAPI = (*batchai.FileServersClient)(nil)
// ClustersClientAPI contains the set of methods on the ClustersClient type.
type ClustersClientAPI interface {
Create(ctx context.Context, resourceGroupName string, workspaceName string, clusterName string, parameters batchai.ClusterCreateParameters) (result batchai.ClustersCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, workspaceName string, clusterName string) (result batchai.ClustersDeleteFuture, err error)
Get(ctx context.Context, resourceGroupName string, workspaceName string, clusterName string) (result batchai.Cluster, err error)
ListByWorkspace(ctx context.Context, resourceGroupName string, workspaceName string, maxResults *int32) (result batchai.ClusterListResultPage, err error)
ListRemoteLoginInformation(ctx context.Context, resourceGroupName string, workspaceName string, clusterName string) (result batchai.RemoteLoginInformationListResultPage, err error)
Update(ctx context.Context, resourceGroupName string, workspaceName string, clusterName string, parameters batchai.ClusterUpdateParameters) (result batchai.Cluster, err error)
}
var _ ClustersClientAPI = (*batchai.ClustersClient)(nil)<|fim▁end|> | |
<|file_name|>let_statement.rs<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::ast::LetStatement;
use crate::lexer::TokenType;
use crate::parser::ParseError;
use crate::parser::Parser;
fn is_assign_operator(token_type: TokenType) -> bool {
match token_type {
TokenType::Assign => true,
TokenType::PlusAssign => true,
TokenType::MinusAssign => true,
TokenType::MultiplyAssign => true,
TokenType::DivideAssign => true,
TokenType::ModuloAssign => true,
TokenType::DotAssign => true,
_ => false,
}
}
// Let = 'let' VarName = Expression (NewLine | EOF)
pub fn parse(parser: &mut Parser) -> Option<LetStatement> {
// TODO: This is not really correct, as only some expressions like ident, array subscript and
// array are supported here.
let var = parser.parse_expression()?;
let operator = parser.peek_token();
if !is_assign_operator(operator.token_type) {
parser.errors.push(ParseError {
message: format!(
"expected assign operator, found {}",
parser.token_text(&operator)
),
position: parser.l.token_position(&operator.location),
});
parser.consume_until_end_of_statement();
// TODO: error
return None;
}
parser.advance();
let expr = parser.parse_expression()?;
parser.expect_end_of_statement()?;
return Some(LetStatement {
var: Box::new(var),
operator: operator.token_type,
value: Box::new(expr),
});
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ast::StmtKind;
use crate::lexer::Lexer;
use crate::lexer::SourcePosition;
use crate::lexer::TokenPosition;
use crate::parser::ParseError;
use pretty_assertions::assert_eq;
use serde_json::json;
#[test]
fn parses_simple_let_statement() {
let mut parser = Parser::new(Lexer::new("let l:var = 15"));
let program = parser.parse();
assert_eq!(parser.errors, &[]);
assert_eq!(program.statements.len(), 1);
assert_eq!(
program.dump_for_testing(),
json!([{
"let": {
"var": {"identifier": "l:var"},
"operator": "`=`",
"value": {
"number": 15.0,
},
},
}])
);
}
#[test]
fn parses_let_statement_with_different_operators() {
let mut parser = Parser::new(Lexer::new("let l:var += 15"));
let program = parser.parse();
assert_eq!(parser.errors, &[]);
assert_eq!(program.statements.len(), 1);
assert_eq!(
program.dump_for_testing(),
json!([{
"let": {
"var": {"identifier": "l:var"},
"operator": "`+=`",
"value": {
"number": 15.0,
},
},
}])
);
}
#[test]<|fim▁hole|> let program = parser.parse();
assert_eq!(parser.errors, &[]);
assert_eq!(program.statements.len(), 1);
let let_stmt = match &program.statements[0].kind {
StmtKind::Let(stmt) => stmt,
stmt => panic!(format!("expected let statement, got {:?}", stmt)),
};
// assert_eq!(let_stmt.name(), "l:var");
assert_eq!(let_stmt.value.to_string(), "15");
// assert_eq!(
// parser
// .resolve_location(let_stmt.name_location().clone())
// .to_string(),
// "0:4-0:9"
// );
}
#[test]
fn returns_error_when_let_statement_is_missing_assign() {
let mut parser = Parser::new(Lexer::new("let l:var ! 15"));
parser.parse();
assert_eq!(
parser.errors,
&[ParseError {
message: "expected assign operator, found `!`".to_string(),
position: TokenPosition {
start: SourcePosition {
line: 0,
character: 10,
},
end: SourcePosition {
line: 0,
character: 11,
},
}
}]
);
}
#[test]
fn returns_error_when_let_statement_ends_after_identifier() {
let mut parser = Parser::new(Lexer::new("let l:var\nlet l:var = 15"));
let program = parser.parse();
assert_eq!(
parser.errors,
&[ParseError {
message: "expected assign operator, found new line".to_string(),
position: TokenPosition {
start: SourcePosition {
line: 0,
character: 9,
},
end: SourcePosition {
line: 1,
character: 0,
},
}
}]
);
assert_eq!(program.statements.len(), 1);
let let_stmt = match &program.statements[0].kind {
StmtKind::Let(stmt) => stmt,
stmt => panic!(format!("expected let statement, got {:?}", stmt)),
};
// assert_eq!(let_stmt.name(), "l:var");
assert_eq!(let_stmt.value.to_string(), "15");
}
#[test]
fn returns_error_when_let_statement_is_missing_identifier() {
let mut parser = Parser::new(Lexer::new("let\n"));
parser.parse();
let error_messages: Vec<String> =
parser.errors.into_iter().map(|err| err.message).collect();
// TODO: should be `expected variable`
assert_eq!(error_messages, &["expected expression, found new line"],);
}
#[test]
fn returns_error_when_let_statement_has_more_tokens_after_expression() {
let mut parser = Parser::new(Lexer::new("let a = 'b' a\n"));
parser.parse();
let error_messages: Vec<String> =
parser.errors.into_iter().map(|err| err.message).collect();
assert_eq!(error_messages, &["expected new line, found `a`"],);
}
#[test]
fn returns_error_when_expression_is_invalid() {
let mut parser = Parser::new(Lexer::new("let a = 'b' .\n"));
parser.parse();
let error_messages: Vec<String> =
parser.errors.into_iter().map(|err| err.message).collect();
assert_eq!(error_messages, &["expected expression, found new line"],);
}
#[test]
fn returns_error_when_let_statement_expression_() {
let mut parser = Parser::new(Lexer::new("let a = 'b' a\n"));
parser.parse();
let error_messages: Vec<String> =
parser.errors.into_iter().map(|err| err.message).collect();
assert_eq!(error_messages, &["expected new line, found `a`"],);
}
}<|fim▁end|> | fn parses_let_statement_with_number_expression() {
let mut parser = Parser::new(Lexer::new("let l:var = 15")); |
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>//! AWS API requests.
//!
//! Wraps the Hyper library to send PUT, POST, DELETE and GET requests.
extern crate lazy_static;
use std::env;
use std::io::Read;
use std::io::Error as IoError;
use std::error::Error;
use std::fmt;
use std::collections::HashMap;
use hyper::Client;
use hyper::Error as HyperError;
use hyper::header::Headers;
use hyper::header::UserAgent;
use hyper::method::Method;
use log::LogLevel::Debug;
use signature::SignedRequest;
// Pulls in the statically generated rustc version.
include!(concat!(env!("OUT_DIR"), "/user_agent_vars.rs"));
// Use a lazy static to cache the default User-Agent header
// because it never changes once it's been computed.
lazy_static! {
static ref DEFAULT_USER_AGENT: Vec<Vec<u8>> = vec![format!("rusoto/{} rust/{} {}",
env!("CARGO_PKG_VERSION"), RUST_VERSION, env::consts::OS).as_bytes().to_vec()];
}
#[derive(Clone, Default)]
pub struct HttpResponse {
pub status: u16,
pub body: String,
pub headers: HashMap<String, String>
}
#[derive(Debug, PartialEq)]
pub struct HttpDispatchError {
message: String
}
impl Error for HttpDispatchError {
fn description(&self) -> &str {
&self.message
}
}
impl fmt::Display for HttpDispatchError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.message)
}
}
impl From<HyperError> for HttpDispatchError {
fn from(err: HyperError) -> HttpDispatchError {
HttpDispatchError { message: err.description().to_string() }
}
}
impl From<IoError> for HttpDispatchError {
fn from(err: IoError) -> HttpDispatchError {
HttpDispatchError { message: err.description().to_string() }
}
}
pub trait DispatchSignedRequest {
fn dispatch(&self, request: &SignedRequest) -> Result<HttpResponse, HttpDispatchError>;
}
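// Usage sketch (hypothetical; assumes a `SignedRequest` that has already
// been built and signed elsewhere in the crate):
//
//     let client = Client::new();
//     let response = try!(client.dispatch(&signed_request));
//     assert_eq!(response.status, 200);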
impl DispatchSignedRequest for Client {
fn dispatch(&self, request: &SignedRequest) -> Result<HttpResponse, HttpDispatchError> {
let hyper_method = match request.method().as_ref() {
"POST" => Method::Post,
"PUT" => Method::Put,
"DELETE" => Method::Delete,
"GET" => Method::Get,
"HEAD" => Method::Head,
v => return Err(HttpDispatchError { message: format!("Unsupported HTTP verb {}", v) })
};
// translate the headers map to a format Hyper likes
let mut hyper_headers = Headers::new();
for h in request.headers().iter() {
hyper_headers.set_raw(h.0.to_owned(), h.1.to_owned());
}
// Add a default user-agent header if one is not already present.
if !hyper_headers.has::<UserAgent>() {
hyper_headers.set_raw("user-agent".to_owned(), DEFAULT_USER_AGENT.clone());
}
let mut final_uri = format!("https://{}{}", request.hostname(), request.canonical_path());
if !request.canonical_query_string().is_empty() {
final_uri = final_uri + &format!("?{}", request.canonical_query_string());<|fim▁hole|> let mut payload_string = String::new();
payload_bytes.read_to_string(&mut payload_string)
.map(|_| payload_string)
.unwrap_or_else(|_| String::from("<non-UTF-8 data>"))
});
debug!("Full request: \n method: {}\n final_uri: {}\n payload: {}\nHeaders:\n", hyper_method, final_uri, payload.unwrap_or("".to_owned()));
for h in hyper_headers.iter() {
debug!("{}:{}", h.name(), h.value_string());
}
}
let mut hyper_response = match request.payload() {
None => try!(self.request(hyper_method, &final_uri).headers(hyper_headers).body("").send()),
Some(payload_contents) => try!(self.request(hyper_method, &final_uri).headers(hyper_headers).body(payload_contents).send()),
};
let mut body = String::new();
try!(hyper_response.read_to_string(&mut body));
if log_enabled!(Debug) {
debug!("Response body:\n{}", body);
}
let mut headers: HashMap<String, String> = HashMap::new();
for header in hyper_response.headers.iter() {
headers.insert(header.name().to_string(), header.value_string());
}
Ok(HttpResponse {
status: hyper_response.status.to_u16(),
body: body,
headers: headers
})
}
}<|fim▁end|> | }
if log_enabled!(Debug) {
let payload = request.payload().map(|mut payload_bytes| { |
<|file_name|>issue-27949.rs<|end_file_name|><|fim▁begin|>// run-pass
//
// At one time, the `==` operator (and other binary operators) did not
// support subtyping during type checking, and would therefore require
// LHS and RHS to be exactly identical--i.e. to have the same lifetimes.
//
// This was fixed in 1a7fb7dc78439a704f024609ce3dc0beb1386552.
#[derive(Copy, Clone)]
struct Input<'a> {
foo: &'a u32
}
impl <'a> std::cmp::PartialEq<Input<'a>> for Input<'a> {
fn eq(&self, other: &Input<'a>) -> bool {
self.foo == other.foo
}<|fim▁hole|> fn ne(&self, other: &Input<'a>) -> bool {
self.foo != other.foo
}
}
fn check_equal<'a, 'b>(x: Input<'a>, y: Input<'b>) -> bool {
// Type checking error due to 'a != 'b prior to 1a7fb7dc78
x == y
}
fn main() {
let i = 1u32;
let j = 1u32;
let k = 2u32;
let input_i = Input { foo: &i };
let input_j = Input { foo: &j };
let input_k = Input { foo: &k };
assert!(check_equal(input_i, input_i));
assert!(check_equal(input_i, input_j));
assert!(!check_equal(input_i, input_k));
}<|fim▁end|> | |
<|file_name|>dropletConfig.js<|end_file_name|><|fim▁begin|>var msg = require('./locale');
var api = require('./apiJavascript.js');
var paramLists = require('./paramLists.js');
module.exports.blocks = [
{func: 'setDroid', parent: api, category: '', params: ['"R2-D2"'], dropdown: { 0: ['"random"', '"R2-D2"', '"C-3PO"'] } },
{func: 'setDroidSpeed', parent: api, category: '', params: ['"fast"'], dropdown: { 0: ['"random"', '"slow"', '"normal"', '"fast"'] } },
{func: 'setBackground', parent: api, category: '', params: ['"Hoth"'], dropdown: { 0: ['"random"', '"Endor"', '"Hoth"', '"Starship"'] } },
{func: 'setMap', parent: api, category: '', params: ['"blank"'], dropdown: { 0: ['"random"', '"blank"', '"circle"', '"horizontal"', '"grid"', '"blobs"'] } },
{func: 'moveRight', parent: api, category: '', },
{func: 'moveLeft', parent: api, category: '', },
{func: 'moveUp', parent: api, category: '', },
{func: 'moveDown', parent: api, category: '', },
{func: 'goRight', parent: api, category: '', },
{func: 'goLeft', parent: api, category: '', },
{func: 'goUp', parent: api, category: '', },
{func: 'goDown', parent: api, category: '', },
{func: 'playSound', parent: api, category: '', params: ['"R2-D2sound1"'], dropdown: { 0: paramLists.playSoundDropdown } },
{func: 'endGame', parent: api, category: '', params: ['"win"'], dropdown: { 0: ['"win"', '"lose"'] } },
{func: 'addPoints', parent: api, category: '', params: ["100"] },
{func: 'removePoints', parent: api, category: '', params: ["100"] },
{func: 'addCharacter', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] } },
{func: 'moveFast', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] } },
{func: 'moveNormal', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] } },
{func: 'moveSlow', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] } },
{func: 'whenLeft', block: 'function whenLeft() {}', expansion: 'function whenLeft() {\n __;\n}', category: '' },
{func: 'whenRight', block: 'function whenRight() {}', expansion: 'function whenRight() {\n __;\n}', category: '' },
{func: 'whenUp', block: 'function whenUp() {}', expansion: 'function whenUp() {\n __;\n}', category: '' },
{func: 'whenDown', block: 'function whenDown() {}', expansion: 'function whenDown() {\n __;\n}', category: '' },
{func: 'whenTouchObstacle', block: 'function whenTouchObstacle() {}', expansion: 'function whenTouchObstacle() {\n __;\n}', category: '' },
{func: 'whenGetCharacter', block: 'function whenGetCharacter() {}', expansion: 'function whenGetCharacter() {\n __;\n}', category: '' },
{func: 'whenGetStormtrooper', block: 'function whenGetStormtrooper() {}', expansion: 'function whenGetStormtrooper() {\n __;\n}', category: '' },
{func: 'whenGetRebelPilot', block: 'function whenGetRebelPilot() {}', expansion: 'function whenGetRebelPilot() {\n __;\n}', category: '' },
{func: 'whenGetPufferPig', block: 'function whenGetPufferPig() {}', expansion: 'function whenGetPufferPig() {\n __;\n}', category: '' },
{func: 'whenGetMynock', block: 'function whenGetMynock() {}', expansion: 'function whenGetMynock() {\n __;\n}', category: '' },
{func: 'whenGetMouseDroid', block: 'function whenGetMouseDroid() {}', expansion: 'function whenGetMouseDroid() {\n __;\n}', category: '' },
{func: 'whenGetTauntaun', block: 'function whenGetTauntaun() {}', expansion: 'function whenGetTauntaun() {\n __;\n}', category: '' },
{func: 'whenGetProbot', block: 'function whenGetProbot() {}', expansion: 'function whenGetProbot() {\n __;\n}', category: '' },
{func: 'whenGetAllCharacters', block: 'function whenGetAllCharacters() {}', expansion: 'function whenGetAllCharacters() {\n __;\n}', category: '' },
{func: 'whenGetAllStormtroopers', block: 'function whenGetAllStormtroopers() {}', expansion: 'function whenGetAllStormtroopers() {\n __;\n}', category: '' },
{func: 'whenGetAllRebelPilots', block: 'function whenGetAllRebelPilots() {}', expansion: 'function whenGetAllRebelPilots() {\n __;\n}', category: '' },
{func: 'whenGetAllPufferPigs', block: 'function whenGetAllPufferPigs() {}', expansion: 'function whenGetAllPufferPigs() {\n __;\n}', category: '' },
{func: 'whenGetAllMynocks', block: 'function whenGetAllMynocks() {}', expansion: 'function whenGetAllMynocks() {\n __;\n}', category: '' },
{func: 'whenGetAllMouseDroids', block: 'function whenGetAllMouseDroids() {}', expansion: 'function whenGetAllMouseDroids() {\n __;\n}', category: '' },
{func: 'whenGetAllTauntauns', block: 'function whenGetAllTauntauns() {}', expansion: 'function whenGetAllTauntauns() {\n __;\n}', category: '' },
{func: 'whenGetAllProbots', block: 'function whenGetAllProbots() {}', expansion: 'function whenGetAllProbots() {\n __;\n}', category: '' },
// Functions hidden from autocomplete - not used in hoc2015:
{func: 'whenTouchStormtrooper', block: 'function whenTouchStormtrooper() {}', expansion: 'function whenTouchStormtrooper() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchRebelPilot', block: 'function whenTouchRebelPilot() {}', expansion: 'function whenTouchRebelPilot() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchPufferPig', block: 'function whenTouchPufferPig() {}', expansion: 'function whenTouchPufferPig() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchMynock', block: 'function whenTouchMynock() {}', expansion: 'function whenTouchMynock() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchMouseDroid', block: 'function whenTouchMouseDroid() {}', expansion: 'function whenTouchMouseDroid() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchTauntaun', block: 'function whenTouchTauntaun() {}', expansion: 'function whenTouchTauntaun() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchProbot', block: 'function whenTouchProbot() {}', expansion: 'function whenTouchProbot() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchCharacter', block: 'function whenTouchCharacter() {}', expansion: 'function whenTouchCharacter() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'changeScore', parent: api, category: '', params: ["1"], noAutocomplete: true },
{func: 'whenTouchGoal', block: 'function whenTouchGoal() {}', expansion: 'function whenTouchGoal() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenTouchAllGoals', block: 'function whenTouchAllGoals() {}', expansion: 'function whenTouchAllGoals() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'whenScore1000', block: 'function whenScore1000() {}', expansion: 'function whenScore1000() {\n __;\n}', category: '', noAutocomplete: true },
{func: 'setToChase', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] }, noAutocomplete: true },
{func: 'setToFlee', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] }, noAutocomplete: true },
{func: 'setToRoam', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] }, noAutocomplete: true },
{func: 'setToStop', parent: api, category: '', params: ['"PufferPig"'], dropdown: { 0: ['"random"', '"Stormtrooper"', '"RebelPilot"', '"PufferPig"', '"Mynock"', '"MouseDroid"', '"Tauntaun"', '"Probot"'] }, noAutocomplete: true },
{func: 'setSprite', parent: api, category: '', params: ['0', '"R2-D2"'], dropdown: { 1: ['"random"', '"R2-D2"', '"C-3PO"'] }, noAutocomplete: true },<|fim▁hole|> {func: 'setSpriteSpeed', parent: api, category: '', params: ["0", "8"], noAutocomplete: true },
{func: 'setSpriteEmotion', parent: api, category: '', params: ["0", "1"], noAutocomplete: true },
{func: 'setSpriteSize', parent: api, category: '', params: ["0", "1.0"], noAutocomplete: true },
{func: 'throwProjectile', parent: api, category: '', params: ["0", "1", '"blue_fireball"'], noAutocomplete: true },
{func: 'vanish', parent: api, category: '', params: ["0"], noAutocomplete: true },
{func: 'move', parent: api, category: '', params: ["0", "1"], noAutocomplete: true },
{func: 'showDebugInfo', parent: api, category: '', params: ["false"], noAutocomplete: true },
{func: 'onEvent', parent: api, category: '', params: ["'when-left'", "function() {\n \n}"], noAutocomplete: true },
];
module.exports.categories = {
'': {
color: 'red',
blocks: []
},
'Play Lab': {
color: 'red',
blocks: []
},
Commands: {
color: 'red',
blocks: []
},
Events: {
color: 'green',
blocks: []
},
};
module.exports.autocompleteFunctionsWithParens = true;
module.exports.showParamDropdowns = true;<|fim▁end|> | {func: 'setSpritePosition', parent: api, category: '', params: ["0", "7"], noAutocomplete: true }, |
<|file_name|>decode_codepoint.d.ts<|end_file_name|><|fim▁begin|>export default function decodeCodePoint(codePoint: number): string;<|fim▁hole|><|fim▁end|> | //# sourceMappingURL=decode_codepoint.d.ts.map |
<|file_name|>queued.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use capnp::{any_pointer};
use capnp::Error;
use capnp::capability::Promise;
use capnp::private::capability::{ClientHook, ParamsHook, PipelineHook, PipelineOp,
ResultsHook};
use futures::Future;
use std::cell::RefCell;
use std::rc::{Rc, Weak};
use {broken, local};
use attach::Attach;
use forked_promise::ForkedPromise;
use sender_queue::SenderQueue;
pub struct PipelineInner {
// Once the promise resolves, this will become non-null and point to the underlying object.
redirect: Option<Box<PipelineHook>>,
promise_to_drive: ForkedPromise<Promise<(), Error>>,
clients_to_resolve: SenderQueue<(Weak<RefCell<ClientInner>>, Vec<PipelineOp>), ()>,
}
impl PipelineInner {
fn resolve(this: &Rc<RefCell<PipelineInner>>, result: Result<Box<PipelineHook>, Error>) {
assert!(this.borrow().redirect.is_none());
let pipeline = match result {
Ok(pipeline_hook) => pipeline_hook,
Err(e) => Box::new(broken::Pipeline::new(e)),
};
this.borrow_mut().redirect = Some(pipeline.add_ref());
for ((weak_client, ops), waiter) in this.borrow_mut().clients_to_resolve.drain() {
if let Some(client) = weak_client.upgrade() {
let clienthook = pipeline.get_pipelined_cap_move(ops);
ClientInner::resolve(&client, Ok(clienthook));
}
let _ = waiter.send(());
}
this.borrow_mut().promise_to_drive = ForkedPromise::new(Promise::ok(()));
}
}
pub struct PipelineInnerSender {
inner: Option<Weak<RefCell<PipelineInner>>>,
}
impl Drop for PipelineInnerSender {
fn drop(&mut self) {
if let Some(weak_queued) = self.inner.take() {
if let Some(pipeline_inner) = weak_queued.upgrade() {
PipelineInner::resolve(
&pipeline_inner,
Ok(Box::new(
::broken::Pipeline::new(Error::failed("PipelineInnerSender was canceled".into())))));
}
}
}
}
impl PipelineInnerSender {
pub fn complete(mut self, pipeline: Box<PipelineHook>) {
if let Some(weak_queued) = self.inner.take() {
if let Some(pipeline_inner) = weak_queued.upgrade() {
::queued::PipelineInner::resolve(&pipeline_inner, Ok(pipeline));
}
}
}
}
pub struct Pipeline {
inner: Rc<RefCell<PipelineInner>>,
}
impl Pipeline {
pub fn new() -> (PipelineInnerSender, Pipeline) {
let inner = Rc::new(RefCell::new(PipelineInner {
redirect: None,
promise_to_drive: ForkedPromise::new(Promise::ok(())),
clients_to_resolve: SenderQueue::new(),
}));
(PipelineInnerSender { inner: Some(Rc::downgrade(&inner)) }, Pipeline { inner: inner })
}
pub fn drive<F>(&mut self, promise: F)
where F: Future<Item=(), Error=Error> + 'static
{
let new = ForkedPromise::new(
Promise::from_future(self.inner.borrow_mut().promise_to_drive.clone().join(promise).map(|_|())));
self.inner.borrow_mut().promise_to_drive = new;
}
}
impl Clone for Pipeline {
fn clone(&self) -> Pipeline {
Pipeline { inner: self.inner.clone() }
}
}
impl PipelineHook for Pipeline {
fn add_ref(&self) -> Box<PipelineHook> {
Box::new(self.clone())
}
fn get_pipelined_cap(&self, ops: &[PipelineOp]) -> Box<ClientHook> {
self.get_pipelined_cap_move(ops.into())
}
fn get_pipelined_cap_move(&self, ops: Vec<PipelineOp>) -> Box<ClientHook> {
if let Some(ref p) = self.inner.borrow().redirect {
return p.get_pipelined_cap_move(ops)
}
let mut queued_client = Client::new(Some(self.inner.clone()));
queued_client.drive(self.inner.borrow().promise_to_drive.clone());
let weak_queued = Rc::downgrade(&queued_client.inner);
self.inner.borrow_mut().clients_to_resolve.push_detach((weak_queued, ops));
Box::new(queued_client)
}
}
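// Typical flow (a sketch): create a queued pipeline, hand out pipelined caps
// immediately, then complete the pipeline once the real answer arrives; any
// calls queued on those caps are then forwarded.
//
//     let (sender, pipeline) = Pipeline::new();
//     let cap = pipeline.get_pipelined_cap_move(vec![]); // usable right away
//     sender.complete(real_pipeline); // real_pipeline: Box<PipelineHook>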
pub struct ClientInner {
// Once the promise resolves, this will become non-null and point to the underlying object.
redirect: Option<Box<ClientHook>>,
// The queued::PipelineInner that this client is derived from, if any. We need to hold on
// to a reference to it so that it doesn't get canceled before the client is resolved.
pipeline_inner: Option<Rc<RefCell<PipelineInner>>>,
promise_to_drive: Option<ForkedPromise<Promise<(), Error>>>,
// When this promise resolves, each queued call will be forwarded to the real client. This needs
// to occur *before* any 'whenMoreResolved()' promises resolve, because we want to make sure
// previously-queued calls are delivered before any new calls made in response to the resolution.
call_forwarding_queue: SenderQueue<(u64, u16, Box<ParamsHook>, Box<ResultsHook>),
(Promise<(), Error>)>,
// whenMoreResolved() returns forks of this promise. These must resolve *after* queued calls
// have been initiated (so that any calls made in the whenMoreResolved() handler are correctly
// delivered after calls made earlier), but *before* any queued calls return (because it might
// confuse the application if a queued call returns before the capability on which it was made
// resolves). Luckily, we know that queued calls will involve, at the very least, an
// eventLoop.evalLater.
client_resolution_queue: SenderQueue<(), Box<ClientHook>>,
}
impl ClientInner {
pub fn resolve(state: &Rc<RefCell<ClientInner>>, result: Result<Box<ClientHook>, Error>) {
assert!(state.borrow().redirect.is_none());
let client = match result {
Ok(clienthook) => clienthook,
Err(e) => broken::new_cap(e),
};
state.borrow_mut().redirect = Some(client.add_ref());
for (args, waiter) in state.borrow_mut().call_forwarding_queue.drain() {
let (interface_id, method_id, params, results) = args;
let result_promise = client.call(interface_id, method_id, params, results);
let _ = waiter.send(result_promise);
}
for ((), waiter) in state.borrow_mut().client_resolution_queue.drain() {
let _ = waiter.send(client.add_ref());
}
state.borrow_mut().promise_to_drive.take();
state.borrow_mut().pipeline_inner.take();
}
}
pub struct Client {
pub inner: Rc<RefCell<ClientInner>>,
}
impl Client {
pub fn new(pipeline_inner: Option<Rc<RefCell<PipelineInner>>>) -> Client
{
let inner = Rc::new(RefCell::new(ClientInner {
promise_to_drive: None,
pipeline_inner: pipeline_inner,
redirect: None,
call_forwarding_queue: SenderQueue::new(),
client_resolution_queue: SenderQueue::new(),
}));
Client {
inner: inner
}
}
pub fn drive<F>(&mut self, promise: F)
where F: Future<Item=(), Error=Error> + 'static
{
assert!(self.inner.borrow().promise_to_drive.is_none());
self.inner.borrow_mut().promise_to_drive = Some(ForkedPromise::new(Promise::from_future(promise)));
}
}
impl ClientHook for Client {
fn add_ref(&self) -> Box<ClientHook> {
Box::new(Client {inner: self.inner.clone()})
}<|fim▁hole|> size_hint: Option<::capnp::MessageSize>)
-> ::capnp::capability::Request<any_pointer::Owned, any_pointer::Owned>
{
::capnp::capability::Request::new(
Box::new(local::Request::new(interface_id, method_id, size_hint, self.add_ref())))
}
fn call(&self, interface_id: u64, method_id: u16, params: Box<ParamsHook>, results: Box<ResultsHook>)
-> Promise<(), Error>
{
if let Some(ref client) = self.inner.borrow().redirect {
return client.call(interface_id, method_id, params, results)
}
let inner_clone = self.inner.clone();
let promise = self.inner.borrow_mut().call_forwarding_queue.push(
(interface_id, method_id, params, results)).attach(inner_clone).flatten();
match self.inner.borrow().promise_to_drive {
Some(ref p) => Promise::from_future(p.clone().join(promise).map(|v| v.1)),
None => Promise::from_future(promise),
}
}
fn get_ptr(&self) -> usize {
(&*self.inner.borrow()) as * const _ as usize
}
fn get_brand(&self) -> usize {
0
}
fn get_resolved(&self) -> Option<Box<ClientHook>> {
match self.inner.borrow().redirect {
Some(ref inner) => {
Some(inner.clone())
}
None => {
None
}
}
}
fn when_more_resolved(&self) -> Option<Promise<Box<ClientHook>, Error>> {
if let Some(ref client) = self.inner.borrow().redirect {
return Some(Promise::ok(client.add_ref()));
}
let promise = self.inner.borrow_mut().client_resolution_queue.push(());
match self.inner.borrow().promise_to_drive {
Some(ref p) => Some(Promise::from_future(p.clone().join(promise).map(|v| v.1))),
None => Some(Promise::from_future(promise)),
}
}
}<|fim▁end|> | fn new_call(&self, interface_id: u64, method_id: u16, |
<|file_name|>subst.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
pub use self::ParamSpace::*;
pub use self::RegionSubsts::*;
use middle::ty::{self, Ty};
use middle::ty_fold::{self, TypeFoldable, TypeFolder};
use util::ppaux::Repr;
use std::fmt;
use std::iter::IntoIterator;
use std::slice::Iter;
use std::vec::{Vec, IntoIter};
use syntax::codemap::{Span, DUMMY_SP};
///////////////////////////////////////////////////////////////////////////
/// A substitution mapping type/region parameters to new values. We
/// identify each in-scope parameter by an *index* and a *parameter
/// space* (which indicates where the parameter is defined; see
/// `ParamSpace`).
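///
/// For example (a sketch): for a method `fn bar<U>(..)` defined inside
/// `impl<T> Foo<T>`, a fully substituted instance carries the value for
/// `T` in `TypeSpace` and the value for `U` in `FnSpace`.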
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Substs<'tcx> {
pub types: VecPerParamSpace<Ty<'tcx>>,
pub regions: RegionSubsts,
}
/// Represents the values to use when substituting lifetime parameters.
/// If the value is `ErasedRegions`, then this subst is occurring during
/// trans, and all region parameters will be replaced with `ty::ReStatic`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum RegionSubsts {
ErasedRegions,
NonerasedRegions(VecPerParamSpace<ty::Region>)
}
impl<'tcx> Substs<'tcx> {
pub fn new(t: VecPerParamSpace<Ty<'tcx>>,
r: VecPerParamSpace<ty::Region>)
-> Substs<'tcx>
{
Substs { types: t, regions: NonerasedRegions(r) }
}
pub fn new_type(t: Vec<Ty<'tcx>>,
r: Vec<ty::Region>)
-> Substs<'tcx>
{
Substs::new(VecPerParamSpace::new(t, Vec::new(), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn new_trait(t: Vec<Ty<'tcx>>,
r: Vec<ty::Region>,
s: Ty<'tcx>)
-> Substs<'tcx>
{
Substs::new(VecPerParamSpace::new(t, vec!(s), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn erased(t: VecPerParamSpace<Ty<'tcx>>) -> Substs<'tcx>
{
Substs { types: t, regions: ErasedRegions }
}
pub fn empty() -> Substs<'tcx> {
Substs {
types: VecPerParamSpace::empty(),
regions: NonerasedRegions(VecPerParamSpace::empty()),
}
}
pub fn trans_empty() -> Substs<'tcx> {
Substs {
types: VecPerParamSpace::empty(),
regions: ErasedRegions
}
}
pub fn is_noop(&self) -> bool {
let regions_is_noop = match self.regions {
ErasedRegions => false, // may be used to canonicalize
NonerasedRegions(ref regions) => regions.is_empty(),
};
regions_is_noop && self.types.is_empty()
}
pub fn type_for_def(&self, ty_param_def: &ty::TypeParameterDef) -> Ty<'tcx> {
*self.types.get(ty_param_def.space, ty_param_def.index as uint)
}
pub fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.types.iter().any(|&t| ty::type_escapes_depth(t, depth)) || {
match self.regions {
ErasedRegions =>
false,
NonerasedRegions(ref regions) =>
regions.iter().any(|r| r.escapes_depth(depth)),
}
}
}
pub fn self_ty(&self) -> Option<Ty<'tcx>> {
self.types.get_self().map(|&t| t)
}
pub fn with_self_ty(&self, self_ty: Ty<'tcx>) -> Substs<'tcx> {
assert!(self.self_ty().is_none());
let mut s = (*self).clone();
s.types.push(SelfSpace, self_ty);
s
}
pub fn erase_regions(self) -> Substs<'tcx> {
let Substs { types, regions: _ } = self;
Substs { types: types, regions: ErasedRegions }
}
/// Since ErasedRegions are only to be used in trans, most of the compiler can use this method
/// to easily access the set of region substitutions.
pub fn regions<'a>(&'a self) -> &'a VecPerParamSpace<ty::Region> {
match self.regions {
ErasedRegions => panic!("Erased regions only expected in trans"),
NonerasedRegions(ref r) => r
}
}
/// Since ErasedRegions are only to be used in trans, most of the compiler can use this method
/// to easily access the set of region substitutions.
pub fn mut_regions<'a>(&'a mut self) -> &'a mut VecPerParamSpace<ty::Region> {
match self.regions {
ErasedRegions => panic!("Erased regions only expected in trans"),
NonerasedRegions(ref mut r) => r
}
}
pub fn with_method(self,
m_types: Vec<Ty<'tcx>>,
m_regions: Vec<ty::Region>)
-> Substs<'tcx>
{
let Substs { types, regions } = self;
let types = types.with_vec(FnSpace, m_types);
let regions = regions.map(m_regions,
|r, m_regions| r.with_vec(FnSpace, m_regions));
Substs { types: types, regions: regions }
}
}
impl RegionSubsts {
fn map<A, F>(self, a: A, op: F) -> RegionSubsts where
F: FnOnce(VecPerParamSpace<ty::Region>, A) -> VecPerParamSpace<ty::Region>,
{
match self {
ErasedRegions => ErasedRegions,
NonerasedRegions(r) => NonerasedRegions(op(r, a))
}
}
pub fn is_erased(&self) -> bool {
match *self {
ErasedRegions => true,
NonerasedRegions(_) => false,
}
}
}
///////////////////////////////////////////////////////////////////////////
// ParamSpace
#[derive(PartialOrd, Ord, PartialEq, Eq, Copy,
Clone, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum ParamSpace {
TypeSpace, // Type parameters attached to a type definition, trait, or impl
SelfSpace, // Self parameter on a trait
FnSpace, // Type parameters attached to a method or fn
}
impl ParamSpace {
pub fn all() -> [ParamSpace; 3] {
[TypeSpace, SelfSpace, FnSpace]
}
pub fn to_uint(self) -> uint {
match self {
TypeSpace => 0,
SelfSpace => 1,
FnSpace => 2,
}
}
pub fn from_uint(u: uint) -> ParamSpace {
match u {
0 => TypeSpace,
1 => SelfSpace,
2 => FnSpace,
_ => panic!("Invalid ParamSpace: {}", u)
}
}
}
/// Vector of things sorted by param space. Used to keep
/// the set of things declared on the type, self, or method
/// distinct.
#[derive(PartialEq, Eq, Clone, Hash, RustcEncodable, RustcDecodable)]
pub struct VecPerParamSpace<T> {
// This was originally represented as a tuple with one Vec<T> for
// each variant of ParamSpace, and that remains the abstraction
// that it provides to its clients.
//
// Here is how the representation corresponds to the abstraction
// i.e. the "abstraction function" AF:
//
// AF(self) = (self.content[..self.type_limit],
// self.content[self.type_limit..self.self_limit],
// self.content[self.self_limit..])
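    //
    // e.g. for types = [a, b], selfs = [s], fns = [f] this stores
    // content = [a, b, s, f], type_limit = 2, self_limit = 3.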
type_limit: uint,
self_limit: uint,
content: Vec<T>,
}
/// The `split` function converts one `VecPerParamSpace` into this
/// `SeparateVecsPerParamSpace` structure.
pub struct SeparateVecsPerParamSpace<T> {
pub types: Vec<T>,
pub selfs: Vec<T>,
pub fns: Vec<T>,
}
impl<T: fmt::Debug> fmt::Debug for VecPerParamSpace<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "VecPerParamSpace {{"));
for space in &ParamSpace::all() {
try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space)));
}
try!(write!(fmt, "}}"));
Ok(())
}
}
impl<T> VecPerParamSpace<T> {
fn limits(&self, space: ParamSpace) -> (uint, uint) {
match space {
TypeSpace => (0, self.type_limit),
SelfSpace => (self.type_limit, self.self_limit),
FnSpace => (self.self_limit, self.content.len()),
}
}
pub fn empty() -> VecPerParamSpace<T> {
VecPerParamSpace {
type_limit: 0,
self_limit: 0,
content: Vec::new()
}
}
pub fn params_from_type(types: Vec<T>) -> VecPerParamSpace<T> {
VecPerParamSpace::empty().with_vec(TypeSpace, types)
}
/// `t` is the type space.
/// `s` is the self space.
/// `f` is the fn space.
pub fn new(t: Vec<T>, s: Vec<T>, f: Vec<T>) -> VecPerParamSpace<T> {
let type_limit = t.len();
let self_limit = type_limit + s.len();
let mut content = t;
content.extend(s.into_iter());
content.extend(f.into_iter());
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
fn new_internal(content: Vec<T>, type_limit: uint, self_limit: uint)
-> VecPerParamSpace<T>
{
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
/// Appends `value` to the vector associated with `space`.
///
/// Unlike the `push` method in `Vec`, this should not be assumed
/// to be a cheap operation (even when amortized over many calls).
pub fn push(&mut self, space: ParamSpace, value: T) {
let (_, limit) = self.limits(space);
match space {
TypeSpace => { self.type_limit += 1; self.self_limit += 1; }
SelfSpace => { self.self_limit += 1; }
FnSpace => { }
}
self.content.insert(limit, value);
}
/// Appends `values` to the vector associated with `space`.
///
/// Unlike the `extend` method in `Vec`, this should not be assumed
/// to be a cheap operation (even when amortized over many calls).
pub fn extend<I:Iterator<Item=T>>(&mut self, space: ParamSpace, values: I) {
// This could be made more efficient, obviously.
for item in values {
self.push(space, item);
}
}
pub fn pop(&mut self, space: ParamSpace) -> Option<T> {
let (start, limit) = self.limits(space);
if start == limit {
None
} else {
match space {
TypeSpace => { self.type_limit -= 1; self.self_limit -= 1; }
SelfSpace => { self.self_limit -= 1; }
FnSpace => {}
}
if self.content.is_empty() {
None
} else {
Some(self.content.remove(limit - 1))
}
}
}
pub fn truncate(&mut self, space: ParamSpace, len: uint) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
while self.len(space) > len {
self.pop(space);
}
}
pub fn replace(&mut self, space: ParamSpace, elems: Vec<T>) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
self.truncate(space, 0);
for t in elems {
self.push(space, t);
}
}
pub fn get_self<'a>(&'a self) -> Option<&'a T> {
let v = self.get_slice(SelfSpace);
assert!(v.len() <= 1);
if v.len() == 0 { None } else { Some(&v[0]) }
}
pub fn len(&self, space: ParamSpace) -> uint {
self.get_slice(space).len()
}
pub fn is_empty_in(&self, space: ParamSpace) -> bool {
self.len(space) == 0
}
pub fn get_slice<'a>(&'a self, space: ParamSpace) -> &'a [T] {
let (start, limit) = self.limits(space);
&self.content[start.. limit]
}
pub fn get_mut_slice<'a>(&'a mut self, space: ParamSpace) -> &'a mut [T] {
let (start, limit) = self.limits(space);
&mut self.content[start.. limit]
}
pub fn opt_get<'a>(&'a self,
space: ParamSpace,
index: uint)
-> Option<&'a T> {
let v = self.get_slice(space);
if index < v.len() { Some(&v[index]) } else { None }
}
pub fn get<'a>(&'a self, space: ParamSpace, index: uint) -> &'a T {
&self.get_slice(space)[index]
}
pub fn iter<'a>(&'a self) -> Iter<'a,T> {
self.content.iter()
}
pub fn into_iter(self) -> IntoIter<T> {
self.content.into_iter()
}
pub fn iter_enumerated<'a>(&'a self) -> EnumeratedItems<'a,T> {
EnumeratedItems::new(self)
}
pub fn as_slice(&self) -> &[T] {
&self.content
}
pub fn into_vec(self) -> Vec<T> {
self.content
}
pub fn all_vecs<P>(&self, mut pred: P) -> bool where
P: FnMut(&[T]) -> bool,
{
let spaces = [TypeSpace, SelfSpace, FnSpace];
spaces.iter().all(|&space| { pred(self.get_slice(space)) })
}
pub fn all<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool {
self.iter().all(pred)
}
pub fn any<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool {
self.iter().any(pred)
}
pub fn is_empty(&self) -> bool {
self.all_vecs(|v| v.is_empty())
}
pub fn map<U, P>(&self, pred: P) -> VecPerParamSpace<U> where P: FnMut(&T) -> U {
let result = self.iter().map(pred).collect();
VecPerParamSpace::new_internal(result,
self.type_limit,
self.self_limit)
}
pub fn map_enumerated<U, P>(&self, pred: P) -> VecPerParamSpace<U> where
P: FnMut((ParamSpace, uint, &T)) -> U,
{
let result = self.iter_enumerated().map(pred).collect();
VecPerParamSpace::new_internal(result,
self.type_limit,
self.self_limit)
}
pub fn map_move<U, F>(self, mut pred: F) -> VecPerParamSpace<U> where
F: FnMut(T) -> U,
{
let SeparateVecsPerParamSpace {
types: t,
selfs: s,
fns: f
} = self.split();
VecPerParamSpace::new(t.into_iter().map(|p| pred(p)).collect(),
s.into_iter().map(|p| pred(p)).collect(),
f.into_iter().map(|p| pred(p)).collect())
}
pub fn split(self) -> SeparateVecsPerParamSpace<T> {
let VecPerParamSpace { type_limit, self_limit, content } = self;
let mut content_iter = content.into_iter();
SeparateVecsPerParamSpace {
types: content_iter.by_ref().take(type_limit).collect(),
selfs: content_iter.by_ref().take(self_limit - type_limit).collect(),
fns: content_iter.collect()
}
}
pub fn with_vec(mut self, space: ParamSpace, vec: Vec<T>)
-> VecPerParamSpace<T>
{
assert!(self.is_empty_in(space));
self.replace(space, vec);
self
}
}
#[derive(Clone)]
pub struct EnumeratedItems<'a,T:'a> {
vec: &'a VecPerParamSpace<T>,
space_index: uint,
elem_index: uint
}
impl<'a,T> EnumeratedItems<'a,T> {
fn new(v: &'a VecPerParamSpace<T>) -> EnumeratedItems<'a,T> {
let mut result = EnumeratedItems { vec: v, space_index: 0, elem_index: 0 };
result.adjust_space();
result
}
fn adjust_space(&mut self) {
let spaces = ParamSpace::all();
while
self.space_index < spaces.len() &&
self.elem_index >= self.vec.len(spaces[self.space_index])
{
self.space_index += 1;
self.elem_index = 0;
}
}
}
impl<'a,T> Iterator for EnumeratedItems<'a,T> {
type Item = (ParamSpace, uint, &'a T);
fn next(&mut self) -> Option<(ParamSpace, uint, &'a T)> {
let spaces = ParamSpace::all();
if self.space_index < spaces.len() {
let space = spaces[self.space_index];
let index = self.elem_index;
let item = self.vec.get(space, index);
self.elem_index += 1;
self.adjust_space();
Some((space, index, item))
} else {
None
}
}
}
impl<T> IntoIterator for VecPerParamSpace<T> {
type IntoIter = IntoIter<T>;
fn into_iter(self) -> IntoIter<T> {
self.into_vec().into_iter()
}
}
impl<'a,T> IntoIterator for &'a VecPerParamSpace<T> {
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.as_slice().into_iter()
}
}
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`. Or use `foo.subst_spanned(tcx, substs, Some(span))` when
// there is more information available (for better errors).
pub trait Subst<'tcx> : Sized {
fn subst(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>) -> Self {
self.subst_spanned(tcx, substs, None)
}
fn subst_spanned(&self, tcx: &ty::ctxt<'tcx>,
substs: &Substs<'tcx>,
span: Option<Span>)
-> Self;
}
impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T {
fn subst_spanned(&self,
tcx: &ty::ctxt<'tcx>,
substs: &Substs<'tcx>,
span: Option<Span>)
-> T
{
let mut folder = SubstFolder { tcx: tcx,
substs: substs,
span: span,
root_ty: None,
ty_stack_depth: 0,
region_binders_passed: 0 };
(*self).fold_with(&mut folder)
}
}
///////////////////////////////////////////////////////////////////////////
// The actual substitution engine itself is a type folder.
struct SubstFolder<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
substs: &'a Substs<'tcx>,
// The location for which the substitution is performed, if available.
span: Option<Span>,
<|fim▁hole|> ty_stack_depth: uint,
// Number of region binders we have passed through while doing the substitution
region_binders_passed: u32,
}
impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> { self.tcx }
fn enter_region_binder(&mut self) {
self.region_binders_passed += 1;
}
fn exit_region_binder(&mut self) {
self.region_binders_passed -= 1;
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
// Note: This routine only handles regions that are bound on
// type declarations and other outer declarations, not those
// bound in *fn types*. Region substitution of the bound
// regions that appear in a function signature is done using
// the specialized routine `ty::replace_late_regions()`.
match r {
ty::ReEarlyBound(_, space, i, region_name) => {
match self.substs.regions {
ErasedRegions => ty::ReStatic,
NonerasedRegions(ref regions) =>
match regions.opt_get(space, i as uint) {
Some(&r) => {
self.shift_region_through_binders(r)
}
None => {
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
&format!("Type parameter out of range \
when substituting in region {} (root type={}) \
(space={:?}, index={})",
region_name.as_str(),
self.root_ty.repr(self.tcx()),
space, i)[]);
}
}
}
}
_ => r
}
}
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
if !ty::type_needs_subst(t) {
return t;
}
// track the root type we were asked to substitute
let depth = self.ty_stack_depth;
if depth == 0 {
self.root_ty = Some(t);
}
self.ty_stack_depth += 1;
let t1 = match t.sty {
ty::ty_param(p) => {
self.ty_for_param(p, t)
}
_ => {
ty_fold::super_fold_ty(self, t)
}
};
assert_eq!(depth + 1, self.ty_stack_depth);
self.ty_stack_depth -= 1;
if depth == 0 {
self.root_ty = None;
}
return t1;
}
}
impl<'a,'tcx> SubstFolder<'a,'tcx> {
fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> {
// Look up the type in the substitutions. It really should be in there.
let opt_ty = self.substs.types.opt_get(p.space, p.idx as uint);
let ty = match opt_ty {
Some(t) => *t,
None => {
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
&format!("Type parameter `{}` ({}/{:?}/{}) out of range \
when substituting (root type={}) substs={}",
p.repr(self.tcx()),
source_ty.repr(self.tcx()),
p.space,
p.idx,
self.root_ty.repr(self.tcx()),
self.substs.repr(self.tcx()))[]);
}
};
self.shift_regions_through_binders(ty)
}
/// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs
/// when we are substituting a type with escaping regions into a context where we have passed
/// through region binders. That's quite a mouthful. Let's see an example:
///
/// ```
/// type Func<A> = fn(A);
/// type MetaFunc = for<'a> fn(Func<&'a int>)
/// ```
///
/// The type `MetaFunc`, when fully expanded, will be
///
/// for<'a> fn(fn(&'a int))
/// ^~ ^~ ^~~
/// | | |
/// | | DebruijnIndex of 2
/// Binders
///
/// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the
/// inner one. Therefore, that appearance will have a DebruijnIndex of 2, because we must skip
/// over the inner binder (remember that we count Debruijn indices from 1). However, in the
/// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a
/// debruijn index of 1. It's only during the substitution that we can see we must increase the
/// depth by 1 to account for the binder that we passed through.
///
/// As a second example, consider this twist:
///
/// ```
/// type FuncTuple<A> = (A,fn(A));
/// type MetaFuncTuple = for<'a> fn(FuncTuple<&'a int>)
/// ```
///
/// Here the final type will be:
///
/// for<'a> fn((&'a int, fn(&'a int)))
/// ^~~ ^~~
/// | |
/// DebruijnIndex of 1 |
/// DebruijnIndex of 2
///
/// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the
/// first case we do not increase the Debruijn index and in the second case we do. The reason
/// is that only in the second case have we passed through a fn binder.
fn shift_regions_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
debug!("shift_regions(ty={:?}, region_binders_passed={:?}, type_has_escaping_regions={:?})",
ty.repr(self.tcx()), self.region_binders_passed, ty::type_has_escaping_regions(ty));
if self.region_binders_passed == 0 || !ty::type_has_escaping_regions(ty) {
return ty;
}
let result = ty_fold::shift_regions(self.tcx(), self.region_binders_passed, &ty);
debug!("shift_regions: shifted result = {:?}", result.repr(self.tcx()));
result
}
fn shift_region_through_binders(&self, region: ty::Region) -> ty::Region {
ty_fold::shift_region(region, self.region_binders_passed)
}
}<|fim▁end|> | // The root type that is being substituted, if available.
root_ty: Option<Ty<'tcx>>,
// Depth of type stack |
<|file_name|>test_db.py<|end_file_name|><|fim▁begin|>"""
The following tests check that db connections work properly.
Make sure the default configurations match your connection to the database<|fim▁hole|>
from StreamingSQL.db import create_connection, execute_command
from StreamingSQL.fonts import Colors, Formats
"""Default configuration to connect to the DB"""
host = 'localhost'
port = 3306
usr = 'root'
paswd = ''
db = 'test'
def test_default_create_connection():
"""
Test the connection to database
Assert:
Connection occurs
"""
cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
assert type(cur) == pymysql.cursors.Cursor
def test_wrong_host_fail_create_connection():
"""
Test that error is properly returned when there is an incorrect host
Assert:
Proper error is returned/formatted
"""
error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host[:-3]
cur = create_connection(host=host[:-3], port=port, user=usr, password=paswd, db=db)
try:
assert cur == error
except AssertionError:
pass
def test_wrong_port_fail_create_connection():
"""
Test that error is properly returned when there is an incorrect port number
Assert:
Proper error is returned/formatted
"""
error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
cur = create_connection(host=host, port=port + 13, user=usr, password=paswd, db=db)
try:
assert cur == error
except AssertionError:
pass
def test_wrong_user_fail_create_connection():
"""
Test that error is properly returned when there is an incorrect user
Assert:
Proper error is returned/formatted
"""
error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
cur = create_connection(host=host, port=port, user='', password=paswd, db=db)
try:
assert cur == error
except AssertionError:
pass
def test_wrong_passwd_fail_create_connection():
"""
Test that error is properly returned when there is an incorrect password
Assert:
Proper error is returned/formatted
"""
error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
cur = create_connection(host=host, port=port, user=usr, password=usr, db=db)
try:
assert cur == error
except AssertionError:
pass
def test_execute_command():
"""
Execute "SELECT 1;"
Assert:
A result of 1 is returned
"""
cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
assert type(cur) == pymysql.cursors.Cursor
stmt = "SELECT 1"
result = execute_command(cur, stmt)
assert result[0][0] == 1
def test_syntax_fail_execute_command():
"""
Execute "SLCT 1;"
Assert:
An error message is returned
"""
stmt = "SLCT 1"
error = ("1064: You have an error in your SQL syntax; check the manual that corresponds to your MariaDB server "+
"version for the right syntax to use near '%s' at line 1")
error = Formats.BOLD + Colors.RED + "Connection Error - " + error % stmt + Formats.END + Colors.END
cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
assert type(cur) == pymysql.cursors.Cursor
result = execute_command(cur, stmt)
try:
assert result == error
except AssertionError:
pass
def test_new_db_create_connection():
"""
Create a connection to a new database
Assert:
New database is created/removed
"""
db="db2"
cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
assert type(cur) == pymysql.cursors.Cursor
stmt = "SELECT `SCHEMA_NAME` from `INFORMATION_SCHEMA`.`SCHEMATA` WHERE `SCHEMA_NAME` LIKE '%s';" % db
result = execute_command(cur, stmt)
assert result[0][0] == db
stmt = "FLUSH TABLES; DROP DATABASE IF EXISTS %s;" % db
result = execute_command(cur, stmt)
assert result == ()<|fim▁end|> | """
import pymysql
import warnings
warnings.filterwarnings("ignore") |
<|file_name|>ViewModelGlideRequest.java<|end_file_name|><|fim▁begin|>package com.ov3rk1ll.kinocast.ui.util.glide;
import com.ov3rk1ll.kinocast.data.ViewModel;
<|fim▁hole|> private String type;
public ViewModelGlideRequest(ViewModel viewModel, int screenWidthPx, String type) {
this.viewModel = viewModel;
this.screenWidthPx = screenWidthPx;
this.type = type;
}
ViewModel getViewModel() {
return viewModel;
}
int getScreenWidthPx() {
return screenWidthPx;
}
public String getType() {
return type;
}
}<|fim▁end|> | public class ViewModelGlideRequest {
private ViewModel viewModel;
private int screenWidthPx; |
<|file_name|>sandbox_mixture_slf.py<|end_file_name|><|fim▁begin|>import numpy as np
from snob import mixture_slf as slf
import matplotlib.pyplot as plt  # used by the plotting helpers below
n_samples, n_features, n_clusters, rank = 1000, 50, 6, 1
sigma = 0.5
true_homo_specific_variances = sigma**2 * np.ones((1, n_features))
rng = np.random.RandomState(321)
U, _, _ = np.linalg.svd(rng.randn(n_features, n_features))
true_factor_loads = U[:, :rank].T
true_factor_scores = rng.randn(n_samples, rank)
X = np.dot(true_factor_scores, true_factor_loads)
# Assign objects to different clusters.
indices = rng.randint(0, n_clusters, size=n_samples)
true_weights = np.zeros(n_clusters)<|fim▁hole|> true_weights[index] = (indices==index).sum()
true_weights = true_weights/n_samples
# Adding homoscedastic noise
bar = rng.randn(n_samples, n_features)
X_homo = X + sigma * bar
# Adding heteroscedastic noise
sigmas = sigma * rng.rand(n_features) + sigma / 2.
X_hetero = X + rng.randn(n_samples, n_features) * sigmas
true_hetero_specific_variances = sigmas**2
data = X_hetero
model = slf.SLFGMM(n_clusters)
model.fit(data)
def scatter_common(x, y, title=None):
fig, ax = plt.subplots()
ax.scatter(x,y)
ax.set_title(title or "")
limits = np.array([ax.get_xlim(), ax.get_ylim()])
limits = (limits.min(), limits.max())
ax.plot(limits, limits, c="#666666", linestyle=":", linewidth=0.5, zorder=-1)
ax.set_xlim(limits)
ax.set_ylim(limits)
return fig
scatter_common(true_factor_loads, model.factor_loads, "factor loads")
scatter_common(true_factor_scores, model.factor_scores, "factor scores")
scatter_common(true_homo_specific_variances, model.specific_variances, "specific variances")
# means
# This one is tricky because the indices are not necessarily the same.
# So just take whichever is closest.
idx = np.zeros(n_clusters, dtype=int)
for index, true_mean in enumerate(true_means):
distance = np.sum(np.abs(model._means - true_mean), axis=1) \
+ np.abs(model.weights.flatten()[index] - true_weights)
idx[index] = np.argmin(distance)
assert len(idx) == len(set(idx))
true = true_means.flatten()
inferred = model._means[idx].flatten()
scatter_common(true, inferred, "means")
# Plot some data...
fig, ax = plt.subplots()
ax.scatter(data[:, 0], data[:, 1], facecolor="g")
raise a
# factor scores
ax = axes[1]
true = true_factor_scores.flatten()
inferred = model._factor_scores.flatten()
ax.scatter(true, inferred)
# factor loads
ax = axes[2]
true = true_factor_loads.flatten()
inferred = model._factor_loads.flatten()
ax.scatter(true, inferred)
raise a
true = np.hstack([each.flatten() for each in (true_means, true_factor_scores, true_factor_loads, true_specific_variances)])
inferred = np.hstack([each.flatten() for each in (model.means, model.factor_scores, model.factor_loads, model.specific_variances)])
fig, ax = plt.subplots()
ax.scatter(true, inferred, alpha=0.5)
raise a<|fim▁end|> | true_means = rng.randn(n_clusters, n_features)
for index in range(n_clusters):
X[indices==index] += true_means[index] |
<|file_name|>plugin.js<|end_file_name|><|fim▁begin|>/**
 * Copyright (c) 2008- Samuli Järvelä
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html. If redistributing this code,
* this entire header must remain intact.
*/
mollify.registerPlugin(new ArchiverPlugin());
function ArchiverPlugin() {
var that = this;
this.getPluginInfo = function() { return { id: "plugin_archiver" }; }
this.initialize = function(env) {
that.env = env;
that.env.addItemContextProvider(that.getItemContext);
$.getScript(that.env.pluginUrl("Archiver") + "client/texts_" + that.env.texts().locale + ".js");
}
this.getItemContext = function(item, details) {
if (!details["plugin_archiver"] || !details["plugin_archiver"]["action_extract"]) return null;
var extractServiceUrl = details["plugin_archiver"]["action_extract"];
return {
actions : {
secondary: [
{ title: "-" },
{
title: that.env.texts().get("plugin_archiver_extractAction"),
callback: function(item) { that.onExtract(extractServiceUrl, false); }
}
]
}
}
}
this.onExtract = function(url, allowOverwrite) {
var wd = that.env.dialog().showWait(that.env.texts().get("pleaseWait"));
var params = { overwrite: allowOverwrite };
that.env.service().post(url, params,
function(result) {
wd.close();
that.env.fileview().refresh();
},
function(code, error) {<|fim▁hole|> title: that.env.texts().get("plugin_archiver_extractFolderAlreadyExistsTitle"),
message: that.env.texts().get("plugin_archiver_extractFolderAlreadyExistsMessage"),
on_confirm: function() { that.onExtract(url, true); }
});
return;
}
alert("Extract error: "+code+"/"+error);
}
);
}
}<|fim▁end|> | wd.close();
if (code == 205) {
that.env.dialog().showConfirmation({ |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> 10
}<|fim▁end|> | #[no_mangle]
pub extern fn rust_hello_world() -> i32 {
println!("Hello, I'm in Rust code! I'm about to return 10."); |
<|file_name|>14balt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Find characters deep in the expanded string, for fun.
"""
import sys
from collections import Counter
def real_step(s, rules):
out = ""
for i in range(len(s)):
out += s[i]
k = s[i:i+2]
if k in rules:
out += rules[k]
return out
def step(cnt, rules):
ncnt = Counter()
for k, v in cnt.items():
if k in rules:
c = rules[k]
ncnt[k[0] + c] += v
ncnt[c + k[1]] += v
else:
ncnt[k] += v
return ncnt
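# Illustrative example (hypothetical rules): with rules = {"AB": "C"},
# step(Counter({"AB": 2}), rules) == Counter({"AC": 2, "CB": 2}),
# because each "AB" pair expands to "ACB", i.e. the pairs "AC" and "CB".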
def size(s, n, rules):
cnt = Counter(s[i:i+2] for i in range(len(s)-1))
for _ in range(n):
cnt = step(cnt, rules)
lcnt = Counter(s[0])
for k, v in cnt.items():
lcnt[k[1]] += v
return sum(lcnt.values())
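# Sanity check (illustrative): size("NNCB", 0, {}) counts the second letter
# of each pair {"NN", "NC", "CB"} plus the leading "N", giving 4 == len("NNCB").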
def get_char(s, idx, iters, rules):
for i in range(iters):
h = len(s) // 2
first = s[:h+1]
sz = size(first, iters - i, rules)
if idx < sz:
s = real_step(first, rules)<|fim▁hole|> return s[idx]
def main(args):
data = [s.strip() for s in sys.stdin]
s = data[0]
rules = dict(x.split(" -> ") for x in data[2:])
# Make sure it works
t = s
for i in range(4):
t = real_step(t, rules)
for idx in range(len(t)):
c = get_char(s, idx, 4, rules)
assert t[idx] == c
# find some random characters deep into it
print(size(s, 40, rules))
start = 7311752324710
out = ""
for i in range(10):
out += get_char(s, start + i, 40, rules)
print(out)
if __name__ == '__main__':
main(sys.argv)<|fim▁end|> | else:
s = real_step(s[h:], rules)
idx -= sz - 1
|
<|file_name|>sendkeys_test.py<|end_file_name|><|fim▁begin|>import os
import sys
import random
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
repo_root = os.path.abspath(os.path.join(__file__, "../../.."))
sys.path.insert(1, os.path.join(repo_root, "tools", "webdriver"))
from webdriver import exceptions
class SendKeysTest(base_test.WebDriverBaseTest):
def setUp(self):
self.driver.get(self.webserver.where_is("user_input/res/text-form.html"))
def test_send_simple_string(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("lorem ipsum")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"lorem ipsum")
def test_send_return(self):
element = self.driver.find_element_by_id("Text1")
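        # 0xE006 is the WebDriver key codepoint for Return (Selenium's Keys.RETURN).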
returnkey = unichr(int("E006", 16))
element.send_keys([returnkey])
self.assertEquals(u"" + self.driver.get_current_url(), u"" + self.webserver.where_is("user_input/res/text-form-landing.html?e=mc2"))
def test_send_backspace(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("world ")
element.send_keys("wide ")
element.send_keys("web ")
element.send_keys("consortium")
backspace= unichr(int("E003", 16))
for i in range(0, 11):
element.send_keys([backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"world wide web")
def test_send_tab(self):
element1 = self.driver.find_element_by_id("Text1")
element2 = self.driver.find_element_by_id("Text2")
element1.send_keys("typing here")
tab= unichr(int("E004", 16))
element1.send_keys([tab])
output = self.driver.find_element_by_id("output")
tab_pressed = output.get_attribute("checked")
self.assertEquals(tab_pressed, u"true")
def test_send_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("low ")
shift= unichr(int("E008", 16))
element.send_keys([shift , "u", "p", shift])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"low UP")
def test_send_arrow_keys(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("internet")
backspace= unichr(int("E003", 16))
left= unichr(int("E012", 16))
right= unichr(int("E014", 16))
for i in range(0, 4):
element.send_keys([left])
element.send_keys([backspace])
element.send_keys([right])
element.send_keys("a")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"intranet")
def test_select_text_with_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("WebDriver")
backspace= unichr(int("E003", 16))
shift= unichr(int("E008", 16))
left= unichr(int("E012", 16))
element.send_keys([shift, left, left, left, left, left, left, backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"Web")
<|fim▁hole|> unittest.main()<|fim▁end|> | if __name__ == "__main__": |
<|file_name|>NativeCarbon.cc<|end_file_name|><|fim▁begin|>// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#include "Diadem/NativeCarbon.h"
#include <algorithm>
#include "Diadem/Factory.h"
#include "Diadem/Layout.h"
namespace {
CFStringRef CFStringFromString(const Diadem::String &dstring) {
return ::CFStringCreateWithCString(
kCFAllocatorDefault, dstring.Get(), kCFStringEncodingUTF8);
}
Diadem::String StringFromCFString(CFStringRef cf_string) {
const CFIndex length = ::CFStringGetBytes(
cf_string, CFRangeMake(0, ::CFStringGetLength(cf_string)),
kCFStringEncodingUTF8, '.', false, NULL, 0, NULL);
if (length == 0)
return Diadem::String();
char *buffer = new char[length+1];
::CFStringGetBytes(
cf_string, CFRangeMake(0, ::CFStringGetLength(cf_string)),
kCFStringEncodingUTF8, '.', false,
reinterpret_cast<UInt8*>(buffer), length, NULL);
buffer[length] = '\0';
return Diadem::String(buffer, Diadem::String::kAdoptBuffer);
}
enum RetainAction { kDontRetain, kDoRetain };
template <class T>
class ScopedCFType {
public:
ScopedCFType() : ref_(NULL) {}
ScopedCFType(T ref, RetainAction action)
: ref_(ref) {
if (action == kDoRetain)
::CFRetain(ref_);
}
~ScopedCFType() {
if (ref_ != NULL)
::CFRelease(ref_);
}
void Set(T ref, RetainAction action) {
if (ref_ != NULL)
::CFRelease(ref_);
ref_ = ref;
if (action == kDoRetain)
::CFRetain(ref_);
}
T* RetainedOutPtr() {
if (ref_ != NULL) {
::CFRelease(ref_);
ref_ = NULL;
}
return &ref_;
}
operator T() const { return ref_; }
protected:
T ref_;
};
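// Illustrative usage (names here are hypothetical, not from this file):
//   ScopedCFType<CFStringRef> title(CFStringFromString(some_string), kDontRetain);
//   ::SetWindowTitleWithCFString(some_window, title);
// The wrapped ref is released automatically when `title` goes out of scope.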
} // namespace
namespace Diadem {
PlatformMetrics Carbon::NativeCarbon::metrics_ = {
14, 17, 18,
Spacing(12, 6, 12, 6) };
void Carbon::SetUpFactory(Factory *factory) {
DASSERT(factory != NULL);
factory->RegisterNative<Label>(kTypeNameLabel);
factory->RegisterNative<Window>(kTypeNameWindow);
factory->RegisterNative<Button>(kTypeNameButton);
factory->RegisterNative<Checkbox>(kTypeNameCheck);
factory->RegisterNative<EditField>(kTypeNameEdit);
factory->RegisterNative<Separator>(kTypeNameSeparator);
}
void Carbon::Window::InitializeProperties(const PropertyMap &properties) {
const Rect default_bounds = { 40, 0, 50, 50 };
OSStatus err;
err = ::CreateNewWindow(
kMovableModalWindowClass,
kWindowAsyncDragAttribute |
kWindowStandardHandlerAttribute |
kWindowCompositingAttribute,
&default_bounds, &window_ref_);
if ((err == noErr) && (window_ref_ != NULL)) {
if (properties.Exists(kPropText)) {
const String title_string =
properties[kPropText].Coerce<String>();
ScopedCFType<CFStringRef> cf_title(
CFStringFromString(title_string),
kDontRetain);
::SetWindowTitleWithCFString(window_ref_, cf_title);
}
}
}
Carbon::Window::~Window() {
if (window_ref_ != NULL)
::CFRelease(window_ref_);
}
bool Carbon::Window::SetProperty(PropertyName name, const Value &value) {
if (window_ref_ == NULL)
return false;
if (strcmp(name, kPropText) == 0) {
ScopedCFType<CFStringRef> title(
CFStringFromString(value.Coerce<String>()),
kDontRetain);
::SetWindowTitleWithCFString(window_ref_, title);
return true;
}
if (strcmp(name, kPropSize) == 0) {
const Size size = value.Coerce<Size>();
Rect bounds;
::GetWindowBounds(window_ref_, kWindowContentRgn, &bounds);
bounds.right = bounds.left + size.width;
bounds.bottom = bounds.top + size.height;
::SetWindowBounds(window_ref_, kWindowContentRgn, &bounds);
// TODO(catmull): reposition?
return true;
}
return false;
}
Value Carbon::Window::GetProperty(PropertyName name) const {
if (window_ref_ == NULL)
return false;
if (strcmp(name, kPropText) == 0) {
ScopedCFType<CFStringRef> title;
::CopyWindowTitleAsCFString(window_ref_, title.RetainedOutPtr());
return StringFromCFString(title);
}
if (strcmp(name, kPropSize) == 0) {
Rect bounds;
::GetWindowBounds(window_ref_, kWindowContentRgn, &bounds);
return Size(bounds.right-bounds.left, bounds.bottom-bounds.top);
}
if (strcmp(name, kPropMargins) == 0) {
return Value(Spacing(14, 20, 20, 20));
}
return Value();
}
void Carbon::Window::AddChild(Native *child) {
if (!::HIViewIsValid((HIViewRef)child->GetNativeRef()))
return;
HIViewRef content_view = NULL;
::HIViewFindByID(
::HIViewGetRoot(window_ref_), kHIViewWindowContentID, &content_view);
if (content_view != NULL)
::HIViewAddSubview(content_view, (HIViewRef)child->GetNativeRef());
}
bool Carbon::Window::ShowModeless() {
if (window_ref_ == NULL)
return false;
::SelectWindow(window_ref_);
::ShowWindow(window_ref_);
return true;
}
bool Carbon::Window::Close() {
if (window_ref_ == NULL)
return false;
::HideWindow(window_ref_);
return true;
}
bool Carbon::Window::ShowModal(void *on_parent) {
if (window_ref_ == NULL)
return false;
// TODO(catmull): defer to main thread
WindowPtr parent = ::GetFrontWindowOfClass(kDocumentWindowClass, true);
WindowPositionMethod window_position;
if (parent == NULL)
parent = ::GetFrontWindowOfClass(kMovableModalWindowClass, true);
if (parent != NULL) {
// We use alert position in both cases because that's actually the
// preferred location for new windows in the HIG. In the case where it
// has been marked as an alert and has a parent window, we assume it's
// strongly associated with that window (i.e., it would be a sheet if
// not for our modality needs), so we give it window alert position.
window_position = is_alert_ ? kWindowAlertPositionOnParentWindow
: kWindowAlertPositionOnParentWindowScreen;
} else {
window_position = kWindowAlertPositionOnMainScreen;
}
::RepositionWindow(window_ref_, parent, window_position);
::SetThemeCursor(kThemeArrowCursor);
::ShowWindow(window_ref_);
::SelectWindow(window_ref_);
::RunAppModalLoopForWindow(window_ref_);
return true;
}
bool Carbon::Window::EndModal() {
if (window_ref_ == NULL)
return false;
// TODO(catmull): defer to main thread
// TODO(catmull): maybe check window modality for good measure
::QuitAppModalLoopForWindow(window_ref_);
::HideWindow(window_ref_);
return true;
}
bool Carbon::Window::SetFocus(Entity *new_focus) {
if ((window_ref_ == NULL) || (new_focus == NULL) ||
(new_focus->GetNative() == NULL) ||
(new_focus->GetNative()->GetNativeRef() == NULL))
return false;
if (!::IsValidControlHandle((HIViewRef)
new_focus->GetNative()->GetNativeRef()))
return false;
::SetKeyboardFocus(
window_ref_,
(HIViewRef)new_focus->GetNative()->GetNativeRef(),
kControlFocusNextPart);
return true;
}
Carbon::Control::~Control() {
if (view_ref_ != NULL)
::CFRelease(view_ref_);
}
bool Carbon::Control::SetProperty(PropertyName name, const Value &value) {
if (view_ref_ == NULL)
return false;
if (strcmp(name, kPropLocation) == 0) {
const Location loc = value.Coerce<Location>() + GetViewOffset();
::HIViewPlaceInSuperviewAt(view_ref_, loc.x, loc.y);
return true;
}
if (strcmp(name, kPropSize) == 0) {
const Size size = value.Coerce<Size>() + GetInset();
HIRect frame;
::HIViewGetFrame(view_ref_, &frame);
frame.size.width = size.width;
frame.size.height = size.height;
::HIViewSetFrame(view_ref_, &frame);
return true;
}
if (strcmp(name, kPropText) == 0) {
ScopedCFType<CFStringRef> cf_text(
CFStringFromString(value.Coerce<String>()),
kDontRetain);
return ::HIViewSetText(view_ref_, cf_text) == noErr;
}
if (strcmp(name, kPropVisible) == 0) {
::HIViewSetVisible(view_ref_, value.Coerce<bool>());
}
return false;
}
Value Carbon::Control::GetProperty(PropertyName name) const {
if (view_ref_ == NULL)
return Value();
if (strcmp(name, kPropText) == 0) {
ScopedCFType<CFStringRef> cf_text(::HIViewCopyText(view_ref_), kDontRetain);
return StringFromCFString(cf_text);
}
if (strcmp(name, kPropMinimumSize) == 0) {
HIRect bounds;
::HIViewGetOptimalBounds(view_ref_, &bounds, NULL);
return Size(bounds.size.width, bounds.size.height) - GetInset();
}
if (strcmp(name, kPropLocation) == 0) {
HIRect frame;
::HIViewGetFrame(view_ref_, &frame);
return Location(frame.origin.x, frame.origin.y) - GetViewOffset();
}
if (strcmp(name, kPropSize) == 0) {
return GetSize() - GetInset();
}
if (strcmp(name, kPropVisible) == 0) {
return (bool)::HIViewIsVisible(view_ref_);
}
return Value();
}
Size Carbon::Control::GetSize() const {
HIRect frame;
::HIViewGetFrame(view_ref_, &frame);
return Size(frame.size.width, frame.size.height);
}
void Carbon::Button::InitializeProperties(const PropertyMap &properties) {
ScopedCFType<CFStringRef> title;
if (properties.Exists(kPropText))
title.Set(
CFStringFromString(properties[kPropText].Coerce<String>()),
kDontRetain);
const Rect default_bounds = { 0, 0, 20, 50 };
::CreatePushButtonControl(NULL, &default_bounds, title, &view_ref_);
}
bool Carbon::Button::SetProperty(PropertyName name, const Value &value) {
return Control::SetProperty(name, value);
}
Value Carbon::Button::GetProperty(PropertyName name) const {
if (strcmp(name, kPropPadding) == 0) {
return Spacing(12, 12, 12, 12);
}
return Control::GetProperty(name);
}
void Carbon::Checkbox::InitializeProperties(const PropertyMap &properties) {
ScopedCFType<CFStringRef> title;
if (properties.Exists(kPropText))
title.Set(
CFStringFromString(properties[kPropText].Coerce<String>()),
kDontRetain);
const Rect default_bounds = { 0, 0, 20, 50 };
::CreateCheckBoxControl(NULL, &default_bounds, title, 0, true, &view_ref_);
}
void Carbon::Label::InitializeProperties(const PropertyMap &properties) {
ScopedCFType<CFStringRef> title;
if (properties.Exists(kPropText))
title.Set(
CFStringFromString(properties[kPropText].Coerce<String>()),
kDontRetain);
const Rect default_bounds = { 0, 0, 20, 50 };
::CreateStaticTextControl(NULL, &default_bounds, title, NULL, &view_ref_);
}
Value Carbon::Label::GetProperty(PropertyName name) const {
if (strcmp(name, kPropPadding) == 0) {
return Spacing(8, 8, 8, 8);
}
if (strcmp(name, kPropMinimumSize) == 0) {
float wrap_width = 0;
bool variable = false;
#if 0 // TODO(catmull): variable height
GetControlProperty(view_ref_, kFenSig, 'VHgt', variable);
if (variable)
wrap_width = GetSize().width;
#endif
ControlSize size = kControlSizeNormal;
ThemeFontID font_ID = kThemeSystemFont;
::GetControlData(
view_ref_, kControlEntireControl, kControlSizeTag,
sizeof(size), &size, NULL);
switch (size) {
case kControlSizeSmall: font_ID = kThemeSmallSystemFont; break;
case kControlSizeMini: font_ID = kThemeMiniSystemFont; break;
}
// HIViewGetOptimalBounds for static text only adjusts the height,
// so we calculate the text width manually
HIThemeTextInfo info = {
0, kThemeStateActive, font_ID,
kHIThemeTextHorizontalFlushLeft,
kHIThemeTextVerticalFlushTop,<|fim▁hole|>
::HIThemeGetTextDimensions(title, wrap_width, &info, &width, &height, NULL);
::CFRelease(title);
return Size(width, variable ? std::max(height, 16.0f) : GetSize().height);
}
return Control::GetProperty(name);
}
void Carbon::EditField::InitializeProperties(const PropertyMap &properties) {
ScopedCFType<CFStringRef> title;
if (properties.Exists(kPropText))
title.Set(
CFStringFromString(properties[kPropText].Coerce<String>()),
kDontRetain);
const Rect default_bounds = { 0, 0, 20, 50 };
::CreateEditUnicodeTextControl(
NULL, &default_bounds, title, false, NULL, &view_ref_);
}
void Carbon::Separator::InitializeProperties(const PropertyMap &properties) {
const Rect default_bounds = { 0, 0, 2, 50 };
::CreateSeparatorControl(NULL, &default_bounds, &view_ref_);
}
void Carbon::Separator::Finalize() {
Layout *layout = entity_->GetLayout();
if (layout == NULL)
return;
if (layout->GetDirection() == Layout::kLayoutRow)
layout->SetVSizeOption(kSizeFill);
else // kLayoutColumn
layout->SetHSizeOption(kSizeFill);
}
Value Carbon::Separator::GetProperty(PropertyName name) const {
if (strcmp(name, kPropMinimumSize) == 0) {
return Size(1, 1);
}
return Control::GetProperty(name);
}
} // namespace Diadem<|fim▁end|> | 0, kHIThemeTextTruncationNone, 0, false };
CFStringRef title = ::HIViewCopyText(view_ref_);
float width, height; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const admin = require('firebase-admin');
const DATABASE_URL = process.env.DATABASE_URL || 'https://dailyjack-8a930.firebaseio.com';
// const DATABASE_URL = 'https://dailyjack-d2fa0.firebaseio.com';
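// Illustrative usage (hypothetical service-account values):
//   const db = jackDB({ projectId, clientEmail, privateKey });
//   db.random({ shouldExcludeLimited: true }).then(jack => console.log(jack.title));
//   db.exit();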
const jackDB = (config = {}) => {
const app = admin.initializeApp({
credential: admin.credential.cert({
projectId: config.projectId,
clientEmail: config.clientEmail,
privateKey: config.privateKey,
}),
databaseURL: DATABASE_URL,
});
const db = admin.database();
const jacksRef = db.ref('jacks');
const totalJacksRef = db.ref('totalJacks');
const usersRef = db.ref('users');
const insert = jack => (
jacksRef
.child(jack.id)
.set({
id: jack.id,
title: jack.title,
contents: jack.contents || [],
author: jack.author || null,
createdTime: Date.now(),
isLimited: Boolean(jack.isLimited),
isSpecial: Boolean(jack.isSpecial),
})
.then(() => (
totalJacksRef.transaction(total => (total || 0) + 1)
))
);
const filter = (jacks = [], filterOptions = {}) => jacks.filter(
jack => (!filterOptions.shouldExcludeLimited || !jack.isLimited)
&& (!filterOptions.shouldExcludeSpecial || !jack.isSpecial)
);
const all = (filterOptions = {}) => (
jacksRef.once('value')
.then(snapshot => snapshot.val())
.then(jacks => (jacks || []).filter(Boolean))
.then(jacks => filter(jacks, filterOptions))
);
const get = id => (
jacksRef.child(id)
.once('value')
.then(snapshot => snapshot.val())
);
const random = (filterOptions = {}) => (
all(filterOptions)
.then(jacks => jacks[Math.floor(Math.random() * jacks.length)])
);
const upvote = (id, user) => (
jacksRef.child(id)
.child('ratedUsers')
.child(user)
.set(true)
);
const downvote = (id, user) => (
    jacksRef.child(id)
      .child('ratedUsers')
      .child(user)
      .remove() // Reference.remove() takes no path argument; select the user's entry first
);
const togglevote = (id, user) => (
jacksRef.child(id)
.child('ratedUsers')
.child(user)<|fim▁hole|>
const getRate = id => (
jacksRef.child(id)
.child('ratedUsers')
.once('value')
.then(snapshot => snapshot.val())
.then(rate => Object.keys(rate || {})
.filter(Boolean)
.length
)
);
const setUser = user => (
usersRef.child(user.name)
.set(user)
);
const updateUser = user => (
usersRef.child(user.name)
.update(user)
);
const getUser = userName => (
usersRef.child(userName)
.once('value')
.then(snapshot => snapshot.val())
);
const exit = () => {
db.goOffline();
app.delete();
};
return {
all,
get,
random,
insert,
exit,
upvote,
downvote,
togglevote,
getRate,
setUser,
updateUser,
getUser,
};
};
module.exports = {
default: jackDB,
};<|fim▁end|> | .transaction(rate => (rate ? null : true))
); |
<|file_name|>TrikSonarSensor.ts<|end_file_name|><|fim▁begin|>/*
* Copyright Vladimir Zakharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <reference path="../../robotParts/RangeSensor.ts" />
class TrikSonarSensor extends RangeSensor {
static parentType = RangeSensor;
static friendlyName = "Sonic Sensor";
}<|fim▁end|> | * You may obtain a copy of the License at |
<|file_name|>test_mlab_on_unix.py<|end_file_name|><|fim▁begin|>import sys
sys.path = ['../src/'] + sys.path
import unittest
from mlab.mlabwrap import MatlabReleaseNotFound
class TestMlabUnix(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_version_discovery(self):
import mlab
instances = mlab.releases.MatlabVersions(globals())<|fim▁hole|>
def test_latest_release(self):
from mlab.releases import latest_release
from matlab import matlabroot
self.assertTrue(len(matlabroot())>0)
matlabroot()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | assert len(instances.pick_latest_release()) > 0
with self.assertRaises(MatlabReleaseNotFound):
mlab_inst = instances.get_mlab_instance('R2010c') |
<|file_name|>inferno.js<|end_file_name|><|fim▁begin|>/*!
* inferno v0.7.3
* (c) 2016 Dominic Gannaway
* Released under the MPL-2.0 License.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.Inferno = factory());
}(this, function () { 'use strict';
var babelHelpers = {};
babelHelpers.typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {
return typeof obj;<|fim▁hole|> babelHelpers.classCallCheck = function (instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
};
babelHelpers.createClass = function () {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
return function (Constructor, protoProps, staticProps) {
if (protoProps) defineProperties(Constructor.prototype, protoProps);
if (staticProps) defineProperties(Constructor, staticProps);
return Constructor;
};
}();
babelHelpers.extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
babelHelpers;
function isNullOrUndefined(obj) {
return obj === void 0 || obj === null;
}
function isAttrAnEvent(attr) {
return attr[0] === 'o' && attr[1] === 'n' && attr.length > 3;
}
function VNode(blueprint) {
this.bp = blueprint;
this.dom = null;
this.instance = null;
this.tag = null;
this.children = null;
this.style = null;
this.className = null;
this.attrs = null;
this.events = null;
this.hooks = null;
this.key = null;
this.clipData = null;
}
VNode.prototype = {
setAttrs: function setAttrs(attrs) {
this.attrs = attrs;
return this;
},
setTag: function setTag(tag) {
this.tag = tag;
return this;
},
setStyle: function setStyle(style) {
this.style = style;
return this;
},
setClassName: function setClassName(className) {
this.className = className;
return this;
},
setChildren: function setChildren(children) {
this.children = children;
return this;
},
setHooks: function setHooks(hooks) {
this.hooks = hooks;
return this;
},
setEvents: function setEvents(events) {
this.events = events;
return this;
},
setKey: function setKey(key) {
this.key = key;
return this;
}
};
function createVNode(bp) {
return new VNode(bp);
}
function createBlueprint(shape, childrenType) {
var tag = shape.tag || null;
var tagIsDynamic = tag && tag.arg !== void 0 ? true : false;
var children = !isNullOrUndefined(shape.children) ? shape.children : null;
var childrenIsDynamic = children && children.arg !== void 0 ? true : false;
var attrs = shape.attrs || null;
var attrsIsDynamic = attrs && attrs.arg !== void 0 ? true : false;
var hooks = shape.hooks || null;
var hooksIsDynamic = hooks && hooks.arg !== void 0 ? true : false;
var events = shape.events || null;
var eventsIsDynamic = events && events.arg !== void 0 ? true : false;
var key = shape.key !== void 0 ? shape.key : null;
var keyIsDynamic = !isNullOrUndefined(key) && !isNullOrUndefined(key.arg);
var style = shape.style || null;
var styleIsDynamic = style && style.arg !== void 0 ? true : false;
var className = shape.className !== void 0 ? shape.className : null;
var classNameIsDynamic = className && className.arg !== void 0 ? true : false;
var blueprint = {
lazy: shape.lazy || false,
dom: null,
pools: {
keyed: {},
nonKeyed: []
},
tag: !tagIsDynamic ? tag : null,
className: className !== '' && className ? className : null,
style: style !== '' && style ? style : null,
isComponent: tagIsDynamic,
hasAttrs: attrsIsDynamic || (attrs ? true : false),
hasHooks: hooksIsDynamic,
hasEvents: eventsIsDynamic,
hasStyle: styleIsDynamic || (style !== '' && style ? true : false),
hasClassName: classNameIsDynamic || (className !== '' && className ? true : false),
childrenType: childrenType === void 0 ? children ? 5 : 0 : childrenType,
attrKeys: null,
eventKeys: null,
isSVG: shape.isSVG || false
};
return function () {
var vNode = new VNode(blueprint);
if (tagIsDynamic === true) {
vNode.tag = arguments[tag.arg];
}
if (childrenIsDynamic === true) {
vNode.children = arguments[children.arg];
}
if (attrsIsDynamic === true) {
vNode.attrs = arguments[attrs.arg];
} else {
vNode.attrs = attrs;
}
if (hooksIsDynamic === true) {
vNode.hooks = arguments[hooks.arg];
}
if (eventsIsDynamic === true) {
vNode.events = arguments[events.arg];
}
if (keyIsDynamic === true) {
vNode.key = arguments[key.arg];
}
if (styleIsDynamic === true) {
vNode.style = arguments[style.arg];
} else {
vNode.style = blueprint.style;
}
if (classNameIsDynamic === true) {
vNode.className = arguments[className.arg];
} else {
vNode.className = blueprint.className;
}
return vNode;
};
}
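  // Illustrative usage (hypothetical blueprint): a static tag with a dynamic
  // className (argument 0) and dynamic children (argument 1):
  //   var bp = createBlueprint({ tag: 'div', className: { arg: 0 }, children: { arg: 1 } });
  //   var vNode = bp('header', 'Hello');  // className = 'header', children = 'Hello'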
  // Runs only once in an application's lifetime
var isBrowser = typeof window !== 'undefined' && window.document;
// Copy of the util from dom/util, otherwise it makes massive bundles
function documentCreateElement(tag, isSVG) {
var dom = void 0;
if (isSVG === true) {
dom = document.createElementNS('http://www.w3.org/2000/svg', tag);
} else {
dom = document.createElement(tag);
}
return dom;
}
function createUniversalElement(tag, attrs, isSVG) {
if (isBrowser) {
var dom = documentCreateElement(tag, isSVG);
if (attrs) {
createStaticAttributes(attrs, dom);
}
return dom;
}
return null;
}
function createStaticAttributes(attrs, dom) {
var attrKeys = Object.keys(attrs);
for (var i = 0; i < attrKeys.length; i++) {
var attr = attrKeys[i];
var value = attrs[attr];
if (attr === 'className') {
dom.className = value;
} else {
if (value === true) {
dom.setAttribute(attr, attr);
} else if (!isNullOrUndefined(value) && value !== false && !isAttrAnEvent(attr)) {
dom.setAttribute(attr, value);
}
}
}
}
var index = {
createBlueprint: createBlueprint,
createVNode: createVNode,
universal: {
createElement: createUniversalElement
}
};
return index;
}));<|fim▁end|> | } : function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol ? "symbol" : typeof obj;
};
|
<|file_name|>Practica1Conver.py<|end_file_name|><|fim▁begin|>def main():
print "hola"
print "Como te llmas?"
nombre = raw_input()
print "Buenos dias", nombre
print "Que edad tienes?"
edad = raw_input()<|fim▁hole|><|fim▁end|> | print "que bien te conservas para tener", edad
main() |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
extern crate gfx;
extern crate ipc_channel;<|fim▁hole|>extern crate metrics;
extern crate msg;
extern crate net_traits;
extern crate profile_traits;
extern crate script_traits;
extern crate servo_url;
extern crate webrender_api;
// This module contains traits in layout used generically
// in the rest of Servo.
// The traits are here instead of in layout so
// that these modules won't have to depend on layout.
use gfx::font_cache_thread::FontCacheThread;
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use metrics::PaintTimeMetrics;
use msg::constellation_msg::PipelineId;
use msg::constellation_msg::TopLevelBrowsingContextId;
use net_traits::image_cache::ImageCache;
use profile_traits::{mem, time};
use script_traits::{ConstellationControlMsg, LayoutControlMsg};
use script_traits::LayoutMsg as ConstellationMsg;
use servo_url::ServoUrl;
use std::sync::Arc;
use std::sync::mpsc::{Receiver, Sender};
// A static method creating a layout thread
// Here to remove the compositor -> layout dependency
pub trait LayoutThreadFactory {
type Message;
fn create(id: PipelineId,
top_level_browsing_context_id: TopLevelBrowsingContextId,
url: ServoUrl,
is_iframe: bool,
chan: (Sender<Self::Message>, Receiver<Self::Message>),
pipeline_port: IpcReceiver<LayoutControlMsg>,
constellation_chan: IpcSender<ConstellationMsg>,
script_chan: IpcSender<ConstellationControlMsg>,
image_cache: Arc<ImageCache>,
font_cache_thread: FontCacheThread,
time_profiler_chan: time::ProfilerChan,
mem_profiler_chan: mem::ProfilerChan,
content_process_shutdown_chan: Option<IpcSender<()>>,
webrender_api_sender: webrender_api::RenderApiSender,
layout_threads: usize,
paint_time_metrics: PaintTimeMetrics);
}<|fim▁end|> | |
<|file_name|>filename.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright 2014 Simone Campagna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = "Simone Campagna"
import re
<|fim▁hole|>from .py23 import BASE_STRING
class Filename(object):
def __init__(self, init):
if isinstance(init, Filename):
filename = init._filename
elif isinstance(init, BASE_STRING):
filename = self._from_string(init)
else:
raise ValueError("cannot make a {c} from {t} object {o!r}".format(
c=self.__class__.__name__,
t=type(init).__name__,
o=init))
self._filename = filename
@property
def filename(self):
return self._filename
def __str__(self):
return self._filename
def __repr__(self):
return "{0}(filename={1!r})".format(self.__class__.__name__, self._filename)
@classmethod
def _from_string(cls, value):
return value
class InputFilename(Filename):
pass
class OutputFilename(Filename):
pass
class Mode(object):
MODES = set()
DEFAULT_MODE = 'rb'
def __init__(self, mode=None):
if mode is None:
mode = self.DEFAULT_MODE
mode = mode.lower()
for m in self.MODES:
if set(mode) == set(m):
break
else:
raise ValueError("invalid {} {!r}: allowed modes are {}".format(
self.__class__.__name__,
mode,
', '.join(repr(m) for m in self.MODES)))
self.mode = mode
def __str__(self):
return self.mode
def __repr__(self):
return "{}({!r})".format(self.__class__.__name__, self.mode)
class InputMode(Mode):
MODES = {'r', 'rb'}
DEFAULT_MODE = 'rb'
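# Illustrative behaviour (hypothetical values):
#   InputMode()      -> mode 'rb' (the default)
#   InputMode('BR')  -> mode 'br' (case-insensitive, order-insensitive match)
#   InputMode('w')   -> ValueError: invalid InputMode 'w': allowed modes are 'r', 'rb'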
class OutputMode(Mode):
MODES = {'w', 'wb', 'a', 'ab', 'w+b', 'r+b', 'a+b'}
DEFAULT_MODE = 'wb'
def is_append_mode(self):
return 'a' in self.mode<|fim▁end|> | |
<|file_name|>IframeMousetrap.js<|end_file_name|><|fim▁begin|>define(['mousetrap'], function(Mousetrap){
'use strict';
var utils = require('util'),
instances = []
;
function IframeMousetrap(a){
var self = new Mousetrap(a);
/*self._instanceId = instances.push(this);
self._originalHandleKey = this._handleKey;
<|fim▁hole|> return self;
}
IframeMousetrap.prototype.shutdown = function() {
this._handleKey = function(){}; //:(
};
IframeMousetrap.prototype.handleKey = function() {
return Mousetrap.handleKey.apply(Mousetrap, arguments);
};
return IframeMousetrap;
});<|fim▁end|> | self._handleKey = IframeMousetrap.prototype._handleKey;*/
self.handleKey = IframeMousetrap.prototype.handleKey;
self.shutdown = IframeMousetrap.prototype.shutdown;
|
<|file_name|>geometry.py<|end_file_name|><|fim▁begin|>import os
import json
import arcpy
import types
import general
from .._abstract import abstract
########################################################################
class SpatialReference(abstract.AbstractGeometry):
""" creates a spatial reference instance """
_wkid = None
#----------------------------------------------------------------------
def __init__(self, wkid):
"""Constructor"""
self._wkid = wkid
#----------------------------------------------------------------------
@property
def wkid(self):
""" get/set the wkid """
return self._wkid
@wkid.setter
def wkid(self, wkid):
""" get/set the wkid """
self._wkid = wkid
@property
def asDictionary(self):
"""returns the wkid id for use in json calls"""
return {"wkid": self._wkid}
#----------------------------------------------------------------------
@property
def value(self):
"""returns the wkid id for use in json calls"""
return {"wkid": self._wkid}
########################################################################
class Point(abstract.AbstractGeometry):
""" Point Geometry
Inputs:
coord - list of [X,Y] pair or arcpy.Point Object
           wkid - well known id of the spatial reference
z - is the Z coordinate value
m - m value
"""
_x = None
_y = None
_z = None
_m = None
_wkid = None
_json = None
_geom = None
_dict = None
#----------------------------------------------------------------------
def __init__(self, coord, wkid, z=None, m=None):
"""Constructor"""
if isinstance(coord, list):
self._x = float(coord[0])
self._y = float(coord[1])
elif isinstance(coord, arcpy.Geometry):
self._x = coord.centroid.X
self._y = coord.centroid.Y
self._z = coord.centroid.Z
self._m = coord.centroid.M
self._geom = coord.centroid
self._wkid = wkid
if not z is None:
self._z = float(z)
if not m is None:
self._m = m
#----------------------------------------------------------------------
def __str__(self):
""" returns the object as a string """
return json.dumps(self.asDictionary,
default=general._date_handler)
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""returns the geometry spatial reference"""
return {'wkid' : self._wkid}
#----------------------------------------------------------------------
@property
def type(self):
""" returns the geometry type """
return "esriGeometryPoint"
#----------------------------------------------------------------------
@property
def asJSON(self):
""" returns a geometry as JSON """
value = self._json
if value is None:
value = json.dumps(self.asDictionary,
default=general._date_handler)
self._json = value
return self._json
#----------------------------------------------------------------------
@property
def asArcPyObject(self):
""" returns the Point as an ESRI arcpy.Point object """
return arcpy.AsShape(self.asDictionary, True)
#----------------------------------------------------------------------
@property
def asDictionary(self):
""" returns the object as a python dictionary """
#
template = {"x" : self._x,
"y" : self._y,
"spatialReference" : {"wkid" : self._wkid}
}
if not self._z is None:
template['z'] = self._z
if not self._m is None:
            template['m'] = self._m
return template
#----------------------------------------------------------------------
@property
def asList(self):
""" returns a Point value as a list of [x,y,<z>,<m>] """
base = [self._x, self._y]
if not self._z is None:
base.append(self._z)
elif not self._m is None:
base.append(self._m)
return base
#----------------------------------------------------------------------
@property
def X(self):
""" gets the X coordinate """
return self._x
#----------------------------------------------------------------------
@X.setter
def X(self, value):
"""sets the X coordinate"""
if isinstance(value, (int, float,
long, types.NoneType)):
self._x = value
#----------------------------------------------------------------------
@property
def Y(self):
""" gets the Y Coordinate """
return self._y
#----------------------------------------------------------------------
@Y.setter
def Y(self, value):
""" sets the Y coordinate """
if isinstance(value, (int, float,
long, types.NoneType)):
self._y = value
#----------------------------------------------------------------------
@property
def Z(self):
""" gets the Z Coordinate """
return self._z
#----------------------------------------------------------------------
@Z.setter
def Z(self, value):
""" sets the Z coordinate """
if isinstance(value, (int, float,
long, types.NoneType)):
self._z = value
#----------------------------------------------------------------------
@property
def wkid(self):
""" gets the wkid """
return self._wkid
#----------------------------------------------------------------------
@wkid.setter
def wkid(self, value):
""" sets the wkid """
if isinstance(value, (int,
long)):
self._wkid = value
########################################################################
class MultiPoint(abstract.AbstractGeometry):
""" Implements the ArcGIS JSON MultiPoint Geometry Object """
_geom = None
_json = None
_dict = None
_wkid = None
_points = None
_hasZ = False
_hasM = False
#----------------------------------------------------------------------
def __init__(self, points, wkid, hasZ=False, hasM=False):
"""Constructor"""
if isinstance(points, list):
self._points = points
elif isinstance(points, arcpy.Geometry):
self._points = self.__geomToPointList(points)
self._wkid = wkid
self._hasZ = hasZ
self._hasM = hasM
#----------------------------------------------------------------------
def __geomToPointList(self, geom):
""" converts a geometry object to a common.Geometry object """
        if isinstance(geom, arcpy.Multipoint):
            # A multipoint is a flat collection of vertices, so collect the
            # points into a single list to match asDictionary's expectations.
            feature_geom = []
            for pnt in geom:
                feature_geom.append(Point(coord=[pnt.X, pnt.Y],
                                          wkid=geom.spatialReference.factoryCode,
                                          z=pnt.Z, m=pnt.M))
            return feature_geom
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""returns the geometry spatial reference"""
return {'wkid' : self._wkid}
#----------------------------------------------------------------------
@property
def type(self):
""" returns the geometry type """
return "esriGeometryMultipoint"
#----------------------------------------------------------------------
@property
def asJSON(self):
""" returns a geometry as JSON """
value = self._json
if value is None:
value = json.dumps(self.asDictionary,
default=general._date_handler)
self._json = value
return self._json
#----------------------------------------------------------------------
@property
def asArcPyObject(self):
""" returns the Point as an ESRI arcpy.MultiPoint object """
return arcpy.AsShape(self.asDictionary, True)
#----------------------------------------------------------------------
@property
def asDictionary(self):
""" returns the object as a python dictionary """
#
value = self._dict
if value is None:
template = {
"hasM" : self._hasM,
"hasZ" : self._hasZ,
"points" : [],
"spatialReference" : {"wkid" : self._wkid}
}
            for pt in self._points:
                if isinstance(pt, Point):
                    template['points'].append(pt.asList)
                else:
                    template['points'].append(pt)
self._dict = template
return self._dict
########################################################################
class Polyline(abstract.AbstractGeometry):
""" Implements the ArcGIS REST API Polyline Object
Inputs:
paths - list - list of lists of Point objects
wkid - integer - well know spatial reference id
hasZ - boolean -
hasM - boolean -
"""
_paths = None
_wkid = None
_json = None
_dict = None
_geom = None
_hasZ = None
_hasM = None
#----------------------------------------------------------------------
def __init__(self, paths, wkid, hasZ=False, hasM=False):
"""Constructor"""
if isinstance(paths, list):
self._paths = paths
elif isinstance(paths, arcpy.Geometry):
self._paths = self.__geomToPointList(paths)
self._wkid = wkid
self._hasM = hasM
self._hasZ = hasZ
#----------------------------------------------------------------------
def __geomToPointList(self, geom):
""" converts a geometry object to a common.Geometry object """
if isinstance(geom, arcpy.Polyline):
feature_geom = []
fPart = []
for part in geom:
fPart = []
for pnt in part:
if geom.spatialReference is None:
wkid = self._wkid
else:
wkid = geom.spatialReference.factoryCode
fPart.append(Point(coord=[pnt.X, pnt.Y],
wkid=wkid,
z=pnt.Z, m=pnt.M))
feature_geom.append(fPart)
return feature_geom
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""returns the geometry spatial reference"""
return {'wkid' : self._wkid}
#----------------------------------------------------------------------
@property
def type(self):
""" returns the geometry type """
return "esriGeometryPolyline"
#----------------------------------------------------------------------
@property
def asJSON(self):
""" returns a geometry as JSON """
value = self._json
if value is None:
value = json.dumps(self.asDictionary,
default=general._date_handler)
self._json = value
return self._json
#----------------------------------------------------------------------
@property
def asArcPyObject(self):
""" returns the Polyline as an ESRI arcpy.Polyline object """
return arcpy.AsShape(self.asDictionary, True)
#----------------------------------------------------------------------
@property
def asDictionary(self):
""" returns the object as a python dictionary """
value = self._dict
if value is None:
template = {
"hasM" : self._hasM,
"hasZ" : self._hasZ,
"paths" : [],
"spatialReference" : {"wkid" : self._wkid}
}
for part in self._paths:
lpart = []
for pt in part:
lpart.append(pt.asList)
template['paths'].append(lpart)
del lpart
self._dict = template
return self._dict
########################################################################
class Polygon(abstract.AbstractGeometry):
""" Implements the ArcGIS REST JSON for Polygon Object """
_rings = None
_wkid = None
_json = None
_dict = None
_geom = None
_hasZ = None
_hasM = None
#----------------------------------------------------------------------
def __init__(self, rings, wkid, hasZ=False, hasM=False):
"""Constructor"""
if isinstance(rings, list):
self._rings = rings
elif isinstance(rings, arcpy.Geometry):
self._rings = self.__geomToPointList(rings)
## self._json = rings.JSON
## self._dict = _unicode_convert(json.loads(self._json))
self._wkid = wkid
self._hasM = hasM
self._hasZ = hasZ
#----------------------------------------------------------------------
def __geomToPointList(self, geom):
""" converts a geometry object to a common.Geometry object """
sr = geom.spatialReference
if sr is None:
wkid = self._wkid
else:
wkid = sr.factoryCode
g = json.loads(geom.JSON)
top = []
for gring in g['rings']:
ring = []
            for coord in gring:
                ring.append(Point(coord=coord, wkid=wkid, z=None, m=None))
top.append(ring)
return top
#if isinstance(geom, arcpy.Polygon):
#feature_geom = []
#fPart = []
#for part in geom:
#fPart = []
#for pnt in part:
#if geom.spatialReference is None:
#wkid = self._wkid
#else:
#wkid = geom.spatialReference.factoryCode
#fPart.append(Point(coord=[pnt.X, pnt.Y],
#wkid=wkid,
#z=pnt.Z, m=pnt.M))
#feature_geom.append(fPart)
#return feature_geom
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""returns the geometry spatial reference"""
return {'wkid' : self._wkid}
#----------------------------------------------------------------------
@property
def type(self):
""" returns the geometry type """
return "esriGeometryPolygon"
#----------------------------------------------------------------------
@property
def asJSON(self):
""" returns a geometry as JSON """
value = self._json
if value is None:
value = json.dumps(self.asDictionary,
default=general._date_handler)
self._json = value
return self._json
#----------------------------------------------------------------------
@property
def asArcPyObject(self):
""" returns the Polyline as an ESRI arcpy.Polyline object """
return arcpy.AsShape(self.asDictionary, True)
#----------------------------------------------------------------------
@property
def asDictionary(self):
""" returns the object as a python dictionary """
value = self._dict
if value is None:
template = {
"hasM" : self._hasM,
"hasZ" : self._hasZ,
"rings" : [],
"spatialReference" : {"wkid" : self._wkid}
}
for part in self._rings:
lpart = []
for pt in part:
if isinstance(pt, list):
lpart.append(pt)
elif isinstance(pt, Point):
lpart.append(pt.asList)
template['rings'].append(lpart)
del lpart
self._dict = template
return self._dict
########################################################################
class Envelope(abstract.AbstractGeometry):<|fim▁hole|> """
An envelope is a rectangle defined by a range of values for each
coordinate and attribute. It also has a spatialReference field.
The fields for the z and m ranges are optional.
"""
_json = None
_dict = None
_geom = None
_xmin = None
_ymin = None
_zmin = None
_mmin = None
_xmax = None
_ymax = None
_zmax = None
_mmax = None
_wkid = None
#----------------------------------------------------------------------
def __init__(self, xmin, ymin, xmax, ymax, wkid,
zmin=None, zmax=None, mmin=None, mmax=None):
"""Constructor"""
self._xmin = xmin
self._ymin = ymin
self._zmin = zmin
self._mmin = mmin
self._xmax = xmax
self._ymax = ymax
self._zmax = zmax
self._mmax = mmax
self._wkid = wkid
#----------------------------------------------------------------------
@property
def spatialReference(self):
"""returns the geometry spatial reference"""
return {'wkid' : self._wkid}
#----------------------------------------------------------------------
@property
def type(self):
""" returns the geometry type """
return "esriGeometryEnvelope"
#----------------------------------------------------------------------
@property
def asDictionary(self):
""" returns the envelope as a dictionary """
template = {
"xmin" : self._xmin,
"ymin" : self._ymin,
"xmax" : self._xmax,
"ymax" : self._ymax,
"spatialReference" : {"wkid" : self._wkid}
}
if self._zmax is not None and \
self._zmin is not None:
template['zmin'] = self._zmin
template['zmax'] = self._zmax
if self._mmin is not None and \
self._mmax is not None:
template['mmax'] = self._mmax
template['mmin'] = self._mmin
return template
#----------------------------------------------------------------------
@property
def value(self):
""" returns the envelope as a dictionary """
        return self.asDictionary
#----------------------------------------------------------------------
def __str__(self):
"""returns object as string"""
return self.asJSON
#----------------------------------------------------------------------
@property
def asJSON(self):
""" returns a geometry as JSON """
value = self._json
if value is None:
value = json.dumps(self.asDictionary,
default=general._date_handler)
self._json = value
return self._json
#----------------------------------------------------------------------
@property
def asArcPyObject(self):
""" returns the Envelope as an ESRI arcpy.Polygon object """
env = self.asDictionary
ring = [[
            Point(coord=[env['xmin'], env['ymin']], wkid=self._wkid),
            Point(coord=[env['xmax'], env['ymin']], wkid=self._wkid),
            Point(coord=[env['xmax'], env['ymax']], wkid=self._wkid),
            Point(coord=[env['xmin'], env['ymax']], wkid=self._wkid)
]]
return Polygon(ring, self._wkid).asArcPyObject<|fim▁end|> | |
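#----------------------------------------------------------------------
# Example usage (an illustrative sketch only -- it assumes arcpy and the
# surrounding ``abstract``/``general`` helpers are importable, and wkid 4326
# is just a convenient spatial reference for the demonstration):
#
#   pt = Point(coord=[-122.42, 37.78], wkid=4326)
#   print pt.asJSON    # {"x": -122.42, "y": 37.78, "spatialReference": {"wkid": 4326}}
#   env = Envelope(-123.0, 37.0, -122.0, 38.0, 4326)
#   poly = env.asArcPyObject    # arcpy.Polygon spanning the envelope corners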
<|file_name|>extrameta.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
import hashlib
import scrapy.exceptions
from scrapy_proj.helpers import *
class SanatatePipelineExtraMeta(object):<|fim▁hole|> if act_type == None:
raise scrapy.exceptions.DropItem
item['type'] = act_type
engrol = RomanianHelper.englishize_romanian(item['title']).lower()
engrolna = TextHelper.remove_non_ascii(engrol)
identifier_text = '{0} {1}'.format(engrolna, item['date'] if 'date' in item else 'NA')
identifier_text_hashed = hashlib.md5(identifier_text.encode()).hexdigest()
item['identifier'] = '{0}-{1}-{2}'.format(item['institution'], item['type'], identifier_text_hashed)
return item<|fim▁end|> | def process_item(self, item, spider):
item['institution'] = spider.name
act_type = LegalHelper.get_type_from_title(item['title']) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate syntex;
extern crate syntex_syntax;
use syntex::Registry;
use syntex_syntax::ast;
use syntex_syntax::codemap::Span;
use syntex_syntax::ext::base::{ExtCtxt, MacEager, MacResult};<|fim▁hole|>use syntex_syntax::parse::token::InternedString;
pub fn expand_hello_world<'cx>(
cx: &'cx mut ExtCtxt,
sp: Span,
_: &[ast::TokenTree]
) -> Box<MacResult + 'cx> {
let expr = cx.expr_str(sp, InternedString::new("hello world"));
MacEager::expr(expr)
}
pub fn register(registry: &mut Registry) {
registry.add_macro("hello_world", expand_hello_world);
}<|fim▁end|> | use syntex_syntax::ext::build::AstBuilder; |
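// Example wiring (a hedged sketch, not part of this crate: it assumes the
// conventional syntex build-script flow in which macros in a `.rs.in` source
// file are expanded into a generated file; the paths below are illustrative):
//
//     use std::path::Path;
//     let mut registry = syntex::Registry::new();
//     register(&mut registry);
//     registry.expand("hello_world",
//                     Path::new("src/lib.rs.in"),
//                     Path::new("out/lib.rs")).unwrap();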
<|file_name|>cmd_list.py<|end_file_name|><|fim▁begin|>import click
from do_cli.contexts import CTX
from do_cli.commands.common import host_commands
@click.command('list')
@click.option('-f', '--force-refresh', is_flag=True, help='Pull data from the API')
@click.option('-h', '--host-names', help='Comma separated list of host names')
@CTX
def cli(ctx, force_refresh, host_names):
"""
Show minimal data for droplets<|fim▁hole|> Show minimal data for specific droplets
"""
if ctx.verbose:
click.echo("Show minimal data for droplets")
click.echo(host_commands(ctx, force_refresh, host_names))
if ctx.verbose:
click.echo('---- cmd_list done ----')<|fim▁end|> |
--host-names -h Comma separated list of host names |
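# Example invocation (a sketch; assumes this module is registered as the
# `list` subcommand of the surrounding `do` CLI):
#
#   do list --force-refresh --host-names web1,web2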
<|file_name|>blottner_viscosity_unit.C<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------bl-
//--------------------------------------------------------------------------
//
// Antioch - A Gas Dynamics Thermochemistry Library
//
// Copyright (C) 2013 The PECOS Development Team
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the Version 2.1 GNU Lesser General
// Public License as published by the Free Software Foundation.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc. 51 Franklin Street, Fifth Floor,
// Boston, MA 02110-1301 USA
//
//-----------------------------------------------------------------------el-
//
// $Id$
//
//--------------------------------------------------------------------------
//--------------------------------------------------------------------------
// C++
#include <iostream>
#include <cmath>
#include <limits>
#include <algorithm>
// Antioch
#include "antioch/blottner_viscosity.h"
template <typename Scalar>
int test_viscosity( const Scalar mu, const Scalar mu_exact, const Scalar tol )
{
using std::abs;
int return_flag = 0;
  const Scalar rel_error = abs( (mu - mu_exact)/mu_exact);
if( rel_error > tol )
{
std::cerr << "Error: Mismatch in viscosity" << std::endl
<< "mu(T) = " << mu << std::endl
<< "mu_exact = " << mu_exact << std::endl
<< "rel_error = " << rel_error << std::endl
<< "tol = " << tol << std::endl;
return_flag = 1;
}
return return_flag;
}
template <typename Scalar>
int tester()
{
const Scalar a = 3.14e-3;
const Scalar b = 2.71e-2;
const Scalar c = 42.0e-5;
Antioch::BlottnerViscosity<Scalar> mu(a,b,c);
std::cout << mu << std::endl;
const Scalar T = 1521.2;
// octave gives
const Scalar mu_exact = 0.144422234167703;
int return_flag = 0;
  // Scale the tolerance with the working precision so the float instantiation
  // does not spuriously fail; the reference values are only double-accurate.
  const Scalar tol = std::max( Scalar(1.0e-14),
                               Scalar(100) * std::numeric_limits<Scalar>::epsilon() );
return_flag = test_viscosity( mu(T), mu_exact, tol );
const Scalar a2 = 1e-3;
const Scalar b2 = 2e-2;
const Scalar c2 = 3e-5;
<|fim▁hole|> // octave gives
const Scalar mu_exact2 = 0.122172495548880;
return_flag = test_viscosity( mu(T), mu_exact2, tol );
return return_flag;
}
int main()
{
return (tester<double>() ||
tester<long double>() ||
tester<float>());
}<|fim▁end|> | mu.reset_coeffs( a2, b2, c2 );
|
<|file_name|>content_encoding.rs<|end_file_name|><|fim▁begin|>use brotli::enc::backward_references::{BrotliEncoderParams, BrotliEncoderMode};
use brotli::enc::BrotliCompress as brotli_compress;
use flate2::write::{DeflateEncoder, GzEncoder};
use flate2::Compression as Flate2Compression;
use iron::headers::{QualityItem, Encoding};
use bzip2::Compression as BzCompression;
use std::collections::BTreeSet;
use bzip2::write::BzEncoder;
use std::io::{self, Write};
use unicase::UniCase;
use std::path::Path;
use std::fs::File;
use blake3;
lazy_static! {
/// The list of content encodings we handle.
pub static ref SUPPORTED_ENCODINGS: Vec<Encoding> = {
let es = vec![Encoding::Gzip, Encoding::Deflate, Encoding::EncodingExt("br".to_string()), Encoding::EncodingExt("bzip2".to_string())];
[es.clone(), es.into_iter().map(|e| Encoding::EncodingExt(format!("x-{}", e))).collect()].iter().flat_map(|e| e.clone()).collect()
};
/// The list of extensions not to encode.
pub static ref BLACKLISTED_ENCODING_EXTENSIONS: BTreeSet<UniCase<&'static str>> = {
let raw = include_str!("../../assets/encoding_blacklist");
raw.split('\n').map(str::trim).filter(|s| !s.is_empty() && !s.starts_with('#')).map(UniCase::new).collect()
};
pub static ref BROTLI_PARAMS: BrotliEncoderParams = BrotliEncoderParams {
mode: BrotliEncoderMode::BROTLI_MODE_TEXT,
..Default::default()
};
}
/// The minimal size at which to encode filesystem files.
pub const MIN_ENCODING_SIZE: u64 = 1024;
/// The maximal size at which to encode filesystem files.
pub const MAX_ENCODING_SIZE: u64 = 100 * 1024 * 1024;
/// The minimal size gain at which to preserve encoded filesystem files.
pub const MIN_ENCODING_GAIN: f64 = 1.1;
/// Find best supported encoding to use, or `None` for identity.
pub fn response_encoding(requested: &mut [QualityItem<Encoding>]) -> Option<Encoding> {
requested.sort_by_key(|e| e.quality);
requested.iter().filter(|e| e.quality.0 != 0).find(|e| SUPPORTED_ENCODINGS.contains(&e.item)).map(|e| e.item.clone())
}
/// Encode a string slice using a specified encoding or `None` if encoding failed or is not recognised.
pub fn encode_str(dt: &str, enc: &Encoding) -> Option<Vec<u8>> {
type EncodeT = fn(&str) -> Option<Vec<u8>>;
const STR_ENCODING_FNS: &[EncodeT] = &[encode_str_gzip, encode_str_deflate, encode_str_brotli, encode_str_bzip2];
encoding_idx(enc).and_then(|fi| STR_ENCODING_FNS[fi](dt))
}
/// Encode the file denoted by the specified path into the file denoted by the specified path using a specified encoding or
/// `false` if encoding failed, is not recognised or an I/O error occurred.
pub fn encode_file(p: &Path, op: &Path, enc: &Encoding) -> bool {
type EncodeT = fn(File, File) -> bool;
const FILE_ENCODING_FNS: &[EncodeT] = &[encode_file_gzip, encode_file_deflate, encode_file_brotli, encode_file_bzip2];
encoding_idx(enc)
.map(|fi| {
let inf = File::open(p);
let outf = File::create(op);
inf.is_ok() && outf.is_ok() && FILE_ENCODING_FNS[fi](inf.unwrap(), outf.unwrap())
})
        .unwrap_or(false)
}
/// Encoding extension to use for encoded files, for example "gz" for gzip, or `None` if the encoding is not recognised.
pub fn encoding_extension(enc: &Encoding) -> Option<&'static str> {
const ENCODING_EXTS: &[&str] = &["gz", "dflt", "br", "bz2"];
encoding_idx(enc).map(|ei| ENCODING_EXTS[ei])
}
/// Return the 256-bit BLAKE3 hash of the file denoted by the specified path.
pub fn file_hash(p: &Path) -> blake3::Hash {
let mut ctx = blake3::Hasher::new();
io::copy(&mut File::open(p).unwrap(), &mut ctx).unwrap();
ctx.finalize()
}
fn encoding_idx(enc: &Encoding) -> Option<usize> {
match *enc {
Encoding::Gzip => Some(0),
Encoding::Deflate => Some(1),
Encoding::EncodingExt(ref e) => {
match &e[..] {
"x-gzip" => Some(0),
"x-deflate" => Some(1),
"br" | "x-br" => Some(2),
"bzip2" | "x-bzip2" => Some(3),
_ => None,
}
}
_ => None,
}
}
macro_rules! encode_fn {
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr, $constructor:expr) => {<|fim▁hole|> }
fn $file_fn_name(mut inf: File, outf: File) -> bool {
let mut cmp = $constructor(outf);
io::copy(&mut inf, &mut cmp).and_then(|_| cmp.finish()).is_ok()
}
};
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr) => {
encode_fn!($str_fn_name, $file_fn_name, $enc_tp, $comp_lvl, |into| $enc_tp::new(into, $comp_lvl));
}
}
encode_fn!(encode_str_gzip, encode_file_gzip, GzEncoder, Flate2Compression::default());
encode_fn!(encode_str_deflate, encode_file_deflate, DeflateEncoder, Flate2Compression::default());
encode_fn!(encode_str_bzip2, encode_file_bzip2, BzEncoder, BzCompression::Default);
fn encode_str_brotli(dt: &str) -> Option<Vec<u8>> {
let mut ret = Vec::new();
brotli_compress(&mut dt.as_bytes(), &mut ret, &BROTLI_PARAMS).ok().map(|_| ret)
}
fn encode_file_brotli(mut inf: File, mut outf: File) -> bool {
brotli_compress(&mut inf, &mut outf, &BROTLI_PARAMS).is_ok()
}<|fim▁end|> | fn $str_fn_name(dt: &str) -> Option<Vec<u8>> {
let mut cmp = $constructor(Vec::new());
cmp.write_all(dt.as_bytes()).ok().and_then(|_| cmp.finish().ok()) |
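// Usage sketch for the helpers above (illustrative only; the `Encoding`
// variants come from iron's re-export of hyper's header types):
//
//     assert_eq!(encoding_extension(&Encoding::Gzip), Some("gz"));
//     assert_eq!(encoding_extension(&Encoding::EncodingExt("br".to_string())), Some("br"));
//     // Unrecognised encodings map to no extension:
//     assert_eq!(encoding_extension(&Encoding::Identity), None);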
<|file_name|>test_new_instance.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.urls import reverse
# Declaration of Username and Password
username = 'admin'
password = 'Test1234$'
"""
Method to replicate
~~~~~~~~~~~~~~~~~~~
1. Bring up a new instance of NearBeach (grab from fixtures)<|fim▁hole|>~~~~~~~~~~~~~~~~
User will log in with no issues, system will create all of the user's permission sets and groups
"""
def login_user(c: object, self: object) -> object:
response = c.post(
reverse('login'),
self.credentials,
follow=True,
)
self.assertTrue(response.context['user'].is_active)
class NewInstanceLoginTest(TestCase):
fixtures = ['NearBeach_no_setup.json']
def setUp(self):
self.credentials = {
'username': username,
'password': password
}
def test_admin_login(self):
c = Client()
# User will be logged in
login_user(c, self)
# Make sure the admin user can open up the project
response = c.get(reverse('dashboard'))
self.assertEqual(response.status_code, 200)<|fim▁end|> | 2. Try and log in as the admin user
Expected Results |
<|file_name|>year.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use std::str::FromStr;
use filters::filter::Filter;
use chrono::NaiveDateTime;
use libimagerror::trace::trace_error;
use libimagerror::trace::MapErrTrace;
use libimagerror::iter::TraceIterator;
use libimagstore::store::FileLockEntry;
use libimagtimetrack::timetrackingstore::TimeTrackStore;
use libimagtimetrack::timetracking::TimeTracking;
use libimagtimetrack::tag::TimeTrackingTag;
use libimagtimetrack::iter::filter::*;
use libimagrt::runtime::Runtime;
pub fn year(rt: &Runtime) -> i32 {
let cmd = rt.cli().subcommand().1.unwrap(); // checked in main
let filter = {
use chrono::offset::Local;
use chrono::naive::NaiveDate;
use chrono::Datelike;
let now = Local::now();
let start = match cmd.value_of("start").map(::chrono::naive::NaiveDateTime::from_str) {
None => NaiveDate::from_ymd(now.year(), 1, 1).and_hms(0, 0, 0),
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let end = match cmd.value_of("end").map(::chrono::naive::NaiveDateTime::from_str) {
None => {
NaiveDate::from_ymd(now.year() + 1, 1, 1).and_hms(0, 0, 0)
},
Some(Ok(dt)) => dt,
Some(Err(e)) => {
trace_error(&e);
return 1
}
};
let tags = cmd
.values_of("tags")
.map(|ts| ts.into_iter().map(String::from).map(TimeTrackingTag::from).collect());
let start_time_filter = has_start_time_where(move |dt: &NaiveDateTime| {
start <= *dt
});
let end_time_filter = has_end_time_where(move |dt: &NaiveDateTime| {<|fim▁hole|> match tags {
Some(ref tags) => has_one_of_tags(&tags).filter(fle),
None => true,
}
};
tags_filter.and(start_time_filter).and(end_time_filter)
};
rt.store()
.get_timetrackings()
.and_then(|iter| {
iter.trace_unwrap()
.filter(|e| filter.filter(e))
.fold(Ok(()), |acc, e| {
acc.and_then(|_| {
debug!("Processing {:?}", e.get_location());
let tag = e.get_timetrack_tag()?;
debug!(" -> tag = {:?}", tag);
let start = e.get_start_datetime()?;
debug!(" -> start = {:?}", start);
let end = e.get_end_datetime()?;
debug!(" -> end = {:?}", end);
match (start, end) {
(None, _) => println!("{} has no start time.", tag),
(Some(s), None) => println!("{} | {} - ...", tag, s),
(Some(s), Some(e)) => println!("{} | {} - {}", tag, s, e),
}
Ok(())
})
})
})
.map(|_| 0)
.map_err_trace()
.unwrap_or(1)
}<|fim▁end|> | end >= *dt
});
let tags_filter = move |fle: &FileLockEntry| { |
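// Example invocation (a sketch; the flag names mirror the `value_of()`
// lookups above, and dates use `NaiveDateTime`'s `%Y-%m-%dT%H:%M:%S` form):
//
//     imag timetrack year --start 2016-01-01T00:00:00 --end 2017-01-01T00:00:00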
<|file_name|>solr_query_integration_tests.rs<|end_file_name|><|fim▁begin|>extern crate url;
extern crate heliotrope;
use url::Url;
use heliotrope::{SolrClient, SolrQuery};
<|fim▁hole|>
#[test]
fn simple_query_test() {
let url = Url::parse("http://localhost:8983/solr/test/").unwrap();
let client = SolrClient::new(&url);
let query = SolrQuery::new("*:*");
match client.query(&query) {
Ok(query_response) => println!("{:?}", query_response),
Err(e) => panic!(e.message)
}
}<|fim▁end|> | |
<|file_name|>TDNode.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010, 2011, 2012 mapsforge.org
*
* This program is free software: you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mapsforge.map.writer.model;
import java.util.Arrays;
import org.mapsforge.core.model.Coordinates;
import org.mapsforge.map.writer.OSMTagMapping;
import org.mapsforge.map.writer.util.OSMUtils;
import org.openstreetmap.osmosis.core.domain.v0_6.Node;
/**
* @author bross
*/
public class TDNode {
// private static final Logger LOGGER = Logger.getLogger(TDNode.class.getName());
private static final byte ZOOM_HOUSENUMBER = (byte) 18;
// private static final byte ZOOM_NAME = (byte) 16;
private final long id;
private final int latitude;
private final int longitude;
private final short elevation; // NOPMD by bross on 25.12.11 12:55
private final String houseNumber;
private final byte layer;
private final String name;
private short[] tags; // NOPMD by bross on 25.12.11 12:55
/**
* Constructs a new TDNode from a given osmosis node entity. Checks the validity of the entity.
*
* @param node
* the osmosis entity
* @param preferredLanguage
* the preferred language or null if no preference
* @return a new TDNode
*/
public static TDNode fromNode(Node node, String preferredLanguage) {
SpecialTagExtractionResult ster = OSMUtils.extractSpecialFields(node, preferredLanguage);
short[] knownWayTags = OSMUtils.extractKnownPOITags(node); // NOPMD by bross on 25.12.11 12:55
java.util.Arrays.sort(knownWayTags);
return new TDNode(node.getId(), Coordinates.degreesToMicrodegrees(node.getLatitude()),
Coordinates.degreesToMicrodegrees(node.getLongitude()), ster.getElevation(), ster.getLayer(),
ster.getHousenumber(), ster.getName(), knownWayTags);
}
/**
* @param id
* the OSM id
* @param latitude
* the latitude
* @param longitude
* the longitude
* @param elevation
* the elevation if existent
* @param layer
* the layer if existent
* @param houseNumber
* the house number if existent
* @param name
* the name if existent
*/
public TDNode(long id, int latitude, int longitude, short elevation, byte layer, String houseNumber, // NOPMD
// by
// bross
// on
// 25.12.11
// 12:55
String name) {
this.id = id;
this.latitude = latitude;
this.longitude = longitude;
this.elevation = elevation;
this.houseNumber = houseNumber;
this.layer = layer;
this.name = name;
}
/**
* @param id
* the OSM id
* @param latitude
* the latitude
* @param longitude
* the longitude
* @param elevation
* the elevation if existent
* @param layer
* the layer if existent
* @param houseNumber
* the house number if existent
* @param name
* the name if existent
* @param tags
* the
*/
public TDNode(long id, int latitude, int longitude, short elevation, byte layer, String houseNumber, // NOPMD
// by
// bross
// on
// 25.12.11
// 12:55
String name, short[] tags) { // NOPMD by bross on 25.12.11 12:58
this.id = id;
this.latitude = latitude;
this.longitude = longitude;
this.elevation = elevation;
this.houseNumber = houseNumber;
this.layer = layer;
this.name = name;
this.tags = tags;
}
/**
* @return true if the node represents a POI
*/
public boolean isPOI() {
		return this.houseNumber != null || this.elevation != 0 || (this.tags != null && this.tags.length > 0);
}
/**
* @return the zoom level on which the node appears first
*/
public byte getZoomAppear() {
if (this.tags == null || this.tags.length == 0) {
if (this.houseNumber != null) {
return ZOOM_HOUSENUMBER;
}
return Byte.MAX_VALUE;
}
return OSMTagMapping.getInstance().getZoomAppearPOI(this.tags);
}
/**
* @return the id
*/
public long getId() {
return this.id;
}
/**
* @return the tags
*/
public short[] getTags() { // NOPMD by bross on 25.12.11 12:58
return this.tags; // NOPMD by bross on 25.12.11 12:56
}
/**
* @param tags
* the tags to set
*/
public void setTags(short[] tags) { // NOPMD by bross on 25.12.11 12:58
this.tags = tags;
}
/**
* @return the latitude
*/
public int getLatitude() {
return this.latitude;
}
/**
* @return the longitude
*/
public int getLongitude() {
return this.longitude;
}
/**
* @return the elevation
*/
public short getElevation() { // NOPMD by bross on 25.12.11 12:58
return this.elevation;
}
<|fim▁hole|> /**
* @return the houseNumber
*/
public String getHouseNumber() {
return this.houseNumber;
}
/**
* @return the layer
*/
public byte getLayer() {
return this.layer;
}
/**
* @return the name
*/
public String getName() {
return this.name;
}
@Override
public int hashCode() {
final int prime = 31; // NOPMD by bross on 25.12.11 12:56
int result = 1;
result = prime * result + (int) (this.id ^ (this.id >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TDNode other = (TDNode) obj;
if (this.id != other.id) {
return false;
}
return true;
}
@Override
public final String toString() {
return "TDNode [id=" + this.id + ", latitude=" + this.latitude + ", longitude=" + this.longitude + ", name="
+ this.name + ", tags=" + Arrays.toString(this.tags) + "]";
}
}<|fim▁end|> | |
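// Example usage (an illustrative sketch; assumes an osmosis Node entity
// `node` obtained from the usual osmosis pipeline):
//
//   TDNode tdNode = TDNode.fromNode(node, "en");
//   if (tdNode.isPOI()) {
//       byte zoomAppear = tdNode.getZoomAppear();
//       // emit the POI for all zoom levels >= zoomAppear
//   }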
<|file_name|>header.dist.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go from "header.proto"
// DO NOT EDIT!
package protorpc
import proto "goprotobuf.googlecode.com/hg/proto"
import "os"
// Reference proto & os imports to suppress error if it's not otherwise used.
var _ = proto.GetString
var _ os.Error
type Header struct {
Seq *uint64 "PB(varint,1,req,name=seq)"
ServiceMethod *string "PB(bytes,2,req,name=service_method)"
Error *string "PB(bytes,3,opt,name=error)"
XXX_unrecognized []byte
}
func (this *Header) Reset() {
*this = Header{}
}
func init() {<|fim▁hole|><|fim▁end|> | } |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>$(function () {
// Prepare demo data
var data = [
{
"hc-key": "dm-lu",
"value": 0
},
{
"hc-key": "dm-ma",
"value": 1
},
{
"hc-key": "dm-pk",
"value": 2
},
{
"hc-key": "dm-da",
"value": 3<|fim▁hole|> {
"hc-key": "dm-pl",
"value": 4
},
{
"hc-key": "dm-pr",
"value": 5
},
{
"hc-key": "dm-an",
"value": 6
},
{
"hc-key": "dm-go",
"value": 7
},
{
"hc-key": "dm-jn",
"value": 8
},
{
"hc-key": "dm-jh",
"value": 9
}
];
// Initiate the chart
$('#container').highcharts('Map', {
title : {
text : 'Highmaps basic demo'
},
subtitle : {
text : 'Source map: <a href="http://code.highcharts.com/mapdata/countries/dm/dm-all.js">Dominica</a>'
},
mapNavigation: {
enabled: true,
buttonOptions: {
verticalAlign: 'bottom'
}
},
colorAxis: {
min: 0
},
series : [{
data : data,
mapData: Highcharts.maps['countries/dm/dm-all'],
joinBy: 'hc-key',
name: 'Random data',
states: {
hover: {
color: '#BADA55'
}
},
dataLabels: {
enabled: true,
format: '{point.name}'
}
}]
});
});<|fim▁end|> | }, |
<|file_name|>blob_storage.rs<|end_file_name|><|fim▁begin|>extern crate futures;
use indy::IndyError;<|fim▁hole|>
pub fn open_reader(type_: &str, config_json: &str) -> Result<i32, IndyError> {
blob_storage::open_reader(type_, config_json).wait()
}
pub fn open_writer(type_: &str, config_json: &str) -> Result<i32, IndyError> {
blob_storage::open_writer(type_, config_json).wait()
}<|fim▁end|> | use indy::blob_storage;
use self::futures::Future; |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var sourcemaps = require('gulp-sourcemaps');
var source = require('vinyl-source-stream');
var buffer = require('vinyl-buffer');
var browserify = require('browserify');
var watchify = require('watchify');
var babel = require('babelify');
var jade = require('gulp-jade');
var connect = require('gulp-connect');
var uglify = require('gulp-uglify');
var envify = require('envify/custom');
var file = require('gulp-file');
var buildDir = 'build';
var devBuildDir = 'dev_build';
function handleError(err) {
console.error(err); // eslint-disable-line no-console
this.emit('end');
}
function templates(outDir) {
return function() {
return gulp.src('public/*.jade')
.pipe(jade())
.pipe(gulp.dest(outDir));
};
}
function styles(outDir) {
return function() {
return gulp.src('public/*.css')
.pipe(gulp.dest(outDir))
.pipe(connect.reload());
};
}
function vendor(outDir) {
return function() {
return gulp.src('public/vendor/**')
.pipe(gulp.dest(outDir + '/vendor'));
};
}
function icons(outDir) {
return function() {
return gulp.src('public/icons/**')
.pipe(gulp.dest(outDir + '/icons'));
};
}
gulp.task('templates', templates(devBuildDir));
gulp.task('styles', styles(devBuildDir));
gulp.task('vendor', vendor(devBuildDir));
gulp.task('icons', icons(devBuildDir));
function compile(opts) {
var bundler = watchify(
browserify('./public/main.js', { debug: true })
.transform(babel)
.transform(envify({
NODE_ENV: 'development'
}), {global: true})
);
function rebundle() {
return bundler.bundle()
.on('error', handleError)
.pipe(source('main.js'))
.pipe(buffer())
.pipe(sourcemaps.init({ loadMaps: true }))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest(devBuildDir))
.pipe(connect.reload());
}
if (opts.watch) {
bundler.on('update', function() {
console.log('-> bundling...'); // eslint-disable-line no-console
return rebundle();
});
}
return rebundle();
}
gulp.task('connect', function() {
return connect.server({
root: devBuildDir,
livereload: true
});
});
gulp.task('watch', function() {
gulp.watch('public/*.css', ['styles']);
return compile({watch: true});
});
gulp.task('build', function() {
templates(buildDir)();
styles(buildDir)();
vendor(buildDir)();
icons(buildDir)();
file('CNAME', 'circuits.im', { src: true })
.pipe(gulp.dest(buildDir));
return browserify('./public/main.js')
.transform(envify({
NODE_ENV: 'production'
}), {global: true})
.transform(babel.configure({
optional: [
'optimisation.react.constantElements',
'optimisation.react.inlineElements'
]
}))
.bundle().on('error', handleError)
.pipe(source('main.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(gulp.dest(buildDir));<|fim▁hole|><|fim▁end|> | });
gulp.task('default', ['templates', 'vendor', 'styles', 'icons', 'connect', 'watch']); |
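// Example usage (a sketch of the two entry points defined above):
//
//   gulp         -> dev server on dev_build/ with watchify + livereload
//   gulp build   -> minified production bundle written to build/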
<|file_name|>InputChannelTemplate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
# Form implementation generated from reading ui file 'InputChannelTemplate.ui'
#
# Created: Sun Feb 22 13:29:16 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(427, 220)
self.verticalLayout = QtGui.QVBoxLayout(Form)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = GroupBox(Form)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.groupBox.setFont(font)
self.groupBox.setCheckable(False)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setSpacing(0)
self.gridLayout.setContentsMargins(5, 0, 0, 0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.recordCheck = QtGui.QCheckBox(self.groupBox)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.recordCheck.setFont(font)
self.recordCheck.setChecked(True)
self.recordCheck.setObjectName(_fromUtf8("recordCheck"))
self.gridLayout.addWidget(self.recordCheck, 0, 0, 1, 1)
self.displayCheck = QtGui.QCheckBox(self.groupBox)
font = QtGui.QFont()<|fim▁hole|> self.displayCheck.setFont(font)
self.displayCheck.setChecked(True)
self.displayCheck.setObjectName(_fromUtf8("displayCheck"))
self.gridLayout.addWidget(self.displayCheck, 0, 1, 1, 1)
self.recordInitCheck = QtGui.QCheckBox(self.groupBox)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.recordInitCheck.setFont(font)
self.recordInitCheck.setObjectName(_fromUtf8("recordInitCheck"))
self.gridLayout.addWidget(self.recordInitCheck, 1, 0, 1, 2)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 2, 0, 1, 1)
self.verticalLayout.addWidget(self.groupBox)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Form", None))
self.groupBox.setTitle(_translate("Form", "GroupBox", None))
self.recordCheck.setText(_translate("Form", "Record Trace", None))
self.displayCheck.setText(_translate("Form", "Display", None))
self.recordInitCheck.setText(_translate("Form", "Record Initial State", None))
from acq4.pyqtgraph import GroupBox<|fim▁end|> | font.setBold(False)
font.setWeight(50) |
<|file_name|>TurnTest.java<|end_file_name|><|fim▁begin|>package ru.istolbov;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* Test.
* @author istolbov
* @version $Id$
*/
public class TurnTest {
/**
* Test turn back array.
*/
@Test
public void whenWeTurnBackArray() {
final int[] targetArray = new int[] {5, 4, 3, 2, 1};
final int[] testArray = new int[] {1, 2, 3, 4, 5};
final Turn turn = new Turn();<|fim▁hole|> * Test sort.
*/
@Test
public void whenWeSortArray() {
final int[] targetArray = new int[] {1, 2, 3, 4, 5};
final int[] testArray = new int[] {5, 3, 4, 1, 2};
final Turn turn = new Turn();
final int[] resultArray = turn.sort(testArray);
assertThat(resultArray, is(targetArray));
}
/**
* Test rotate.
*/
@Test
public void whenWeRotateArray() {
final int[][] targetArray = new int[][] {{13, 9, 5, 1}, {14, 10, 6, 2}, {15, 11, 7, 3}, {16, 12, 8, 4}};
final int[][] testArray = new int[][] {{1, 2, 3, 4}, {5, 6, 7, 8}, {9, 10, 11, 12}, {13, 14, 15, 16}};
final Turn turn = new Turn();
final int[][] resultArray = turn.rotate(testArray);
assertThat(resultArray, is(targetArray));
}
/**
* Test duplicateDelete.
*/
@Test
public void whenWeDuplicateDeleteInArray() {
final String[] targetArray = new String[] {"Привет", "Мир", "Май"};
final String[] testArray = new String[] {"Привет", "Привет", "Мир", "Привет", "Май", "Май", "Мир"};
final Turn turn = new Turn();
final String[] resultArray = turn.duplicateDelete(testArray);
assertThat(resultArray, is(targetArray));
}
/**
* Test join.
*/
@Test
public void whenWeJoinArrays() {
final int[] firstTestArray = new int[] {1, 3, 5, 7, 9};
final int[] secondTestArray = new int[] {2, 4, 6, 8, 10};
final int[] targetArray = new int[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
final Turn turn = new Turn();
final int[] resultArray = turn.join(firstTestArray, secondTestArray);
assertThat(resultArray, is(targetArray));
}
}<|fim▁end|> | final int[] resultArray = turn.back(testArray);
assertThat(resultArray, is(targetArray));
}
/** |
<|file_name|>test_logging.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Test harness for the logging module. Run all tests.
Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
"""
import logging
import logging.handlers
import logging.config
import codecs
import datetime
import pickle
import io
import gc
import json
import os
import queue
import random
import re
import select
import socket
import struct
import sys
import tempfile
from test.support import (captured_stdout, run_with_locale, run_unittest,
patch, requires_zlib, TestHandler, Matcher)
import textwrap
import time
import unittest
import warnings
import weakref
try:
import threading
# The following imports are needed only for tests which
# require threading
import asynchat
import asyncore
import errno
from http.server import HTTPServer, BaseHTTPRequestHandler
import smtpd
from urllib.parse import urlparse, parse_qs
from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
ThreadingTCPServer, StreamRequestHandler)
except ImportError:
threading = None
try:
import win32evtlog
except ImportError:
win32evtlog = None
try:
import win32evtlogutil
except ImportError:
win32evtlogutil = None
win32evtlog = None
try:
import zlib
except ImportError:
pass
class BaseTest(unittest.TestCase):
"""Base class for logging tests."""
log_format = "%(name)s -> %(levelname)s: %(message)s"
expected_log_pat = r"^([\w.]+) -> ([\w]+): ([\d]+)$"
message_num = 0
def setUp(self):
"""Setup the default logging stream to an internal StringIO instance,
so that we can examine log output as we want."""
logger_dict = logging.getLogger().manager.loggerDict
logging._acquireLock()
try:
self.saved_handlers = logging._handlers.copy()
self.saved_handler_list = logging._handlerList[:]
self.saved_loggers = saved_loggers = logger_dict.copy()
self.saved_level_names = logging._levelNames.copy()
self.logger_states = logger_states = {}
for name in saved_loggers:
logger_states[name] = getattr(saved_loggers[name],
'disabled', None)
finally:
logging._releaseLock()
# Set two unused loggers
self.logger1 = logging.getLogger("\xab\xd7\xbb")
self.logger2 = logging.getLogger("\u013f\u00d6\u0047")
self.root_logger = logging.getLogger("")
self.original_logging_level = self.root_logger.getEffectiveLevel()
self.stream = io.StringIO()
self.root_logger.setLevel(logging.DEBUG)
self.root_hdlr = logging.StreamHandler(self.stream)
self.root_formatter = logging.Formatter(self.log_format)
self.root_hdlr.setFormatter(self.root_formatter)
if self.logger1.hasHandlers():
hlist = self.logger1.handlers + self.root_logger.handlers
raise AssertionError('Unexpected handlers: %s' % hlist)
if self.logger2.hasHandlers():
hlist = self.logger2.handlers + self.root_logger.handlers
raise AssertionError('Unexpected handlers: %s' % hlist)
self.root_logger.addHandler(self.root_hdlr)
self.assertTrue(self.logger1.hasHandlers())
self.assertTrue(self.logger2.hasHandlers())
def tearDown(self):
"""Remove our logging stream, and restore the original logging
level."""
self.stream.close()
self.root_logger.removeHandler(self.root_hdlr)
while self.root_logger.handlers:
h = self.root_logger.handlers[0]
self.root_logger.removeHandler(h)
h.close()
self.root_logger.setLevel(self.original_logging_level)
logging._acquireLock()
try:
logging._levelNames.clear()
logging._levelNames.update(self.saved_level_names)
logging._handlers.clear()
logging._handlers.update(self.saved_handlers)
logging._handlerList[:] = self.saved_handler_list
loggerDict = logging.getLogger().manager.loggerDict
loggerDict.clear()
loggerDict.update(self.saved_loggers)
logger_states = self.logger_states
for name in self.logger_states:
if logger_states[name] is not None:
self.saved_loggers[name].disabled = logger_states[name]
finally:
logging._releaseLock()
def assert_log_lines(self, expected_values, stream=None):
"""Match the collected log lines against the regular expression
self.expected_log_pat, and compare the extracted group values to
the expected_values list of tuples."""
stream = stream or self.stream
pat = re.compile(self.expected_log_pat)
try:
stream.reset()
actual_lines = stream.readlines()
except AttributeError:
# StringIO.StringIO lacks a reset() method.
actual_lines = stream.getvalue().splitlines()
self.assertEqual(len(actual_lines), len(expected_values))
for actual, expected in zip(actual_lines, expected_values):
match = pat.search(actual)
if not match:
self.fail("Log line does not match expected pattern:\n" +
actual)
self.assertEqual(tuple(match.groups()), expected)
s = stream.read()
if s:
self.fail("Remaining output at end of log stream:\n" + s)
def next_message(self):
"""Generate a message consisting solely of an auto-incrementing
integer."""
self.message_num += 1
return "%d" % self.message_num
class BuiltinLevelsTest(BaseTest):
"""Test builtin levels and their inheritance."""
def test_flat(self):
#Logging levels in a flat logger namespace.
m = self.next_message
ERR = logging.getLogger("ERR")
ERR.setLevel(logging.ERROR)
INF = logging.LoggerAdapter(logging.getLogger("INF"), {})
INF.setLevel(logging.INFO)
DEB = logging.getLogger("DEB")
DEB.setLevel(logging.DEBUG)
# These should log.
ERR.log(logging.CRITICAL, m())
ERR.error(m())
INF.log(logging.CRITICAL, m())
INF.error(m())
INF.warning(m())
INF.info(m())
DEB.log(logging.CRITICAL, m())
DEB.error(m())
DEB.warning(m())
DEB.info(m())
DEB.debug(m())
# These should not log.
ERR.warning(m())
ERR.info(m())
ERR.debug(m())
INF.debug(m())
self.assert_log_lines([
('ERR', 'CRITICAL', '1'),
('ERR', 'ERROR', '2'),
('INF', 'CRITICAL', '3'),
('INF', 'ERROR', '4'),
('INF', 'WARNING', '5'),
('INF', 'INFO', '6'),
('DEB', 'CRITICAL', '7'),
('DEB', 'ERROR', '8'),
('DEB', 'WARNING', '9'),
('DEB', 'INFO', '10'),
('DEB', 'DEBUG', '11'),
])
def test_nested_explicit(self):
# Logging levels in a nested namespace, all explicitly set.
m = self.next_message
INF = logging.getLogger("INF")
INF.setLevel(logging.INFO)
INF_ERR = logging.getLogger("INF.ERR")
INF_ERR.setLevel(logging.ERROR)
# These should log.
INF_ERR.log(logging.CRITICAL, m())
INF_ERR.error(m())
# These should not log.
INF_ERR.warning(m())
INF_ERR.info(m())
INF_ERR.debug(m())
self.assert_log_lines([
('INF.ERR', 'CRITICAL', '1'),
('INF.ERR', 'ERROR', '2'),
])
def test_nested_inherited(self):
#Logging levels in a nested namespace, inherited from parent loggers.
m = self.next_message
INF = logging.getLogger("INF")
INF.setLevel(logging.INFO)
INF_ERR = logging.getLogger("INF.ERR")
INF_ERR.setLevel(logging.ERROR)
INF_UNDEF = logging.getLogger("INF.UNDEF")
INF_ERR_UNDEF = logging.getLogger("INF.ERR.UNDEF")
UNDEF = logging.getLogger("UNDEF")
# These should log.
INF_UNDEF.log(logging.CRITICAL, m())
INF_UNDEF.error(m())
INF_UNDEF.warning(m())
INF_UNDEF.info(m())
INF_ERR_UNDEF.log(logging.CRITICAL, m())
INF_ERR_UNDEF.error(m())
# These should not log.
INF_UNDEF.debug(m())
INF_ERR_UNDEF.warning(m())
INF_ERR_UNDEF.info(m())
INF_ERR_UNDEF.debug(m())
self.assert_log_lines([
('INF.UNDEF', 'CRITICAL', '1'),
('INF.UNDEF', 'ERROR', '2'),
('INF.UNDEF', 'WARNING', '3'),
('INF.UNDEF', 'INFO', '4'),
('INF.ERR.UNDEF', 'CRITICAL', '5'),
('INF.ERR.UNDEF', 'ERROR', '6'),
])
def test_nested_with_virtual_parent(self):
# Logging levels when some parent does not exist yet.
m = self.next_message
INF = logging.getLogger("INF")
GRANDCHILD = logging.getLogger("INF.BADPARENT.UNDEF")
CHILD = logging.getLogger("INF.BADPARENT")
INF.setLevel(logging.INFO)
# These should log.
GRANDCHILD.log(logging.FATAL, m())
GRANDCHILD.info(m())
CHILD.log(logging.FATAL, m())
CHILD.info(m())
# These should not log.
GRANDCHILD.debug(m())
CHILD.debug(m())
self.assert_log_lines([
('INF.BADPARENT.UNDEF', 'CRITICAL', '1'),
('INF.BADPARENT.UNDEF', 'INFO', '2'),
('INF.BADPARENT', 'CRITICAL', '3'),
('INF.BADPARENT', 'INFO', '4'),
])
class BasicFilterTest(BaseTest):
"""Test the bundled Filter class."""
def test_filter(self):
# Only messages satisfying the specified criteria pass through the
# filter.
filter_ = logging.Filter("spam.eggs")
handler = self.root_logger.handlers[0]
try:
handler.addFilter(filter_)
spam = logging.getLogger("spam")
spam_eggs = logging.getLogger("spam.eggs")
spam_eggs_fish = logging.getLogger("spam.eggs.fish")
spam_bakedbeans = logging.getLogger("spam.bakedbeans")
spam.info(self.next_message())
spam_eggs.info(self.next_message()) # Good.
spam_eggs_fish.info(self.next_message()) # Good.
spam_bakedbeans.info(self.next_message())
self.assert_log_lines([
('spam.eggs', 'INFO', '2'),
('spam.eggs.fish', 'INFO', '3'),
])
finally:
handler.removeFilter(filter_)
def test_callable_filter(self):
# Only messages satisfying the specified criteria pass through the
# filter.
def filterfunc(record):
parts = record.name.split('.')
prefix = '.'.join(parts[:2])
return prefix == 'spam.eggs'
handler = self.root_logger.handlers[0]
try:
handler.addFilter(filterfunc)
spam = logging.getLogger("spam")
spam_eggs = logging.getLogger("spam.eggs")
spam_eggs_fish = logging.getLogger("spam.eggs.fish")
spam_bakedbeans = logging.getLogger("spam.bakedbeans")
spam.info(self.next_message())
spam_eggs.info(self.next_message()) # Good.
spam_eggs_fish.info(self.next_message()) # Good.
spam_bakedbeans.info(self.next_message())
self.assert_log_lines([
('spam.eggs', 'INFO', '2'),
('spam.eggs.fish', 'INFO', '3'),
])
finally:
handler.removeFilter(filterfunc)
def test_empty_filter(self):
f = logging.Filter()
r = logging.makeLogRecord({'name': 'spam.eggs'})
self.assertTrue(f.filter(r))
#
# First, we define our levels. There can be as many as you want - the only
# limitations are that they should be integers, the lowest should be > 0 and
# larger values mean less information being logged. If you need specific
# level values which do not fit into these limitations, you can use a
# mapping dictionary to convert between your application levels and the
# logging system.
#
SILENT = 120
TACITURN = 119
TERSE = 118
EFFUSIVE = 117
SOCIABLE = 116
VERBOSE = 115
TALKATIVE = 114
GARRULOUS = 113
CHATTERBOX = 112
BORING = 111
LEVEL_RANGE = range(BORING, SILENT + 1)
#
# Next, we define names for our levels. You don't need to do this - in which
# case the system will use "Level n" to denote the text for the level.
#
my_logging_levels = {
SILENT : 'Silent',
TACITURN : 'Taciturn',
TERSE : 'Terse',
EFFUSIVE : 'Effusive',
SOCIABLE : 'Sociable',
VERBOSE : 'Verbose',
TALKATIVE : 'Talkative',
GARRULOUS : 'Garrulous',
CHATTERBOX : 'Chatterbox',
BORING : 'Boring',
}
class GarrulousFilter(logging.Filter):
"""A filter which blocks garrulous messages."""
def filter(self, record):
return record.levelno != GARRULOUS
class VerySpecificFilter(logging.Filter):
"""A filter which blocks sociable and taciturn messages."""
def filter(self, record):
return record.levelno not in [SOCIABLE, TACITURN]
class CustomLevelsAndFiltersTest(BaseTest):
"""Test various filtering possibilities with custom logging levels."""
# Skip the logger name group.
expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
def setUp(self):
BaseTest.setUp(self)
for k, v in my_logging_levels.items():
logging.addLevelName(k, v)
def log_at_all_levels(self, logger):
for lvl in LEVEL_RANGE:
logger.log(lvl, self.next_message())
def test_logger_filter(self):
# Filter at logger level.
self.root_logger.setLevel(VERBOSE)
# Levels >= 'Verbose' are good.
self.log_at_all_levels(self.root_logger)
self.assert_log_lines([
('Verbose', '5'),
('Sociable', '6'),
('Effusive', '7'),
('Terse', '8'),
('Taciturn', '9'),
('Silent', '10'),
])
def test_handler_filter(self):
# Filter at handler level.
self.root_logger.handlers[0].setLevel(SOCIABLE)
try:
# Levels >= 'Sociable' are good.
self.log_at_all_levels(self.root_logger)
self.assert_log_lines([
('Sociable', '6'),
('Effusive', '7'),
('Terse', '8'),
('Taciturn', '9'),
('Silent', '10'),
])
finally:
self.root_logger.handlers[0].setLevel(logging.NOTSET)
def test_specific_filters(self):
# Set a specific filter object on the handler, and then add another
# filter object on the logger itself.
handler = self.root_logger.handlers[0]
specific_filter = None
garr = GarrulousFilter()
handler.addFilter(garr)
try:
self.log_at_all_levels(self.root_logger)
first_lines = [
# Notice how 'Garrulous' is missing
('Boring', '1'),
('Chatterbox', '2'),
('Talkative', '4'),
('Verbose', '5'),
('Sociable', '6'),
('Effusive', '7'),
('Terse', '8'),
('Taciturn', '9'),
('Silent', '10'),
]
self.assert_log_lines(first_lines)
specific_filter = VerySpecificFilter()
self.root_logger.addFilter(specific_filter)
self.log_at_all_levels(self.root_logger)
self.assert_log_lines(first_lines + [
# Not only 'Garrulous' is still missing, but also 'Sociable'
# and 'Taciturn'
('Boring', '11'),
('Chatterbox', '12'),
('Talkative', '14'),
('Verbose', '15'),
('Effusive', '17'),
('Terse', '18'),
('Silent', '20'),
])
finally:
if specific_filter:
self.root_logger.removeFilter(specific_filter)
handler.removeFilter(garr)
class HandlerTest(BaseTest):
def test_name(self):
h = logging.Handler()
h.name = 'generic'
self.assertEqual(h.name, 'generic')
h.name = 'anothergeneric'
self.assertEqual(h.name, 'anothergeneric')
self.assertRaises(NotImplementedError, h.emit, None)
def test_builtin_handlers(self):
# We can't actually *use* too many handlers in the tests,
# but we can try instantiating them with various options
if sys.platform in ('linux', 'darwin'):
for existing in (True, False):
fd, fn = tempfile.mkstemp()
os.close(fd)
if not existing:
os.unlink(fn)
h = logging.handlers.WatchedFileHandler(fn, delay=True)
if existing:
dev, ino = h.dev, h.ino
self.assertEqual(dev, -1)
self.assertEqual(ino, -1)
r = logging.makeLogRecord({'msg': 'Test'})
h.handle(r)
# Now remove the file.
os.unlink(fn)
self.assertFalse(os.path.exists(fn))
# The next call should recreate the file.
h.handle(r)
self.assertTrue(os.path.exists(fn))
else:
self.assertEqual(h.dev, -1)
self.assertEqual(h.ino, -1)
h.close()
if existing:
os.unlink(fn)
if sys.platform == 'darwin':
sockname = '/var/run/syslog'
else:
sockname = '/dev/log'
try:
h = logging.handlers.SysLogHandler(sockname)
self.assertEqual(h.facility, h.LOG_USER)
self.assertTrue(h.unixsocket)
h.close()
except socket.error: # syslogd might not be available
pass
for method in ('GET', 'POST', 'PUT'):
if method == 'PUT':
self.assertRaises(ValueError, logging.handlers.HTTPHandler,
'localhost', '/log', method)
else:
h = logging.handlers.HTTPHandler('localhost', '/log', method)
h.close()
h = logging.handlers.BufferingHandler(0)
r = logging.makeLogRecord({})
self.assertTrue(h.shouldFlush(r))
h.close()
h = logging.handlers.BufferingHandler(1)
self.assertFalse(h.shouldFlush(r))
h.close()
@unittest.skipIf(os.name == 'nt', 'WatchedFileHandler not appropriate for Windows.')
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_race(self):
        # Issue #14632: WatchedFileHandler must cope with the log file
        # being removed concurrently by another thread.
def remove_loop(fname, tries):
for _ in range(tries):
try:
os.unlink(fname)
except OSError:
pass
time.sleep(0.004 * random.randint(0, 4))
del_count = 500
log_count = 500
for delay in (False, True):
fd, fn = tempfile.mkstemp('.log', 'test_logging-3-')
os.close(fd)
remover = threading.Thread(target=remove_loop, args=(fn, del_count))
remover.daemon = True
remover.start()
h = logging.handlers.WatchedFileHandler(fn, delay=delay)
f = logging.Formatter('%(asctime)s: %(levelname)s: %(message)s')
h.setFormatter(f)
try:
for _ in range(log_count):
time.sleep(0.005)
r = logging.makeLogRecord({'msg': 'testing' })
h.handle(r)
finally:
remover.join()
h.close()
if os.path.exists(fn):
os.unlink(fn)
class BadStream(object):
def write(self, data):
raise RuntimeError('deliberate mistake')
class TestStreamHandler(logging.StreamHandler):
def handleError(self, record):
self.error_record = record
class StreamHandlerTest(BaseTest):
def test_error_handling(self):
h = TestStreamHandler(BadStream())
r = logging.makeLogRecord({})
old_raise = logging.raiseExceptions
old_stderr = sys.stderr
try:
h.handle(r)
self.assertIs(h.error_record, r)
h = logging.StreamHandler(BadStream())
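            # With logging.raiseExceptions still True, the default
            # handleError() prints the traceback to sys.stderr.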
sys.stderr = sio = io.StringIO()
h.handle(r)
self.assertIn('\nRuntimeError: deliberate mistake\n',
sio.getvalue())
logging.raiseExceptions = False
sys.stderr = sio = io.StringIO()
h.handle(r)
self.assertEqual('', sio.getvalue())
finally:
logging.raiseExceptions = old_raise
sys.stderr = old_stderr
# -- The following section could be moved into a server_helper.py module
# -- if it proves to be of wider utility than just test_logging
if threading:
class TestSMTPChannel(smtpd.SMTPChannel):
"""
This derived class has had to be created because smtpd does not
support use of custom channel maps, although they are allowed by
asyncore's design. Issue #11959 has been raised to address this,
and if resolved satisfactorily, some of this code can be removed.
"""
def __init__(self, server, conn, addr, sockmap):
asynchat.async_chat.__init__(self, conn, sockmap)
self.smtp_server = server
self.conn = conn
self.addr = addr
self.data_size_limit = None
self.received_lines = []
self.smtp_state = self.COMMAND
self.seen_greeting = ''
self.mailfrom = None
self.rcpttos = []
self.received_data = ''
self.fqdn = socket.getfqdn()
self.num_bytes = 0
try:
self.peer = conn.getpeername()
except socket.error as err:
# a race condition may occur if the other end is closing
# before we can get the peername
self.close()
if err.args[0] != errno.ENOTCONN:
raise
return
self.push('220 %s %s' % (self.fqdn, smtpd.__version__))
self.set_terminator(b'\r\n')
self.extended_smtp = False
class TestSMTPServer(smtpd.SMTPServer):
"""
This class implements a test SMTP server.
:param addr: A (host, port) tuple which the server listens on.
You can specify a port value of zero: the server's
*port* attribute will hold the actual port number
used, which can be used in client connections.
:param handler: A callable which will be called to process
incoming messages. The handler will be passed
the client address tuple, who the message is from,
a list of recipients and the message data.
:param poll_interval: The interval, in seconds, used in the underlying
:func:`select` or :func:`poll` call by
:func:`asyncore.loop`.
:param sockmap: A dictionary which will be used to hold
:class:`asyncore.dispatcher` instances used by
:func:`asyncore.loop`. This avoids changing the
:mod:`asyncore` module's global state.
"""
channel_class = TestSMTPChannel
def __init__(self, addr, handler, poll_interval, sockmap):
self._localaddr = addr
self._remoteaddr = None
self.data_size_limit = None
self.sockmap = sockmap
asyncore.dispatcher.__init__(self, map=sockmap)
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setblocking(0)
self.set_socket(sock, map=sockmap)
# try to re-use a server port if possible
self.set_reuse_addr()
self.bind(addr)
self.port = sock.getsockname()[1]
self.listen(5)
except:
self.close()
raise
self._handler = handler
self._thread = None
self.poll_interval = poll_interval
def handle_accepted(self, conn, addr):
"""
Redefined only because the base class does not pass in a
map, forcing use of a global in :mod:`asyncore`.
"""
channel = self.channel_class(self, conn, addr, self.sockmap)
def process_message(self, peer, mailfrom, rcpttos, data):
"""
Delegates to the handler passed in to the server's constructor.
Typically, this will be a test case method.
:param peer: The client (host, port) tuple.
:param mailfrom: The address of the sender.
:param rcpttos: The addresses of the recipients.
:param data: The message.
"""
self._handler(peer, mailfrom, rcpttos, data)
def start(self):
"""
Start the server running on a separate daemon thread.
"""
self._thread = t = threading.Thread(target=self.serve_forever,
args=(self.poll_interval,))
t.setDaemon(True)
t.start()
def serve_forever(self, poll_interval):
"""
Run the :mod:`asyncore` loop until normal termination
conditions arise.
:param poll_interval: The interval, in seconds, used in the underlying
:func:`select` or :func:`poll` call by
:func:`asyncore.loop`.
"""
try:
asyncore.loop(poll_interval, map=self.sockmap)
except select.error:
                # On FreeBSD 8, closing the server repeatedly
# raises this error. We swallow it if the
# server has been closed.
if self.connected or self.accepting:
raise
def stop(self, timeout=None):
"""
Stop the thread by closing the server instance.
Wait for the server thread to terminate.
:param timeout: How long to wait for the server thread
to terminate.
"""
self.close()
self._thread.join(timeout)
self._thread = None
class ControlMixin(object):
"""
This mixin is used to start a server on a separate thread, and
shut it down programmatically. Request handling is simplified - instead
of needing to derive a suitable RequestHandler subclass, you just
provide a callable which will be passed each received request to be
processed.
:param handler: A handler callable which will be called with a
single parameter - the request - in order to
process the request. This handler is called on the
server thread, effectively meaning that requests are
processed serially. While not quite Web scale ;-),
this should be fine for testing applications.
:param poll_interval: The polling interval in seconds.
"""
def __init__(self, handler, poll_interval):
self._thread = None
self.poll_interval = poll_interval
self._handler = handler
self.ready = threading.Event()
def start(self):
"""
Create a daemon thread to run the server, and start it.
"""
self._thread = t = threading.Thread(target=self.serve_forever,
args=(self.poll_interval,))
t.setDaemon(True)
t.start()
def serve_forever(self, poll_interval):
"""
Run the server. Set the ready flag before entering the
service loop.
"""
self.ready.set()
super(ControlMixin, self).serve_forever(poll_interval)
def stop(self, timeout=None):
"""
Tell the server thread to stop, and wait for it to do so.
:param timeout: How long to wait for the server thread
to terminate.
"""
self.shutdown()
if self._thread is not None:
self._thread.join(timeout)
self._thread = None
self.server_close()
self.ready.clear()
class TestHTTPServer(ControlMixin, HTTPServer):
"""
An HTTP server which is controllable using :class:`ControlMixin`.
:param addr: A tuple with the IP address and port to listen on.
:param handler: A handler callable which will be called with a
single parameter - the request - in order to
process the request.
:param poll_interval: The polling interval in seconds.
:param log: Pass ``True`` to enable log messages.
"""
def __init__(self, addr, handler, poll_interval=0.5,
log=False, sslctx=None):
class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler):
                def __getattr__(self, name):
if name.startswith('do_'):
return self.process_request
raise AttributeError(name)
def process_request(self):
self.server._handler(self)
def log_message(self, format, *args):
if log:
super(DelegatingHTTPRequestHandler,
self).log_message(format, *args)
HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler)
ControlMixin.__init__(self, handler, poll_interval)
self.sslctx = sslctx
def get_request(self):
try:
sock, addr = self.socket.accept()
if self.sslctx:
sock = self.sslctx.wrap_socket(sock, server_side=True)
except socket.error as e:
# socket errors are silenced by the caller, print them here
sys.stderr.write("Got an error:\n%s\n" % e)
raise
return sock, addr
class TestTCPServer(ControlMixin, ThreadingTCPServer):
"""
A TCP server which is controllable using :class:`ControlMixin`.
:param addr: A tuple with the IP address and port to listen on.
:param handler: A handler callable which will be called with a single
parameter - the request - in order to process the request.
:param poll_interval: The polling interval in seconds.
        :param bind_and_activate: If True (the default), binds the server and starts it
listening. If False, you need to call
:meth:`server_bind` and :meth:`server_activate` at
some later time before calling :meth:`start`, so that
the server will set up the socket and listen on it.
"""
allow_reuse_address = True
def __init__(self, addr, handler, poll_interval=0.5,
bind_and_activate=True):
class DelegatingTCPRequestHandler(StreamRequestHandler):
def handle(self):
self.server._handler(self)
ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler,
bind_and_activate)
ControlMixin.__init__(self, handler, poll_interval)
def server_bind(self):
super(TestTCPServer, self).server_bind()
self.port = self.socket.getsockname()[1]
class TestUDPServer(ControlMixin, ThreadingUDPServer):
"""
A UDP server which is controllable using :class:`ControlMixin`.
:param addr: A tuple with the IP address and port to listen on.
:param handler: A handler callable which will be called with a
single parameter - the request - in order to
process the request.
:param poll_interval: The polling interval for shutdown requests,
in seconds.
        :param bind_and_activate: If True (the default), binds the server and
starts it listening. If False, you need to
call :meth:`server_bind` and
:meth:`server_activate` at some later time
before calling :meth:`start`, so that the server will
set up the socket and listen on it.
"""
def __init__(self, addr, handler, poll_interval=0.5,
bind_and_activate=True):
class DelegatingUDPRequestHandler(DatagramRequestHandler):
def handle(self):
self.server._handler(self)
def finish(self):
data = self.wfile.getvalue()
if data:
try:
super(DelegatingUDPRequestHandler, self).finish()
except socket.error:
if not self.server._closed:
raise
ThreadingUDPServer.__init__(self, addr,
DelegatingUDPRequestHandler,
bind_and_activate)
ControlMixin.__init__(self, handler, poll_interval)
self._closed = False
def server_bind(self):
super(TestUDPServer, self).server_bind()
self.port = self.socket.getsockname()[1]
def server_close(self):
super(TestUDPServer, self).server_close()
self._closed = True
# - end of server_helper section
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPHandlerTest(BaseTest):
def test_basic(self):
sockmap = {}
server = TestSMTPServer(('localhost', 0), self.process_message, 0.001,
sockmap)
server.start()
addr = ('localhost', server.port)
h = logging.handlers.SMTPHandler(addr, 'me', 'you', 'Log', timeout=5.0)
self.assertEqual(h.toaddrs, ['you'])
self.messages = []
r = logging.makeLogRecord({'msg': 'Hello'})
self.handled = threading.Event()
h.handle(r)
        self.handled.wait(5.0)  # Issue #14314: don't wait forever
server.stop()
self.assertTrue(self.handled.is_set())
self.assertEqual(len(self.messages), 1)
peer, mailfrom, rcpttos, data = self.messages[0]
self.assertEqual(mailfrom, 'me')
self.assertEqual(rcpttos, ['you'])
self.assertIn('\nSubject: Log\n', data)
self.assertTrue(data.endswith('\n\nHello'))
h.close()
def process_message(self, *args):
self.messages.append(args)
self.handled.set()
class MemoryHandlerTest(BaseTest):
"""Tests for the MemoryHandler."""
# Do not bother with a logger name group.
expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
def setUp(self):
BaseTest.setUp(self)
self.mem_hdlr = logging.handlers.MemoryHandler(10, logging.WARNING,
self.root_hdlr)
self.mem_logger = logging.getLogger('mem')
self.mem_logger.propagate = 0
self.mem_logger.addHandler(self.mem_hdlr)
def tearDown(self):
self.mem_hdlr.close()
BaseTest.tearDown(self)
def test_flush(self):
# The memory handler flushes to its target handler based on specific
# criteria (message count and message level).
self.mem_logger.debug(self.next_message())
self.assert_log_lines([])
self.mem_logger.info(self.next_message())
self.assert_log_lines([])
# This will flush because the level is >= logging.WARNING
self.mem_logger.warning(self.next_message())
lines = [
('DEBUG', '1'),
('INFO', '2'),
('WARNING', '3'),
]
self.assert_log_lines(lines)
for n in (4, 14):
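            # n is the number of the first message in each batch of ten;
            # the handler's capacity is 10, so the tenth buffered record
            # triggers a flush each time around.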
for i in range(9):
self.mem_logger.debug(self.next_message())
self.assert_log_lines(lines)
# This will flush because it's the 10th message since the last
# flush.
self.mem_logger.debug(self.next_message())
lines = lines + [('DEBUG', str(i)) for i in range(n, n + 10)]
self.assert_log_lines(lines)
self.mem_logger.debug(self.next_message())
self.assert_log_lines(lines)
class ExceptionFormatter(logging.Formatter):
"""A special exception formatter."""
def formatException(self, ei):
return "Got a [%s]" % ei[0].__name__
class ConfigFileTest(BaseTest):
"""Reading logging config from a .ini-style config file."""
expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
# config0 is a standard configuration.
config0 = """
[loggers]
keys=root
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=WARNING
handlers=hand1
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
datefmt=
"""
# config1 adds a little to the standard configuration.
config1 = """
[loggers]
keys=root,parser
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=WARNING
handlers=
[logger_parser]
level=DEBUG
handlers=hand1
propagate=1
qualname=compiler.parser
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
datefmt=
"""
# config1a moves the handler to the root.
config1a = """
[loggers]
keys=root,parser
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=WARNING
handlers=hand1
[logger_parser]
level=DEBUG
handlers=
propagate=1
qualname=compiler.parser
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
datefmt=
"""
# config2 has a subtle configuration error that should be reported
config2 = config1.replace("sys.stdout", "sys.stbout")
# config3 has a less subtle configuration error
config3 = config1.replace("formatter=form1", "formatter=misspelled_name")
# config4 specifies a custom formatter class to be loaded
config4 = """
[loggers]
keys=root
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=NOTSET
handlers=hand1
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
class=""" + __name__ + """.ExceptionFormatter
format=%(levelname)s:%(name)s:%(message)s
datefmt=
"""
# config5 specifies a custom handler class to be loaded
config5 = config1.replace('class=StreamHandler', 'class=logging.StreamHandler')
# config6 uses ', ' delimiters in the handlers and formatters sections
config6 = """
[loggers]
keys=root,parser
[handlers]
keys=hand1, hand2
[formatters]
keys=form1, form2
[logger_root]
level=WARNING
handlers=
[logger_parser]
level=DEBUG
handlers=hand1
propagate=1
qualname=compiler.parser
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[handler_hand2]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stderr,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
datefmt=
[formatter_form2]
format=%(message)s
datefmt=
"""
# config7 adds a compiler logger.
config7 = """
[loggers]
keys=root,parser,compiler
[handlers]
keys=hand1
[formatters]
keys=form1
[logger_root]
level=WARNING
handlers=hand1
[logger_compiler]
level=DEBUG
handlers=
propagate=1
qualname=compiler
[logger_parser]
level=DEBUG
handlers=
propagate=1
qualname=compiler.parser
[handler_hand1]
class=StreamHandler
level=NOTSET
formatter=form1
args=(sys.stdout,)
[formatter_form1]
format=%(levelname)s ++ %(message)s
datefmt=
"""
def apply_config(self, conf):
file = io.StringIO(textwrap.dedent(conf))
logging.config.fileConfig(file)
def test_config0_ok(self):
# A simple config file which overrides the default settings.
with captured_stdout() as output:
self.apply_config(self.config0)
logger = logging.getLogger()
# Won't output anything
logger.info(self.next_message())
# Outputs a message
logger.error(self.next_message())
self.assert_log_lines([
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config1_ok(self, config=config1):
# A config file defining a sub-parser as well.
with captured_stdout() as output:
self.apply_config(config)
logger = logging.getLogger("compiler.parser")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config2_failure(self):
# A simple config file which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config2)
def test_config3_failure(self):
# A simple config file which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config3)
def test_config4_ok(self):
# A config file specifying a custom formatter class.
with captured_stdout() as output:
self.apply_config(self.config4)
logger = logging.getLogger()
try:
raise RuntimeError()
except RuntimeError:
logging.exception("just testing")
sys.stdout.seek(0)
self.assertEqual(output.getvalue(),
"ERROR:root:just testing\nGot a [RuntimeError]\n")
# Original logger output is empty
self.assert_log_lines([])
def test_config5_ok(self):
self.test_config1_ok(config=self.config5)
def test_config6_ok(self):
self.test_config1_ok(config=self.config6)
def test_config7_ok(self):
with captured_stdout() as output:
self.apply_config(self.config1a)
logger = logging.getLogger("compiler.parser")
# See issue #11424. compiler-hyphenated sorts
# between compiler and compiler.xyz and this
# was preventing compiler.xyz from being included
# in the child loggers of compiler because of an
# overzealous loop termination condition.
hyphenated = logging.getLogger('compiler-hyphenated')
# All will output a message
logger.info(self.next_message())
logger.error(self.next_message())
hyphenated.critical(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
('CRITICAL', '3'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
with captured_stdout() as output:
self.apply_config(self.config7)
logger = logging.getLogger("compiler.parser")
self.assertFalse(logger.disabled)
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
logger = logging.getLogger("compiler.lexer")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
# Will not appear
hyphenated.critical(self.next_message())
self.assert_log_lines([
('INFO', '4'),
('ERROR', '5'),
('INFO', '6'),
('ERROR', '7'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
@unittest.skipUnless(threading, 'Threading required for this test.')
class SocketHandlerTest(BaseTest):
"""Test for SocketHandler objects."""
def setUp(self):
"""Set up a TCP server to receive log messages, and a SocketHandler
pointing to that server's address and port."""
BaseTest.setUp(self)
addr = ('localhost', 0)
self.server = server = TestTCPServer(addr, self.handle_socket,
0.01)
server.start()
server.ready.wait()
self.sock_hdlr = logging.handlers.SocketHandler('localhost',
server.port)
self.log_output = ''
self.root_logger.removeHandler(self.root_logger.handlers[0])
self.root_logger.addHandler(self.sock_hdlr)
self.handled = threading.Semaphore(0)
def tearDown(self):
"""Shutdown the TCP server."""
try:
self.server.stop(2.0)
self.root_logger.removeHandler(self.sock_hdlr)
self.sock_hdlr.close()
finally:
BaseTest.tearDown(self)
def handle_socket(self, request):
conn = request.connection
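        # SocketHandler sends each LogRecord as a pickled dict framed by a
        # 4-byte big-endian length prefix; read the prefix, then the payload.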
while True:
chunk = conn.recv(4)
if len(chunk) < 4:
break
slen = struct.unpack(">L", chunk)[0]
chunk = conn.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
obj = pickle.loads(chunk)
record = logging.makeLogRecord(obj)
self.log_output += record.msg + '\n'
self.handled.release()
def test_output(self):
# The log message sent to the SocketHandler is properly received.
logger = logging.getLogger("tcp")
logger.error("spam")
self.handled.acquire()
logger.debug("eggs")
self.handled.acquire()
self.assertEqual(self.log_output, "spam\neggs\n")
def test_noserver(self):
# Kill the server
self.server.stop(2.0)
#The logging call should try to connect, which should fail
try:
raise RuntimeError('Deliberate mistake')
except RuntimeError:
self.root_logger.exception('Never sent')
self.root_logger.error('Never sent, either')
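        # After a failed connection attempt, SocketHandler backs off and
        # stores the next permitted retry timestamp in retryTime.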
now = time.time()
self.assertTrue(self.sock_hdlr.retryTime > now)
time.sleep(self.sock_hdlr.retryTime - now + 0.001)
self.root_logger.error('Nor this')
@unittest.skipUnless(threading, 'Threading required for this test.')
class DatagramHandlerTest(BaseTest):
"""Test for DatagramHandler."""
def setUp(self):
"""Set up a UDP server to receive log messages, and a DatagramHandler
pointing to that server's address and port."""
BaseTest.setUp(self)
addr = ('localhost', 0)
self.server = server = TestUDPServer(addr, self.handle_datagram, 0.01)
server.start()
server.ready.wait()
self.sock_hdlr = logging.handlers.DatagramHandler('localhost',
server.port)
self.log_output = ''
self.root_logger.removeHandler(self.root_logger.handlers[0])
self.root_logger.addHandler(self.sock_hdlr)
self.handled = threading.Event()
def tearDown(self):
"""Shutdown the UDP server."""
try:
self.server.stop(2.0)
self.root_logger.removeHandler(self.sock_hdlr)
self.sock_hdlr.close()
finally:
BaseTest.tearDown(self)
def handle_datagram(self, request):
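        # DatagramHandler prepends the same 4-byte length prefix as
        # SocketHandler even though UDP preserves message boundaries,
        # so strip it before unpickling.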
slen = struct.pack('>L', 0) # length of prefix
packet = request.packet[len(slen):]
obj = pickle.loads(packet)
record = logging.makeLogRecord(obj)
self.log_output += record.msg + '\n'
self.handled.set()
def test_output(self):
# The log message sent to the DatagramHandler is properly received.
logger = logging.getLogger("udp")
logger.error("spam")
self.handled.wait()
self.handled.clear()
logger.error("eggs")
self.handled.wait()
self.assertEqual(self.log_output, "spam\neggs\n")
@unittest.skipUnless(threading, 'Threading required for this test.')
class SysLogHandlerTest(BaseTest):
"""Test for SysLogHandler using UDP."""
def setUp(self):
"""Set up a UDP server to receive log messages, and a SysLogHandler
pointing to that server's address and port."""
BaseTest.setUp(self)
addr = ('localhost', 0)
self.server = server = TestUDPServer(addr, self.handle_datagram,
0.01)
server.start()
server.ready.wait()
self.sl_hdlr = logging.handlers.SysLogHandler(('localhost',
server.port))
self.log_output = ''
self.root_logger.removeHandler(self.root_logger.handlers[0])
self.root_logger.addHandler(self.sl_hdlr)
self.handled = threading.Event()
def tearDown(self):
"""Shutdown the UDP server."""
try:
self.server.stop(2.0)
self.root_logger.removeHandler(self.sl_hdlr)
self.sl_hdlr.close()
finally:
BaseTest.tearDown(self)
def handle_datagram(self, request):
self.log_output = request.packet
self.handled.set()
def test_output(self):
# The log message sent to the SysLogHandler is properly received.
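        # Priority 11 == facility LOG_USER (1) * 8 + severity LOG_ERR (3),
        # hence the '<11>' prefix on each datagram.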
logger = logging.getLogger("slh")
logger.error("sp\xe4m")
self.handled.wait()
self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m\x00')
self.handled.clear()
self.sl_hdlr.append_nul = False
logger.error("sp\xe4m")
self.handled.wait()
self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m')
self.handled.clear()
self.sl_hdlr.ident = "h\xe4m-"
logger.error("sp\xe4m")
self.handled.wait()
self.assertEqual(self.log_output, b'<11>h\xc3\xa4m-sp\xc3\xa4m')
@unittest.skipUnless(threading, 'Threading required for this test.')
class HTTPHandlerTest(BaseTest):
"""Test for HTTPHandler."""
PEMFILE = """-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQDGT4xS5r91rbLJQK2nUDenBhBG6qFk+bVOjuAGC/LSHlAoBnvG
zQG3agOG+e7c5z2XT8m2ktORLqG3E4mYmbxgyhDrzP6ei2Anc+pszmnxPoK3Puh5
aXV+XKt0bU0C1m2+ACmGGJ0t3P408art82nOxBw8ZHgIg9Dtp6xIUCyOqwIDAQAB
AoGBAJFTnFboaKh5eUrIzjmNrKsG44jEyy+vWvHN/FgSC4l103HxhmWiuL5Lv3f7
0tMp1tX7D6xvHwIG9VWvyKb/Cq9rJsDibmDVIOslnOWeQhG+XwJyitR0pq/KlJIB
5LjORcBw795oKWOAi6RcOb1ON59tysEFYhAGQO9k6VL621gRAkEA/Gb+YXULLpbs
piXN3q4zcHzeaVANo69tUZ6TjaQqMeTxE4tOYM0G0ZoSeHEdaP59AOZGKXXNGSQy
2z/MddcYGQJBAMkjLSYIpOLJY11ja8OwwswFG2hEzHe0cS9bzo++R/jc1bHA5R0Y
i6vA5iPi+wopPFvpytdBol7UuEBe5xZrxWMCQQCWxELRHiP2yWpEeLJ3gGDzoXMN
PydWjhRju7Bx3AzkTtf+D6lawz1+eGTuEss5i0JKBkMEwvwnN2s1ce+EuF4JAkBb
E96h1lAzkVW5OAfYOPY8RCPA90ZO/hoyg7PpSxR0ECuDrgERR8gXIeYUYfejBkEa
rab4CfRoVJKKM28Yq/xZAkBvuq670JRCwOgfUTdww7WpdOQBYPkzQccsKNCslQW8
/DyW6y06oQusSENUvynT6dr3LJxt/NgZPhZX2+k1eYDV
-----END RSA PRIVATE KEY-----
-----BEGIN CERTIFICATE-----
MIICGzCCAYSgAwIBAgIJAIq84a2Q/OvlMA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV
BAMTCWxvY2FsaG9zdDAeFw0xMTA1MjExMDIzMzNaFw03NTAzMjEwMzU1MTdaMBQx
EjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
xk+MUua/da2yyUCtp1A3pwYQRuqhZPm1To7gBgvy0h5QKAZ7xs0Bt2oDhvnu3Oc9
l0/JtpLTkS6htxOJmJm8YMoQ68z+notgJ3PqbM5p8T6Ctz7oeWl1flyrdG1NAtZt
vgAphhidLdz+NPGq7fNpzsQcPGR4CIPQ7aesSFAsjqsCAwEAAaN1MHMwHQYDVR0O
BBYEFLWaUPO6N7efGiuoS9i3DVYcUwn0MEQGA1UdIwQ9MDuAFLWaUPO6N7efGiuo
S9i3DVYcUwn0oRikFjAUMRIwEAYDVQQDEwlsb2NhbGhvc3SCCQCKvOGtkPzr5TAM
BgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAMK5whPjLNQK1Ivvk88oqJqq
4f889OwikGP0eUhOBhbFlsZs+jq5YZC2UzHz+evzKBlgAP1u4lP/cB85CnjvWqM+
1c/lywFHQ6HOdDeQ1L72tSYMrNOG4XNmLn0h7rx6GoTU7dcFRfseahBCq8mv0IDt
IRbTpvlHWPjsSvHz0ZOH
-----END CERTIFICATE-----"""
def setUp(self):
"""Set up an HTTP server to receive log messages, and a HTTPHandler
pointing to that server's address and port."""
BaseTest.setUp(self)
self.handled = threading.Event()
def handle_request(self, request):
self.command = request.command
self.log_data = urlparse(request.path)
if self.command == 'POST':
try:
rlen = int(request.headers['Content-Length'])
self.post_data = request.rfile.read(rlen)
except:
self.post_data = None
request.send_response(200)
request.end_headers()
self.handled.set()
def test_output(self):
# The log message sent to the HTTPHandler is properly received.
logger = logging.getLogger("http")
root_logger = self.root_logger
root_logger.removeHandler(self.root_logger.handlers[0])
for secure in (False, True):
addr = ('localhost', 0)
if secure:
try:
import ssl
fd, fn = tempfile.mkstemp()
os.close(fd)
with open(fn, 'w') as f:
f.write(self.PEMFILE)
sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslctx.load_cert_chain(fn)
os.unlink(fn)
except ImportError:
sslctx = None
else:
sslctx = None
self.server = server = TestHTTPServer(addr, self.handle_request,
0.01, sslctx=sslctx)
server.start()
server.ready.wait()
host = 'localhost:%d' % server.server_port
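            # Fall back to plain HTTP if the ssl module was unavailable.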
secure_client = secure and sslctx
self.h_hdlr = logging.handlers.HTTPHandler(host, '/frob',
secure=secure_client)
self.log_data = None
root_logger.addHandler(self.h_hdlr)
for method in ('GET', 'POST'):
self.h_hdlr.method = method
self.handled.clear()
msg = "sp\xe4m"
logger.error(msg)
self.handled.wait()
self.assertEqual(self.log_data.path, '/frob')
self.assertEqual(self.command, method)
if method == 'GET':
d = parse_qs(self.log_data.query)
else:
d = parse_qs(self.post_data.decode('utf-8'))
self.assertEqual(d['name'], ['http'])
self.assertEqual(d['funcName'], ['test_output'])
self.assertEqual(d['msg'], [msg])
self.server.stop(2.0)
self.root_logger.removeHandler(self.h_hdlr)
self.h_hdlr.close()
class MemoryTest(BaseTest):
"""Test memory persistence of logger objects."""
def setUp(self):
"""Create a dict to remember potentially destroyed objects."""
BaseTest.setUp(self)
self._survivors = {}
def _watch_for_survival(self, *args):
"""Watch the given objects for survival, by creating weakrefs to
them."""
for obj in args:
key = id(obj), repr(obj)
self._survivors[key] = weakref.ref(obj)
    def _assert_survival(self):
"""Assert that all objects watched for survival have survived."""
# Trigger cycle breaking.
gc.collect()
dead = []
for (id_, repr_), ref in self._survivors.items():
if ref() is None:
dead.append(repr_)
if dead:
self.fail("%d objects should have survived "
"but have been destroyed: %s" % (len(dead), ", ".join(dead)))
def test_persistent_loggers(self):
# Logger objects are persistent and retain their configuration, even
# if visible references are destroyed.
self.root_logger.setLevel(logging.INFO)
foo = logging.getLogger("foo")
self._watch_for_survival(foo)
foo.setLevel(logging.DEBUG)
self.root_logger.debug(self.next_message())
foo.debug(self.next_message())
self.assert_log_lines([
('foo', 'DEBUG', '2'),
])
del foo
# foo has survived.
        self._assert_survival()
# foo has retained its settings.
bar = logging.getLogger("foo")
bar.debug(self.next_message())
self.assert_log_lines([
('foo', 'DEBUG', '2'),
('foo', 'DEBUG', '3'),
])
class EncodingTest(BaseTest):
def test_encoding_plain_file(self):
        # A FileHandler opened with an explicit encoding should write
        # non-ASCII data using exactly that encoding.
log = logging.getLogger("test")
fd, fn = tempfile.mkstemp(".log", "test_logging-1-")
os.close(fd)
# the non-ascii data we write to the log.
data = "foo\x80"
try:
handler = logging.FileHandler(fn, encoding="utf-8")
log.addHandler(handler)
try:
# write non-ascii data to the log.
log.warning(data)
finally:
log.removeHandler(handler)
handler.close()
# check we wrote exactly those bytes, ignoring trailing \n etc
f = open(fn, encoding="utf-8")
try:
self.assertEqual(f.read().rstrip(), data)
finally:
f.close()
finally:
if os.path.isfile(fn):
os.remove(fn)
def test_encoding_cyrillic_unicode(self):
log = logging.getLogger("test")
#Get a message in Unicode: Do svidanya in Cyrillic (meaning goodbye)
message = '\u0434\u043e \u0441\u0432\u0438\u0434\u0430\u043d\u0438\u044f'
#Ensure it's written in a Cyrillic encoding
writer_class = codecs.getwriter('cp1251')
writer_class.encoding = 'cp1251'
stream = io.BytesIO()
writer = writer_class(stream, 'strict')
handler = logging.StreamHandler(writer)
log.addHandler(handler)
try:
log.warning(message)
finally:
log.removeHandler(handler)
handler.close()
# check we wrote exactly those bytes, ignoring trailing \n etc
s = stream.getvalue()
#Compare against what the data should be when encoded in CP-1251
self.assertEqual(s, b'\xe4\xee \xf1\xe2\xe8\xe4\xe0\xed\xe8\xff\n')
class WarningsTest(BaseTest):
def test_warnings(self):
with warnings.catch_warnings():
logging.captureWarnings(True)
self.addCleanup(logging.captureWarnings, False)
warnings.filterwarnings("always", category=UserWarning)
stream = io.StringIO()
h = logging.StreamHandler(stream)
logger = logging.getLogger("py.warnings")
logger.addHandler(h)
warnings.warn("I'm warning you...")
logger.removeHandler(h)
s = stream.getvalue()
h.close()
self.assertTrue(s.find("UserWarning: I'm warning you...\n") > 0)
#See if an explicit file uses the original implementation
a_file = io.StringIO()
warnings.showwarning("Explicit", UserWarning, "dummy.py", 42,
a_file, "Dummy line")
s = a_file.getvalue()
a_file.close()
self.assertEqual(s,
"dummy.py:42: UserWarning: Explicit\n Dummy line\n")
def test_warnings_no_handlers(self):
with warnings.catch_warnings():
logging.captureWarnings(True)
self.addCleanup(logging.captureWarnings, False)
            # confirm our assumption: no handlers are attached yet
logger = logging.getLogger("py.warnings")
self.assertEqual(logger.handlers, [])
warnings.showwarning("Explicit", UserWarning, "dummy.py", 42)
self.assertEqual(len(logger.handlers), 1)
self.assertIsInstance(logger.handlers[0], logging.NullHandler)
def formatFunc(format, datefmt=None):
return logging.Formatter(format, datefmt)
def handlerFunc():
return logging.StreamHandler()
class CustomHandler(logging.StreamHandler):
pass
class ConfigDictTest(BaseTest):
"""Reading logging config from a dictionary."""
expected_log_pat = r"^([\w]+) \+\+ ([\w]+)$"
# config0 is a standard configuration.
config0 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'root' : {
'level' : 'WARNING',
'handlers' : ['hand1'],
},
}
# config1 adds a little to the standard configuration.
config1 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
# config1a moves the handler to the root. Used with config8a
config1a = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
},
},
'root' : {
'level' : 'WARNING',
'handlers' : ['hand1'],
},
}
# config2 has a subtle configuration error that should be reported
config2 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdbout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
#As config1 but with a misspelt level on a handler
config2a = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NTOSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
#As config1 but with a misspelt level on a logger
config2b = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WRANING',
},
}
# config3 has a less subtle configuration error
config3 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'misspelled_name',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
# config4 specifies a custom formatter class to be loaded
config4 = {
'version': 1,
'formatters': {
'form1' : {
'()' : __name__ + '.ExceptionFormatter',
'format' : '%(levelname)s:%(name)s:%(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'root' : {
'level' : 'NOTSET',
'handlers' : ['hand1'],
},
}
# As config4 but using an actual callable rather than a string
config4a = {
'version': 1,
'formatters': {
'form1' : {
'()' : ExceptionFormatter,
'format' : '%(levelname)s:%(name)s:%(message)s',
},
'form2' : {
'()' : __name__ + '.formatFunc',
'format' : '%(levelname)s:%(name)s:%(message)s',
},
'form3' : {
'()' : formatFunc,
'format' : '%(levelname)s:%(name)s:%(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
'hand2' : {
'()' : handlerFunc,
},
},
'root' : {
'level' : 'NOTSET',
'handlers' : ['hand1'],
},
}
# config5 specifies a custom handler class to be loaded
config5 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : __name__ + '.CustomHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
# config6 specifies a custom handler class to be loaded
# but has bad arguments
config6 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : __name__ + '.CustomHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
'9' : 'invalid parameter name',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
#config 7 does not define compiler.parser but defines compiler.lexer
#so compiler.parser should be disabled after applying it
config7 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.lexer' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
# config8 defines both compiler and compiler.lexer
# so compiler.parser should not be disabled (since
# compiler is defined)
config8 = {
'version': 1,
'disable_existing_loggers' : False,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
'compiler.lexer' : {
},
},
'root' : {
'level' : 'WARNING',
},
}
# config8a disables existing loggers
config8a = {
'version': 1,
'disable_existing_loggers' : True,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
'compiler.lexer' : {
},
},
'root' : {
'level' : 'WARNING',
},
}
config9 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'WARNING',
'stream' : 'ext://sys.stdout',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'WARNING',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'NOTSET',
},
}
config9a = {
'version': 1,
'incremental' : True,
'handlers' : {
'hand1' : {
'level' : 'WARNING',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'INFO',
},
},
}
config9b = {
'version': 1,
'incremental' : True,
'handlers' : {
'hand1' : {
'level' : 'INFO',
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'INFO',
},
},
}
#As config1 but with a filter added
config10 = {
'version': 1,
'formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'filters' : {
'filt1' : {
'name' : 'compiler.parser',
},
},
'handlers' : {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
'filters' : ['filt1'],
},
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'filters' : ['filt1'],
},
},
'root' : {
'level' : 'WARNING',
'handlers' : ['hand1'],
},
}
#As config1 but using cfg:// references
config11 = {
'version': 1,
'true_formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handler_configs': {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'formatters' : 'cfg://true_formatters',
'handlers' : {
'hand1' : 'cfg://handler_configs[hand1]',
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
#As config11 but missing the version key
config12 = {
'true_formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handler_configs': {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'formatters' : 'cfg://true_formatters',
'handlers' : {
'hand1' : 'cfg://handler_configs[hand1]',
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
#As config11 but using an unsupported version
config13 = {
'version': 2,
'true_formatters': {
'form1' : {
'format' : '%(levelname)s ++ %(message)s',
},
},
'handler_configs': {
'hand1' : {
'class' : 'logging.StreamHandler',
'formatter' : 'form1',
'level' : 'NOTSET',
'stream' : 'ext://sys.stdout',
},
},
'formatters' : 'cfg://true_formatters',
'handlers' : {
'hand1' : 'cfg://handler_configs[hand1]',
},
'loggers' : {
'compiler.parser' : {
'level' : 'DEBUG',
'handlers' : ['hand1'],
},
},
'root' : {
'level' : 'WARNING',
},
}
def apply_config(self, conf):
logging.config.dictConfig(conf)
def test_config0_ok(self):
# A simple config which overrides the default settings.
with captured_stdout() as output:
self.apply_config(self.config0)
logger = logging.getLogger()
# Won't output anything
logger.info(self.next_message())
# Outputs a message
logger.error(self.next_message())
self.assert_log_lines([
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config1_ok(self, config=config1):
# A config defining a sub-parser as well.
with captured_stdout() as output:
self.apply_config(config)
logger = logging.getLogger("compiler.parser")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config2_failure(self):
# A simple config which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config2)
def test_config2a_failure(self):
# A simple config which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config2a)
def test_config2b_failure(self):
# A simple config which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config2b)
def test_config3_failure(self):
# A simple config which overrides the default settings.
self.assertRaises(Exception, self.apply_config, self.config3)
def test_config4_ok(self):
# A config specifying a custom formatter class.
with captured_stdout() as output:
self.apply_config(self.config4)
try:
raise RuntimeError()
except RuntimeError:
logging.exception("just testing")
sys.stdout.seek(0)
self.assertEqual(output.getvalue(),
"ERROR:root:just testing\nGot a [RuntimeError]\n")
# Original logger output is empty
self.assert_log_lines([])
def test_config4a_ok(self):
# A config specifying a custom formatter class.
with captured_stdout() as output:
self.apply_config(self.config4a)
try:
raise RuntimeError()
except RuntimeError:
logging.exception("just testing")
sys.stdout.seek(0)
self.assertEqual(output.getvalue(),
"ERROR:root:just testing\nGot a [RuntimeError]\n")
# Original logger output is empty
self.assert_log_lines([])
def test_config5_ok(self):
self.test_config1_ok(config=self.config5)
def test_config6_failure(self):
self.assertRaises(Exception, self.apply_config, self.config6)
def test_config7_ok(self):
with captured_stdout() as output:
self.apply_config(self.config1)
logger = logging.getLogger("compiler.parser")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
with captured_stdout() as output:
self.apply_config(self.config7)
logger = logging.getLogger("compiler.parser")
self.assertTrue(logger.disabled)
logger = logging.getLogger("compiler.lexer")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '3'),
('ERROR', '4'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
    #Same as test_config7_ok but don't disable old loggers.
def test_config_8_ok(self):
with captured_stdout() as output:
self.apply_config(self.config1)
logger = logging.getLogger("compiler.parser")
# All will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
with captured_stdout() as output:
self.apply_config(self.config8)
logger = logging.getLogger("compiler.parser")
self.assertFalse(logger.disabled)
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
logger = logging.getLogger("compiler.lexer")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '3'),
('ERROR', '4'),
('INFO', '5'),
('ERROR', '6'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config_8a_ok(self):
with captured_stdout() as output:
self.apply_config(self.config1a)
logger = logging.getLogger("compiler.parser")
# See issue #11424. compiler-hyphenated sorts
# between compiler and compiler.xyz and this
# was preventing compiler.xyz from being included
# in the child loggers of compiler because of an
# overzealous loop termination condition.
hyphenated = logging.getLogger('compiler-hyphenated')
# All will output a message
logger.info(self.next_message())
logger.error(self.next_message())
hyphenated.critical(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
('CRITICAL', '3'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
with captured_stdout() as output:
self.apply_config(self.config8a)
logger = logging.getLogger("compiler.parser")
self.assertFalse(logger.disabled)
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
logger = logging.getLogger("compiler.lexer")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
# Will not appear
hyphenated.critical(self.next_message())
self.assert_log_lines([
('INFO', '4'),
('ERROR', '5'),
('INFO', '6'),
('ERROR', '7'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_config_9_ok(self):
with captured_stdout() as output:
self.apply_config(self.config9)
logger = logging.getLogger("compiler.parser")
#Nothing will be output since both handler and logger are set to WARNING
logger.info(self.next_message())
self.assert_log_lines([], stream=output)
self.apply_config(self.config9a)
            #Nothing will be output since the handler is still set to WARNING
logger.info(self.next_message())
self.assert_log_lines([], stream=output)
self.apply_config(self.config9b)
#Message should now be output
logger.info(self.next_message())
self.assert_log_lines([
('INFO', '3'),
], stream=output)
def test_config_10_ok(self):
with captured_stdout() as output:
self.apply_config(self.config10)
logger = logging.getLogger("compiler.parser")
logger.warning(self.next_message())
logger = logging.getLogger('compiler')
#Not output, because filtered
logger.warning(self.next_message())
logger = logging.getLogger('compiler.lexer')
#Not output, because filtered
logger.warning(self.next_message())
logger = logging.getLogger("compiler.parser.codegen")
#Output, as not filtered
logger.error(self.next_message())
self.assert_log_lines([
('WARNING', '1'),
('ERROR', '4'),
], stream=output)
def test_config11_ok(self):
self.test_config1_ok(self.config11)
def test_config12_failure(self):
self.assertRaises(Exception, self.apply_config, self.config12)
def test_config13_failure(self):
self.assertRaises(Exception, self.apply_config, self.config13)
@unittest.skipUnless(threading, 'listen() needs threading to work')
def setup_via_listener(self, text):
text = text.encode("utf-8")
# Ask for a randomly assigned port (by using port 0)
t = logging.config.listen(0)
t.start()
t.ready.wait()
# Now get the port allocated
port = t.port
t.ready.clear()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2.0)
sock.connect(('localhost', port))
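            # logging.config.listen() expects a 4-byte big-endian length
            # prefix followed by the configuration payload.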
slen = struct.pack('>L', len(text))
s = slen + text
sentsofar = 0
left = len(s)
while left > 0:
sent = sock.send(s[sentsofar:])
sentsofar += sent
left -= sent
sock.close()
finally:
t.ready.wait(2.0)
logging.config.stopListening()
t.join(2.0)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_10_ok(self):
with captured_stdout() as output:
self.setup_via_listener(json.dumps(self.config10))
logger = logging.getLogger("compiler.parser")
logger.warning(self.next_message())
logger = logging.getLogger('compiler')
#Not output, because filtered
logger.warning(self.next_message())
logger = logging.getLogger('compiler.lexer')
#Not output, because filtered
logger.warning(self.next_message())
logger = logging.getLogger("compiler.parser.codegen")
#Output, as not filtered
logger.error(self.next_message())
self.assert_log_lines([
('WARNING', '1'),
('ERROR', '4'),
], stream=output)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_1_ok(self):
with captured_stdout() as output:
self.setup_via_listener(textwrap.dedent(ConfigFileTest.config1))
logger = logging.getLogger("compiler.parser")
# Both will output a message
logger.info(self.next_message())
logger.error(self.next_message())
self.assert_log_lines([
('INFO', '1'),
('ERROR', '2'),
], stream=output)
# Original logger output is empty.
self.assert_log_lines([])
def test_baseconfig(self):
d = {
'atuple': (1, 2, 3),
'alist': ['a', 'b', 'c'],
'adict': {'d': 'e', 'f': 3 },
'nest1': ('g', ('h', 'i'), 'j'),
'nest2': ['k', ['l', 'm'], 'n'],
'nest3': ['o', 'cfg://alist', 'p'],
}
bc = logging.config.BaseConfigurator(d)
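        # cfg:// paths support attribute-style access ('adict.d') as well
        # as index-style access ('adict[f]', 'atuple[1]'), and cfg://
        # strings nested inside converted containers are resolved too.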
self.assertEqual(bc.convert('cfg://atuple[1]'), 2)
self.assertEqual(bc.convert('cfg://alist[1]'), 'b')
self.assertEqual(bc.convert('cfg://nest1[1][0]'), 'h')
self.assertEqual(bc.convert('cfg://nest2[1][1]'), 'm')
self.assertEqual(bc.convert('cfg://adict.d'), 'e')
self.assertEqual(bc.convert('cfg://adict[f]'), 3)
v = bc.convert('cfg://nest3')
self.assertEqual(v.pop(1), ['a', 'b', 'c'])
self.assertRaises(KeyError, bc.convert, 'cfg://nosuch')
self.assertRaises(ValueError, bc.convert, 'cfg://!')
self.assertRaises(KeyError, bc.convert, 'cfg://adict[2]')
class ManagerTest(BaseTest):
def test_manager_loggerclass(self):
logged = []
class MyLogger(logging.Logger):
def _log(self, level, msg, args, exc_info=None, extra=None):
logged.append(msg)
man = logging.Manager(None)
self.assertRaises(TypeError, man.setLoggerClass, int)
man.setLoggerClass(MyLogger)
logger = man.getLogger('test')
logger.warning('should appear in logged')
logging.warning('should not appear in logged')
self.assertEqual(logged, ['should appear in logged'])
def test_set_log_record_factory(self):
man = logging.Manager(None)
expected = object()
man.setLogRecordFactory(expected)
self.assertEqual(man.logRecordFactory, expected)
class ChildLoggerTest(BaseTest):
def test_child_loggers(self):
r = logging.getLogger()
l1 = logging.getLogger('abc')
l2 = logging.getLogger('def.ghi')
c1 = r.getChild('xyz')
c2 = r.getChild('uvw.xyz')
self.assertTrue(c1 is logging.getLogger('xyz'))
self.assertTrue(c2 is logging.getLogger('uvw.xyz'))
c1 = l1.getChild('def')
c2 = c1.getChild('ghi')
c3 = l1.getChild('def.ghi')
self.assertTrue(c1 is logging.getLogger('abc.def'))
self.assertTrue(c2 is logging.getLogger('abc.def.ghi'))
self.assertTrue(c2 is c3)
class DerivedLogRecord(logging.LogRecord):
pass
class LogRecordFactoryTest(BaseTest):
def setUp(self):
class CheckingFilter(logging.Filter):
def __init__(self, cls):
self.cls = cls
def filter(self, record):
t = type(record)
if t is not self.cls:
msg = 'Unexpected LogRecord type %s, expected %s' % (t,
self.cls)
raise TypeError(msg)
return True
BaseTest.setUp(self)
self.filter = CheckingFilter(DerivedLogRecord)
self.root_logger.addFilter(self.filter)
self.orig_factory = logging.getLogRecordFactory()
def tearDown(self):
self.root_logger.removeFilter(self.filter)
BaseTest.tearDown(self)
logging.setLogRecordFactory(self.orig_factory)
def test_logrecord_class(self):
self.assertRaises(TypeError, self.root_logger.warning,
self.next_message())
logging.setLogRecordFactory(DerivedLogRecord)
self.root_logger.error(self.next_message())
self.assert_log_lines([
('root', 'ERROR', '2'),
])
class QueueHandlerTest(BaseTest):
# Do not bother with a logger name group.
expected_log_pat = r"^[\w.]+ -> ([\w]+): ([\d]+)$"
def setUp(self):
BaseTest.setUp(self)
self.queue = queue.Queue(-1)
self.que_hdlr = logging.handlers.QueueHandler(self.queue)
self.que_logger = logging.getLogger('que')
self.que_logger.propagate = False
self.que_logger.setLevel(logging.WARNING)
self.que_logger.addHandler(self.que_hdlr)
def tearDown(self):
self.que_hdlr.close()
BaseTest.tearDown(self)
def test_queue_handler(self):
self.que_logger.debug(self.next_message())
self.assertRaises(queue.Empty, self.queue.get_nowait)
self.que_logger.info(self.next_message())
self.assertRaises(queue.Empty, self.queue.get_nowait)
msg = self.next_message()
self.que_logger.warning(msg)
data = self.queue.get_nowait()
self.assertTrue(isinstance(data, logging.LogRecord))
self.assertEqual(data.name, self.que_logger.name)
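        # QueueHandler.prepare() formats the message and clears args
        # before enqueueing, which is why args round-trips as None.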
self.assertEqual((data.msg, data.args), (msg, None))
@unittest.skipUnless(hasattr(logging.handlers, 'QueueListener'),
'logging.handlers.QueueListener required for this test')
def test_queue_listener(self):
handler = TestHandler(Matcher())
listener = logging.handlers.QueueListener(self.queue, handler)
listener.start()
try:
self.que_logger.warning(self.next_message())
self.que_logger.error(self.next_message())
self.que_logger.critical(self.next_message())
finally:
listener.stop()
self.assertTrue(handler.matches(levelno=logging.WARNING, message='1'))
self.assertTrue(handler.matches(levelno=logging.ERROR, message='2'))
self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='3'))
ZERO = datetime.timedelta(0)
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
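    # A zero-offset zone has no DST either; reuse utcoffset for dst.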
dst = utcoffset
def tzname(self, dt):
return 'UTC'
utc = UTC()
class FormatterTest(unittest.TestCase):
def setUp(self):
self.common = {
'name': 'formatter.test',
'level': logging.DEBUG,
'pathname': os.path.join('path', 'to', 'dummy.ext'),
'lineno': 42,
'exc_info': None,
'func': None,
'msg': 'Message with %d %s',
'args': (2, 'placeholders'),
}
self.variants = {
}
def get_record(self, name=None):
result = dict(self.common)
if name is not None:
result.update(self.variants[name])
return logging.makeLogRecord(result)
    def test_percent(self):
        # Test %-formatting
        r = self.get_record()
        f = logging.Formatter('${%(message)s}')
self.assertEqual(f.format(r), '${Message with 2 placeholders}')
f = logging.Formatter('%(random)s')
self.assertRaises(KeyError, f.format, r)
self.assertFalse(f.usesTime())
f = logging.Formatter('%(asctime)s')
self.assertTrue(f.usesTime())
f = logging.Formatter('%(asctime)-15s')
self.assertTrue(f.usesTime())
f = logging.Formatter('asctime')
self.assertFalse(f.usesTime())
def test_braces(self):
# Test {}-formatting
r = self.get_record()
f = logging.Formatter('$%{message}%$', style='{')
self.assertEqual(f.format(r), '$%Message with 2 placeholders%$')
f = logging.Formatter('{random}', style='{')
self.assertRaises(KeyError, f.format, r)
self.assertFalse(f.usesTime())
f = logging.Formatter('{asctime}', style='{')
self.assertTrue(f.usesTime())
f = logging.Formatter('{asctime!s:15}', style='{')
self.assertTrue(f.usesTime())
f = logging.Formatter('{asctime:15}', style='{')
self.assertTrue(f.usesTime())
f = logging.Formatter('asctime', style='{')
self.assertFalse(f.usesTime())
def test_dollars(self):
# Test $-formatting
r = self.get_record()
f = logging.Formatter('$message', style='$')
self.assertEqual(f.format(r), 'Message with 2 placeholders')
f = logging.Formatter('$$%${message}%$$', style='$')
self.assertEqual(f.format(r), '$%Message with 2 placeholders%$')
f = logging.Formatter('${random}', style='$')
self.assertRaises(KeyError, f.format, r)
self.assertFalse(f.usesTime())
f = logging.Formatter('${asctime}', style='$')
self.assertTrue(f.usesTime())
f = logging.Formatter('${asctime', style='$')
self.assertFalse(f.usesTime())
f = logging.Formatter('$asctime', style='$')
self.assertTrue(f.usesTime())
f = logging.Formatter('asctime', style='$')
self.assertFalse(f.usesTime())
def test_invalid_style(self):
self.assertRaises(ValueError, logging.Formatter, None, None, 'x')
def test_time(self):
r = self.get_record()
dt = datetime.datetime(1993, 4, 21, 8, 3, 0, 0, utc)
# We use None to indicate we want the local timezone
# We're essentially converting a UTC time to local time
r.created = time.mktime(dt.astimezone(None).timetuple())
r.msecs = 123
f = logging.Formatter('%(asctime)s %(message)s')
f.converter = time.gmtime
self.assertEqual(f.formatTime(r), '1993-04-21 08:03:00,123')
self.assertEqual(f.formatTime(r, '%Y:%d'), '1993:21')
f.format(r)
self.assertEqual(r.asctime, '1993-04-21 08:03:00,123')
class TestBufferingFormatter(logging.BufferingFormatter):
def formatHeader(self, records):
return '[(%d)' % len(records)
def formatFooter(self, records):
return '(%d)]' % len(records)
class BufferingFormatterTest(unittest.TestCase):
def setUp(self):
self.records = [
logging.makeLogRecord({'msg': 'one'}),
logging.makeLogRecord({'msg': 'two'}),
]
def test_default(self):
f = logging.BufferingFormatter()
self.assertEqual('', f.format([]))
self.assertEqual('onetwo', f.format(self.records))
def test_custom(self):
f = TestBufferingFormatter()
self.assertEqual('[(2)onetwo(2)]', f.format(self.records))
lf = logging.Formatter('<%(message)s>')
f = TestBufferingFormatter(lf)
self.assertEqual('[(2)<one><two>(2)]', f.format(self.records))
class ExceptionTest(BaseTest):
def test_formatting(self):
r = self.root_logger
h = RecordingHandler()
r.addHandler(h)
try:
raise RuntimeError('deliberate mistake')
except:
logging.exception('failed', stack_info=True)
r.removeHandler(h)
h.close()
r = h.records[0]
self.assertTrue(r.exc_text.startswith('Traceback (most recent '
'call last):\n'))
self.assertTrue(r.exc_text.endswith('\nRuntimeError: '
'deliberate mistake'))
self.assertTrue(r.stack_info.startswith('Stack (most recent '
'call last):\n'))
self.assertTrue(r.stack_info.endswith('logging.exception(\'failed\', '
'stack_info=True)'))
class LastResortTest(BaseTest):
def test_last_resort(self):
# Test the last resort handler
root = self.root_logger
root.removeHandler(self.root_hdlr)
old_stderr = sys.stderr
old_lastresort = logging.lastResort
old_raise_exceptions = logging.raiseExceptions
try:
sys.stderr = sio = io.StringIO()
root.debug('This should not appear')
self.assertEqual(sio.getvalue(), '')
root.warning('This is your final chance!')
self.assertEqual(sio.getvalue(), 'This is your final chance!\n')
#No handlers and no last resort, so 'No handlers' message
logging.lastResort = None
sys.stderr = sio = io.StringIO()
root.warning('This is your final chance!')
self.assertEqual(sio.getvalue(), 'No handlers could be found for logger "root"\n')
# 'No handlers' message only printed once
sys.stderr = sio = io.StringIO()
root.warning('This is your final chance!')
self.assertEqual(sio.getvalue(), '')
root.manager.emittedNoHandlerWarning = False
#If raiseExceptions is False, no message is printed
logging.raiseExceptions = False
sys.stderr = sio = io.StringIO()
root.warning('This is your final chance!')
self.assertEqual(sio.getvalue(), '')
finally:
sys.stderr = old_stderr
root.addHandler(self.root_hdlr)
logging.lastResort = old_lastresort
logging.raiseExceptions = old_raise_exceptions
class FakeHandler:
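    # Records only the order in which logging.shutdown() touches its
    # acquire/flush/close/release methods; it performs no real handler work.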
def __init__(self, identifier, called):
for method in ('acquire', 'flush', 'close', 'release'):
setattr(self, method, self.record_call(identifier, method, called))
def record_call(self, identifier, method_name, called):
def inner():
called.append('{} - {}'.format(identifier, method_name))
return inner
class RecordingHandler(logging.NullHandler):
def __init__(self, *args, **kwargs):
super(RecordingHandler, self).__init__(*args, **kwargs)
self.records = []
def handle(self, record):
"""Keep track of all the emitted records."""
self.records.append(record)
class ShutdownTest(BaseTest):
"""Test suite for the shutdown method."""
def setUp(self):
super(ShutdownTest, self).setUp()
self.called = []
raise_exceptions = logging.raiseExceptions
self.addCleanup(setattr, logging, 'raiseExceptions', raise_exceptions)
def raise_error(self, error):
def inner():
raise error()
return inner
def test_no_failure(self):
# create some fake handlers
handler0 = FakeHandler(0, self.called)
handler1 = FakeHandler(1, self.called)
handler2 = FakeHandler(2, self.called)
# create live weakref to those handlers
handlers = map(logging.weakref.ref, [handler0, handler1, handler2])
logging.shutdown(handlerList=list(handlers))
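        # shutdown() walks the handler list in reverse order of registration,
        # hence handler2 is finalized first and handler0 last.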
expected = ['2 - acquire', '2 - flush', '2 - close', '2 - release',
'1 - acquire', '1 - flush', '1 - close', '1 - release',
'0 - acquire', '0 - flush', '0 - close', '0 - release']
self.assertEqual(expected, self.called)
def _test_with_failure_in_method(self, method, error):
handler = FakeHandler(0, self.called)
setattr(handler, method, self.raise_error(error))
handlers = [logging.weakref.ref(handler)]
logging.shutdown(handlerList=list(handlers))
self.assertEqual('0 - release', self.called[-1])
def test_with_ioerror_in_acquire(self):
self._test_with_failure_in_method('acquire', IOError)
def test_with_ioerror_in_flush(self):
self._test_with_failure_in_method('flush', IOError)
def test_with_ioerror_in_close(self):
self._test_with_failure_in_method('close', IOError)
def test_with_valueerror_in_acquire(self):
self._test_with_failure_in_method('acquire', ValueError)
def test_with_valueerror_in_flush(self):
self._test_with_failure_in_method('flush', ValueError)
def test_with_valueerror_in_close(self):
self._test_with_failure_in_method('close', ValueError)
def test_with_other_error_in_acquire_without_raise(self):
logging.raiseExceptions = False
self._test_with_failure_in_method('acquire', IndexError)
def test_with_other_error_in_flush_without_raise(self):
logging.raiseExceptions = False
self._test_with_failure_in_method('flush', IndexError)
def test_with_other_error_in_close_without_raise(self):
logging.raiseExceptions = False
self._test_with_failure_in_method('close', IndexError)
def test_with_other_error_in_acquire_with_raise(self):
logging.raiseExceptions = True
self.assertRaises(IndexError, self._test_with_failure_in_method,
'acquire', IndexError)
def test_with_other_error_in_flush_with_raise(self):
logging.raiseExceptions = True
self.assertRaises(IndexError, self._test_with_failure_in_method,
'flush', IndexError)
def test_with_other_error_in_close_with_raise(self):
logging.raiseExceptions = True
self.assertRaises(IndexError, self._test_with_failure_in_method,
'close', IndexError)
class ModuleLevelMiscTest(BaseTest):
"""Test suite for some module level methods."""
def test_disable(self):
old_disable = logging.root.manager.disable
# confirm our assumptions are correct
self.assertEqual(old_disable, 0)
self.addCleanup(logging.disable, old_disable)
logging.disable(83)
self.assertEqual(logging.root.manager.disable, 83)
def _test_log(self, method, level=None):
called = []
patch(self, logging, 'basicConfig',
lambda *a, **kw: called.append((a, kw)))
recording = RecordingHandler()
logging.root.addHandler(recording)
log_method = getattr(logging, method)
if level is not None:
log_method(level, "test me: %r", recording)
else:
log_method("test me: %r", recording)
self.assertEqual(len(recording.records), 1)
record = recording.records[0]
self.assertEqual(record.getMessage(), "test me: %r" % recording)
expected_level = level if level is not None else getattr(logging, method.upper())
self.assertEqual(record.levelno, expected_level)
# basicConfig was not called!
self.assertEqual(called, [])
def test_log(self):
self._test_log('log', logging.ERROR)
def test_debug(self):
self._test_log('debug')
def test_info(self):
self._test_log('info')
def test_warning(self):
self._test_log('warning')
def test_error(self):
self._test_log('error')
def test_critical(self):
self._test_log('critical')
def test_set_logger_class(self):
self.assertRaises(TypeError, logging.setLoggerClass, object)
class MyLogger(logging.Logger):
pass
logging.setLoggerClass(MyLogger)
self.assertEqual(logging.getLoggerClass(), MyLogger)
logging.setLoggerClass(logging.Logger)
self.assertEqual(logging.getLoggerClass(), logging.Logger)
class LogRecordTest(BaseTest):
def test_str_rep(self):
r = logging.makeLogRecord({})
s = str(r)
self.assertTrue(s.startswith('<LogRecord: '))
self.assertTrue(s.endswith('>'))
def test_dict_arg(self):
h = RecordingHandler()
r = logging.getLogger()
r.addHandler(h)
d = {'less' : 'more' }
logging.warning('less is %(less)s', d)
self.assertIs(h.records[0].args, d)
self.assertEqual(h.records[0].message, 'less is more')
r.removeHandler(h)
h.close()
def test_multiprocessing(self):
r = logging.makeLogRecord({})
self.assertEqual(r.processName, 'MainProcess')
try:
import multiprocessing as mp
r = logging.makeLogRecord({})
self.assertEqual(r.processName, mp.current_process().name)
except ImportError:
pass
def test_optional(self):
r = logging.makeLogRecord({})
NOT_NONE = self.assertIsNotNone
if threading:
NOT_NONE(r.thread)
NOT_NONE(r.threadName)
NOT_NONE(r.process)
NOT_NONE(r.processName)
log_threads = logging.logThreads
log_processes = logging.logProcesses
log_multiprocessing = logging.logMultiprocessing
try:
logging.logThreads = False
logging.logProcesses = False
logging.logMultiprocessing = False
r = logging.makeLogRecord({})
NONE = self.assertIsNone
NONE(r.thread)
NONE(r.threadName)
NONE(r.process)
NONE(r.processName)
finally:
logging.logThreads = log_threads
logging.logProcesses = log_processes
logging.logMultiprocessing = log_multiprocessing
class BasicConfigTest(unittest.TestCase):
"""Test suite for logging.basicConfig."""
def setUp(self):
super(BasicConfigTest, self).setUp()
self.handlers = logging.root.handlers
self.saved_handlers = logging._handlers.copy()
self.saved_handler_list = logging._handlerList[:]
self.original_logging_level = logging.root.level
self.addCleanup(self.cleanup)
logging.root.handlers = []
def tearDown(self):
for h in logging.root.handlers[:]:
logging.root.removeHandler(h)
h.close()
super(BasicConfigTest, self).tearDown()
def cleanup(self):
setattr(logging.root, 'handlers', self.handlers)
logging._handlers.clear()
logging._handlers.update(self.saved_handlers)
logging._handlerList[:] = self.saved_handler_list
logging.root.level = self.original_logging_level
def test_no_kwargs(self):
logging.basicConfig()
# handler defaults to a StreamHandler to sys.stderr
self.assertEqual(len(logging.root.handlers), 1)
handler = logging.root.handlers[0]
self.assertIsInstance(handler, logging.StreamHandler)
self.assertEqual(handler.stream, sys.stderr)
formatter = handler.formatter
# format defaults to logging.BASIC_FORMAT
self.assertEqual(formatter._style._fmt, logging.BASIC_FORMAT)
# datefmt defaults to None
self.assertIsNone(formatter.datefmt)
# style defaults to %
self.assertIsInstance(formatter._style, logging.PercentStyle)
# level is not explicitly set
self.assertEqual(logging.root.level, self.original_logging_level)
def test_filename(self):
logging.basicConfig(filename='test.log')
self.assertEqual(len(logging.root.handlers), 1)
handler = logging.root.handlers[0]
self.assertIsInstance(handler, logging.FileHandler)
expected = logging.FileHandler('test.log', 'a')
self.addCleanup(expected.close)
self.assertEqual(handler.stream.mode, expected.stream.mode)
self.assertEqual(handler.stream.name, expected.stream.name)
def test_filemode(self):
logging.basicConfig(filename='test.log', filemode='wb')
handler = logging.root.handlers[0]
expected = logging.FileHandler('test.log', 'wb')
self.addCleanup(expected.close)
self.assertEqual(handler.stream.mode, expected.stream.mode)
def test_stream(self):
stream = io.StringIO()
self.addCleanup(stream.close)
logging.basicConfig(stream=stream)
self.assertEqual(len(logging.root.handlers), 1)
handler = logging.root.handlers[0]
self.assertIsInstance(handler, logging.StreamHandler)
self.assertEqual(handler.stream, stream)
def test_format(self):
logging.basicConfig(format='foo')
formatter = logging.root.handlers[0].formatter
self.assertEqual(formatter._style._fmt, 'foo')
def test_datefmt(self):
logging.basicConfig(datefmt='bar')
formatter = logging.root.handlers[0].formatter
self.assertEqual(formatter.datefmt, 'bar')
def test_style(self):
logging.basicConfig(style='$')
formatter = logging.root.handlers[0].formatter
self.assertIsInstance(formatter._style, logging.StringTemplateStyle)
def test_level(self):
old_level = logging.root.level
self.addCleanup(logging.root.setLevel, old_level)
logging.basicConfig(level=57)
self.assertEqual(logging.root.level, 57)
# Test that second call has no effect
logging.basicConfig(level=58)
self.assertEqual(logging.root.level, 57)
def test_incompatible(self):
assertRaises = self.assertRaises
handlers = [logging.StreamHandler()]
stream = sys.stderr
assertRaises(ValueError, logging.basicConfig, filename='test.log',
stream=stream)
assertRaises(ValueError, logging.basicConfig, filename='test.log',
handlers=handlers)
assertRaises(ValueError, logging.basicConfig, stream=stream,
handlers=handlers)
def test_handlers(self):
handlers = [
logging.StreamHandler(),
logging.StreamHandler(sys.stdout),
logging.StreamHandler(),
]
f = logging.Formatter()
handlers[2].setFormatter(f)
logging.basicConfig(handlers=handlers)
self.assertIs(handlers[0], logging.root.handlers[0])
self.assertIs(handlers[1], logging.root.handlers[1])
self.assertIs(handlers[2], logging.root.handlers[2])
self.assertIsNotNone(handlers[0].formatter)
self.assertIsNotNone(handlers[1].formatter)
self.assertIs(handlers[2].formatter, f)
self.assertIs(handlers[0].formatter, handlers[1].formatter)
def _test_log(self, method, level=None):
# logging.root has no handlers so basicConfig should be called
called = []
old_basic_config = logging.basicConfig
def my_basic_config(*a, **kw):
old_basic_config()
old_level = logging.root.level
logging.root.setLevel(100) # avoid having messages in stderr
self.addCleanup(logging.root.setLevel, old_level)
called.append((a, kw))
patch(self, logging, 'basicConfig', my_basic_config)
log_method = getattr(logging, method)
if level is not None:
log_method(level, "test me")
else:
log_method("test me")
# basicConfig was called with no arguments
self.assertEqual(called, [((), {})])
def test_log(self):
self._test_log('log', logging.WARNING)
def test_debug(self):
self._test_log('debug')
def test_info(self):
self._test_log('info')
def test_warning(self):
self._test_log('warning')
def test_error(self):
self._test_log('error')
def test_critical(self):
self._test_log('critical')
class LoggerAdapterTest(unittest.TestCase):
def setUp(self):
super(LoggerAdapterTest, self).setUp()
old_handler_list = logging._handlerList[:]
self.recording = RecordingHandler()
self.logger = logging.root
self.logger.addHandler(self.recording)
self.addCleanup(self.logger.removeHandler, self.recording)
self.addCleanup(self.recording.close)
def cleanup():
logging._handlerList[:] = old_handler_list
self.addCleanup(cleanup)
self.addCleanup(logging.shutdown)
self.adapter = logging.LoggerAdapter(logger=self.logger, extra=None)
def test_exception(self):
msg = 'testing exception: %r'
exc = None
try:
1 / 0
except ZeroDivisionError as e:
exc = e
self.adapter.exception(msg, self.recording)
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
self.assertEqual(record.levelno, logging.ERROR)
self.assertEqual(record.msg, msg)
self.assertEqual(record.args, (self.recording,))
self.assertEqual(record.exc_info,
(exc.__class__, exc, exc.__traceback__))
def test_critical(self):
msg = 'critical test! %r'
self.adapter.critical(msg, self.recording)
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
self.assertEqual(record.levelno, logging.CRITICAL)
self.assertEqual(record.msg, msg)
self.assertEqual(record.args, (self.recording,))
def test_is_enabled_for(self):
old_disable = self.adapter.logger.manager.disable
self.adapter.logger.manager.disable = 33
self.addCleanup(setattr, self.adapter.logger.manager, 'disable',
old_disable)
self.assertFalse(self.adapter.isEnabledFor(32))
def test_has_handlers(self):
self.assertTrue(self.adapter.hasHandlers())
for handler in self.logger.handlers:
self.logger.removeHandler(handler)
self.assertFalse(self.logger.hasHandlers())
self.assertFalse(self.adapter.hasHandlers())
class LoggerTest(BaseTest):
def setUp(self):
super(LoggerTest, self).setUp()
self.recording = RecordingHandler()
self.logger = logging.Logger(name='blah')
self.logger.addHandler(self.recording)
self.addCleanup(self.logger.removeHandler, self.recording)
self.addCleanup(self.recording.close)
self.addCleanup(logging.shutdown)
def test_set_invalid_level(self):
self.assertRaises(TypeError, self.logger.setLevel, object())
def test_exception(self):
msg = 'testing exception: %r'
exc = None
try:
1 / 0
except ZeroDivisionError as e:
exc = e
self.logger.exception(msg, self.recording)
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
self.assertEqual(record.levelno, logging.ERROR)
self.assertEqual(record.msg, msg)
self.assertEqual(record.args, (self.recording,))
self.assertEqual(record.exc_info,
(exc.__class__, exc, exc.__traceback__))
def test_log_invalid_level_with_raise(self):
old_raise = logging.raiseExceptions
        self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
logging.raiseExceptions = True
self.assertRaises(TypeError, self.logger.log, '10', 'test message')
def test_log_invalid_level_no_raise(self):
old_raise = logging.raiseExceptions
        self.addCleanup(setattr, logging, 'raiseExceptions', old_raise)
logging.raiseExceptions = False
self.logger.log('10', 'test message') # no exception happens
def test_find_caller_with_stack_info(self):
called = []
patch(self, logging.traceback, 'print_stack',
lambda f, file: called.append(file.getvalue()))
self.logger.findCaller(stack_info=True)
self.assertEqual(len(called), 1)
self.assertEqual('Stack (most recent call last):\n', called[0])
def test_make_record_with_extra_overwrite(self):
name = 'my record'
level = 13
fn = lno = msg = args = exc_info = func = sinfo = None
rv = logging._logRecordFactory(name, level, fn, lno, msg, args,
exc_info, func, sinfo)
for key in ('message', 'asctime') + tuple(rv.__dict__.keys()):
extra = {key: 'some value'}
self.assertRaises(KeyError, self.logger.makeRecord, name, level,
fn, lno, msg, args, exc_info,
extra=extra, sinfo=sinfo)
def test_make_record_with_extra_no_overwrite(self):
name = 'my record'
level = 13
fn = lno = msg = args = exc_info = func = sinfo = None
extra = {'valid_key': 'some value'}
result = self.logger.makeRecord(name, level, fn, lno, msg, args,
exc_info, extra=extra, sinfo=sinfo)
self.assertIn('valid_key', result.__dict__)
def test_has_handlers(self):
self.assertTrue(self.logger.hasHandlers())
for handler in self.logger.handlers:
self.logger.removeHandler(handler)
self.assertFalse(self.logger.hasHandlers())
def test_has_handlers_no_propagate(self):
child_logger = logging.getLogger('blah.child')
child_logger.propagate = False
self.assertFalse(child_logger.hasHandlers())
def test_is_enabled_for(self):
old_disable = self.logger.manager.disable
self.logger.manager.disable = 23
self.addCleanup(setattr, self.logger.manager, 'disable', old_disable)
self.assertFalse(self.logger.isEnabledFor(22))
def test_root_logger_aliases(self):
root = logging.getLogger()
self.assertIs(root, logging.root)
self.assertIs(root, logging.getLogger(None))
self.assertIs(root, logging.getLogger(''))
self.assertIs(root, logging.getLogger('foo').root)
self.assertIs(root, logging.getLogger('foo.bar').root)
self.assertIs(root, logging.getLogger('foo').parent)
self.assertIsNot(root, logging.getLogger('\0'))
self.assertIsNot(root, logging.getLogger('foo.bar').parent)
def test_invalid_names(self):
self.assertRaises(TypeError, logging.getLogger, any)
self.assertRaises(TypeError, logging.getLogger, b'foo')
class BaseFileTest(BaseTest):
"Base class for handler tests that write log files"
def setUp(self):
BaseTest.setUp(self)
fd, self.fn = tempfile.mkstemp(".log", "test_logging-2-")
os.close(fd)
self.rmfiles = []
def tearDown(self):
for fn in self.rmfiles:
os.unlink(fn)
if os.path.exists(self.fn):
os.unlink(self.fn)
BaseTest.tearDown(self)
def assertLogFile(self, filename):
"Assert a log file is there and register it for deletion"
self.assertTrue(os.path.exists(filename),
msg="Log file %r does not exist" % filename)
self.rmfiles.append(filename)
class FileHandlerTest(BaseFileTest):
def test_delay(self):
os.unlink(self.fn)
fh = logging.FileHandler(self.fn, delay=True)
self.assertIsNone(fh.stream)
self.assertFalse(os.path.exists(self.fn))
fh.handle(logging.makeLogRecord({}))
self.assertIsNotNone(fh.stream)
self.assertTrue(os.path.exists(self.fn))
fh.close()
class RotatingFileHandlerTest(BaseFileTest):
def next_rec(self):
return logging.LogRecord('n', logging.DEBUG, 'p', 1,
self.next_message(), None, None, None)
def test_should_not_rollover(self):
        # If maxBytes is zero, rollover never occurs
rh = logging.handlers.RotatingFileHandler(self.fn, maxBytes=0)
self.assertFalse(rh.shouldRollover(None))
rh.close()
def test_should_rollover(self):
rh = logging.handlers.RotatingFileHandler(self.fn, maxBytes=1)
self.assertTrue(rh.shouldRollover(self.next_rec()))
rh.close()
def test_file_created(self):
# checks that the file is created and assumes it was created
# by us
rh = logging.handlers.RotatingFileHandler(self.fn)
rh.emit(self.next_rec())
self.assertLogFile(self.fn)
rh.close()
def test_rollover_filenames(self):
def namer(name):
return name + ".test"
rh = logging.handlers.RotatingFileHandler(
self.fn, backupCount=2, maxBytes=1)
rh.namer = namer
rh.emit(self.next_rec())
self.assertLogFile(self.fn)
rh.emit(self.next_rec())
self.assertLogFile(namer(self.fn + ".1"))
rh.emit(self.next_rec())
self.assertLogFile(namer(self.fn + ".2"))
self.assertFalse(os.path.exists(namer(self.fn + ".3")))
rh.close()
@requires_zlib
def test_rotator(self):
def namer(name):
return name + ".gz"
def rotator(source, dest):
with open(source, "rb") as sf:
data = sf.read()
compressed = zlib.compress(data, 9)
with open(dest, "wb") as df:
df.write(compressed)
os.remove(source)
rh = logging.handlers.RotatingFileHandler(
self.fn, backupCount=2, maxBytes=1)
rh.rotator = rotator
rh.namer = namer
m1 = self.next_rec()
rh.emit(m1)
self.assertLogFile(self.fn)
m2 = self.next_rec()
rh.emit(m2)
fn = namer(self.fn + ".1")
self.assertLogFile(fn)
newline = os.linesep
with open(fn, "rb") as f:
compressed = f.read()
data = zlib.decompress(compressed)
self.assertEqual(data.decode("ascii"), m1.msg + newline)
rh.emit(self.next_rec())
fn = namer(self.fn + ".2")
self.assertLogFile(fn)
with open(fn, "rb") as f:
compressed = f.read()
data = zlib.decompress(compressed)
self.assertEqual(data.decode("ascii"), m1.msg + newline)
rh.emit(self.next_rec())
fn = namer(self.fn + ".2")
with open(fn, "rb") as f:
compressed = f.read()
data = zlib.decompress(compressed)
self.assertEqual(data.decode("ascii"), m2.msg + newline)
self.assertFalse(os.path.exists(namer(self.fn + ".3")))
rh.close()
class TimedRotatingFileHandlerTest(BaseFileTest):
# other test methods added below
def test_rollover(self):
fh = logging.handlers.TimedRotatingFileHandler(self.fn, 'S',
backupCount=1)
fmt = logging.Formatter('%(asctime)s %(message)s')
fh.setFormatter(fmt)
r1 = logging.makeLogRecord({'msg': 'testing - initial'})
fh.emit(r1)
self.assertLogFile(self.fn)
time.sleep(1.1) # a little over a second ...
r2 = logging.makeLogRecord({'msg': 'testing - after delay'})
fh.emit(r2)
fh.close()
# At this point, we should have a recent rotated file which we
# can test for the existence of. However, in practice, on some
# machines which run really slowly, we don't know how far back
# in time to go to look for the log file. So, we go back a fair
# bit, and stop as soon as we see a rotated file. In theory this
# could of course still fail, but the chances are lower.
found = False
now = datetime.datetime.now()
GO_BACK = 5 * 60 # seconds
for secs in range(GO_BACK):
prev = now - datetime.timedelta(seconds=secs)
fn = self.fn + prev.strftime(".%Y-%m-%d_%H-%M-%S")
found = os.path.exists(fn)
if found:
self.rmfiles.append(fn)
break
msg = 'No rotated files found, went back %d seconds' % GO_BACK
if not found:
#print additional diagnostics
dn, fn = os.path.split(self.fn)
files = [f for f in os.listdir(dn) if f.startswith(fn)]
print('Test time: %s' % now.strftime("%Y-%m-%d %H-%M-%S"), file=sys.stderr)
print('The only matching files are: %s' % files, file=sys.stderr)
for f in files:
print('Contents of %s:' % f)
path = os.path.join(dn, f)
with open(path, 'r') as tf:
print(tf.read())
self.assertTrue(found, msg=msg)
def test_invalid(self):
assertRaises = self.assertRaises
assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
self.fn, 'X', delay=True)
assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
self.fn, 'W', delay=True)
assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler,
self.fn, 'W7', delay=True)
def secs(**kw):
return datetime.timedelta(**kw) // datetime.timedelta(seconds=1)
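        # e.g. secs(hours=1) == 3600: floor-dividing two timedeltas yields
        # the interval length in whole seconds.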
for when, exp in (('S', 1),
('M', 60),
('H', 60 * 60),
('D', 60 * 60 * 24),
('MIDNIGHT', 60 * 60 * 24),
# current time (epoch start) is a Thursday, W0 means Monday
('W0', secs(days=4, hours=24)),
):
def test_compute_rollover(self, when=when, exp=exp):
rh = logging.handlers.TimedRotatingFileHandler(
self.fn, when=when, interval=1, backupCount=0, utc=True)
currentTime = 0.0
actual = rh.computeRollover(currentTime)
if exp != actual:
# Failures occur on some systems for MIDNIGHT and W0.
# Print detailed calculation for MIDNIGHT so we can try to see
# what's going on
if when == 'MIDNIGHT':
try:
if rh.utc:
t = time.gmtime(currentTime)
else:
t = time.localtime(currentTime)
currentHour = t[3]
currentMinute = t[4]
currentSecond = t[5]
# r is the number of seconds left between now and midnight
r = logging.handlers._MIDNIGHT - ((currentHour * 60 +
currentMinute) * 60 +
currentSecond)
result = currentTime + r
print('t: %s (%s)' % (t, rh.utc), file=sys.stderr)
print('currentHour: %s' % currentHour, file=sys.stderr)
print('currentMinute: %s' % currentMinute, file=sys.stderr)
print('currentSecond: %s' % currentSecond, file=sys.stderr)
print('r: %s' % r, file=sys.stderr)
print('result: %s' % result, file=sys.stderr)
except Exception:
print('exception in diagnostic code: %s' % sys.exc_info()[1], file=sys.stderr)
self.assertEqual(exp, actual)
rh.close()
setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover)
@unittest.skipUnless(win32evtlog, 'win32evtlog/win32evtlogutil required for this test.')
class NTEventLogHandlerTest(BaseTest):
def test_basic(self):
logtype = 'Application'
elh = win32evtlog.OpenEventLog(None, logtype)
num_recs = win32evtlog.GetNumberOfEventLogRecords(elh)
h = logging.handlers.NTEventLogHandler('test_logging')
r = logging.makeLogRecord({'msg': 'Test Log Message'})
h.handle(r)
h.close()
# Now see if the event is recorded
self.assertTrue(num_recs < win32evtlog.GetNumberOfEventLogRecords(elh))
flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \
win32evtlog.EVENTLOG_SEQUENTIAL_READ
found = False
GO_BACK = 100
events = win32evtlog.ReadEventLog(elh, flags, GO_BACK)
for e in events:
if e.SourceName != 'test_logging':
continue
msg = win32evtlogutil.SafeFormatMessage(e, logtype)
if msg != 'Test Log Message\r\n':
continue
found = True
break
msg = 'Record not found in event log, went back %d records' % GO_BACK
self.assertTrue(found, msg=msg)
# Set the locale to the platform-dependent default. I have no idea
# why the test does this, but in any case we save the current locale
# first and restore it at the end.
@run_with_locale('LC_ALL', '')
def test_main():
run_unittest(BuiltinLevelsTest, BasicFilterTest,
CustomLevelsAndFiltersTest, HandlerTest, MemoryHandlerTest,
ConfigFileTest, SocketHandlerTest, DatagramHandlerTest,
MemoryTest, EncodingTest, WarningsTest, ConfigDictTest,
ManagerTest, FormatterTest, BufferingFormatterTest,
StreamHandlerTest, LogRecordFactoryTest, ChildLoggerTest,
QueueHandlerTest, ShutdownTest, ModuleLevelMiscTest,
BasicConfigTest, LoggerAdapterTest, LoggerTest,
SMTPHandlerTest, FileHandlerTest, RotatingFileHandlerTest,
LastResortTest, LogRecordTest, ExceptionTest,
SysLogHandlerTest, HTTPHandlerTest, NTEventLogHandlerTest,
TimedRotatingFileHandlerTest
)
if __name__ == "__main__":
test_main()<|fim▁end|> | # Test %-formatting
r = self.get_record() |
<|file_name|>improc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Image processing and feature extraction functions.
"""
import cv2
import numpy as np
def pad_image(im, width, height, border=255):
"""pad char image in a larger image"""
xoff = abs(int((im.shape[1] - width) / 2))
yoff = abs(int((im.shape[0] - height) / 2))
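    # xoff/yoff are half the size difference along each axis, used either as
    # a crop offset (input larger than target) or a paste offset (smaller).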
if width >= im.shape[1]:
x_min_old = 0
x_max_old = im.shape[1]
x_min_new = xoff
x_max_new = im.shape[1] + xoff
else:
x_min_old = xoff
x_max_old = width + xoff
x_min_new = 0
x_max_new = width
if height >= im.shape[0]:
y_min_old = 0
y_max_old = im.shape[0]
y_min_new = yoff
y_max_new = im.shape[0] + yoff
else:
y_min_old = yoff
y_max_old = height + yoff
y_min_new = 0
y_max_new = height
image_subset = im[y_min_old:y_max_old, x_min_old:x_max_old]
new_bmp = np.ones((height, width, 3), dtype=np.uint8) * border
new_bmp[y_min_new:y_max_new, x_min_new:x_max_new] = image_subset
return new_bmp
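# Illustrative usage (variable names hypothetical): center a glyph image on a
# 32x32 white canvas:
#   padded = pad_image(char_im, 32, 32)  # char_im: HxWx3 uint8 array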
def transform_random(image, trans_size, rot_size, scale_size):
"""apply a small random transformation to an image"""
# TODO: make ranges of random numbers input parameters
trans = (np.random.rand(2) - 0.5) * np.array(trans_size)
rot = (np.random.rand(4) - 0.5) * rot_size
scale = 1.0 + scale_size * (np.random.rand(1)[0] - 0.5)
x_size = image.shape[1]
y_size = image.shape[0]
trans_to_center = np.float32(
[[1, 0, -x_size / 2.0],
[0, 1, -y_size / 2.0],
[0, 0, 1]])
trans_from_center = np.float32(
[[1, 0, x_size / 2.0],
[0, 1, y_size / 2.0],
[0, 0, 1]])
trans_random = np.float32(
[[1 + rot[0], 0 + rot[1], trans[0]],
[0 + rot[2], 1 + rot[3], trans[1]],
[0, 0, 1]])
trans_scale = np.identity(3, dtype=np.float32) * scale
tmat = np.dot(trans_from_center, np.dot(trans_scale, np.dot(trans_random, trans_to_center)))[0:2, :]
image_new = cv2.warpAffine(
image, tmat,
(image.shape[1], image.shape[0]),
borderValue=(255, 255, 255))
# cv2.imshow("image", image)
# cv2.imshow("new_image", image_new)
# cv2.waitKey()
return image_new
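# Illustrative usage (parameter values hypothetical): jitter by up to +/-2 px,
# with slight shear/rotation and roughly +/-5% scale:
#   augmented = transform_random(im, trans_size=(4, 4), rot_size=0.1, scale_size=0.1)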
def filter_cc(image):
"""find connected components in a threshold image and white out
everything except the second largest"""
# TODO: better way to select relevant components
comp_filt = np.copy(image)
gray = 255 - np.array(np.sum(image, axis=2) / 3.0, dtype=np.uint8)
_, thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
connectivity = 4
comps = cv2.connectedComponentsWithStats(thresh, connectivity, cv2.CV_32S)
labels = comps[1]
sizes = comps[2][:, cv2.CC_STAT_AREA]
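    # connectedComponentsWithStats labels the background as component 0; on a
    # mostly-white page that is usually the largest component, so the character
    # ink is typically the second-largest one.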
# get index of second-largest component
if len(sizes) > 1:
second_largest_idx = np.argsort(sizes)[-2]
else:
second_largest_idx = np.argsort(sizes)[-1]
# eliminate everything else
for label_idx in range(len(sizes)):
if label_idx != second_largest_idx:
comp_filt[labels == label_idx] = 255
# cv2.imshow("image", image)
# cv2.imshow("gray", gray)
# cv2.imshow("thresh", thresh)
# cv2.imshow("comp_filt", comp_filt)
# cv2.waitKey()
return comp_filt
def align(image, x_align=True, y_align=True):
"""shift an image so the center of mass of the pixels is centered"""
# TODO: this should just operate on grayscale
gray = 255 - np.array(np.sum(image, axis=2) / 3.0, dtype=np.uint8)
if x_align:
x_size = image.shape[1]
x_mean = np.sum(np.sum(gray, axis=0) * np.arange(x_size)) / np.sum(gray)
x_shift = x_size / 2.0 - x_mean
else:
x_shift = 0.0
if y_align:
y_size = image.shape[0]
y_mean = np.sum(np.sum(gray, axis=1) * np.arange(y_size)) / np.sum(gray)
y_shift = y_size / 2.0 - y_mean
else:
y_shift = 0.0
tmat = np.float32(
[[1, 0, x_shift],
[0, 1, y_shift]])
new_image = cv2.warpAffine(
image, tmat, (image.shape[1], image.shape[0]), borderValue=(255, 255, 255))<|fim▁hole|> # cv2.imshow("new_image", new_image)
# cv2.waitKey()
return new_image
def grayscale(image):
"""convert RGB ubyte image to grayscale"""
return np.sum(image, axis=2) / 3.0
def downsample(image, scale_factor):
"""downsample an image and unravel to create a feature vector"""
feats = cv2.resize(
image,
(int(image.shape[0] * scale_factor),
int(image.shape[1] * scale_factor)))
return feats
def downsample_4(image):
"""create a feature vector from four downsampling amounts"""
return downsample_multi(image, [0.4, 0.2, 0.1, 0.05])
def downsample_multi(image, scales):
"""create a feature vector from arbitrary downsampling amounts"""
return np.hstack([np.ravel(downsample(image, x)) for x in scales])
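# Illustrative usage: downsample_4(im) concatenates raveled copies of `im`
# resized to 40%, 20%, 10% and 5% into one 1-D feature vector.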
def max_pool(im):
"""perform 2x2 max pooling"""
return np.max(
np.stack(
(im[0::2, 0::2],
im[0::2, 1::2],
im[1::2, 0::2],
im[1::2, 1::2]),
axis=-1),
axis=-1)
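# NOTE: max_pool assumes both image dimensions are even; for odd sizes the
# four strided slices differ in shape and np.stack will raise.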
def max_pool_multi(image, ns):
"""perform multiple levels of max pooling and unravel
to create a feature vector"""
# TODO: move this to a higher level
# image_gray = _grayscale(image)
if 1 in ns:
res = [image]
else:
res = []
for n in range(2, max(ns) + 1):
image = max_pool(image)
if n in ns:
res.append(image)
return np.hstack([np.ravel(y) for y in res])
def column_ex(gray):
"""experimental feature - something like the center of mass of
overlapping columns of the image"""
width = 2
# mul_mat = np.arange(y_size)[:, np.newaxis]
# for some reason, it works a lot better to not divide by the sum of the
# whole window but only the first column.
mul_mat = np.linspace(0, 1, gray.shape[0])[:, np.newaxis]
    # sum over a window of `width` columns, weighted by row position
    y_agg = np.array([(np.sum(gray[:, idx:(idx + width)] * mul_mat) /
np.sum(gray[:, idx]))
for idx in range(gray.shape[1] - width)])
y_agg[~np.isfinite(y_agg)] = 0.0
res = np.hstack((y_agg, np.diff(y_agg)))
return res
def extract_pos(pos, im, border=255):
"""extract a position (tuple of start and end) from an image"""
# this is intended to have the correct logic to always return an image
# of the width of the position even if it is off the edge of the image
target_width = pos[1] - pos[0]
extract = im[:, np.maximum(pos[0], 0):pos[1]]
# print(cpos, extract.shape, im.shape)
if extract.shape[1] < target_width:
res = np.ones((im.shape[0], target_width, 3), dtype=np.ubyte) * border
if pos[0] < 0:
pr = (-pos[0], -pos[0] + extract.shape[1])
else:
pr = (0, extract.shape[1])
# print(pr, flush=True)
res[:, pr[0]:pr[1]] = extract
return res
else:
res = extract
return res<|fim▁end|> |
# cv2.imshow("image", image) |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import getopt
import json
import locale
import os
import re
import sys
from urllib import request, parse
import platform
import threading
from .version import __version__
from .util import log, sogou_proxy_server, get_filename, unescape_html
dry_run = False
force = False
player = None
sogou_proxy = None
sogou_env = None
cookies_txt = None
fake_headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'UTF-8,*;q=0.5',
'Accept-Encoding': 'gzip,deflate,sdch',
'Accept-Language': 'en-US,en;q=0.8',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:13.0) Gecko/20100101 Firefox/13.0'
}
if sys.stdout.isatty():
default_encoding = sys.stdout.encoding.lower()
else:
default_encoding = locale.getpreferredencoding().lower()
def tr(s):
try:
s.encode(default_encoding)
return s
except:
return str(s.encode('utf-8'))[2:-1]
# DEPRECATED in favor of match1()
def r1(pattern, text):
m = re.search(pattern, text)
if m:
return m.group(1)
# DEPRECATED in favor of match1()
def r1_of(patterns, text):
for p in patterns:
x = r1(p, text)
if x:
return x
def match1(text, *patterns):
"""Scans through a string for substrings matched some patterns (first-subgroups only).
Args:
text: A string to be scanned.
patterns: Arbitrary number of regex patterns.
Returns:
When only one pattern is given, returns a string (None if no match found).
When more than one pattern are given, returns a list of strings ([] if no match found).
"""
if len(patterns) == 1:
pattern = patterns[0]
match = re.search(pattern, text)
if match:
return match.group(1)
else:
return None
else:
ret = []
for pattern in patterns:
match = re.search(pattern, text)
if match:
ret.append(match.group(1))
return ret
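# Illustrative usage:
#   match1('v123.mp4', r'v(\d+)')               # -> '123'
#   match1('v123.mp4', r'v(\d+)', r'\.(\w+)$')  # -> ['123', 'mp4']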
def launch_player(player, urls):
import subprocess
import shlex
subprocess.call(shlex.split(player) + list(urls))
def parse_query_param(url, param):
"""Parses the query string of a URL and returns the value of a parameter.
Args:
url: A URL.
param: A string representing the name of the parameter.
Returns:
The value of the parameter.
"""
try:
return parse.parse_qs(parse.urlparse(url).query)[param][0]
except:
return None
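# Illustrative usage:
#   parse_query_param('http://example.com/watch?id=42&t=3', 'id')  # -> '42'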
def unicodize(text):
return re.sub(r'\\u([0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f])', lambda x: chr(int(x.group(0)[2:], 16)), text)
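# Illustrative usage: unicodize(r'\u4f60\u597d') -> '你好'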
# DEPRECATED in favor of util.legitimize()
def escape_file_path(path):
path = path.replace('/', '-')
path = path.replace('\\', '-')
path = path.replace('*', '-')
path = path.replace('?', '-')
return path
# DEPRECATED in favor of util.legitimize()
def filenameable(text):
"""Converts a string to a legal filename through various OSes.
"""
# All POSIX systems
text = text.translate({
0: None,
ord('/'): '-',
})
if platform.system() == 'Windows': # For Windows
text = text.translate({
ord(':'): '-',
ord('*'): '-',
ord('?'): '-',
ord('\\'): '-',
ord('\"'): '\'',
ord('<'): '-',
ord('>'): '-',
ord('|'): '-',
ord('+'): '-',
ord('['): '(',
ord(']'): ')',
})
else:
if text.startswith("."):
text = text[1:]
if platform.system() == 'Darwin': # For Mac OS
text = text.translate({
ord(':'): '-',
})
return text
<|fim▁hole|> """Decompresses data for Content-Encoding: gzip.
"""
from io import BytesIO
import gzip
buffer = BytesIO(data)
f = gzip.GzipFile(fileobj=buffer)
return f.read()
def undeflate(data):
"""Decompresses data for Content-Encoding: deflate.
(the zlib compression is used.)
"""
import zlib
decompressobj = zlib.decompressobj(-zlib.MAX_WBITS)
return decompressobj.decompress(data)+decompressobj.flush()
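# NOTE: -zlib.MAX_WBITS makes zlib expect a raw deflate stream (no zlib/gzip
# header), which is what some servers actually send for "deflate".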
# DEPRECATED in favor of get_content()
def get_response(url, faker = False):
if faker:
response = request.urlopen(request.Request(url, headers = fake_headers), None)
else:
response = request.urlopen(url)
data = response.read()
if response.info().get('Content-Encoding') == 'gzip':
data = ungzip(data)
elif response.info().get('Content-Encoding') == 'deflate':
data = undeflate(data)
response.data = data
return response
# DEPRECATED in favor of get_content()
def get_html(url, encoding = None, faker = False):
content = get_response(url, faker).data
return str(content, 'utf-8', 'ignore')
# DEPRECATED in favor of get_content()
def get_decoded_html(url, faker = False):
response = get_response(url, faker)
data = response.data
charset = r1(r'charset=([\w-]+)', response.headers['content-type'])
if charset:
return data.decode(charset, 'ignore')
else:
return data
def get_content(url, headers={}, decoded=True):
"""Gets the content of a URL via sending a HTTP GET request.
Args:
url: A URL.
headers: Request headers used by the client.
        decoded: Whether to decode the response body using UTF-8 or the charset given in the Content-Type header.
Returns:
The content as a string.
"""
req = request.Request(url, headers=headers)
if cookies_txt:
cookies_txt.add_cookie_header(req)
req.headers.update(req.unredirected_hdrs)
response = request.urlopen(req)
data = response.read()
# Handle HTTP compression for gzip and deflate (zlib)
content_encoding = response.getheader('Content-Encoding')
if content_encoding == 'gzip':
data = ungzip(data)
elif content_encoding == 'deflate':
data = undeflate(data)
# Decode the response body
if decoded:
charset = match1(response.getheader('Content-Type'), r'charset=([\w-]+)')
if charset is not None:
data = data.decode(charset)
else:
data = data.decode('utf-8')
return data
def url_size(url, faker = False):
if faker:
response = request.urlopen(request.Request(url, headers = fake_headers), None)
else:
response = request.urlopen(url)
size = int(response.headers['content-length'])
return size
def urls_size(urls):
return sum(map(url_size, urls))
def url_info(url, faker = False):
if faker:
response = request.urlopen(request.Request(url, headers = fake_headers), None)
else:
response = request.urlopen(request.Request(url))
headers = response.headers
type = headers['content-type']
mapping = {
'video/3gpp': '3gp',
'video/f4v': 'flv',
'video/mp4': 'mp4',
'video/MP2T': 'ts',
'video/quicktime': 'mov',
'video/webm': 'webm',
'video/x-flv': 'flv',
'video/x-ms-asf': 'asf',
'audio/mpeg': 'mp3'
}
if type in mapping:
ext = mapping[type]
else:
type = None
if headers['content-disposition']:
try:
filename = parse.unquote(r1(r'filename="?([^"]+)"?', headers['content-disposition']))
if len(filename.split('.')) > 1:
ext = filename.split('.')[-1]
else:
ext = None
except:
ext = None
else:
ext = None
if headers['transfer-encoding'] != 'chunked':
size = int(headers['content-length'])
else:
size = None
return type, ext, size
def url_locations(urls, faker = False):
locations = []
for url in urls:
if faker:
response = request.urlopen(request.Request(url, headers = fake_headers), None)
else:
response = request.urlopen(request.Request(url))
locations.append(response.url)
return locations
def url_save(url, filepath, bar, refer = None, is_part = False, faker = False):
file_size = url_size(url, faker = faker)
if os.path.exists(filepath):
if not force and file_size == os.path.getsize(filepath):
if not is_part:
if bar:
bar.done()
print('Skipping %s: file already exists' % tr(os.path.basename(filepath)))
else:
if bar:
bar.update_received(file_size)
return
else:
if not is_part:
if bar:
bar.done()
print('Overwriting %s' % tr(os.path.basename(filepath)), '...')
elif not os.path.exists(os.path.dirname(filepath)):
os.mkdir(os.path.dirname(filepath))
temp_filepath = filepath + '.download'
received = 0
if not force:
open_mode = 'ab'
if os.path.exists(temp_filepath):
received += os.path.getsize(temp_filepath)
if bar:
bar.update_received(os.path.getsize(temp_filepath))
else:
open_mode = 'wb'
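    # Resume support: when a partial .download file exists, only the remaining
    # bytes are requested (via the Range header below) and appended to it.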
if received < file_size:
if faker:
headers = fake_headers
else:
headers = {}
if received:
headers['Range'] = 'bytes=' + str(received) + '-'
if refer:
headers['Referer'] = refer
response = request.urlopen(request.Request(url, headers = headers), None)
try:
range_start = int(response.headers['content-range'][6:].split('/')[0].split('-')[0])
            end_length = int(response.headers['content-range'][6:].split('/')[1])
range_length = end_length - range_start
except:
range_length = int(response.headers['content-length'])
if file_size != received + range_length:
received = 0
if bar:
bar.received = 0
open_mode = 'wb'
with open(temp_filepath, open_mode) as output:
while True:
buffer = response.read(1024 * 256)
if not buffer:
if received == file_size: # Download finished
break
else: # Unexpected termination. Retry request
headers['Range'] = 'bytes=' + str(received) + '-'
response = request.urlopen(request.Request(url, headers = headers), None)
output.write(buffer)
received += len(buffer)
if bar:
bar.update_received(len(buffer))
assert received == os.path.getsize(temp_filepath), '%s == %s == %s' % (received, os.path.getsize(temp_filepath), temp_filepath)
if os.access(filepath, os.W_OK):
os.remove(filepath) # on Windows rename could fail if destination filepath exists
os.rename(temp_filepath, filepath)
def url_save_chunked(url, filepath, bar, refer = None, is_part = False, faker = False):
if os.path.exists(filepath):
if not force:
if not is_part:
if bar:
bar.done()
print('Skipping %s: file already exists' % tr(os.path.basename(filepath)))
else:
if bar:
bar.update_received(os.path.getsize(filepath))
return
else:
if not is_part:
if bar:
bar.done()
print('Overwriting %s' % tr(os.path.basename(filepath)), '...')
elif not os.path.exists(os.path.dirname(filepath)):
os.mkdir(os.path.dirname(filepath))
temp_filepath = filepath + '.download'
received = 0
if not force:
open_mode = 'ab'
if os.path.exists(temp_filepath):
received += os.path.getsize(temp_filepath)
if bar:
bar.update_received(os.path.getsize(temp_filepath))
else:
open_mode = 'wb'
if faker:
headers = fake_headers
else:
headers = {}
if received:
headers['Range'] = 'bytes=' + str(received) + '-'
if refer:
headers['Referer'] = refer
response = request.urlopen(request.Request(url, headers = headers), None)
with open(temp_filepath, open_mode) as output:
while True:
buffer = response.read(1024 * 256)
if not buffer:
break
output.write(buffer)
received += len(buffer)
if bar:
bar.update_received(len(buffer))
    assert received == os.path.getsize(temp_filepath), '%s == %s == %s' % (received, os.path.getsize(temp_filepath), temp_filepath)
if os.access(filepath, os.W_OK):
os.remove(filepath) # on Windows rename could fail if destination filepath exists
os.rename(temp_filepath, filepath)
class SimpleProgressBar:
def __init__(self, total_size, total_pieces = 1):
self.displayed = False
self.total_size = total_size
self.total_pieces = total_pieces
self.current_piece = 1
self.received = 0
def update(self):
self.displayed = True
bar_size = 40
percent = round(self.received * 100 / self.total_size, 1)
if percent > 100:
percent = 100
dots = bar_size * int(percent) // 100
plus = int(percent) - dots // bar_size * 100
if plus > 0.8:
plus = '='
elif plus > 0.4:
plus = '>'
else:
plus = ''
bar = '=' * dots + plus
bar = '{0:>5}% ({1:>5}/{2:<5}MB) [{3:<40}] {4}/{5}'.format(percent, round(self.received / 1048576, 1), round(self.total_size / 1048576, 1), bar, self.current_piece, self.total_pieces)
sys.stdout.write('\r' + bar)
sys.stdout.flush()
def update_received(self, n):
self.received += n
self.update()
def update_piece(self, n):
self.current_piece = n
def done(self):
if self.displayed:
print()
self.displayed = False
class PiecesProgressBar:
def __init__(self, total_size, total_pieces = 1):
self.displayed = False
self.total_size = total_size
self.total_pieces = total_pieces
self.current_piece = 1
self.received = 0
def update(self):
self.displayed = True
bar = '{0:>5}%[{1:<40}] {2}/{3}'.format('?', '?' * 40, self.current_piece, self.total_pieces)
sys.stdout.write('\r' + bar)
sys.stdout.flush()
def update_received(self, n):
self.received += n
self.update()
def update_piece(self, n):
self.current_piece = n
def done(self):
if self.displayed:
print()
self.displayed = False
class DummyProgressBar:
def __init__(self, *args):
pass
def update_received(self, n):
pass
def update_piece(self, n):
pass
def done(self):
pass
def download_urls(urls, title, ext, total_size, output_dir='.', refer=None, merge=True, faker=False):
assert urls
if dry_run:
print('Real URLs:\n%s\n' % urls)
return
if player:
launch_player(player, urls)
return
if not total_size:
try:
total_size = urls_size(urls)
except:
import traceback
import sys
traceback.print_exc(file = sys.stdout)
pass
title = get_filename(title)
filename = '%s.%s' % (title, ext)
filepath = os.path.join(output_dir, filename)
if total_size:
if not force and os.path.exists(filepath) and os.path.getsize(filepath) >= total_size * 0.9:
print('Skipping %s: file already exists' % tr(filepath))
print()
return
bar = SimpleProgressBar(total_size, len(urls))
else:
bar = PiecesProgressBar(total_size, len(urls))
if len(urls) == 1:
url = urls[0]
print('Downloading %s ...' % tr(filename))
url_save(url, filepath, bar, refer = refer, faker = faker)
bar.done()
else:
parts = []
print('Downloading %s.%s ...' % (tr(title), ext))
for i, url in enumerate(urls):
filename = '%s[%02d].%s' % (title, i, ext)
filepath = os.path.join(output_dir, filename)
parts.append(filepath)
#print 'Downloading %s [%s/%s]...' % (tr(filename), i + 1, len(urls))
bar.update_piece(i + 1)
url_save(url, filepath, bar, refer = refer, is_part = True, faker = faker)
bar.done()
if not merge:
print()
return
if ext == 'flv':
try:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_flv_to_mp4
ffmpeg_concat_flv_to_mp4(parts, os.path.join(output_dir, title + '.mp4'))
else:
from .processor.join_flv import concat_flv
concat_flv(parts, os.path.join(output_dir, title + '.flv'))
except:
raise
else:
for part in parts:
os.remove(part)
elif ext == 'mp4':
try:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_mp4_to_mp4
ffmpeg_concat_mp4_to_mp4(parts, os.path.join(output_dir, title + '.mp4'))
else:
from .processor.join_mp4 import concat_mp4
concat_mp4(parts, os.path.join(output_dir, title + '.mp4'))
except:
raise
else:
for part in parts:
os.remove(part)
else:
print("Can't merge %s files" % ext)
print()
def download_urls_chunked(urls, title, ext, total_size, output_dir='.', refer=None, merge=True, faker=False):
assert urls
if dry_run:
print('Real URLs:\n%s\n' % urls)
return
if player:
launch_player(player, urls)
return
    assert ext in ('ts',)
title = get_filename(title)
filename = '%s.%s' % (title, 'ts')
filepath = os.path.join(output_dir, filename)
if total_size:
if not force and os.path.exists(filepath[:-3] + '.mkv'):
print('Skipping %s: file already exists' % tr(filepath[:-3] + '.mkv'))
print()
return
bar = SimpleProgressBar(total_size, len(urls))
else:
bar = PiecesProgressBar(total_size, len(urls))
if len(urls) == 1:
parts = []
url = urls[0]
print('Downloading %s ...' % tr(filename))
filepath = os.path.join(output_dir, filename)
parts.append(filepath)
url_save_chunked(url, filepath, bar, refer = refer, faker = faker)
bar.done()
if not merge:
print()
return
if ext == 'ts':
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_convert_ts_to_mkv
if ffmpeg_convert_ts_to_mkv(parts, os.path.join(output_dir, title + '.mkv')):
for part in parts:
os.remove(part)
else:
os.remove(os.path.join(output_dir, title + '.mkv'))
else:
print('No ffmpeg is found. Conversion aborted.')
else:
print("Can't convert %s files" % ext)
else:
parts = []
print('Downloading %s.%s ...' % (tr(title), ext))
for i, url in enumerate(urls):
filename = '%s[%02d].%s' % (title, i, ext)
filepath = os.path.join(output_dir, filename)
parts.append(filepath)
#print 'Downloading %s [%s/%s]...' % (tr(filename), i + 1, len(urls))
bar.update_piece(i + 1)
url_save_chunked(url, filepath, bar, refer = refer, is_part = True, faker = faker)
bar.done()
if not merge:
print()
return
if ext == 'ts':
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_ts_to_mkv
if ffmpeg_concat_ts_to_mkv(parts, os.path.join(output_dir, title + '.mkv')):
for part in parts:
os.remove(part)
else:
os.remove(os.path.join(output_dir, title + '.mkv'))
else:
print('No ffmpeg is found. Merging aborted.')
else:
print("Can't merge %s files" % ext)
print()
def download_rtmp_url(url, playpath, title, ext, total_size=0, output_dir='.', refer=None, merge=True, faker=False):
assert url
if dry_run:
print('Real URL:\n%s\n' % [url])
print('Real Playpath:\n%s\n' % [playpath])
return
if player:
from .processor.rtmpdump import play_rtmpdump_stream
play_rtmpdump_stream(player, url, playpath)
return
from .processor.rtmpdump import has_rtmpdump_installed, download_rtmpdump_stream
assert has_rtmpdump_installed(), "RTMPDump not installed."
download_rtmpdump_stream(url, playpath, title, ext, output_dir)
def playlist_not_supported(name):
def f(*args, **kwargs):
raise NotImplementedError('Playlist is not supported for ' + name)
return f
def print_info(site_info, title, type, size):
if type:
type = type.lower()
if type in ['3gp']:
type = 'video/3gpp'
elif type in ['asf', 'wmv']:
type = 'video/x-ms-asf'
elif type in ['flv', 'f4v']:
type = 'video/x-flv'
elif type in ['mkv']:
type = 'video/x-matroska'
elif type in ['mp3']:
type = 'audio/mpeg'
elif type in ['mp4']:
type = 'video/mp4'
elif type in ['mov']:
type = 'video/quicktime'
elif type in ['ts']:
type = 'video/MP2T'
elif type in ['webm']:
type = 'video/webm'
if type in ['video/3gpp']:
type_info = "3GPP multimedia file (%s)" % type
elif type in ['video/x-flv', 'video/f4v']:
type_info = "Flash video (%s)" % type
elif type in ['video/mp4', 'video/x-m4v']:
type_info = "MPEG-4 video (%s)" % type
elif type in ['video/MP2T']:
type_info = "MPEG-2 transport stream (%s)" % type
elif type in ['video/webm']:
type_info = "WebM video (%s)" % type
#elif type in ['video/ogg']:
# type_info = "Ogg video (%s)" % type
elif type in ['video/quicktime']:
type_info = "QuickTime video (%s)" % type
elif type in ['video/x-matroska']:
type_info = "Matroska video (%s)" % type
#elif type in ['video/x-ms-wmv']:
# type_info = "Windows Media video (%s)" % type
elif type in ['video/x-ms-asf']:
type_info = "Advanced Systems Format (%s)" % type
#elif type in ['video/mpeg']:
# type_info = "MPEG video (%s)" % type
elif type in ['audio/mpeg']:
type_info = "MP3 (%s)" % type
else:
type_info = "Unknown type (%s)" % type
print("Video Site:", site_info)
print("Title: ", unescape_html(tr(title)))
print("Type: ", type_info)
print("Size: ", round(size / 1048576, 2), "MiB (" + str(size) + " Bytes)")
print()
def parse_host(host):
"""Parses host name and port number from a string.
"""
if re.match(r'^(\d+)$', host) is not None:
return ("0.0.0.0", int(host))
if re.match(r'^(\w+)://', host) is None:
host = "//" + host
o = parse.urlparse(host)
hostname = o.hostname or "0.0.0.0"
port = o.port or 0
return (hostname, port)
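# Illustrative usage:
#   parse_host('8080')            # -> ('0.0.0.0', 8080)
#   parse_host('127.0.0.1:1080')  # -> ('127.0.0.1', 1080)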
def get_sogou_proxy():
return sogou_proxy
def set_proxy(proxy):
proxy_handler = request.ProxyHandler({
'http': '%s:%s' % proxy,
'https': '%s:%s' % proxy,
})
opener = request.build_opener(proxy_handler)
request.install_opener(opener)
def unset_proxy():
proxy_handler = request.ProxyHandler({})
opener = request.build_opener(proxy_handler)
request.install_opener(opener)
# DEPRECATED in favor of set_proxy() and unset_proxy()
def set_http_proxy(proxy):
    if proxy is None: # Use system default setting
proxy_support = request.ProxyHandler()
elif proxy == '': # Don't use any proxy
proxy_support = request.ProxyHandler({})
else: # Use proxy
proxy_support = request.ProxyHandler({'http': '%s' % proxy, 'https': '%s' % proxy})
opener = request.build_opener(proxy_support)
request.install_opener(opener)
def download_main(download, download_playlist, urls, playlist, output_dir, merge, info_only):
for url in urls:
if url.startswith('https://'):
url = url[8:]
if not url.startswith('http://'):
url = 'http://' + url
if playlist:
download_playlist(url, output_dir = output_dir, merge = merge, info_only = info_only)
else:
download(url, output_dir = output_dir, merge = merge, info_only = info_only)
def get_version():
try:
import subprocess
real_dir = os.path.dirname(os.path.realpath(__file__))
git_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=real_dir, stderr=subprocess.DEVNULL).decode('utf-8').strip()
assert git_hash
return '%s-%s' % (__version__, git_hash)
except:
return __version__
def script_main(script_name, download, download_playlist = None):
version = 'You-Get %s, a video downloader.' % get_version()
help = 'Usage: %s [OPTION]... [URL]...\n' % script_name
help += '''\nStartup options:
-V | --version Display the version and exit.
-h | --help Print this help and exit.
'''
help += '''\nDownload options (use with URLs):
-f | --force Force overwriting existed files.
-i | --info Display the information of videos without downloading.
-u | --url Display the real URLs of videos without downloading.
-c | --cookies Load NetScape's cookies.txt file.
-n | --no-merge Don't merge video parts.
-o | --output-dir <PATH> Set the output directory for downloaded videos.
-p | --player <PLAYER [options]> Directly play the video with PLAYER like vlc/smplayer.
-x | --http-proxy <HOST:PORT> Use specific HTTP proxy for downloading.
--no-proxy Don't use any proxy. (ignore $http_proxy)
-S | --sogou Use a Sogou proxy server for downloading.
--sogou-proxy <HOST:PORT> Run a standalone Sogou proxy server.
--debug Show traceback on KeyboardInterrupt.
'''
short_opts = 'Vhfiuc:nSo:p:x:'
opts = ['version', 'help', 'force', 'info', 'url', 'cookies', 'no-merge', 'no-proxy', 'debug', 'sogou', 'output-dir=', 'player=', 'http-proxy=', 'sogou-proxy=', 'sogou-env=']
if download_playlist:
short_opts = 'l' + short_opts
opts = ['playlist'] + opts
try:
opts, args = getopt.getopt(sys.argv[1:], short_opts, opts)
except getopt.GetoptError as err:
log.e(err)
log.e("try 'you-get --help' for more options")
sys.exit(2)
global force
global dry_run
global player
global sogou_proxy
global sogou_env
global cookies_txt
cookies_txt = None
info_only = False
playlist = False
merge = True
output_dir = '.'
proxy = None
traceback = False
for o, a in opts:
if o in ('-V', '--version'):
print(version)
sys.exit()
elif o in ('-h', '--help'):
print(version)
print(help)
sys.exit()
elif o in ('-f', '--force'):
force = True
elif o in ('-i', '--info'):
info_only = True
elif o in ('-u', '--url'):
dry_run = True
elif o in ('-c', '--cookies'):
from http import cookiejar
cookies_txt = cookiejar.MozillaCookieJar(a)
cookies_txt.load()
elif o in ('-l', '--playlist'):
playlist = True
elif o in ('-n', '--no-merge'):
merge = False
elif o in ('--no-proxy',):
proxy = ''
elif o in ('--debug',):
traceback = True
elif o in ('-o', '--output-dir'):
output_dir = a
elif o in ('-p', '--player'):
player = a
elif o in ('-x', '--http-proxy'):
proxy = a
elif o in ('-S', '--sogou'):
sogou_proxy = ("0.0.0.0", 0)
elif o in ('--sogou-proxy',):
sogou_proxy = parse_host(a)
elif o in ('--sogou-env',):
sogou_env = a
else:
log.e("try 'you-get --help' for more options")
sys.exit(2)
if not args:
if sogou_proxy is not None:
try:
if sogou_env is not None:
server = sogou_proxy_server(sogou_proxy, network_env=sogou_env)
else:
server = sogou_proxy_server(sogou_proxy)
server.serve_forever()
except KeyboardInterrupt:
if traceback:
raise
else:
sys.exit()
else:
print(help)
sys.exit()
set_http_proxy(proxy)
try:
download_main(download, download_playlist, args, playlist, output_dir, merge, info_only)
except KeyboardInterrupt:
if traceback:
raise
else:
sys.exit(1)<|fim▁end|> | def ungzip(data): |
<|file_name|>update-plugin.ts<|end_file_name|><|fim▁begin|>import {PluginBase, Workflow, StepNode} from "cwl-svg";
import {AppHelper} from "../../../core/helpers/AppHelper";
import {StepModel} from "cwlts/models";
export class UpdatePlugin extends PluginBase {
private css = {
plugin: "__plugin-update",
update: "__update-has-update"
};
private updateMap = new Map<string, boolean>();
registerWorkflow(workflow: Workflow) {
super.registerWorkflow(workflow);
this.workflow.svgRoot.classList.add(this.css.plugin);
}
hasUpdate(step: StepModel): boolean {
        // Map.get() yields undefined for unknown steps; coerce to a boolean
        return this.updateMap.get(step.connectionId) === true;
}
updateStep(step: StepModel) {
const stepEl = this.findStep(step);
stepEl.classList.remove(this.css.update);
this.updateMap.set(step.connectionId, false);
new StepNode(stepEl, step as any).update();
}
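    // Typical flow (a sketch; "plugin" and "step" are assumed to exist in
    // the host application): after applyRevisionMap() below has flagged a
    // step, the UI can refresh it like so:
    //
    //   if (plugin.hasUpdate(step)) { plugin.updateStep(step); }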
applyRevisionMap(appRevisionMap: { [appID: string]: number }): void {
this.workflow.model.steps.forEach(step => {
            // A non-SBG app may be embedded in an SBG workflow; skip steps without an "sbg:id"
if (!step.run || !step.run.customProps || !step.run.customProps["sbg:id"]) {
return;
}
const revisionless = AppHelper.getAppIDWithRevision(step.run.customProps["sbg:id"], null);
const revision = AppHelper.getRevision(step.run.customProps["sbg:id"]);
if (appRevisionMap[revisionless] === undefined) {<|fim▁hole|>
            const hasUpdate = appRevisionMap[revisionless] > revision;
this.updateMap.set(step.connectionId, hasUpdate);
if (hasUpdate) {
const stepEl = this.findStep(step as any);
stepEl.classList.add(this.css.update);
}
});
}
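    // Illustrative call (hypothetical app ID): mark every step whose latest
    // known revision is newer than the one embedded in the workflow.
    //
    //   plugin.applyRevisionMap({ "user/project/my-tool": 7 });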
private findStep(step: StepModel): SVGElement {
return this.workflow.svgRoot.querySelector(`.step[data-connection-id="${step.connectionId}"]`) as SVGElement;
}
}<|fim▁end|> | return;
} |
<|file_name|>loop_control.py<|end_file_name|><|fim▁begin|>def main():<|fim▁hole|> print(c, end='')
else:
print(' else')
print()
for i in range(10, 0, -1):
print(i, end=' ')
if __name__ == "__main__":
main()<|fim▁end|> | s = 'this is a string'
for c in s: |
<|file_name|>test_api_utils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 NTT, OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from congress.api import api_utils
from congress.api import webservice
from congress.tests import base
<|fim▁hole|> def setUp(self):
super(TestAPIUtils, self).setUp()
def test_create_table_dict(self):
table_name = 'fake_table'
schema = {'fake_table': ({'name': 'id', 'desc': None},
{'name': 'name', 'desc': None})}
expected = {'table_id': table_name,
'columns': [{'name': 'id', 'description': None},
{'name': 'name', 'description': None}]}
result = api_utils.create_table_dict(table_name, schema)
self.assertEqual(expected, result)
def test_get_id_from_context_ds_id(self):
context = {'ds_id': 'datasource id'}
expected = ('datasource id', 'datasource id')
result = api_utils.get_id_from_context(context)
self.assertEqual(expected, result)
def test_get_id_from_context_policy_id(self):
context = {'policy_id': 'policy id'}
expected = ('__engine', 'policy id')
result = api_utils.get_id_from_context(context)
self.assertEqual(expected, result)
def test_get_id_from_context_with_invalid_context(self):
context = {'invalid_id': 'invalid id'}
self.assertRaises(webservice.DataModelException,
api_utils.get_id_from_context, context)<|fim▁end|> | class TestAPIUtils(base.SqlTestCase):
|