#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Test for the MDF
import os
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__),".."))
from schema import Range,Variable,Table
from census_spec_scanner import CensusSpec
TEST_FNAME = os.path.join(os.path.dirname(__file__), "docx_test/test_file_layout.docx")
def test_mdf_reader():
cs = CensusSpec()
cs.load_schema_from_file(TEST_FNAME)
mdf = list(cs.tables())
assert type(mdf)==list
assert len(mdf) == 1 # demo file has but a single table
assert type(mdf[0]) == Table
assert mdf[0].name == "Test_Apples"
if __name__=="__main__":
test_mdf_reader()
|
python
|
import time
import pyqtgraph as pg
class UserTestUi(object):
def __init__(self, expected_display, current_display):
pg.mkQApp()
self.widget = pg.QtGui.QSplitter(pg.QtCore.Qt.Vertical)
self.widget.resize(1600, 1000)
self.display_splitter = pg.QtGui.QSplitter(pg.QtCore.Qt.Horizontal)
self.widget.addWidget(self.display_splitter)
self.display1 = expected_display
self.display2 = current_display
self.display_splitter.addWidget(self.display1.widget)
self.display_splitter.addWidget(self.display2.widget)
self.ctrl = pg.QtGui.QWidget()
self.widget.addWidget(self.ctrl)
self.ctrl_layout = pg.QtGui.QVBoxLayout()
self.ctrl.setLayout(self.ctrl_layout)
self.diff_widget = pg.DiffTreeWidget()
self.ctrl_layout.addWidget(self.diff_widget)
self.pass_btn = pg.QtGui.QPushButton('pass')
self.fail_btn = pg.QtGui.QPushButton('fail')
self.ctrl_layout.addWidget(self.pass_btn)
self.ctrl_layout.addWidget(self.fail_btn)
self.pass_btn.clicked.connect(self.pass_clicked)
self.fail_btn.clicked.connect(self.fail_clicked)
self.last_btn_clicked = None
self.widget.setSizes([750, 250])
def pass_clicked(self):
self.last_btn_clicked = 'pass'
def fail_clicked(self):
self.last_btn_clicked = 'fail'
def user_passfail(self):
self.widget.show()
while True:
pg.QtGui.QApplication.processEvents()
last_btn_clicked = self.last_btn_clicked
self.last_btn_clicked = None
if last_btn_clicked == 'fail' or not self.widget.isVisible():
raise Exception("User rejected test result.")
elif last_btn_clicked == 'pass':
break
time.sleep(0.03)
def show_results(self, expected, current):
self.diff_widget.setData(expected, current)
self.display2.show_result(current)
self.display1.show_result(expected)
def clear(self):
self.display1.clear()
self.display2.clear()
self.diff_widget.setData(None, None)
|
python
|
from autohandshake.src import HandshakeBrowser
from autohandshake.src.Pages.StudentProfilePage import StudentProfilePage
from autohandshake.src.constants import BASE_URL
class ViewAsStudent:
"""
A sub-session in which the user logs in as a student.
Should be used as a context manager. Example:
::
        with HandshakeSession(school_url, email) as browser:
            with ViewAsStudent(student_id, browser):
                # do something
"""
def __init__(self, student_id: int, browser: HandshakeBrowser, stay_on_page: bool = False):
"""
:param student_id: the numeric Handshake id of the student to view as
:type student_id: int
:param browser: a logged-in Handshake browser with a STAFF user type
:type browser: HandshakeBrowser
:param stay_on_page: whether or not to stay on the same page when logging
back out of the "View as Student" session. If False,
navigate back to the Handshake homepage when the
session is over. Defaults to False.
:type stay_on_page: bool
"""
self._id = student_id
self._browser = browser
self._stay_on_page = stay_on_page
def __enter__(self):
"""
Log in as the specified student.
"""
profile_page = StudentProfilePage(self._id, self._browser)
profile_page.view_as_student()
def __exit__(self, exc_type, exc_val, exc_tb):
"""Stop viewing as the student and return to the career services view."""
self._browser.click_element_by_xpath('//a[@href="/users/stop_viewing_as"]')
self._browser.update_constants()
if not self._stay_on_page:
self._browser.get(BASE_URL)
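# Minimal usage sketch mirroring the class docstring above. The import path,
# URL, email, and student id below are placeholders/assumptions, not values
# defined in this module.
if __name__ == "__main__":
    from autohandshake import HandshakeSession  # assumed package-root export

    school_url = "https://app.joinhandshake.com"  # hypothetical school URL
    email = "staff@example.edu"                   # hypothetical staff login
    with HandshakeSession(school_url, email) as browser:
        with ViewAsStudent(1234567, browser):
            pass  # interact with Handshake as the student here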
|
python
|
hensu = "HelloWorld"
print(hensu)
|
python
|
import os
from pathlib import Path
from dotenv import find_dotenv, load_dotenv
project_dir = Path(__file__).resolve().parents[1]
load_dotenv(find_dotenv())
LOGLEVEL = os.getenv("LOGLEVEL", "INFO").upper()
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "[%(asctime)s][%(levelname)-5s][%(name)s] - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "standard",
"level": "INFO",
},
"rolling_file_debug": {
"class": "logging.handlers.RotatingFileHandler",
"filename": project_dir / "logs/debug.log",
"formatter": "standard",
"level": "DEBUG",
"maxBytes": 1024 * 1024,
"backupCount": 10,
},
"rolling_file_warning": {
"class": "logging.handlers.RotatingFileHandler",
"filename": project_dir / "logs/warnings.log",
"formatter": "standard",
"level": "WARNING",
"maxBytes": 1024 * 1024,
"backupCount": 10,
},
},
"root": {
"handlers": ["console", "rolling_file_debug", "rolling_file_warning"],
"level": LOGLEVEL,
},
"loggers": {
"__main__": {"handlers": [], "propagate": True},
"{{ cookiecutter.module_name }}": {"handlers": [], "propagate": True},
},
}
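# How a project would typically consume the dictionary above: hand it to the
# standard-library dictConfig at startup. Sketch only; it assumes the logs/
# directory referenced by the file handlers exists (or is created first).
if __name__ == "__main__":
    import logging
    import logging.config

    (project_dir / "logs").mkdir(parents=True, exist_ok=True)
    logging.config.dictConfig(LOGGING)
    logging.getLogger(__name__).info("Logging configured at level %s", LOGLEVEL)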
|
python
|
import numpy as np
import pytest
import torch
from thgsp.graphs.generators import random_graph
from thgsp.sampling.rsbs import (
cheby_coeff4ideal_band_pass,
estimate_lk,
recon_rsbs,
rsbs,
)
from ..utils4t import devices, float_dtypes, snr_and_mse
def test_cheby_coeff4ideal_band_pass():
order = 30
    coeff = cheby_coeff4ideal_band_pass(0, 1, 0, 2, order)
    assert coeff.shape == (order + 1,)
    print(coeff)
@pytest.mark.parametrize("device", devices)
@pytest.mark.parametrize("dtype", float_dtypes)
class TestRsbs:
    def test_estimate_lk_on_random_graph(self, dtype, device):
N = 100
g = random_graph(N, dtype=dtype, device=device)
lmax = g.max_frequency(lap_type="comb")
print(lmax)
band_limit = 10
lambda_k, cum_coh = estimate_lk(
g, band_limit, lmax=lmax, lap_type="comb", verbose=False, num_estimation=1
)
print(lambda_k)
print(cum_coh)
@pytest.mark.parametrize("return_list", [True, False])
def test_rsbs(self, dtype, device, return_list):
N = 100
k = 50
M = 30
appropriate_num_rv = np.int32(2 * np.round(np.log(N)))
g = random_graph(N, dtype=dtype, device=device)
nodes, coh = rsbs(g, M, k, num_rv=appropriate_num_rv, return_list=return_list)
print(nodes)
if return_list:
assert isinstance(nodes, list)
else:
assert isinstance(nodes, torch.Tensor)
def test_rsbs_recon(self, dtype, device):
N = 10
k = 5
M = 5
appropriate_num_rv = np.int32(2 * np.round(np.log(N)))
g = random_graph(N, 0.3, dtype=dtype, device=device, seed=2021)
print(g.device())
# since scikit-umfpack requires double scalars.
if dtype == torch.double:
nodes, coh = rsbs(g, M, k, num_rv=appropriate_num_rv, return_list=True)
f = torch.rand(N, 1, dtype=dtype, device=device)
f = f / f.norm()
f_hat = recon_rsbs(
f[nodes], S=nodes, L=g.L("comb"), cum_coh=coh, mu=0.1, reg_order=1
)
if torch.any(torch.isnan(f_hat)):
print(
"This case leads to numerical instability and thus would be skipped"
)
else:
s, m = snr_and_mse(f_hat, f)
assert m < 1
|
python
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from docker import errors
from oslo_config import cfg
from requests import exceptions as req_exceptions
from magnum.common import docker_utils
from magnum.tests.functional.python_client_base import BayTest
CONF = cfg.CONF
CONF.import_opt('docker_remote_api_version', 'magnum.common.docker_utils',
group='docker')
CONF.import_opt('default_timeout', 'magnum.common.docker_utils',
group='docker')
class TestSwarmAPIs(BayTest):
"""This class will cover swarm bay basic functional testing.
Will test all kinds of container action with tls_disabled=False mode.
"""
coe = "swarm"
baymodel_kwargs = {
"tls_disabled": False,
"network_driver": None,
"volume_driver": None,
"fixed_network": '192.168.0.0/24',
"labels": {}
}
@classmethod
def setUpClass(cls):
super(TestSwarmAPIs, cls).setUpClass()
cls.bay_is_ready = None
def setUp(self):
super(TestSwarmAPIs, self).setUp()
if self.bay_is_ready is True:
return
        # Note(eliqiao): The docker/magnum clients connect to the swarm
        # service running on the master node at bay.api_address (which
        # includes the listen port). That service is not ready immediately
        # after the bay is created, so wait an acceptable amount of time for
        # it to start. Without this wait, any API call fails with
        # 'ConnectionError: [Errno 111] Connection refused'.
msg = ("If you see this error in the functional test, it means "
"the docker service took too long to come up. This may not "
"be an actual error, so an option is to rerun the "
"functional test.")
        if self.bay_is_ready is False:
            # The bay is not ready, so there is nothing to test. Raise a
            # meaningful exception indicating that CA setup failed after bay
            # creation; a `recheck` is the best next step on the gate.
            raise Exception(msg)
url = self.cs.bays.get(self.bay.uuid).api_address
        # Note(eliqiao): docker_utils.CONF.docker.default_timeout is 10s,
        # which proved too short on the gate: container creation timed out
        # because the image has to be pulled the first time a container is
        # created. Use 180s instead.
docker_api_time_out = 180
self.docker_client = docker_utils.DockerHTTPClient(
url,
CONF.docker.docker_remote_api_version,
docker_api_time_out,
client_key=self.key_file,
client_cert=self.cert_file,
ca_cert=self.ca_file)
self.docker_client_non_tls = docker_utils.DockerHTTPClient(
url,
CONF.docker.docker_remote_api_version,
docker_api_time_out)
def _container_operation(self, func, *args, **kwargs):
# NOTE(hongbin): Swarm bay occasionally aborts the connection, so we
# re-try the operation several times here. In long-term, we need to
# investigate the cause of this issue. See bug #1583337.
for i in range(150):
try:
self.LOG.info("Calling function " + func.__name__)
return func(*args, **kwargs)
except req_exceptions.ConnectionError:
self.LOG.info("Connection aborted on calling Swarm API. "
"Will retry in 2 seconds.")
except errors.APIError as e:
if e.response.status_code != 500:
raise
self.LOG.info("Internal Server Error: " + str(e))
time.sleep(2)
raise Exception("Cannot connect to Swarm API.")
def _create_container(self, **kwargs):
image = kwargs.get('image', 'docker.io/cirros')
command = kwargs.get('command', 'ping -c 1000 8.8.8.8')
return self._container_operation(self.docker_client.create_container,
image=image, command=command)
def test_start_stop_container_from_api(self):
# Leverage docker client to create a container on the bay we created,
# and try to start and stop it then delete it.
resp = self._create_container(image="docker.io/cirros",
command="ping -c 1000 8.8.8.8")
resp = self._container_operation(self.docker_client.containers,
all=True)
container_id = resp[0].get('Id')
self._container_operation(self.docker_client.start,
container=container_id)
resp = self._container_operation(self.docker_client.containers)
self.assertEqual(1, len(resp))
resp = self._container_operation(self.docker_client.inspect_container,
container=container_id)
self.assertTrue(resp['State']['Running'])
self._container_operation(self.docker_client.stop,
container=container_id)
resp = self._container_operation(self.docker_client.inspect_container,
container=container_id)
self.assertFalse(resp['State']['Running'])
self._container_operation(self.docker_client.remove_container,
container=container_id)
resp = self._container_operation(self.docker_client.containers)
self.assertEqual([], resp)
def test_access_with_non_tls_client(self):
self.assertRaises(req_exceptions.SSLError,
self.docker_client_non_tls.containers)
|
python
|
#!/usr/bin/env python
#encoding=utf-8
import numpy as np
from random import choice, shuffle, uniform
#from data_factory import PlotFactory
class DataFactory():
def __init__(self, n=1):
self.plt_max=5
self.nmb_plt=None
        if n < self.plt_max:
            self.nmb_plt = n
        else:
            print("Maximum possible plots are", self.plt_max, ", n defaults to", self.plt_max)
            self.nmb_plt = self.plt_max  # default to the maximum possible
self.name=["temperature", "pressure", "humidity", "acceleration", "magnetic_field"]
self.func=[np.sin, np.cos, self.func1, self.func2, self.func3] #function
self.a=[1,2,3,4,5] # amplitude
self.b=[1,2,3,4,5] # bias
self.s=[1,2,3,4,5] # shift
self.f=[1,1,2,2,5] # frequency
self.noise=[1,1,2,3,5] # noise (supposed to be multiplied by 0.01)
self.randomize()
def randomize(self):
print("Shuffle all lists, this way iterating over\nthem has the same result as random choice.")
        shuffle(self.name)
        shuffle(self.func)
        shuffle(self.a)
        shuffle(self.b)
        shuffle(self.s)
        shuffle(self.f)
        shuffle(self.noise)
# if n<self._max_plts:
# for p in range(n):
# key=self.rand_name.remove( choice(self.name) )
# print(self.rand_name)
# # self.plots[]
# else:
# print("Maximum number of plots available is", self._max_plts)
def produce_data(self,x):
data = dict()
for i in range(self.nmb_plt):
name=self.name[i]
func=self.func[i]
a = self.a[i] # amplitude
b = self.b[i] # bias
s = self.s[i] # shift
f = self.f[i] # frequency
u = self.noise[i]*0.2
noise = uniform(-u,u)
data[name]=np.array([ a*func( (x-s)*f ) + b + noise])
return data
def func1(self,x):
return ( np.sin(x)*np.cos(x) / 2.0 )
def func2(self,x):
return ( np.sin(x) + np.sin(x/2) + np.sin(x/4) )
def func3(self,x):
return ( np.sin(x)*np.sin(x/4) )
|
python
|
from .base import BaseNewsvendor, DataDrivenMixin
from ..utils.validation import check_cu_co
from keras.models import Sequential
from keras.layers import Dense
import keras.backend as K
from sklearn.utils.validation import check_is_fitted
import numpy as np
ACTIVATIONS = ['elu', 'selu', 'linear', 'tanh', 'relu', 'softmax', 'softsign', 'softplus',
'sigmoid', 'hard_sigmoid', 'exponential']
class DeepLearningNewsvendor(BaseNewsvendor, DataDrivenMixin):
"""A newsvendor based on deep learning
Parameters
----------
cu : {array-like of shape (n_outputs,), Number or None}, default=None
The underage costs per unit. If None, then underage costs are one
for each target variable
co : {array-like of shape (n_outputs,), Number or None}, default=None
The overage costs per unit. If None, then overage costs are one
for each target variable
hidden_layers : {'auto', 'custom'}, default='auto'
Whether to use a automated or customized hidden layer structure.
- When set to 'auto' the network will use two hidden layers. The first
with 2*n_features neurons and 'relu' as activation function the second
one with n_features neurons and 'linear' as activation function
- When set to 'custom' the settings specified in both parameters 'neurons' and
'activations' will be used to build the hidden layers of the network
neurons : list, default=[100]
The ith element represents the number of neurons in the ith hidden layer
Only used when hidden_layers='custom'.
activations : list, default=['relu']
The ith element of the list represents the activation function of the ith layer.
Valid activation functions are: 'elu', 'selu', 'linear', 'tanh', 'relu', 'softmax',
'softsign', 'softplus','sigmoid', 'hard_sigmoid', 'exponential'.
Only used when hidden_layers='custom'.
optimizer: {'adam', 'sgd'}, default='adam'
The optimizer to be used.
epochs: int, default=100
Number of epochs to train the model
verbose: int 0, 1, or 2, default=1
Verbosity mode. 0 = silent, 1 = progress bar, 2 = one line per epoch.
Attributes
----------
model_ : tensorflow.keras.Sequential
The underlying model
n_features_ : int
The number of features when ``fit`` is performed.
n_outputs_ : int
The number of outputs.
cu_ : ndarray, shape (n_outputs,)
Validated underage costs.
co_ : ndarray, shape (n_outputs,)
Validated overage costs.
References
----------
    .. [1] Afshin Oroojlooyjadid, Lawrence V. Snyder, Martin Takáč,
"Applying Deep Learning to the Newsvendor Problem", 2018.
Examples
--------
>>> from ddop.datasets.load_datasets import load_data
>>> from ddop.newsvendor import DeepLearningNewsvendor
>>> from sklearn.model_selection import train_test_split
>>> data = load_data("yaz_steak.csv")
>>> X = data.iloc[:,0:24]
>>> Y = data.iloc[:,24]
>>> cu,co = 15,10
>>> X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.25)
>>> mdl = DeepLearningNewsvendor(cu, co)
>>> mdl.fit(X_train, Y_train)
>>> mdl.score(X_test, Y_test)
[64.62898917]
"""
def __init__(self, cu, co, hidden_layers='auto', neurons=[100],
activations=['relu'], optimizer='adam', epochs=100, verbose=1):
self.hidden_layers = hidden_layers
self.neurons = neurons
self.activations = activations
self.optimizer = optimizer
self.epochs = epochs
self.verbose = verbose
super().__init__(
cu=cu,
co=co)
def _nv_loss(self, cu, co):
"""Create a newsvendor loss function with the given under- and overage costs"""
def customized_loss(y_true, y_pred):
self.tensor_ = y_true
loss = K.switch(K.less(y_pred, y_true), cu * (y_true - y_pred), co * (y_pred - y_true))
return K.sum(loss)
return customized_loss
def _create_model(self):
hidden_layers = self.hidden_layers
neurons = self.neurons
activations = self.activations
n_features = self.n_features_
n_outputs = self.n_outputs_
model = Sequential()
if hidden_layers == 'auto':
model.add(Dense(2 * n_features, activation='relu', input_dim=n_features))
model.add(Dense(n_features))
model.add(Dense(n_outputs))
else:
for size, activation in zip(neurons, activations):
model.add(Dense(units=size, activation=activation))
model.add(Dense(n_outputs))
model.build((None, n_features))
model.compile(loss=self._nv_loss(self.cu_, self.co_), optimizer=self.optimizer)
return model
def fit(self, X, y):
"""Fit the model to the training set (X, y).
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like of shape (n_samples, n_outputs)
The target values.
Returns
----------
self : DeepLearningNewsvendor
Fitted estimator
"""
# Validate input parameters
self._validate_hyperparameters()
X, y = self._validate_data(X, y, multi_output=True)
if y.ndim == 1:
y = np.reshape(y, (-1, 1))
# Determine output settings
self.n_features_ = X.shape[1]
self.n_outputs_ = y.shape[1]
# Check and format under- and overage costs
self.cu_, self.co_ = check_cu_co(self.cu, self.co, self.n_outputs_)
model = self._create_model()
model.fit(X, y, epochs=self.epochs, verbose=self.verbose)
self.model_ = model
return self
def _validate_hyperparameters(self):
# Make sure self.neurons is a list
neurons = self.neurons
if not hasattr(neurons, "__iter__"):
neurons = [neurons]
neurons = list(neurons)
# Make sure self.activations is a list
activations = self.activations
if not hasattr(activations, "__iter__"):
activations = [activations]
activations = list(activations)
if self.hidden_layers == "custom" and np.any(np.array(neurons) <= 0):
raise ValueError("neurons must be > 0, got %s." %
self.neurons)
if self.hidden_layers == "custom" and \
np.any(np.array([activation not in ACTIVATIONS for activation in activations])):
raise ValueError("Invalid activation function in activations. Supported are %s but got %s"
% (list(ACTIVATIONS), activations))
if self.hidden_layers not in ["auto", "custom"]:
raise ValueError("hidden_layers %s is not supported." % self.hidden_layers)
if self.hidden_layers == "custom" and len(neurons) != len(activations):
raise ValueError("When customizing the hidden layers neurons and activations must have same "
"length but neurons is of length %s and activations %s"
% (len(neurons), len(activations)))
if self.verbose not in [0, 1, 2]:
raise ValueError("verbose must be either 0, 1 or 2, got %s." %
self.verbose)
def predict(self, X):
"""Predict values for X.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The input samples to predict.
Returns
----------
y : array-like of shape (n_samples, n_outputs)
The predicted values
"""
check_is_fitted(self)
pred = self.model_.predict(X)
return pred
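# Illustrative only: the asymmetric cost that _nv_loss builds with Keras
# backend ops, rewritten in plain NumPy for a single output so the cu/co
# trade-off is easy to check by hand. The helper name below is ours, not part
# of the estimator's API.
def _numpy_nv_loss(y_true, y_pred, cu, co):
    under = cu * (y_true - y_pred)  # ordered too little
    over = co * (y_pred - y_true)   # ordered too much
    return float(np.sum(np.where(y_pred < y_true, under, over)))

if __name__ == "__main__":
    # With cu=15, co=10 (the docstring example): under-predicting a demand of
    # 10 by 2 units costs 15 * 2 = 30; over-predicting by 2 costs 10 * 2 = 20.
    print(_numpy_nv_loss(np.array([10.0]), np.array([8.0]), 15, 10))   # 30.0
    print(_numpy_nv_loss(np.array([10.0]), np.array([12.0]), 15, 10))  # 20.0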
|
python
|
# Copyright 2018 eShares, Inc. dba Carta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import threading
from typing import Iterator, Optional, Tuple, cast
from .interface import AbstractFeatureFlagStore, FlagDoesNotExistError
from .storage import FeatureFlagStoreItem, FeatureFlagStoreMeta
from .util.date import now
logger = logging.getLogger(__name__)
class ConsulFeatureFlagStore(AbstractFeatureFlagStore):
def __init__(self, consul, base_key="features"):
self._cache = {}
self._consul = consul
self.base_key = base_key
self._start()
def _start(self):
logger.debug("Spawning a thread to track changes in consul")
self._thread = threading.Thread(target=self._watch)
self._thread.daemon = True
self._thread.start()
def _watch(self):
        index = None
        while True:
            # Blocking query: passing the previous index makes consul return
            # only when the data under base_key has changed.
            index, data = self._consul.kv.get(self.base_key, recurse=True, index=index)
            self._parse_data(data)
def _parse_data(self, data: Tuple[dict]):
if data is None:
return
for item in data:
serialized = item["Value"]
if serialized is None:
continue
deserialized = FeatureFlagStoreItem.deserialize(serialized)
self._set_item_in_cache(item["Key"], deserialized)
def _set_item_in_cache(self, key: str, item: FeatureFlagStoreItem):
self._cache[key] = item
def create(
self,
feature_name: str,
is_enabled: bool = False,
client_data: Optional[dict] = None,
) -> FeatureFlagStoreItem:
item = FeatureFlagStoreItem(
feature_name, is_enabled, FeatureFlagStoreMeta(now(), client_data)
)
return self._save(item)
def _save(self, item: FeatureFlagStoreItem) -> FeatureFlagStoreItem:
self._consul.kv.put(self._make_key(item.feature_name), item.serialize())
self._set_item_in_cache(item.feature_name, item)
return item
def get(self, feature_name: str) -> Optional[FeatureFlagStoreItem]:
return self._cache.get(self._make_key(feature_name))
def _make_key(self, feature_name: str) -> str:
return "/".join([self.base_key, feature_name])
def set(self, feature_name: str, is_enabled: bool):
existing = self.get(feature_name)
if existing is None:
self.create(feature_name, is_enabled)
return
item = FeatureFlagStoreItem(
feature_name, is_enabled, FeatureFlagStoreMeta.from_dict(existing.meta)
)
self._save(item)
def delete(self, feature_name: str):
self._consul.kv.delete(self._make_key(feature_name))
def list(
self, limit: Optional[int] = None, offset: int = 0
) -> Iterator[FeatureFlagStoreItem]:
feature_names = sorted(self._cache.keys())[offset:]
if limit is not None:
feature_names = feature_names[:limit]
for feature_name in feature_names:
yield cast(FeatureFlagStoreItem, self.get(feature_name))
def set_meta(self, feature_name: str, meta: FeatureFlagStoreMeta):
existing = self.get(feature_name)
if existing is None:
raise FlagDoesNotExistError(
"Feature %s does not exist" % feature_name
) # noqa: E501
item = FeatureFlagStoreItem(feature_name, existing.raw_is_enabled, meta)
self._save(item)
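# Minimal usage sketch. It assumes the python-consul package provides the
# client object this store expects; host/port are illustrative defaults, and
# reads go through the in-memory cache populated by the background watcher,
# so a freshly written flag may take a moment to become visible.
if __name__ == "__main__":
    import consul

    store = ConsulFeatureFlagStore(consul.Consul(host="127.0.0.1", port=8500))
    store.create("example_flag", is_enabled=True)
    item = store.get("example_flag")
    if item is not None:
        print("cached flag:", item.feature_name)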
|
python
|
MAX_LENGTH_TEXT_MESSAGE = 800
MAX_LENGTH_TEXT_SUBJECT = 80
TEXT_SIZE = "The text must be between 0 and 800 characters."
SUBJECT_SIZE = "Subject must be between 0 and 80 characters."
USER_EXISTS = "User does not exist."
|
python
|
default_app_config = 'features.apps.FeaturesConfig'
|
python
|
import logging
import abc
import traceback
from media.monitor.pure import LazyProperty
appname = 'root'
def setup_logging(log_path):
""" Setup logging by writing log to 'log_path' """
#logger = logging.getLogger(appname)
logging.basicConfig(filename=log_path, level=logging.DEBUG)
def get_logger():
""" in case we want to use the common logger from a procedural
interface """
return logging.getLogger()
class Loggable(object):
""" Any class that wants to log can inherit from this class and
automatically get a logger attribute that can be used like:
self.logger.info(...) etc. """
__metaclass__ = abc.ABCMeta
@LazyProperty
def logger(self): return get_logger()
def unexpected_exception(self,e):
""" Default message for 'unexpected' exceptions """
        self.fatal_exception("'Unexpected' exception has occurred:", e)
def fatal_exception(self, message, e):
""" Prints an exception 'e' with 'message'. Also outputs the
traceback. """
self.logger.error( message )
self.logger.error( str(e) )
self.logger.error( traceback.format_exc() )
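# Sketch of the intended pattern: inherit from Loggable and the lazy `logger`
# attribute becomes available without any per-class setup. Illustrative only;
# the class name below is hypothetical.
class ExampleWatcher(Loggable):
    def run(self):
        self.logger.info("watcher started")
        try:
            raise RuntimeError("simulated failure")
        except Exception as e:
            self.unexpected_exception(e)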
|
python
|
import time
import telnetlib
class Telnet:
#
    # Developed by Felipe Lyp
#
def connect(self, host, port, username, password):
self.telnet = telnetlib.Telnet(host, port)
self.telnet.read_until(b"Login:")
self.telnet.write(username.encode('ascii') + b"\n")
if password:
self.telnet.read_until(b"Password:")
self.telnet.write(password.encode('ascii') + b"\n")
self.send('en')
self.send(password)
def send(self, cmd, encode_ascii = True):
if encode_ascii:
self.telnet.write(cmd.encode('ascii') + b"\n")
else:
self.telnet.write(cmd.encode())
time.sleep(1)
def data(self):
return str(self.telnet.read_very_eager().decode('ascii'))
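# Hypothetical usage; the host, port, and credentials below are placeholders.
if __name__ == "__main__":
    session = Telnet()
    session.connect("192.0.2.10", 23, "admin", "secret")
    session.send("show version")
    print(session.data())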
|
python
|
import random
class Enemy:
"""
Automatically inherits object class from python3
"""
def __init__(self, name="Enemy", hit_points=0, lives=1):
self.name = name
self.hit_points = hit_points
self.lives = lives
self.alive = True
def take_damage(self, damage):
remaining_points = self.hit_points - damage
if remaining_points >= 0:
self.hit_points = remaining_points
print("I took {} points damage and have {} left".format(damage, self.hit_points))
else:
self.lives -= 1
if self.lives > 0:
print("{0.name} lost a life".format(self))
else:
print("{0.name} is dead".format(self))
self.alive = False
def __str__(self):
return """Name: {0.name}, Lives: {0.lives},Hit points: {0.hit_points} Alive: {0.alive}""".format(self)
class Troll(Enemy):
def __init__(self, name):
# super(Troll, self).__init__(name=name, lives=1, hit_points=23)
super().__init__(name=name, lives=1, hit_points=23)
def grunt(self):
print(f'{self.name} stomp you')
class Vampire(Enemy):
def __init__(self, name):
super().__init__(name=name, lives=3, hit_points=12)
def dodges(self):
if random.randint(1, 3) == 3:
print("***** {0.name} dodges *****".format(self))
return True
else:
return False
    def take_damage(self, damage):
        # A successful dodge avoids the damage entirely
        if not self.dodges():
            super().take_damage(damage=damage)
class VampireKing(Vampire):
def __init__(self, name):
super().__init__(name=name)
self.hit_points = 140
def take_damage(self, damage):
qtr_damage = damage // 4
super().take_damage(qtr_damage)
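# Short illustrative run of the classes above; output varies because
# Vampire.dodges() is random.
if __name__ == "__main__":
    ugly_troll = Troll("Ugly")
    ugly_troll.grunt()
    vlad = VampireKing("Vlad")
    print(vlad)
    vlad.take_damage(40)  # a VampireKing only takes a quarter of the damage
    print(vlad)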
|
python
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Service manager module """
import logging
import keystone.backends.api as api
logger = logging.getLogger(__name__) # pylint: disable=C0103
class Manager(object):
def __init__(self):
self.driver = api.SERVICE
def create(self, service):
""" Create a new service """
return self.driver.create(service)
def get(self, service_id):
""" Returns service by ID """
return self.driver.get(service_id)
def get_by_name(self, name):
""" Returns service by name """
return self.driver.get_by_name(name=name)
def get_all(self):
""" Returns all services """
return self.driver.get_all()
def get_page(self, marker, limit):
""" Get one page of services list """
return self.driver.get_page(marker, limit)
def get_page_markers(self, marker, limit):
""" Calculate pagination markers for services list """
return self.driver.get_page_markers(marker, limit)
def get_by_name_and_type(self, name, service_type):
""" Returns service by name and type """
return self.driver.get_by_name_and_type(name, service_type)
# pylint: disable=E1103
def update(self, service):
""" Update service """
return self.driver.update(service['id'], service)
def delete(self, service_id):
""" Delete service """
self.driver.delete(service_id)
|
python
|
import argparse
import pickle
import time
import os
import logging
import numpy as np
import theano as th
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
import lasagne
import lasagne.layers as ll
from lasagne.layers import dnn, batch_norm
import nn
logging.basicConfig(level=logging.INFO)
# settings
parser = argparse.ArgumentParser()
parser.add_argument('--seed', default=1, type=int)
parser.add_argument('--batch_size', default=100, type=int)
parser.add_argument('--activation', default='relu', type=str)
parser.add_argument('--learning_rate', default=0.001, type=float)
args = parser.parse_args()
logging.info(args)
# fixed random seeds
rng = np.random.RandomState(args.seed)
theano_rng = MRG_RandomStreams(rng.randint(2 ** 15))
lasagne.random.set_rng(np.random.RandomState(rng.randint(2 ** 15)))
# setup output
time_str = time.strftime("%m-%d-%H-%M", time.gmtime())
exp_dir = "./data/" + args.activation + "_" + time_str + "_" + "{}".format(args.learning_rate).replace(".", "p")
try:
    os.stat(exp_dir)
except OSError:
    os.makedirs(exp_dir)
logging.info("OPENING " + exp_dir + '/results.csv')
results_file = open(exp_dir + '/results.csv', 'w')
results_file.write('epoch, time, train_error, test_error\n')
results_file.flush()
# load CIFAR-10 data
def unpickle(file):
fo = open(file, 'rb')
d = pickle.load(fo, encoding='latin1')
fo.close()
return {'x': np.cast[th.config.floatX]((-127.5 + d['data'].reshape((10000,3,32,32)))/128.), 'y': np.array(d['labels']).astype(np.uint8)}
print('Loading data')
train_data = [unpickle('/home-nfs/dan/cifar_data/cifar-10-batches-py/data_batch_' + str(i)) for i in range(1,6)]
trainx = np.concatenate([d['x'] for d in train_data],axis=0)
trainy = np.concatenate([d['y'] for d in train_data])
test_data = unpickle('/home-nfs/dan/cifar_data/cifar-10-batches-py/test_batch')
testx = test_data['x']
testy = test_data['y']
nr_batches_train = int(trainx.shape[0]/args.batch_size)
nr_batches_test = int(testx.shape[0]/args.batch_size)
print('Whitening')
# whitening
whitener = nn.ZCA(x=trainx)
trainx_white = whitener.apply(trainx)
testx_white = whitener.apply(testx)
print('Done whitening')
if args.activation == 'relu':
f = nn.relu
elif args.activation == 'elu':
f = lasagne.nonlinearities.elu
elif args.activation == 'gelu':
f = nn.gelu
else:
assert False, 'Need "relu" "elu" or "gelu" nonlinearity as input name'
x = T.tensor4()
layers = [ll.InputLayer(shape=(None, 3, 32, 32), input_var=x)]
layers.append(ll.GaussianNoiseLayer(layers[-1], sigma=0.15))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f)))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f)))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 96, (3,3), pad=1, nonlinearity=f)))
layers.append(ll.MaxPool2DLayer(layers[-1], 2))
layers.append(ll.DropoutLayer(layers[-1], p=0.5))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f)))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f)))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=1, nonlinearity=f)))
layers.append(ll.MaxPool2DLayer(layers[-1], 2))
layers.append(ll.DropoutLayer(layers[-1], p=0.5))
layers.append(batch_norm(dnn.Conv2DDNNLayer(layers[-1], 192, (3,3), pad=0, nonlinearity=f)))
layers.append(batch_norm(ll.NINLayer(layers[-1], num_units=192, nonlinearity=f)))
layers.append(batch_norm(ll.NINLayer(layers[-1], num_units=192, nonlinearity=f)))
layers.append(nn.GlobalAvgLayer(layers[-1]))
layers.append(batch_norm(ll.DenseLayer(layers[-1], num_units=10, nonlinearity=None)))
# discriminative cost & updates
output_before_softmax = ll.get_output(layers[-1], x)
y = T.ivector()
cost = nn.softmax_loss(y, output_before_softmax)
train_err = T.mean(T.neq(T.argmax(output_before_softmax,axis=1),y))
params = ll.get_all_params(layers, trainable=True)
lr = T.scalar()
mom1 = T.scalar()
param_updates = nn.adam_updates(params, cost, lr=lr, mom1=mom1)
test_output_before_softmax = ll.get_output(layers[-1], x, deterministic=True)
test_err = T.mean(T.neq(T.argmax(test_output_before_softmax,axis=1),y))
print('Compiling')
# compile Theano functions
train_batch = th.function(inputs=[x,y,lr,mom1], outputs=train_err, updates=param_updates)
test_batch = th.function(inputs=[x,y], outputs=test_err)
print('Beginning training')
# //////////// perform training //////////////
begin_all = time.time()
for epoch in range(200):
begin_epoch = time.time()
lr = np.cast[th.config.floatX](args.learning_rate * np.minimum(2. - epoch/100., 1.))
if epoch < 100:
mom1 = 0.9
else:
mom1 = 0.5
# permute the training data
inds = rng.permutation(trainx_white.shape[0])
trainx_white = trainx_white[inds]
trainy = trainy[inds]
# train
train_err = 0.
for t in range(nr_batches_train):
train_err += train_batch(trainx_white[t*args.batch_size:(t+1)*args.batch_size],
trainy[t*args.batch_size:(t+1)*args.batch_size],lr,mom1)
train_err /= nr_batches_train
# test
test_err = 0.
for t in range(nr_batches_test):
test_err += test_batch(testx_white[t*args.batch_size:(t+1)*args.batch_size],
testy[t*args.batch_size:(t+1)*args.batch_size])
test_err /= nr_batches_test
logging.info('Iteration %d, time = %ds, train_err = %.6f, test_err = %.6f' % (epoch, time.time()-begin_epoch, train_err, test_err))
results_file.write('%d, %d, %.6f, %.6f\n' % (epoch, time.time()-begin_all, train_err, test_err))
results_file.flush()
if epoch % 5 == 0:
np.savez(exp_dir + "/network.npz", *lasagne.layers.get_all_param_values(layers))
print('Saved')
|
python
|
from django.contrib import admin
from bookmarks.models import Bookmark, BookmarkInstance
class BookmarkAdmin(admin.ModelAdmin):
list_display = ('url', 'description', 'added', 'adder',)
admin.site.register(Bookmark, BookmarkAdmin)
admin.site.register(BookmarkInstance)
|
python
|
import os
import pickle
from smart_open import smart_open
def _split3(path):
dir, f = os.path.split(path)
fname, ext = os.path.splitext(f)
return dir, fname, ext
def get_containing_dir(path):
d, _, _ = _split3(path)
return d
def get_parent_dir(path):
if os.path.isfile(path):
path = get_containing_dir(path)
return os.path.abspath(os.path.join(path, os.pardir))
def get_file_name(path):
_, fname, _ = _split3(path)
return fname
def save_obj(obj, name):
with open(name, 'wb') as f:
pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_obj(name):
with smart_open(name, 'rb') as f:
return pickle.load(f)
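# Small round-trip example for the helpers above; the temporary path is
# illustrative only.
if __name__ == "__main__":
    save_obj({"answer": 42}, "/tmp/example.pkl")
    print(load_obj("/tmp/example.pkl"))
    print(get_parent_dir(__file__), get_file_name(__file__))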
|
python
|
# coding=utf-8
from __future__ import unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from .models import *
class CreditNoteAdmin(admin.ModelAdmin):
model = CreditNote
search_fields = ('numero', 'invoice__id', 'invoice__contact__id')
list_display = ('invoice', 'serie', 'numero', 'get_contact_id')
raw_id_fields = ['invoice']
readonly_fields = ['invoice', 'uuid', 'serie', 'numero']
ordering = ["-id"]
class InvoiceItemInline(admin.StackedInline):
model = InvoiceItem
fields = ['amount', 'product', 'description', 'price', 'copies', 'service_from', 'service_to', 'type']
extra = 0
class InvoiceAdmin(admin.ModelAdmin):
search_fields = ('contact__id', 'contact__name')
list_display = ('id', 'contact', 'amount', 'paid', 'debited', 'canceled', 'uncollectible', 'serie', 'numero')
fieldsets = (
("", {"fields": (
'contact', 'subscription',
('creation_date', 'expiration_date'),
('service_from', 'service_to'),
('amount', 'payment_type'),
('debited', 'paid'),
('payment_date', 'payment_reference'),
'notes',
('canceled', 'cancelation_date'),
'uncollectible',
('uuid', 'serie', 'numero'),
('pdf', 'balance'),
('route', 'order'),
'print_date'
)}),
(_('Billing data'), {
'fields': (
('billing_name', 'billing_address'),
('billing_state', 'billing_city'),
'billing_document',
)}),
)
raw_id_fields = ['contact', 'subscription']
inlines = (InvoiceItemInline,)
readonly_fields = ['canceled', 'cancelation_date', 'uuid', 'serie', 'numero', 'pdf']
ordering = ['-id']
class InvoiceItemAdmin(admin.ModelAdmin):
pass
class BillingAdmin(admin.ModelAdmin):
list_display = (
'id', 'product', 'start', 'amount_billed', 'count',
'progress', 'status')
# readonly_fields = ['exclude']
    def get_readonly_fields(self, request, obj=None):
        if request.user.is_staff:
            if request.user.is_superuser:
                return (
                    'id', 'start', 'exclude', 'errors', 'created_by',
                    'started_by', 'dpp', 'billing_date', 'end',
                    'subscriber_amount')
            else:
                return [f.name for f in self.model._meta.fields]
        return super(BillingAdmin, self).get_readonly_fields(request, obj)
admin.site.register(Invoice, InvoiceAdmin)
admin.site.register(Billing, BillingAdmin)
admin.site.register(InvoiceItem, InvoiceItemAdmin)
admin.site.register(CreditNote, CreditNoteAdmin)
|
python
|
import logging
import logging.handlers
from logging.handlers import TimedRotatingFileHandler, MemoryHandler
import os
from datetime import datetime
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
if True:
import settings
skyline_app = 'flux'
skyline_app_logger = '%sLog' % skyline_app
logger = logging.getLogger(skyline_app_logger)
logfile = '%s/%s.log' % (settings.LOG_PATH, skyline_app)
def set_up_logging(app):
if not os.path.exists(settings.LOG_PATH):
os.makedirs(settings.LOG_PATH)
# current_time = datetime.now()
# current_date = current_time.strftime("%Y-%m-%d")
# file_name = current_date + '.log'
# file_location = log_location + file_name
# with open(logfile, 'a+'):
if app:
use_logfile = '%s/%s.%s.log' % (settings.LOG_PATH, skyline_app, app)
else:
use_logfile = logfile
with open(use_logfile, 'a+'):
pass
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s :: %(process)s :: %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
handler = logging.handlers.TimedRotatingFileHandler(
use_logfile,
when="midnight",
interval=1,
backupCount=5)
memory_handler = logging.handlers.MemoryHandler(256,
flushLevel=logging.DEBUG,
target=handler)
handler.setFormatter(formatter)
logger.addHandler(memory_handler)
# logger = logging.getLogger(skyline_app)
# format = '[%(asctime)s] [%(levelname)s] [%(message)s] [--> %(pathname)s [%(process)d]:]'
# format = '%(asctime)s [%(levelname)s] %(process)d: %(message)s'
# To store in file
# logging.basicConfig(format=format, filemode='a+', filename=file_location, level=logging.DEBUG)
# logging.basicConfig(format=format, filemode='a', filename=file_location)
# logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
# To print only
# logging.basicConfig(format=format, level=logging.DEBUG)
return logger
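# Example wiring (assumes settings.LOG_PATH is configured): build a per-app
# log file for a hypothetical 'listen' sub-process and emit one line.
if __name__ == '__main__':
    app_logger = set_up_logging('listen')
    app_logger.info('%s :: listen :: logging initialised' % skyline_app)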
|
python
|
from typing import List, Dict, Callable
import numpy as np
import tensorflow as tf
from typeguard import check_argument_types
from neuralmonkey.decorators import tensor
from neuralmonkey.vocabulary import END_TOKEN_INDEX
from neuralmonkey.runners.base_runner import BaseRunner
from neuralmonkey.decoders.sequence_labeler import SequenceLabeler
# pylint: disable=invalid-name
Postprocessor = Callable[[List[List[str]]], List[List[str]]]
# pylint: enable=invalid-name
class LabelRunner(BaseRunner[SequenceLabeler]):
# pylint: disable=too-few-public-methods
# Pylint issue here: https://github.com/PyCQA/pylint/issues/2607
class Executable(BaseRunner.Executable["LabelRunner"]):
def collect_results(self, results: List[Dict]) -> None:
loss = results[0].get("loss", 0.)
summed_logprobs = results[0]["label_logprobs"]
input_mask = results[0]["input_mask"]
for sess_result in results[1:]:
loss += sess_result.get("loss", 0.)
summed_logprobs = np.logaddexp(summed_logprobs,
sess_result["label_logprobs"])
                assert np.array_equal(input_mask, sess_result["input_mask"])
argmaxes = np.argmax(summed_logprobs, axis=2)
# CAUTION! FABULOUS HACK BELIEVE ME
argmaxes -= END_TOKEN_INDEX
argmaxes *= input_mask.astype(int)
argmaxes += END_TOKEN_INDEX
# transpose argmaxes because vectors_to_sentences is time-major
vocabulary = self.executor.decoder.vocabulary
decoded_labels = vocabulary.vectors_to_sentences(argmaxes.T)
if self.executor.postprocess is not None:
decoded_labels = self.executor.postprocess(decoded_labels)
self.set_result(outputs=decoded_labels, losses=[loss],
scalar_summaries=None, histogram_summaries=None,
image_summaries=None)
# pylint: enable=too-few-public-methods
def __init__(self,
output_series: str,
decoder: SequenceLabeler,
postprocess: Postprocessor = None) -> None:
check_argument_types()
BaseRunner[SequenceLabeler].__init__(self, output_series, decoder)
self.postprocess = postprocess
@tensor
def fetches(self) -> Dict[str, tf.Tensor]:
return {
"label_logprobs": self.decoder.logprobs,
"input_mask": self.decoder.encoder.input_sequence.temporal_mask,
"loss": self.decoder.cost}
@property
def loss_names(self) -> List[str]:
return ["loss"]
|
python
|
import json
import os
from google.auth.transport import requests
from google.oauth2 import service_account
_BASE_URL = "https://healthcare.googleapis.com/v1"
def get_session():
"""Creates an authorized Requests Session."""
credentials = service_account.Credentials.from_service_account_file(
filename=os.environ["GOOGLE_APPLICATION_CREDENTIALS"],
scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
# Create a requests Session object with the credentials.
session = requests.AuthorizedSession(credentials)
return session
def dicomweb_store_instance(
base_url, project_id, cloud_region, dataset_id, dicom_store_id, dcm_file
):
"""Handles the POST requests specified in the DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/studies".format(
url, dataset_id, dicom_store_id
)
# Make an authenticated API request
session = get_session()
with open(dcm_file, "rb") as dcm:
dcm_content = dcm.read()
content_type = "application/dicom"
headers = {"Content-Type": content_type}
response = session.post(dicomweb_path, data=dcm_content, headers=headers)
response.raise_for_status()
print("Stored DICOM instance:")
print(response.text)
return response
def dicomweb_search_instance(
base_url, project_id, cloud_region, dataset_id, dicom_store_id
):
"""Handles the GET requests specified in DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/instances".format(
url, dataset_id, dicom_store_id
)
# Make an authenticated API request
session = get_session()
headers = {"Content-Type": "application/dicom+json; charset=utf-8"}
response = session.get(dicomweb_path, headers=headers)
response.raise_for_status()
instances = response.json()
print("Instances:")
print(json.dumps(instances, indent=2))
return instances
def dicomweb_retrieve_study(
base_url, project_id, cloud_region, dataset_id, dicom_store_id, study_uid
):
"""Handles the GET requests specified in the DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/studies/{}".format(
url, dataset_id, dicom_store_id, study_uid
)
# When specifying the output file, use an extension like ".multipart."
# Then, parse the downloaded multipart file to get each individual
# DICOM file.
file_name = "study.multipart"
# Make an authenticated API request
session = get_session()
response = session.get(dicomweb_path)
response.raise_for_status()
with open(file_name, "wb") as f:
f.write(response.content)
print("Retrieved study and saved to {} in current directory".format(file_name))
return response
def dicomweb_search_studies(
base_url, project_id, cloud_region, dataset_id, dicom_store_id
):
"""Handles the GET requests specified in the DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/studies".format(
url, dataset_id, dicom_store_id
)
# Refine your search by appending DICOM tags to the
# request in the form of query parameters. This sample
# searches for studies containing a patient's name.
params = {"PatientName": "Sally Zhang"}
session = get_session()
response = session.get(dicomweb_path, params=params)
response.raise_for_status()
print("Studies found: response is {}".format(response))
# Uncomment the following lines to process the response as JSON.
# patients = response.json()
# print('Patients found matching query:')
# print(json.dumps(patients, indent=2))
# return patients
def dicomweb_retrieve_instance(
base_url,
project_id,
cloud_region,
dataset_id,
dicom_store_id,
study_uid,
series_uid,
instance_uid,
):
"""Handles the GET requests specified in the DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicom_store_path = "{}/datasets/{}/dicomStores/{}".format(
url, dataset_id, dicom_store_id
)
dicomweb_path = "{}/dicomWeb/studies/{}/series/{}/instances/{}".format(
dicom_store_path, study_uid, series_uid, instance_uid
)
file_name = "instance.dcm"
# Make an authenticated API request
session = get_session()
headers = {"Accept": "application/dicom; transfer-syntax=*"}
response = session.get(dicomweb_path, headers=headers)
response.raise_for_status()
with open(file_name, "wb") as f:
f.write(response.content)
print(
"Retrieved DICOM instance and saved to {} in current directory".format(
file_name
)
)
return response
def dicomweb_retrieve_rendered(
base_url,
project_id,
cloud_region,
dataset_id,
dicom_store_id,
study_uid,
series_uid,
instance_uid,
):
"""Handles the GET requests specified in the DICOMweb standard."""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicom_store_path = "{}/datasets/{}/dicomStores/{}".format(
url, dataset_id, dicom_store_id
)
instance_path = "{}/dicomWeb/studies/{}/series/{}/instances/{}".format(
dicom_store_path, study_uid, series_uid, instance_uid
)
dicomweb_path = "{}/rendered".format(instance_path)
file_name = "rendered_image.png"
# Make an authenticated API request
session = get_session()
headers = {"Accept": "image/png"}
response = session.get(dicomweb_path, headers=headers)
response.raise_for_status()
with open(file_name, "wb") as f:
f.write(response.content)
print(
"Retrieved rendered image and saved to {} in current directory".format(
file_name
)
)
return response
def dicomweb_delete_study(
base_url, project_id, cloud_region, dataset_id, dicom_store_id, study_uid
):
"""Handles DELETE requests equivalent to the GET requests specified in
the WADO-RS standard.
"""
url = "{}/projects/{}/locations/{}".format(base_url, project_id, cloud_region)
dicomweb_path = "{}/datasets/{}/dicomStores/{}/dicomWeb/studies/{}".format(
url, dataset_id, dicom_store_id, study_uid
)
# Make an authenticated API request
session = get_session()
headers = {"Content-Type": "application/dicom+json; charset=utf-8"}
response = session.delete(dicomweb_path, headers=headers)
response.raise_for_status()
print("Deleted study.")
return response
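# Hypothetical invocation; the project, region, dataset, and store ids below
# are placeholders for values normally passed in by the caller.
if __name__ == "__main__":
    dicomweb_search_instance(
        _BASE_URL,
        project_id="my-project",
        cloud_region="us-central1",
        dataset_id="my-dataset",
        dicom_store_id="my-dicom-store",
    )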
|
python
|
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.db import models
class Isoform(models.Model):
isoform_id = models.BigAutoField(primary_key=True)
uniprot_id = models.BigIntegerField(blank=True, null=True)
accession = models.CharField(max_length=30, blank=True, null=True)
sequence = models.CharField(max_length=200, blank=True, null=True)
uniparc_accession = models.CharField(max_length=30, blank=True, null=True)
embl_acc = models.CharField(max_length=30, blank=True, null=True)
class Meta:
managed = False
db_table = 'isoform'
class Domain(models.Model):
domain_id = models.BigAutoField(primary_key=True)
isoform = models.ForeignKey('Isoform', models.DO_NOTHING, blank=True, null=True)
start = models.BigIntegerField(blank=True, null=True)
end = models.BigIntegerField(blank=True, null=True)
description = models.CharField(max_length=45, blank=True, null=True)
class Meta:
managed = False
db_table = 'domain'
class Ptm(models.Model):
ptm_id = models.BigAutoField(primary_key=True)
domain = models.ForeignKey(Domain, models.DO_NOTHING, blank=True, null=True)
description = models.CharField(max_length=45, blank=True, null=True)
start = models.BigIntegerField(blank=True, null=True)
end = models.BigIntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'ptm'
class UniprotEntry(models.Model):
uniprot_id = models.BigAutoField(primary_key=True)
uniprot_acc = models.CharField(max_length=30, blank=True, null=True)
uniprot_tax_id = models.BigIntegerField(blank=True, null=True)
userstamp = models.CharField(max_length=30, blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
sequence_version = models.SmallIntegerField(blank=True, null=True)
upi = models.CharField(max_length=13, blank=True, null=True)
md5 = models.CharField(max_length=32, blank=True, null=True)
canonical_uniprot_id = models.IntegerField(blank=True, null=True)
ensembl_derived = models.NullBooleanField()
alias = models.CharField(max_length=30, blank=True, null=True)
gene_symbol = models.CharField(max_length=30, blank=True, null=True)
chromosome_line = models.CharField(max_length=50, blank=True, null=True)
entry_type = models.ForeignKey(
'CvEntryType',
models.DO_NOTHING,
blank=True,
null=True,
db_column="entry_type"
)
length = models.IntegerField(blank=True, null=True)
protein_existence_id = models.SmallIntegerField(blank=True, null=True)
def __str__(self):
return "{0} - {1}".format(self.uniprot_id, self.uniprot_acc)
class Meta:
managed = False
db_table = 'uniprot_entry'
unique_together = (('uniprot_acc', 'sequence_version'),)
class UniprotEntryHistory(models.Model):
release_version = models.CharField(max_length=30)
uniprot = models.ForeignKey(UniprotEntry, models.DO_NOTHING, primary_key=True)
grouping_id = models.BigIntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'uniprot_entry_history'
unique_together = (('uniprot', 'release_version'),)
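# Illustrative ORM usage for the unmanaged models above; it assumes a database
# configured with these tables and a real taxonomy id. The helper name is ours.
def canonical_accessions_for_taxon(tax_id):
    """Return the UniProt accessions recorded for a given taxonomy id."""
    return list(
        UniprotEntry.objects.filter(uniprot_tax_id=tax_id)
        .values_list("uniprot_acc", flat=True)
    )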
|
python
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import range
from builtins import super
import mock
import string
import unittest
import random
import itertools
from pprint import pprint
from dpaycli import DPay
from dpaycliapi.websocket import DPayWebsocket
from dpaycli.instance import set_shared_dpay_instance
from dpaycli.nodelist import NodeList
# Py3 compatibility
import sys
wif = "5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3"
core_unit = "DWB"
class Testcases(unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
nodelist = NodeList()
nodelist.update_nodes(dpay_instance=DPay(node=nodelist.get_nodes(normal=True, appbase=True), num_retries=10))
stm = DPay(node=nodelist.get_nodes())
self.ws = DPayWebsocket(
urls=stm.rpc.nodes,
num_retries=10
)
def test_connect(self):
ws = self.ws
self.assertTrue(len(next(ws.nodes)) > 0)
|
python
|
import os
from configuration import *
from scipy.io import wavfile
from scipy.signal import stft,check_COLA,istft
import numpy as np
import pickle
import multiprocessing as mp
# save decoded dataset as pickle file
def save_as_wav(dir_list):
dataset= {
'vocals': [],
'accompaniment': [],
'bass': [],
'drums': [],
'other': [],
'mixture': []
}
count=0
for folder in dir_list:
# if count>=10:
# return dataset
# count+=1
# if count % 5 == 0:
# print("\rGetting Data: {0:.2f}% ".format(count /len(os.listdir(os.path.join(wavs_dir,'train'))) * 100), end="")
for key in dataset.keys():
_,data=wavfile.read(os.path.join(wavs_dir,"train",folder,str(key)+".wav"))
dataset[key].append(data[:,0])
dataset[key].append(data[:,1])
# mix=(np.hstack(dataset['vocals'])+np.hstack(dataset['accompaniment']))/2
# print(mix.mean(),np.hstack(dataset['mixture']).mean())
# print(mix.shape,np.hstack(dataset['mixture']).shape)
# print("Complete")
return dataset
# print("Saving dataset")
# pickle.dump(dataset, open(wavs_dir+"/dataset.pickle", "wb"),pickle.HIGHEST_PROTOCOL)
# print("Dataset saved")
# read pickled wav dataset
def read_data_all(infile=wavs_dir + "/dataset_stft.pickle"):
    with open(infile, "rb") as f:
        dataset = pickle.load(f)
    return dataset['mixture'], dataset['vocals'], dataset['accompaniment'], dataset['drums'], dataset['bass'], dataset['other']
# read pickled wav dataset
def read_data(infile=wavs_dir + "/dataset_stft.pickle"):
    with open(infile, "rb") as f:
        dataset = pickle.load(f)
    return dataset['mixture'], dataset['vocals'], dataset['accompaniment']
def make_chunks(lis):
arr=np.hstack(lis)
chunk_len=len(arr)//int(sr*time_len)*int(sr*time_len)
return arr[:chunk_len].reshape(-1,int(sr*time_len))
def make_stft(lis):
arr=make_chunks(lis)
mags=[]
angles=[]
if check_COLA('hann',nperseg=perseg,noverlap = overlap):
for wav in arr:
f,t,X=stft(wav,nperseg=perseg,noverlap = overlap)
mags.append(np.transpose(np.abs(X)).astype('float32'))
angles.append(np.angle(X).astype('float32'))
else:
print("COLA constraint not met, in func: utils.make_stft")
exit()
# print(len(mags),np.abs(mags[0].shape))
return np.vstack(mags),angles
def get_stft_matrix(magnitudes, phases):
return magnitudes * np.exp(1.j * phases)
def make_wav(mags, phases, overlap=overlap):
a=[]
for mag,phase in zip (mags,phases):
mag=(mag.reshape(88,n_bins).swapaxes(1,0))
# phase=np.transpose(phase.reshape(-1,n_bins))
stft_matrix = get_stft_matrix(mag, phase)
# print(stft_maxrix.shape)
# for mat in stft_maxrix:
# print(mat.shape)
a.append(istft(stft_matrix,fs=sr, noverlap=overlap)[1])
# print("one ",end="")
# print(np.hstack(a).shape)
return np.hstack(a)
def save_as_stft(wavs_dir = wavs_dir):
mix,voc,acc,dru,bas,oth=read_data_all(infile = wavs_dir+"/dataset.pickle")
dataset_stft={}
dataset_stft['mixture'],dataset_stft['mixturea']=make_stft(mix)
dataset_stft['vocals'],dataset_stft['vocalsa']=make_stft(voc)
dataset_stft['accompaniment'],dataset_stft['accompanimenta']=make_stft(acc)
    dataset_stft['drums'], dataset_stft['drumsa'] = make_stft(dru)
    dataset_stft['bass'], dataset_stft['bassa'] = make_stft(bas)
dataset_stft['other'],dataset_stft['othera']=make_stft(oth)
print("Saving dataset")
pickle.dump(dataset_stft, open(wavs_dir+"/dataset_stft.pickle", "wb"),pickle.HIGHEST_PROTOCOL)
print("Dataset saved")
def multi_stft(mat,key):
phase,angle=make_stft(mat)
print(key)
return [key,phase,angle]
def save_diff_stft(wavs_dir,dataset,index=0):
# output = mp.Queue()
mix,voc,acc,dru,bas,oth=dataset['mixture'],dataset['vocals'],dataset['accompaniment'],dataset['drums'],dataset['bass'],dataset['other']
dataset_stft={}
print('starting stft')
keylist=list(dataset.keys())
pool = mp.Pool(processes=6)
results=[pool.apply(multi_stft,args=(mat,key)) for mat,key in zip ([dataset[keyl] for keyl in keylist],keylist)]
print("out of the wormhole!")
dataset_stft={}
for result in results:
dataset_stft[result[0]]=result[1]
dataset_stft[result[0]+"angle"]=result[2]
print("Saving dataset")
pickle.dump(dataset_stft, open(wavs_dir+"/dataset_stft_"+str(index)+".pickle", "wb"),pickle.HIGHEST_PROTOCOL)
print(" saved")
def read(dir_list,index):
data=save_as_wav(dir_list)
print(index)
save_diff_stft(wavs_dir,data,index)
return index
def read_mix_voc_acc(wavs_dir=wavs_dir, limit=(0, 49)):  # limit is a (start, end) index range
mixl=[]
vocl=[]
accl=[]
for index in range(limit[0],limit[1]-1,5):
print("\rGetting Data: {0:.2f}% ".format(index), end="")
mix,voc,acc=read_data(wavs_dir+"/dataset_stft_"+str(index)+".pickle")
mixl.append(mix)
vocl.append(voc)
accl.append(acc)
zeros=np.zeros((1,n_bins))
mixl=np.vstack(mixl)
vocl=np.vstack(vocl)
accl=np.vstack(accl)
    if len(mixl) % 4 != 0:
        rem = 4 - len(mixl) % 4
        padding = np.repeat(zeros, rem, axis=0)
        print(padding.shape)
        mixl = np.vstack((mixl, padding))
    if len(vocl) % 4 != 0:
        rem = 4 - len(vocl) % 4
        padding = np.repeat(zeros, rem, axis=0)
        print(padding.shape)
        vocl = np.vstack((vocl, padding))
    if len(accl) % 4 != 0:
        rem = 4 - len(accl) % 4
        padding = np.repeat(zeros, rem, axis=0)
        print(padding.shape)
        accl = np.vstack((accl, padding))
return mixl,vocl,accl
if __name__ == '__main__':
dir_list=os.listdir(os.path.join(wavs_dir,'train'))
# pool=mp.Pool(processes=20)
results=[(read(dir_list[sub_list:sub_list+5],sub_list)) for sub_list in range(95,len(dir_list)-4,5)]
# output = [p.get() for p in results]
print(results)
print("Ta-da!")
|
python
|
import pytest
import numpy as np
from mcalf.models import ModelBase as DummyModel, FitResult, FitResults
fitted_parameters = [1, 2, 1000.2, 1001.8, 5]
fit_info = {'chi2': 1.4, 'classification': 2, 'profile': 'abc',
'success': True, 'index': [123, 456, 789]}
def test_fitresult_passthrough():
fit = FitResult(fitted_parameters, fit_info)
assert fit.parameters == [1, 2, 1000.2, 1001.8, 5]
assert len(fit) == 5
assert fit.chi2 == 1.4
assert fit.classification == 2
assert fit.profile == 'abc'
assert isinstance(fit.success, bool) and fit.success
assert fit.index == [123, 456, 789]
# Test that the string representation can be formed without error
repr(fit)
fit.index = [None]*3
repr(fit)
def test_fitresult_velocity():
m = DummyModel(original_wavelengths=[1000.4, 1000.6])
m.stationary_line_core = 1000.5
m.quiescent_wavelength = 2
m.active_wavelength = 3
fit = FitResult(fitted_parameters, fit_info)
assert fit.velocity(m, vtype='quiescent') == pytest.approx(-89.95502249)
assert fit.velocity(m, vtype='active') == pytest.approx(389.80509745)
# Ensure nan is returned if no active component fitted
fitted_parameters_trim = fitted_parameters[:3]
fit = FitResult(fitted_parameters_trim, fit_info)
vel = fit.velocity(m, vtype='active')
assert vel != vel # assert is nan
# Ensure an invalid velocity type is detected
with pytest.raises(ValueError):
vel = fit.velocity(m, vtype='unknown-vtype')
def test_fitresults_init():
fits = FitResults((49, 52), 4, time=12)
assert fits.parameters.shape == (49, 52, 4)
assert fits.chi2.shape == (49, 52)
assert fits.classifications.shape == (49, 52)
assert fits.profile.shape == (49, 52)
assert fits.success.shape == (49, 52)
assert fits.time == 12
with pytest.raises(TypeError): # Should be a tuple
fits = FitResults(10, 3)
with pytest.raises(TypeError): # Should be a tuple of length 2
fits = FitResults((10, 32, 53), 8)
with pytest.raises(ValueError): # Should be an integer >= 1
fits = FitResults((10, 5), 5.5)
with pytest.raises(ValueError): # Should be an integer >= 1
fits = FitResults((10, 5), 0)
def test_fitresults_append():
# Create dummy fit results
fit1 = FitResult(
[2, 6, 254.6, 963.4],
{'chi2': 7.43, 'classification': 4, 'profile': 'absorption',
'success': True, 'index': [12, 34, 81]}
)
fit2 = FitResult(
[9, 2, 724.32, 134.8],
{'chi2': 1.34, 'classification': 2, 'profile': 'emission',
'success': True, 'index': [12, 0, 99]}
)
fit3 = FitResult(
[1, 8, 932.1, 327.5, 3.7, 9, 2, 0.2],
{'chi2': 0.79, 'classification': 1, 'profile': 'both',
'success': False, 'index': [12, 99, 0]}
)
fit4 = FitResult( # With incorrect time index
[6, 4, 356.2, 738.5],
{'chi2': 8.2, 'classification': 3, 'profile': 'absorption',
'success': True, 'index': [3, 0, 25]}
)
fit5 = FitResult( # With unknown profile name
[5, 3, 256.2, 628.5],
{'chi2': 8.1, 'classification': 3, 'profile': 'continuum',
'success': True, 'index': [12, 10, 40]}
)
# Initialise empty FitResults object
fits = FitResults((100, 100), 8, time=12)
# Append dummy fits
fits.append(fit1)
fits.append(fit2)
fits.append(fit3)
with pytest.raises(ValueError): # Time index does not match
fits.append(fit4)
with pytest.raises(ValueError): # Unknown profile
fits.append(fit5)
assert all([a == b for a, b in zip(fits.parameters[34, 81][:4], fit1.parameters)])
assert fits.chi2[34, 81] == fit1.chi2
assert fits.classifications[34, 81] == fit1.classification
assert fits.profile[34, 81] == fit1.profile
assert fits.success[34, 81] == fit1.success
assert all([a == b for a, b in zip(fits.parameters[0, 99][4:], fit2.parameters)])
assert fits.chi2[0, 99] == fit2.chi2
assert fits.classifications[0, 99] == fit2.classification
assert fits.profile[0, 99] == fit2.profile
assert fits.success[0, 99] == fit2.success
assert all([a == b for a, b in zip(fits.parameters[99, 0], fit3.parameters)])
assert fits.chi2[99, 0] == fit3.chi2
assert fits.classifications[99, 0] == fit3.classification
assert fits.profile[99, 0] == fit3.profile
assert fits.success[99, 0] == fit3.success
def test_fitresults_velocities():
m = DummyModel(original_wavelengths=[1000.4, 1000.6])
m.stationary_line_core = 1000.5
m.quiescent_wavelength = 0
m.active_wavelength = 1
fits = FitResults((4, 4), 2)
fits.parameters = np.array([
[[1000.2, 192.4], [826.5, 534.23], [8365.86, 1252.32], [1532.3, 2152.3]],
[[978.73, 753.52], [1253.5, 1329.3], [6423.4, 2355.45], [12.53, 2523.3]],
[[825.8, 862.5], [1759.5, 1000.9], [2633.4, 234.43], [2535.353, 152.34]],
[[896.53, 153.2], [1224.3, 1111.11], [634.54, 2353.97], [242.35, 763.4]]
])
truth_quiescent = np.array([[-8.99550225e+01, -5.21739130e+04, 2.20850375e+06, 1.59460270e+05],
[-6.52773613e+03, 7.58620690e+04, 1.62605697e+06, -2.96242879e+05],
[-5.23838081e+04, 2.27586207e+05, 4.89625187e+05, 4.60225787e+05],
[-3.11754123e+04, 6.71064468e+04, -1.09733133e+05, -2.27331334e+05]])
truth_active = np.array([[-2.42308846e+05, -1.39811094e+05, 7.55082459e+04, 3.45367316e+05],
[-7.40569715e+04, 9.85907046e+04, 4.06281859e+05, 4.56611694e+05],
[-4.13793103e+04, 1.19940030e+02, -2.29706147e+05, -2.54320840e+05],
[-2.54062969e+05, 3.31664168e+04, 4.05838081e+05, -7.10944528e+04]])
result_quiescent = fits.velocities(m, vtype='quiescent')
result_active = fits.velocities(m, vtype='active')
with pytest.raises(ValueError):
fits.velocities(m, vtype='unknown-vtype')
assert result_quiescent == pytest.approx(truth_quiescent)
assert result_active == pytest.approx(truth_active)
|
python
|
# Generated by Django 4.0.1 on 2022-03-09 12:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('BruteScan', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='bruteresult',
name='result_flag',
),
migrations.AddField(
model_name='bruteresult',
name='password',
            field=models.CharField(db_column='password', default='', max_length=32, verbose_name='password'),
),
migrations.AddField(
model_name='bruteresult',
name='username',
            field=models.CharField(db_column='username', default='', max_length=32, verbose_name='account'),
),
]
|
python
|
import backend
import imagery
import config_reader
import os
import shutil
import geolocation
import numpy as np
import json
from detectors.Detector import Detector
from Mask_RCNN_Detect import Mask_RCNN_Detect
from PIL import Image
from flask import Flask, render_template, request, flash, redirect, url_for, send_from_directory, send_file
application = Flask(__name__)
imd = None
program_config = None
osm = None
mrcnn = None
# gets the directories all set up
if (os.path.isdir('runtime')):
shutil.rmtree('runtime')
os.mkdir('runtime')
os.mkdir('runtime/images')
os.mkdir('runtime/masks')
# useful function for turning request data into usable dictionaries
def result_to_dict(result):
info = {}
for k, v in result.items():
info[k.lower()] = v
return info
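# Example: result_to_dict({'Lat': '12.3', 'Lng': '45.6'}) -> {'lat': '12.3', 'lng': '45.6'}
# (keys are lower-cased, values are left untouched).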
@application.route('/', methods=['GET']) # base page that loads up on start/accessing the website
def login(): # this method is called when the page starts up
return redirect('/home/')
@application.route('/home/')
def home(lat=None, lng=None, zoom=None):
    # necessary so that, if the page is refreshed, the stored OSM ways are cleared along with the drawn polygons
global osm, program_config
osm.clear_ways_memory()
Detector.reset()
if lat is None or lng is None or zoom is None:
config = program_config
lat = config['start_lat']
lng = config['start_lng']
zoom = config['start_zoom']
access_key = program_config['accessKey']
context = {}
context['lat'] = lat
context['lng'] = lng
context['zoom'] = zoom
context['access_key'] = access_key
return render_template('DisplayMap.html', **context)
@application.route('/<zoom>/<lat>/<lng>', methods=['GET'])
def move_to_new_lat_long(zoom, lat, lng):
    return home(lat=lat, lng=lng, zoom=zoom)
@application.route('/home/backendWindow/', methods=['POST', 'GET'])
def backend_window():
global mrcnn
if mrcnn is None or mrcnn.image_id == 1: # no images masked yet
return send_from_directory('default_images', 'default_window.jpeg')
print("in backend window", mrcnn.image_id)
print('looking at ' + 'mask_{}.png'.format(mrcnn.image_id-1))
return send_from_directory('runtime/masks', 'mask_{}.png'.format(mrcnn.image_id-1))
@application.route('/home/detect_buildings', methods=['POST'])
def mapclick():
    global osm, mrcnn, imd
if request.method == 'POST':
result = request.form
info = result_to_dict(result)
lat = float(info['lat'])
lng = float(info['lng'])
zoom = int(info['zoom'])
strategy = info['strategy']
# find xtile, ytile
xtile, ytile = geolocation.deg_to_tile(lat, lng, zoom)
image = np.array(imd.download_tile(xtile, ytile, zoom))
if strategy == 'mrcnn':
# SETUP MRCNN STUFF
global mrcnn
if mrcnn is None: # import if not already imported
print('import MRCNN stuff...')
from Mask_RCNN_Detect import Mask_RCNN_Detect
mrcnn = Mask_RCNN_Detect('weights/epoch55.h5')
mask_data = mrcnn.detect_building(image, lat, lng, zoom)
building_ids = list(mask_data.keys())
building_points = list(mask_data.values())
else:
detector = Detector(image, lat, lng, zoom)
rect_id, rect_points = detector.detect_building()
building_ids = [rect_id]
building_points = [rect_points]
json_post = {"rects_to_add": [{
"ids": building_ids,
"points": building_points
}],
"rects_to_delete": {"ids": []}
}
return json.dumps(json_post)
@application.route('/home/delete_building', methods=['POST'])
def delete_building():
result = request.form
info = result_to_dict(result)
building_id = None
lat = None
lng = None
zoom = None
building_id = None
if 'building_id' in info:
building_id = int(info['building_id'])
else:
lat = float(info['lat'])
lng = float(info['lng'])
zoom = float(info['zoom'])
global mrcnn
if mrcnn is not None:
building_id = mrcnn.delete_mask(lat, lng, zoom, building_id)
json_post = {"rects_to_delete":
{"ids": [building_id]}
}
return json.dumps(json_post)
return 'mrcnn has not been made'
@application.route('/home/upload', methods=['POST'])
def upload_changes():
print('uploading to OSM...')
global osm
# # Create the way using the list of nodes
changeset_comment = "Added " + str(len(mrcnn.id_geo)) + " buildings."
print("comment", changeset_comment)
ways_created = osm.way_create_multiple(mrcnn.id_geo, changeset_comment, {"building": "yes"})
# # Clear the rectangle list
mrcnn.clear()
print('uploaded!')
return str(len(ways_created))
@application.route('/home/OSMSync', methods=['POST'])
def OSM_map_sync():
if request.method == 'POST':
result = request.form
info = result_to_dict(result)
min_long = float(info['min_long'])
min_lat = float(info['min_lat'])
max_long = float(info['max_long'])
max_lat = float(info['max_lat'])
global osm
        mappable_results = osm.sync_map(min_long, min_lat, max_long, max_lat)
        if mappable_results is None or len(mappable_results) == 0:
            json_post = {'rectsToAdd': []}
            return json.dumps(json_post)
        # note that this is in a different format from the other json_post for a map click
        # mappable_results is a list where each entry is a building containing tuples for the coordinates of the corners
        json_post = {"rectsToAdd": mappable_results}
return json.dumps(json_post)
@application.route('/home/citySearch', methods=['POST'])
def citySearch():
if request.method == 'POST':
result = request.form
info = result_to_dict(result)
print('info', info)
city_name = info['query']
coords = backend.search_city(city_name)
if coords != None:
json_post = {'lat': coords[0],
'lng': coords[1]}
return json.dumps(json_post)
json_post = {'lat': '-1000'}
return json.dumps(json_post)
# run the application.
if __name__ == "__main__":
config = config_reader.get_config()
# useless
application.secret_key = 'super secret key'
# Get config variables
access_key = None
if "accessKey" in config:
access_key = config["accessKey"]
# Create imagery downloader
imd = imagery.ImageryDownloader(access_key)
program_config = config
init_info = program_config["osmUpload"]
args = ["api", "username", "password"]
for arg in args:
if arg not in init_info:
print("[ERROR] Config: osmUpload->" + arg + " not found!")
raise ValueError()
# initializes the class for interacting with OpenStreetMap's API
osm = backend.OSM_Interactor(init_info["api"], init_info["username"], init_info["password"])
application.debug = True
application.run()
|
python
|
# I am a comment, python interpreter will ignore every line that starts with '#'
"""
I am a multiline comment and surrounded by 3 \" or 3 \'
"""
|
python
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.core.management.base import BaseCommand
from desktop import conf
from desktop import supervisor
import os
import sys
from django.utils.translation import ugettext as _
SERVER_HELP = r"""
Run Hue using either the CherryPy server or the Spawning server, based on
the current configuration.
"""
LOG = logging.getLogger(__name__)
class Command(BaseCommand):
help = _("Web server for Hue.")
def handle(self, *args, **options):
runserver()
def usage(self, subcommand):
return SERVER_HELP
def runserver():
script_name = "runspawningserver"
if conf.USE_CHERRYPY_SERVER.get():
script_name = "runcherrypyserver"
cmdv = supervisor.DjangoCommandSupervisee(script_name).cmdv
os.execv(cmdv[0], cmdv)
LOG.error("Failed to exec '%s' with argument '%s'" % (cmdv[0], cmdv[1],))
sys.exit(-1)
if __name__ == '__main__':
runserver()
|
python
|
import numpy as np
import sys
import matplotlib.ticker as mticker
def file2stats(filename):
#f=open(filename)
f=open('results/'+filename)
print('WARNING: Results read have not been regenerated')
lines = f.readlines()
f.close()
A = []
for line in lines:
A.append(float(line[:-1]))
A=np.array(A)
mean = np.mean(A)
std = np.std(A)
maxVal = np.amax(A)
minVal = np.amin(A)
return mean, std, maxVal, minVal
f = mticker.ScalarFormatter(useOffset=False, useMathText=True)
g = lambda x,pos : "{}".format(f._formatSciNotation('%1.2e' % x))
fmt = mticker.FuncFormatter(g)
gbs = lambda x,pos : r"\boldsymbol{"+"{}".format(f._formatSciNotation('%1.2e' % x))
fmtbs = mticker.FuncFormatter(gbs)
gbe = lambda x,pos : "{}".format(f._formatSciNotation('%1.2e' % x)+r"}")
fmtbe = mticker.FuncFormatter(gbe)
def appendOptToString(string,eps,metric):
string += " & "
if metric=='Max':
if eps==0:
string += r'$ 0 $'
if eps==1:
#string += r'$' + fmt(5.3632) + r" \pm " + fmt(0.0149) +'$'
string += r'$' + fmt(5.3632) +'$'
if eps==2:
#string += r'$' + fmt(10.8079) + r" \pm " + fmt(0.0335) +'$'
string += r'$' + fmt(10.8079) +'$'
if eps==3:
#string += r'$' + fmt(16.1125) + r" \pm " + fmt(0.0504) +'$'
string += r'$' + fmt(16.1125) +'$'
if eps==4:
#string += r'$' + fmt(21.5276) + r" \pm " + fmt(0.0594) +'$'
string += r'$' + fmt(21.5276) +'$'
if metric=='Mean':
string += r'$' + fmt(2*eps/np.sqrt(2*np.pi)) + '$'
string += r' \\ '
return string
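# Note: 2*eps/np.sqrt(2*np.pi) used above equals eps*sqrt(2/pi), the mean absolute value
# of a zero-mean Gaussian with standard deviation eps (this reading of the "Mean" optimum
# is an assumption, not stated in the original source).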
def appendResultToString(string,n,eps,mode,metric):
if mode=='GP':
mean1, std1, _, _ = file2stats(metric+'_phase_n'+str(n)+'_eps'+str(eps))
mean2, std2, _, _ = file2stats(metric+'_random_n'+str(n)+'_eps'+str(eps))
mean3, std3, _, _ = file2stats(metric+'_kmeans_n'+str(n)+'_eps'+str(eps))
elif mode=='NN':
mean1, std1, _, _ = file2stats(metric+'NN_phase_n'+str(n)+'_eps'+str(eps))
mean2, std2, _, _ = file2stats(metric+'NN_random_n'+str(n)+'_eps'+str(eps))
mean3, std3, _, _ = file2stats(metric+'NN_kmeans_n'+str(n)+'_eps'+str(eps))
listMeans = np.array([mean1,mean2,mean3])
minVal = np.argsort(listMeans)[0]
if minVal==0:
string += r" & $"+fmtbs(mean1) + r" \pm " + fmtbe(std1) +'$'
else:
string += r" & $"+fmt(mean1) + r" \pm " + fmt(std1) +'$'
if minVal==1:
string += r" & $"+fmtbs(mean2) + r" \pm " + fmtbe(std2) +'$'
else:
string += r" & $"+fmt(mean2) + r" \pm " + fmt(std2) +'$'
if minVal==2:
string += r" & $"+fmtbs(mean3) + r" \pm " + fmtbe(std3) +'$'
else:
string += r" & $"+fmt(mean3) + r" \pm " + fmt(std3) +'$'
string = appendOptToString(string,eps,metric)
return string
nSampleList = [1000]
epsilonList = [0, 1, 2, 3, 4]
i_iter = 0
# GP Results
print(r"\begin{table}[h]")
print(r"\caption{{\color{red}GP results}}")
print(r"\label{tab:GPResults}")
print(r"\begin{center}")
print(r"\begin{tabular}{ |c|c|c|c|c|c|c| }")
print(r"\hline")
print(r" Metric & n & $\varepsilon$ & Algo.~\ref{algo:iterative} (2 iter.) & Random & Stratified & Optimum \\ \hline ")
string = r"\multirow{5}{*}{Mean} & \multirow{5}{*}{$1,000$} & 0 "
string = appendResultToString(string,1000,0,'GP','Mean')
print(string)
string = r" & & 1 "
string = appendResultToString(string,1000,1,'GP','Mean')
print(string)
string = r" & & 2 "
string = appendResultToString(string,1000,2,'GP','Mean')
print(string)
string = r" & & 3 "
string = appendResultToString(string,1000,3,'GP','Mean')
print(string)
string = r" & & 4 "
string = appendResultToString(string,1000,4,'GP','Mean')
string += r"\hline"
print(string)
string = r"\multirow{5}{*}{Max} & \multirow{5}{*}{$1,000$} & 0 "
string = appendResultToString(string,1000,0,'GP','Max')
print(string)
string = r" & & 1 "
string = appendResultToString(string,1000,1,'GP','Max')
print(string)
string = r" & & 2 "
string = appendResultToString(string,1000,2,'GP','Max')
print(string)
string = r" & & 3 "
string = appendResultToString(string,1000,3,'GP','Max')
print(string)
string = r" & & 4 "
string = appendResultToString(string,1000,4,'GP','Max')
string += r"\hline"
print(string)
print(r"\end{tabular}")
print(r"\end{center}")
print(r"\end{table}")
print("\n\n\n")
# NN Results
print(r"\begin{table}[h]")
print(r"\caption{{\color{red}NN results}}")
print(r"\label{tab:NNResults}")
print(r"\begin{center}")
print(r"\begin{tabular}{ |c|c|c|c|c|c|c| }")
print(r"\hline")
print(r" Metric & n & $\varepsilon$ & Algo.~\ref{algo:iterative} (2 iter.) & Random & Stratified & Optimum \\ \hline ")
string = r"\multirow{5}{*}{Mean} & \multirow{5}{*}{$1,000$} & 0 "
string = appendResultToString(string,1000,0,'NN','Mean')
print(string)
string = r" & & 1 "
string = appendResultToString(string,1000,1,'NN','Mean')
print(string)
string = r" & & 2 "
string = appendResultToString(string,1000,2,'NN','Mean')
print(string)
string = r" & & 3 "
string = appendResultToString(string,1000,3,'NN','Mean')
print(string)
string = r" & & 4 "
string = appendResultToString(string,1000,4,'NN','Mean')
string += r"\hline"
print(string)
string = r"\multirow{5}{*}{Max} & \multirow{5}{*}{$1,000$} & 0 "
string = appendResultToString(string,1000,0,'NN','Max')
print(string)
string = r" & & 1 "
string = appendResultToString(string,1000,1,'NN','Max')
print(string)
string = r" & & 2 "
string = appendResultToString(string,1000,2,'NN','Max')
print(string)
string = r" & & 3 "
string = appendResultToString(string,1000,3,'NN','Max')
print(string)
string = r" & & 4 "
string = appendResultToString(string,1000,4,'NN','Max')
string += r"\hline"
print(string)
string = r"\multirow{5}{*}{Mean} & \multirow{5}{*}{$10,000$} & 0 "
string = appendResultToString(string,10000,0,'NN','Mean')
print(string)
string = r" & & 1 "
string = appendResultToString(string,10000,1,'NN','Mean')
print(string)
string = r" & & 2 "
string = appendResultToString(string,10000,2,'NN','Mean')
print(string)
string = r" & & 3 "
string = appendResultToString(string,10000,3,'NN','Mean')
print(string)
string = r" & & 4 "
string = appendResultToString(string,10000,4,'NN','Mean')
string += r"\hline"
print(string)
string = r"\multirow{5}{*}{Max} & \multirow{5}{*}{$10,000$} & 0 "
string = appendResultToString(string,10000,0,'NN','Max')
print(string)
string = r" & & 1 "
string = appendResultToString(string,10000,1,'NN','Max')
print(string)
string = r" & & 2 "
string = appendResultToString(string,10000,2,'NN','Max')
print(string)
string = r" & & 3 "
string = appendResultToString(string,10000,3,'NN','Max')
print(string)
string = r" & & 4 "
string = appendResultToString(string,10000,4,'NN','Max')
string += r"\hline"
print(string)
print(r"\end{tabular}")
print(r"\end{center}")
print(r"\end{table}")
|
python
|
import os
import numpy as np
from OpenGL.GL import *
import lib.basic_shapes as bs
import lib.easy_shaders as es
import lib.transformations as tr
import lib.object_handler as oh
class Charmander():
    def __init__(self): self.GPU = es.toGPUShape(oh.readOBJ(os.path.join('mod','tex','charmander.obj'), (241/255, 95/255, 62/255)), GL_REPEAT, GL_NEAREST)
def draw(self, pipeline, projection, view, transform, view_pos):
glUseProgram(pipeline.shaderProgram)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "La"), 241/255, 95/266, 62/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ld"), 241/255, 95/266, 62/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ls"), 241/255, 95/266, 62/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ka"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Kd"), 0.1, 0.1, 0.1)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ks"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "lightPosition"), 50, 50, 50)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "viewPosition"), *view_pos)
glUniform1ui(glGetUniformLocation(pipeline.shaderProgram, "shininess"), 10000)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "constantAttenuation"), 0.001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "linearAttenuation"), 0.0001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "quadraticAttenuation"), 0.0001)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "model"), 1, GL_TRUE, transform)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "projection"), 1, GL_TRUE, projection)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "view"), 1, GL_TRUE, view)
pipeline.drawShape(self.GPU)
class Bulbasaur():
def __init__(self): self.GPU = es.toGPUShape(oh.readOBJ(os.path.join('mod','tex','bulbasaur.obj'), (137/255, 200/255, 147/255)), GL_REPEAT, GL_NEAREST)
def draw(self, pipeline, projection, view, transform, view_pos):
glUseProgram(pipeline.shaderProgram)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "La"), 137/255, 200/255, 147/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ld"), 137/255, 200/255, 147/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ls"), 137/255, 200/255, 147/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ka"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Kd"), 0.1, 0.1, 0.1)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ks"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "lightPosition"), 50, 50, 50)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "viewPosition"), *view_pos)
glUniform1ui(glGetUniformLocation(pipeline.shaderProgram, "shininess"), 10000)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "constantAttenuation"), 0.001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "linearAttenuation"), 0.0001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "quadraticAttenuation"), 0.0001)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "model"), 1, GL_TRUE, transform)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "projection"), 1, GL_TRUE, projection)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "view"), 1, GL_TRUE, view)
pipeline.drawShape(self.GPU)
class Squirtle():
def __init__(self): self.GPU = es.toGPUShape(oh.readOBJ(os.path.join('mod','tex','squirtle.obj'), (162/255, 215/255, 213/255)), GL_REPEAT, GL_NEAREST)
def draw(self, pipeline, projection, view, transform, view_pos):
glUseProgram(pipeline.shaderProgram)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "La"), 162/255, 215/255, 213/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ld"), 162/255, 215/255, 213/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ls"), 162/255, 215/255, 213/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ka"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Kd"), 0.1, 0.1, 0.1)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ks"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "lightPosition"), 50, 50, 50)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "viewPosition"), *view_pos)
glUniform1ui(glGetUniformLocation(pipeline.shaderProgram, "shininess"), 10000)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "constantAttenuation"), 0.001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "linearAttenuation"), 0.0001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "quadraticAttenuation"), 0.0001)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "model"), 1, GL_TRUE, transform)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "projection"), 1, GL_TRUE, projection)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "view"), 1, GL_TRUE, view)
pipeline.drawShape(self.GPU)
class Pikachu():
def __init__(self): self.GPU = es.toGPUShape(oh.readOBJ(os.path.join('mod','tex','pikachu.obj'), (250/255, 214/255, 29/255)), GL_REPEAT, GL_NEAREST)
def draw(self, pipeline, projection, view, transform, view_pos):
glUseProgram(pipeline.shaderProgram)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "La"), 250/255, 214/255, 29/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ld"), 250/255, 214/255, 29/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ls"), 250/255, 214/255, 29/255)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ka"), 0.5, 0.5, 0.5)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Kd"), 0.1, 0.1, 0.1)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "Ks"), 1.0, 1.0, 1.0)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "lightPosition"), *view_pos)
glUniform3f(glGetUniformLocation(pipeline.shaderProgram, "viewPosition"), *view_pos)
glUniform1ui(glGetUniformLocation(pipeline.shaderProgram, "shininess"), 10000000)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "constantAttenuation"), 0.001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "linearAttenuation"), 0.0001)
glUniform1f(glGetUniformLocation(pipeline.shaderProgram, "quadraticAttenuation"), 0.0001)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "model"), 1, GL_TRUE, transform)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "projection"), 1, GL_TRUE, projection)
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, "view"), 1, GL_TRUE, view)
pipeline.drawShape(self.GPU)
class Food():
def __init__(self):
self.x, self.y = 0, 0
self.z = 1
self.time = 0
self.tick = np.pi/180*2
self.s = 3
self.models = {
'charmander' : Charmander(),
'bulbasaur' : Bulbasaur(),
'squirtle' : Squirtle(),
'pikachu' : Pikachu()
}
self.list = ['charmander', 'bulbasaur', 'squirtle', 'pikachu']
self.prob = [0.3, 0.3, 0.3, 0.1]
self.choice_model()
self.transform = np.matmul(tr.translate(self.x,self.y,self.z), np.matmul(tr.rotationZ(self.time*4), np.matmul(tr.rotationX(np.pi/2), tr.uniformScale(self.s))))
def choice_model(self):
self.status = np.random.choice(self.list, p=self.prob)
self.model = self.models[self.status]
def draw(self, pipeline, projection, view):
self.model.draw(pipeline, projection, view, self.transform, (self.x, self.y, self.z))
def update(self):
self.time += self.tick
self.z = np.exp(2*np.sin(2*self.time))/4
self.transform = np.matmul(tr.translate(self.x,self.y,self.z), np.matmul(tr.rotationZ(self.time*4), np.matmul(tr.rotationX(np.pi/2), tr.uniformScale(self.s))))
def respawn(self, snake, obstacle):
self.choice_model()
x, y = self.x, self.y; self.x, self.y = np.random.uniform(-97.9, 97.9, 2)
if (self.x - x)**2 + (self.y - y)**2 < self.s**2: self.respawn(snake, obstacle); return
        if (self.x - obstacle.x)**2 + (self.y - obstacle.y)**2 < self.s**2: self.respawn(snake, obstacle); return
parts = iter(snake.body); _=next(parts,None)
for part in parts:
if (self.x - part.x)**2 + (self.y - part.y)**2 < self.s**2: self.respawn(snake, obstacle); return
self.transform = np.matmul(tr.translate(self.x,self.y,self.z), np.matmul(tr.rotationZ(self.time*4), np.matmul(tr.rotationX(np.pi/2), tr.uniformScale(self.s))))
|
python
|
from setuptools import setup
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setup(name='elektrum',
version='0.1',
url='https://github.com/zxpower/elektrum',
author='Reinholds Zviedris (zxpower)',
author_email='[email protected]',
description="Utility to authorize and scrape your smart meter consumption data from Elektrum website",
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache-2.0 License",
"Operating System :: OS Independent",
],
packages=['elektrum'],
python_requires=">=3.6",
)
|
python
|
try:
    import simplejson as json
except ImportError:
    import json
import requests
import os.path
def autodetect_proxy():
proxies = {}
proxy_https = os.getenv('HTTPS_PROXY', os.getenv('https_proxy', None))
proxy_http = os.getenv('HTTP_PROXY', os.getenv('http_proxy', None))
if proxy_https:
proxies['https'] = proxy_https
if proxy_http:
proxies['http'] = proxy_http
return proxies
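# Example: with HTTPS_PROXY=http://proxy.local:3128 set in the environment (a hypothetical
# value) and no HTTP_PROXY, autodetect_proxy() returns {'https': 'http://proxy.local:3128'}.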
def fetch_dict(endpoint):
data = None
if endpoint.startswith(('http:', 'https:', 'ftp:')):
proxies = autodetect_proxy()
if proxies:
response = requests.get(url=endpoint, proxies=proxies)
else:
response = requests.get(url=endpoint)
try:
data = response.json()
except json.decoder.JSONDecodeError:
data = {}
else:
data = {}
if os.path.exists(endpoint):
with open(endpoint) as fd:
try:
data = json.load(fd)
except json.JSONDecodeError:
data = {}
return data
def create_default_json_msg():
msg = {
"sensors": [],
"values": [],
"labels": [],
"entry_status": "",
"eligible": "",
"_checkpoint": {
"progress": [],
"records": [],
"global_record": {}
},
}
return msg
def save_dict(endpoint, data):
if endpoint.startswith(('http:', 'https:', 'ftp:')):
proxies = autodetect_proxy()
if proxies:
_ = requests.patch(url=endpoint, headers={'content-type':'application/json'}, data=json.dumps(data, indent=4, sort_keys=True), proxies=proxies)
else:
_ = requests.patch(url=endpoint, headers={'content-type':'application/json'}, data=json.dumps(data, indent=4, sort_keys=True))
else:
with open(endpoint, 'w') as fd:
json.dump(data, fd, indent=4, sort_keys=True)
|
python
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.keras.callbacks import Callback
from scipy.optimize import linear_sum_assignment
def unsupervised_labels(y, yp, n_classes, n_clusters):
"""Linear assignment algorithm
Arguments:
y (tensor): Ground truth labels
yp (tensor): Predicted clusters
n_classes (int): Number of classes
n_clusters (int): Number of clusters
"""
assert n_classes == n_clusters
# initialize count matrix
C = np.zeros([n_clusters, n_classes])
# populate count matrix
for i in range(len(y)):
C[int(yp[i]), int(y[i])] += 1
# optimal permutation using Hungarian Algo
# the higher the count, the lower the cost
# so we use -C for linear assignment
row, col = linear_sum_assignment(-C)
# compute accuracy
accuracy = C[row, col].sum() / C.sum()
return accuracy * 100
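# Example (sketch): if the clustering is a pure relabelling of the classes, e.g.
# y = [0, 0, 1, 1, 2, 2] and yp = [2, 2, 0, 0, 1, 1] with n_classes = n_clusters = 3,
# the optimal cluster-to-class assignment recovers every label and the returned accuracy is 100.0.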
def center_crop(image, crop_size=4):
"""Crop the image from the center
Argument:
crop_size (int): Number of pixels to crop
from each side
"""
height, width = image.shape[0], image.shape[1]
x = height - crop_size
y = width - crop_size
dx = dy = crop_size // 2
image = image[dy:(y + dy), dx:(x + dx), :]
return image
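# Example: for a square 32x32x3 image and crop_size=4, the returned image is 28x28x3
# (crop_size//2 pixels are trimmed from each border).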
def lr_schedule(epoch):
"""Simple learning rate scheduler
Argument:
epoch (int): Which epoch
"""
lr = 1e-3
power = epoch // 400
lr *= 0.8**power
return lr
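# Example: epochs 0-399 use lr=1e-3, epochs 400-799 use 8e-4, epochs 800-1199 use 6.4e-4.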
class AccuracyCallback(Callback):
"""Callback to compute the accuracy every epoch by
calling the eval() method.
Argument:
net (Model): Object with a network model to evaluate.
Must support the eval() method.
"""
def __init__(self, net):
super(AccuracyCallback, self).__init__()
self.net = net
def on_epoch_end(self, epoch, logs=None):
self.net.eval()
|
python
|
from fuzzer import Ascii, Utf8, Num
from grammar import Cfg, Syms
IdChar = Utf8.exclude("?", ":", "}").with_sym_name("IdChar")
IdStartChar = IdChar.exclude("=").with_sym_name("IdStartChar")
RawChar = Utf8.exclude("{", "\\").with_sym_name("RawChar")
ExprLitChar = RawChar.exclude(":", "}").with_sym_name("ExprLitChar")
AsciiChar = Ascii
s = Syms()
cfg = (
Cfg(s.sym("S"))
.a(s.sym("S"))
.a(s.sym("S"), [s.sym("Elem"), s.sym("S")], [s.sym("Elem")])
.a(s.sym("Elem"), [s.sym("Raw")], ["{", s.sym("Expr"), "}"])
.a(s.sym("Expr"), [s.sym("BasicExpr")], [s.sym("CondExpr")])
.a(
s.sym("BasicExpr"),
[s.sym("IdOrLit"), ":", s.sym("Arg")],
[s.sym("IdOrLit"), s.sym("OptionalColon")],
[":", s.sym("Arg")],
)
.a(
s.sym("CondExpr"),
[s.sym("Id"), "?", s.sym("ExprLiteral"), s.sym("OptionalColon")],
[s.sym("Id"), "?", s.sym("ExprLiteral"), ":", s.sym("ExprLiteral")],
)
.a(
s.sym("Raw"),
[RawChar, s.sym("Raw")],
[s.sym("EscapeSequence"), s.sym("Raw")],
[s.sym("Nil")],
)
.a(s.sym("OptionalColon"), [":"], [s.sym("Nil")])
.a(s.sym("IdOrLit"), [IdStartChar, s.sym("Id")], ["=", s.sym("ExprLiteral")])
.a(
s.sym("Id"),
[IdStartChar, s.sym("RestOfId")],
[s.sym("Nil")],
)
.a(s.sym("RestOfId"), [IdChar, s.sym("RestOfId")], [s.sym("Nil")])
.a(
s.sym("ExprLiteral"),
[s.sym("EscapeSequence"), s.sym("ExprLiteral")],
[ExprLitChar, s.sym("ExprLiteral")],
["{", s.sym("Expr"), "}", s.sym("ExprLiteral")],
[s.sym("Nil")],
)
.a(
s.sym("Arg"),
[s.sym("Align"), s.sym("OptionalU8"), s.sym("Prec"), s.sym("Transform")],
)
.a(
s.sym("Align"),
[s.sym("AlignChar"), AsciiChar],
[s.sym("Nil")],
)
.a(s.sym("AlignChar"), ["<"], [">"])
.a(s.sym("Prec"), [".", s.sym("OptionalU8")], [s.sym("Nil")])
.a(s.sym("OptionalU8"), [Num(255)], [s.sym("Nil")])
.a(s.sym("Transform"), ["b"], ["x"], [s.sym("Nil")])
.a(s.sym("EscapeSequence"), ["\\", AsciiChar])
.a(s.sym("Nil"), [""])
.validate()
)
|
python
|
class Fila:
def __init__(self):
self.data = []
def is_empty(self):
return self.data == []
def get_size(self):
return len(self.data)
def peek(self):
if self.is_empty():
raise IndexError
else:
return self.data[0]
def enqueue(self, item):
self.data.append(item)
def dequeue(self):
return self.data.pop(0)
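# Example usage (sketch):
#   fila = Fila()
#   fila.enqueue('a')
#   fila.enqueue('b')
#   fila.peek()      # -> 'a'
#   fila.dequeue()   # -> 'a'
#   fila.get_size()  # -> 1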
|
python
|
"""
Test cases to validate centos7 base image configurations
"""
import subprocess
import pytest
import testinfra
DOCKER_IMAGE_NAME = 'python:latest'
# scope='session' uses the same container for all the tests;
# scope='function' uses a new container per test function.
@pytest.fixture(scope='session')
def host():
"""
Pytest fixture to manage the lifecycle of a container of interest using the specified DOCKER_IMAGE_NAME
:return: testinfra connection to the container
"""
docker_id = subprocess.check_output(
[
'docker',
'run',
'-d',
'-t',
'-i',
DOCKER_IMAGE_NAME,
'/bin/bash'
]
).decode().strip()
# return a testinfra connection to the container
yield testinfra.get_host("docker://" + docker_id)
# at the end of the test suite, destroy the container
subprocess.check_call(['docker', 'rm', '-f', docker_id])
# Scenario: pip should be installed
def test_pip(host):
"""
Test case to check if pip is installed
:param host: reference to pytest.fixture - 'host'
:return: None
"""
pip = host.file('/usr/lib/python2.7/site-packages/pip')
assert pip.exists
# Scenario: Check Timezone
def test_tz(host):
"""
Test case to check if the time zone is AEST
:param host: reference to pytest.fixture - 'host'
:return: None
"""
actual_output = host.run('date +"%Z %z"').stdout
assert 'AEST' in actual_output
# Scenario: Check if all the yum packages in Dockerfile are installed
def test_yum_packages(host):
"""
Test case to check if all the required yum packages are installed
:param host: reference to pytest.fixture - 'host'
:return:
"""
# yum install python-pip installs pip based on the python version, since python2 is default in centos, checking
# for python2-pip instead of python-pip
for pkg in ['python-devel', 'python2-pip', 'epel-release']:
assert host.package(pkg).is_installed
|
python
|
import numpy as np
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from PyKEP import epoch, DAY2SEC, planet_ss, AU, MU_SUN, lambert_problem
from PyKEP.orbit_plots import plot_planet, plot_lambert
mpl.rcParams['legend.fontsize'] = 10
fig = plt.figure()
ax = fig.gca(projection='3d')
t1 = epoch(0)
t2 = epoch(740)
dt = (t2.mjd2000 - t1.mjd2000) * DAY2SEC
ax.scatter(0,0,0, color='y')
pl = planet_ss('earth')
plot_planet(ax,pl, t0=t1, color=(0.8,0.8,1), legend=True, units = AU)
rE,vE = pl.eph(t1)
pl = planet_ss('mars')
plot_planet(ax,pl, t0=t2, color=(0.8,0.8,1), legend=True, units = AU)
rM, vM = pl.eph(t2)
l = lambert_problem(rE,rM,dt,MU_SUN)
nmax = l.get_Nmax()
print "max number of revolutions",nmax
plot_lambert(ax,l , color=(1,0,0), legend=True, units = AU)
for i in range(1,nmax*2+1):
print i
plot_lambert(ax,l,sol=i, color=(1,0,i/float(nmax*2)), legend=True, units = AU)
def axisEqual3D(ax):
extents = np.array([getattr(ax, 'get_{}lim'.format(dim))() for dim in 'xyz'])
sz = extents[:,1] - extents[:,0]
centers = np.mean(extents, axis=1)
maxsize = max(abs(sz))
r = maxsize/2
for ctr, dim in zip(centers, 'xyz'):
getattr(ax, 'set_{}lim'.format(dim))(ctr - r, ctr + r)
axisEqual3D(ax)
plt.show()
|
python
|
import os, sys
from threading import Thread, active_count
from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog
from PyQt5.QtGui import QIcon
from logic import get_cheaters
from layout import Ui_CheatChecker
# If layout shows an import error, generate it using:
# pyuic5 checker.ui -o layout.py
class CheatChecker(QMainWindow, Ui_CheatChecker):
def __init__(self):
super().__init__()
self.setupUi(self)
self.cheaters = self.folder = None
self.setWindowIcon(QIcon(os.path.join(getattr(sys, "_MEIPASS", "."), "checker.ico")))
self.folderEdit.textChanged.connect(self.setFolder)
self.setFolderButton.clicked.connect(self.setFolder)
self.getCheatersButton.clicked.connect(self.getCheaters)
self.cheatersList.currentTextChanged.connect(self.openCodes)
self.cheatersSearchEdit.textChanged.connect(self.searchCheaters)
self.getCheatersButton.setFocus()
def setFolder(self, folder=None):
self.folder = folder or str(QFileDialog.getExistingDirectory(self, "Select Codes Directory"))
if not folder: self.folderEdit.setText(self.folder)
def getCheaters(self):
if not self.folder:
self.setFolder()
if not self.folder: return
if active_count() == 1:
Thread(target=self.processCheaters).start()
def searchCheaters(self, keyword):
if self.cheaters:
keyword = keyword.lower()
self.cheatersList.clear()
self.cheatersList.addItems(key for key in self.cheaters.keys() if not keyword or keyword in key)
def processCheaters(self):
self.cheatersList.clear()
self.cheatersSearchEdit.clear()
self.cheaters = get_cheaters(self.folder, self.mainCheckBox.isChecked())
self.cheatersList.addItems(self.cheaters.keys())
self.cheatersList.setMinimumWidth(self.cheatersList.sizeHintForColumn(0) + 36)
self.cheatersLabel.setText("Cheaters in " + self.folder.rsplit("/", 1)[1] + ":")
def openCodes(self, index):
if not index: return
file1, file2 = self.cheaters[index]
self.code1Label.setText("Code 1: " + file1)
self.code1TextArea.setText(open(os.path.join(self.folder, file1)).read())
self.code2Label.setText("Code 2: " + file2)
self.code2TextArea.setText(open(os.path.join(self.folder, file2)).read())
if __name__ == "__main__":
app = QApplication(sys.argv)
main = CheatChecker()
main.show()
sys.exit(app.exec_())
|
python
|
# -*- coding: utf-8 -*-
import pandas as pd
import os
def read_csv_data_in_directory(directory_path, variables_to_keep=None):
"""
    Read a directory of .csv files and merge them into a single data frame
Parameters
----------
directory_path : str
absolute path to directory containing csv files
variables_to_keep : list (optional)
variables to keep in `cumulative_df`
Raises
------
ValueError
when directory contains a file that is not a .csv
Returns
-------
cumulative_df : pandas.DataFrame
data from all csvs in the directory
"""
files_in_folder = os.listdir(directory_path)
cumulative_df = None
for file in files_in_folder:
if not file.endswith(".csv"):
raise ValueError(f"{file} is not a .csv file")
absolute_path = os.path.join(directory_path, file)
data_from_file = pd.read_csv(absolute_path)
if cumulative_df is None:
cumulative_df = data_from_file
else:
cumulative_df = cumulative_df.append(data_from_file)
    if variables_to_keep:
        return cumulative_df[variables_to_keep]
    return cumulative_df
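# Example usage (sketch; the directory path and column names are illustrative):
#   df = read_csv_data_in_directory("/data/monthly_reports", variables_to_keep=["date", "value"])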
|
python
|
import tensorflow as tf
import numpy as np
import time
with open('letters_source.txt', 'r', encoding = 'utf-8') as f:
source_data = f.read()
with open('letters_target.txt', 'r', encoding = 'utf-8') as f:
target_data = f.read()
def extract_character_vocab(data):
#construct mapping table
special_words = ['<PAD>','<UNK>','<GO>','<EOS>']
set_words = list(set([char for line in data.split('\n') for char in line]))
#add the special words in vocab
int_to_vocab = {idx : word for idx, word in enumerate(special_words + set_words)}
vocab_to_int = {word : idx for idx, word in int_to_vocab.items()}
return int_to_vocab, vocab_to_int
#mapping table
source_int_to_letter, source_letter_to_int = extract_character_vocab(source_data)
target_int_to_letter, target_letter_to_int = extract_character_vocab(target_data)
#convert character
source_int = [[source_letter_to_int.get(letter, source_letter_to_int['<UNK>'])
               for letter in line] for line in source_data.split('\n')]
# each line is encoded character by character; <UNK> is dict.get's default for characters missing from the vocab
target_int = [[target_letter_to_int.get(letter, target_letter_to_int['<UNK>'])
for letter in line] + [target_letter_to_int['<EOS>']] for line in target_data.split('\n')]
#<EOS> at the end of every word
def get_inputs():
'''
input tensor of model
'''
inputs = tf.placeholder(tf.int32, [None, None], name='inputs')
targets = tf.placeholder(tf.int32, [None, None], name='targets')
learning_rate = tf.placeholder(tf.float32, name='learning_rate')
target_sequence_length = tf.placeholder(tf.int32, (None,), name='target_sequence_length')
max_target_sequence_length = tf.reduce_max(target_sequence_length, name='max_target_len')
source_sequence_length = tf.placeholder(tf.int32, (None,), name='source_sequence_length')
return inputs, targets, learning_rate, target_sequence_length, max_target_sequence_length, source_sequence_length
def get_encoder_layer(input_data, rnn_size, num_layers, source_sequence_length,
source_vocab_size, encoding_embedding_size):
#Encoder embedding
encoder_embed_input = tf.contrib.layers.embed_sequence(input_data, source_vocab_size, encoding_embedding_size)
#RNN Cell
def get_lstm_cell(rnn_size):
lstm_cell = tf.contrib.rnn.LSTMCell(rnn_size, initializer = tf.random_uniform_initializer(-0.1, 0.1, seed = 2))
return lstm_cell
cell = tf.contrib.rnn.MultiRNNCell([get_lstm_cell(rnn_size) for _ in range(num_layers)])
encoder_output, encoder_state = tf.nn.dynamic_rnn(cell, encoder_embed_input,
sequence_length = source_sequence_length, dtype = tf.float32)
return encoder_output, encoder_state
def process_decoder_input(data, vocab_to_int, batch_size):
ending = tf.strided_slice(data, [0, 0], [batch_size, -1], [1, 1])
decoder_input = tf.concat([tf.fill([batch_size, 1], vocab_to_int['<GO>']), ending], 1)
return decoder_input
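# Example: for a target batch row [12, 7, 9, <EOS>], the decoder input row becomes
# [<GO>, 12, 7, 9]: the last token is dropped and <GO> is prepended.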
def decoding_layer(target_letter_to_int, decoding_embedding_size, num_layers, rnn_size,
target_sequence_length, max_target_sequence_length, encoder_state, decoder_input):
# 1. Embedding
target_vocab_size = len(target_letter_to_int)
decoder_embeddings = tf.Variable(tf.random_uniform([target_vocab_size, decoding_embedding_size]))
decoder_embed_input = tf.nn.embedding_lookup(decoder_embeddings, decoder_input)
# 2. Construct RNN Cells in Decoder
def get_decoder_cell(rnn_size):
decoder_cell = tf.contrib.rnn.LSTMCell(rnn_size,
initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=2))
return decoder_cell
cell = tf.contrib.rnn.MultiRNNCell([get_decoder_cell(rnn_size) for _ in range(num_layers)])
# 3. Output FC layer
output_layer = tf.layers.Dense(target_vocab_size,
kernel_initializer = tf.truncated_normal_initializer(mean = 0.0, stddev=0.1))
# 4. Training decoder
with tf.variable_scope("decode"):
# get object of help
training_helper = tf.contrib.seq2seq.TrainingHelper(inputs=decoder_embed_input,
sequence_length=target_sequence_length,
time_major=False)
# construct decoder
training_decoder = tf.contrib.seq2seq.BasicDecoder(cell,
training_helper,
encoder_state,
output_layer)
training_decoder_output, _, _ = tf.contrib.seq2seq.dynamic_decode(training_decoder,
impute_finished=True,
maximum_iterations=max_target_sequence_length)
# 5. Predicting decoder
# share param with training
with tf.variable_scope("decode", reuse=True):
start_tokens = tf.tile(tf.constant([target_letter_to_int['<GO>']], dtype=tf.int32), [batch_size],
name='start_tokens')
predicting_helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(decoder_embeddings,
start_tokens,
target_letter_to_int['<EOS>'])
predicting_decoder = tf.contrib.seq2seq.BasicDecoder(cell,
predicting_helper,
encoder_state,
output_layer)
predicting_decoder_output, _, _ = tf.contrib.seq2seq.dynamic_decode(predicting_decoder, impute_finished=True,
maximum_iterations=max_target_sequence_length)
return training_decoder_output, predicting_decoder_output
def seq2seq_model(input_data, targets, lr, target_sequence_length,
max_target_sequence_length, source_sequence_length,
source_vocab_size, target_vocab_size,
encoder_embedding_size, decoder_embedding_size,
rnn_size, num_layers):
    # acquire the encoder state output
_, encoder_state = get_encoder_layer(input_data,
rnn_size,
num_layers,
source_sequence_length,
source_vocab_size,
encoding_embedding_size)
# decoder's input after preprocessing
decoder_input = process_decoder_input(targets, target_letter_to_int, batch_size)
# state and input to decoder
training_decoder_output, predicting_decoder_output = decoding_layer(target_letter_to_int,
decoding_embedding_size,
num_layers,
rnn_size,
target_sequence_length,
max_target_sequence_length,
encoder_state,
decoder_input)
return training_decoder_output, predicting_decoder_output
#hyper
# Number of Epochs
epochs = 60
# Batch Size
batch_size = 128
# RNN Size
rnn_size = 50
# Number of Layers
num_layers = 2
# Embedding Size
encoding_embedding_size = 15
decoding_embedding_size = 15
# Learning Rate
learning_rate = 0.001
# construct graph
train_graph = tf.Graph()
with train_graph.as_default():
# get model's input
input_data, targets, lr, target_sequence_length, max_target_sequence_length, source_sequence_length = get_inputs()
training_decoder_output, predicting_decoder_output = seq2seq_model(input_data,
targets,
lr,
target_sequence_length,
max_target_sequence_length,
source_sequence_length,
len(source_letter_to_int),
len(target_letter_to_int),
encoding_embedding_size,
decoding_embedding_size,
rnn_size,
num_layers)
training_logits = tf.identity(training_decoder_output.rnn_output, 'logits')
predicting_logits = tf.identity(predicting_decoder_output.sample_id, name='predictions')
masks = tf.sequence_mask(target_sequence_length, max_target_sequence_length, dtype=tf.float32, name='masks')
with tf.name_scope("optimization"):
# Loss function
cost = tf.contrib.seq2seq.sequence_loss(
training_logits,
targets,
masks)
# Optimizer
optimizer = tf.train.AdamOptimizer(lr)
# Gradient Clipping
gradients = optimizer.compute_gradients(cost)
capped_gradients = [(tf.clip_by_value(grad, -5., 5.), var) for grad, var in gradients if grad is not None]
train_op = optimizer.apply_gradients(capped_gradients)
def pad_sentence_batch(sentence_batch, pad_int):
#padding
max_sentence = max([len(sentence) for sentence in sentence_batch])
return [sentence + [pad_int] * (max_sentence - len(sentence)) for sentence in sentence_batch]
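# Example: pad_sentence_batch([[5, 6, 7], [8]], 0) -> [[5, 6, 7], [8, 0, 0]]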
def get_batches(targets, sources, batch_size, source_pad_int, target_pad_int):
#in order to get batch
for batch_i in range(0, len(sources)//batch_size):
start_i = batch_i * batch_size
sources_batch = sources[start_i:start_i + batch_size]
targets_batch = targets[start_i:start_i + batch_size]
#padding
pad_sources_batch = np.array(pad_sentence_batch(sources_batch, source_pad_int))
pad_targets_batch = np.array(pad_sentence_batch(targets_batch, target_pad_int))
# record each len
targets_lengths = []
for target in targets_batch:
targets_lengths.append(len(target))
source_lengths = []
for source in sources_batch:
source_lengths.append(len(source))
yield pad_targets_batch, pad_sources_batch, targets_lengths, source_lengths
#train and vali
train_source = source_int[batch_size:]
train_target = target_int[batch_size:]
# one batch to validate
valid_source = source_int[:batch_size]
valid_target = target_int[:batch_size]
(valid_targets_batch, valid_sources_batch, valid_targets_lengths, valid_sources_lengths) = next(get_batches(valid_target, valid_source, batch_size,
source_letter_to_int['<PAD>'],
target_letter_to_int['<PAD>']))
display_step = 50 # print the loss every 50 batches
checkpoint = "trained_model.ckpt"
with tf.Session(graph=train_graph) as sess:
sess.run(tf.global_variables_initializer())
for epoch_i in range(1, epochs+1):
for batch_i, (targets_batch, sources_batch, targets_lengths, sources_lengths) in enumerate(
get_batches(train_target, train_source, batch_size,
source_letter_to_int['<PAD>'],
target_letter_to_int['<PAD>'])):
_, loss = sess.run(
[train_op, cost],
{input_data: sources_batch,
targets: targets_batch,
lr: learning_rate,
target_sequence_length: targets_lengths,
source_sequence_length: sources_lengths})
if batch_i % display_step == 0:
# calculate validation loss
validation_loss = sess.run(
[cost],
{input_data: valid_sources_batch,
targets: valid_targets_batch,
lr: learning_rate,
target_sequence_length: valid_targets_lengths,
source_sequence_length: valid_sources_lengths})
print('Epoch {:>3}/{} Batch {:>4}/{} - Training Loss: {:>6.3f} - Validation loss: {:>6.3f}'
.format(epoch_i,
epochs,
batch_i,
len(train_source) // batch_size,
loss,
validation_loss[0]))
# save model
saver = tf.train.Saver()
saver.save(sess, checkpoint)
print('Model Trained and Saved')
def source_to_seq(text):
'''
convert source data
'''
sequence_length = 7
return [source_letter_to_int.get(word,
source_letter_to_int['<UNK>']) for word in text] + [source_letter_to_int['<PAD>']]*(sequence_length-len(text))
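# Example: with sequence_length 7, a 5-letter word maps to its 5 character ids followed by
# two <PAD> ids; characters missing from the vocab fall back to <UNK>.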
input_word = 'zhengjiapengniubi'
text = source_to_seq(input_word)
checkpoint = "./trained_model.ckpt"
loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
# load model
loader = tf.train.import_meta_graph(checkpoint + '.meta')
loader.restore(sess, checkpoint)
input_data = loaded_graph.get_tensor_by_name('inputs:0')
logits = loaded_graph.get_tensor_by_name('predictions:0')
source_sequence_length = loaded_graph.get_tensor_by_name('source_sequence_length:0')
target_sequence_length = loaded_graph.get_tensor_by_name('target_sequence_length:0')
answer_logits = sess.run(logits, {input_data: [text]*batch_size,
target_sequence_length: [len(input_word)]*batch_size,
source_sequence_length: [len(input_word)]*batch_size})[0]
pad = source_letter_to_int["<PAD>"]
print('Original INPUT:', input_word)
print('\nSource')
print(' Word Num.: {}'.format([i for i in text]))
print(' Input Words: {}'.format(" ".join([source_int_to_letter[i] for i in text])))
print('\nTarget')
print(' Word Num.: {}'.format([i for i in answer_logits if i != pad]))
print(' Response Words: {}'.format(" ".join([target_int_to_letter[i] for i in answer_logits if i != pad])))
|
python
|
from functools import partial
import numpy as np
import gym
import gym_rock_paper_scissors
import gym_connect4
from regym.environments import generate_task, EnvType
from regym.environments.wrappers import FrameStack
from regym.environments.tasks import RegymAsyncVectorEnv
def test_can_stack_frames_singleagent_env():
num_stack = 3
frame_stack = partial(FrameStack, num_stack=num_stack)
pendulum_task = generate_task('Pendulum-v0')
stack_pendulum_task = generate_task('Pendulum-v0',
wrappers=[frame_stack])
assert stack_pendulum_task.observation_dim == (num_stack, *pendulum_task.observation_dim)
def test_can_stack_frames_sequential_multiagent_env():
num_stack = 4
frame_stack = partial(FrameStack, num_stack=num_stack)
connect_4_task = generate_task('Connect4-v0', EnvType.MULTIAGENT_SEQUENTIAL_ACTION)
stack_connect_4_task = generate_task('Connect4-v0', EnvType.MULTIAGENT_SEQUENTIAL_ACTION,
wrappers=[frame_stack])
assert stack_connect_4_task.observation_dim == (num_stack, *connect_4_task.observation_dim)
num_envs = 3
vector_env = RegymAsyncVectorEnv(
stack_connect_4_task.name,
num_envs=num_envs,
wrappers=[frame_stack]
)
actual_obs = vector_env.reset()
    # Standard Connect4 observation shape is (3, 7, 6)
# NOTE: Think of board as being sideways (chips fall right-to-left)
single_env_initial_observation = np.array(
[[[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.]],
[[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.]],
[[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0., 0.]]]
)
# We extend by number of stacked frames
# So that per environment observation shape is (num_stacks, 3, 7, 6)
stacked_single_env_initial_observation = np.array(
[single_env_initial_observation for _ in range(num_stack)]
)
# We extend by number of environments
# So that each agent receives observation of shape (num_envs, num_stack, 3, 7, 6)
expected_player_obs = np.array(
[stacked_single_env_initial_observation for _ in range(num_envs)]
)
num_agents = 2
for i in range(num_agents):
np.testing.assert_array_equal(expected_player_obs, actual_obs[i])
|
python
|
import numpy as np
import sympy
import itertools
import math
import mpmath
import warnings
from qubricks.operator import Operator
DEBUG = False
def debug(*messages):
if DEBUG:
for message in messages:
            print message,
print
class Perturb(object):
'''
`Perturb` is a class that allows one to perform degenerate perturbation theory.
The perturbation theory logic is intentionally separated into a different class for clarity.
Currently it only supports using `RSPT` for perturbation theory, though in the future
this may be extended to `Kato` perturbation theory. The advantage of using this class
as compared to directly using the `RSPT` class is that the energies and eigenstates
can be computed cumulatively, as well as gaining access to shorthand constructions
of effective Hamiltonians.
:param H_0: The unperturbed Hamiltonian to consider.
:type H_0: Operator, sympy matrix or numpy array
:param V: The Hamiltonian perturbation to consider.
:type V: Operator, sympy matrix or numpy array
:param subspace: The state indices to which attention should be restricted.
:type subspace: list of int
'''
def __init__(self, H_0=None, V=None, subspace=None):
self.H_0 = H_0
self.V = V
self.__subspace_default = list(subspace) if subspace is not None else None
self.__rspt = RSPT(self.H_0, self.V, self.__subspace())
@property
def dim(self):
'''
The dimension of :math:`H_0`.
'''
return self.H_0.shape[0]
@property
def pt(self):
'''
A reference to the perturbation calculating object (e.g. RSPT).
'''
return self.__rspt
def __subspace(self, subspace=None):
if subspace is not None:
return subspace
if self.__subspace_default is not None:
return self.__subspace_default
return range(self.dim)
def E(self, index, order=0, cumulative=True):
'''
This method returns the `index` th eigenvalue correct to order `order` if
        `cumulative` is `True`; or the `order` th correction otherwise.
:param index: The index of the state to be considered.
:type index: int
:param order: The order of perturbation theory to apply.
:type order: int
:param cumulative: `True` if all order corrections up to `order` should be summed
(including the initial unperturbed energy).
:type cumulative: bool
'''
if cumulative:
return sum([self.pt.E(index,ord) for ord in range(order + 1)])
else:
return self.pt.E(index,order)
def Psi(self, index, order=0, cumulative=True):
'''
This method returns the `index` th eigenstate correct to order `order` if
        `cumulative` is `True`; or the `order` th correction otherwise.
:param index: The index of the state to be considered.
:type index: int
:param order: The order of perturbation theory to apply.
:type order: int
:param cumulative: `True` if all order corrections up to `order` should be summed
(including the initial unperturbed state).
:type cumulative: bool
'''
if cumulative:
return sum([self.pt.Psi(index,ord) for ord in range(order + 1)])
else:
return self.pt.Psi(index,order)
def Es(self, order=0, cumulative=True, subspace=None):
'''
        This method returns the energies associated with the indices
        in `subspace`. Internally this uses `Perturb.E`, passing through
the keyword arguments `order` and `cumulative` for each index in
subspace.
:param order: The order of perturbation theory to apply.
:type order: int
:param cumulative: `True` if all order corrections up to `order` should be summed
(including the initial unperturbed energy).
:type cumulative: bool
:param subspace: The set of indices for which to return the associated energies.
:type subspace: list of int
'''
Es = []
for i in self.__subspace(subspace):
if cumulative:
Es.append(sum([self.pt.E(i,ord) for ord in range(order + 1)]))
else:
Es.append(self.pt.E(i,order))
return np.array(Es, dtype=object)
def Psis(self, order=0, cumulative=True, subspace=None):
'''
        This method returns the eigenstates associated with the indices
        in `subspace`. Internally this uses `Perturb.Psi`, passing through
the keyword arguments `order` and `cumulative` for each index in
subspace.
:param order: The order of perturbation theory to apply.
:type order: int
:param cumulative: `True` if all order corrections up to `order` should be summed
(including the initial unperturbed state).
:type cumulative: bool
:param subspace: The set of indices for which to return the associated energies.
:type subspace: list of int
'''
psis = []
for i in self.__subspace(subspace):
if cumulative:
psis.append(sum([self.pt.Psi(i,ord) for ord in range(order + 1)]))
else:
psis.append(self.pt.Psi(i,order))
return np.array(psis, dtype=object)
def H_eff(self, order=0, cumulative=True, subspace=None, adiabatic=False):
'''
This method returns the effective Hamiltonian on the subspace indicated,
using energies and eigenstates computed using `Perturb.E` and `Perturb.Psi`.
If `adiabatic` is `True`, the effective Hamiltonian describing the energies of
the instantaneous eigenstates is returned in the basis of the instantaneous
eigenstates (i.e. the Hamiltonian is diagonal with energies corresponding to
the instantaneous energies). Otherwise, the Hamiltonian returned is the sum over
the indices of the subspace of the perturbed energies multiplied by the outer
product of the corresponding perturbed eigenstates.
:param order: The order of perturbation theory to apply.
:type order: int
:param cumulative: `True` if all order corrections up to `order` should be summed
(including the initial unperturbed energies and states).
:type cumulative: bool
:param subspace: The set of indices for which to return the associated energies.
:type subspace: list of int
:param adiabatic: `True` if the adiabatic effective Hamiltonian (as described above)
should be returned. `False` otherwise.
:type adiabatic: bool
'''
        subspace = list(self.__subspace(subspace))
        H_eff = np.zeros((len(subspace), len(subspace)), dtype=object)
        for i, index in enumerate(subspace):
            E = self.E(index, order, cumulative)
            if adiabatic:
                H_eff[i, i] = E
            else:
                # Restrict the perturbed eigenstate to the requested subspace so the
                # outer product matches the shape of the effective Hamiltonian.
                psi = self.Psi(index, order, cumulative)[np.array(subspace)]
                H_eff += E * np.outer(psi, psi)
        return H_eff
class RSPT(object):
'''
This class implements (degenerate) Rayleigh-Schroedinger Perturbation Theory.
It is geared toward generating symbolic solutions, in the hope that the perturbation
theory might provide insight into the quantum system at hand. For numerical solutions,
you are better off simply diagonalising the evaluated Hamiltonian.
    .. warning:: This class currently only supports diagonal :math:`H_0`.
:param H_0: The unperturbed Hamiltonian to consider.
:type H_0: Operator, sympy matrix or numpy array
:param V: The Hamiltonian perturbation to consider.
:type V: Operator, sympy matrix or numpy array
:param subspace: The state indices to which attention should be restricted.
:type subspace: list of int
'''
def __init__(self, H_0=None, V=None, subspace=None):
self.__cache = {
'Es': {},
'Psis': {},
'inv': {}
}
self.H_0 = H_0
self.V = V
self.subspace = subspace
self.E0s, self.Psi0s = self.get_unperturbed_states()
@property
def H_0(self):
return self.__H_0
@H_0.setter
def H_0(self, H_0):
if isinstance(H_0, Operator):
self.__H_0 = np.array(H_0.symbolic())
else:
self.__H_0 = np.array(H_0)
@property
def V(self):
return self.__V
@V.setter
def V(self, V):
if isinstance(V, Operator):
self.__V = np.array(V.symbolic())
else:
self.__V = np.array(V)
def __store(self, store, index, order, value=None):
storage = self.__cache[store]
if value is None:
if index in storage:
return storage[index].get(order,None)
return None
if index not in storage:
storage[index] = {}
storage[index][order] = value
def __Es(self, index, order, value=None):
return self.__store('Es', index, order, value)
def __Psis(self, index, order, value=None):
return self.__store('Psis', index, order, value)
def get_unperturbed_states(self):
'''
This method returns the unperturbed eigenvalues and eigenstates as
a tuple of energies and state-vectors.
.. note:: This is the only method that does not support a non-diagonal
:math:`H_0`. While possible to implement, it is not currently clear
that a non-diagonal :math:`H_0` is actually terribly useful.
'''
# Check if H_0 is diagonal
if not (self.H_0 - np.diag(self.H_0.diagonal()) == 0).all():
raise ValueError("Provided H_0 is not diagonal")
E0s = []
        for i in range(self.H_0.shape[0]):
E0s.append(self.H_0[i, i])
subspace = self.subspace
if subspace is None:
subspace = range(self.H_0.shape[0])
done = set()
psi0s = [None] * len(E0s)
for i, E0 in enumerate(E0s):
if i not in done:
degenerate_subspace = np.where(np.array(E0s) == E0)[0]
if len(degenerate_subspace) > 1 and not (all(e in subspace for e in degenerate_subspace) or all(e not in subspace for e in degenerate_subspace)):
warnings.warn("Chosen subspace %s overlaps with degenerate subspace of H_0 %s. Extending the subspace to include these states." % (subspace, degenerate_subspace))
subspace = set(subspace).union(degenerate_subspace)
if len(degenerate_subspace) == 1 or i not in subspace:
v = np.zeros(self.H_0.shape[0], dtype='object')
v[i] = sympy.S('1')
psi0s[i] = v
done.add(i)
else:
m = sympy.Matrix(self.V)[tuple(degenerate_subspace), tuple(degenerate_subspace)]
l = 0
for (_energy, multiplicity, vectors) in m.eigenvects():
                        for k in range(multiplicity):
v = np.zeros(self.H_0.shape[0], dtype=object)
v[np.array(degenerate_subspace)] = np.array(vectors[k].transpose().normalized()).flatten()
psi0s[degenerate_subspace[l]] = v
done.add(degenerate_subspace[l])
l += 1
return E0s, psi0s
@property
def dim(self):
'''
The dimension of :math:`H_0`.
'''
return self.H_0.shape[0]
def E(self, index, order=0):
r'''
This method returns the `order` th correction to the eigenvalue associated
with the `index` th state using RSPT.
The algorithm:
If `order` is 0, return the unperturbed energy.
If `order` is even:
.. math::
E_n = \left< \Psi_{n/2} \right| V \left| \Psi_{n/2-1} \right> - \sum_{k=1}^{n/2} \sum_{l=1}^{n/2-1} E_{n-k-l} \left< \Psi_k \big | \Psi_l \right>
If `order` is odd:
.. math::
E_n = \left< \Psi_{(n-1)/2} \right| V \left| \Psi_{(n-1)/2} \right> - \sum_{k=1}^{(n-1)/2} \sum_{l=1}^{(n-1)/2} E_{n-k-l} \left< \Psi_k \big| \Psi_l \right>
Where subscripts indicate that the subscripted symbol is correct to
the indicated order in RSPT, and where `n` = `order`.
:param index: The index of the state to be considered.
:type index: int
:param order: The order of perturbation theory to apply.
:type order: int
'''
if self.__Es(index, order) is not None:
return self.__Es(index, order)
if order == 0:
debug("E", order, self.E0s[index])
return self.E0s[index]
        elif order % 2 == 0:
            r = self.Psi(index, order // 2).dot(self.V).dot(self.Psi(index, order // 2 - 1))
            for k in range(1, order // 2 + 1):
                for l in range(1, order // 2):
                    r -= self.E(index, order - k - l) * self.Psi(index, k).dot(self.Psi(index, l))
        else:
            r = self.Psi(index, (order - 1) // 2).dot(self.V).dot(self.Psi(index, (order - 1) // 2))
            for k in range(1, (order - 1) // 2 + 1):
                for l in range(1, (order - 1) // 2 + 1):
                    r -= self.E(index, order - k - l) * self.Psi(index, k).dot(self.Psi(index, l))
debug("E", order, r)
self.__Es(index, order, r)
return r
def inv(self, index):
r'''
This method returns: :math:`(E_0 - H_0)^{-1} P`, for use in `Psi`,
which is computed using:
.. math::
A_{ij} = \delta_{ij} \delta_{i0} (E^n_0 - E^i_0)^{-1}
Where `n` = `order`.
.. note:: In cases where a singularity would result, `0` is used instead.
This works because the projector off the subspace `P`
reduces support on the singularities to zero.
:param index: The index of the state to be considered.
:type index: int
'''
if index in self.__cache['inv']:
return self.__cache['inv'][index]
inv = np.zeros(self.H_0.shape, dtype=object)
        for i in range(self.dim):
if self.E0s[i] != self.E0s[index]:
inv[i, i] = 1 / (self.E(index, 0) - self.E0s[i])
debug("inv", inv)
self.__cache['inv'][index] = inv
return inv
def Psi(self, index, order=0):
r'''
This method returns the `order` th correction to the `index` th eigenstate using RSPT.
The algorithm:
If `order` is 0, return the unperturbed eigenstate.
Otherwise, return:
.. math::
\left| \Psi_n \right> = (E_0-H_0)^{-1} P \left( V \left|\Psi_{n-1}\right> - \sum_{k=1}^n E_k \left|\Psi_{n-k}\right> \right)
Where `P` is the projector off the degenerate subspace enveloping
the indexed state.
:param index: The index of the state to be considered.
:type index: int
:param order: The order of perturbation theory to apply.
:type order: int
'''
if self.__Psis(index, order) is not None:
return self.__Psis(index, order)
if order == 0:
debug("wf", order, self.Psi0s[index])
return self.Psi0s[index]
b = np.dot(self.V, self.Psi(index, order - 1))
        for k in range(1, order + 1):
b -= self.E(index, k) * self.Psi(index, order - k)
psi = self.inv(index).dot(b)
self.__Psis(index, order, psi)
debug("wf", order, psi)
return psi
class SWPT(object):
'''
This class implements (degenerate) Schrieffer-Wolff Perturbation Theory.
It is geared toward generating symbolic solutions, in the hope that the perturbation
theory might provide insight into the quantum system at hand. For numerical solutions,
you are better off simply diagonalising the evaluated Hamiltonian.
For more details, review:
- Bravyi, S., DiVincenzo, D. P., & Loss, D. (2011). Schrieffer-Wolff transformation for
quantum many-body systems. Annals of Physics, 326(10), 2793-2826.
:param H_0: The unperturbed Hamiltonian to consider.
:type H_0: Operator, sympy matrix or numpy array
:param V: The Hamiltonian perturbation to consider.
:type V: Operator, sympy matrix or numpy array
:param subspace: The state indices to which attention should be restricted.
:type subspace: list of int
'''
    def __init__(self, H_0=None, V=None, subspace=None):
        self.__cache = {
            'S': {},
            'S_k': {},
        }
        self.H_0 = H_0
        self.V = V
        if subspace is None:
            raise ValueError("Must define low energy subspace.")
        self.subspace = subspace
        self.P_0 = np.zeros(self.H_0.shape)
        self.Q_0 = np.zeros(self.H_0.shape)
        for i in range(self.H_0.shape[0]):
            if i in subspace:
                self.P_0[i, i] = 1
            else:
                self.Q_0[i, i] = 1
        self.V_od = self.O(self.V)
        self.V_d = self.D(self.V)
        self.E0s, self.Psi0s = self.get_unperturbed_states()
def get_unperturbed_states(self):
'''
This method returns the unperturbed eigenvalues and eigenstates as
a tuple of energies and state-vectors.
.. note:: This is the only method that does not support a non-diagonal
:math:`H_0`. While possible to implement, it is not currently clear
that a non-diagonal :math:`H_0` is actually terribly useful.
'''
# Check if H_0 is diagonal
if not (self.H_0 - np.diag(self.H_0.diagonal()) == 0).all():
raise ValueError("Provided H_0 is not diagonal")
E0s = []
        for i in range(self.H_0.shape[0]):
E0s.append(self.H_0[i, i])
subspace = self.subspace
if subspace is None:
subspace = range(self.H_0.shape[0])
done = set()
psi0s = [None] * len(E0s)
for i, E0 in enumerate(E0s):
if i not in done:
degenerate_subspace = np.where(np.array(E0s) == E0)[0]
if len(degenerate_subspace) > 1 and not (all(e in subspace for e in degenerate_subspace) or all(e not in subspace for e in degenerate_subspace)):
warnings.warn("Chosen subspace %s overlaps with degenerate subspace of H_0 %s. Extending the subspace to include these states." % (subspace, degenerate_subspace))
subspace = set(subspace).union(degenerate_subspace)
if len(degenerate_subspace) == 1 or i not in subspace:
v = np.zeros(self.H_0.shape[0], dtype='object')
v[i] = sympy.S('1')
psi0s[i] = v
done.add(i)
else:
m = sympy.Matrix(self.V)[tuple(degenerate_subspace), tuple(degenerate_subspace)]
l = 0
for (_energy, multiplicity, vectors) in m.eigenvects():
                        for k in range(multiplicity):
v = np.zeros(self.H_0.shape[0], dtype=object)
v[np.array(degenerate_subspace)] = np.array(vectors[k].transpose().normalized()).flatten()
psi0s[degenerate_subspace[l]] = v
done.add(degenerate_subspace[l])
l += 1
return E0s, psi0s
# Utility superoperators
def O(self, op):
return self.P_0.dot(op).dot(self.Q_0) + self.Q_0.dot(op).dot(self.P_0)
def D(self, op):
return self.P_0.dot(op).dot(self.P_0) + self.Q_0.dot(op).dot(self.Q_0)
def L(self, op):
denom = np.array(self.E0s).reshape((self.dim,1)) - np.array(self.E0s).reshape((1,self.dim))
denom[denom == 0] = 1. #TODO: DO THIS MORE SAFELY
return self.O(op)/denom
def hat(self, operator, operand):
return operator.dot(operand) - operand.dot(operator)
def S(self, n):
if n in self.__cache['S']:
return self.__cache['S'][n]
self.__cache['S'][n] = self._S(n)
return self.__cache['S'][n]
    def _S(self, n):
        if n < 1:
            raise ValueError("n must be greater than or equal to one.")
        elif n == 1:
            return self.L(self.V_od)
        elif n == 2:
            return -self.L(self.hat(self.V_d, self.S(1)))
        else:
            r = -self.L(self.hat(self.V_d, self.S(n - 1)))
            # k <= m  =>  j <= (n-1)/2
            for j in range(1, (n - 1) // 2 + 1):
                a = 2**(2 * j) * mpmath.bernoulli(2 * j) / mpmath.factorial(2 * j)
                r += a * self.L(self.S_k(2 * j, n - 1))
            return r
    def _partition(self, number, count=None):
        if count is not None and count <= 0:
            return set()
answer = set()
if count == 1:
answer.add((number, ))
for x in range(1, number):
ys = self._partition(number - x, count-1 if count is not None else None)
if len(ys) == 0:
continue
for y in ys:
answer.add(tuple(sorted((x, ) + y)))
return answer
    def S_k(self, k, m):
        if (k, m) in self.__cache['S_k']:
            return self.__cache['S_k'][(k, m)]
        self.__cache['S_k'][(k, m)] = self._S_k(k, m)
        return self.__cache['S_k'][(k, m)]
def _S_k(self, k, m):
indices = self._partition(m,k)
r = np.zeros(self.H_0.shape, dtype=object)
for indexes in indices:
for perm in set(itertools.permutations(indexes)):
rt = self.V_od
for i in perm: # Can ignore ordering because all permutations are considered
rt = self.hat(self.S(i), rt)
r += rt
return r
def H_eff(self, order=0, restrict=True):
H = self.H_0.dot(self.P_0)
if order >= 1:
H += self.P_0.dot(self.V).dot(self.P_0)
        for n in range(2, order + 1):
H += self.H_eff_n(n)
H = np.vectorize(sympy.nsimplify)(H)
if restrict:
subspace = np.array(self.subspace)
return H[subspace[:,None], subspace]
return H
    def H_eff_n(self, n):
        # k <= m  =>  j <= n/2
        r = 0
        for j in range(1, int(math.ceil(n / 2.)) + 1):
            b = 2 * (2**(2 * j) - 1) * mpmath.bernoulli(2 * j) / mpmath.factorial(2 * j)
            r += b * self.P_0.dot(self.S_k(2 * j - 1, n - 1)).dot(self.P_0)
        return r
@property
def dim(self):
return self.H_0.shape[0]
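# Minimal usage sketch (not part of the original module; the symbols and the 2x2
# Hamiltonian below are illustrative assumptions): second-order RSPT for a two-level
# system reproduces the familiar -v**2/e level repulsion.
if __name__ == '__main__':
    e, v = sympy.symbols('e v', positive=True)
    H_0 = np.diag(np.array([sympy.S(0), e], dtype=object))
    V = np.array([[sympy.S(0), v], [v, sympy.S(0)]], dtype=object)
    perturb = Perturb(H_0, V)
    print(perturb.E(0, order=2))   # expected: -v**2/e
    print(perturb.E(1, order=2))   # expected: e + v**2/e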
|
python
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import subprocess
import time
from resource_management import *
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.format import format
from resource_management.core.resources.system import Execute
from resource_management.core.logger import Logger
class SparkServiceCheck(Script):
def service_check(self, env):
import params
env.set_params(params)
if params.security_enabled:
spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
Execute(spark_kinit_cmd, user=params.spark_user)
command = "curl"
httpGssnegotiate = "--negotiate"
userpswd = "-u:"
insecure = "-k"
silent = "-s"
out = "-o /dev/null"
head = "-w'%{http_code}'"
url = 'http://' + params.spark_history_server_host + ':' + str(params.spark_history_ui_port)
command_with_flags = [command, silent, out, head, httpGssnegotiate, userpswd, insecure, url]
is_running = False
for i in range(1,11):
proc = subprocess.Popen(command_with_flags, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
Logger.info("Try %d, command: %s" % (i, " ".join(command_with_flags)))
(stdout, stderr) = proc.communicate()
response = stdout
if '200' in response:
is_running = True
Logger.info('Spark Job History Server up and running')
break
Logger.info("Response: %s" % str(response))
time.sleep(5)
        if not is_running:
Logger.info('Spark Job History Server not running.')
raise ComponentIsNotRunning()
if __name__ == "__main__":
SparkServiceCheck().execute()
|
python
|
# nuScenes dev-kit.
# Code written by Freddy Boulton, Eric Wolff, 2020.
import json
import os
from typing import List, Dict, Any
from nuscenes.eval.prediction.metrics import Metric, deserialize_metric
from nuscenes.prediction import PredictHelper
class PredictionConfig:
def __init__(self,
metrics: List[Metric],
seconds: int = 6,
frequency: int = 2):
"""
Data class that specifies the prediction evaluation settings.
Initialized with:
metrics: List of nuscenes.eval.prediction.metric.Metric objects.
seconds: Number of seconds to predict for each agent.
frequency: Rate at which prediction is made, in Hz.
"""
self.metrics = metrics
self.seconds = seconds
self.frequency = frequency # Hz
def serialize(self) -> Dict[str, Any]:
""" Serialize instance into json-friendly format. """
return {'metrics': [metric.serialize() for metric in self.metrics],
'seconds': self.seconds}
@classmethod
def deserialize(cls, content: Dict[str, Any], helper: PredictHelper):
""" Initialize from serialized dictionary. """
return cls([deserialize_metric(metric, helper) for metric in content['metrics']],
seconds=content['seconds'])
def load_prediction_config(helper: PredictHelper, config_name: str = 'predict_2020_icra.json') -> PredictionConfig:
"""
Loads a PredictionConfig from json file stored in eval/prediction/configs.
:param helper: Instance of PredictHelper. Needed for OffRoadRate metric.
:param config_name: Name of json config file.
:return: PredictionConfig.
"""
this_dir = os.path.dirname(os.path.abspath(__file__))
cfg_path = os.path.join(this_dir, "configs", config_name)
assert os.path.exists(cfg_path), f'Requested unknown configuration {cfg_path}'
# Load config file and deserialize it.
with open(cfg_path, 'r') as f:
config = json.load(f)
return PredictionConfig.deserialize(config, helper)
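# Minimal serialization sketch (not part of the original module; an empty metrics
# list is used purely to show the JSON shape without needing a PredictHelper):
if __name__ == "__main__":
    config = PredictionConfig(metrics=[], seconds=6, frequency=2)
    print(config.serialize())  # {'metrics': [], 'seconds': 6}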
|
python
|
"""
This file contains basic functions for calling the GIOŚ air quality API.
"""
import requests
import errors
def get_all_measuring_stations():
"""
Returns a list of all measuring stations with their details.
    Example response
------------------
[{
"id": 14,
"stationName": "Działoszyn",
"gegrLat": "50.972167",
"gegrLon": "14.941319",
"city": {
"id": 192,
"name": "Działoszyn",
"commune": {
"communeName": "Bogatynia",
"districtName": "zgorzelecki",
"provinceName": "DOLNOŚLĄSKIE"
}
},
"addressStreet": null
}]
"""
url = "https://api.gios.gov.pl/pjp-api/rest/station/findAll"
response = requests.get(url, timeout=5)
if response.status_code == requests.codes.ok:
return response.json()
else:
response.raise_for_status()
def get_all_sensors(station_id):
"""
Returns a list of all sensors for a given station.
    Example response
------------------
[{
"id": 92,
"stationId": 14,
"param": {
"paramName": "pył zawieszony PM10",
"paramFormula": "PM10",
"paramCode": "PM10",
"idParam": 3
}
},
{
"id": 88,
"stationId": 14,
"param": {
"paramName": "dwutlenek azotu",
"paramFormula": "NO2",
"paramCode": "NO2",
"idParam": 6
}
}]
"""
url = f"https://api.gios.gov.pl/pjp-api/rest/station/sensors/{station_id}"
response = requests.get(url, timeout=5)
if response.status_code == requests.codes.ok:
if response.text:
json = response.json()
if json:
return json
else:
raise errors.NoDataReturned(f'Response: "{response.text}"')
else:
raise errors.NoDataReturned(f'Response: "{response.text}"')
else:
response.raise_for_status()
def get_measurement_data(sensor_id):
"""
Returns data for a given sensor.
    Example response
------------------
{
"key": "PM10",
"values": [
{
"date": "2017-03-28 11:00:00",
"value": 30.3018
},
{
"date": "2017-03-28 12:00:00",
"value": 27.5946
}]
}
"""
url = f"https://api.gios.gov.pl/pjp-api/rest/data/getData/{sensor_id}"
response = requests.get(url, timeout=5)
if response.status_code == requests.codes.ok:
if response.text:
json = response.json()
if json:
return json
else:
raise errors.NoDataReturned(f'Response: "{response.text}"')
else:
raise errors.NoDataReturned(f'Response: "{response.text}"')
else:
response.raise_for_status()
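# Minimal usage sketch (not part of the original module; requires network access and
# assumes the first returned station has at least one sensor):
if __name__ == "__main__":
    stations = get_all_measuring_stations()
    station_id = stations[0]["id"]
    sensors = get_all_sensors(station_id)
    data = get_measurement_data(sensors[0]["id"])
    print(data["key"], data["values"][:2])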
|
python
|
import ldap3.core
import ldap3.abstract
import ldap3.operation
import ldap3.protocol
import ldap3.protocol.sasl
import ldap3.protocol.schemas
import ldap3.protocol.formatters
import ldap3.strategy
import ldap3.utils
import ldap3.extend
import ldap3.extend.novell
import ldap3.extend.microsoft
import ldap3.extend.standard
|
python
|
import PyPluMA
CODING_TABLE = dict()
CODING_TABLE["TTT"] = 'F'
CODING_TABLE["TTC"] = 'F'
CODING_TABLE["TTA"] = 'L'
CODING_TABLE["TTG"] = 'L'
CODING_TABLE["TCT"] = 'S'
CODING_TABLE["TCC"] = 'S'
CODING_TABLE["TCA"] = 'S'
CODING_TABLE["TCG"] = 'S'
CODING_TABLE["TAT"] = 'Y'
CODING_TABLE["TAC"] = 'Y'
CODING_TABLE["TAA"] = ''
CODING_TABLE["TAG"] = ''
CODING_TABLE["TGT"] = 'C'
CODING_TABLE["TGC"] = 'C'
CODING_TABLE["TGA"] = ''
CODING_TABLE["TGG"] = 'W'
CODING_TABLE["CTT"] = 'L'
CODING_TABLE["CTC"] = 'L'
CODING_TABLE["CTA"] = 'L'
CODING_TABLE["CTG"] = 'L'
CODING_TABLE["CCT"] = 'P'
CODING_TABLE["CCC"] = 'P'
CODING_TABLE["CCA"] = 'P'
CODING_TABLE["CCG"] = 'P'
CODING_TABLE["CAT"] = 'H'
CODING_TABLE["CAC"] = 'H'
CODING_TABLE["CAA"] = 'Q'
CODING_TABLE["CAG"] = 'Q'
CODING_TABLE["CGT"] = 'R'
CODING_TABLE["CGC"] = 'R'
CODING_TABLE["CGA"] = 'R'
CODING_TABLE["CGG"] = 'R'
CODING_TABLE["ATT"] = 'I'
CODING_TABLE["ATC"] = 'I'
CODING_TABLE["ATA"] = 'I'
CODING_TABLE["ATG"] = 'M'
CODING_TABLE["ACT"] = 'T'
CODING_TABLE["ACC"] = 'T'
CODING_TABLE["ACA"] = 'T'
CODING_TABLE["ACG"] = 'T'
CODING_TABLE["AAT"] = 'N'
CODING_TABLE["AAC"] = 'N'
CODING_TABLE["AAA"] = 'K'
CODING_TABLE["AAG"] = 'K'
CODING_TABLE["AGT"] = 'S'
CODING_TABLE["AGC"] = 'S'
CODING_TABLE["AGA"] = 'R'
CODING_TABLE["AGG"] = 'R'
CODING_TABLE["GTT"] = 'V'
CODING_TABLE["GTC"] = 'V'
CODING_TABLE["GTA"] = 'V'
CODING_TABLE["GTG"] = 'V'
CODING_TABLE["GCT"] = 'A'
CODING_TABLE["GCC"] = 'A'
CODING_TABLE["GCA"] = 'A'
CODING_TABLE["GCG"] = 'A'
CODING_TABLE["GAT"] = 'D'
CODING_TABLE["GAC"] = 'D'
CODING_TABLE["GAA"] = 'E'
CODING_TABLE["GAG"] = 'E'
CODING_TABLE["GGT"] = 'G'
CODING_TABLE["GGC"] = 'G'
CODING_TABLE["GGA"] = 'G'
CODING_TABLE["GGG"] = 'G'
class DNA2ProteinPlugin:
    def input(self, filename):
        # Read a single-record FASTA file: first line is the header, the rest is the sequence.
        with open(filename, 'r') as fastafile:
            self.header = fastafile.readline().strip()
            self.DNA = ''
            for line in fastafile:
                self.DNA += line.strip()
    def run(self):
        if len(self.DNA) % 3 != 0:
            print("WARNING: Coding region length is not a multiple of 3")
        if CODING_TABLE.get(self.DNA[-3:]) != '':
            print("WARNING: Sequence does not end with a STOP codon")
        nucnum = 0
        self.protein = ''
        # Translate codon by codon; any trailing partial codon is ignored.
        while nucnum + 3 <= len(self.DNA):
            codon = self.DNA[nucnum:nucnum + 3]
            self.protein += CODING_TABLE[codon]
            nucnum += 3
    def output(self, filename):
        with open(filename, 'w') as outfile:
            outfile.write(self.header + "\n")
            outfile.write(self.protein)
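# Minimal usage sketch (file names are illustrative; PyPluMA normally drives these calls):
if __name__ == '__main__':
    plugin = DNA2ProteinPlugin()
    plugin.input("coding_sequence.fasta")
    plugin.run()
    plugin.output("protein.fasta")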
|
python
|
# Validation script source, stored as a raw string and executed at the bottom of this file.
code = r"""import pymongo
import urllib2
import urllib
import cookielib
import random
import re
import string
import sys
import getopt

# init the global cookie jar
cj = cookielib.CookieJar()
# declare the variables to connect to db
connection = None
db = None
webhost = "localhost:8082"
mongostr = "mongodb://localhost:27017"
db_name = "blog"

# this script will check that homework 3.2 is correct

# makes a little salt
def make_salt(n):
    salt = ""
    for i in range(n):
        salt = salt + random.choice(string.ascii_letters)
    return salt


# this is a validation script to make sure the blog works correctly.

def create_user(username, password):
    
    global cj

    try:
        print "Trying to create a test user ", username
        url = "http://{0}/signup".format(webhost)

        data = urllib.urlencode([("email",""),("username",username), ("password",password), ("verify",password)])
        request = urllib2.Request(url=url, data=data)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        f = opener.open(request)

        users = db.users
        # check that the user is in users collection
        user = users.find_one({'_id':username})
        if (user == None):
            print "Could not find the test user ", username, "in the users collection."
            return False
        print "Found the test user ", username, " in the users collection"

        # check that the user has been built
        result = f.read()
        expr = re.compile("Welcome\s+"+ username)
        if expr.search(result):
            return True
        
        print "When we tried to create a user, here is the output we got\n"
        print result
        
        return False
    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise
        return False


def try_to_login(username, password):

    try:
        print "Trying to login for test user ", username
        url = "http://{0}/login".format(webhost)

        data = urllib.urlencode([("username",username), ("password",password)])
        request = urllib2.Request(url=url, data=data)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        f = opener.open(request)

        # check for successful login
        result = f.read()
        expr = re.compile("Welcome\s+"+ username)
        if expr.search(result):
            return True

        print "When we tried to login, here is the output we got\n"
        print result
        return False
    except:
        print "the request to ", url, " failed, so your blog may not be running."
        return False


def add_blog_post(title,post,tags):

    try:
        print "Trying to submit a post with title ", title
        data = urllib.urlencode([("body",post), ("subject",title), ("tags",tags)])
        url = "http://{0}/newpost".format(webhost)
        request = urllib2.Request(url=url, data=data)
        cj.add_cookie_header(request)
        opener = urllib2.build_opener()
        f = opener.open(request)

        # check for successful login
        result = f.read()
        expr = re.compile(title + ".+" + post, re.DOTALL)

        if expr.search(result):
            return True

        print "When we tried to post, here is the output we got\n"
        print result
        return False

    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise

        return False

def add_blog_comment(title,post):

    try:
        print "+Trying to submit a blog comment for post with title", title
        url = "http://{0}/newcomment".format(webhost)
        
        doc = {}
        check_mongo_for_post(title, post, doc)

        permalink = doc['doc']['permalink']

        comment_name = make_salt(12)
        comment_body = make_salt(12)

        data = urllib.urlencode([("commentName",comment_name), ("commentBody",comment_body), ("permalink",permalink)])
        request = urllib2.Request(url=url, data=data)
        cj.add_cookie_header(request)
        opener = urllib2.build_opener()
        f = opener.open(request)

        # check for successful addition of comment on page
        result = f.read()
        expr = re.compile(title + ".+" + post, re.DOTALL)

        if not expr.search(result):
            print "When we tried to find the comment we posted at the  ", url, " here is what we got"
            print result
            return False


        # check for successful addition of comment..retrieve the doc again
        if(not check_mongo_for_post(title, post, doc)):
            print "Could not find comment in database"
            return False
        
        found = False
        if ('comments' in doc['doc']):
            for comment in doc['doc']['comments']:
                if (comment['body'] == comment_body and comment['author'] == comment_name):
                    found = True

        return found

    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise

        return False


# fetch the blog home page and return the link of the first post
def fetch_blog_home_page(posts):

    try:
        url = "http://{0}/".format(webhost)
        print "Trying to grab the blog home page at url and find the first post.", url
        request = urllib2.Request(url=url)
        cj.add_cookie_header(request)
        opener = urllib2.build_opener()
        f = opener.open(request)

        # Look for a post
        result = f.read()
        expr = re.compile("<a href=\"([^\"]+)\"\w*?>", re.DOTALL)


        match = expr.search(result)

        if match is not None:
            print "Found a post url: ", match.group(1)
            posts.append(match.group(1))
            return True

        
        print "Hmm, can't seem to find a post. Is the blog populated with posts?"
        print "When we tried to read the blog index at ", url, " here is what we got"
        print result
        return False

    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise

        return False

# gets the likes value off the first commment or returns None
def fetch_likes(url):

    try:
        url = "http://{0}{1}".format(webhost, url)
        print "Trying to grab the number of likes for url ", url
        request = urllib2.Request(url=url)
        cj.add_cookie_header(request)
        opener = urllib2.build_opener()
        f = opener.open(request)


        # let's get the first form element
        result = f.read()
        expr = re.compile("<form[^>]*>.*?Likes:\s*(\d+)\s*<.*?</form>", re.DOTALL)

        match = expr.search(result)

        if match is not None:
            print "Likes value ", match.group(1)
            return int(match.group(1))

        print "Can't fetch the like value for the first comment. Perhaps the blog entry has no comments?"
        print "When we tried to read the blog permalink at ", url, " here is what we got"
        return None

    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise

        return None


# gets the likes value off the first commment or returns None
def click_on_like(permalink):

    print "Clicking on Like link for post: ", permalink
    try:
        expr =  re.compile("[^/]+/([^/]+)")
        match = expr.search(permalink)
        if match is None:
            return False

        permalink = match.group(1)
        url = "http://{0}/like".format(webhost)
        # print "Like POST url", url

        data = urllib.urlencode([("permalink",permalink), ("comment_ordinal","0")])
        request = urllib2.Request(url=url, data=data)
        cj.add_cookie_header(request)
        opener = urllib2.build_opener()
        f = opener.open(request)

        return True

    except:
        print "the request to ", url, " failed, so your blog may not be running."
        raise




# command line arg parsing to make folks happy who want to run at mongolabs or mongohq
# this functions uses global vars to communicate. forgive me.
def arg_parsing(argv):

    global webhost
    global mongostr
    global db_name

    try:
        opts, args = getopt.getopt(argv, "-p:-m:-d:")
    except getopt.GetoptError:
        print "usage validate.py -p webhost -m mongoConnectString -d databaseName"
        print "\twebhost defaults to {0}".format(webhost)
        print "\tmongoConnectionString default to {0}".format(mongostr)
        print "\tdatabaseName defaults to {0}".format(db_name)
        sys.exit(2)
    for opt, arg in opts:
        if (opt == '-h'):
            print "usage validate.py -p webhost -m mongoConnectString -d databaseName"
            sys.exit(2)
        elif opt in ("-p"):
            webhost = arg
            print "Overriding HTTP host to be ", webhost
        elif opt in ("-m"):
            mongostr = arg
            print "Overriding MongoDB connection string to be ", mongostr
        elif opt in ("-d"):
            db_name = arg
            print "Overriding MongoDB database to be ", db_name
            


# main section of the code
def main(argv):
            
    arg_parsing(argv)
    global connection
    global db

    print "Welcome to the M101 Final Exam, Question 4 Validation Checker"

    # connect to the db (mongostr was set in arg_parsing)
    connection = pymongo.MongoClient(mongostr)
    db = connection[db_name]


    # grab the blog home page and find the first post
    posts = []
    if (not fetch_blog_home_page(posts)):
        print "I can't grab the home page of the blog"
        sys.exit(1)

    # now go to the permalink page for that post
    likes_value = fetch_likes(posts[0])

    if (likes_value is  None):
        print "Can't fetch the like value"
        sys.exit(1)

    click_on_like(posts[0])

    new_likes_value = fetch_likes(posts[0])

    if (new_likes_value != (likes_value + 1)):
        print "I was not able to increment the likes on a comment"
        print "old likes value was ", likes_value
        print "likes value after I clicked was ", new_likes_value
        print "Sorry, you have not solved it yet."
        sys.exit(1)


    print "Tests Passed for Final 4. Your validation code is 3f837hhg673ghd93hgf8"


if __name__ == "__main__":
    main(sys.argv[1:])
"""
eval(compile(code, "<string>", 'exec'))
|
python
|
#!/usr/bin/env python
import sys;print(sys.argv);print(__file__)
|
python
|
import torch
from neural_clbf.systems import ControlAffineSystem
def normalize(
dynamics_model: ControlAffineSystem, x: torch.Tensor, k: float = 1.0
) -> torch.Tensor:
"""Normalize the state input to [-k, k]
args:
dynamics_model: the dynamics model matching the provided states
x: bs x self.dynamics_model.n_dims the points to normalize
k: normalize non-angle dimensions to [-k, k]
"""
x_max, x_min = dynamics_model.state_limits
x_center = (x_max + x_min) / 2.0
x_range = (x_max - x_min) / 2.0
# Scale to get the input between (-k, k), centered at 0
x_range = x_range / k
# We shouldn't scale or offset any angle dimensions
x_center[dynamics_model.angle_dims] = 0.0
x_range[dynamics_model.angle_dims] = 1.0
# Do the normalization
return (x - x_center.type_as(x)) / x_range.type_as(x)
def normalize_with_angles(
dynamics_model: ControlAffineSystem, x: torch.Tensor, k: float = 1.0
) -> torch.Tensor:
"""Normalize the input using the stored center point and range, and replace all
angles with the sine and cosine of the angles
args:
dynamics_model: the dynamics model matching the provided states
x: bs x self.dynamics_model.n_dims the points to normalize
k: normalize non-angle dimensions to [-k, k]
"""
# Scale and offset based on the center and range
x = normalize(dynamics_model, x, k)
# Replace all angles with their sine, and append cosine
angle_dims = dynamics_model.angle_dims
angles = x[:, angle_dims]
x[:, angle_dims] = torch.sin(angles)
x = torch.cat((x, torch.cos(angles)), dim=-1)
return x
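# Minimal sketch (a hypothetical stand-in object, not a real ControlAffineSystem;
# it only provides the attributes that normalize()/normalize_with_angles() touch):
class _ToySystem:
    n_dims = 2
    angle_dims = [1]  # treat state dimension 1 as an angle
    state_limits = (torch.tensor([2.0, 3.14]), torch.tensor([-2.0, -3.14]))

if __name__ == "__main__":
    x = torch.tensor([[1.0, 1.57], [-1.0, 0.0]])
    # Output has shape (batch, n_dims + len(angle_dims)): [scaled position, sin, cos]
    print(normalize_with_angles(_ToySystem(), x))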
|
python
|
from enum import Enum
import datetime
import dbaccess
class Action(Enum):
PAUSE, FINISH, ARCHIVE, WORK, CREATE, DELETE = range(6)
class Log(object):
def __init__(self, action, problem_id, name, category_id, dt=None):
self.action = action
self.problem_id = problem_id
self.name = name
self.category_id = category_id
if dt:
self.datetime = dt
else:
self.datetime = datetime.datetime.now()
def __str__(self):
return ' '.join([str(self.datetime), str(self.action), 'Problem:', str(self.problem_id), self.name, 'Cat:', str(self.category_id)])
class LogManager(object):
def tolog(self, record):
action = Action(int(record[0]))
problem_id, name, category_id = record[1].split('|')
dt = datetime.datetime.strptime(record[2], '%Y-%m-%dT%H:%M:%S.%f')
result = Log(action, problem_id, name, category_id, dt)
return result
def torecord(self, log):
action = log.action.value
problem = '|'.join([str(log.problem_id), log.name, str(log.category_id)])
dt = log.datetime.isoformat()
return {'action': action, 'problem': problem, 'datetime': dt}
def read(self):
records = dbaccess.read('log')
result = []
for record in records:
result.append(self.tolog(record))
return result
def insert(self, log):
dbaccess.insert('log', self.torecord(log))
def dump(self):
dbaccess.delete('log')
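# Minimal usage sketch (assumes dbaccess is already configured with a 'log' store):
if __name__ == '__main__':
    manager = LogManager()
    manager.insert(Log(Action.CREATE, problem_id=1, name='two-sum', category_id=3))
    for entry in manager.read():
        print(entry)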
|
python
|
import argparse
import collections
import torch
import numpy as np
import data_loader.data_loaders as module_data
import model.loss as module_loss
import model.metric as module_metric
import model.model as module_arch
from parse_config import ConfigParser
from trainer import Trainer
from utils import prepare_device
"""
TODO:
1. modify BaseTrainer:
[checked] remove tensorboard features (keep it simple)
[checked] add test logic into BaseTrainer
2. modify BaseLoader:
we want to generate train_loader, valid_loader, test_loader at once in train.py
[checked] ==> merge train.py and test.py
3. Replace config with parse_known_args(), ConfigParser
"""
def main(config):
logger = config.get_logger('train')
# setup data_loader instances
train_data_loader = config.init_obj('data_loader', module_data)
valid_data_loader = train_data_loader.split_validation()
# setup data_loader instances
test_data_loader = getattr(module_data, config['data_loader']['type'])(
config['data_loader']['args']['data_dir'],
batch_size=512,
shuffle=False,
validation_split=0.0,
training=False,
num_workers=2
)
# build model architecture, then print to console
model = config.init_obj('arch', module_arch)
logger.info(model)
# prepare for (multi-device) GPU training
device, device_ids = prepare_device(config['n_gpu'])
model = model.to(device)
if len(device_ids) > 1:
model = torch.nn.DataParallel(model, device_ids=device_ids)
# get function handles of loss and metrics
criterion = getattr(module_loss, config['loss'])
metrics = [getattr(module_metric, met) for met in config['metrics']]
# build optimizer, learning rate scheduler. delete every lines containing lr_scheduler for disabling scheduler
trainable_params = filter(lambda p: p.requires_grad, model.parameters())
optimizer = config.init_obj('optimizer', torch.optim, trainable_params)
lr_scheduler = config.init_obj('lr_scheduler', torch.optim.lr_scheduler, optimizer)
trainer = Trainer(model, criterion, metrics, optimizer,
config=config,
device=device,
train_data_loader=train_data_loader,
valid_data_loader=valid_data_loader,
test_data_loader=test_data_loader,
lr_scheduler=lr_scheduler)
trainer.train()
trainer.test()
if __name__ == '__main__':
args = argparse.ArgumentParser(description='PyTorch Template')
args.add_argument('-c', '--config', default=None, type=str,
help='config file path (default: None)')
args.add_argument('-r', '--resume', default=None, type=str,
help='path to latest checkpoint (default: None)')
args.add_argument('-d', '--device', default=None, type=str,
help='indices of GPUs to enable (default: all)')
# custom cli options to modify configuration from default values given in json file.
CustomArgs = collections.namedtuple('CustomArgs', 'flags type target')
options = [
CustomArgs(['--lr', '--learning_rate'], type=float, target='optimizer;args;lr'),
CustomArgs(['--bs', '--batch_size'], type=int, target='data_loader;args;batch_size')
]
config = ConfigParser.from_args(args, options)
main(config)
|
python
|
from django.conf.urls import include, url
from categories import views
class SingleCategoryPatterns():
urlpatterns = [
url(r'^$', views.category, name='category'),
url(r'^new/$', views.new_category, name='new_category'),
url(r'^delete/$', views.delete_category, name='delete_category'),
]
urlpatterns = [
url(r'^$', views.categories, name='categories'),
url(r'^(?P<cat_id>([0-9]{1,20}|root))/', include(SingleCategoryPatterns)),
]
|
python
|
#!/usr/bin/env python
print("SUB_TASK, Hello, Am sub_task");
import os;
import hashlib;
import time;
import multiprocessing;
def __getMd5(localFile):
md5Value = "";
md5tool = hashlib.md5();
print("__CheckFile, localFile:" + localFile);
try:
if (os.path.exists(localFile) == False):
return md5Value;
f = open(localFile, 'rb');
#Read data
while True:
data = f.read(4096);
if not data:
break;
md5tool.update(data);
f.close();
except Exception,e:
print("__CheckFile, excp:" + e.message);
finally:
md5Value = md5tool.hexdigest();
return md5Value;
def __Compress(localpath, desfile):
result = False;
try:
import zipfile;
f = zipfile.ZipFile(desfile, 'w', zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(localpath):
for filename in filenames:
f.write(os.path.join(dirpath, filename));
result = True;
except Exception,e:
print("__Compress, excp:" + e.message);
finally:
try:
f.close();
except Exception:
print("");
return result;
def __file_packet():
result = False;
desFile = "";
pbMd5 = "";
srcFile = "/tmp/rmrb_syslog.zip";
if __Compress("/tmp/daily/index/08/", srcFile):
result = True;
print("__file_packet, compress ok :" + srcFile);
    else:
        return pbMd5, desFile, result;
if not os.path.isfile(srcFile):
return pbMd5, desFile, result;
pbMd5 = __getMd5(srcFile);
desFile = os.path.join('/tmp/', '%s.zip' % pbMd5);
os.rename(srcFile, desFile);
print("__file_packet, " + pbMd5 + " " + desFile + " " + str(result));
return pbMd5, desFile, result;
def __test_transport1():
url = "http://pricloud.cn:20000/appupgrade/"
try:
import httplib;
from system import post_form;
connection = httplib.HTTP("pricloud.cn:20000");
connection.putrequest('POST', '/appupgrade/appupgrade');
content_type, body = post_form.encode_multipart_formdata(['/tmp/c3052ec34a35cffac476a65a08b4dd2d.zip']);
print "Header content_type:" + content_type
connection.putheader('content-type', content_type)
connection.putheader('content-length', str(len(body)))
connection.endheaders()
connection.send(body)
errcode, errmsg, headers = connection.getreply()
print errcode
print errmsg
print headers
# for l in connection.getfile():
# print l
connection.close();
# if not 1:
# return connection.file.read()
# f = open(file, 'rb')
# sys.stdout.write(f.read())
# mmapped_file_as_string = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
# request = urllib2.Request(url, mmapped_file_as_string)
# request.add_header('Content-Type', content_type)
# response = urllib2.urlopen(request)
# mmapped_file_as_string.close()
# f.close()
except Exception,e:
print("__test_transport1, excp:" + e.message);
finally:
print("");
def __test_transport():
try:
import urllib2;
from system import post_form;
from cStringIO import StringIO;
from io import StringIO;
from io import BytesIO;
form = post_form.multi_part_form()
form.add_file('file', '/tmp/c3052ec34a35cffac476a65a08b4dd2d.zip', file_obj=BytesIO('/tmp/c3052ec34a35cffac476a65a08b4dd2d.zip'))
# request = urllib2.Request('http://127.0.0.1:8080/appupgrade/appupgrade')
request = urllib2.Request('http://pricloud.cn:20000/appupgrade/appupgrade')
body = str(form)
request.add_header('Content-type', 'multipart/form-data; boundary=####')
request.add_header('Content-length', len(body))
request.add_data(body)
print request.headers
print body;
print "**************************************"
print 'Request'
print request.get_data()
print 'Response'
print urllib2.urlopen(request);
print "**************************************"
except Exception,e:
print("__test_transport, excep:" + e.message);
def __file_transport2(md5Val, desFile):
try:
import urllib2;
from system import post_form;
from system.poster.encode import multipart_encode
from system.poster.streaminghttp import register_openers
register_openers();
datagen, headers = multipart_encode({"LOG_%s" % md5Val: open(desFile, "rb")})
# request = urllib2.Request('http://pricloud.cn:20000/appupgrade/appupgrade', datagen, headers)
request = urllib2.Request('http://pricloud.cn:20000/rmrb/appupgrade', datagen, headers)
print "__file_transport2 " + str(request.headers)
print "__file_transport2 " + str(datagen);
print "****************__file_transport2*******************"
print "__file_transport2 Request"
print request.get_data()
print "__file_transport2, Response"
print urllib2.urlopen(request);
print "****************__file_transport2*******************"
except Exception,e:
print("__file_transport2, excep:" + e.message);
def __file_transport(md5Val, desFile):
print("__file_transport md5Val:" + md5Val + " desFile:" + desFile);
try:
import zipfile;
import urllib;
import urllib2;
if not os.path.isfile(desFile):
return;
if not zipfile.is_zipfile(desFile):
return;
reqStr = "http://pricloud.cn:20000/appupgrade/?type=upload&name=rmrb_pb&md5=" + md5Val;
print("__file_transport, reqStr:" + reqStr);
with open(desFile, 'r') as uploadFile:
content = uploadFile.read();
postdata={'file':content};
request = urllib2.Request(reqStr, data=urllib.urlencode(postdata));
response = urllib2.urlopen(request);
retCode = response.status;
print("__file_transport, upload retCode:" + retCode);
uploadFile.close();
except Exception,e:
print("__file_transport, excp:" + e.message);
finally:
print("");
return;
def __need_update(md5Val):
need = False;
try:
import httplib;
import sys_info;
from modules import xorg_monitor as xorg
sysInfo = sys_info.init_gather_sysinfo();
urls = sysInfo.getReportUrls();
connection = httplib.HTTPConnection("pricloud.cn", 20000, timeout=3);
reqline = "/rmrb/appupgrade?Type=RMRB_SP_INFO&App=rmrb&SP=%s" % md5Val + \
"&Mac=" + sysInfo.getMac() + \
"&Ip=" + sysInfo.getIP() + \
"&AppVer=" + sysInfo.getAppVer() + \
"&MediaVer=" + sysInfo.getMediaVer() + \
"&DevId=" + sysInfo.getID() + \
"&Debug=" + xorg.debug_xorg_monitor();
reqline = reqline.replace(" ", "");
reqline = reqline.replace("\n", "")
print("__need_update, request cmdline:" + reqline);
connection.request("GET", reqline);
response = connection.getresponse();
retCode = response.status;
retMsg = response.read(16);
connection.close();
print("__need_update, retMsg:" + retMsg);
if ((retCode == 200) and (retMsg == "TRUE")):
need = True;
except Exception, e:
print("__need_update, excp:" + e.message);
finally:
print("__need_update, finally");
print("__need_update, need to upload ? %s" % str(need));
return need;
def __file_process():
while True:
try:
md5Val,desFile,result = __file_packet();
if not result:
break;
if not __need_update(md5Val):
break;
if desFile == "":
break;
__file_transport2(md5Val, desFile);
except Exception,e:
print("__file_process, excp:" + e.message);
finally:
break;
print("__file_process, done");
return;
def __reload_modules():
try:
flag="/opt/rmrb/reboot"
if not os.path.isfile(flag):
return
"""delete flag file"""
os.remove(flag)
os.system('sudo reboot')
except:
return;
return;
def fork_task():
# __file_process()
__reload_modules()
try:
import xorg_monitor as xorg
xorg.do_monitor_vlc()
except:
print "";
return;
|
python
|
def primeFactors(n):
    """Return a dict mapping each prime factor of n to its multiplicity."""
    facts, by_two = {}, 0
    while n % 2 == 0:
        n //= 2
        by_two += 1
    if by_two:
        facts[2] = by_two
    for i in range(3, int(n**0.5) + 1, 2):
        while n % i == 0:
            n //= i
            facts[i] = facts.get(i, 0) + 1
    if n > 1:
        # Whatever remains after trial division is itself prime.
        facts[n] = facts.get(n, 0) + 1
    return facts
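# Quick worked example (illustrative): 360 = 2**3 * 3**2 * 5,
# so primeFactors(360) should return {2: 3, 3: 2, 5: 1}.
assert primeFactors(360) == {2: 3, 3: 2, 5: 1}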
def prime_fac(num):
    """Return the smallest prime factor of num, or None if num < 2."""
    for i in range(2, num + 1):
        if num % i == 0:
            prime = True
            for j in range(2, i // 2 + 1):
                if i % j == 0:
                    prime = False
                    break
            if prime:
                return i
    return None
for i in range(2, 100):
    print(i, prime_fac(i))
|
python
|
# Project: hardInfo
# Author: George Keith Watson
# Date Started: March 18, 2022
# Copyright: (c) Copyright 2022 George Keith Watson
# Module: model/LsCpu.py
# Date Started: March 20, 2022
# Purpose: Run Linux commands and collect output into usable Python objects.
# Development:
# Sample / Test data file:
# Name: consoleOutput/lscpu/lscpu.output.2022_03_20.txt
# Tool used: lscpu -a --json --extended > lscpu.output.2022_03_20.txt
#
from os.path import isfile
from subprocess import Popen, PIPE, STDOUT
from json import loads
from collections import OrderedDict
from copy import deepcopy
from enum import Enum
from datetime import datetime
from tkinter import Tk, messagebox, BOTH
from view.Components import JsonTreeView
PROGRAM_TITLE = "lscpu Importer"
LSCPU_JSON_FILE = 'lscpu.json'
class CPU_Field:
def __init__(self, field: dict):
if not isinstance(field, dict) or 'field' not in field or 'data' not in field:
raise Exception("CPU_Field constructor - Invalid field argument: " + str(field))
self.attributes = deepcopy(field)
self.name = field['field']
if self.name == "Flags:":
self.data = field['data'].split()
else:
self.data = field['data']
def getName(self):
return self.name
def getData(self):
return self.data
def getAttributes(self):
return deepcopy(self.attributes)
class CPU_FieldSet:
def __init__(self, lscpuJson: dict):
if not isinstance(lscpuJson, dict) or not "lscpu" in lscpuJson:
raise Exception("CPU_FieldSet constructor - Invalid lscpuJson argument: " + str(lscpuJson))
self.attributes = deepcopy(lscpuJson)
self.cpuFields = OrderedDict()
for fieldMap in lscpuJson["lscpu"]:
if "field" not in fieldMap or "data" not in fieldMap:
raise Exception("CPU_FieldSet constructor - Invalid fieldMap in lscpuJson argument: " + str(fieldMap))
self.cpuFields[fieldMap['field']] = CPU_Field(fieldMap)
def getAttributes(self):
return deepcopy(self.attributes)
def getCPU_Field(self, name: str):
if name in self.cpuFields:
return deepcopy(self.cpuFields[name])
return None
class Action(Enum):
Generate = 'Generate'
Help = "Help"
Load = 'Load'
Store = 'Store'
Search = 'Search'
Update = 'Update'
Log = 'Log'
Exit = 'Exit'
def __str__(self):
return self.value
class Dispatcher:
def __init__(self):
print("Lshw.Dispatcher does not instantiate")
@staticmethod
def do( action: Action):
if action == Action.Generate:
return Dispatcher.__generateLscpuJsonFile()
@staticmethod
def __generateLscpuJsonFile():
        stdout, _stderr = Popen(['lscpu', '--json'], stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
        jsonText = stdout.decode('utf-8')
print("Saving output to:\t" + LSCPU_JSON_FILE)
file = open(LSCPU_JSON_FILE, "w")
file.write(jsonText)
file.close()
return jsonText
class Conversation:
userLog = OrderedDict()
class LogEntry:
def __init__(self, timeStamp: datetime, description: str, attributes: dict ):
if not isinstance(timeStamp, datetime):
raise Exception("Conversation.LogEntry constructor - Invalid timeStamp argument: " + str(timeStamp))
if not isinstance(description, str):
raise Exception("Conversation.LogEntry constructor - Invalid description argument: " + str(description))
if not isinstance(attributes, dict):
raise Exception("Conversation.LogEntry constructor - Invalid attributes argument: " + str(attributes))
self.timeStamp = deepcopy(timeStamp)
self.description = description
self.attributes = deepcopy(attributes)
def storeLog(self):
pass
def __init__(self):
print("Lshw.Conversation does not instantiate")
@staticmethod
def getAndProcessInput():
jsonText = None
if isfile(LSCPU_JSON_FILE):
prompt = "lscpu json storage file already exists. Would you like to update it? (y/Y or n/N)"
print(prompt, end=":\t")
response = input()
if response in ('y', 'Y'):
jsonText = Dispatcher.do(Action.Generate)
# print("Line Count:\t" + str(len(outputText.split('\n'))))
else:
lscpuJsonFile = open(LSCPU_JSON_FILE, "r")
jsonText = lscpuJsonFile.read()
lscpuJsonFile.close()
else:
jsonText = Dispatcher.do(Action.Generate)
if jsonText is not None:
lscpuJson = loads(jsonText)
jsonText = None
# Construct the internal objects storing the output for API use.
cpu_FieldSet = CPU_FieldSet(lscpuJson)
            prompt = "Would you like to see the lscpu output in a GUI Tree window? (y/Y or n/N)"
print(prompt, end=":\t")
response = input()
if response in ('y', 'Y'):
print('Generating view')
jsonTreeView = JsonTreeView(mainView, lscpuJson, {"openBranches": True, "mode": "strict"})
jsonTreeView.pack(expand=True, fill=BOTH)
mainView.mainloop()
def ExitProgram():
answer = messagebox.askyesno('Exit program ', "Exit the " + PROGRAM_TITLE + " program?")
if answer:
mainView.destroy()
if __name__ == '__main__':
mainView = Tk()
mainView.protocol('WM_DELETE_WINDOW', ExitProgram)
mainView.geometry("600x400+100+50")
mainView.title(PROGRAM_TITLE)
Conversation.getAndProcessInput()
|
python
|
import json
from .AccessControlEntry import AccessControlEntry
class AccessControlList(object):
"""OCS access control list definition"""
def __init__(self, role_trustee_access_control_entries: list[AccessControlEntry] = None):
self.RoleTrusteeAccessControlEntries = role_trustee_access_control_entries
@property
def RoleTrusteeAccessControlEntries(self) -> list[AccessControlEntry]:
return self.__role_trustee_access_control_entries
@RoleTrusteeAccessControlEntries.setter
def RoleTrusteeAccessControlEntries(self, value: list[AccessControlEntry]):
self.__role_trustee_access_control_entries = value
def toJson(self):
return json.dumps(self.toDictionary())
def toDictionary(self):
result = {'RoleTrusteeAccessControlEntries': []}
if self.RoleTrusteeAccessControlEntries is not None:
for value in self.RoleTrusteeAccessControlEntries:
result['RoleTrusteeAccessControlEntries'].append(
value.toDictionary())
return result
@staticmethod
def fromJson(content: dict[str, str]):
result = AccessControlList()
if not content:
return result
if 'RoleTrusteeAccessControlEntries' in content:
entries = content['RoleTrusteeAccessControlEntries']
if entries is not None and len(entries) > 0:
result.RoleTrusteeAccessControlEntries = []
for value in entries:
result.RoleTrusteeAccessControlEntries.append(
AccessControlEntry.fromJson(value))
return result
|
python
|
"""Tests for zaim_row.py."""
from datetime import datetime
from typing import Type
import pytest
from tests.testlibraries.instance_resource import InstanceResource
from tests.testlibraries.row_data import ZaimRowData
from zaimcsvconverter import CONFIG
from zaimcsvconverter.inputcsvformats import InputRow, InputRowData
from zaimcsvconverter.inputcsvformats.amazon import AmazonRowFactory
from zaimcsvconverter.inputcsvformats.mufg import MufgIncomeFromOthersRow
from zaimcsvconverter.inputcsvformats.sf_card_viewer import SFCardViewerRowData, SFCardViewerRowFactory
from zaimcsvconverter.inputcsvformats.waon import WaonChargeRow, WaonRow, WaonRowData
from zaimcsvconverter.rowconverters import ZaimRowConverter
from zaimcsvconverter.rowconverters.amazon import AmazonZaimRowConverterFactory
from zaimcsvconverter.rowconverters.mufg import MufgZaimIncomeRowConverter
from zaimcsvconverter.rowconverters.sf_card_viewer import SFCardViewerZaimRowConverterFactory
from zaimcsvconverter.rowconverters.waon import (
WaonZaimIncomeRowConverter,
WaonZaimPaymentRowConverter,
WaonZaimTransferRowConverter,
)
from zaimcsvconverter.zaim_row import ZaimIncomeRow, ZaimPaymentRow, ZaimRowFactory, ZaimTransferRow
class TestZaimIncomeRow:
"""Tests for ZaimIncomeRow."""
# pylint: disable=unused-argument
@staticmethod
def test_all(yaml_config_load, database_session_stores_item):
"""Argument should set into properties."""
mufg_row = MufgIncomeFromOthersRow(InstanceResource.ROW_DATA_MUFG_TRANSFER_INCOME_NOT_OWN_ACCOUNT)
# Reason: Pylint's bug. pylint: disable=no-member
zaim_low = ZaimRowFactory.create(MufgZaimIncomeRowConverter(mufg_row))
list_zaim_row = zaim_low.convert_to_list()
zaim_row_data = ZaimRowData(*list_zaim_row)
assert zaim_row_data.date == "2018-08-20"
assert zaim_row_data.method == "income"
assert zaim_row_data.category_large == "臨時収入"
assert zaim_row_data.category_small == "-"
assert zaim_row_data.cash_flow_source == ""
assert zaim_row_data.cash_flow_target == "三菱UFJ銀行"
assert zaim_row_data.item_name == ""
assert zaim_row_data.note == ""
assert zaim_row_data.store_name == "三菱UFJ銀行"
assert zaim_row_data.currency == ""
assert zaim_row_data.amount_income == 20
assert zaim_row_data.amount_payment == 0
assert zaim_row_data.amount_transfer == 0
assert zaim_row_data.balance_adjustment == ""
assert zaim_row_data.amount_before_currency_conversion == ""
assert zaim_row_data.setting_aggregate == ""
class TestZaimPaymentRow:
"""Tests for ZaimPaymentRow."""
# pylint: disable=too-many-arguments,too-many-locals,unused-argument
@staticmethod
@pytest.mark.parametrize(
(
"input_row_factory, input_row_data, zaim_row_converter_selector, expected_date, "
"expected_category_large, expected_category_small, expected_cash_flow_source, expected_item_name, "
"expected_note, expected_store_name, expected_amount_payment"
),
[
(
SFCardViewerRowFactory(lambda: CONFIG.pasmo),
InstanceResource.ROW_DATA_SF_CARD_VIEWER_TRANSPORTATION_KOHRAKUEN_STATION,
SFCardViewerZaimRowConverterFactory(lambda: CONFIG.pasmo),
"2018-11-13",
"交通",
"電車",
"PASMO",
"",
"メトロ 六本木一丁目 → メトロ 後楽園",
"東京地下鉄株式会社 南北線後楽園駅",
195,
),
(
AmazonRowFactory(),
InstanceResource.ROW_DATA_AMAZON_ECHO_DOT,
AmazonZaimRowConverterFactory(),
"2018-10-23",
"大型出費",
"家電",
"ヨドバシゴールドポイントカード・プラス",
"Echo Dot (エコードット) 第2世代 - スマートスピーカー with Alexa、ホワイト",
"",
"Amazon Japan G.K.",
4980,
),
],
)
def test_all(
yaml_config_load,
database_session_stores_item,
input_row_factory,
input_row_data: SFCardViewerRowData,
zaim_row_converter_selector,
expected_date,
expected_category_large,
expected_category_small,
expected_cash_flow_source,
expected_item_name,
expected_note,
expected_store_name,
expected_amount_payment,
):
"""Argument should set into properties."""
input_row = input_row_factory.create(input_row_data)
zaim_low = ZaimRowFactory.create(zaim_row_converter_selector.create(input_row))
list_zaim_row = zaim_low.convert_to_list()
zaim_row_data = ZaimRowData(*list_zaim_row)
assert zaim_row_data.date == expected_date
assert zaim_row_data.method == "payment"
assert zaim_row_data.category_large == expected_category_large
assert zaim_row_data.category_small == expected_category_small
assert zaim_row_data.cash_flow_source == expected_cash_flow_source
assert zaim_row_data.cash_flow_target == ""
assert zaim_row_data.item_name == expected_item_name
assert zaim_row_data.note == expected_note
assert zaim_row_data.store_name == expected_store_name
assert zaim_row_data.currency == ""
assert zaim_row_data.amount_income == 0
assert zaim_row_data.amount_payment == expected_amount_payment
assert zaim_row_data.amount_transfer == 0
assert zaim_row_data.balance_adjustment == ""
assert zaim_row_data.amount_before_currency_conversion == ""
assert zaim_row_data.setting_aggregate == ""
class TestZaimTransferRow:
"""Tests for ZaimTransferRow."""
# pylint: disable=unused-argument
@staticmethod
def test_all(yaml_config_load, database_session_stores_item):
"""Argument should set into properties."""
waon_auto_charge_row = WaonRow(InstanceResource.ROW_DATA_WAON_AUTO_CHARGE_ITABASHIMAENOCHO)
zaim_low = ZaimRowFactory.create(WaonZaimTransferRowConverter(waon_auto_charge_row))
list_zaim_row = zaim_low.convert_to_list()
zaim_row_data = ZaimRowData(*list_zaim_row)
assert zaim_row_data.date == "2018-11-11"
assert zaim_row_data.method == "transfer"
assert zaim_row_data.category_large == "-"
assert zaim_row_data.category_small == "-"
assert zaim_row_data.cash_flow_source == "イオン銀行"
assert zaim_row_data.cash_flow_target == "WAON"
assert zaim_row_data.item_name == ""
assert zaim_row_data.note == ""
assert zaim_row_data.store_name == ""
assert zaim_row_data.currency == ""
assert zaim_row_data.amount_income == 0
assert zaim_row_data.amount_payment == 0
assert zaim_row_data.amount_transfer == 5000
assert zaim_row_data.balance_adjustment == ""
assert zaim_row_data.amount_before_currency_conversion == ""
assert zaim_row_data.setting_aggregate == ""
class TestZaimRowFactory:
"""Tests for ZaimRowFactory."""
# pylint: disable=unused-argument,too-many-arguments
@staticmethod
@pytest.mark.parametrize(
"database_session_with_schema, zaim_row_converter_class, input_row, waon_row_data, expected",
[
(
[InstanceResource.FIXTURE_RECORD_STORE_WAON_ITABASHIMAENOCHO],
WaonZaimIncomeRowConverter,
WaonChargeRow,
InstanceResource.ROW_DATA_WAON_CHARGE_POINT_ITABASHIMAENOCHO,
ZaimIncomeRow,
),
(
[InstanceResource.FIXTURE_RECORD_STORE_WAON_ITABASHIMAENOCHO],
WaonZaimPaymentRowConverter,
WaonRow,
InstanceResource.ROW_DATA_WAON_PAYMENT_ITABASHIMAENOCHO,
ZaimPaymentRow,
),
(
[InstanceResource.FIXTURE_RECORD_STORE_WAON_ITABASHIMAENOCHO],
WaonZaimTransferRowConverter,
WaonRow,
InstanceResource.ROW_DATA_WAON_AUTO_CHARGE_ITABASHIMAENOCHO,
ZaimTransferRow,
),
],
indirect=["database_session_with_schema"],
)
def test_success(
yaml_config_load,
database_session_with_schema,
zaim_row_converter_class,
input_row: Type[WaonRow],
waon_row_data: WaonRowData,
expected,
):
"""Factory should create appropriate type of Zaim row."""
assert isinstance(ZaimRowFactory.create(zaim_row_converter_class(input_row(waon_row_data))), expected)
@staticmethod
def test_fail():
"""Factory should raise ValueError when input row is undefined type."""
# Reason: This class is just for test. pylint: disable=too-few-public-methods
class UndefinedZaimRowConverter(ZaimRowConverter):
pass
class UndefinedInputRow(InputRow):
pass
class UndefinedInputRowData(InputRowData):
# Reason: Raw code is simple enough. pylint: disable=missing-docstring
@property
def date(self) -> datetime:
return datetime.now()
@property
def store_name(self) -> str:
return ""
@property
def item_name(self) -> str:
return ""
@property
def validate(self) -> bool:
return False
with pytest.raises(ValueError) as error:
ZaimRowFactory.create(UndefinedZaimRowConverter(UndefinedInputRow(UndefinedInputRowData())))
assert str(error.value) == "Undefined Zaim row converter. Zaim row converter = UndefinedZaimRowConverter"
|
python
|
"""
Script with modules to connect with the database to prepare sources
"""
import logging
import os
import uuid
import pandas as pd
import utils.data_connection.constant_variables_db as cons
from utils.data_connection.source_manager import Connector
from pypika import Query, Tables, Table, JoinType
logger = logging.getLogger()
class APISourcesFetcher:
"""
Class to get all the proper Sources with connectors
"""
def __init__(self, db_connector: Connector, database: str = None):
"""
:param db_connector: Connector to DB server
:param database: db name
"""
self.__db_connnector = db_connector
self.__insights_db = os.getenv("INSIGHTS_DB_NAME")
if not database:
self.__database = os.getenv("API_DB_NAME")
else:
self.__database = database
def __select_source(self, query: str) -> pd.DataFrame:
"""
        Executes select queries and returns the result as a data frame
        :param query: query to be executed
        :return: data frame with the selected values
"""
try:
self.__db_connnector.open_connection()
result = list(self.__db_connnector.select_query(query))
self.__db_connnector.close_connection()
result = pd.DataFrame(result)
except Exception as e:
logger.error(msg=str(e))
self.__db_connnector.close_connection()
return None
return result
def __insert_source(self, query: str) -> int:
"""
        Executes insert queries and returns the execution result
:param query: query to be executed
:return: execution result
"""
try:
self.__db_connnector.open_connection()
result = self.__db_connnector.insert_query(query)
self.__db_connnector.close_connection()
except Exception as e:
logger.error(msg=str(e))
self.__db_connnector.close_connection()
return None
return result
def get_companies_info(self, companies_columns_names: list = None) -> pd.DataFrame:
"""
Get all data from the table companies
:param companies_columns_names: list of columns names we want to select
        :return: data frame with all the data resulting from the query execution
"""
if companies_columns_names is None or len(companies_columns_names) == 0:
companies_columns_names = cons.COMPANIES_COLUMN_NAMES
companies = Table("`%s`.`%s`" % (self.__database, cons.COMPANY_TABLE))
query = Query.from_(companies) \
.select(companies.id,
companies.name,
companies.domain,
companies.created_at,
companies.language,
companies.is_enabled,
companies.deleted_at) \
.where(companies.deleted_at.isnull()) \
.where(companies.is_enabled == 1)
result = self.__select_source(query.get_sql(quote_char=None))
try:
result.columns = companies_columns_names
except Exception as e:
logger.error(msg=str(e))
return pd.DataFrame()
return result
def get_companies_users(self, users_columns_names: list = None) -> pd.DataFrame:
"""
        Get all data from the table company_users
        :param users_columns_names: list of columns names we want to select
        :return: data frame with all the data resulting from the query execution
"""
if users_columns_names is None or len(users_columns_names) == 0:
users_columns_names = cons.COMPANY_USERS_COLUMN_NAMES
users_table = Table("`%s`.`%s`" % (self.__database, cons.COMPANY_USERS_TABLE))
q = Query.from_(users_table).select(users_table.id,
users_table.company_id,
users_table.user_id,
users_table.is_general_manager,
users_table.is_admin,
users_table.roles,
users_table.created_at,
users_table.deleted_at,
users_table.is_enabled)
query = str(q).replace("\"", "")
result = self.__select_source(query)
try:
result.columns = users_columns_names
except Exception as e:
logger.error(msg=str(e))
return pd.DataFrame()
return result
def get_surveys_mood(self, surveys_mood_columns_names: list = None) -> pd.DataFrame:
"""
        Get all data from the table surveys_mood
        :param surveys_mood_columns_names: list of columns names we want to select
        :return: data frame with all the data resulting from the query execution
"""
if surveys_mood_columns_names is None or len(surveys_mood_columns_names) == 0:
surveys_mood_columns_names = cons.SURVEYS_REPLIES_COLUMN_NAMES
survey_replies = Table("`%s`.`%s`" % (self.__database, cons.SURVEYS_REPLIES_TABLE))
q = Query.from_(survey_replies).select(survey_replies.id,
survey_replies.survey_question_id,
survey_replies.user_id,
survey_replies.rating,
survey_replies.created_at,
survey_replies.user_timezone,
survey_replies.system_timezone,
survey_replies.survey_iteration_token_id,
survey_replies.comment,
survey_replies.comment_deleted_at)
query = str(q).replace("\"", "")
result = self.__select_source(query)
try:
result.columns = surveys_mood_columns_names
except Exception as e:
logger.error(msg=str(e))
return pd.DataFrame()
return result
def get_survey_replies_dimensions_questions(self, period: dict, company_ids: list) -> pd.DataFrame:
"""
Select the data from database
:param period: period of data to fetch (year_s, week_s, year_e, week_e)
:param company_ids: company_ids to fetch data from
:return: dataframe with the data selected
"""
survey_replies, survey_questions, survey_iterations, questions, surveys, dimensions, feature_flags = Tables(
"`%s`.`%s`" % (self.__database, cons.SURVEYS_REPLIES_TABLE),
"`%s`.`%s`" % (self.__database, cons.SURVEYS_QUESTIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.SURVEYS_ITERATIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.QUESTIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.SURVEYS_TABLE),
"`%s`.`%s`" % (self.__database, cons.DIMENSIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.COMPANY_FF_TABLE)
)
week_s = period.get("start_week")
year_s = period.get("start_year")
week_e = period.get("end_week")
year_e = period.get("end_year")
q = Query.from_(survey_replies) \
.join(survey_questions).on(survey_questions.id == survey_replies.survey_question_id) \
.join(survey_iterations).on(survey_iterations.id == survey_questions.survey_iteration_id) \
.join(surveys).on(surveys.id == survey_iterations.survey_id) \
.join(questions).on(questions.id == survey_questions.question_id) \
.join(dimensions).on(dimensions.id == questions.dimension_id) \
.join(feature_flags, how=JoinType.left).on(surveys.company_id == feature_flags.company_id) \
.select(survey_replies.id.as_('survey_reply_id'),
survey_replies.user_id,
survey_replies.rating,
survey_replies.created_at,
survey_replies.comment,
survey_iterations.id.as_('survey_iteration_id'),
survey_iterations.created_at,
survey_iterations.year,
survey_iterations.week,
surveys.company_id,
questions.type_id,
questions.description,
questions.dimension_id,
questions.week,
dimensions.description.as_('dimension_description')) \
.where(survey_replies.comment.notnull()) \
.where((feature_flags.company_id.isnull()) |
(feature_flags.feature_flag_id != "DISABLE_SURVEY"))
if week_e is None or year_e is None:
q = q.where((survey_iterations.week == week_s) & (survey_iterations.year == year_s))
else:
logger.info(msg="Fetching survey replies on a time period {}".format(period))
q = q.where((survey_iterations.week[week_s:week_e]) & (survey_iterations.year[year_s:year_e]))
if len(company_ids) > 0:
q = q.where(surveys.company_id.isin(company_ids))
query = str(q).replace("\"", "")
result = self.__select_source(query)
try:
result.columns = cons.SURVEY_REPLIES_DIMENSIONS_QUESTIONS_COLUMN_NAMES
except Exception as e:
logger.error(msg=str(e))
return pd.DataFrame()
return result
def get_topics(self, company_ids: list) -> pd.DataFrame:
"""
Select topics from db
:param company_ids: list of company_ids
:return: dataframe with the topics
"""
topics, topic_comments, feature_flags = Tables("`%s`.`%s`" % (self.__database, cons.TOPICS_TABLE),
"`%s`.`%s`" % (self.__database, cons.TOPIC_COMMENTS_TABLE),
"`%s`.`%s`" % (self.__database, cons.COMPANY_FF_TABLE))
q = Query.from_(topics) \
.join(topic_comments).on(topic_comments.company_topic_id == topics.id) \
.join(feature_flags, how=JoinType.left).on(topics.company_id == feature_flags.company_id) \
.select(topics.id,
topics.company_id,
topics.is_archived,
topics.content,
topics.created_at,
topic_comments.content) \
.where(topics.content.notnull()
& topics.deleted_at.isnull()
& topics.is_archived == 0) \
.where((feature_flags.company_id.isnull()) |
(feature_flags.feature_flag_id != "DISABLE_TOPICS")) \
.groupby(topics.id,
topics.company_id,
topics.content,
topic_comments.content)
if len(company_ids) > 0:
q = q.where(topics.company_id.isin(company_ids))
query = str(q).replace("\"", "")
result = self.__select_source(query)
if result.empty:
return pd.DataFrame()
try:
result.columns = cons.TOPICS_COLUMN_NAMES
except Exception as e:
logger.error(msg=str(e))
return pd.DataFrame()
return result
def get_company_week_from_period(self, year: int, week: int, company_id: str) -> int:
"""
        Select the company week of the given company
:param year: year
:param week: week
:param company_id: company id
:return: number of the week of the company in that given period
"""
survey_iterations, surveys, survey_questions, questions = Tables(
"`%s`.`%s`" % (self.__database, cons.SURVEYS_ITERATIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.SURVEYS_TABLE),
"`%s`.`%s`" % (self.__database, cons.SURVEYS_QUESTIONS_TABLE),
"`%s`.`%s`" % (self.__database, cons.QUESTIONS_TABLE))
q = Query.from_(survey_iterations) \
.join(surveys).on(surveys.id == survey_iterations.survey_id) \
.join(survey_questions).on(survey_questions.survey_iteration_id == survey_iterations.id) \
.join(questions).on(questions.id == survey_questions.question_id) \
.select(questions.week).distinct() \
.where((survey_iterations.week == week) & (survey_iterations.year == year)) \
.where(surveys.company_id == company_id) \
.where(questions.dimension_id != 1)
query = str(q).replace("\"", "")
result = self.__select_source(query)
return result.at[0, 0]
def insert_topic_entities(self, company_id: str,
topic_id: str,
year: int,
week: int,
entities_categories: str,
entities_tags: str):
"""
Insert entities information regarding a topic.
:param company_id: uuid of a given company
:param topic_id: uuid of a given topic
:param year: year entities are about
:param week: week entities are about
:param entities_categories: categories of a topic
:param entities_tags: json str of word cloud
:return:
"""
topic_entities = Table("`%s`.`%s`" % (self.__insights_db, cons.TOPIC_ENTITIES_TABLE))
query = Query.into(topic_entities) \
.columns(cons.ENTITIES_TABLE_COMPANY_ID,
cons.TOPIC_ENTITIES_TABLE_COMPANY_TOPIC_ID,
cons.ENTITIES_TABLE_YEAR,
cons.ENTITIES_TABLE_WEEK,
cons.TOPIC_ENTITIES_TABLE_CATEGORIES,
cons.TOPIC_ENTITIES_TABLE_TAGS) \
.insert(company_id,
topic_id,
year,
week,
entities_categories,
entities_tags)
self.__insert_source(query.get_sql(quote_char=None))
def insert_survey_iteration_entities(self, company_id: str,
survey_iteration_id: str,
year: int,
week: int,
entities_categories: str,
entities_tags: str):
"""
Insert entities information regarding a survey_iteration.
:param company_id: uuid of a given company
:param survey_iteration_id: uuid of a given survey_iteration
:param year: year entities are about
:param week: week entities are about
:param entities_categories: categories of a topic
:param entities_tags: json str of word cloud
:return:
"""
topic_entities = Table("`%s`.`%s`" % (self.__insights_db, cons.SURVEY_ITERATION_ENTITIES_TABLE))
query = Query.into(topic_entities) \
.columns(cons.ENTITIES_TABLE_COMPANY_ID,
cons.SURVEY_ENTITIES_TABLE_SURVEY_ITERATION_ID,
cons.ENTITIES_TABLE_YEAR,
cons.ENTITIES_TABLE_WEEK,
cons.TOPIC_ENTITIES_TABLE_CATEGORIES,
cons.TOPIC_ENTITIES_TABLE_TAGS) \
.insert(company_id,
survey_iteration_id,
year,
week,
entities_categories,
entities_tags)
self.__insert_source(query.get_sql(quote_char=None))
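# Illustrative usage (sketch; the Connector construction arguments and the
# environment variables API_DB_NAME / INSIGHTS_DB_NAME are assumed to be set
# elsewhere, not shown in this file):
#
#   connector = Connector(...)                    # DB credentials elided
#   fetcher = APISourcesFetcher(connector)
#   companies = fetcher.get_companies_info()      # DataFrame of enabled, non-deleted companies
#   replies = fetcher.get_survey_replies_dimensions_questions(
#       {"start_week": 1, "start_year": 2020}, company_ids=[])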
|
python
|
#!/share/pyenv/bin/python3
'''
###########################################################
# Code name: VASP Electronic Structure Tool(VEST) #
# #
########### script to extract data from PROCAR ############
# Input file : PROCAR, #
# KPOINTS, #
# POSCAR, DOSCAR(from static calculation) #
########### script to extract data from EIGENVAL ##########
# VASP version: 5.4.4 #
# Input file : EIGENVAL,(soc,nosoc.megnetic) #
# KPOINTS, #
# POSCAR, DOSCAR(from static calculation) #
# KPOINTS.DFT(HSE), #
-----------------------------------------------------------
# run command: python3 vest.py #
# Author : Leiwang update 2021/05/07 #
# Email : [email protected] #
###########################################################
# The version copy from ubuntu
# note that the format of fermi.dat
# ISMEAR = 0; SIGMA = 0.01 broadening in eV -4-tet -1-fermi 0-gaus
# E-fermi : 7.0717 XC(G=0): -11.2821 alpha+bet :-12.3742
# create it by : grep fermi OUTCAR > fermi.dat # OUTCAR from static calculation
'''
import numpy as np
import math
import os
pi = math.pi
sqrt = math.sqrt
# used to read data from file
def read_data(filename):
with open(filename, 'r') as f:
content = f.readlines()
return content
# used to write data to file
def write2txt(filename, data):
f = open(filename, 'a')
f.write(data + "\n")
f.close()
#read fermi level
def fermienergy():
#efermi_tem = read_data(fermi)
#efermi_final = efermi_tem[1].split()[2]
#return efermi_final
dos_file=read_data('DOSCAR')
efermi_final = float(dos_file[5].strip().split()[3])
return efermi_final
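# Note: line 6 of DOSCAR holds "EMAX EMIN NEDOS EFERMI weight", so index [3]
# above is the Fermi energy from the static calculation.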
# transfer real space lattice a1 a2 a3 to reciprocal space lattice b1 b2 b3
def real_to_reciprocal(lines0):
a1 = lines0[2].split()
a2 = lines0[3].split()
a3 = lines0[4].split()
#print(len(a1))
A=[]
A.append(a1)
A.append(a2)
A.append(a3)
#print(a1[0])
volume = (float(a1[0])*float(a2[1])*float(a3[2])+float(a1[1])*float(a2[2])*float(a3[0])
+float(a1[2])*float(a2[0])*float(a3[1])-float(a1[0])*float(a2[2])*float(a3[1])
-float(a1[1])*float(a2[0])*float(a3[2])-float(a1[2])*float(a2[1])*float(a3[0]))
b=[[],[],[]]
c=[]
for i in (0,1,2):
if i==0:
j = 1
k = 2
elif i==1:
j = 2
k = 0
else:
j = 0
k = 1
c.append(float(A[j][1])*float(A[k][2])-float(A[j][2])*float(A[k][1]))
c.append(float(A[j][2])*float(A[k][0])-float(A[j][0])*float(A[k][2]))
c.append(float(A[j][0])*float(A[k][1])-float(A[j][1])*float(A[k][0]))
#print (c)
for l in (0,1,2):
bx = 2*pi*float(c[l])/volume
b[i].append(bx)
#print(b[i])
del c[:]
return b
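# Sanity-check example (sketch): b_i = 2*pi*(a_j x a_k)/V with V = a1.(a2 x a3).
# For a simple cubic POSCAR with lattice constant a = 2.0, the function returns
# b1 = (pi, 0, 0), b2 = (0, pi, 0), b3 = (0, 0, pi), i.e. 2*pi/a along each axis.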
# calculate the distance between two point in k space
def L_in_kspace(ary1,ary2,b):
dl = np.subtract(ary1,ary2)
DL = np.dot(dl,b)
# to get the mod of vector
#kb1 = 1/(2*pi)*sqrt((DL[0])**2+(DL[1])**2+(DL[2])**2)
kb1 = sqrt((DL[0])**2+(DL[1])**2+(DL[2])**2)
return kb1 #
# To calculate k mesh
def calcu_k_meth(lines0,lines1): #POSCAR , KPOINTS
result = [] # to read KPOINTS
for line in lines1:
line = line.strip()
if not len(line) or line.startswith('#'):
continue
result.append(line)
mesh = int(result[1]) # second line of KPOINTS
i = 4 # initial line
K_path = [] # to get k path
while i < len(result):
kpath=result[i].split()
K_path.append(kpath[0]+' '+kpath[1]+' '+kpath[2])
i += 1
#print (result,len(result))
# get mesh
Nk_path = len(K_path)
L_k_tem = 0.0
L_k_mesh_list = []
h_k = [] # high symmetry point
for j in range(0,len(K_path),2):
p1 = K_path[j]
p1 = p1.split()
p3=[]
for char in p1:
char = float(char)
p3.append(char)
p2 = K_path[j+1]
p2 = p2.split()
p4=[]
for char in p2:
char = float(char)
p4.append(char)
#print(p3,p4)
reci = real_to_reciprocal(lines0)
#print ('lattice cell',reci)
#print ('P3',p3)
L_k = L_in_kspace(p3,p4,reci) # calculate the distance between two point in k space
for i in range(0,mesh,1):
L_k_mesh = (L_k)*i/(mesh-1)
L_k_mesh_list.append(L_k_mesh+L_k_tem)
h_k.append(L_k_tem)
L_k_tem = L_k_tem+L_k
returnterm=[]
returnterm.append(L_k_mesh_list)
returnterm.append(mesh)
return returnterm
# used to calculate high symmetry line
def high_symmetry_line(lines0,lines1):
k_mesh_reci0=calcu_k_meth(lines0,lines1)
k_mesh_reci1=k_mesh_reci0[0]
k_mesh = k_mesh_reci0[1]
kpoint_high_sym=[]
i=0
kpoint_high_sym.append(k_mesh_reci1[i])
while i <len(k_mesh_reci1):
i=i+k_mesh
kpoint_high_sym.append(k_mesh_reci1[i-1])
return kpoint_high_sym
def read_incar(para):
incar_file=read_data('INCAR')
value = ''
for line in incar_file:
if para in line:
value=line.strip().split('=')[1]
return str(value)
# Deal with PROCAR file and get the orbital component
def project_orbit():
while True:
        conform_file = str(input('Confirm that POSCAR, PROCAR, KPOINTS, fermi.dat are in the current folder: Y/N'))
if 'Y' != conform_file :
print('please prepare POSCAR, PROCAR, KPOINTS ')
continue
else:
break
lines0 = read_data('POSCAR') #read POSCAR
lines1 = read_data('KPOINTS')
lines3 = read_data('PROCAR')
# extract data in two mode soc or nosoc
#mode = int(input('spd input 1; s px py pz dxy dyz dz2 dxz dx2 input 2:')) # LORBIT
#mode = 2
LSO = 1
mag = 1
Lorbit = 10
if 'T' in read_incar('LSORBIT') or 'TRUE' in read_incar('LSORBIT'):
LSO = 2
else:
if '2' in read_incar('ISPIN'):
mag = 2
if '11' in read_incar('LORBIT'):
Lorbit = 11
#print (LSO,mag,Lorbit)
efermi = fermienergy()
efermi = float(efermi)
lines2 = lines3[1]
lines2 = lines2.split()
nk = int(lines2[3]) # read the number of kpoints from PROCAR
nb = int(lines2[7]) # read the number of bands from PROCAR
ni = int(lines2[11]) # read the number of ion from PROCAR
#print(len(lines))
print ('number of kpoints:',nk,'number of bands:',nb,'number of ion:',ni)
L_k_mesh_list = calcu_k_meth(lines0,lines1)
L_k_mesh_list=L_k_mesh_list[0]
#Num_A = []
Element=lines0[5].split()
Num_A=lines0[6].split()
total_El = 0
print (Num_A)
for i in range(len(Num_A)):
x=int(Num_A[i])
print(x)
total_El = total_El + x
print (total_El)
if total_El >1:
if LSO ==1:
tb_betw=(ni+1)+4 # the number of line between two adjacent band in one k-block
else:
tb_betw=(ni+1)*4+4
else:
if LSO ==1:
tb_betw=ni+4 # the number of line between two adjacent band in one k-block
else:
tb_betw=ni*4+4
N_A = 0
N_i = 0
#if element in lines0[5]:
#print (Element)
i = 0
while i< len(Element):
N_A = N_A + int(Num_A[i])
for m in range(0,mag,1):
for i_nb in range(0,nb,1): #bands
for i_nk in range(0,nk,1): #kpoints
nkblock = tb_betw*nb+3 # the number of line between two adjacent k-block, such k-points 1 and k-points 2
k_tmp = lines3[3+i_nk*nkblock] # the fractional coordinate of k-points
k = k_tmp[19:52]
A = N_A-int(Num_A[N_i])+1
s = 0;p = 0;d = 0
px=0;py=0;pz=0;dxy=0;dyz=0;dxz=0;dx2=0;dz2=0
Energy = lines3[i_nk*nkblock+2+(tb_betw*(i_nb)+3)+m*(nk*nkblock+1)]
Energy = Energy.split()
energy = float(Energy[4])-efermi
if Lorbit == 10:
for j in range(A,N_A+1,1): # To choose the line the atom that you choose located in
xx_tmp = lines3[i_nk*nkblock+2+(tb_betw*(i_nb)+3)+j+2+m*(nk*nkblock+1)] # the line include the atom that you choose under nk,nb
xx = xx_tmp.split()
s = s + float(xx[1]) # s
p = p + float(xx[2])
d = d + float(xx[3]) # dxy dyz dz2 dxz dx2
write2txt('band-spd-'+Element[i]+'.dat',str(L_k_mesh_list[i_nk])+'\t'+str(energy)+'\t'+str(s)+'\t'+str(p)+'\t'+str(d))
else:
for j in range(A,N_A+1,1):
#print (j)
xx_tmp = lines3[i_nk*nkblock+5+tb_betw*(i_nb)+j+2+m*(nk*nkblock+1)]
#print (xx_tmp)
xx = xx_tmp.split()
s = s + float(xx[1])
px = px + float(xx[2])
py = py + float(xx[3])
pz = pz + float(xx[4])
dxy = dxy + float(xx[5])
dyz = dyz + float(xx[6])
dz2 = dz2 +float(xx[7])
dxz = dxz +float(xx[8])
dx2 = dx2 +float(xx[9])
write2txt('band-spxdx-'+Element[i]+'.dat',str(L_k_mesh_list[i_nk])+'\t'+str(energy)+'\t'+str(s)+'\t'+str(px)+'\t'+str(py)+'\t'+str(pz)+'\t'+str(dxy)+'\t'+str(dyz)+'\t'+str(dz2)+'\t'+str(dxz)+'\t'+str(dx2))
#write2txt('band-spxdx-'+Element[i]+'.dat',str(L_k_mesh_list[i_nk])+'\t'+str(energy)+'\t'+str(s)+'\t'+str(px)+'\t'+str(py))
#write2txt('band-spxdx-'+element+'.dat',str(i_nb+1)+'\t'+str(L_k_mesh_list[i_nk])+'\t'+str(energy)+'\t'+str(s)+'\t'+str(px)+'\t'+str(py)+'\t'+str(pz)+'\t'+str(dxy)+'\t'+str(dyz)+'\t'+str(dz2)+'\t'+str(dxz)+'\t'+str(dx2))
if Lorbit == 10:
#write2txt('band-spd-'+element+'.txt',str( )+'\t') # space
write2txt('band-spd-'+Element[i]+'.dat',str( )+'\t')
else:
write2txt('band-spxdx-'+Element[i]+'.dat',str( )+'\t')
N_i += 1
i += 1
hsl=high_symmetry_line(lines0,lines1)
for i in range(len(hsl)):
write2txt('high-symmetry-line.dat',str(hsl[i])+'\t'+str(-30))
write2txt('high-symmetry-line.dat',str(hsl[i])+'\t'+str(30))
write2txt('high-symmetry-line.dat',' ')
write2txt('high-symmetry-line.dat',str(0)+'\t'+str(0))
write2txt('high-symmetry-line.dat',str(hsl[len(hsl)-1])+'\t'+str(hsl[0]))
#project_orbit2()
def project_orbit2():
    # input part
    print('This part is used to process the orbital data in PROCAR')
print('To choose the element')
structure = read_data('POSCAR')
print (structure[5])
element0 = str(input('input the kind of element:'))
element = element0.split()
Lorbit = 10
if '11' in read_incar('LORBIT'):
Lorbit = 11
# choose orbits
Norbit = []
i = 0
Name_orb = ''
#print (len(element))
if Lorbit ==11:
while i < len(element):
print ('# 1. s 2. py 3. pz 4.px 5. dxy 6. dyz 7.dz2 8. dxz 9. x2-y2')
Norbit0 = str(input('input the orbit of element'+'\t'+str(element[i])+'\t'+"""in format '1 2 3 4'"""))
Norbit.append(Norbit0)
i+=1
else:
while i < len(element):
print ('# 1. s 2. p 3. d')
Norbit0 = str(input('input the orbit of element'+'\t'+str(element[i])+'\t'+"""in format '1 2 3'"""))
Norbit.append(Norbit0)
i+=1
#orbit name eg. 1 2 3 4
Name_orb = ''
for e in Norbit:
e1 = e.split()
for orb in e1:
Name_orb = Name_orb+orb
# element name eg. Au
Name_ele = ''
N_el = 0
while N_el < len(element):
Name_ele=Name_ele+element[N_el]
N_el += 1
# create file
if Lorbit == 11:
write2txt('projected_band'+Name_ele+Name_orb+'.dat','# 1. s 2. py 3. pz 4.px 5. dxy 6. dyz 7.dz2 8. dxz 9. x2-y2')
else:
write2txt('projected_band'+Name_ele+Name_orb+'.dat','# 1. s 2. p 3. d')
write2txt('projected_band'+Name_ele+Name_orb+'.dat','# element : '+Name_ele+'\t'+Name_orb)
if Lorbit ==11:
orbit_file = read_data('band-spxdx-'+element[0]+'.dat')
else:
orbit_file = read_data('band-spd-'+element[0]+'.dat')
comp=[0.0 for i in range(len(orbit_file))] # component
path = [0.0 for i in range(len(orbit_file))] # path
energy= [0.0 for i in range(len(orbit_file))] # energy
N_el = 0 # number of element
goin = 0 # flag to set write only one times
#print (element)
while N_el < len(element):
#orbit_file = read_data('band-spxdx-'+element[N_el]+'.dat')
Lf = 0 # Length of file
#print (N_el)
while Lf < len(orbit_file):
orbit = orbit_file[Lf].split()
#print (orbit)
if len(orbit)==0:
path[Lf] = ''
energy[Lf] = ''
Lf +=1
continue
else:
path[Lf] = orbit[0]
energy[Lf] = orbit[1]
i=0 # index to sum orbits
Norbit_xx = Norbit[N_el].split()
while i < len(Norbit_xx):
N = int(Norbit_xx[i])+1
comp[Lf] = comp[Lf]+float(orbit[N])
i += 1
#print (Lf)
Lf += 1
goin += 1
N_el += 1
# write data
#print (len(path),len(energy))
i=0
while i < len(path):
if len(path[i]) == 0:
write2txt('projected_band'+Name_ele+Name_orb+'.dat','')
else:
write2txt('projected_band'+Name_ele+Name_orb+'.dat',str(path[i])+'\t'+str(energy[i])+'\t'+str(comp[i])+'\t')
i+=1
# used to read EIGENVAL file
def read_eigenval(lines3,nk,nb,mag):
eigenval_file = lines3
#print (eigenval_file[7])
i= 7
list_eigenval_total=[[0 for i in range(nk)] for j in range(nb)]
list_eigenval_up=[[0 for i in range(nk)] for j in range(nb)]
list_eigenval_down=[[0 for i in range(nk)] for j in range(nb)]
k=0
if mag ==1:
while i < len(eigenval_file):
i = i + 1 # add one line
for j in range(0,nb,1):
value = eigenval_file[i]
temp = value.split()
i +=1
if k==nk:
k=0
list_eigenval_total[j][k]=temp[1]
k+=1 # index of k points
i+=1 # add one line
return list_eigenval_total
else:
while i < len(eigenval_file):
i = i + 1 # add one line
for j in range(0,nb,1):
value = eigenval_file[i]
temp = value.split()
i +=1
if k==nk:
k=0
list_eigenval_up[j][k]=temp[1]
k+=1 # index of k points
i+=1 # add one line
i=7
k=0
while i < len(eigenval_file):
i = i + 1 # add one line
for j in range(0,nb,1):
value = eigenval_file[i]
temp = value.split()
i +=1
if k==nk:
k=0
list_eigenval_down[j][k]=temp[2]
k+=1 # index of k points
i+=1 # add one line
list = [list_eigenval_up,list_eigenval_down]
return list
# used to calculate normal band structure
def band_cal():
lines0 = read_data('POSCAR') #read POSCAR
lines1 = read_data('KPOINTS')
lines3 = read_data('EIGENVAL')
mag = 1 #: int(input('nonmagnetic 1; magnetic (nosoc) 2:')) # ISPIN equal to 1 or 2
if 'T' in read_incar('LSORBIT'):
LSO = 2
else:
if '2' in read_incar('ISPIN'):
mag = 2
efermi = fermienergy()
#efermi = float(efermi)
lines2 = lines3[5]
lines2 = lines2.split()
num_k = int(lines2[1]) # read the number of kpoints from PROCAR
num_b = int(lines2[2]) # read the number of bands from PROCAR
print ('number of kpoints:',num_k,'number of bands:',num_b)
# extract data in two mode magnetic or no
if 1 == mag:
L_k_mesh_list = calcu_k_meth(lines0,lines1)
L_k_mesh_list=L_k_mesh_list[0]
list_eigen_val = read_eigenval(lines3,num_k,num_b,mag)
for ib in range(num_b):
for ik in range(num_k):
write2txt('bandstructure.dat',str(L_k_mesh_list[ik])+'\t'+str(float(list_eigen_val[ib][ik])-efermi))
write2txt('bandstructure.dat',' ')
hsl=high_symmetry_line(lines0,lines1)
for i in range(len(hsl)): # print High symmetry line
write2txt('hsl.dat',str(hsl[i])+'\t'+str(-30))
write2txt('hsl.dat',str(hsl[i])+'\t'+str(30))
write2txt('hsl.dat',' ')
write2txt('hsl.dat',str(0)+'\t'+str(0))
write2txt('hsl.dat',str(hsl[len(hsl)-1])+'\t'+str(hsl[0]))
elif 2==mag :
L_k_mesh_list = calcu_k_meth(lines0,lines1)
L_k_mesh_list=L_k_mesh_list[0]
list_eigen_val = read_eigenval(lines3,num_k,num_b,mag)
list_eigen_val_up = list_eigen_val[0]
list_eigen_val_down = list_eigen_val[1]
for ib in range(num_b):
for ik in range(num_k):
write2txt('bandstructure_up.dat',str(L_k_mesh_list[ik])+'\t'+str(float(list_eigen_val_up[ib][ik])-efermi))
write2txt('bandstructure_up.dat',' ')
for ib in range(num_b):
for ik in range(num_k):
write2txt('bandstructure_down.dat',str(L_k_mesh_list[ik])+'\t'+str(float(list_eigen_val_down[ib][ik])-efermi))
write2txt('bandstructure_down.dat',' ')
hsl=high_symmetry_line(lines0,lines1)
for i in range(len(hsl)): # print High symmetry line
write2txt('hsl.dat',str(hsl[i])+'\t'+str(-30))
write2txt('hsl.dat',str(hsl[i])+'\t'+str(30))
write2txt('hsl.dat',' ')
write2txt('hsl.dat',str(0)+'\t'+str(0))
write2txt('hsl.dat',str(hsl[len(hsl)-1])+'\t'+str(hsl[0]))
else:
print('incorrect magmam')
# used to read the kpoints of HSE calculation
def read_hse_KPOINTS(lines1,lines_1):
result = []
for line in lines_1: #to read each line
line = line.strip()
if not len(line) or line.startswith('#'):
continue
result.append(line)
mesh = int(result[1])
#print(mesh)
i = 4 # initial line
K_path = []
while i < len(result):
K_path.append(result[i])
i += 1
# get mesh
Nk_path = len(K_path)
list = [] # used to store the point on the high symmetry line
for j in range(0,Nk_path,2):
p1 = K_path[j]
p1 = p1.split()
p3=[]
for char in p1:
char = float(char)
p3.append(char)
p2 = K_path[j+1]
p2 = p2.split()
p4=[]
for char in p2:
char = float(char)
p4.append(char)
#print(p3,p4)
#print (reci)
# output k points
for i in range(mesh):
list_k = []
px = p3[0]-(p3[0]-p4[0])*(i)/(mesh-1)
            py = p3[1]-(p3[1]-p4[1])*(i)/(mesh-1)
pz = p3[2]-(p3[2]-p4[2])*(i)/(mesh-1)
list_k.append(px)
list_k.append(py)
list_k.append(pz)
#print (list_k)
list.append(list_k)
#print (list)
# compare with HSE mesh
k_mesh_hse = []
for i in range(3,len(lines1),1):
kp0=lines1[i]
kp1 = kp0.split()
if kp1[3] == '0':
#print (kp1)
k_mesh_hse.append(kp1)
#print (len(list),len(k_mesh_hse),k_mesh_hse)
#print(list)
com_num = [] # used to collect compare number
list_temp = list
k=0
j=0
for i in range(len(k_mesh_hse)):
#print ('i',i)
while j<len(list):
com_hse_kx = k_mesh_hse[i][0]
com_hse_ky = k_mesh_hse[i][1]
com_dft_kx = list_temp[j][0]
com_dft_ky = list_temp[j][1]
#print(k_mesh_hse[i],list_temp[j])
if abs(float(com_hse_kx)-com_dft_kx) <= 0.00001 and abs(float(com_hse_ky)-com_dft_ky <= 0.00001):
com_num.append(j)
k=j
#print ('k',k)
break
j += 1
j=k+1
#print(len(com_num),com_num)
return com_num
# used to read EIGENVAL of HSE calculation
def read_hse_eigenval(lines3,nk,nb):
eigenval_file = lines3
i= len(eigenval_file)-nk*(nb+2)+1
list_eigenval_total=[[0 for i in range(nk)] for j in range(nb)]
k=0
#print (i,eigenval_file[i])
while i < len(eigenval_file):
i = i + 1 # add one line
for j in range(0,nb,1):
value = eigenval_file[i]
temp = value.split()
i +=1
if k==nk:
k=0
list_eigenval_total[j][k]=temp[1]
k+=1 # index of k points
i+=1 # add one line
return list_eigenval_total
# used to calculate hse band
def band_hse_cal():
lines0 = read_data('POSCAR') #read POSCAR
lines1 = read_data('KPOINTS')
lines3 = read_data('EIGENVAL')
    print('be sure to set the correct number of kpoints in the EIGENVAL file')
    while True:
        k_DFT = input('confirm KPOINTS.DFT is in the current folder (Y/N):')
if k_DFT != 'Y':
print('please input normal mode KPOINTS.DFT file')
else:
print ('OK')
break
lines_1 = read_data('KPOINTS.DFT')
print('1. normal mode input')
print('2. abnormal mode input')
mode = float(input())
efermi = fermienergy()
#efermi = float(efermi)
lines2 = lines3[5] # to get the number of band from EGIENVAL file
lines2 = lines2.split()
num_b = int(lines2[2])
k_mesh_hse_num = []
for i in range(3,len(lines1),1):
kp0=lines1[i]
kp1 = kp0.split()
if kp1[3] == '0':
#print (kp1)
k_mesh_hse_num.append(kp1)
num_k = len(k_mesh_hse_num) # read the number of kpoints from EIGENVAL
#print(num_k)
print('number of kpoints:',num_k)
print('number of bands:',num_b)
# extract data in two mode magnetic or no
if mode == 1:
L_k_mesh_list = calcu_k_meth(lines0,lines_1)
L_k_mesh_list=L_k_mesh_list[0]
list_eigen_val = read_hse_eigenval(lines3,num_k,num_b)
for ib in range(num_b):
for ik in range(num_k):
write2txt('bandstructure.dat',str(L_k_mesh_list[ik])+'\t'+str(float(list_eigen_val[ib][ik])-efermi))
write2txt('bandstructure.dat',' ')
hsl=high_symmetry_line(lines0,lines_1)
for i in range(len(hsl)): # print High symmetry line
write2txt('bandstructure.dat',str(hsl[i])+'\t'+str(-30))
write2txt('bandstructure.dat',str(hsl[i])+'\t'+str(30))
write2txt('bandstructure.dat',' ')
write2txt('bandstructure.dat',str(0)+'\t'+str(0))
write2txt('bandstructure.dat',str(hsl[len(hsl)-1])+'\t'+str(hsl[0]))
else:
L_k_mesh_list = calcu_k_meth(lines0,lines_1)
L_k_mesh_list = L_k_mesh_list[0]
L_k_mesh_list_com = []
num = read_hse_KPOINTS(lines1,lines_1)
#print(len(num))
for i in range(len(num)):
#print(i,int(num[i]))
L_k_mesh_list_com.append(L_k_mesh_list[int(num[i])])
list_eigen_val = read_hse_eigenval(lines3,num_k,num_b)
for ib in range(num_b):
for ik in range(len(num)):
write2txt('bandstructure.dat',str(L_k_mesh_list_com[ik])+'\t'+str(float(list_eigen_val[ib][ik])-efermi))
write2txt('bandstructure.dat',' ')
hsl=high_symmetry_line(lines0,lines_1)
for i in range(len(hsl)): # print High symmetry line
write2txt('bandstructure.dat',str(hsl[i])+'\t'+str(-30))
write2txt('bandstructure.dat',str(hsl[i])+'\t'+str(30))
write2txt('bandstructure.dat',' ')
write2txt('bandstructure.dat',str(0)+'\t'+str(0))
write2txt('bandstructure.dat',str(hsl[len(hsl)-1])+'\t'+str(hsl[0]))
# used to calculate band structure
def bandstructure():
    conform_file = str(input('Confirm that POSCAR, EIGENVAL, KPOINTS, fermi.dat are in the current folder: Y/N'))
    if 'Y' != conform_file:
        print('please prepare POSCAR, EIGENVAL, KPOINTS')
print('To choose the program that you want to use: ')
print('1. normal band')
print('2. HSE band')
choose_mode = str(input())
if '1' ==choose_mode:
band_cal()
else:
band_hse_cal()
def QEbandstructure():
    '''Use this function after running bands.x.
    If filband='graphene.band' is set in the bands.x input,
    invoke it as: python vest.py graphene.band'''
import sys
filename = sys.argv[1]
DFTbanddata=read_data(filename)
print (DFTbanddata[0])
nb = int(DFTbanddata[0].split()[2][:-1])
nk = int(DFTbanddata[0].split()[4])
len_block = round(nb/10)
    if len_block*10 < nb:
len_block=len_block+1
#print(len_block)
energy = []
i=1
for point in range(0,nk):
e1=[]
for j in range(0,len_block):
energy_k=DFTbanddata[i+j+1].strip().split()
for w in energy_k:
e1.append(w)
energy.append(e1)
i=i+len_block+1
#print (np.array(energy).shape)
for i in range(nb):
for j in range(nk):
write2txt('band_qe.dat',str(j+1)+'\t'+str(energy[j][i]))
write2txt('band_qe.dat','')
filename = 'band_qe.dat'
DFTbandfile = read_data(filename)
Efermi = 0 #fermienergy()
write2txt('bandrange.dat',' No. Min Max')
nb_list=[]
EMIN =[]
EMAX=[]
for i in range(0,int(nb)):
Emin,Emax=range_of_band_vest(nk,i+1,filename)
nb_list.append(i+1)
EMIN.append(Emin+Efermi)
EMAX.append(Emax+Efermi)
write2txt('bandrange.dat','Nband: %.f %.3f %.3f'%(i+1,Emin+Efermi,Emax+Efermi))
os.system('rm band_qe.dat')
def band_kpoint_PROCAR():
LSO = 1
if 'T' in read_incar('LSORBIT'):
LSO = 2
ONE_kpoint = int(input('input one k-point'))
SOME_bands0 = str(input('input bands'))
SOME_bands=SOME_bands0.split()
procar = read_data('PROCAR')
procar_line2 = procar[1]
kpoints_bands_ions = procar_line2.split()
kpoints = int(kpoints_bands_ions[3])
bands = int(kpoints_bands_ions[7])
ions = int(kpoints_bands_ions[11])
print ('number of kpoints:',kpoints,'number of bands:',bands)
i=0
j=0
    # To find the line index of the requested k-point block
procar_line=''
for procar_line in procar:
procar_line_detail = procar_line.split()
if 'k-point ' in procar_line and ONE_kpoint == int(procar_line_detail[1]) :
#print (procar_line_detail[1])
j=i
i+=1
kpoint_detail=[]
block = 2+bands*(4+(ions+1)*(LSO**2))-1
#print (j)
for i in range(j-1,j+block-1,1):
kpoint_detail.append(procar[i])
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat','k-points :'+'\t'+str(ONE_kpoint))
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat','bands :'+'\t'+str(SOME_bands0))
ORBIT =procar[j+4]
ORBIT = ORBIT[:-1]
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat',ORBIT)
i=0
k=0
#print (kpoint_detail)
while i < len(SOME_bands):
j=0
for component_line in kpoint_detail:
component=component_line.split()
if 'band ' in component_line and str(SOME_bands[i]) == component[1]:
k=j
j+=1
#print (j)
i+=1
bandsx=kpoint_detail[k+ions+3]
bandsx=bandsx[:-1]
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat',bandsx)
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat','') #empty line
i=0
while i < len(SOME_bands):
j=0
for component_line in kpoint_detail:
component=component_line.split()
if 'band ' in component_line and str(SOME_bands[i]) == component[1]:
k=j
for x in range(k-1,k+ions+4,1):
bandsx=kpoint_detail[x]
bandsx=bandsx[:-1]
write2txt('procar_bands_kpoint'+str(ONE_kpoint)+'.dat',bandsx)
j+=1
i+=1
def band_plot1():
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.use('Agg')
from pymatgen.io.vasp.outputs import Vasprun
from pymatgen.electronic_structure.plotter import BSDOSPlotter,\
BSPlotter,BSPlotterProjected,DosPlotter
# read vasprun.xml,get band and dos information
bs_vasprun = Vasprun("vasprun.xml",parse_projected_eigen=True)
bs_data = bs_vasprun.get_band_structure(line_mode=True)
dos_vasprun=Vasprun("vasprun.xml")
dos_data=dos_vasprun.complete_dos
# set figure parameters, draw figure
banddos_fig = BSDOSPlotter(bs_projection='elements', dos_projection='elements',
vb_energy_range=4, fixed_cb_energy=4)
banddos_fig.get_plot(bs=bs_data, dos=dos_data)
plt.savefig('banddos_fig.png')
def band_plot():
import matplotlib.pyplot as plt
ymin= -2
ymax= 2
step = 1
figs = (6,6) #figsize
plt.figure(figsize=figs)
#######bands###########
DFTbandfile = read_data('bandstructure.dat')
#x=np.loadtxt("bandstructure.dat")[:, 0]
#e=np.loadtxt("bandstructure.dat")[:, 1]
x = []
e = []
x_tem=[]
e_tem=[]
for k in range(len(DFTbandfile)):
line = DFTbandfile[k]
if len(line.split())==0:
e.append(e_tem)
x=x_tem
x_tem=[]
e_tem=[]
continue
x_tem.append(float(line.strip().split()[0]))
e_tem.append(float(line.strip().split()[1]))
nb,nk =get_b_k(DFTbandfile)
x=np.array(x)
e=np.array(e)
plt.ylim(ymin,ymax)
plt.xlim(0,max(x))
for i in range(nb):
plt.plot(x,e[i],'k', linewidth=1)
plt.ylabel('Energy(eV)')
#plt.show()
plt.xticks([])
plt.yticks(np.arange(ymin,ymax,step))
#plt.title(sys.argv[1],x=0.5,y=1.02)
#plt.savefig(sys.argv[1]+".png",dpi=150)
plt.savefig("BAND.png", dpi=600)
#plt.savefig("fig.eps",format='eps', transparent=True, dpi=300)
def get_b_k(bandfile):
'''get the number of kpoints and bands from band file'''
nband =[]
nk = 0
for line in bandfile:
#print(len(line))
if (len(line)==2 or len(line)==3) and not nk:
nk = bandfile.index(line)
#print (nk)
nb = (len(bandfile)+1)/(nk+1)
return int(nb),int(nk)
def range_of_band_vaspkit(nk,nb,filename):
'''get energy of single bands'''
#print (nb,nk)
bandfile = read_data(filename)
nbb = (nb-1)*(nk+2)+3
nbe = nb*(nk+2)+1
i=nbb
band=[]
while i < nbe:
energy = float(bandfile[i].split()[1])
band.append(energy)
i+=1
Emin = min(band)
Emax = max(band)
return Emin,Emax
def range_of_band_vest(nk,nb,filename):
'''get energy of single bands'''
bandfile = read_data(filename)
nbb = (nb-1)*(nk+1)
nbe = nb*(nk+1)-2
i=nbb
band=[]
while i <= nbe:
energy = float(bandfile[i].split()[1])
band.append(energy)
i+=1
Emin = min(band)
Emax = max(band)
return Emin,Emax
def range_of_all_bands():
#from pathlib import Path
import os.path
#os.path.isfile(fname)
'''get energy range of all the bands from DFT bands '''
band_path = os.getcwd()
#print(band_path)
vestbandname = 'bandstructure.dat'
qebandname='band_qe.dat'
vaspkitbandname = 'BAND.dat'
if os.path.isfile(vestbandname):
filename = 'bandstructure.dat'
DFTbandfile = read_data(filename)
nb,nk =get_b_k(DFTbandfile)
Efermi = fermienergy()
write2txt('bandrange.dat',' No. Min Max')
nb_list=[]
EMIN =[]
EMAX=[]
for i in range(0,int(nb)):
Emin,Emax=range_of_band_vest(nk,i+1,filename)
nb_list.append(i+1)
EMIN.append(Emin+Efermi)
EMAX.append(Emax+Efermi)
write2txt('bandrange.dat','Nband: %.f %.3f %.3f'%(i+1,Emin+Efermi,Emax+Efermi))
elif os.path.isfile(qebandname):
filename = 'band_qe.dat'
DFTbandfile = read_data(filename)
nb = int(DFTbandfile[0].split()[2][:-1])
nk = int(DFTbandfile[0].split()[4])
#nb,nk =get_b_k(DFTbandfile)
#print(nb,nk)
Efermi = 0 #fermienergy()
write2txt('bandrange.dat',' No. Min Max')
nb_list=[]
EMIN =[]
EMAX=[]
for i in range(0,int(nb)):
Emin,Emax=range_of_band_vest(nk,i+1,filename)
nb_list.append(i+1)
EMIN.append(Emin+Efermi)
EMAX.append(Emax+Efermi)
write2txt('bandrange.dat','Nband: %.f %.3f %.3f'%(i+1,Emin+Efermi,Emax+Efermi))
elif os.path.isfile(vaspkitbandname):
filename = 'BAND.dat'
DFTbandfile = read_data(filename)
nk =int(DFTbandfile[1].split()[4])
nb =int(DFTbandfile[1].split()[5])
Efermi = fermienergy()
write2txt('bandrange.dat',' No. Min Max')
nb_list=[]
EMIN =[]
EMAX=[]
for i in range(0,int(nb)):
Emin,Emax=range_of_band_vaspkit(nk,i+1,filename)
nb_list.append(i+1)
EMIN.append(Emin+Efermi)
EMAX.append(Emax+Efermi)
write2txt('bandrange.dat','Nband: %.f %.3f %.3f'%(i+1,Emin+Efermi,Emax+Efermi))
else:
print('please extract band structure data from EIGENVAL by using option 3')
def wcc_output():
print('output wcc from wannier90.wout file to final file')
num_wann=int(input('num_wann'))
final=read_data('wannier90.wout')
#Efermi = fermienergy('fermi.dat')
#wt=read_data('wt.in')
#pos=read_data('POSCAR')
#wtout='wt.inp'
f=0
nw=0
FS_list=[]
for i in range(len(final)):
line=final[i]
if f != 0:
FS=line.split() # sometimes failing due to no space, format is 'WF centre and spread 1 ( 0.010369, 2.709238, 10.690639 ) 2.58836225',
            write2txt('final',FS[6][:-1]+'\t'+FS[7][:-1]+'\t'+FS[8]+'\t'+FS[10])
nw+=1
if nw==num_wann:
f=0
if 'Final State' in line:
f=i
# used to choose the mode you want to calculate
while True:
#DFTbandfile=argv[1]
print('To choose the program that you want to use:')
print('1. project orbit (step1)')
print('2. project orbit (step2)')
print('3. vasp band structure')
print('4. the component of some bands at one k-point')
print('5. wannier band range')
print('6. wt.in wcc prepare')
print('7. QE band range')
print('8. quit')
project = str(input())
if '1' == project :
print('you are performing a project-orbit program now.')
project_orbit()
continue
elif project == '2':
project_orbit2()
elif project == '3':
bandstructure()
elif project == '4':
band_kpoint_PROCAR()
elif project == '5':
range_of_all_bands()
elif project == '6':
wcc_output()
elif project == '7':
QEbandstructure()
else:
break
|
python
|
class MultiSigDeprecationWitness:
def __init__(self, next_state_state_update, signatures, inclusion_witness):
self.next_state_state_update = next_state_state_update
self.signatures = signatures
self.inclusion_witness = inclusion_witness
class MultiSigPredicate:
dispute_duration = 10
def __init__(self, parent_plasma_contract):
self.parent = parent_plasma_contract
def can_initiate_exit(self, state_update, initiation_witness):
# For now, anyone can submit an exit TODO: make this one or multiple of owners
return True
def verify_deprecation(self, state_id, state_update, revocation_witness):
# Check the state_id is in the state_update
assert state_update.start <= state_id and state_update.end > state_id
# Check the state_id is in the revocation_witness state_update
assert revocation_witness.next_state_state_update.start <= state_id and revocation_witness.next_state_state_update.end > state_id
# Check inclusion proof
assert self.parent.state_update_chain.verify_inclusion(revocation_witness.next_state_state_update,
self.parent.address,
revocation_witness.inclusion_witness)
# Check that all owners signed off on the change
assert state_update.state.recipient == revocation_witness.signatures
# Check that the spend is after the exit state
assert state_update.plasma_block_number < revocation_witness.next_state_state_update.plasma_block_number
return True
    def finalize_exit(self, exit, call_data):
        # Extract required information from call data
        recipients_sigs, destination = call_data
# Check that the resolution is signed off on by all parties in the multisig
assert recipients_sigs == exit.state_update.state.recipient
# Transfer funds to the recipient
self.parent.erc20_contract.transferFrom(self, destination, exit.state_update.end - exit.state_update.start)
def get_additional_lockup(self, state):
return 0
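# Illustrative flow (sketch; the plasma contract object and witness fields are
# assumptions based on the attribute names used above):
#
#   witness = MultiSigDeprecationWitness(newer_state_update, all_owner_signatures, inclusion_proof)
#   predicate = MultiSigPredicate(plasma_contract)
#   predicate.verify_deprecation(state_id, exited_state_update, witness)  # True, or AssertionError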
|
python
|
import json
import os
import logging
import random
from collections import OrderedDict, defaultdict
import numpy as np
import torch
from coref_bucket_batch_sampler import BucketBatchSampler
from metrics import CorefEvaluator, MentionEvaluator
from utils.utils import extract_clusters, extract_mentions_to_predicted_clusters_from_clusters, extract_clusters_for_decode
from conll import evaluate_conll
def nested_to_tuple(l):
if isinstance(l, list):
for i in range(len(l)):
l[i] = nested_to_tuple(l[i])
l = tuple(l)
return l
class Evaluator:
def __init__(self, logger, eval_output_dir, experiment_name=''):
self.eval_output_dir = eval_output_dir
self.experiment_name = experiment_name
self.logger = logger
def evaluate(self, outputs, prefix="", tb_writer=None, global_step=None, official=False):
assert not official
post_pruning_mention_evaluator = MentionEvaluator()
mention_evaluator = MentionEvaluator()
coref_evaluator = CorefEvaluator()
losses = defaultdict(list)
doc_to_prediction = {}
doc_to_subtoken_map = {}
for output in outputs:
# gold_clusters: List[List[List[int]]]
# predicted_clusters: List[List[List[int]]]
gold_clusters = nested_to_tuple(output['gold_clusters'])
predicted_clusters = nested_to_tuple(output['predicted_clusters'])
doc_key = output['doc_key']
mention_to_gold_clusters = extract_mentions_to_predicted_clusters_from_clusters(gold_clusters)
gold_mentions = list(mention_to_gold_clusters.keys())
# starts, end_offsets, coref_logits, mention_logits = output[-4:]
# max_antecedents = np.argmax(coref_logits, axis=1).tolist()
# mention_to_antecedent = {((int(start), int(end)), (int(starts[max_antecedent]), int(end_offsets[max_antecedent]))) for start, end, max_antecedent in
# zip(starts, end_offsets, max_antecedents) if max_antecedent < len(starts)}
# predicted_clusters, _ = extract_clusters_for_decode(mention_to_antecedent)
# candidate_mentions = list(zip(starts, end_offsets))
mention_to_predicted_clusters = extract_mentions_to_predicted_clusters_from_clusters(predicted_clusters)
predicted_mentions = list(mention_to_predicted_clusters.keys())
# post_pruning_mention_evaluator.update(candidate_mentions, gold_mentions)
mention_evaluator.update(predicted_mentions, gold_mentions)
coref_evaluator.update(predicted_clusters, gold_clusters, mention_to_predicted_clusters,
mention_to_gold_clusters)
doc_to_prediction[doc_key] = predicted_clusters
doc_to_subtoken_map[doc_key] = None
post_pruning_mention_precision, post_pruning_mentions_recall, post_pruning_mention_f1 = post_pruning_mention_evaluator.get_prf()
mention_precision, mentions_recall, mention_f1 = mention_evaluator.get_prf()
prec, rec, f1 = coref_evaluator.get_prf()
# muc, b_cubed, ceafe
results = []
for t, (_prec, _rec, _f1) in zip(('muc', 'b_cubed', 'ceafe') , coref_evaluator.get_prf_sep()):
results.append((f'{t}_prec', _prec))
results.append((f'{t}_rec', _rec))
results.append((f'{t}_f1', _f1))
results += [(key, sum(val) / len(val)) for key, val in losses.items()]
results += [
("post pruning mention precision", post_pruning_mention_precision),
("post pruning mention recall", post_pruning_mentions_recall),
("post pruning mention f1", post_pruning_mention_f1),
("mention precision", mention_precision),
("mention recall", mentions_recall),
("mention f1", mention_f1),
("precision", prec),
("recall", rec),
("f1", f1)
]
self.logger.info("***** Eval results {} *****".format(prefix))
for key, values in results:
if isinstance(values, float):
self.logger.info(f" {key} = {values:.3f}")
else:
self.logger.info(f" {key} = {values}")
if tb_writer is not None and global_step is not None:
tb_writer.add_scalar(key, values, global_step)
if self.eval_output_dir:
output_eval_file = os.path.join(self.eval_output_dir, "eval_results.txt")
with open(output_eval_file, "a") as writer:
if prefix:
writer.write(f'\n{prefix}:\n')
for key, values in results:
if isinstance(values, float):
writer.write(f"{key} = {values:.3f}\n")
else:
writer.write(f"{key} = {values}\n")
results = OrderedDict(results)
results["experiment_name"] = self.experiment_name
results["data"] = prefix
with open(os.path.join(self.eval_output_dir, "results.jsonl"), "a+") as f:
f.write(json.dumps(results) + '\n')
if official:
with open(os.path.join(self.args.output_dir, "preds.jsonl"), "w") as f:
f.write(json.dumps(doc_to_prediction) + '\n')
f.write(json.dumps(doc_to_subtoken_map) + '\n')
if self.args.conll_path_for_eval is not None:
conll_results = evaluate_conll(self.args.conll_path_for_eval, doc_to_prediction, doc_to_subtoken_map)
official_f1 = sum(results["f"] for results in conll_results.values()) / len(conll_results)
self.logger.info('Official avg F1: %.4f' % official_f1)
return results
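# Illustrative usage (sketch; the shape of `outputs` is inferred from the fields
# read above: each item needs 'gold_clusters', 'predicted_clusters' and 'doc_key'):
#
#   evaluator = Evaluator(logging.getLogger(), eval_output_dir="eval_out", experiment_name="run1")
#   results = evaluator.evaluate(outputs, prefix="dev")  # logs metrics, appends to results.jsonl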
|
python
|
#!/usr/bin/python3
"""
kimcsv2fasttext.py: convert kim's balanced data format to fasttext format
usage: ./kimcsv2fasttext.py < BalancedDataSet.csv
20180504 erikt(at)xs4all.nl
"""
import csv
import html
import nltk
import re
import sys
import time
from io import BytesIO
from urllib.request import urlopen
COMMAND = sys.argv.pop(0)
HEADINGDATE = "Date"
HEADINGGENRE = "Genre"
HEADINGIDENTIFIER = "Artikel-ID"
HEADINGNEWSPAPER = "Newspaper"
HEADINGSUBJECT = "Prediction"
LABELLENGTH = 3
LABELPREFIX = "__label__"
SEPARATOR = ","
URLPREFIX = r"^https?://"
URLSTART = "http://resolver.kb.nl/resolve?urn="
URLSUFFIX = ":ocr"
def isUrl(url):
return(re.search(URLPREFIX,url))
def readFile():
articles = []
lineNbr = 0
csvReader = csv.DictReader(sys.stdin,delimiter=SEPARATOR)
for row in csvReader:
lineNbr += 1
try:
date = row[HEADINGDATE]
genre = row[HEADINGGENRE]
identifiers = []
for cellValue in row.values():
if not cellValue is None and isUrl(cellValue):
identifiers.append(cellValue)
articles.append({"date":date,"genre":genre,"identifiers":identifiers})
except: sys.exit(COMMAND+": missing data on line "+str(lineNbr))
return(articles)
def abbreviateName(name):
return(name[0:LABELLENGTH].upper())
def readWebPage(url):
time.sleep(1)
return(str(urlopen(url,data=None).read(),encoding="utf-8"))
def removeXML(text):
text = re.sub(r"<[^<>]*>",r" ",text)
text = html.unescape(text)
return(text)
def removeRedundantWhiteSpace(text):
text = re.sub(r"\s+",r" ",text)
text = re.sub(r"^\s+",r"",text)
text = re.sub(r"\s+$",r"",text)
return(text)
def tokenize(text):
tokenizedSentenceList = nltk.word_tokenize(text)
tokenizedText = " ".join(tokenizedSentenceList)
return(tokenizedText)
def makeUrl(articleId):
return(URLSTART+articleId+URLSUFFIX)
def getArticleIdFromUrl(url):
fields = url.split("=")
return(":".join(fields[-1:]))
def printData(articles):
for i in range(0,len(articles)):
date = articles[i]["date"]
genre = abbreviateName(articles[i]["genre"])
text = ""
for url in articles[i]["identifiers"]:
url = makeUrl(getArticleIdFromUrl(url))
if len(text) > 0: text += " "
text += removeRedundantWhiteSpace(tokenize(removeXML(readWebPage(url))))
print(LABELPREFIX+genre+" DATE="+date+" "+text)
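# Example of an emitted line (sketch; the label is the upper-cased first three
# letters of the genre column and the text is the tokenized OCR fetched from
# the KB resolver):
#
#   __label__NIE DATE=1914-08-01 <tokenized article text>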
def main(argv):
articles = readFile()
printData(articles)
sys.exit(0)
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
python
|
# This file is part of the PySide project.
#
# Copyright (C) 2009-2011 Nokia Corporation and/or its subsidiary(-ies).
# Copyright (C) 2009 Riverbank Computing Limited.
# Copyright (C) 2009 Torsten Marek
#
# Contact: PySide team <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
return "PySide.QtWebKit", ("QWebView",)
|
python
|
# MIT License
#
# Copyright (c) 2019 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from http import HTTPStatus
from itertools import islice
from json import dumps
from logging import getLogger
from flask import make_response
try:
from flask_restx import Namespace, Resource
except ModuleNotFoundError:
from flask_restplus import Namespace, Resource
from packit_service.models import TaskResultModel
from packit_service.service.api.parsers import pagination_arguments, indices
from packit_service.service.events import Event
logger = getLogger("packit_service")
ns = Namespace("tasks", description="Celery tasks / jobs")
@ns.route("")
class TasksList(Resource):
@ns.expect(pagination_arguments)
@ns.response(HTTPStatus.PARTIAL_CONTENT, "Celery tasks list follows")
def get(self):
""" List all Celery tasks / jobs """
first, last = indices()
tasks = []
for task in islice(TaskResultModel.get_all(), first, last):
data = task.to_dict()
data["event"] = Event.ts2str(data["event"])
tasks.append(data)
resp = make_response(dumps(tasks), HTTPStatus.PARTIAL_CONTENT)
resp.headers["Content-Range"] = f"tasks {first+1}-{last}/{len(tasks)}"
resp.headers["Content-Type"] = "application/json"
return resp
@ns.route("/<string:id>")
@ns.param("id", "Celery task identifier")
class TaskItem(Resource):
@ns.response(HTTPStatus.OK, "OK, Celery task details follow")
@ns.response(HTTPStatus.NO_CONTENT, "Celery task identifier not in db")
def get(self, id: str):
"""A specific Celery task details"""
task = TaskResultModel.get_by_id(id)
if not task:
return "", HTTPStatus.NO_CONTENT
data = task.to_dict()
data["event"] = Event.ts2str(data["event"])
return data
|
python
|
from enum import Enum, unique
from Tables import door_pair_offset_table
def create_rooms(world, player):
world.rooms += [
Room(player, 0x01, 0x51168).door(Position.WestN2, DoorKind.Warp).door(Position.EastN2, DoorKind.Warp),
Room(player, 0x02, 0x50b97).door(Position.South2, DoorKind.TrapTriggerableLow).door(Position.InteriorV2, DoorKind.NormalLow2).door(Position.South2, DoorKind.ToggleFlag),
# Room(player, 0x03, 0x509cf).door(Position.SouthW, DoorKind.CaveEntrance),
Room(player, 0x04, 0xfe25c).door(Position.NorthW, DoorKind.StairKey2).door(Position.InteriorW, DoorKind.Dashable).door(Position.InteriorS, DoorKind.Dashable).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.Normal),
Room(player, 0x06, 0xfa192).door(Position.SouthW, DoorKind.Trap),
Room(player, 0x07, None),
# Room(player, 0x08, 0x5064f).door(Position.InteriorS2, DoorKind.CaveEntranceLow08).door(Position.SouthE, DoorKind.CaveEntrance).door(Position.SouthW2, DoorKind.NormalLow2).door(Position.SouthW2, DoorKind.ToggleFlag),
Room(player, 0x09, None),
Room(player, 0x0a, 0xfa734).door(Position.North, DoorKind.StairKey),
Room(player, 0x0b, 0xfabf0).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap2).door(Position.InteriorN, DoorKind.SmallKey),
Room(player, 0x0c, 0xfef53).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0x0d, 0xf918b).door(Position.SouthW, DoorKind.Trap),
Room(player, 0x0e, 0xfc279).door(Position.InteriorW, DoorKind.StairKey2).door(Position.InteriorS, DoorKind.Trap).door(Position.SouthE, DoorKind.DungeonEntrance),
# Room(player, 0x10, 0x50596).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x11, 0x50c52).door(Position.InteriorN, DoorKind.Dashable).door(Position.InteriorS, DoorKind.Dashable).door(Position.SouthE, DoorKind.SmallKey),
Room(player, 0x12, 0x50a9b).door(Position.North2, DoorKind.NormalLow).door(Position.North2, DoorKind.ToggleFlag).door(Position.South2, DoorKind.NormalLow).door(Position.South2, DoorKind.IncognitoEntrance),
Room(player, 0x13, 0xfe29d).door(Position.EastS, DoorKind.SmallKey).door(Position.EastN, DoorKind.Normal),
Room(player, 0x14, 0xfe464).door(Position.SouthE, DoorKind.SmallKey).door(Position.WestS, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Normal).door(Position.WestN, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.EastN, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x15, 0xfe63e).door(Position.WestS, DoorKind.Trap).door(Position.WestN, DoorKind.Normal),
Room(player, 0x16, 0xfa150).door(Position.InteriorV, DoorKind.Bombable).door(Position.InteriorW, DoorKind.SmallKey).door(Position.InteriorE, DoorKind.Normal).door(Position.NorthW, DoorKind.Normal),
Room(player, 0x17, None),
# Room(player, 0x18, 0x506e5).door(Position.NorthW2, DoorKind.NormalLow).door(Position.NorthW2, DoorKind.ToggleFlag),
Room(player, 0x19, 0xfacc6).door(Position.East, DoorKind.Bombable).door(Position.EastN, DoorKind.SmallKey),
Room(player, 0x1a, 0xfa670).door(Position.InteriorE, DoorKind.SmallKey).door(Position.WestN, DoorKind.SmallKey).door(Position.West, DoorKind.Bombable).door(Position.SouthW, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x1b, 0xfab31).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.Normal),
Room(player, 0x1c, 0xff784).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap).door(Position.InteriorW, DoorKind.Dashable),
Room(player, 0x1d, 0xfff19).door(Position.NorthW, DoorKind.BigKey),
Room(player, 0x1e, 0xfc35e).door(Position.EastS, DoorKind.Trap).door(Position.InteriorS, DoorKind.Trap).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x1f, 0xfc3af).door(Position.WestS, DoorKind.Trap).door(Position.InteriorS, DoorKind.Trap2),
Room(player, 0x20, 0xf918b).door(Position.SouthW, DoorKind.Trap),
Room(player, 0x21, 0x50d2e).door(Position.NorthE, DoorKind.SmallKey).door(Position.InteriorV, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x22, 0x50dd1).door(Position.South, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal),
Room(player, 0x23, 0xfed30).door(Position.SouthE, DoorKind.BombableEntrance).door(Position.EastS, DoorKind.Normal),
Room(player, 0x24, 0xfe6ee).door(Position.NorthE, DoorKind.BigKey).door(Position.InteriorN, DoorKind.Trap2).door(Position.InteriorW, DoorKind.Trap2).door(Position.InteriorE, DoorKind.Trap2).door(Position.SouthE, DoorKind.DungeonEntrance).door(Position.NorthW, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x26, 0xf9cbb).door(Position.South, DoorKind.SmallKey).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.InteriorN, DoorKind.Normal),
Room(player, 0x27, None),
Room(player, 0x28, 0xf92a8).door(Position.NorthW, DoorKind.StairKey2).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0x2a, 0xfa594).door(Position.NorthE, DoorKind.Trap).door(Position.NorthW, DoorKind.SmallKey).door(Position.EastS, DoorKind.Bombable).door(Position.East2, DoorKind.NormalLow).door(Position.SouthW, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x2b, 0xfaaa7).door(Position.InteriorS, DoorKind.Bombable).door(Position.WestS, DoorKind.Bombable).door(Position.NorthW, DoorKind.Trap).door(Position.West2, DoorKind.NormalLow),
# Room(player, 0x2c, 0x508cf).door(Position.InteriorW, DoorKind.Bombable).door(Position.InteriorE, DoorKind.Bombable).door(Position.InteriorS, DoorKind.Bombable).door(Position.SouthE, DoorKind.Bombable).door(Position.SouthW, DoorKind.CaveEntrance),
Room(player, 0x2e, 0xfc3d8).door(Position.NorthE, DoorKind.Normal),
# Room(player, 0x2f, 0x507d1).door(Position.InteriorW, DoorKind.Bombable).door(Position.SouthE, DoorKind.CaveEntrance),
Room(player, 0x30, 0xf8de3).door(Position.NorthW, DoorKind.Hidden).door(Position.InteriorW, DoorKind.Trap2),
Room(player, 0x31, 0xfcf4f).door(Position.InteriorW, DoorKind.BigKey).door(Position.InteriorS, DoorKind.TrapTriggerable),
Room(player, 0x32, 0x50e4b).door(Position.North, DoorKind.SmallKey),
Room(player, 0x33, 0xf8792).door(Position.SouthW, DoorKind.Trap),
Room(player, 0x34, 0xf993c).door(Position.EastN, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x35, 0xf97f1).door(Position.EastN, DoorKind.SmallKey).door(Position.WestN, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.InteriorE, DoorKind.Normal)
.door(Position.EastS, DoorKind.Normal).door(Position.InteriorV2, DoorKind.NormalLow),
Room(player, 0x36, 0xf9685).door(Position.EastN, DoorKind.Bombable).door(Position.North, DoorKind.SmallKey).door(Position.WestN, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal)
.door(Position.EastS, DoorKind.Normal).door(Position.South2, DoorKind.NormalLow),
Room(player, 0x37, 0xf9492).door(Position.WestN, DoorKind.Bombable).door(Position.EastN, DoorKind.Bombable).door(Position.InteriorW, DoorKind.SmallKey).door(Position.EastS, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal).door(Position.InteriorV2, DoorKind.NormalLow),
Room(player, 0x38, 0xf935b).door(Position.WestN, DoorKind.Bombable).door(Position.WestS, DoorKind.SmallKey),
Room(player, 0x39, 0xfc180).door(Position.SouthW, DoorKind.Trap).door(Position.InteriorS, DoorKind.SmallKey),
Room(player, 0x3a, 0xfa3f5).door(Position.South, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal),
Room(player, 0x3b, 0xfa9de).door(Position.SouthW, DoorKind.Normal),
# Room(player, 0x3c, 0x509a3).door(Position.NorthE, DoorKind.Bombable).door(Position.SouthE, DoorKind.CaveEntrance),
Room(player, 0x3d, 0xffd37).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.InteriorN, DoorKind.SmallKey).door(Position.SouthW, DoorKind.SmallKey).door(Position.InteriorW, DoorKind.Bombable),
Room(player, 0x3e, 0xfc486).door(Position.InteriorE, DoorKind.Trap).door(Position.SouthW, DoorKind.SmallKey),
Room(player, 0x3f, 0xfc51b).door(Position.InteriorS, DoorKind.Trap),
Room(player, 0x40, 0xf8eea).door(Position.InteriorS2, DoorKind.NormalLow2),
Room(player, 0x41, 0x50f15).door(Position.South, DoorKind.Trap),
Room(player, 0x42, None),
Room(player, 0x43, 0xf87f8).door(Position.NorthW, DoorKind.BigKey).door(Position.InteriorE, DoorKind.SmallKey).door(Position.SouthE, DoorKind.SmallKey),
Room(player, 0x44, 0xfdbcd).door(Position.InteriorN, DoorKind.Trap2).door(Position.InteriorS, DoorKind.SmallKey).door(Position.EastN, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x45, 0xfdcae).door(Position.WestN, DoorKind.Trap).door(Position.InteriorW, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x46, 0xf9bbb).door(Position.North2, DoorKind.NormalLow).door(Position.InteriorW2, DoorKind.NormalLow).door(Position.InteriorE2, DoorKind.NormalLow),
Room(player, 0x49, 0xfc12c).door(Position.NorthW, DoorKind.Hidden).door(Position.InteriorN, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.SmallKey).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x4a, 0xfa267).door(Position.InteriorW, DoorKind.Trap).door(Position.InteriorE, DoorKind.Trap).door(Position.North, DoorKind.SmallKey).door(Position.InteriorV, DoorKind.Normal).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0x4b, 0xfa8c9).door(Position.NorthW, DoorKind.Trap).door(Position.InteriorW, DoorKind.Dashable).door(Position.InteriorE, DoorKind.Dashable),
Room(player, 0x4c, 0xffece).door(Position.EastS, DoorKind.Trap),
Room(player, 0x4d, 0xffe5a).door(Position.NorthW, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal),
Room(player, 0x4e, 0xfc5ba).door(Position.InteriorN, DoorKind.Trap).door(Position.NorthW, DoorKind.SmallKey),
Room(player, 0x4f, 0xfca89).door(Position.WestS, DoorKind.SmallKey),
Room(player, 0x50, 0x510dc).door(Position.EastN2, DoorKind.Warp).door(Position.SouthE2, DoorKind.NormalLow2),
Room(player, 0x51, 0x51029).door(Position.North, DoorKind.Normal).door(Position.North, DoorKind.DungeonChanger),
Room(player, 0x52, 0x51230).door(Position.WestN2, DoorKind.Warp).door(Position.SouthW2, DoorKind.NormalLow2).door(Position.South, DoorKind.Normal),
Room(player, 0x53, 0xf88ad).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap2).door(Position.NorthE, DoorKind.SmallKey),
Room(player, 0x54, None),
# Room(player, 0x55, 0x50166).door(Position.InteriorW2, DoorKind.NormalLow).door(Position.SouthW, DoorKind.Normal).door(Position.SouthW, DoorKind.IncognitoEntrance),
Room(player, 0x56, 0xfbb4e).door(Position.InteriorW, DoorKind.SmallKey).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.InteriorS, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x57, 0xfbbd2).door(Position.InteriorN, DoorKind.Bombable).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.EastS, DoorKind.SmallKey).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.WestS, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x58, 0xfbcf6).door(Position.NorthW, DoorKind.BlastWall).door(Position.WestS, DoorKind.SmallKey).door(Position.SouthE, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.Bombable).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x59, 0xfbff7).door(Position.NorthW, DoorKind.SmallKey).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.InteriorN2, DoorKind.NormalLow2).door(Position.InteriorS2, DoorKind.NormalLow2),
Room(player, 0x5a, 0xfa7e5).door(Position.SouthE, DoorKind.Trap),
Room(player, 0x5b, 0xff8cc).door(Position.SouthE, DoorKind.SmallKey).door(Position.EastN, DoorKind.Trap),
Room(player, 0x5c, 0xff976).door(Position.InteriorE, DoorKind.Bombable).door(Position.WestN, DoorKind.Normal),
Room(player, 0x5d, 0xff9e1).door(Position.InteriorW, DoorKind.Trap).door(Position.SouthW, DoorKind.Trap).door(Position.InteriorN, DoorKind.Trap),
Room(player, 0x5e, 0xfc6b8).door(Position.EastS, DoorKind.SmallKey).door(Position.InteriorE, DoorKind.Trap2).door(Position.SouthE, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x5f, 0xfc6fa).door(Position.WestS, DoorKind.SmallKey),
Room(player, 0x60, 0x51309).door(Position.NorthE2, DoorKind.NormalLow2).door(Position.East2, DoorKind.NormalLow2).door(Position.East2, DoorKind.ToggleFlag).door(Position.EastN, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal).door(Position.SouthE, DoorKind.IncognitoEntrance),
Room(player, 0x61, 0x51454).door(Position.West2, DoorKind.NormalLow).door(Position.West2, DoorKind.ToggleFlag).door(Position.East2, DoorKind.NormalLow).door(Position.East2, DoorKind.ToggleFlag).door(Position.South2, DoorKind.NormalLow).door(Position.South2, DoorKind.IncognitoEntrance).door(Position.WestN, DoorKind.Normal),
Room(player, 0x62, 0x51577).door(Position.West2, DoorKind.NormalLow2).door(Position.West2, DoorKind.ToggleFlag).door(Position.NorthW2, DoorKind.NormalLow2).door(Position.North, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.SouthW, DoorKind.IncognitoEntrance),
Room(player, 0x63, 0xf88ed).door(Position.NorthE, DoorKind.StairKey).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.DungeonEntrance), # looked like a huge typo - I had to guess on StairKey
Room(player, 0x64, 0xfda53).door(Position.InteriorS, DoorKind.Trap2),
Room(player, 0x65, 0xfdac5).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x66, 0xfa01b).door(Position.InteriorE2, DoorKind.Waterfall).door(Position.SouthW2, DoorKind.NormalLow2).door(Position.SouthW2, DoorKind.ToggleFlag).door(Position.InteriorW2, DoorKind.NormalLow2),
Room(player, 0x67, 0xfbe17).door(Position.NorthE, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x68, 0xfbf02).door(Position.WestS, DoorKind.Trap).door(Position.NorthE, DoorKind.SmallKey),
Room(player, 0x6a, 0xfa7c7).door(Position.NorthE, DoorKind.BigKey),
Room(player, 0x6b, 0xff821).door(Position.NorthE, DoorKind.BigKey).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap).door(Position.InteriorW, DoorKind.Trap),
Room(player, 0x6c, 0xffaa0).door(Position.InteriorS, DoorKind.Trap2).door(Position.InteriorW, DoorKind.Trap).door(Position.EastS, DoorKind.Normal),
Room(player, 0x6d, 0xffa4e).door(Position.NorthW, DoorKind.Trap).door(Position.InteriorW, DoorKind.Trap).door(Position.WestS, DoorKind.Trap),
Room(player, 0x6e, 0xfc74b).door(Position.NorthE, DoorKind.Trap),
Room(player, 0x70, None),
Room(player, 0x71, 0x52341).door(Position.InteriorW, DoorKind.SmallKey).door(Position.SouthW2, DoorKind.TrapTriggerableLow).door(Position.InteriorS2, DoorKind.TrapTriggerableLow),
Room(player, 0x72, 0x51fda).door(Position.InteriorV, DoorKind.SmallKey),
Room(player, 0x73, 0xf8972).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap2).door(Position.InteriorE, DoorKind.Normal),
Room(player, 0x74, 0xf8a66).door(Position.InteriorE, DoorKind.Normal).door(Position.InteriorW, DoorKind.Normal),
Room(player, 0x75, 0xf8ab9).door(Position.InteriorW, DoorKind.Trap2).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x76, 0xf9e35).door(Position.InteriorN2, DoorKind.NormalLow).door(Position.InteriorS2, DoorKind.NormalLow).door(Position.NorthW2, DoorKind.NormalLow).door(Position.NorthW2, DoorKind.ToggleFlag),
Room(player, 0x77, 0xfd0e6).door(Position.NorthW2, DoorKind.StairKeyLow).door(Position.South2, DoorKind.DungeonEntranceLow),
Room(player, 0x7b, 0xff02b).door(Position.SouthW, DoorKind.Trap).door(Position.EastN, DoorKind.SmallKey).door(Position.EastS, DoorKind.Normal),
Room(player, 0x7c, 0xff0ef).door(Position.NorthE, DoorKind.BlastWall).door(Position.EastS, DoorKind.Bombable).door(Position.WestN, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal),
Room(player, 0x7d, 0xff20c).door(Position.SouthE, DoorKind.Trap).door(Position.WestS, DoorKind.Bombable).door(Position.InteriorW, DoorKind.SmallKey),
Room(player, 0x7e, 0xfc7c6).door(Position.SouthE, DoorKind.SmallKey).door(Position.InteriorS, DoorKind.TrapTriggerable).door(Position.EastN, DoorKind.Normal),
Room(player, 0x7f, 0xfc827).door(Position.WestN, DoorKind.Trap).door(Position.InteriorW, DoorKind.Normal),
Room(player, 0x80, None),
Room(player, 0x81, 0x5224b).door(Position.NorthW2, DoorKind.NormalLow2),
Room(player, 0x82, None),
Room(player, 0x83, 0xf8bba).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.DungeonEntrance).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x84, 0xf8cb7).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0x85, 0xf8d7d).door(Position.NorthE, DoorKind.Trap).door(Position.InteriorN, DoorKind.SmallKey).door(Position.SouthE, DoorKind.DungeonEntrance).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x87, 0xfd1b7).door(Position.InteriorN, DoorKind.Trap2).door(Position.InteriorE, DoorKind.Normal),
Room(player, 0x89, None),
Room(player, 0x8b, 0xff33f).door(Position.InteriorN, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.SmallKey).door(Position.EastN, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.NorthW, DoorKind.Normal),
Room(player, 0x8c, 0xff3ef).door(Position.EastN, DoorKind.Trap).door(Position.InteriorW, DoorKind.Trap2).door(Position.InteriorN, DoorKind.SmallKey).door(Position.WestN, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0x8d, 0xff4e0).door(Position.SouthE, DoorKind.Trap).door(Position.InteriorN, DoorKind.SmallKey).door(Position.WestN, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0x8e, 0xfc84d).door(Position.NorthE, DoorKind.SmallKey),
Room(player, 0x90, 0xfbab2).door(Position.SouthW, DoorKind.Trap),
Room(player, 0x91, 0xfb9e6).door(Position.EastS, DoorKind.Normal),
Room(player, 0x92, 0xfb97b).door(Position.InteriorN, DoorKind.Bombable).door(Position.InteriorW, DoorKind.Bombable).door(Position.WestS, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x93, 0xfb8e1).door(Position.InteriorW, DoorKind.Trap2).door(Position.InteriorE, DoorKind.SmallKey).door(Position.WestS, DoorKind.Normal),
Room(player, 0x95, 0xffc04).door(Position.SouthE, DoorKind.Normal).door(Position.EastN, DoorKind.Normal),
Room(player, 0x96, 0xffc78).door(Position.InteriorS, DoorKind.Trap2).door(Position.WestN, DoorKind.Normal),
Room(player, 0x97, 0xfb30a).door(Position.InteriorS, DoorKind.Normal).door(Position.InteriorW, DoorKind.Normal),
Room(player, 0x98, 0xfaf5b).door(Position.SouthW, DoorKind.DungeonEntrance),
Room(player, 0x99, 0x5172a).door(Position.InteriorW, DoorKind.StairKey).door(Position.South, DoorKind.SmallKey).door(Position.InteriorE, DoorKind.Normal),
Room(player, 0x9b, 0xff5a2).door(Position.InteriorN, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x9c, 0xff6c9).door(Position.EastS, DoorKind.Trap).door(Position.NorthW, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal).door(Position.WestS, DoorKind.Normal),
Room(player, 0x9d, 0xff741).door(Position.NorthE, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.InteriorN, DoorKind.Normal),
Room(player, 0x9e, 0xfc8c8).door(Position.NorthE, DoorKind.StairKey2).door(Position.InteriorE, DoorKind.BigKey).door(Position.InteriorS, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0x9f, 0xfc937).door(Position.WestS, DoorKind.Trap).door(Position.SouthW, DoorKind.Trap),
Room(player, 0xa0, 0xfba9a).door(Position.NorthW, DoorKind.BigKey),
Room(player, 0xa1, 0xfb83d).door(Position.SouthE, DoorKind.SmallKey).door(Position.East, DoorKind.Normal),
Room(player, 0xa2, 0xfb759).door(Position.South, DoorKind.SmallKey).door(Position.West, DoorKind.Normal).door(Position.East, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0xa3, 0xfb667).door(Position.West, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xa4, 0xfe741).door(Position.SouthW, DoorKind.Trap),
Room(player, 0xa5, 0xffb7f).door(Position.NorthE, DoorKind.Trap).door(Position.InteriorE, DoorKind.Trap2).door(Position.InteriorW, DoorKind.Trap2),
Room(player, 0xa6, None),
Room(player, 0xa8, 0x51887).door(Position.InteriorS, DoorKind.Trap2).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.SouthE, DoorKind.SmallKey).door(Position.InteriorN2, DoorKind.NormalLow2).door(Position.EastN2, DoorKind.NormalLow2).door(Position.East, DoorKind.Normal),
Room(player, 0xa9, 0x519c9).door(Position.West, DoorKind.Trap).door(Position.East, DoorKind.Trap).door(Position.North, DoorKind.BigKey).door(Position.WestN2, DoorKind.NormalLow2).door(Position.EastN2, DoorKind.NormalLow2).door(Position.South, DoorKind.Normal),
Room(player, 0xaa, 0x51b29).door(Position.InteriorE, DoorKind.Trap2).door(Position.WestN2, DoorKind.NormalLow2).door(Position.InteriorN, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal).door(Position.West, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xab, 0xfd9a9).door(Position.InteriorW, DoorKind.StairKey).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xac, 0xfd9d8).door(Position.SouthE, DoorKind.Trap),
Room(player, 0xae, 0xfc975).door(Position.EastN, DoorKind.Normal),
Room(player, 0xaf, 0xfc9e1).door(Position.NorthW, DoorKind.Normal).door(Position.WestN, DoorKind.Normal),
Room(player, 0xb0, 0xf8f6b).door(Position.InteriorW, DoorKind.Trap).door(Position.InteriorN, DoorKind.Trap).door(Position.InteriorS, DoorKind.SmallKey),
Room(player, 0xb1, 0xfb3b7).door(Position.InteriorW, DoorKind.BigKey).door(Position.NorthE, DoorKind.SmallKey).door(Position.SouthE, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal),
Room(player, 0xb2, 0xfb4ad).door(Position.North, DoorKind.BigKey).door(Position.InteriorS, DoorKind.Trap2).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.EastN2, DoorKind.NormalLow2).door(Position.NorthE, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0xb3, 0xfb5e4).door(Position.InteriorW, DoorKind.SmallKey).door(Position.WestN2, DoorKind.NormalLow2).door(Position.NorthW, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xb4, 0xfe807).door(Position.NorthW, DoorKind.BigKey),
Room(player, 0xb5, 0xfeb07).door(Position.SouthW, DoorKind.Trap),
Room(player, 0xb6, 0xfdd50).door(Position.NorthW, DoorKind.StairKey2).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.SmallKey).door(Position.InteriorW, DoorKind.SmallKey).door(Position.SouthE, DoorKind.Normal),
Room(player, 0xb7, 0xfddcd).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xb8, 0x51b75).door(Position.NorthE, DoorKind.BigKey).door(Position.EastN, DoorKind.Normal),
Room(player, 0xb9, 0x51d09).door(Position.EastN, DoorKind.SmallKey).door(Position.North, DoorKind.Normal).door(Position.South, DoorKind.Normal).door(Position.WestN, DoorKind.Normal),
Room(player, 0xba, 0x51d57).door(Position.WestN, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Trap).door(Position.InteriorN, DoorKind.Trap2),
Room(player, 0xbb, 0xfd86b).door(Position.NorthW, DoorKind.Normal).door(Position.InteriorN, DoorKind.Normal).door(Position.InteriorS, DoorKind.Normal).door(Position.InteriorE, DoorKind.Normal).door(Position.EastN, DoorKind.Normal).door(Position.EastS, DoorKind.Normal),
Room(player, 0xbc, 0xfd974).door(Position.InteriorS, DoorKind.SmallKey).door(Position.SouthE, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.Trap).door(Position.SouthW, DoorKind.Bombable).door(Position.WestN, DoorKind.Normal).door(Position.WestS, DoorKind.Normal).door(Position.InteriorW, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal),
Room(player, 0xbe, 0xfca28).door(Position.SouthE, DoorKind.Trap).door(Position.EastS, DoorKind.SmallKey).door(Position.InteriorE, DoorKind.Normal),
Room(player, 0xbf, 0xfca89).door(Position.WestS, DoorKind.SmallKey),
Room(player, 0xc0, 0xf9026).door(Position.InteriorN, DoorKind.TrapTriggerable).door(Position.InteriorS, DoorKind.Trap2).door(Position.NorthE, DoorKind.StairKey),
Room(player, 0xc1, 0xfb176).door(Position.InteriorS, DoorKind.SmallKey).door(Position.EastS, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.TrapTriggerable).door(Position.InteriorW, DoorKind.TrapTriggerable).door(Position.SouthW, DoorKind.Normal).door(Position.EastN, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0xc2, 0xfb0e7).door(Position.EastN, DoorKind.SmallKey).door(Position.WestS, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Normal).door(Position.WestN, DoorKind.Normal).door(Position.East, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal).door(Position.EastS, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal),
Room(player, 0xc3, 0xfb56c).door(Position.WestN, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.Trap2).door(Position.InteriorH, DoorKind.Trap2).door(Position.InteriorS, DoorKind.TrapTriggerable).door(Position.NorthW, DoorKind.Normal).door(Position.West, DoorKind.Normal).door(Position.WestS, DoorKind.Normal),
Room(player, 0xc4, 0xfec3f).door(Position.EastS, DoorKind.SmallKey),
Room(player, 0xc5, 0xfece1).door(Position.WestS, DoorKind.SmallKey).door(Position.NorthW, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal),
Room(player, 0xc6, 0xfdf5c).door(Position.NorthW, DoorKind.SmallKey).door(Position.NorthE, DoorKind.Normal).door(Position.EastN, DoorKind.Normal).door(Position.EastS, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal),
Room(player, 0xc7, 0xfe0a1).door(Position.NorthW, DoorKind.Trap).door(Position.WestN, DoorKind.Normal).door(Position.WestS, DoorKind.Normal),
Room(player, 0xc8, 0x51596).door(Position.SouthE, DoorKind.Trap),
Room(player, 0xc9, 0x51e5a).door(Position.InteriorV, DoorKind.Trap).door(Position.North, DoorKind.Trap).door(Position.InteriorW, DoorKind.Normal).door(Position.InteriorE, DoorKind.Normal).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0xcb, 0xfd630).door(Position.East, DoorKind.Dashable),
Room(player, 0xcc, 0xfd783).door(Position.NorthE, DoorKind.BigKey).door(Position.NorthW, DoorKind.Bombable).door(Position.West, DoorKind.Dashable),
Room(player, 0xce, 0xfcadd).door(Position.NorthE, DoorKind.Trap),
Room(player, 0xd0, 0xf90de).door(Position.InteriorS, DoorKind.SmallKey).door(Position.InteriorN, DoorKind.Trap2),
Room(player, 0xd1, 0xfb259).door(Position.InteriorS, DoorKind.Trap2).door(Position.NorthW, DoorKind.Normal).door(Position.NorthE, DoorKind.Normal).door(Position.InteriorE, DoorKind.Normal),
Room(player, 0xd2, 0xfafd6).door(Position.NorthE, DoorKind.Trap),
Room(player, 0xd5, 0xfee40).door(Position.SouthW, DoorKind.BombableEntrance).door(Position.NorthW, DoorKind.Normal),
Room(player, 0xd6, 0xfe1cb).door(Position.NorthW, DoorKind.UnknownD6).door(Position.SouthE, DoorKind.DungeonEntrance).door(Position.NorthE, DoorKind.Normal),
Room(player, 0xd8, 0x515ed).door(Position.NorthE, DoorKind.Trap).door(Position.InteriorE, DoorKind.TrapTriggerable).door(Position.EastS, DoorKind.Normal),
Room(player, 0xd9, 0x5166f).door(Position.WestS, DoorKind.Trap).door(Position.InteriorS, DoorKind.Trap).door(Position.EastS, DoorKind.Trap),
Room(player, 0xda, 0x5169d).door(Position.WestS, DoorKind.Trap),
Room(player, 0xdb, 0xfd370).door(Position.East, DoorKind.Trap).door(Position.South, DoorKind.DungeonEntrance),
Room(player, 0xdc, 0xfd4d1).door(Position.West, DoorKind.Normal),
# Room(player, 0xdf, 0x52db4).door(Position.South, DoorKind.CaveEntrance),
Room(player, 0xe0, 0xf9149).door(Position.InteriorN, DoorKind.Trap2).door(Position.InteriorW, DoorKind.Trap2).door(Position.NorthE, DoorKind.StairKey).door(Position.SouthW, DoorKind.DungeonEntrance),
# Room(player, 0xe1, 0x5023c).door(Position.InteriorH2, DoorKind.NormalLow2).door(Position.SouthW, DoorKind.CaveEntrance),
# Room(player, 0xe2, 0x50464).door(Position.InteriorH, DoorKind.Normal).door(Position.SouthE, DoorKind.CaveEntrance),
# Room(player, 0xe3, 0x5032b).door(Position.InteriorS2, DoorKind.TrapLowE3).door(Position.InteriorE2, DoorKind.NormalLow2).door(Position.SouthW, DoorKind.CaveEntrance),
# Room(player, 0xe4, 0x534b1).door(Position.SouthW2, DoorKind.CaveEntranceLow).door(Position.InteriorN, DoorKind.Normal).door(Position.East, DoorKind.Normal),
# Room(player, 0xe5, 0x535ba).door(Position.West, DoorKind.Normal).door(Position.South, DoorKind.CaveEntrance),
# Room(player, 0xe6, 0x532ee).door(Position.SouthW2, DoorKind.CaveEntranceLow).door(Position.EastN2, DoorKind.NormalLow),
# Room(player, 0xe7, 0x533ce).door(Position.SouthE2, DoorKind.CaveEntranceLow).door(Position.WestN2, DoorKind.NormalLow),
# Room(player, 0xe8, 0x529d3).door(Position.SouthE, DoorKind.CaveEntrance),
# Room(player, 0xea, 0x531f5).door(Position.SouthW, DoorKind.CaveEntrance),
# Room(player, 0xeb, 0x52e1a).door(Position.SouthE, DoorKind.CaveEntrance),
# Room(player, 0xed, 0x52bec).door(Position.SouthE, DoorKind.CaveEntrance),
# Room(player, 0xee, 0x52f76).door(Position.SouthE, DoorKind.CaveEntrance),
# Room(player, 0xef, 0x52d37).door(Position.InteriorE, DoorKind.Trap2).door(Position.South, DoorKind.CaveEntrance),
# Room(player, 0xf0, 0x5258a).door(Position.SouthW, DoorKind.CaveEntrance).door(Position.East2, DoorKind.NormalLow),
# Room(player, 0xf1, 0x52703).door(Position.SouthE2, DoorKind.CaveEntranceLow).door(Position.West2, DoorKind.NormalLow),
# Room(player, 0xf2, 0x5274a).door(Position.EastS, DoorKind.Normal).door(Position.SouthE, DoorKind.Normal).door(Position.SouthE, DoorKind.IncognitoEntrance),
# Room(player, 0xf3, 0x52799).door(Position.WestS, DoorKind.Normal).door(Position.SouthW, DoorKind.Normal).door(Position.SouthW, DoorKind.IncognitoEntrance),
# Room(player, 0xf4, 0x527d3).door(Position.EastS, DoorKind.Dashable).door(Position.SouthE, DoorKind.Normal).door(Position.SouthE, DoorKind.IncognitoEntrance),
# Room(player, 0xf5, 0x52813).door(Position.WestS, DoorKind.Dashable).door(Position.SouthW, DoorKind.Normal).door(Position.SouthW, DoorKind.IncognitoEntrance),
# Room(player, 0xf8, 0x528fe).door(Position.South, DoorKind.CaveEntrance),
# Room(player, 0xf9, 0x5305a).door(Position.SouthW, DoorKind.CaveEntrance),
# Room(player, 0xfa, 0x53165).door(Position.SouthW2, DoorKind.EntranceLow),
# Room(player, 0xfb, 0x52ea4).door(Position.South, DoorKind.CaveEntrance),
# Room(player, 0xfd, 0x52ab1).door(Position.South2, DoorKind.CaveEntranceLow),
# Room(player, 0xfe, 0x52ff1).door(Position.SouthE2, DoorKind.CaveEntranceLow),
# Room(player, 0xff, 0x52c9a).door(Position.InteriorW, DoorKind.Bombable).door(Position.InteriorE, DoorKind.Bombable).door(Position.SouthE, DoorKind.CaveEntrance),
]
# fix some wonky things
world.get_room(0x51, player).change(1, DoorKind.Normal) # fix the dungeon changer
world.get_room(0x60, player).swap(2, 4) # puts the exit at pos 2 - enables pos 3
world.get_room(0x61, player).swap(1, 6) # puts the WN door at pos 1 - enables it
world.get_room(0x61, player).swap(5, 6) # puts the Incognito Entrance at the end, so it can be deleted
world.get_room(0x62, player).swap(1, 4) # puts the exit at pos 1 - enables pos 3
world.get_room(0x77, player).swap(0, 1) # fixes Hera Lobby Key Stairs - entrance now at pos 0
if world.enemy_shuffle[player] != 'none':
world.get_room(0xc0, player).change(0, DoorKind.Normal) # fix this kill room if enemizer is on
class Room(object):
def __init__(self, player, index, address):
self.player = player
self.index = index
self.doorListAddress = address
self.doorList = []
self.modified = False
self.palette = None
def position(self, door):
return self.doorList[door.doorListPos][0]
def kind(self, door):
return self.doorList[door.doorListPos][1]
def door(self, pos, kind):
self.doorList.append((pos, kind))
return self
def change(self, list_idx, kind):
prev = self.doorList[list_idx]
self.doorList[list_idx] = (prev[0], kind)
self.modified = True
def mirror(self, list_idx):
prev = self.doorList[list_idx]
mirror_door = None
for door in self.doorList:
if door != prev:
mirror_door = door
break
self.doorList[list_idx] = (mirror_door[0], mirror_door[1])
self.modified = True
def swap(self, idx1, idx2):
item1 = self.doorList[idx1]
item2 = self.doorList[idx2]
self.doorList[idx1] = item2
self.doorList[idx2] = item1
self.modified = True
def delete(self, list_idx):
self.doorList[list_idx] = (Position.FF, DoorKind.FF)
self.modified = True
def address(self):
return self.doorListAddress
def rom_data(self):
byte_array = []
for pos, kind in self.doorList:
byte_array.append(pos.value)
byte_array.append(kind.value)
return byte_array
def __str__(self):
return str(self.__unicode__())
def __unicode__(self):
return '%s' % self.index
class PairedDoor(object):
def __init__(self, door_a, door_b, original=False):
self.door_a = door_a
self.door_b = door_b
self.pair = True
self.original = original
def address_a(self, world, player):
d = world.check_for_door(self.door_a, player)
return 0x13C000 + (door_pair_offset_table[d.roomIndex]+d.doorListPos)*2
def address_b(self, world, player):
d = world.check_for_door(self.door_b, player)
return 0x13C000 + (door_pair_offset_table[d.roomIndex]+d.doorListPos)*2
def rom_data_a(self, world, player):
if not self.pair:
return [0x00, 0x00]
d = world.check_for_door(self.door_b, player)
return [d.roomIndex, pos_map[d.doorListPos]]
def rom_data_b(self, world, player):
if not self.pair:
return [0x00, 0x00]
d = world.check_for_door(self.door_a, player)
return [d.roomIndex, pos_map[d.doorListPos]]
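# Pairing data layout (as used by address_a/address_b and rom_data_a/rom_data_b above):
# each door gets a 2-byte slot at 0x13C000 + (door_pair_offset_table[roomIndex] + doorListPos) * 2,
# holding the partner door's room index plus the pos_map bit for its position in that room's list.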
pos_map = {
0: 0x80, 1: 0x40, 2: 0x20, 3: 0x10
# indices 4-7 not supported yet
}
@unique
class DoorKind(Enum):
Normal = 0x00
NormalLow = 0x02
EntranceLow = 0x04
Waterfall = 0x08
DungeonEntrance = 0x0A
DungeonEntranceLow = 0x0C
CaveEntrance = 0x0E
CaveEntranceLow = 0x10
IncognitoEntrance = 0x12
DungeonChanger = 0x14
ToggleFlag = 0x16
Trap = 0x18
UnknownD6 = 0x1A
SmallKey = 0x1C
BigKey = 0x1E
StairKey = 0x20
StairKey2 = 0x22
HauntedStairKey = 0x24 # not a real door, can see it in dark rooms when facing left
StairKeyLow = 0x26
Dashable = 0x28
BombableEntrance = 0x2A
Bombable = 0x2E
BlastWall = 0x30
Hidden = 0x32
TrapTriggerable = 0x36
Trap2 = 0x38
NormalLow2 = 0x40
TrapTriggerableLow = 0x44
Warp = 0x46
CaveEntranceLow08 = 0x48
TrapLowE3 = 0x4A # Maybe this is a toggle flag too?
FF = 0xFF
@unique
class Position(Enum):
NorthW = 0x00
North = 0x10
NorthE = 0x20
NorthW2 = 0x30
North2 = 0x40
NorthE2 = 0x50
InteriorW = 0x60
InteriorV = 0x70
InteriorE = 0x80
InteriorW2 = 0x90
InteriorV2 = 0xA0
InteriorE2 = 0xB0
SouthW = 0x61
South = 0x71
SouthE = 0x81
SouthW2 = 0x91
South2 = 0xA1
SouthE2 = 0xB1
WestN = 0x02
West = 0x12
WestS = 0x22
WestN2 = 0x32
West2 = 0x42
# WestS2 = 0x52
InteriorN = 0x62
InteriorH = 0x72
InteriorS = 0x82
InteriorN2 = 0x92
InteriorH2 = 0xA2
InteriorS2 = 0xB2
EastN = 0x63
East = 0x73
EastS = 0x83
EastN2 = 0x93
East2 = 0xA3
# EastS2 = 0xB3
FF = 0xFF
class TestWorld(object):
def __init__(self):
self.rooms = []
# python3 -c "from RoomData import offset_utility; offset_utility()"
# This utility was used to calculate the distance offsets
def offset_utility():
world = TestWorld()
create_rooms(world, 1)
map = {}
cntr = 1
for room in world.rooms:
map[room.index] = cntr
cntr = cntr + len(room.doorList)
string = ''
for i in range(225):
if i % 16 == 0:
string = string + 'dw '
if i not in map:
string = string + '$0000,'
else:
string = string + hex(map[i]) + ','
print(string)
# python3 -c "from RoomData import key_door_template_generator; key_door_template_generator()"
# This utility was used to help initialize the pairing data
def key_door_template_generator():
world = TestWorld()
create_rooms(world, 1)
for room in world.rooms:
string = 'dw '
for door in room.doorList:
            if door[1] in [DoorKind.SmallKey, DoorKind.BigKey, DoorKind.Dashable, DoorKind.Bombable]:
string = string + '$xxxx,'
else:
string = string + '$0000,'
print(string[0:-1])
# python3 -c "from RoomData import door_address_list; door_address_list('/home/randall/kwyn/orig/z3.sfc')"
# python3 -c "from RoomData import door_address_list; door_address_list('path/to/rom.sfc')"
def door_address_list(rom):
with open(rom, 'rb') as stream:
rom_data = bytearray(stream.read())
room_index = 0
while room_index < 256:
offset = room_index * 3
address = rom_data[0x0F8000 + offset]
address = address + 0x100 * rom_data[0x0F8000 + offset + 1]
byte3 = rom_data[0x0F8000 + offset + 2]
address = address + (byte3 << 16)
if byte3 == 0x03:
address = address - 0x020000
elif byte3 == 0x0A:
address = address - 0x058000
elif byte3 == 0x1f:
address = address - 0x100000
else:
print('Byte3 ' + hex(byte3))
print('Address ' + hex(address))
raise Exception('Bad address?')
terminated = False
while not terminated:
marker = rom_data[address] + (rom_data[address+1] << 8)
# if marker == 0xFFFF:
# print('Room '+ hex(room_index)+ ' terminated at '+ hex(address))
# terminated = True
if marker == 0xFFF0:
print(hex(room_index) + ': ' + hex(address+2))
# print('Room ' + hex(room_index) + ' address: ' + hex(address+2))
terminated = True
else:
address = address + 3
room_index = room_index + 1
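# Note on the address math in door_address_list: byte3 is the bank byte of the 24-bit door-list
# pointer, and the constants subtracted for banks $03, $0A and $1F convert it into a flat ROM
# file offset (an interpretation inferred from the table above, not documented in the original).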
|
python
|
# ============================================================================
#
# Copyright (C) 2007-2016 Conceptive Engineering bvba.
# www.conceptive.be / [email protected]
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Conceptive Engineering nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ============================================================================
import logging
FORMAT = '[%(levelname)-7s] [%(name)-35s] - %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
logger = logging.getLogger('videostore.main')
#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
try:
import matplotlib
logger.info('matplotlib %s is used'%(matplotlib.__version__))
except ImportError:
logger.error('Charts will not work because of missing matplotlib')
from camelot.core.conf import settings, SimpleSettings
class ExampleSettings( SimpleSettings ):
"""Special settings class for the example application, this is done to
'survive' various packaging regimes, such as windows, debian, ...
"""
@staticmethod
def setup_model():
from sqlalchemy.orm import configure_mappers
from camelot.core.sql import metadata
metadata.bind = settings.ENGINE()
#
# import all the needed model files to make sure the mappers and tables
# are defined before creating them in the database
#
from camelot.model import (party, authentication, i18n, fixture,
memento, batch_job)
from . import model
logger.debug('loaded datamodel for %s'%party.__name__)
logger.debug('loaded datamodel for %s'%authentication.__name__)
logger.debug('loaded datamodel for %s'%i18n.__name__)
logger.debug('loaded datamodel for %s'%fixture.__name__)
logger.debug('loaded datamodel for %s'%memento.__name__)
logger.debug('loaded datamodel for %s'%batch_job.__name__)
logger.debug('loaded datamodel for %s'%model.__name__)
#
# create the tables for all models, configure mappers first, to make
# sure all deferred properties have been handled, as those could
# create tables or columns
#
configure_mappers()
metadata.create_all()
#
# Load sample data with the fixure mechanism
#
from camelot_example.fixtures import load_movie_fixtures
load_movie_fixtures()
#
# setup the views
#
from camelot_example.view import setup_views
setup_views()
example_settings = ExampleSettings('camelot',
'videostore',
data = 'videostore_3.sqlite')
def main():
from camelot.admin.action.application import Application
from camelot.view.main import main_action
from camelot_example.application_admin import MyApplicationAdmin
settings.append(example_settings)
videostore = Application(MyApplicationAdmin())
main_action(videostore)
if __name__ == '__main__':
main()
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Victor M. Mendiola Lau - All Rights Reserved
# Unauthorized copying of this file, via any medium is strictly prohibited
# Proprietary and confidential
# Written by Victor M. Mendiola Lau <[email protected]>, March 2017
import pylab
from datasets.nir_tecator import load_nir_tecator
# ---------------------------------------------------------------
def plot_nir_tecator_data_set():
# loading the nir tecator data set
ds = load_nir_tecator()
# removing columns associated with classes and properties
ds = ds.iloc[:, :-2]
# plotting the data set
ds.T.plot(legend=None)
pylab.show()
def plot_nir_tecator_by_class():
# loading the nir tecator data set
ds = load_nir_tecator()
# creating the figure and adding subplots
fig, axes = pylab.subplots(nrows=1, ncols=2)
# plotting class 0 samples
axes[0].set_title('NIR Tecator (Class 0)(%fat < 20)')
ds[ds['class'] == 0].iloc[:, :-2].T.plot(ax=axes[0], legend=None)
# plotting class 1 samples
axes[1].set_title('NIR Tecator (Class 1)(%fat >= 20)')
ds[ds['class'] == 1].iloc[:, :-2].T.plot(ax=axes[1], legend=None)
# actually showing the plot
pylab.show()
|
python
|
# Copyright (c) 2019-2022 ThatRedKite and contributors
import time
import re
from datetime import datetime
from operator import itemgetter
import aioredis
import discord
from discord.ext import commands
async def update_count(redis: aioredis.Redis, message: discord.Message):
"""
Updates the welcome count for the given message's author.
"""
if "welcome" in message.content.lower():
write = True
guild, channel, author = message.guild.id, message.channel.id, message.author.id
unixtime = time.mktime(message.created_at.timetuple())
join_key = f"latest_join:{guild}"
assert await redis.exists(join_key) # make sure there is a last_joined key
joined_dict = await redis.hgetall(join_key)
welcome_channel, latest_join, joined_id = itemgetter("join_channel", "latest_join", "user_id")(joined_dict)
welcome_channel, latest_join, joined_id = int(welcome_channel), int(latest_join), int(joined_id)
usr_key = f"leaderboard:{author}:{guild}"
if await redis.exists(usr_key):
latest_welcome = int(await redis.hget(usr_key, "latest_welcome"))
if latest_welcome <= latest_join and joined_id != author:
await redis.hincrby(usr_key, "welcome_count", 1) # increase welcome_count by one; create if not exist
else:
return
else:
write = (welcome_channel == channel)
await redis.hset(usr_key, "welcome_count", 1)
if write:
await redis.hset(usr_key, "latest_welcome", int(unixtime))
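# Redis layout used by update_count and the cog below:
#   latest_join:<guild>              hash with latest_join (unix ts), user_id, join_channel
#   leaderboard:<user_id>:<guild>    hash with welcome_count, latest_welcome (unix ts)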
class WelcomeCog(commands.Cog, name="Welcome counter"):
"""
A cog that counts the number of times a user has welcome newly joined members.
"""
def __init__(self, bot):
self.bot: discord.Client = bot
self.redis_welcomes: aioredis.Redis = bot.redis_welcomes
self.settings_redis: aioredis.Redis = bot.redis
async def cog_check(self, ctx):
return await self.settings_redis.hget(ctx.guild.id, "WELCOME") == "TRUE"
@commands.Cog.listener()
async def on_message(self, message):
"""
Updates the welcome count for the given message's author. This is called by the bot on every message.
"""
if self.bot.command_prefix not in message.content and message.author.id != self.bot.user.id and message.channel.id == message.guild.system_channel.id:
try:
await update_count(self.redis_welcomes, message)
except AssertionError:
pass
@commands.Cog.listener()
async def on_member_join(self, joinedmember):
"""
Updates the latest_join key for the given member. This is called by the bot on every member join.
"""
welcomechannel = joinedmember.guild.system_channel.id
lastjoined = joinedmember.joined_at
unixtime = time.mktime(lastjoined.timetuple())
guild = joinedmember.guild.id
key = f"latest_join:{guild}"
datadict = dict(
latest_join=int(unixtime),
user_id=int(joinedmember.id),
join_channel=int(welcomechannel)
)
await self.redis_welcomes.hmset(key, datadict)
await joinedmember.guild.system_channel.send("welcome")
@commands.cooldown(1, 5, commands.BucketType.user)
@commands.command(name="welcomes")
async def welcome(self, ctx, *, args=None):
"""
Displays the top 10 users with the most welcome count.
"""
current_time = datetime.utcfromtimestamp(int(time.mktime(ctx.message.created_at.timetuple())))
# Scan all users in the DB
# here's a nice one-liner
key_list = [key async for key in self.redis_welcomes.scan_iter(match=f"leaderboard:*:{ctx.guild.id}")]
leaderboard = dict()
for i in key_list:
author = re.findall(r":[\d]{5,}:", i)[0][1:-1] # extract the author id
leaderboard[f"<@{author}>"] = await self.redis_welcomes.hgetall(i)
sorted_lb = sorted(leaderboard.items(), key=lambda x: int(x[1]['welcome_count']), reverse=True)
if not args:
embed = discord.Embed(title="Welcome leaderboard")
lb_str = ""
number = 1
for i in sorted_lb:
if number <= 10:
match number:
case 1:
number_str = ":first_place: "
case 2:
number_str = ":second_place: "
case 3:
number_str = ":third_place: "
case _:
number_str = " **" + str(number) + "**. "
lb_str += number_str + str(i[0]) \
+ " welcomes: **" + str(i[1]["welcome_count"]) + "**, last welcome: **" \
+ str((current_time - datetime.utcfromtimestamp(int(i[1]["latest_welcome"]))).seconds // 3600) \
+ "** hours ago\n"
number += 1
continue
last_join_dict = await self.redis_welcomes.hgetall(f"latest_join:{ctx.message.guild.id}")
embed.add_field(name=":medal: Top 10:", value=lb_str, inline=False)
if 'user_id' in last_join_dict:
footer = str(str(f"<@{last_join_dict['user_id']}>")
+ " joined: **"
+ str((current_time - datetime.utcfromtimestamp(int(last_join_dict['latest_join']))).seconds // 3600))\
+ "** hours ago"
embed.add_field(name=":partying_face: Latest join:", value=footer, inline=False)
elif args.lower() == "me":
embed = discord.Embed(title="Personal welcome count")
target_user = ctx.message.author.id
lb_str = ""
number = 1
for i in sorted_lb:
if str(target_user) in i[0]:
lb_str += "**" + str(number) + "**. " + str(i[0]) \
+ " welcomes: **" + str(i[1]["welcome_count"]) + "**, last welcome: **" \
+ str((current_time - datetime.utcfromtimestamp(int(i[1]["latest_welcome"]))).seconds // 3600) \
+ "** hours ago\n"
embed.add_field(name=f"{str(ctx.message.author)}'s welcome count:", value=lb_str, inline=False)
number += 1
await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(WelcomeCog(bot))
|
python
|
from pysit.Tomo.tomo import *
|
python
|
import pandas as pd
def rail_station():
data = pd.read_csv('data/GTFS_stations.txt',sep = ',', header = None)
data = data.rename(columns = {0:'ID',2:'Name'})
use_col = ['ID','Name']
data = data.loc[:,use_col]
link_info = pd.read_csv('data/link_info.csv')
station1 = link_info.loc[:,['link_start','route_id','direction_id','link_start_parent']].rename(columns = {'link_start':'station_id','link_start_parent':'tap_in_ID'})
station2 = link_info.loc[:,['link_end','route_id','direction_id','link_end_parent']].rename(columns = {'link_end':'station_id','link_end_parent':'tap_in_ID'})
station = station1.append(station2)
station = station.drop_duplicates()
station = station.merge(data, left_on = ['tap_in_ID'], right_on = ['ID'])
schd_rail = pd.read_csv('data/schd_rail_stops.csv')
len_old = len(station)
station = station.merge(schd_rail,left_on = ['station_id'],right_on = ['stop_id'])
len_new = len(station)
if len_old!=len_new:
print('missing data, please check')
exit()
line_name_id = pd.read_csv('data/line_name_id.csv')
station = station.merge(line_name_id,left_on = ['route_id'],right_on = ['line_short'])
station = station.rename(columns = {'station_id_x':'station_id','direction_id_x':'GTFS_direction','direction_id_y':'CTA_schedule_direction','stopname':'CTA_stopname'})
col_out = ['station_id','tap_in_ID','Name','longitude','latitude','qt2_trackid','line_id','line_name','CTA_schedule_direction','CTA_stopname']
station['station_id'] = station['station_id'].astype(int)
station['tap_in_ID'] = station['tap_in_ID'].astype(int)
data_save = station.loc[:,col_out]
data_save = data_save.drop_duplicates()
data_save.to_csv('data/All_rail_stations.csv',index=False)
def bus_station():
data = pd.read_csv('data/bt_stop.csv')
# bt_pattern = pd.read_csv('data/bt_pattern.csv')
use_col = ['geoid','geodescription','longitude','latitude','tageoid']
data = data.loc[:,use_col]
data = data.rename(columns = {'geoid':'tap_in_ID','geodescription':'Name','tageoid': 'GTFS_id'})
col_out = ['tap_in_ID','Name','longitude','latitude','GTFS_id']
data_save = data.loc[:,col_out]
data_save = data_save.drop_duplicates(['tap_in_ID'])
data_save.to_csv('data/All_bus_stations.csv',index=False)
if __name__ == '__main__':
bus_station()
|
python
|
# coding=utf-8
import tensorflow as tf
import tensorflow.contrib.slim as slim
import numpy as np
class SiameseLSTMw2v(object):
"""
A LSTM based deep Siamese network for text similarity.
Uses an word embedding layer (looks up in pre-trained w2v), followed by a biLSTM and Energy Loss layer.
"""
def stackedRNN(self, x, dropout, scope, embedding_size, sequence_length, hidden_units):
n_hidden = hidden_units
n_layers = 3
# n_layers = 6
# Prepare data shape to match `static_rnn` function requirements
x = tf.unstack(tf.transpose(x, perm=[1, 0, 2]))
# print(x)
# Define lstm cells with tensorflow
# Forward direction cell
with tf.name_scope("fw" + scope), tf.variable_scope("fw" + scope):
stacked_rnn_fw = []
for _ in range(n_layers):
fw_cell = tf.nn.rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0, state_is_tuple=True)
lstm_fw_cell = tf.contrib.rnn.DropoutWrapper(fw_cell, output_keep_prob=dropout)
stacked_rnn_fw.append(lstm_fw_cell)
lstm_fw_cell_m = tf.nn.rnn_cell.MultiRNNCell(cells=stacked_rnn_fw, state_is_tuple=True)
outputs, _ = tf.nn.static_rnn(lstm_fw_cell_m, x, dtype=tf.float32)
return outputs[-1]
def contrastive_loss(self, y, d, batch_size):
tmp = y * tf.square(d)
# tmp= tf.mul(y,tf.square(d))
tmp2 = (1 - y) * tf.square(tf.maximum((1 - d), 0))
reg = tf.contrib.layers.apply_regularization(tf.contrib.layers.l2_regularizer(1e-4), tf.trainable_variables())
        return tf.reduce_sum(tmp + tmp2) / batch_size / 2 + reg
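    # contrastive_loss computes y*d^2 + (1-y)*max(1-d, 0)^2, summed over the batch, divided by
    # batch_size and halved, plus an L2 penalty (1e-4 regularizer) over all trainable variables.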
def __init__(
self, sequence_length, vocab_size, embedding_size, hidden_units, l2_reg_lambda, batch_size,
trainableEmbeddings):
# Placeholders for input, output and dropout
self.input_x1 = tf.placeholder(tf.int32, [None, sequence_length], name="input_x1")
self.input_x2 = tf.placeholder(tf.int32, [None, sequence_length], name="input_x2")
self.input_y = tf.placeholder(tf.float32, [None], name="input_y")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
# Keeping track of l2 regularization loss (optional)
l2_loss = tf.constant(0.0, name="l2_loss")
# Embedding layer
with tf.name_scope("embedding"):
self.W = tf.Variable(
tf.constant(0.0, shape=[vocab_size, embedding_size]),
trainable=trainableEmbeddings, name="W")
self.embedded_words1 = tf.nn.embedding_lookup(self.W, self.input_x1)
self.embedded_words2 = tf.nn.embedding_lookup(self.W, self.input_x2)
# print self.embedded_words1
# Create a convolution + maxpool layer for each filter size
with tf.name_scope("output"):
self.out1 = self.stackedRNN(self.embedded_words1, self.dropout_keep_prob, "side1", embedding_size,
sequence_length, hidden_units)
self.out2 = self.stackedRNN(self.embedded_words2, self.dropout_keep_prob, "side2", embedding_size,
sequence_length, hidden_units)
self.distance = tf.sqrt(tf.reduce_sum(tf.square(tf.subtract(self.out1, self.out2)), 1, keep_dims=True))
self.distance = tf.div(self.distance,
tf.add(tf.sqrt(tf.reduce_sum(tf.square(self.out1), 1, keep_dims=True)),
tf.sqrt(tf.reduce_sum(tf.square(self.out2), 1, keep_dims=True))))
self.distance = tf.reshape(self.distance, [-1], name="distance")
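            # distance is ||out1 - out2|| divided by (||out1|| + ||out2||), which keeps it in
            # [0, 1] so the contrastive loss and the rounding-based similarity below can use it.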
with tf.name_scope("loss"):
self.loss = self.contrastive_loss(self.input_y, self.distance, batch_size)
#### Accuracy computation is outside of this class.
with tf.name_scope("accuracy"):
self.temp_sim = tf.subtract(tf.ones_like(self.distance), tf.rint(self.distance),
name="temp_sim") # auto threshold 0.5
correct_predictions = tf.equal(self.temp_sim, self.input_y)
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
with tf.name_scope('f1'):
ones_like_actuals = tf.ones_like(self.input_y)
zeros_like_actuals = tf.zeros_like(self.input_y)
ones_like_predictions = tf.ones_like(self.temp_sim)
zeros_like_predictions = tf.zeros_like(self.temp_sim)
tp = tf.reduce_sum(
tf.cast(
tf.logical_and(
tf.equal(self.input_y, ones_like_actuals),
tf.equal(self.temp_sim, ones_like_predictions)
),
'float'
)
)
tn = tf.reduce_sum(
tf.cast(
tf.logical_and(
tf.equal(self.input_y, zeros_like_actuals),
tf.equal(self.temp_sim, zeros_like_predictions)
),
'float'
)
)
fp = tf.reduce_sum(
tf.cast(
tf.logical_and(
tf.equal(self.input_y, zeros_like_actuals),
tf.equal(self.temp_sim, ones_like_predictions)
),
'float'
)
)
fn = tf.reduce_sum(
tf.cast(
tf.logical_and(
tf.equal(self.input_y, ones_like_actuals),
tf.equal(self.temp_sim, zeros_like_predictions)
),
'float'
)
)
precision = tp / (tp + fp)
recall = tp / (tp + fn)
self.f1 = 2 * precision * recall / (precision + recall)
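# A minimal usage sketch (shapes and hyperparameters are assumptions, not from the original):
#   model = SiameseLSTMw2v(sequence_length=30, vocab_size=50000, embedding_size=300,
#                          hidden_units=50, l2_reg_lambda=0.0, batch_size=64,
#                          trainableEmbeddings=False)
#   feed = {model.input_x1: x1, model.input_x2: x2, model.input_y: y, model.dropout_keep_prob: 1.0}
#   loss, acc, f1 = sess.run([model.loss, model.accuracy, model.f1], feed_dict=feed)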
|
python
|
from keras.applications import VGG16
from keras import models
from keras import layers
from keras import optimizers
from keras.preprocessing.image import ImageDataGenerator
import matplotlib.pyplot as plt
import os
import numpy as np
image_width = 768
image_height = 576
train_dir = './CNN/images/train'
validation_dir = './CNN/images/valid'
# Load the VGG model
conv_base = VGG16(weights='imagenet', include_top=False,
input_shape=(image_width, image_height, 3))
base_dir = '/home/vosferatu/Desktop/inesc2k19/CNN/images'
train_dir = os.path.join(base_dir, 'train')
validation_dir = os.path.join(base_dir, 'valid')
test_dir = os.path.join(base_dir, 'test')
datagen = ImageDataGenerator(rescale=1./255)
batch_size = 16
def extract_features(directory, sample_count):
features = np.zeros(shape=(sample_count, 24, 18, 512))
labels = np.zeros(shape=(sample_count))
generator = datagen.flow_from_directory(
directory,
target_size=(image_width, image_height),
batch_size=batch_size,
class_mode='binary')
i = 0
for inputs_batch, labels_batch in generator:
features_batch = conv_base.predict(inputs_batch)
features[i * batch_size: (i + 1) * batch_size] = features_batch
labels[i * batch_size: (i + 1) * batch_size] = labels_batch
i += 1
if i * batch_size >= sample_count:
# Note that since generators yield data indefinitely in a loop,
# we must `break` after every image has been seen once.
break
return features, labels
train_features, train_labels = extract_features(train_dir, 40)
validation_features, validation_labels = extract_features(validation_dir, 40)
test_features, test_labels = extract_features(test_dir, 10)
train_features = np.reshape(train_features, (40, 24 * 18 * 512))
validation_features = np.reshape(validation_features, (40, 24 * 18 * 512))  # 40 validation samples were extracted above
test_features = np.reshape(test_features, (10, 24 * 18 * 512))
model = models.Sequential()
model.add(layers.Dense(256, activation='relu', input_dim=24 * 18 * 512))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer=optimizers.RMSprop(lr=2e-5),
loss='binary_crossentropy',
metrics=['acc'])
history = model.fit(train_features, train_labels,
epochs=10,
batch_size=16,
validation_data=(validation_features, validation_labels))
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
mdl = models.Sequential()
mdl.add(conv_base)
mdl.add(layers.Flatten())
mdl.add(layers.Dense(256, activation='relu'))
mdl.add(layers.Dense(1, activation='sigmoid'))
conv_base.trainable = False
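# --- Hedged fine-tuning sketch (not part of the original script) ---
# One plausible way to actually train the `mdl` assembled above end-to-end on
# the same directories; the generator settings and epoch count are assumptions.
train_generator = datagen.flow_from_directory(
    train_dir,
    target_size=(image_width, image_height),
    batch_size=batch_size,
    class_mode='binary')
validation_generator = datagen.flow_from_directory(
    validation_dir,
    target_size=(image_width, image_height),
    batch_size=batch_size,
    class_mode='binary')
mdl.compile(optimizer=optimizers.RMSprop(lr=2e-5),
            loss='binary_crossentropy',
            metrics=['acc'])
mdl.fit_generator(train_generator,
                  steps_per_epoch=len(train_generator),
                  epochs=5,
                  validation_data=validation_generator,
                  validation_steps=len(validation_generator))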
|
python
|
Import("env")
import os
dataFolder = 'data'
if dataFolder not in os.listdir(os.getcwd()):
os.mkdir(dataFolder)
print("Empty \"data\" folder for empty filesystem creation ready")
print("Replace MKSPIFFSTOOL with mklittlefs.exe")
env.Replace (MKSPIFFSTOOL = "mklittlefs.exe")
|
python
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libproxy(CMakePackage):
"""libproxy is a library that provides automatic proxy configuration
management."""
homepage = "http://libproxy.github.io/libproxy/"
url = "https://github.com/libproxy/libproxy/archive/0.4.15.tar.gz"
version('0.4.15', sha256='18f58b0a0043b6881774187427ead158d310127fc46a1c668ad6d207fb28b4e0')
version('0.4.14', sha256='6220a6cab837a8996116a0568324cadfd09a07ec16b930d2a330e16d5c2e1eb6')
version('0.4.13', sha256='d610bc0ef81a18ba418d759c5f4f87bf7102229a9153fb397d7d490987330ffd')
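# --- Hedged usage note (not part of the original recipe) ---
# With this package file on a Spack repo path, a pinned version can be
# requested from the command line, e.g.:
#   spack install libproxy@0.4.15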
|
python
|
import torch
import torch.nn as nn
import numpy as np
from operations import *
from torch.autograd import Variable
from genotypes import PRIMITIVES
from genotypes import Genotype
class MixedOp (nn.Module):
def __init__(self, C, stride):
super(MixedOp, self).__init__()
self._ops = nn.ModuleList()
for primitive in PRIMITIVES:
op = OPS[primitive](C, stride, False)
if 'pool' in primitive:
op = nn.Sequential(op, nn.BatchNorm2d(C, affine=False))
self._ops.append(op)
def forward(self, x, weights):
return sum(w * op(x) for w, op in zip(weights, self._ops))
class Cell(nn.Module):
def __init__(self, steps, multiplier, C_prev_prev, C_prev, C, rate):
super(Cell, self).__init__()
self.C_out = C
if C_prev_prev != -1 :
self.preprocess0 = ReLUConvBN(C_prev_prev, C, 1, 1, 0, affine=False)
if rate == 2 :
self.preprocess1 = FactorizedReduce (C_prev, C, affine= False)
elif rate == 0 :
self.preprocess1 = FactorizedIncrease (C_prev, C)
else :
self.preprocess1 = ReLUConvBN(C_prev, C, 1, 1, 0, affine=False)
self._steps = steps
self._multiplier = multiplier
self._ops = nn.ModuleList()
for i in range(self._steps):
for j in range(2+i):
stride = 1
if C_prev_prev == -1 and j == 0:
op = None
else:
op = MixedOp(self.C_out, stride)
self._ops.append(op)
self.ReLUConvBN = ReLUConvBN (self._multiplier * self.C_out, self.C_out, 1, 1, 0)
def forward(self, s0, s1, weights):
if s0 is not None :
s0 = self.preprocess0 (s0)
s1 = self.preprocess1(s1)
states = [s0, s1]
offset = 0
for i in range(self._steps):
s = sum(self._ops[offset+j](h, weights[offset+j]) for j, h in enumerate(states) if h is not None)
offset += len(states)
states.append(s)
concat_feature = torch.cat(states[-self._multiplier:], dim=1)
return self.ReLUConvBN (concat_feature)
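# --- Hedged usage sketch (not part of the original module) ---
# Shows how a Cell consumes the two previous feature maps together with
# softmax-normalised architecture weights (one row per edge, one column per
# primitive in PRIMITIVES). Channel count and spatial size are illustrative
# assumptions; all primitives are assumed to be channel-preserving as in DARTS.
if __name__ == "__main__":
    steps, multiplier, C = 4, 4, 16
    cell = Cell(steps, multiplier, C_prev_prev=C, C_prev=C, C=C, rate=1)
    s0 = torch.randn(2, C, 32, 32)
    s1 = torch.randn(2, C, 32, 32)
    num_edges = sum(2 + i for i in range(steps))  # 14 edges for steps=4
    weights = torch.softmax(torch.randn(num_edges, len(PRIMITIVES)), dim=-1)
    out = cell(s0, s1, weights)
    print(out.shape)  # expected (2, 16, 32, 32) after the final 1x1 ReLUConvBN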
|
python
|
import pytest
from django.contrib.auth import get_user_model
from django.test import Client
def test_user_guest():
c = Client()
resp = c.get("/require-user")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
def test_async_user_guest():
c = Client()
resp = c.get("/async/require-user")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_user_inactive():
c = Client()
user = get_user_model().objects.get_or_create(
username="inactive_user", email="[email protected]"
)[0]
c.force_login(user)
user.is_active = False
user.save()
resp = c.get("/require-user")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_async_user_inactive():
c = Client()
user = get_user_model().objects.get_or_create(
username="inactive_user", email="[email protected]"
)[0]
c.force_login(user)
user.is_active = False
user.save()
resp = c.get("/async/require-user")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_user_success():
user = get_user_model().objects.get_or_create(
username="user", email="[email protected]"
)[0]
c = Client()
c.force_login(user)
resp = c.get("/require-user")
assert resp.status_code == 200
assert resp.json() == {"user": "user"}
@pytest.mark.django_db(transaction=True)
def test_async_user_success():
user = get_user_model().objects.get_or_create(
username="user", email="[email protected]"
)[0]
c = Client()
c.force_login(user)
resp = c.get("/async/require-user")
assert resp.status_code == 200
assert resp.json() == {"user": "user"}
def test_staff_guest():
c = Client()
resp = c.get("/require-staff")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
def test_async_staff_guest():
c = Client()
resp = c.get("/async/require-staff")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_staff_inactive():
user = get_user_model().objects.get_or_create(
username="inactive_staff", email="[email protected]", is_staff=True
)[0]
c = Client()
c.force_login(user)
user.is_active = False
user.save()
resp = c.get("/require-staff")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_async_staff_inactive():
user = get_user_model().objects.get_or_create(
username="inactive_staff", email="[email protected]", is_staff=True
)[0]
c = Client()
c.force_login(user)
user.is_active = False
user.save()
resp = c.get("/async/require-staff")
assert resp.status_code == 403
assert resp.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_staff_success():
user = get_user_model().objects.get_or_create(
username="staff", email="[email protected]", is_staff=True
)[0]
c = Client()
c.force_login(user)
resp = c.get("/require-staff")
assert resp.status_code == 200
assert resp.json() == {"user": "staff"}
@pytest.mark.django_db(transaction=True)
def test_async_staff_success():
user = get_user_model().objects.get_or_create(
username="staff", email="[email protected]", is_staff=True
)[0]
c = Client()
c.force_login(user)
resp = c.get("/async/require-staff")
assert resp.status_code == 200
assert resp.json() == {"user": "staff"}
|
python
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from mock import patch
from sentry.tasks.fetch_source import (
UrlResult, expand_javascript_source, discover_sourcemap,
fetch_sourcemap, fetch_url, generate_module, BAD_SOURCE, trim_line)
from sentry.utils.sourcemaps import (SourceMap, SourceMapIndex)
from sentry.testutils import TestCase
base64_sourcemap = 'data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZ2VuZXJhdGVkLmpzIiwic291cmNlcyI6WyIvdGVzdC5qcyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUEiLCJzb3VyY2VzQ29udGVudCI6WyJjb25zb2xlLmxvZyhcImhlbGxvLCBXb3JsZCFcIikiXX0='
class FetchUrlTest(TestCase):
@patch('sentry.tasks.fetch_source.safe_urlopen')
@patch('sentry.tasks.fetch_source.safe_urlread')
def test_simple(self, safe_urlread, safe_urlopen):
safe_urlopen.return_value.headers = (('content-type', 'application/json'),)
safe_urlread.return_value = u'foo bar'
result = fetch_url('http://example.com')
safe_urlopen.assert_called_once_with(
'http://example.com', allow_redirects=True, timeout=5)
safe_urlread.assert_called_once_with(safe_urlopen.return_value)
assert result.url == 'http://example.com'
assert result.body == u'foo bar'
assert result.headers == {'content-type': 'application/json'}
# ensure we use the cached result
result2 = fetch_url('http://example.com')
safe_urlopen.assert_called_once()
assert result == result2
@patch('sentry.tasks.fetch_source.safe_urlopen')
@patch('sentry.tasks.fetch_source.safe_urlread')
def test_connection_failure(self, safe_urlread, safe_urlopen):
safe_urlopen.side_effect = Exception()
result = fetch_url('http://example.com')
safe_urlopen.assert_called_once_with(
'http://example.com', allow_redirects=True, timeout=5)
assert not safe_urlread.mock_calls
assert result == BAD_SOURCE
# ensure we use the cached domain-wide failure for the second call
result = fetch_url('http://example.com/foo/bar')
safe_urlopen.assert_called_once()
assert result == BAD_SOURCE
@patch('sentry.tasks.fetch_source.safe_urlopen')
@patch('sentry.tasks.fetch_source.safe_urlread')
def test_read_failure(self, safe_urlread, safe_urlopen):
safe_urlopen.return_value.headers = (('content-type', 'application/json'),)
safe_urlread.side_effect = Exception()
result = fetch_url('http://example.com')
safe_urlopen.assert_called_once_with(
'http://example.com', allow_redirects=True, timeout=5)
safe_urlread.assert_called_once_with(safe_urlopen.return_value)
assert result == BAD_SOURCE
# ensure we use the cached failure for the second call
result = fetch_url('http://example.com')
safe_urlopen.assert_called_once()
assert result == BAD_SOURCE
class DiscoverSourcemapTest(TestCase):
# discover_sourcemap(result)
def test_simple(self):
result = UrlResult('http://example.com', {}, '')
assert discover_sourcemap(result) is None
result = UrlResult('http://example.com', {
'x-sourcemap': 'http://example.com/source.map.js'
}, '')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
result = UrlResult('http://example.com', {
'sourcemap': 'http://example.com/source.map.js'
}, '')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
result = UrlResult('http://example.com', {}, '//@ sourceMappingURL=http://example.com/source.map.js\nconsole.log(true)')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
result = UrlResult('http://example.com', {}, '//# sourceMappingURL=http://example.com/source.map.js\nconsole.log(true)')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
result = UrlResult('http://example.com', {}, 'console.log(true)\n//@ sourceMappingURL=http://example.com/source.map.js')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
result = UrlResult('http://example.com', {}, 'console.log(true)\n//# sourceMappingURL=http://example.com/source.map.js')
assert discover_sourcemap(result) == 'http://example.com/source.map.js'
class ExpandJavascriptSourceTest(TestCase):
@patch('sentry.models.Event.update')
@patch('sentry.tasks.fetch_source.fetch_url')
@patch('sentry.tasks.fetch_source.fetch_sourcemap')
@patch('sentry.tasks.fetch_source.discover_sourcemap')
def test_simple(self, discover_sourcemap, fetch_sourcemap, fetch_url, update):
data = {
'sentry.interfaces.Exception': {
'values': [{
'stacktrace': {
'frames': [
{
'abs_path': 'http://example.com/foo.js',
'filename': 'foo.js',
'lineno': 4,
'colno': 0,
},
{
'abs_path': 'http://example.com/foo.js',
'filename': 'foo.js',
'lineno': 1,
'colno': 0,
},
],
},
}],
}
}
discover_sourcemap.return_value = None
fetch_sourcemap.return_value = None
fetch_url.return_value.body = '\n'.join('hello world')
expand_javascript_source(data)
fetch_url.assert_called_once_with('http://example.com/foo.js')
frame_list = data['sentry.interfaces.Exception']['values'][0]['stacktrace']['frames']
frame = frame_list[0]
assert frame['pre_context'] == ['h', 'e', 'l']
assert frame['context_line'] == 'l'
assert frame['post_context'] == ['o', ' ', 'w', 'o', 'r']
frame = frame_list[1]
assert not frame.get('pre_context')
assert frame['context_line'] == 'h'
assert frame['post_context'] == ['e', 'l', 'l', 'o', ' ']
@patch('sentry.models.Event.update')
@patch('sentry.tasks.fetch_source.fetch_url')
@patch('sentry.tasks.fetch_source.discover_sourcemap')
def test_inlined_sources(self, discover_sourcemap, fetch_url, update):
data = {
'sentry.interfaces.Exception': {
'values': [{
'stacktrace': {
'frames': [
{
'abs_path': 'http://example.com/test.min.js',
'filename': 'test.js',
'lineno': 1,
'colno': 0,
},
],
},
}],
}
}
discover_sourcemap.return_value = base64_sourcemap
fetch_url.return_value.url = 'http://example.com/test.min.js'
fetch_url.return_value.body = '\n'.join('<generated source>')
expand_javascript_source(data)
fetch_url.assert_called_once_with('http://example.com/test.min.js')
frame_list = data['sentry.interfaces.Exception']['values'][0]['stacktrace']['frames']
frame = frame_list[0]
assert not frame.get('pre_context')
assert frame['context_line'] == 'console.log("hello, World!")'
assert not frame.get('post_context')
class GenerateModuleTest(TestCase):
def test_simple(self):
assert generate_module(None) == '<unknown module>'
assert generate_module('http://example.com/foo.js') == 'foo'
assert generate_module('http://example.com/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/js/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/javascript/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/1.0/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/v1/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/v1.0.0/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/_baz/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/1/2/3/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/abcdef0/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/92cd589eca8235e7b373bf5ae94ebf898e3b949c/foo/bar.js') == 'foo/bar'
assert generate_module('http://example.com/7d6d00eae0ceccdc7ee689659585d95f/foo/bar.js') == 'foo/bar'
assert generate_module('/foo/bar.js') == 'foo/bar'
assert generate_module('../../foo/bar.js') == 'foo/bar'
assert generate_module('/foo/bar-7d6d00eae0ceccdc7ee689659585d95f.js') == 'foo/bar'
class FetchBase64SourcemapTest(TestCase):
def test_simple(self):
index = fetch_sourcemap(base64_sourcemap)
states = [SourceMap(1, 0, '/test.js', 0, 0, None)]
sources = set(['/test.js'])
keys = [(1, 0)]
content = {'/test.js': ['console.log("hello, World!")']}
assert index == SourceMapIndex(states, keys, sources, content)
class TrimLineTest(TestCase):
long_line = 'The public is more familiar with bad design than good design. It is, in effect, conditioned to prefer bad design, because that is what it lives with. The new becomes threatening, the old reassuring.'
def test_simple(self):
assert trim_line('foo') == 'foo'
assert trim_line(self.long_line) == 'The public is more familiar with bad design than good design. It is, in effect, conditioned to prefer bad design, because that is what it li {snip}'
assert trim_line(self.long_line, column=10) == 'The public is more familiar with bad design than good design. It is, in effect, conditioned to prefer bad design, because that is what it li {snip}'
assert trim_line(self.long_line, column=66) == '{snip} blic is more familiar with bad design than good design. It is, in effect, conditioned to prefer bad design, because that is what it lives wi {snip}'
assert trim_line(self.long_line, column=190) == '{snip} gn. It is, in effect, conditioned to prefer bad design, because that is what it lives with. The new becomes threatening, the old reassuring.'
assert trim_line(self.long_line, column=9999) == '{snip} gn. It is, in effect, conditioned to prefer bad design, because that is what it lives with. The new becomes threatening, the old reassuring.'
|
python
|
import numpy as np
import pandas as pd
import thermalstd
import dataclima
import solarpower
db_cable = 'DB_cables.xlsx'
csvfile = r'D:\Analise_Dados_Solares\UFV Rio do Peixe\Séries de longo prazo (Helio-Clim3)\SAO_JOAO_DO_RIO_DO_PEIXE_HC3-METEO_hour_lat-6.725_lon-38.454_2004-02-01_2019-01-30_hz1.csv'
# dictstudy_ACSR = {'Type': ['ACSR', 'ACSR', 'ACSR', 'ACSR', 'ACSR', 'ACSR'],
# 'Name': ['Partridge', 'Linnet', 'Ibis', 'Hawk', 'Dove', 'Grosbeak']}
# cablesstudy_ACSR = pd.DataFrame(dictstudy_ACSR)
# dictstudy_AAAC = {'Type': ['AAAC_1120', 'AAAC_1120', 'AAAC_1120', 'AAAC_1120', 'AAAC_1120', 'AAAC_1120'],
# 'Name': ['Krypton', 'Lutetium', 'Neon', 'Nitrogen', 'Nobelium', 'Oxygen']}
# cablesstudy_AAAC = pd.DataFrame(dictstudy_AAAC)
# LOADING HC3 FILE
dataclima = dataclima.helioclim3(csvfile, 'rdp.pkl')
df1 = dataclima.loading()
print('ORIGINAL DATAFRAME')
print(df1.head())
# CLIMATE VARIABLES
climavars = thermalstd.climavars(vw=1.0,
em=0.5,
ab=0.5,
tamb=40,
zl=90,
lat=-6,
atm=1,
he=100,
phi=90,
hour=11,
nday=172)
def study_cables(dictcables, df1):
for i in range(dictcables.shape[0]):
cable_type = dictcables.iloc[i, 0]
cable_name = dictcables.iloc[i, 1]
cablevars = thermalstd.cablevars(db_cable=db_cable,
cable_type=cable_type,
cable_name=cable_name)
calc = thermalstd.Std7382006(climavars=climavars, cablevars=cablevars)
calc.graphcable()
# CALCULATING GROSS AND NET PRODUCTION
# Loss factors considered in the energy production calculation (%)
# print(df1.head())
dataloss = solarpower.energycalc(df=df1,
horizon=0.2,
shadings=1.9,
iam=1.4,
soiling=1.5,
lowirradeff=0.3,
temperatureloss=10.1,
modulequality=0.2,
lid=2.1,
mismatch=0.6,
ohmicdcloss=1.1,
inverterloss=1.4,
plantcontroller=2.5,
transf_lv_mv=1.2,
transf_mv_hv=0.6,
auxloadsloss=0.3,
ohmicac_poi=1.3,
systemunavailability=0.8,
gridunavailability=0.2)
# print(df1.head())
'''
PV plant (UFV) characteristics
Module model: TSM-370DE14A(II) (380W)
Module dimensions: 1960 × 992 × 40 mm
https://www.civicsolar.com/question/how-do-you-calculate-solar-panel-efficiency
modulearea = 1.96 * 0.992 # m²
'''
dfproduction = dataloss.production(modulearea=1.94432,
totalpower=80.256e6,
modulepower=380,
trackeradd=1.276)
# CUTTING MAX PRODUCTION IN SOLAR POWER PLANT
linevars = thermalstd.linevars(dfproduction=dfproduction,
voltage=69,
powerfactor=0.95,
climavars=climavars,
cablevars=cablevars,
extline=14,
maxnetprod=61)
# WITHOUT CUTTING MAX PRODUCTION IN SOLAR POWER PLANT
# linevars = thermalstd.linevars(dfproduction=dfproduction,
# voltage=69,
# powerfactor=0.95,
# climavars=climavars,
# cablevars=cablevars,
# extline=14)
# outnetlimit = power value at the destination
# maxnetprod = max power value at the origin
# CONDITIONS
dataanalysis = thermalstd.analysis(climavars=climavars,
cablevars=cablevars,
linevars=linevars,
savexlsx=True)
print('PLOTTING MEAN AND MAX CURRENT BARS')
dataanalysis.curvecur('Current_Bars')
print('PLOTTING CURRENT X TEMP BARS')
dataanalysis.curvecurtemp('Current_Temp_Bars')
dataanalysis.conditions()
# study_cables(cablesstudy_ACSR, df1)
# study_cables(cablesstudy_AAAC, df1)
dictstudy = {'Type': ['AAAC_1120'],
'Name': ['Nitrogen']}
cablesstudy = pd.DataFrame(dictstudy)
study_cables(cablesstudy, df1)
|
python
|
# Copyright (c) 2003-2020 Xsens Technologies B.V. or subsidiaries worldwide.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the names of the copyright holders nor the names of their contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.THE LAWS OF THE NETHERLANDS
# SHALL BE EXCLUSIVELY APPLICABLE AND ANY DISPUTES SHALL BE FINALLY SETTLED UNDER THE RULES
# OF ARBITRATION OF THE INTERNATIONAL CHAMBER OF COMMERCE IN THE HAGUE BY ONE OR MORE
# ARBITRATORS APPOINTED IN ACCORDANCE WITH SAID RULES.
#
import sys
import xsensdeviceapi.xsensdeviceapi_py37_64 as xda
from threading import Lock
class XdaCallback(xda.XsCallback):
def __init__(self, max_buffer_size = 5):
xda.XsCallback.__init__(self)
self.m_maxNumberOfPacketsInBuffer = max_buffer_size
self.m_packetBuffer = list()
self.m_lock = Lock()
def packetAvailable(self):
self.m_lock.acquire()
res = len(self.m_packetBuffer) > 0
self.m_lock.release()
return res
def getNextPacket(self):
self.m_lock.acquire()
assert(len(self.m_packetBuffer) > 0)
oldest_packet = xda.XsDataPacket(self.m_packetBuffer.pop(0))
self.m_lock.release()
return oldest_packet
def onLiveDataAvailable(self, dev, packet):
self.m_lock.acquire()
assert(packet != 0)
while len(self.m_packetBuffer) >= self.m_maxNumberOfPacketsInBuffer:
self.m_packetBuffer.pop()
self.m_packetBuffer.append(xda.XsDataPacket(packet))
self.m_lock.release()
if __name__ == '__main__':
print("Creating XsControl object...")
control = xda.XsControl_construct()
assert(control != 0)
xdaVersion = xda.XsVersion()
xda.xdaVersion(xdaVersion)
print("Using XDA version %s" % xdaVersion.toXsString())
try:
print("Scanning for devices...")
portInfoArray = xda.XsScanner_scanPorts()
# Find an MTi device
mtPort = xda.XsPortInfo()
for i in range(portInfoArray.size()):
if portInfoArray[i].deviceId().isMti() or portInfoArray[i].deviceId().isMtig():
mtPort = portInfoArray[i]
break
if mtPort.empty():
raise RuntimeError("No MTi device found. Aborting.")
did = mtPort.deviceId()
print("Found a device with:")
print(" Device ID: %s" % did.toXsString())
print(" Port name: %s" % mtPort.portName())
print("Opening port...")
if not control.openPort(mtPort.portName(), mtPort.baudrate()):
raise RuntimeError("Could not open port. Aborting.")
# Get the device object
device = control.device(did)
assert(device != 0)
print("Device: %s, with ID: %s opened." % (device.productCode(), device.deviceId().toXsString()))
# Create and attach callback handler to device
callback = XdaCallback()
device.addCallbackHandler(callback)
# Put the device into configuration mode before configuring the device
print("Putting device into configuration mode...")
if not device.gotoConfig():
raise RuntimeError("Could not put device into configuration mode. Aborting.")
print("Configuring the device...")
configArray = xda.XsOutputConfigurationArray()
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_PacketCounter, 0))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_SampleTimeFine, 0))
if device.deviceId().isImu():
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_Acceleration, 100))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_RateOfTurn, 100))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_MagneticField, 100))
elif device.deviceId().isVru() or device.deviceId().isAhrs():
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_Quaternion, 100))
elif device.deviceId().isGnss():
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_Quaternion, 100))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_LatLon, 100))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_AltitudeEllipsoid, 100))
configArray.push_back(xda.XsOutputConfiguration(xda.XDI_VelocityXYZ, 100))
else:
raise RuntimeError("Unknown device while configuring. Aborting.")
if not device.setOutputConfiguration(configArray):
raise RuntimeError("Could not configure the device. Aborting.")
print("Creating a log file...")
logFileName = "logfile.mtb"
if device.createLogFile(logFileName) != xda.XRV_OK:
raise RuntimeError("Failed to create a log file. Aborting.")
else:
print("Created a log file: %s" % logFileName)
print("Putting device into measurement mode...")
if not device.gotoMeasurement():
raise RuntimeError("Could not put device into measurement mode. Aborting.")
print("Starting recording...")
if not device.startRecording():
raise RuntimeError("Failed to start recording. Aborting.")
print("Main loop. Recording data for 10 seconds.")
startTime = xda.XsTimeStamp_nowMs()
while xda.XsTimeStamp_nowMs() - startTime <= 10000:
if callback.packetAvailable():
# Retrieve a packet
packet = callback.getNextPacket()
s = ""
if packet.containsCalibratedData():
acc = packet.calibratedAcceleration()
s = "Acc X: %.2f" % acc[0] + ", Acc Y: %.2f" % acc[1] + ", Acc Z: %.2f" % acc[2]
gyr = packet.calibratedGyroscopeData()
s += " |Gyr X: %.2f" % gyr[0] + ", Gyr Y: %.2f" % gyr[1] + ", Gyr Z: %.2f" % gyr[2]
mag = packet.calibratedMagneticField()
s += " |Mag X: %.2f" % mag[0] + ", Mag Y: %.2f" % mag[1] + ", Mag Z: %.2f" % mag[2]
if packet.containsOrientation():
quaternion = packet.orientationQuaternion()
s = "q0: %.2f" % quaternion[0] + ", q1: %.2f" % quaternion[1] + ", q2: %.2f" % quaternion[2] + ", q3: %.2f " % quaternion[3]
euler = packet.orientationEuler()
s += " |Roll: %.2f" % euler.x() + ", Pitch: %.2f" % euler.y() + ", Yaw: %.2f " % euler.z()
if packet.containsLatitudeLongitude():
latlon = packet.latitudeLongitude()
s += " |Lat: %7.2f" % latlon[0] + ", Lon: %7.2f " % latlon[1]
if packet.containsAltitude():
s += " |Alt: %7.2f " % packet.altitude()
if packet.containsVelocity():
vel = packet.velocity(xda.XDI_CoordSysEnu)
s += " |E: %7.2f" % vel[0] + ", N: %7.2f" % vel[1] + ", U: %7.2f " % vel[2]
print("%s\r" % s, end="", flush=True)
print("\nStopping recording...")
if not device.stopRecording():
raise RuntimeError("Failed to stop recording. Aborting.")
print("Closing log file...")
if not device.closeLogFile():
raise RuntimeError("Failed to close log file. Aborting.")
print("Removing callback handler...")
device.removeCallbackHandler(callback)
print("Closing port...")
control.closePort(mtPort.portName())
print("Closing XsControl object...")
control.close()
except RuntimeError as error:
print(error)
sys.exit(1)
except:
print("An unknown fatal error has occured. Aborting.")
sys.exit(1)
else:
print("Successful exit.")
|
python
|
from django.db import models
from django.contrib.auth.models import User
class Customer(models.Model):
user = models.OneToOneField(User, null=True, blank=True, on_delete=models.CASCADE)
name = models.CharField(max_length=200, null=True)
email = models.CharField(max_length=200, null=True)
def __str__(self):
return self.name
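# --- Hedged usage sketch (not part of the original models module) ---
# Creating a Customer tied to a User, e.g. from a shell or a post-save signal;
# the names and values below are only examples.
# >>> from django.contrib.auth.models import User
# >>> user = User.objects.create_user("alice", "alice@example.com", "secret")
# >>> Customer.objects.create(user=user, name="Alice", email=user.email)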
|
python
|
#!/usr/bin/env python
import glob
import json
import logging
import math
import mimetypes
import os
import platform
import re
import shutil
import socket
import subprocess
import sys
import tempfile
from multiprocessing import Process
from random import uniform
from socket import gaierror
from time import sleep
from importlib.metadata import metadata
from urllib.error import URLError
from urllib.parse import unquote, urldefrag, urljoin, urlparse
import requests
from bs4 import BeautifulSoup as bs
from fake_useragent import FakeUserAgentError, UserAgent
from packaging.version import parse
from requests.auth import HTTPBasicAuth
from requests.packages import urllib3
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from robotsparsetools import NotFoundError, Parse
from tqdm import tqdm
try:
import msvcrt
except:
import termios
try:
metadata("prop-request")
_binary = False
except:
_binary = True
_prop_directory = os.path.join(os.environ.get("HOME"), ".prop-datas")
if not os.path.isdir(_prop_directory):
os.mkdir(_prop_directory)
"""
The following command must be run beforehand:
pip install requests numpy beautifulsoup4 requests[socks] fake-useragent tqdm
(urllib3 is bundled with requests)
"""
urllib3.disable_warnings(InsecureRequestWarning)
VERSION = parse("1.2.7")
class error:
@staticmethod
def print(msg):
print(f"\033[31m{msg}\033[0m", file=sys.stderr)
print("\n\033[33mIf you don't know how to use, please use '-h', '--help' options and you will see help message\033[0m", file=sys.stderr)
sys.exit(1)
class LoggingHandler(logging.StreamHandler):
color = {'INFO': '\033[36mINFO\033[0m', 'WARNING': '\033[33mWARNING\033[0m', 'WARN': '\033[33mWARN\033[0m', 'ERROR': '\033[31mERROR\033[0m'}
def __init__(self, level=logging.NOTSET):
super().__init__(level)
def emit(self, record):
try:
record.levelname = LoggingHandler.color.get(record.levelname, record.levelname)
msg = self.format(record)
tqdm.write(msg, file=sys.stderr)
self.flush()
except Exception:
self.handleError(record)
class LoggingFileHandler(logging.Handler):
def __init__(self, file, mode="a", level=logging.NOTSET):
super().__init__(level)
self.file = open(file, mode)
def emit(self, record):
try:
record.msg = re.sub('\033\\[[+-]?\\d+m', '', str(record.msg))
record.levelname = re.sub('\033\\[[+-]?\\d+m', '', record.levelname)
msg = self.format(record)
self.file.write(msg)
self.file.write('\n')
self.file.flush()
except Exception as e:
self.handleError(record)
class setting:
"""
Class that defines option settings and logging to a file
"""
if _binary:
log_file = os.path.join(_prop_directory, 'log.log')
config_file = os.path.join(_prop_directory, 'config.json')
else:
log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'log.log')
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.json')
def __init__(self):
# Options that can be configured
# Other modules that import this can tweak behaviour by modifying this dictionary
self.options = {'download_name': '', 'limit': 0, 'only_body': False, 'debug': False, 'parse': False, 'types': 'get', 'payload': None, 'output': True, 'filename': None, 'timeout': (3.0, 60.0), 'redirect': True, 'upload': None, 'json': False, 'search': None, 'header': {'User-Agent': 'Prop/1.1.2'}, 'cookie': None, 'proxy': {"http": os.environ.get("http_proxy") or os.environ.get("HTTP_PROXY"), "https": os.environ.get("https_proxy") or os.environ.get("HTTPS_PROXY")}, 'auth': None, 'bytes': False, 'recursive': 0, 'body': True, 'content': True, 'conversion': True, 'reconnect': 5, 'caperror': True, 'noparent': False, 'no_downloaded': False, 'interval': 1, 'start': None, 'format': '%(file)s', 'info': False, 'multiprocess': False, 'ssl': True, 'parser': 'html.parser', 'no_dl_external': True, 'save_robots': True, 'check_only': False}
# Logger setup below
logger = logging.getLogger('Log of Prop')
logger.setLevel(20)
sh = LoggingHandler()
self.fh = LoggingFileHandler(setting.log_file)
logger.addHandler(sh)
logger.addHandler(self.fh)
format = logging.Formatter('%(asctime)s:[%(levelname)s]> %(message)s')
sh.setFormatter(format)
self.fh.setFormatter(format)
self.log = logger.log
def config_load(self) -> None:
"""
Load the configuration file
"""
if os.path.isfile(setting.config_file):
with open(setting.config_file, 'r') as f:
config = json.load(f)
if isinstance(config['timeout'], list):
config['timeout'] = tuple(config['timeout'])
self.options.update(config)
def config(self, key: str, value: str or bool or None) -> None:
"""
Set a single option
"""
self.options[key] = value
class cache:
"""
Class that handles the (stylesheet) cache
"""
if _binary:
root = os.path.join(_prop_directory, 'cache')
else:
root = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'cache')
configfile = os.path.join(root, '.cache_info')
if os.path.isfile(configfile):
with open(configfile, 'r') as f:
_caches = json.load(f)
else:
_caches = dict()
def __init__(self, url, parse):
self.parse = parse
host = self.parse.get_hostname(url)
if not os.path.isdir(self.root):
os.mkdir(self.root)
self.directory = os.path.join(cache.root, host)
if not os.path.isdir(self.directory):
os.mkdir(self.directory)
@staticmethod
def get_cache(url) -> str or None:
return cache._caches.get(url)
def save(self, url, body: bytes) -> str:
file = os.path.join(self.directory, self.parse.get_filename(url))
with open(file, 'wb') as f:
f.write(body)
cache._caches[url] = file
@staticmethod
def update(option):
file = os.path.join('styles', '.prop_info.json')
if os.path.isfile(file):
with open(file, 'r') as f:
info_dict = json.load(f)
else:
info_dict = dict()
if not cache._caches:
return
for url, path in tqdm(cache._caches.items()):
r = requests.get(url, timeout=option['timeout'], proxies=option['proxy'], headers=option['header'], verify=option['ssl'])
with open(path, 'wb') as f:
f.write(r.content)
tqdm.write(f"updated '{path}'")
if url in info_dict:
shutil.copy(path, info_dict[url])
tqdm.write(f"updated '{info_dict[url]}'")
sleep(0.5)
def __enter__(self):
return self
def __exit__(self, *_):
with open(cache.configfile, 'w') as f:
json.dump(self._caches, f)
class history:
"""
Class that defines functions related to the download history
It basically only operates on files under ./history
"""
if _binary:
root = os.path.join(_prop_directory, 'history')
else:
root = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'history')
def __init__(self, url: str):
self.domain = urlparse(url).netloc
self.history_file = os.path.join(history.root, self.domain+'.txt')
if not os.path.isdir(history.root):
os.mkdir(history.root)
def write(self, content: str or list, end: str = '\n') -> None:
if isinstance(content, list):
content: str = '\n'.join(content)
if content in self.read():
return
with open(self.history_file, 'a') as f:
f.write(content+end)
def read(self) -> set:
if os.path.isfile(self.history_file):
with open(self.history_file, 'r') as f:
return set(f.read().rstrip().splitlines())
else:
return set()
class parser:
"""
HTML and URL parsing
spider extracts and saves the URLs referenced by a and img tags; html_extraction extracts arbitrary tags
"""
status_messages = {400: 'Bad Request', 401: 'Unauthorized', 402: 'Payment Required', 403: 'Forbidden', 404: 'Not Found', 405: 'Method Not Allowed', 406: 'Not Acceptable', 407: 'Proxy Authentication Required', 408: 'Request Timeout', 409: 'Conflict', 410: 'Gone', 411: 'Length Required', 412: 'Precondition Failed', 413: 'Payload Too Large', 414: 'URI Too Long', 415: 'Unsupported Media Type', 416: 'Range Not Satisfiable', 417: 'Expectation Failed', 418: "I'm a teapot", 421: 'Misdirected Request', 422: 'Unprocessable Entity', 423: 'Locked', 424: 'Failed Dependency', 425: 'Too Early', 426: 'Upgrade Required', 428: 'Precondition Required', 429: 'Too Many Requests', 431: 'Request Header Fields Too Large', 451: 'Unavailable For Legal Reasons', 500: 'Internal Server Error', 501: 'Not Implemented', 502: 'Bad Gateway', 503: 'Service Unavailable', 504: 'Gateway Timeout', 505: 'HTTP Version Not Supported', 506: 'Variant Also Negotiates', 507: 'Insufficient Storage', 508: 'Loop Detected', 510: 'Not Extended', 511: 'Network Authentication Required'}
def __init__(self, option, log, *, dl=None):
self.option = option
self.log = log
self.parser = self.option['parser']
self.dl = dl
@staticmethod
def get_rootdir(url: str) -> str or None:
"""
Extract the root (home) address
"""
if parser.is_url(url):
result = urlparse(url)
return result.scheme+'://'+result.hostname
else:
return None
@staticmethod
def query_dns(url: str):
if parser.is_url(url):
host = parser.get_hostname(url)
else:
host = url
if host:
i = socket.getaddrinfo(host, None)
return i
else:
raise gaierror()
@staticmethod
def get_hostname(url: str) -> str or None:
if parser.is_url(url):
return urlparse(url).hostname
else:
return None
@staticmethod
def get_filename(url, name_only=True):
if not isinstance(url, str):
return url
result = unquote(url.rstrip('/').split('/')[-1])
if name_only:
defrag = urldefrag(result).url
return parser.delete_query(defrag)
return result
@staticmethod
def splitext(url):
if not isinstance(url, str):
return url
split = url.rstrip('/').split('.')
if len(split) < 2 or not split[-1] or '/' in split[-1] or urlparse(url).path in {'', '/'}:
return (url, '.html')
else:
return ('.'.join(split[:-1]), '.'+split[-1])
@staticmethod
def delete_query(url):
if not isinstance(url, str):
return url
index = url.find('?')
if 0 <= index:
return url[:index]
else:
return url
@staticmethod
def is_url(url: str) -> bool:
"""
Determine whether the string passed as an argument is a URL
"""
return bool(re.match(r"https?://[\w!\?/\+\-_~=;\.,\*&@#\$%\(\)'\[\]]+", url))
def html_extraction(self, source: bytes or str, words: dict) -> str:
data = bs(source, self.parser)
if 'css' in words:
code: list = data.select(words.get('css'), limit=self.option.get('limit') or None)
else:
code: list = data.find_all(name=words.get('tags'), attrs=words['words'], limit=self.option.get('limit') or None)
return '\n\n'.join(map(str, code))
def is_success_status(self, returncode):
if 200 <= returncode < 400:
self.log(20, f'{returncode}: Success request')
return True
else:
self.log(40, '{}: {}'.format(returncode, parser.status_messages.get(returncode, "unknown")))
return False
def delay_check(self):
"""
Check whether the configured interval is at least the crawl_delay value from robots.txt
If it is shorter than crawl_delay, replace the interval with the crawl_delay value
"""
delay = self.robots.delay()
if delay is not None and self.option['interval'] < delay:
self.log(30, f"it changed interval because it was shorter than the time stated in robots.txt '{self.option['interval']}' => '{delay}'")
self.option['interval'] = delay
def _cut(self, list, get, cwd_url, response, root_url, downloaded, is_ok, info_dict, cut=True):
data: dict = dict()
did_host: set = set()
dns = False
start = self.option['start'] is None
for tag in list:
if isinstance(get, str):
url: str = tag.get(get)
else:
for g in get:
url = tag.get(g)
if url:
break
else:
continue
url = url
if not url or '#' in url or url in info_dict:
continue
if self.is_url(url):
target_url: str = url
dns = True
else:
target_url: str = urljoin(cwd_url, url)
if not self.is_url(target_url):
continue
if cut and not start:
if target_url.endswith(self.option['start']):
start = True
else:
continue
if cut and ((self.option['noparent'] and (not target_url.startswith(response.url) and target_url.startswith(root_url))) or target_url in set(data.values()) or ((target_url.startswith(cwd_url) and '#' in target_url) or (self.option['no_dl_external'] and not target_url.startswith(root_url)))):
continue
if cut and (self.option['download_name'] not in target_url or (self.option['no_downloaded'] and target_url in downloaded)):
continue
if self.option['debug']:
self.log(20, f"found '{target_url}'")
if self.option['save_robots'] and not is_ok(url):
self.log(30, f'{target_url} is prohibited by robots.txt')
continue
if dns:
try:
hostname = self.get_hostname(target_url)
if hostname not in did_host:
if not hostname:
raise gaierror()
if self.option['debug']:
self.log(20, f"querying the DNS server for '{hostname}' now...")
i = self.query_dns(hostname)
except gaierror:
self.log(30, f"skiped {target_url} because there was no response from the DNS server")
continue
except:
pass
finally:
dns = False
did_host.add(hostname)
data[url] = target_url
if cut and 0 < self.option['limit'] <= len(data):
break
return data
def _get_count(self):
files = list(filter(lambda p: bool(re.match(re.escape(self.option['formated']).replace(r'%\(num\)d', r'\d+').replace(r'%\(file\)s', '.*').replace(r'%\(ext\)s', '.*'), p)), os.listdir()))
if files:
string = self.option['formated'].split('%(num)d')
start = len(string[0])
if string[1]:
end = string[1][0]
num = map(lambda p: int(p[start:p.find(end, start)]), files)
else:
num = map(lambda p: int(p[start:]), files)
return max(num)+1
return 0
def spider(self, response, *, h=sys.stdout, session):
"""
Extract and save the targets referenced by a and img tags in the HTML
"""
temporary_list: list = []
temporary_list_urls: list = []
if '%(num)d' in self.option['formated']:
count = self._get_count()
else:
count = 0
max = self.option['interval']+3
info_file = os.path.join('styles', '.prop_info.json')
if self.option['no_downloaded']:
downloaded: set = h.read()
else:
downloaded: set = set()
if (not os.path.isfile(os.path.join('styles', '.prop_info.json'))) and self.option['body'] and not self.option['start'] and not self.option['check_only'] and not (self.option['no_downloaded'] and response.url.rstrip('/') in downloaded):
root = self.dl.recursive_download(response.url, response.text, count)
count += 1
WebSiteData: dict = {response.url: root}
h.write(response.url.rstrip('/'))
elif self.option['check_only']:
WebSiteData: dict = {response.url: response.url}
else:
WebSiteData: dict = dict()
if os.path.isfile(info_file):
with open(info_file, 'r') as f:
WebSiteData.update(json.load(f))
root_url: str = self.get_rootdir(response.url)
# ↑ gets the home (root) URL
cwd_urls = [response.url]
# ↑ gets the requested URL
# when an a tag reference starts with ./ it is joined with this variable's value
if self.option['debug']:
self.log(20, 'checking robots.txt...')
try:
self.robots = Parse(root_url, requests=True, headers=self.option['header'], proxies=self.option['proxy'], timeout=self.option['timeout'])
is_ok = self.robots.can_crawl
self.delay_check()
except NotFoundError:
is_ok = lambda *_: True
if self.option['debug']:
self.log(20, 'robots.txt was none')
source = [response.content]
print(f"\033[36mhistories are saved in '{h.history_file}'\033[0m", file=sys.stderr)
for n in range(self.option['recursive']):
for source, cwd_url in zip(source, cwd_urls):
datas = bs(source, self.parser)
if self.option['body']:
a_data: dict = self._cut(datas.find_all('a'), 'href', cwd_url, response, root_url, downloaded, is_ok, WebSiteData) # extract a tags
link_data: dict = self._cut(datas.find_all('link', rel='stylesheet'), "href", cwd_url, response, root_url, downloaded, is_ok, WebSiteData, cut=False) # extract link tags with rel="stylesheet"
if self.option['content']:
img_data: dict = self._cut(datas.find_all('img'), ['src', 'data-lazy-src', 'data-src'], cwd_url, response, root_url, downloaded, is_ok, WebSiteData) # extract img tags
self.option['header']['Referer'] = cwd_url
if self.option['body']:
if not os.path.isdir('styles') and not self.option['check_only']:
os.mkdir('styles')
self.log(20, 'loading stylesheets...')
before_fmt = self.dl.option['formated']
self.dl.option['formated'] = os.path.join('styles', '%(file)s')
for from_url, target_url in tqdm(link_data.items(), leave=False, desc="'stylesheets'"):
with cache(target_url, self) as caches:
che = caches.get_cache(target_url)
if che:
result = os.path.join('styles', os.path.basename(che))
shutil.copy(che, result)
self.log(20, f"using cache instead of downloading '{target_url}'")
else:
for i in range(self.option['reconnect']+1):
try:
if i == 0:
self.log(20, f"request start: '{target_url}'")
else:
self.log(20, f"retrying {i}")
res: requests.models.Response = session.get(target_url, timeout=self.option['timeout'], proxies=self.option['proxy'], headers=self.option['header'], verify=self.option['ssl'])
if not self.is_success_status(res.status_code):
break
if self.option['debug']:
tqdm.write(f"response speed: {res.elapsed.total_seconds()}s [{len(res.content)} bytes data]", file=sys.stderr)
res.close()
result = self.dl.recursive_download(res.url, res.content)
caches.save(target_url, res.content)
break
except Exception as e:
if i >= self.option['reconnect']-1:
self.log(30, e)
sleep(1)
continue
WebSiteData[from_url] = result
if os.path.isdir('styles'):
with open(info_file, 'w') as f:
json.dump(WebSiteData, f, indent=4, ensure_ascii=False)
self.dl.option['formated'] = before_fmt
for from_url, target_url in tqdm(a_data.items(), leave=False, desc="'a tag'"):
for i in range(self.option['reconnect']+1):
try:
if i == 0:
self.log(20, f"request start: '{target_url}'")
else:
self.log(20, f"retrying {i}...")
res: requests.models.Response = session.get(target_url, timeout=self.option['timeout'], proxies=self.option['proxy'], headers=self.option['header'], verify=self.option['ssl'])
if not self.is_success_status(res.status_code):
break
temporary_list.append(res.content)
temporary_list_urls.append(res.url)
h.write(target_url)
if self.option['debug']:
tqdm.write(f"response speed: {res.elapsed.total_seconds()}s [{len(res.content)} bytes data]", file=sys.stderr)
res.close()
if self.option['check_only']:
WebSiteData[target_url] = 'Exists' if self.is_success_status(res.status_code) else 'Not'
else:
result = self.dl.recursive_download(res.url, res.content, count)
count += 1
WebSiteData[from_url] = result
if os.path.isdir('styles'):
with open(info_file, 'w') as f:
json.dump(WebSiteData, f, indent=4, ensure_ascii=False)
break
except Exception as e:
if i >= self.option['reconnect']-1:
self.log(30, e)
sleep(1)
continue
else:
if self.option['debug']:
self.log(20, f"didn't response '{target_url}'")
continue
sleep(round(uniform(self.option['interval'], max), 1))
if self.option['content']:
for from_url, target_url in tqdm(img_data.items(), leave=False, desc="'img tag'"):
for i in range(self.option['reconnect']):
try:
if i == 0:
self.log(20, f"request start: '{target_url}'")
else:
self.log(20, f"retrying {i}")
res: requests.models.Response = session.get(target_url, timeout=self.option['timeout'], proxies=self.option['proxy'], headers=self.option['header'], verify=self.option['ssl'])
h.write(target_url)
if not self.is_success_status(res.status_code):
break
if self.option['debug']:
tqdm.write(f"response speed: {res.elapsed.total_seconds()}s [{len(res.content)} bytes data]", file=sys.stderr)
res.close()
if self.option['check_only']:
WebSiteData[target_url] = 'Exists' if self.is_success_status(res.status_code) else 'Not'
else:
result = self.dl.recursive_download(res.url, res.content, count)
count += 1
WebSiteData[from_url] = result
if os.path.isdir('styles'):
with open(info_file, 'w') as f:
json.dump(WebSiteData, f, indent=4, ensure_ascii=False)
break
except Exception as e:
if i >= self.option['reconnect']-1:
self.log(30, e)
continue
else:
if self.option['debug']:
self.log(20, f"didn't response '{target_url}'")
sleep(round(uniform(self.option['interval'], max), 1))
cwd_urls = temporary_list_urls
temporary_list_urls: list = []
source = temporary_list
temporary_list: list = []
if self.option['debug']:
self.log(20, f'{n+1} hierarchy... '+'\033[32m'+'done'+'\033[0m')
if self.option['check_only']:
for k, v in WebSiteData.items():
print('{} ... {}{}\033[0m'.format(k, '\033[32m' if v == 'Exists' else '\033[31m', v))
sys.exit()
elif os.path.isdir('styles'):
with open(info_file, 'w') as f:
json.dump(WebSiteData, f, indent=4, ensure_ascii=False)
return WebSiteData
class downloader:
"""
Class that defines the recursive download and request & parse functions
The functions from start_download onward are related to recursive downloading
"""
def __init__(self, url: str, option, parsers='html.parser'):
self.url = url # list of URLs
self.parser: str = parsers
self.option = option
self.session = requests.Session()
logger = logging.getLogger('Log of Prop')
self.log = logger.log
self.parse = parser(self.option, self.log, dl=self)
def start(self) -> None:
"""
Prepare and send the requests to the URLs
"""
methods: dict = {'get': self.session.get, 'post': self.session.post, 'put': self.session.put, 'delete': self.session.delete}
instance: requests = methods.get(self.option['types'])
if self.option['debug']:
self.log(20, """
request urls: {0}
\033[35m[settings]\033[0m
{1}
""".format(self.url, '\n'.join([f'\033[34m{k}\033[0m: {v}' for k, v in self.option.items()])))
for url in self.url:
try:
hostname = self.parse.get_hostname(url)
if not hostname:
self.log(40, f"'{url}' is not url")
continue
if self.option['debug']:
self.log(20, f"querying the DNS server for '{hostname}' now...")
i = self.parse.query_dns(hostname)
if self.option['debug']:
self.log(20, f"request start {url} [{i[0][-1][0]}]")
self.request(url, instance)
except gaierror:
self.log(20, f"skiped '{url}' because there was no response from the DNS server")
continue
except Exception as e:
if self.option['caperror']:
self.log(40, f'\033[31m{str(e)}\033[0m')
continue
def request(self, url: str, instance) -> None:
self.option['formated']: str = self.option['format'].replace('%(root)s', self.parse.get_hostname(url))
if self.option['types'] != 'post':
r: requests.models.Response = instance(url, params=self.option['payload'], allow_redirects=self.option['redirect'], cookies=self.option['cookie'], auth=self.option['auth'], timeout=self.option['timeout'], proxies=self.option['proxy'], headers=self.option['header'], verify=self.option['ssl'], stream=True)
else:
if self.option['upload']:
name, form = self.option['upload']
with open(name, 'rb') as f:
if form:
upload_data = {form: (f.name, f, mimetypes.guess_type(f.name)[0])}
else:
upload_data = {f.name: f}
r: requests.models.Response = instance(url, allow_redirects=self.option['redirect'], cookies=self.option['cookie'], auth=self.option['auth'], proxies=self.option['proxy'], timeout=self.option['timeout'], headers=self.option['header'], verify=self.option['ssl'], files=upload_data, stream=True)
elif self.option['json']:
r: requests.models.Response = instance(url, json=self.option['payload'], allow_redirects=self.option['redirect'], cookies=self.option['cookie'], auth=self.option['auth'], proxies=self.option['proxy'], timeout=self.option['timeout'], headers=self.option['header'], verify=self.option['ssl'], stream=True)
else:
r: requests.models.Response = instance(url, data=self.option['payload'], allow_redirects=self.option['redirect'], cookies=self.option['cookie'], auth=self.option['auth'], proxies=self.option['proxy'], timeout=self.option['timeout'], headers=self.option['header'], verify=self.option['ssl'], stream=True)
if self.option['debug'] and not self.option['info']:
print(f'\n\033[35m[response headers]\033[0m\n\n'+'\n'.join([f'\033[34m{k}\033[0m: {v}' for k, v in r.headers.items()])+'\n', file=sys.stderr)
if not self.parse.is_success_status(r.status_code):
return
if self.option['check_only'] and not self.option['recursive']:
print(f'{url} ... \033[32mExists\033[0m')
return
h = history(r.url)
if self.option['recursive']:
if self.option['filename'] is os.path.basename:
self.option['filename']: str = '.'
if self.option['check_only'] or self.option['filename'] is not None and not os.path.isfile(self.option['filename']):
if not os.path.isdir(self.option['filename']):
os.mkdir(self.option['filename'])
cwd = os.getcwd()
os.chdir(self.option['filename'])
self.log(20, 'parsing...')
res = self.parse.spider(r, h=h, session=self.session)
self.log(20, 'download... '+'\033[32m'+'done'+'\033[0m')
self.start_conversion(res)
os.chdir(cwd)
return
else:
self.log(40, 'the output destination is not a directory or not set')
sys.exit(1)
elif self.option['info']:
self._print(r, [r.headers], file=self.get_fmt(r))
return
elif self.option['search']:
result = self.parse.html_extraction(r.text, self.option['search'])
save_filename = self.get_fmt(r)
if save_filename:
with open(save_filename, 'w') as f:
f.write(result)
else:
print(result)
return
elif self.option['only_body']:
try:
s = bs(r.content, self.parser)
save_filename = self.get_fmt(r)
if save_filename:
with open(save_filename, 'w') as f:
f.write(s.text)
else:
print(s.text)
except Exception as e:
self.log(40, e)
return
length = r.headers.get('content-length')
save_filename = self.get_fmt(r)
if save_filename:
if length:
with open(save_filename, 'wb') as f:
self.save(f.write, length, r)
else:
with open(save_filename, 'wb') as f:
f.write(r.content)
else:
self.save(tqdm.write, length, r)
def get_fmt(self, r):
if self.option['filename']:
if self.option['filename'] is os.path.basename:
save_filename = self.parse.get_filename(r.url)
elif os.path.isdir(self.option['filename']):
save_filename: str = os.path.join(self.option['filename'], self.parse.get_filename(r.url))
else:
save_filename: str = self.option['filename']
return save_filename
else:
return None
def save(self, write, length, r):
if write == tqdm.write:
try:
if 1048576 <= int(length) and not self.ask_continue("The output will be large, but they will be printed to stdout.\nContinue?"):
return
except:
pass
with tqdm(total=int(length) if length else None, unit="B", unit_scale=True) as p:
for b in r.iter_content(chunk_size=16384):
write(b.decode(errors='backslashreplace'), end='')
p.update(len(b))
else:
with tqdm(total=int(length) if length else None, unit="B", unit_scale=True) as p:
for b in r.iter_content(chunk_size=16384):
write(b)
p.update(len(b))
def _print(self, response, output=None, file=None) -> None:
if file:
sys.stdout = open(file, 'w')
tqdm.write('\n\033[35m[histories of redirect]\033[0m\n')
if not response.history:
tqdm.write('-')
else:
for h in response.history:
tqdm.write(h.url)
tqdm.write('↓')
tqdm.write(response.url)
tqdm.write('\033[35m[cookies]\033[0m\n')
if not response.cookies:
tqdm.write('-')
else:
for c in response.cookies:
tqdm.write(f'\033[34m{c.name}\033[0m: {c.value}')
tqdm.write('\n\033[35m[response headers]\033[0m\n')
for i in output:
if isinstance(i, (str, bytes)):
tqdm.write(str(i), end='')
else:
for k, v in i.items():
tqdm.write(f'\033[34m{k}\033[0m: {v}')
sys.stdout.flush()
if file:
sys.stdout.close()
sys.stdout = sys.__stdout__
def _split_list(self, array, N):
n = math.ceil(len(array) / N)
return [array[index:index+n] for index in range(0, len(array), n)]
def start_conversion(self, info: tuple) -> None:
"""
Start the file path conversion
"""
if self.option['conversion'] and self.option['body']:
self.log(20, 'convert... ')
self.local_path_conversion(info)
self.log(20, 'convert... '+'\033[32m' + 'done' + '\033[0m')
def recursive_download(self, url: str, source: bytes or str, number: int=0) -> str:
"""
Save a file found in the HTML to disk
"""
exts = self.parse.splitext(self.parse.delete_query(url))
# Decide the save filename based on the format string
save_filename: str = self.option['formated'].replace('%(file)s', ''.join(self.parse.splitext(self.parse.get_filename(url)))).replace('%(num)d', str(number)).replace('%(ext)s', exts[1].lstrip('.'))
if os.path.isfile(save_filename) and not self.ask_continue(f'{save_filename} has already existed\nCan I overwrite?'):
return save_filename
while True:
try:
if isinstance(source, str):
with open(save_filename, 'w') as f:
f.write(source)
else:
with open(save_filename, 'wb') as f:
f.write(source)
sleep(0.5)
break
except Exception as e:
# If an error occurs, log a warning and ask on standard input whether to continue [y/n]
self.log(30, e)
if self.ask_continue('continue?'):
continue
else:
return
if self.option['debug']:
self.log(20, f'saved: {url} => {os.path.abspath(save_filename)}')
return save_filename
def local_path_conversion(self, conversion_urls) -> None:
if self.option['conversion'] and self.option['body']:
if self.option['multiprocess']:
to_path = list(conversion_urls.values())
splited_path_list = self._split_list(to_path, 4) # split into 4 chunks
processes: list = []
for path in splited_path_list[1:]:
# Convert three of the chunks in subprocesses
# The remaining chunk is converted in the main process
p = Process(target=self.conversion_path, args=(path, conversion_urls, self.option['formated']))
p.start()
processes.append(p)
try:
self.conversion_path(splited_path_list[0], conversion_urls, self.option['formated'])
finally:
for n, p in enumerate(processes):
# Wait for every subprocess we created to finish
p.join()
self.log(20, f'#{n+1}'+'\033[32m'+'done'+'\033[0m')
else:
self.conversion_path(list(conversion_urls.values()), conversion_urls, self.option['formated'])
def conversion_path(self, task, all_download_data, save_fmt: str) -> None:
# Convert URL references to local paths
for path in task:
while True:
try:
if not path.endswith('.html'):
break
with open(path, 'r') as f:
source: str = f.read()
for from_, to in all_download_data.items():
source = source.replace(from_, to)
with open(path, 'w') as f:
f.write(source)
if self.option['debug']:
self.log(20, f"converted '{path}'")
break
except Exception as e:
self.log(30, f'pid: {os.getpid()} {e}')
if self.ask_continue('continue?'):
continue
else:
break
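# Read a single keypress without waiting for Enter: msvcrt on Windows, raw-mode termios elsewhere.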
if platform.system() == 'Windows':
def receive(self):
result = msvcrt.getch()
return result.decode(errors='ignore').lower()
else:
def receive(self):
fd = sys.stdin.fileno()
old = termios.tcgetattr(fd)
new = termios.tcgetattr(fd)
new[3] &= ~termios.ICANON
new[3] &= ~termios.ECHO
try:
termios.tcsetattr(fd, termios.TCSANOW, new)
result = sys.stdin.read(1).lower()
finally:
termios.tcsetattr(fd, termios.TCSANOW, old)
return result
def ask_continue(self, msg) -> bool:
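# Show `msg` and read a y/N answer one keystroke at a time; returns True for 'y' or 'yes'.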
while True:
tqdm.write(f'{msg}[y/N]\n')
res = ''
answer = self.receive()
while answer != '\n':
res += answer
tqdm.write(answer, end='')
sys.stdout.flush()
answer = self.receive()
if res in {'y', 'n', 'yes', 'no'}:
break
print('\033[1A\r', end='')
print('\033[1A\r\033[0J', end='')
print('\033[1A\r\033[0J', end='', file=sys.stderr)
sys.stdout.flush()
sys.stderr.flush()
return res in {'y', 'yes'}
def tor(port=9050):
return {'http': f'socks5://127.0.0.1:{port}', 'https': f'socks5://127.0.0.1:{port}'}
def help() -> None:
print("""
<usage>
prop <option> URL [URL...]
if you want to read the URL from standard input, please use '-' instead of URL
<List of options>
-o, --output [file path]
Specify the output destination file
Default setting is standard output
-O
Download with the same name as the download source file name
-i, --ignore
Ignore the timeout even if one is set
-t, --timeout [timeout time (number)]
Set the timeout time
Please specify number
Also, the -i option takes precedence over this option
-x, --method [method]
Communicate by specifying the communication method
The default is get
Communication that can be specified with -x, --method option
- get
- post
- delete
- put
-S, --ignore-SSL
Ignore SSL certificate validation
-d, --data param1=value1 param2=value2
Specify the data and parameters to send
Specify as follows
prop -d q=hogehoge hl=fugafuga URL
Please specify the -j option when sending in json format
-j, --json
Send data in json format
-H, --header HeaderName1=HeaderInformation1 HeaderName2=HeaderInformation2
Communicate by specifying the header
-a, --fake-user-agent [BrowserName]
Use an automatically generated User-Agent
You can also specify a browser name to generate a User-Agent for that browser
-c, --cookie cookie name 1 = information 1 cookie name 2 = information 2
Communicate by specifying the cookies
-X, --proxy [proxy]
Specify the proxy to use for communication
--tor [port number (optional)]
Use tor as a proxy
If you omit the port number, 9050 will be used
Note that you need to start tor before using this option
Windows:
Just run tor.exe
Mac:
Please enter the following command to start tor
$ brew services start tor
Linux:
Please enter the following command to start tor
$ sudo service tor start
-F, --information
Outputs only status code, redirect history, cookie information, response header information
If you specify this option and want to output to a file, use > (redirect) instead of the -o option
-s, --search-words [words]
Extracts and outputs the code such as the specified tag, class, id, etc. from the source code of the site
If you specify more than one, separate them with ',' (don't use a space)
Example of use
prop -s tags=a,img,script class=test [URL]
>>> Extract and display the code of a tag, img tag, and script tag from the test class
Also, if you specify limit=number or use the -M, --limit option, only the specified number of matches will be extracted
Example of use
prop -s tags=a limit=2 [URL]
>>> Extract a tag from the top to the second
Below is an example of attribute specification (there are others)
class=class name
id=id
text=Contents of tag(character string)
tags=tag name
href=reference
src=reference
And, you can also use the css selector without using the above
prop -s "a, script" [URL]
-Y, --only-body
Show only the body text if the content is HTML
(not the body tag itself)
-M, --limit [num]
Limit the number of '-s', '--search-words' results or the number of recursively downloaded files (-r, --recursive option)
-e, --no-catch-error
No output even if an error occurs
-R, --read-file [file path]
Reads the URL to download from the specified file
-B, --basic-auth [user id] [password]
Perform Basic authentication
-l, --no-redirect
Disable redirection
-u, --upload [file path] [form (optional)]
You can specify the file to upload at the time of post (multiple files cannot be specified)
-D, --debug
Display detailed information at the time of request
-----Below are the options related to recursive downloads-----
-r, --recursive [Recursion count (optional)]
Recursively download site text links
When specifying this option, be sure to specify the output destination with the -o option (specify "directory" instead of file)
Also, if you specify a directory that doesn't exist, a new one will be created
If you don't specify the number of recursion, it will be executed as if 1 was specified
Also, if the -nE option isn't specified, local path conversion will be performed automatically
-nc, --no-content
Don't download images
-nb, --no-body
Download only images (if this option is specified, the recursion count is treated as 1 even if another value is specified)
-np, --no-parent
Don't download the parent directory of the source URL
-nE, --no-conversion
Don't convert web page URL references to local paths
-dx, --download-external
Also download external address sites
-n, --download-filename [string]
Only download files whose names include the specified string
-f, --format [format]
You can specify the format of the file save name at the time of recursive download
If "%(file)s" or "%(num)d" aren't included in the character string, it won't be applied because saved name isn't changed for each file
Ex: Suppose there are text links https://example.com/2.html and https://example.com/3.html in https://example.com
prop -r -f "%(num)d-%(root)s-%(file)s" https://example.com
>>> https://example.com saved as 0-example.com, http://example.com/2 saved as 1-example.com-2.html, http://example.com/3 saved as 2-example.com-3.html
prop -r -f "%(num)d.%(ext)s" https://www.example.com
>>> https://example.com saved as 0.html, https://example.com/2.html saved as 1.html, https://example.com/3.html saved as 2.html
Specifiable format
- %(root)s
Hostname
- %(file)s
Web page file name (character string after the last '/' in the URL of the site)
And, this is automatically given an extension
- %(ext)s
File extension (not including '.')
- %(num)d
Consecutive numbers
-I, --interval [seconds]
Specifies the interval for recursive downloads
The default is 1 second
-m, --multiprocess
Use multiprocessing when converting URL references in downloaded files to local paths
This greatly reduces the processing time
Recommended
-nd, --no-downloaded
Don't download URLs recorded in the download history
This option doesn't work properly if you delete the files under {history_directory} (even if you delete them, they will be regenerated the next time you download)
-----The following special options-----
-V, --version
Show the version that you are using
--purge-log
Remove log file
--purge-history
Remove all histories
--purge-cache
Remove all caches
-C, --check
It doesn't download, only checks if the specified URL exists
Checks recursively when used with the -r option
--config-file
Show the config file
--log-file
Show the file which logs are written
--history-directory
Show the directory which files which histories are written are stored
--cache-directory
Show the directory which caches(stylesheet) were stored
-U, --upgrade
Update prop itself
--update-cache
Update downloaded caches
Also, if you use this option in a directory that contains a 'styles' directory, the files in 'styles' will be updated as well
-p, --parse [file path (optional)]
Get HTML from file or standard input and parse it
You can use the -s option to specify the search tag, class, and id
If you specify a URL when you specify this option, an error will occur
[About parser and default settings]
The default HTML parser is html.parser, but you can also use an external parser
When using lxml
(1) Enter "pip install lxml" to install lxml
(2) Change the value of "parser" in {config_file} as follows
{
"parser": "lxml"
}
You can also change the default settings by changing the contents of {config_file}
Setting Example
{
"timeout": (3, 10),
"header": {
"User-Agent": "test"
},
"proxy": {
"http": "https://IPaddress:PortNumber",
"https": "https://IPaddress:PortNumber"
},
}
The options that can be changed are as follows
{
"types": "get",
"timeout": [3.0, 60.0],
"redirect": true,
"search": false,
"header": null,
"cookie": null,
"proxy": null,
"auth": null,
"recursive": 0,
"body": true,
"content": true,
"conversion": true,
"reconnect": 5,
"caperror": true,
"noparent": false,
"no_downloaded": false,
"interval": 1,
"format": "%(file)s",
"info": false,
"multiprocess": false,
"ssl": true,
"parser": "html.parser",
"no_dl_external": true,
"save_robots": true // this recommended to specify true
}
""".replace("{config_file}", setting.config_file).replace("{log_file}", setting.log_file).replace('{history_directory}', history.root))
def conversion_arg(args) -> list:
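# Expand bundled short options, e.g. ['-rm'] -> ['-r', '-m'],
# leaving long options and the known two-letter flags (-np, -nc, -nb, -nE, -ns, -nd, -dx, -st) untouched.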
result: list = []
for a in args:
if a.startswith('-') and not a.startswith('--') and 2 < len(a) and not a in {'-np', '-nc', '-nb', '-nE', '-ns', '-nd', '-dx', '-st'}:
results: str = '-'+'\n-'.join(a[1:])
result.extend(results.splitlines())
else:
result.append(a)
return result
def _argsplit(args):
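# Split a command-line string on spaces while keeping quoted substrings together,
# e.g. _argsplit("prop -d 'a b' URL") -> ['prop', '-d', 'a b', 'URL']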
result: list = []
continue_: str = None
a = args.split(' ')
for v in a:
if (v.startswith("'") and not v.endswith("'")) or (v.startswith('"') and not v.endswith('"')):
continue_ = v[0]
s = [v.strip(continue_)]
elif continue_ and v.endswith(continue_):
s.append(v.strip(continue_))
continue_ = None
result.append(' '.join(s))
elif continue_:
s.append(v)
else:
result.append(v.strip("'\""))
return result
def argument() -> (list, dict, logging.Logger.log):
option: setting = setting()
option.config_load()
skip: int = 1
url: list = []
arg = conversion_arg(sys.argv)
if len(arg) == 1:
print("""
prop <options> URL [URL...]
\033[33mIf you want to see the help message, use the '-h' or '--help' option\033[0m""")
sys.exit()
for n, args in enumerate(arg):
if skip:
skip -= 1
continue
if args == '-h' or args == '--help':
help()
sys.exit()
elif args == '-V' or args == '--version':
print(str(VERSION))
sys.exit()
elif args == '-o' or args == '--output':
# 出力先ファイルの設定
try:
filename: str = arg[n+1]
except IndexError:
error.print(f"{args} [filename]\nPlease specify value of '{args}'")
if filename != '-':
option.config('filename', os.path.join('.', filename))
option.config('output', False)
skip += 1
elif args == '-O':
option.config('filename', os.path.basename)
option.config('output', False)
elif args == '-t' or args == '--timeout':
try:
timeout: int = arg[n+1]
except IndexError:
error.print(f"{args} [timeout]\nPlease specify value of '{args}'")
if option.options.get('notimeout') is None:
try:
option.config('timeout', float(timeout))
except ValueError:
error.print(f"'{timeout}' isn't int or float\nPlease specify int or float")
skip += 1
elif args == '-i' or args == '--ignore':
option.config('timeout', None)
option.config('notimeout', True)
elif args == '-x' or args == '--method':
try:
method = arg[n+1].lower()
except IndexError:
error.print(f"{args} [method]\nPlease specify value of '{args}'")
if method in {'get', 'post', 'put', 'delete'}:
option.config('types', method)
else:
error.print(f"'{method}' is unknown method")
skip += 1
elif args == '-S' or args == '--ignore-SSL':
option.config('ssl', False)
elif args == '-a' or args == '--fake-user-agent':
try:
_stderr = sys.stderr
with open(os.devnull, "w") as null:
sys.stderr = null
ua = UserAgent()
sys.stderr = _stderr
except Exception as e:
sys.stderr = _stderr
error.print(str(e))
try:
fake = ua[arg[n+1]]
skip += 1
except (IndexError, FakeUserAgentError):
fake = ua.random
option.options['header']['User-Agent'] = fake
elif args == '-d' or args == '-H' or args == '--data' or args == '--header' or args == '-c' or args == '--cookie':
params: dict = dict()
header: dict = dict()
for d in arg[n+1:]:
i = d.split('=', 1)
if len(i) == 2:
if args == '-d' or args == '--data':
params[i[0]] = i[1]
else:
header[i[0]] = i[1]
skip += 1
else:
break
if not params and not header:
error.print(f"{args} [Name=Value] [Name=Value]...\nPlease specify the value of the '{args}' option")
if args == '-d' or args == '--data':
option.config('payload', params)
elif args == '-c' or args == '--cookie':
option.config('cookie', params)
else:
option.options['header'].update(header)
elif args == '-j' or args == '--json':
option.config('json', True)
elif args == '-s' or args == '--search-words':
try:
word = {'words': {}, 'limit': None}
for n, i in enumerate(arg[n+1:]):
fl = i.split('=', 2)
if (n == 0 and len(fl) == 1) or re.match(r'.*\[.*=.*\]$', i):
word['css'] = i
skip += 1
break
elif len(fl) == 2:
if fl[0] != 'limit' and fl[0] != 'tags':
word['words'][fl[0]] = fl[1].split(',')
elif fl[0] == 'tags':
word['tags'] = fl[1].split(',')
else:
option.config('limit', int(fl[1]))
skip += 1
else:
break
option.config('search', word)
except IndexError:
error.print(f"The specifying the argument of the '{args}' option is incorrect")
except ValueError:
error.print(f'{fl[1]} is not a number\nPlease specify a number')
elif args == '-Y' or args == '--only-body':
option.config('only_body', True)
elif args == '-l' or args == '--no-redirect':
option.config('redirect', False)
elif args == '-D' or args == '--debug':
option.config('debug', True)
elif args == '-u' or args == '--upload':
try:
path = arg[n+1]
skip += 1
except IndexError:
error.print(f"{args} [filepath]\nPlease specify value of '{args}'")
try:
form = arg[n+2]
skip += 1
except IndexError:
form = None
if os.path.exists(path):
option.config('upload', (path, form))
else:
error.print(f"The existence couldn't be confirmed: {path}")
option.config('types', 'post')
elif args == '-X' or args == '--proxy':
try:
proxy_url: str = arg[n+1]
except IndexError:
error.print(f"{args} [Proxy]\nPlease specify value of '{args}'")
option.config('proxy', {"http": proxy_url, "https": proxy_url})
skip += 1
elif args == '-R' or args == '--read-file':
try:
file: str = arg[n+1]
except IndexError:
error.print(f"{args} [filepath]\nPlease specify value of '{args}'")
urls: list = []
options: list = []
with open(file, 'r') as f:
instruct = list(filter(lambda s: s != '', f.read().splitlines()))
for n, a in enumerate(instruct):
del sys.argv[1:]
sys.argv.extend(_argsplit(a))
url, log, option = argument()
urls.append(url)
options.append(option)
return urls, log, options
elif args == '-B' or args == '--basic-auth':
try:
user: str = arg[n+1]
password: str = arg[n+2]
option.config('auth', HTTPBasicAuth(user, password))
skip += 2
except:
error.print(f"{args} [UserName] [Password]\nThe specifying the argument of the '{args}' option is incorrect")
elif args == '-r' or args == '--recursive':
try:
number: int = int(arg[n+1])
skip += 1
except (ValueError, IndexError):
number: int = 1
option.config('recursive', number)
result1, result2 = ('-nc' in arg or '--no-content' in arg), ('-nb' in arg or '--no-body' in arg)
if result1:
option.config('content', False)
if result2:
option.config('body', False)
if result1 and result2:
error.print("'-nc' and '-nb' options cannot be used together")
sys.exit(1)
elif args == '-st' or args == '--start':
try:
option.config("start", arg[n+1])
skip += 1
except IndexError:
error.print(f"{args} [StartName]\nPlease specify value of '{args}'")
elif args == '-n' or args == '--download-filename':
try:
option.config('download_name', arg[n+1])
skip += 1
except IndexError:
error.print(f"{args} [string]\nPlease specify value of '{args}'")
elif args == '-np' or args == '--no-parent':
option.config('noparent', True)
elif args in {'-nc', '-nb', '--no-content', '--no-body', '--update-cache', '-U', '--upgrade'}:
continue
elif args == '-M' or args == '--limit':
try:
limit = int(arg[n+1])
skip += 1
except IndexError:
error.print(f"{args} [limit]\nPlease specify value of '{args}'")
except ValueError:
error.print('Please specify a number for the value of limit')
option.config('limit', limit)
elif args == '-e' or args == '--no-catch-error':
option.config('caperror', False)
elif args == '-dx' or args == '--download-external':
option.config('no_dl_external', False)
elif args == '-nE' or args == '--no-conversion':
option.config('conversion', False)
elif args == '-nd' or args == '--no-downloaded':
option.config('no_downloaded', True)
elif args == '-f' or args == '--format':
try:
string: str = arg[n+1]
except IndexError:
error.print(f"{args} [format]\nPlease specify value of '{args}'")
if '%(file)s' in string or '%(num)d' in string:
if re.match(r'%\(num\)d[0-9]', string) or ('%(file)s' in string and (not string.endswith('%(file)s') or 1 < string.count('%(file)s'))) or (1 < string.count('%(num)d')) or any(map(string.__contains__, ['%(num)d%(file)s', '%(num)d%(ext)s'])):
print("""\033[33mSorry, about format, there are the following restrictions because it won't be able to generate an accurate serial number
- '%(file)s' and '%(ext)s' format can only be at the end
- '%(num)d' format cannot be included more than one
- Numbers cannot be used immediately after '%(num)d'
- '%(num)d%(file)s' and '%(num)d%(ext)s' cannot be included in the format\033[0m""")
sys.exit(1)
option.config('format', string)
else:
option.log(30, '\033[33mThe format you specified isn\'t applied because "%(file)s" or "%(num)d" isn\'t in it\nSee the help message for more information\033[0m')
skip += 1
elif args == '-F' or args == '--information':
option.config('info', True)
elif args == '-I' or args == '--interval':
try:
interval: float = float(arg[n+1])
option.config('interval', interval)
skip += 1
except IndexError:
error.print(f"{args} [interval]\nPlease specify value of '{args}'")
except ValueError:
error.print(f"Please specify int or float to value of '{args}'")
elif args == '-m' or args == '--multiprocess':
option.config('multiprocess', True)
elif args == '--tor':
try:
port = int(arg[n+1])
skip += 1
except (IndexError, ValueError):
port = 9050
Tor = tor(port)
option.config('proxy', Tor)
elif args == '-C' or args == '--check':
option.config('check_only', True)
option.config('filename', os.getcwd())
elif args == '-p' or args == '--parse':
try:
path = arg[n+1]
with open(path, 'r') as f:
html = f.read()
skip += 1
except (IndexError, FileNotFoundError):
html = sys.stdin.read()
option.config('parse', html)
elif args == "--config-file":
print(setting.config_file)
sys.exit()
elif args == "--log-file":
print(setting.log_file)
sys.exit()
elif args == "--history-directory":
print(history.root)
sys.exit()
elif args == "--cache-directory":
print(cache.root)
sys.exit()
elif args == "--purge-log":
if os.path.isfile(setting.log_file):
os.remove(setting.log_file)
print('done')
else:
print('No log file')
sys.exit()
elif args == "--purge-history":
if os.path.isdir(history.root):
files = len(glob.glob(os.path.join(history.root, "**"), recursive=True))
shutil.rmtree(history.root)
print(f'Removed: {files}')
else:
print('No history')
sys.exit()
elif args == "--purge-cache":
if os.path.isdir(cache.root):
files = len(glob.glob(os.path.join(cache.root, "**"), recursive=True))
shutil.rmtree(cache.root)
print(f'Removed: {files}')
else:
print('No cache')
sys.exit()
else:
url.append(args)
return url, option.fh.file, option.options
def main() -> None:
url, log_file, option = argument()
if '--update-cache' in sys.argv:
cache.update(option if isinstance(option, dict) else setting.options)
sys.exit()
elif '-U' in sys.argv or '--upgrade' in sys.argv:
if _binary:
res = requests.get("https://api.github.com/repos/mino-38/prop/releases", timeout=option['timeout'], proxies=option['proxy'], headers=option['header'], verify=option['ssl'])
new_version = res.json()[0]["tag_name"]
if VERSION < parse(new_version):
with open(os.path.join(tempfile.gettempdir(), "prop-updater.bin"), "wb") as f, open(os.path.join(tempfile.gettempdir(), "prop-updater.sh"), "w") as s:
f.write(requests.get("https://github.com/mino-38/prop/releases/latest/download/prop", timeout=option['timeout'], proxies=option['proxy'], headers=option['header'], verify=option['ssl']).content)
s.write("""
function on_error () {
echo -e "\\n\\033[33mFaild update\\nIf you run as root, this problem may solve\\033[0m"
exit 1
}
trap on_error ERR
mv %(new_file)s %(bin_file)s
chmod a+rx %(bin_file)s
echo "Updated to version '%(version)s'"
rm %(script)s
""" % {"bin_file": sys.executable, "new_file": f.name, "script": s.name, "version": new_version})
subprocess.Popen("bash {}".format(s.name), shell=True, close_fds=True)
else:
subprocess.run(["pip", "install", "--upgrade", "prop-request"])
sys.exit()
for index, link in enumerate(url):
if link == '-':
link = sys.stdin.readline().rstrip()
elif not parser.is_url(link):
link = 'http://' + link
url[index] = link
with log_file:
if url != [] and not (isinstance(option, dict) and option['parse']):
if isinstance(option, list):
dl: downloader = downloader(url[0], option[0], option[0]['parser'])
dl.start()
for u, o in zip(url[1:], option[1:]):
dl.url = u
dl.option = o
dl.parse.option = o
dl.start()
else:
dl: downloader = downloader(url, option, option['parser'])
dl.start()
elif option['parse']:
dl: downloader = downloader(url, option, option['parser'])
if option['only_body']:
s = bs(option['parse'], dl.parser)
result = s.text
else:
result = dl.parse.html_extraction(option['parse'], option['search'])
if option['filename']:
with open(option['filename'], 'w') as f:
f.write(result)
else:
print(result)
elif url == []:
error.print('Missing value for URL\nPlease specify URL')
if __name__ == '__main__':
main()
|
python
|
import os
import sys
import getpass
import logging
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import SessionNotCreatedException
import colorama
from .database import Database
from . import constants
from .progress_bar import ProgressBar
from . import helper
from . import get_data
from . import actions
logger = logging.getLogger('__name__')
class Scraper:
def __init__(self, headful, download_stories, max_download, login_username, login_password):
self.__c_fore = colorama.Fore
self.__c_style = colorama.Style
colorama.init()
self.__database = Database()
self.__database.create_tables()
self.__headful = headful
self.__download_stories = download_stories
self.__max_download = max_download
self.__login_username = login_username
self.__login_password = login_password
self.__is_logged_in = False
self.__cookies_accepted = False
self.__web_driver = self.__start_web_driver()
if self.__login_username:
self.__init_login()
if self.__max_download == 0:
print(self.__c_fore.RED
+ 'add the argument \'--max 3\' to specify the maximum number of posts to scrape'
+ self.__c_style.RESET_ALL)
self.stop()
if not self.__is_logged_in and self.__download_stories:
print(self.__c_fore.RED + 'you need to be logged in to scrape stories' + self.__c_style.RESET_ALL)
self.stop()
def __start_web_driver(self):
""" Start the web driver """
driver_options = ChromeOptions()
driver_options.add_experimental_option('excludeSwitches', ['enable-logging'])
driver_options.add_argument('--mute-audio')
driver_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36')
driver_options.add_argument('--incognito')
driver_options.headless = not self.__headful
try:
driver = webdriver.Chrome(service_log_path=os.devnull, options=driver_options)
except SessionNotCreatedException as err:
logger.error(err)
print('could not start session')
self.stop()
except WebDriverException as err:
logger.error('Launch Google Chrome error: %s' % err)
print('could not launch Google Chrome: ')
print('make sure Google Chrome is installed on your machine')
self.stop()
else:
driver.maximize_window()
driver.set_page_load_timeout(600)
return driver
def __check_if_ip_is_restricted(self):
"""
Check if the official Instagram profile can be seen.
If not, then Instagram has temporarily restricted the ip address.
"""
if get_data.get_id_by_username_from_ig('instagram') is None:
print(self.__c_fore.RED +
'unable to load profiles at this time (IP temporarily restricted by Instagram)' + '\n' +
'try to login with a DUMMY account to scrape' +
self.__c_style.RESET_ALL)
self.stop()
def __init_login(self):
""" Login """
if self.__login_username and not self.__is_logged_in:
login_password = self.login_password
if login_password is None:
sys.stdout.write('\n')
print('login with a DUMMY account, never use your personal account')
login_password = getpass.getpass(prompt='enter your password: ')
actions.Login(self, self.__login_username, login_password).do()
print('login success')
def __init_scrape_stories(self, user):
""" Start function for scraping stories """
print('counting stories, please wait...')
stories_amount = actions.CountStories(self, user).do()
if stories_amount > 0:
print(self.__c_fore.GREEN
+ str(stories_amount) + ' image(s)/video(s) will be downloaded from stories: '
+ self.__c_style.RESET_ALL)
actions.ScrapeStories(self, user, stories_amount).do()
else:
print('no stories found')
def __filter_post_links(self, user):
"""
Check if the post link is already in the database (if post link was added by scraping a profile).
If yes then skip it.
"""
filtered_post_links = []
shortcode_set = set()
for link in user.post_links:
shortcode = helper.extract_shortcode_from_url(link)
if (shortcode not in shortcode_set) and (not self.__database.video_post_link_exists_by_shortcode(shortcode)):
# if not self.__database.user_post_link_exists(user.username, link):
shortcode_set.add(shortcode)
filtered_post_links.append(link)
return filtered_post_links
def __filter_reel_links(self, user):
"""
Check if the reel link is already in the database (if reel link was added by scraping a profile).
If yes then skip it.
"""
filtered_reel_links = []
shortcode_set = set()
for link in user.reel_links:
shortcode = helper.extract_shortcode_from_url(link)
if (shortcode not in shortcode_set) and (not self.__database.video_post_link_exists_by_shortcode(shortcode)):
# if not self.__database.user_post_link_exists(user.username, link):
shortcode_set.add(shortcode)
filtered_reel_links.append(link)
return filtered_reel_links
def __filter_igtv_links(self, user):
"""
Check if the igtv link is already in the database (if igtv link was added by scraping a profile).
If yes then skip it.
"""
filtered_igtv_links = []
shortcode_set = set()
for link in user.igtv_links:
shortcode = helper.extract_shortcode_from_url(link)
if (shortcode not in shortcode_set) and (not self.__database.video_post_link_exists_by_shortcode(shortcode)):
# if not self.__database.user_post_link_exists(user.username, link):
shortcode_set.add(shortcode)
filtered_igtv_links.append(link)
return filtered_igtv_links
def init_scrape_users(self, users):
""" Start function for scraping users """
helper.create_dir(constants.USERS_DIR)
helper.create_dir("new_posts")
from datetime import datetime
new_post_log_file_timestamp = datetime.utcnow().isoformat().replace(':', '-')
new_post_log_file_name = "new_posts/" + new_post_log_file_timestamp + ".txt"
print("will log new post files to file: ", new_post_log_file_name)
agressive_sleep = True
cur_user_count = 0
for x, user in enumerate(users):
if agressive_sleep:
time.sleep(60)
if not self.__is_logged_in:
self.__check_if_ip_is_restricted()
sys.stdout.write('\n')
cur_user_count = cur_user_count + 1
print('\033[1m' + 'username: ' + user.username + '(', cur_user_count, '/', len(users), ')' + '\033[0;0m')
user.create_user_output_directories()
# Retrieve the id using actions
if self.__is_logged_in:
userid = actions.GetUserId(self, user.username).do()
# Retrieve the id using requests
else:
userid = get_data.get_id_by_username_from_ig(user.username)
# Continue to next user if id not found
if userid is None:
print(self.__c_fore.RED + 'could not load user profile' + self.__c_style.RESET_ALL)
time.sleep(30)
continue
# actions.ScrapeDisplay(self, user).do()
if not self.__database.user_exists(user.username):
self.__database.insert_userid_and_username(userid, user.username)
if self.__is_logged_in and self.__download_stories:
self.__init_scrape_stories(user)
if actions.CheckIfAccountIsPrivate(self, user).do():
print(self.__c_fore.RED + 'account is private' + self.__c_style.RESET_ALL)
continue
if actions.CheckIfHasLink(self, user, '/reels').do():
if agressive_sleep:
time.sleep(30)
print('retrieving reel links from reels ' + user.profile_link + 'reels/' +', please wait... ')
user.reel_links = actions.GrabReelLinks(self, user.profile_link + 'reels/').do()
user.reel_links = self.__filter_reel_links(user)
if len(user.reel_links) <= 0:
print('no new reels to download')
else:
print(self.__c_fore.GREEN + str(len(user.reel_links)) +
' reel(s) will be downloaded: ' + self.__c_style.RESET_ALL)
# progress_bar = ProgressBar(len(user.reel_links), show_count=True)
for link in user.reel_links:
print('Scrape reel link: ' + link)
# actions.InitScrape(self, link, None, user.output_user_reels_path, userid).do()
# progress_bar.update(1)
# progress_bar.close()
print ('write links to file')
helper.append_links_to_file(new_post_log_file_name, user.username, 'Reel', user.reel_links)
for link in user.reel_links:
print ('write ' + link + ' to db')
self.__database.insert_video_post_to_download(user.username, 'Reel', link, new_post_log_file_timestamp)
else:
print ('No reels for user', user.username)
if actions.CheckIfHasLink(self, user, '/channel').do():
if agressive_sleep:
time.sleep(30)
print('retrieving igtv links from igtv ' + user.profile_link + 'channel/' +', please wait... ')
user.igtv_links = actions.GrabIgtvLinks(self, user.profile_link + 'channel/').do()
user.igtv_links = self.__filter_igtv_links(user)
if len(user.igtv_links) <= 0:
print('no new igtvs to download')
else:
print(self.__c_fore.GREEN + str(len(user.igtv_links)) +
' igtv(s) will be downloaded: ' + self.__c_style.RESET_ALL)
# progress_bar = ProgressBar(len(user.igtv_links), show_count=True)
for link in user.igtv_links:
print('Scrape igtv: ' + link)
# actions.InitScrape(self, link, None, user.output_user_igtvs_path, userid).do()
# progress_bar.update(1)
# progress_bar.close()
print ('write links to file')
helper.append_links_to_file(new_post_log_file_name, user.username, 'Igtv', user.igtv_links)
for link in user.igtv_links:
print ('write ' + link + ' to db')
self.__database.insert_video_post_to_download(user.username, 'Igtv', link, new_post_log_file_timestamp)
else:
print ('No igtv for user', user.username)
if agressive_sleep:
time.sleep(30)
# if not actions.CheckIfProfileHasPosts(self, user).do():
# print(self.__c_fore.RED + 'no posts found' + self.__c_style.RESET_ALL)
# continue
print('retrieving post links from profile ' + user.profile_link +', please wait... ')
user.post_links = actions.GrabPostLinks(self, user.profile_link).do()
user.post_links = self.__filter_post_links(user)
if len(user.post_links) <= 0:
print('no new posts to download')
else:
print(self.__c_fore.GREEN + str(len(user.post_links)) +
' post(s) will be downloaded: ' + self.__c_style.RESET_ALL)
# progress_bar = ProgressBar(len(user.post_links), show_count=True)
for link in user.post_links:
print('Scrape link: ' + link)
# actions.InitScrape(self, link, None, user.output_user_posts_path, userid).do()
# progress_bar.update(1)
# progress_bar.close()
print ('write links to file')
helper.append_links_to_file(new_post_log_file_name, user.username, 'Post', user.post_links)
for link in user.post_links:
print ('write ' + link + ' to db')
self.__database.insert_video_post_to_download(user.username, 'Post', link, new_post_log_file_timestamp)
def init_scrape_tags(self, tags, tag_type):
""" Start function for scraping tags """
helper.create_dir(constants.TAGS_DIR)
for tag in tags:
if not self.__is_logged_in:
self.__check_if_ip_is_restricted()
sys.stdout.write('\n')
print('\033[1m' + 'tag: #' + tag.tagname + '\033[0;0m')
tag.create_tag_output_directories()
link = constants.INSTAGRAM_EXPLORE_URL.format(tag.tagname)
self.__database.insert_tag(tag.tagname)
if tag_type == constants.TAG_TYPE_TOP:
actions.ScrapeTopTags(self, link, tag).do()
elif tag_type == constants.TAG_TYPE_RECENT:
actions.ScrapeRecentTags(self, link, tag).do()
def stop(self):
""" Stop the program """
# if self.__is_logged_in:
# actions.Logout(self, self.__login_username).do()
try:
self.__web_driver.quit()
except AttributeError as err:
logger.error('Quit driver error: %s' % err)
self.__database.close_connection()
sys.exit(0)
@property
def is_logged_in(self):
return self.__is_logged_in
@is_logged_in.setter
def is_logged_in(self, is_logged_in):
self.__is_logged_in = is_logged_in
@property
def cookies_accepted(self):
return self.__cookies_accepted
@cookies_accepted.setter
def cookies_accepted(self, accepted):
self.__cookies_accepted = accepted
@property
def web_driver(self):
return self.__web_driver
@property
def login_username(self):
return self.__login_username
@property
def login_password(self):
return self.__login_password
@property
def database(self):
return self.__database
@property
def max_download(self):
return self.__max_download
|
python
|
from armstrong.core.arm_sections import utils
from armstrong.core.arm_sections.models import Section
from ._utils import ArmSectionsTestCase, override_settings
from .support.models import SimpleCommon
def rel_field_names(rels):
return [rel.field.name for rel in rels]
class get_configured_item_modelTestCase(ArmSectionsTestCase):
def test_returns_configured_model(self):
m = "%s.FooBar" % self.__class__.__module__
with self.settings(ARMSTRONG_SECTION_ITEM_MODEL=m):
module, model = utils.get_module_and_model_names()
self.assertEqual(self.__class__.__module__, module)
self.assertEqual("FooBar", model)
def test_provides_default_value(self):
with self.settings(ARMSTRONG_SECTION_ITEM_MODEL=False):
module, model = utils.get_module_and_model_names()
self.assertEqual("armstrong.apps.content.models", module)
self.assertEqual("Content", model)
class get_item_model_classTestCase(ArmSectionsTestCase):
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.SimpleCommon')
def test_returns_specified_class(self):
self.assertEqual(SimpleCommon, utils.get_item_model_class())
class get_section_relationsTestCase(ArmSectionsTestCase):
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.SimpleCommon')
def test_returns_relation_for_foreign_key_only(self):
self.assertEqual(
['primary_section'],
rel_field_names(utils.get_section_relations(Section)))
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.ComplexCommon')
def test_returns_relations_for_foreign_key_and_many_to_many(self):
self.assertEqual(
['primary_section', 'related_sections'],
rel_field_names(utils.get_section_relations(Section)))
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.MultipleManyToManyModel')
def test_returns_relations_for_subclass_with_foreign_key_and_m2m(self):
self.assertEqual(
['primary_section', 'related_sections', 'more_sections'],
rel_field_names(utils.get_section_relations(Section)))
class get_section_many_to_many_relationsTestCase(ArmSectionsTestCase):
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.SimpleCommon')
def test_returns_no_relations_for_foreign_key_only(self):
self.assertEqual(
[],
rel_field_names(utils.get_section_many_to_many_relations(Section)))
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.ComplexCommon')
def test_returns_relation_for_foreign_key_and_many_to_many(self):
self.assertEqual(
['related_sections'],
rel_field_names(utils.get_section_many_to_many_relations(Section)))
@override_settings(ARMSTRONG_SECTION_ITEM_MODEL='tests.support.models.MultipleManyToManyModel')
def test_returns_relations_for_subclass_with_foreign_key_and_m2m(self):
self.assertEqual(
['related_sections', 'more_sections'],
rel_field_names(utils.get_section_many_to_many_relations(Section)))
|
python
|
import boto3, ipaddress, os, socket, time
from codeguru_profiler_agent import with_lambda_profiler
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from aws_lambda_powertools import Logger, Tracer
# AWS Lambda Powertools
logger = Logger()
tracer = Tracer()
# Get S3 bucket and setup s3 client
bucketname = os.environ['s3bucket']
s3_client = boto3.client('s3')
# Get SQS queue and setup sqs client
sqs_queue_url = os.environ['sqsqueue']
sqs_client = boto3.client('sqs')
# Set static return headers
headers = {
'Content-Type': 'text/html',
"strict-transport-security": "max-age=31536000; includeSubDomains; preload"
}
# Check if IP address is allow listed for API Gateway
@tracer.capture_method(capture_response = False)
def is_allow_listed(ip):
# Get allow list IP range
allow_list_range = os.environ['ip_allowlist']
if ipaddress.ip_address(ip) in ipaddress.ip_network(allow_list_range):
print("ALLOW - IP " + ip + " in " + allow_list_range)
return True
else:
print("BLOCK - IP " + ip + " not in " + allow_list_range)
return False
# Upload screen shot to S3
@tracer.capture_method(capture_response = False)
def upload_screenshot(tmpfile, bucketname, fname):
s3_client.upload_file(
Filename = tmpfile,
Bucket = bucketname,
Key = fname,
ExtraArgs = {
'StorageClass': 'STANDARD',
'ACL': 'public-read',
'ContentType': 'image/png'
}
)
# Send S3 path URI to SQS queue
@tracer.capture_method(capture_response = False)
def sqs_send(sqs_queue_url, bucketname, fname):
sqs_client.send_message(
QueueUrl = sqs_queue_url,
MessageBody = 'https://s3.amazonaws.com/' + bucketname + '/' + fname,
)
# Generate S3 Signed URL
@tracer.capture_method(capture_response = False)
def generate_signed_url(bucketname, fname):
presigned_url = s3_client.generate_presigned_url(
ClientMethod = 'get_object',
Params = {
'Bucket': bucketname,
'Key': fname
},
ExpiresIn = 3600
)
return presigned_url
# Capture screenshot
@tracer.capture_method(capture_response = False)
def get_screenshot(url, tmpfile):
# Add chromium driver
options = Options()
options.binary_location = '/usr/bin/chromium-browser'
# Add chromium options
options.add_argument('--start-maximized')
options.add_argument('--headless')
options.add_argument('--no-sandbox')
options.add_argument('--single-process')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--user-agent=Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36')
# Get URL using chromium
driver = webdriver.Chrome('/usr/bin/chromedriver', chrome_options = options)
# Get body of website
driver.get(url)
# Get screen dimensions
screenwidth = 1440
screenheight = driver.execute_script("return document.documentElement.scrollHeight")
if screenheight == 0:
screenheight = 1024
# Maximize screen
print('dimensions ' + ' ' + str(screenwidth) + ' ' + str(screenheight))
driver.set_window_size(screenwidth, screenheight)
# Select body and press escape to close some pop ups
body = driver.find_element_by_xpath('/html')
body.send_keys(Keys.ESCAPE)
# Save screenshot
body.screenshot(tmpfile)
# Close chromium
driver.close()
driver.quit()
# Lambda handler
@tracer.capture_lambda_handler(capture_response = False)
@logger.inject_lambda_context(log_event = True)
@with_lambda_profiler(profiling_group_name = os.environ['AWS_CODEGURU_PROFILER_GROUP_NAME'])
def handler(event, context):
# Get start timestamp
startts = time.time()
# Get url from API input
if len(event['rawPath']) > 1:
rawurl = event['rawPath'][1:]
domain = rawurl.split('/')[0]
src_ip = event['requestContext']['http']['sourceIp']
print(src_ip)
# Check if IP address is allow listed
if is_allow_listed(src_ip):
# Check if the dns domain is valid
try:
x = socket.gethostbyname(domain)
print('ip ' + str(x) + ' for ' + rawurl)
# Return error if domain does not return dns record
except:
print('invalid dns ' + rawurl + ', returning error')
return {
"statusCode": 200,
"body": '<html><body><center>invalid URL ' + rawurl + ' submitted</center></body></html>',
"headers": headers
}
# Return error if IP address is not allow listed
else:
print('unauthorized IP ' + src_ip + ', returning error')
return {
"statusCode": 200,
"body": '<html><body><center>not allowed - IP ' + src_ip + '</center></body></html>',
"headers": headers
}
# If no URL is submitted, return error
else:
return {
"statusCode": 200,
"body": '<html><body><center>no URL submitted</center></body></html>',
"headers": headers
}
# Get URL path
url = 'https://' + rawurl
print('getting ' + url)
# Set tmp and file paths
fname = 'screenshots/' + domain + '/' + str(int(startts)) + '-' + rawurl.replace('.', '_').replace('/','-') + '.png'
tmpfile = '/tmp/screen.png'
# Get screenshot
try:
get_screenshot(url, tmpfile)
except Exception as e:
print('error with get screenshot ' + str(e))
return {
"statusCode": 200,
"body": '<html><body><center>error getting - ' + url + '<br /></center></body></html>',
"headers": headers
}
# Upload screenshot to S3
upload_screenshot(tmpfile, bucketname, fname)
# Send SQS message with screenshot url
sqs_send(sqs_queue_url, bucketname, fname)
# Generate S3 pre-signed URL
presigned_url = generate_signed_url(bucketname, fname)
# Get end timestamp
endts = time.time()
timediff = endts - startts
# Return HTML response
return {
"statusCode": 200,
"body": '<html><body><center>' + url + ' - took ' + str(round(timediff, 2)) + ' seconds <br /><img src = ' + presigned_url + '></center></body></html>',
"headers": headers
}
|
python
|
from aicademeCV.skinCV import skinseperator
|
python
|
import os
from get_data import read_params, get_data
import argparse
def load_and_save(config_path):
config = read_params(config_path)
df = get_data(config_path)
df['fixed acidity'].fillna(int(df['fixed acidity'].median()), inplace=True)
df['volatile acidity'].fillna(int(df['volatile acidity'].mean()), inplace=True)
df.dropna(inplace=True)
df.drop_duplicates(inplace=True)
new_cols = [col.replace(" ", "_") for col in df.columns]
replace_map = {'type': {'white': 1, 'red': 2}}
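# Map the 'type' categories to consecutive integers (1..n) based on their category order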
types = df['type'].astype('category').cat.categories.tolist()
replace_map_comp = {'type': {k: v for k, v in zip(types, list(range(1, len(types) + 1)))}}
df.replace(replace_map_comp, inplace=True)
print(df.head())
raw_data_path = config['load_data']['raw_dataset_csv']
df.to_csv(raw_data_path, sep = ',', index = False, header = new_cols)
if __name__ == "__main__":
args = argparse.ArgumentParser()
args.add_argument("--config", default="params.yaml")
parsed_args = args.parse_args()
load_and_save(parsed_args.config)
|
python
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import functools
from unittest.mock import patch
from airflow.www.app import purge_cached_app
def dont_initialize_flask_app_submodules(_func=None, *, skip_all_except=None):
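# Test decorator: patch the listed airflow.www.app init_* steps with no-ops,
# except those named in skip_all_except, so the Flask app can be created cheaply in tests.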
if not skip_all_except:
skip_all_except = []
def decorator_dont_initialize_flask_app_submodules(f):
def no_op(*args, **kwargs):
pass
methods = [
"init_api_experimental_auth",
"init_flash_views",
"init_appbuilder_links",
"init_appbuilder_views",
"init_plugins",
"init_connection_form",
"init_error_handlers",
"init_api_connexion",
"init_api_experimental",
"sync_appbuilder_roles",
"init_jinja_globals",
"init_xframe_protection",
"init_airflow_session_interface",
"init_appbuilder",
]
@functools.wraps(f)
def func(*args, **kwargs):
for method in methods:
if method not in skip_all_except:
patcher = patch(f"airflow.www.app.{method}", no_op)
patcher.start()
purge_cached_app()
result = f(*args, **kwargs)
patch.stopall()
purge_cached_app()
return result
return func
if _func is None:
return decorator_dont_initialize_flask_app_submodules
else:
return decorator_dont_initialize_flask_app_submodules(_func)
|
python
|
import unittest
from exactitude import countries
class CountriesTest(unittest.TestCase):
def test_country_codes(self):
self.assertEqual(countries.clean('DE'), 'de')
self.assertTrue(countries.validate('DE'))
self.assertFalse(countries.validate('DEU'))
self.assertFalse(countries.validate('SU'))
self.assertTrue(countries.validate('XK'))
self.assertTrue(countries.validate('EU'))
def test_country_names(self):
self.assertEqual(countries.clean(None), None)
self.assertEqual(countries.clean('Takatukaland', guess=False), None)
self.assertEqual(countries.clean('Germany'), 'de')
# self.assertEqual(countries.clean('Germani'), 'de')
self.assertEqual(countries.clean('Soviet Union'), 'suhh')
|
python
|
import numpy as np
import pandas as pd
from gaitcalibrate.extract.walk import extract_step
from gaitcalibrate.util.adjust_acceleration import tilt_adjustment
def estimate_walk_speed(
acc,
model,
g2acc=True,
n_skip_edge_step=3,
thd_n_step_each_walk=10,
apply_tilt_adjust=True
):
"""Estimate walking speed by applying `model` on the `acc`.
It assumes that `acc` is an Acceleration object containing
orientation-transformed, non-tilt-adjusted, un-filtered acceleration
data.
Return
------
`output`: esimated speed for each acceleration data point, each step,
and the whole `acc` object
"""
##################
# Get model info #
##################
estimator = model['grid_search_estimator']
scaler = model['scaler']
feature_ext = model['feature_ext']
###################
# Tilt adjustment #
###################
if apply_tilt_adjust:
adj_acc = tilt_adjustment(acc=acc)
else:
adj_acc = acc
###################
# Step extraction #
###################
all_steps = extract_step(
acc=adj_acc,
g2acc=g2acc
)
# Remove edge steps which might not be stable
if n_skip_edge_step > 0:
steps = all_steps[n_skip_edge_step:-n_skip_edge_step]
idx_steps = range(n_skip_edge_step, len(all_steps) - n_skip_edge_step)
else:
steps = all_steps
idx_steps = range(len(all_steps))
# This period is too short
if len(steps) < thd_n_step_each_walk:
return -1
#################
# Step features #
#################
# Feature extraction
x = feature_ext.extract(steps)
# Feature scaling
scaled_x = scaler.transform(X=x)
############################
# Walking speed estimation #
############################
# Estimate walking speed for each step feature
y_pred = estimator.predict(scaled_x)
########################################################
# Estimated walking speed associated with acceleration #
########################################################
# Walking speed associated with acceleration
acc_spd = np.zeros(len(acc.data))
for s, step in enumerate(all_steps):
idx_sample = acc.data[((acc.data['dt'] >= step.data['dt'].values[0]) &
(acc.data['dt'] <= step.data['dt'].values[-1]))].index.values
# Note: subtracting the first index to get around the indexing issues
idx_sample -= acc.data.index.values[0]
# If this step is used to estimate the walking speed, assign estimated walking speed
if s in idx_steps:
acc_spd[idx_sample] = y_pred[s - idx_steps[0]]
# Otherwise, assign 0
else:
acc_spd[idx_sample] = 0
################################################
# Estimated walking speed associated with step #
################################################
# Get timestamp at the middle of each step
mid_step_dt = np.asarray([s.data['dt'].values[len(s.data['dt']) // 2] for s in steps])  # integer index for the middle sample
# Append zero speed at the beginning and the end to mark the beginning and end of each walk
mid_step_dt = np.append(steps[0].data['dt'].values[0], mid_step_dt)
mid_step_dt = np.append(mid_step_dt, steps[-1].data['dt'].values[-1])
y_pred_ext = np.append([0], y_pred)
y_pred_ext = np.append(y_pred_ext, [0])
step_dt = mid_step_dt
step_speed = y_pred_ext
###############################################################
# Estimated walking speed associated with each period of walk #
###############################################################
walk_start_dt = steps[0].data['dt'].values[0]
walk_end_dt = steps[-1].data['dt'].values[-1]
walk_speed = np.average(y_pred)
output = {
"acc_dt": acc.data['dt'].values,
"acc_spd": acc_spd,
"step_dt": step_dt,
"step_spd": step_speed,
"walk_start_dt": walk_start_dt,
"walk_end_dt": walk_end_dt,
"walk_spd": walk_speed
}
return output
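# A minimal usage sketch (hypothetical variable names; `acc` is an Acceleration object and
# `model` is a dict holding 'grid_search_estimator', 'scaler' and 'feature_ext' as used above):
#
#     result = estimate_walk_speed(acc=acc, model=model)
#     if result != -1:
#         print(result['walk_spd'], result['walk_start_dt'], result['walk_end_dt'])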
|
python
|
#!/usr/bin/env python
"""User API for controlling Map job execution."""
from google.appengine.ext import db
from mapreduce import util
# pylint: disable=g-bad-name
# pylint: disable=protected-access
def start(job_config=None,
in_xg_transaction=False):
"""Start a new map job.
Args:
job_config: an instance of map_job.MapJobConfig.
in_xg_transaction: controls what transaction scope to use to start this MR
job. If True, there has to be an already opened cross-group transaction
scope. MR will use one entity group from it.
If False, MR will create an independent transaction to start the job
regardless of any existing transaction scopes.
Returns:
the id of this map job.
Raises:
ValueError: when in_xg_transaction is True but no transaction scope is
detected.
"""
if in_xg_transaction and not db.is_in_transaction():
raise ValueError("Expects an opened xg transaction to start mapreduce.")
# Break circular dependency.
# pylint: disable=g-import-not-at-top
from mapreduce import handlers
return handlers.StartJobHandler._start_map(
name=job_config.job_name,
mapper_spec=job_config._get_mapper_spec(),
mapreduce_params=job_config._get_mr_params(),
queue_name=job_config.queue_name,
hooks_class_name=util._obj_to_path(job_config._hooks_cls),
_app=job_config._app,
in_xg_transaction=in_xg_transaction)
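# A minimal usage sketch (assuming `config` is an already-constructed map_job.MapJobConfig
# instance as described in the docstring):
#
#     job_id = start(job_config=config)
#     # or, inside an already-open cross-group transaction:
#     job_id = start(job_config=config, in_xg_transaction=True)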
|
python
|
from sklearn.ensemble import RandomForestClassifier
from sklearn import datasets
from sklearn.model_selection import train_test_split
from IPython.display import display
import eli5
from eli5.sklearn import PermutationImportance
RANDOM_STATE = 0
# Get Iris data
iris = datasets.load_iris()
X = iris.data
y = iris.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=RANDOM_STATE)
# Create and train Random Forest
model = RandomForestClassifier(random_state=RANDOM_STATE)
model.fit(X_train, y_train)
perm = PermutationImportance(model, random_state=1).fit(X_test, y_test)
display(eli5.show_weights(perm, feature_names=iris.feature_names))
eli5_weights = eli5.explain_weights(model, feature_names=iris.feature_names)
print(eli5_weights)
|
python
|
from mock import patch
import pytest
from s3parq import publish_redshift
from s3parq.testing_helper import setup_custom_redshift_columns_and_dataframe
class MockScopeObj():
def execute(self, schema_string: str):
pass
def scope_execute_mock(mock_session_helper):
pass
class Test():
# Make sure that naming validator throws expected errors
def test_naming_validator(self):
response = publish_redshift._validate_name('my string')
assert not response[0], 'Allowed name to contain spaces'
response = publish_redshift._validate_name('my_string')
assert response[0]
response = publish_redshift._validate_name('WHERE')
assert not response[0], 'Allowed name to be a reserved SQL keyword'
response = publish_redshift._validate_name('@my_string')
assert not response[0], 'Allowed name to start as not an alphanumeric or an underscore'
response = publish_redshift._validate_name(
"asdffdsaasdffdsaasdfasdffdsaasdffdsaasd\
fasdffdsaasdffdsaasdfasdffdsaasdffdsaasdsd\
ffdsaasdffdsaasdfasdffdsaasdffdsaasdfasdffdsaasdffdsaasdf"
)
assert not response[0], 'Allowed a name that is too long'
# Make sure that the redshift-specific validator throws the right errors
def test_validator(self):
schema_name_good = "my_string"
bad_schema_names = ["my string", "#t3rr1bl13_n4m3", "", "select"]
database_name_good = 'my_database'
bad_database_names = ['my database',
"#t3rr1bl13_n4m3", "", ".", "select"]
publish_redshift._redshift_name_validator(
schema_name_good, database_name_good)
for bad_schema in bad_schema_names:
with pytest.raises(ValueError):
publish_redshift._redshift_name_validator(
bad_schema, database_name_good)
for bad_db in bad_database_names:
with pytest.raises(ValueError):
publish_redshift._redshift_name_validator(
schema_name_good, bad_db)
# Test that the function is called with the schema name
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_schema(self, mock_session_helper, mock_execute):
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
schema_name = "my_string"
db_name = "my_database"
iam_role = "my_iam_role"
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_schema(
schema_name, db_name, iam_role, mock_session_helper)
mock_scope.execute.assert_called_once_with(f"CREATE EXTERNAL SCHEMA IF NOT EXISTS {schema_name} \
FROM DATA CATALOG \
database '{db_name}' \
iam_role '{iam_role}';")
# Test that the function is called with the table name
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_table(self, mock_session_helper, mock_execute):
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
table_name = "my_string"
schema_name = "my_schema"
path = "s3://lol"
columns = {'grouped_col': 'object', 'text_col': 'object',
'int_col': 'int64', 'float_col': 'float64'}
partitions = {'fish': 'object'}
expected_sql = f'CREATE EXTERNAL TABLE IF NOT EXISTS {schema_name}.{table_name} {columns} \
PARTITIONED BY {partitions} STORED AS PARQUET \
LOCATION "{path}";'
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_table(table_name, schema_name, columns,
partitions, path, mock_session_helper)
assert mock_scope.execute.called_once_with(expected_sql)
# Test that the function is called with the table name without partitions
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_table_without_partitions(self, mock_session_helper, mock_execute):
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
table_name = "my_string"
schema_name = "my_schema"
path = "s3://lol"
columns = {'grouped_col': 'object', 'text_col': 'object',
'int_col': 'int64', 'float_col': 'float64'}
partitions = {}
expected_sql = f'CREATE EXTERNAL TABLE IF NOT EXISTS {schema_name}.{table_name} {columns} \
STORED AS PARQUET \
LOCATION "{path}";'
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_table(table_name, schema_name, columns,
partitions, path, mock_session_helper)
assert mock_scope.execute.called_once_with(expected_sql)
def test_gets_proper_partitions(self):
test_str = '/some/path/to/data/banana=33/orange=65/apple=abcd/xyz.parquet'
final_partitions = publish_redshift._get_partitions_for_spectrum(
test_str)
assert final_partitions == ['banana=33', 'orange=65', 'apple=abcd']
def test_gets_no_partitions(self):
test_str = '/some/path/to/data/xyz.parquet'
final_partitions = publish_redshift._get_partitions_for_spectrum(
test_str)
assert final_partitions == []
def test_gets_proper_partitions_multiple_slashes(self):
test_str = '/some/path/to/data//banana=33/orange=65/apple=abcd/xyz.parquet'
final_partitions = publish_redshift._get_partitions_for_spectrum(
test_str)
assert final_partitions == ['banana=33', 'orange=65', 'apple=abcd']
def test_format_partition_strings(self):
test_partitions = ['banana=33', 'orange=65', 'apple=abcd']
final_partitions = publish_redshift._format_partition_strings_for_sql(
test_partitions)
assert final_partitions == [
"banana='33'", "orange='65'", "apple='abcd'"]
def test_format_partition_strings_no_partitions(self):
test_partitions = []
final_partitions = publish_redshift._format_partition_strings_for_sql(
test_partitions)
assert final_partitions == []
def test_index_containing_substring(self):
test_list = ['abcd', 'efgh=1234', 'ijkl=5678', 'xyz.parquet']
index = publish_redshift._last_index_containing_substring(
test_list, '=')
assert index == 2
def test_index_containing_substring_no_match(self):
test_list = ['abcd', 'efgh=1234', 'ijkl=5678']
index = publish_redshift._last_index_containing_substring(
test_list, '&')
assert index == 4
def test_get_partition_location(self):
test_filepath = 'path/to/data/apple=abcd/orange=1234/abcd1234.parquet'
partition_path = publish_redshift._get_partition_location(
test_filepath)
assert partition_path == 'path/to/data/apple=abcd/orange=1234'
def test_get_partition_location_no_partition(self):
test_filepath = 'path/to/data/abcd1234.parquet'
with pytest.raises(ValueError):
partition_path = publish_redshift._get_partition_location(
test_filepath)
    # Test that create_partitions issues the expected ALTER TABLE ... ADD PARTITION statement
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_partitions(self, mock_session_helper, mock_execute):
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
table_name = "my_table"
schema_name = "my_schema"
bucket = "test"
partitions = ["version", "time"]
filepath = "something_overgeneric/dataset/version=v2_final_new/time=01-01-69 23:59:07/keysmash.parquet"
sql_partitions = "(version='v2_final_new', time='01-01-69 23:59:07')"
path_for_sql = "'s3://test/something_overgeneric/dataset/version=v2_final_new'"
expected_sql = f"ALTER TABLE {schema_name}.{table_name} \
# ADD IF NOT EXISTS PARTITION {sql_partitions} \
# LOCATION {path_for_sql};"
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_partitions(
bucket, schema_name, table_name, filepath, mock_session_helper)
            mock_scope.execute.assert_called_once_with(expected_sql)
# Test to check that the passed in datatype maps correctly
def test_datatype_mapper(self):
columns = {'grouped_col': 'object', 'text_col': 'object',
'int_col': 'int64', 'float_col': 'float64'}
expected = {'grouped_col': 'VARCHAR', 'text_col': 'VARCHAR',
'int_col': 'BIGINT', 'float_col': 'FLOAT'}
sql = ""
for key, val in expected.items():
sql += f'{key} {val}, '
sql = "(" + sql[:-2] + ")"
actual = publish_redshift._datatype_mapper(columns)
assert actual == sql
# Verify function call for custom create table
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_custom_table(self, mock_session_helper, mock_execute):
custom_redshift_columns = setup_custom_redshift_columns_and_dataframe()[1]
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
table_name = "my_string"
schema_name = "my_schema"
path = "s3://lol"
columns = {'colA': 'VARCHAR(1000)', 'colB': 'BIGINT',
'colC': 'REAL', 'coldD': 'DECIMAL(5,4)',
'colE': 'VARCHAR', 'colF': 'BOOLEAN'}
partitions = {'colA': 'VARCHAR(1000)'}
expected_sql = f'CREATE EXTERNAL TABLE IF NOT EXISTS {schema_name}.{table_name} {columns} \
PARTITIONED BY {partitions} STORED AS PARQUET \
LOCATION "{path}";'
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_custom_table(table_name, schema_name,
partitions, path, custom_redshift_columns, mock_session_helper)
            mock_scope.execute.assert_called_once_with(expected_sql)
# Verify function call for custom create table, no partitions
@patch('s3parq.publish_redshift.SessionHelper')
@patch('tests.test_publish_redshift.scope_execute_mock')
def test_create_custom_table_without_partitions(self, mock_session_helper, mock_execute):
custom_redshift_columns = setup_custom_redshift_columns_and_dataframe()[1]
mock_execute.return_value = MockScopeObj()
mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
table_name = "my_string"
schema_name = "my_schema"
path = "s3://lol"
columns = {'colA': 'VARCHAR(1000)', 'colB': 'BIGINT',
'colC': 'REAL', 'coldD': 'DECIMAL(5,4)',
'colE': 'VARCHAR', 'colF': 'BOOLEAN'}
partitions = {}
expected_sql = f'CREATE EXTERNAL TABLE IF NOT EXISTS {schema_name}.{table_name} {columns} \
STORED AS PARQUET \
LOCATION "{path}";'
with mock_session_helper.db_session_scope() as mock_scope:
publish_redshift.create_custom_table(table_name, schema_name,
partitions, path, custom_redshift_columns, mock_session_helper)
            mock_scope.execute.assert_called_once_with(expected_sql)
|
python
|
import argparse
import torch
from torch import nn
from torch.nn import functional as F
from torch.utils import data
from torchvision import datasets, transforms
import torchvision.utils as vutils
from classes import Generator, Discriminator
import conf
import utils as ut
# Command line arguments
parser = argparse.ArgumentParser()
parser.add_argument("--n_epochs", type=int, default=100, help="Number of epochs to train model.")
parser.add_argument("--checkpoint", type=str, default=None, help="Path to model checkpoint.")
opt = parser.parse_args()
n_epochs = opt.n_epochs
# CUDA for PyTorch
if torch.cuda.is_available():
device = torch.device("cuda:0")
torch.backends.cudnn.benchmark = True
print("CUDA is available")
else:
device = torch.device("cpu")
print("No GPU found.")
# Define a transform to resize the data
transform = transforms.Compose(
[transforms.Resize(64),
transforms.ToTensor()]
)
# Fashion MNIST Dataset
image_data = datasets.FashionMNIST(
'F_MNIST_data/',
download=True,
transform=transform
)
# Batch loader for images
image_loader = data.DataLoader(
image_data,
batch_size=conf.batch_size,
num_workers=1,
shuffle=False,
drop_last=True
)
print("The dataset contains {} images, in {} batches"
.format(len(image_loader.dataset), len(image_loader)))
# Instantiate model classes and initialise network weights
generator = Generator().to(device)
generator.apply(ut.weights_init)
discriminator = Discriminator().to(device)
discriminator.apply(ut.weights_init)
# Network optimizers
gen_optimizer = torch.optim.Adam(
params=generator.parameters(),
lr=conf.lr,
betas=(0.5, 0.999)
)
disc_optimizer = torch.optim.Adam(
params=discriminator.parameters(),
lr=conf.lr,
betas=(0.5, 0.999)
)
if opt.checkpoint:
generator, discriminator, gen_optimizer, disc_optimizer, start_epoch = \
ut.load_checkpoint(generator, discriminator, gen_optimizer, disc_optimizer, opt.checkpoint)
else:
start_epoch = 1
# Set to training mode
generator.train()
discriminator.train()
print('Training started...')
for epoch in range(start_epoch, n_epochs + 1):
for i, (image_batch, _) in enumerate(image_loader, 1):
image_batch = image_batch.to(device)
# Assign 1 for real label; 0 for fake label
label_real = torch.ones(image_batch.size(0)).to(device)
label_fake = torch.zeros(image_batch.size(0)).to(device)
# Generate a batch of samples from the latent prior
latent = torch.randn(image_batch.size(0), 100, 1, 1).to(device)
fake_image_batch = generator(latent).to(device)
real_pred = discriminator(image_batch).squeeze().to(device)
fake_pred = discriminator(fake_image_batch.detach()).squeeze().to(device)
disc_loss = 0.5 * (
F.binary_cross_entropy(real_pred, label_real) +
F.binary_cross_entropy(fake_pred, label_fake)
)
disc_optimizer.zero_grad()
        # Discriminator backpropagation
disc_loss.backward()
disc_optimizer.step()
fake_pred = discriminator(fake_image_batch).squeeze().to(device)
gen_loss = F.binary_cross_entropy(fake_pred, label_real)
gen_optimizer.zero_grad()
        # Generator backpropagation
gen_loss.backward()
gen_optimizer.step()
# Output training stats
if i % 1 == 0:
print('[%d/%d][%d/%d] | Loss_D: %.4f | Loss_G: %.4f |'
% (epoch, n_epochs, i, len(image_loader),
disc_loss.item(), gen_loss.item()))
if epoch % 1 == 0:
# Create and save fake image generated from random noise
fixed_noise = torch.randn(conf.n_gen_feats, conf.z_size, 1, 1).to(device)
fake = generator(fixed_noise)
with open('src/visualization/latest_examples.png', 'wb') as f:
vutils.save_image(
fake.detach(),
f,
normalize=True
)
torch.save(
{
'epoch': epoch,
'disc_state_dict': discriminator.state_dict(),
'gen_state_dict': generator.state_dict(),
'disc_optimizer_state_dict': disc_optimizer.state_dict(),
'gen_optimizer_state_dict': gen_optimizer.state_dict()
},
'src/models/checkpoints/model_chkpt_latest.pt'
)
torch.save(
generator.state_dict(),
'src/models/checkpoints/finished/trained_gen_model.pt'
)
# Save real image samples
with open('src/visualization/real_samples.png', 'wb') as f:
vutils.save_image(
image_batch,
f,
normalize=True
)
print("========= Training finished! =========")
|
python
|
# -*- coding:utf-8 -*-
import xml.etree.ElementTree as ET
class Parser:
"""
    This class parses style XML files.
    xml -> dict in list
    * All attribute values and text are strings, not ints; convert them yourself.
    For example, this XML
<?xml version="1.0"?>
<style>
<width>635</width>
<height>384</height>
<img id="user_twitter_icon"
width="128"
height="128"
x="15"
y="15"
rotation="0"/>
<string id="description"
x="150"
y="230"
width="570"
height="300"
rotation="0">
</string>
</style>
to [{"width":635,"height":384},
[{'img': {'width': '25', 'y': '70', 'x': '170', 'rotation': '0', 'id': 'tw_icon', 'height': '25'}}],
[{'string': {'y': '60','x': '200', 'rotation': '0', 'id': 'name'}}]]
it means
[{preferencesd(dict)}, [img tags(dict in list)], [string tags(dict in list)]]
"""
def create_style_list(self,style_path):
#reading style file
style_tree = ET.parse(style_path)
root = style_tree.getroot()
#checking root tag is style
if(root.tag == "style"):
self.root = root
else:
raise ValueError("no style tag in stylesheet xml file")
#style xml -> dict in list
self.style_list = []
preferences = {}
imgs = []
strs = []
for elem in self.root:
#img tag
if(elem.tag == "img"):
img_dict = {elem.tag : elem.attrib}
imgs.append(img_dict)
#string tag
elif(elem.tag == "string"):
overwrite_text = elem.text
attributes = elem.attrib
                #checking blank (elem.text can be None for an empty element)
                if(overwrite_text is not None and overwrite_text.strip() != ""):
                    attributes.update({"overwrite_text":overwrite_text.strip()})
str_dict = {elem.tag : attributes}
strs.append(str_dict)
#other tag (pick up only text)
else:
tmp_dict={elem.tag : elem.text}
preferences.update(tmp_dict)
self.style_list.append(preferences)
self.style_list.append(imgs)
self.style_list.append(strs)
def get_list(self):
return self.style_list
def __init__(self,style_path):
self.create_style_list(style_path)
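# --- Illustrative usage (not part of the original module) -------------------
# A minimal sketch of how Parser might be used; "style.xml" is a hypothetical
# path to a stylesheet in the format described in the class docstring above.
if __name__ == "__main__":
    parser = Parser("style.xml")
    preferences, imgs, strs = parser.get_list()
    print(preferences)                  # e.g. {'width': '635', 'height': '384'}
    for img in imgs:
        print(img["img"].get("id"))     # id attribute of each <img> tag
    for s in strs:
        print(s["string"].get("id"))    # id attribute of each <string> tag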
|
python
|
from .bases import EndpointBase
from aioconsul.api import consul, extract_meta
from aioconsul.exceptions import NotFound
from aioconsul.util import extract_attr
class SessionEndpoint(EndpointBase):
"""Create, destroy, and query sessions
.. note:: All of the read session endpoints support blocking queries and
all consistency modes.
Session mechanism can be used to build distributed locks.
Sessions act as a binding layer between nodes, health checks, and
key/value data.
"""
async def create(self, session, *, dc=None):
"""Creates a new session
Parameters:
session (Object): Session definition
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
Object: ID of the created session
The create endpoint is used to initialize a new session.
Sessions must be associated with a node and may be associated
with any number of checks.
The session object must look like::
{
"LockDelay": timedelta(seconds=15),
"Name": "my-service-lock",
"Node": "foobar",
"Checks": ["a", "b", "c"],
"Behavior": "release",
"TTL": timedelta(seconds=0)
}
**LockDelay** can be specified as a duration string using a "s"
suffix for seconds. The default is 15s.
**Node** must refer to a node that is already registered, if specified.
By default, the agent's own node name is used.
**Name** can be used to provide a human-readable name for the Session.
**Checks** is used to provide a list of associated health checks.
It is highly recommended that, if you override this list, you include
the default "serfHealth".
**Behavior** can be set to either ``release`` or ``delete``.
This controls the behavior when a session is invalidated.
By default, this is ``release``, causing any locks that are held to be
released. Changing this to ``delete`` causes any locks that are held
to be deleted. ``delete`` is useful for creating ephemeral key/value
entries.
**TTL** field is a duration string, and like ``LockDelay`` it can use
"s" as a suffix for seconds. If specified, it must be between 10s and
86400s currently. When provided, the session is invalidated if it is
not renewed before the TTL expires. The lowest practical TTL should be
used to keep the number of managed sessions low.
When locks are forcibly expired, such as during a leader election,
sessions may not be reaped for up to double this TTL, so long TTL
values (>1 hour) should be avoided.
"""
response = await self._api.put(
"/v1/session/create",
data=session,
params={"dc": dc})
return response.body
async def destroy(self, session, *, dc=None):
"""Destroys a given session
Parameters:
session (ObjectID): Session ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
bool: ``True`` on success
"""
session_id = extract_attr(session, keys=["ID"])
response = await self._api.put("/v1/session/destroy", session_id,
params={"dc": dc})
return response.body is True
delete = destroy
async def info(self, session, *, dc=None, watch=None, consistency=None):
"""Queries a given session
Parameters:
session (ObjectID): Session ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
ObjectMeta: where value is the queried session
Raises:
NotFound: session is absent
Returns the requested session information within a given datacenter.
It returns a mapping like this::
{
"LockDelay": datetime.timedelta(0, 15),
"Checks": [
"serfHealth"
],
"Node": "foobar",
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"CreateIndex": 1086449
}
"""
session_id = extract_attr(session, keys=["ID"])
response = await self._api.get("/v1/session/info", session_id,
watch=watch,
consistency=consistency,
params={"dc": dc})
try:
result = response.body[0]
except IndexError:
meta = extract_meta(response.headers)
raise NotFound("No session for %r" % session_id, meta=meta)
return consul(result, meta=extract_meta(response.headers))
async def node(self, node, *, dc=None, watch=None, consistency=None):
"""Lists sessions belonging to a node
Parameters:
node (ObjectID): Node ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
CollectionMeta: where value is a list of
sessions attached to node
It returns a list like this::
[
{
"LockDelay": datetime.timedelta(0, 15),
"Checks": [
"serfHealth"
],
"Node": "foobar",
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"CreateIndex": 1086449
},
...
]
"""
node_id = extract_attr(node, keys=["Node", "ID"])
response = await self._api.get("/v1/session/node", node_id, params={
"dc": dc}, watch=watch, consistency=consistency)
return consul(response)
async def items(self, *, dc=None, watch=None, consistency=None):
"""Lists sessions
Parameters:
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
watch (Blocking): Do a blocking query
consistency (Consistency): Force consistency
Returns:
CollectionMeta: where value is a list of sessions
It returns an object like this::
[
{
"LockDelay": datetime.timedelta(0, 15),
"Checks": [
"serfHealth"
],
"Node": "foobar",
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"CreateIndex": 1086449
},
...
]
"""
response = await self._api.get("/v1/session/list", params={
"dc": dc}, watch=watch, consistency=consistency)
return consul(response)
async def renew(self, session, *, dc=None):
"""Renews a TTL-based session
Parameters:
session (ObjectID): Session ID
dc (str): Specify datacenter that will be used.
Defaults to the agent's local datacenter.
Returns:
ObjectMeta: where value is session
Raises:
NotFound: session is absent
The response looks like this::
{
"LockDelay": datetime.timedelta(0, 15),
"Checks": [
"serfHealth"
],
"Node": "foobar",
"ID": "adf4238a-882b-9ddc-4a9d-5b6758e4159e",
"CreateIndex": 1086449
"Behavior": "release",
"TTL": datetime.timedelta(0, 15)
}
.. note:: Consul MAY return a TTL value higher than the one
specified during session creation. This indicates
the server is under high load and is requesting
clients renew less often.
"""
session_id = extract_attr(session, keys=["ID"])
response = await self._api.put("/v1/session/renew", session_id,
params={"dc": dc})
try:
result = response.body[0]
except IndexError:
meta = extract_meta(response.headers)
raise NotFound("No session for %r" % session_id, meta=meta)
return consul(result, meta=extract_meta(response.headers))
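# --- Illustrative usage (not part of the original module) -------------------
# A minimal sketch of the call pattern documented above. It assumes `sessions`
# is an already-initialised SessionEndpoint (normally reached through the
# library's client object) and that a Consul agent is reachable; the session
# body and TTL are example values only.
async def _session_usage_sketch(sessions):
    """Illustrative only: create, renew, then destroy a TTL session."""
    from datetime import timedelta
    created = await sessions.create({
        "Name": "my-service-lock",
        "Behavior": "release",
        "TTL": timedelta(seconds=30),
    })
    # `created` carries the ID of the new session (see create() above);
    # renew() and destroy() take the session ID, per their docstrings.
    await sessions.renew(created)
    await sessions.destroy(created)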
|
python
|
import os
import json
from common.models import Contact, ContactType
from facilities.models import (
Facility, FacilityContact, Officer, OfficerContact
)
from users.models import MflUser
from django.core.management import BaseCommand
from django.conf import settings
system_user = MflUser.objects.get(email='[email protected]')
class Command(BaseCommand):
def handle(self, *args, **kwargs):
# facility email contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/email/0018_facility_emails_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='EMAIL')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# facility email contacts linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/email/0019_facility_emails_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
mobile_type = ContactType.objects.get(name='EMAIL')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=mobile_type
)
facility = record.get('facility').get('code')
try:
facility_obj = Facility.objects.get(code=facility)
print FacilityContact.objects.get_or_create(
contact=contact, facility=facility_obj,
created_by=system_user, updated_by=system_user)
except Facility.DoesNotExist:
print "The requested facility does not exist"
# officer email contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/email/0030_officer_email_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='EMAIL')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# officer email linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/email/0031_officer_email_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='EMAIL')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=email_type
)
officer = record.get('officer')
if officer:
officer = officer.get('name')
try:
officer_obj = Officer.objects.filter(name=officer)
print OfficerContact.objects.get_or_create(
contact=contact, officer=officer_obj[0],
created_by=system_user, updated_by=system_user)
except IndexError:
print "The requested officer does not exist"
else:
print "Officer key is missing"
# facility fax contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/fax/0022_facility_fax_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='FAX')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# facility fax contacts linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/fax/0023_facility_fax_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
mobile_type = ContactType.objects.get(name='FAX')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=mobile_type
)
facility = record.get('facility').get('code')
try:
facility_obj = Facility.objects.get(code=facility)
print FacilityContact.objects.get_or_create(
contact=contact, facility=facility_obj,
created_by=system_user, updated_by=system_user)
except Facility.DoesNotExist:
print "The requested facility does not exist"
# facility landline contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/landline/0020_facility_landline_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='LANDLINE')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# facility landline contacts linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/landline/0021_facility_landline_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
mobile_type = ContactType.objects.get(name='LANDLINE')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=mobile_type
)
facility = record.get('facility').get('code')
try:
facility_obj = Facility.objects.get(code=facility)
print FacilityContact.objects.get_or_create(
contact=contact, facility=facility_obj,
created_by=system_user, updated_by=system_user)
except Facility.DoesNotExist:
print "The requested facility does not exist"
# facility mobile contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/mobile/0024_facility_mobile_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='MOBILE')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# facility mobile contacts linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/mobile/0025_facility_mobile_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
mobile_type = ContactType.objects.get(name='MOBILE')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=mobile_type
)
facility = record.get('facility').get('code')
try:
facility_obj = Facility.objects.get(code=facility)
print FacilityContact.objects.get_or_create(
contact=contact, facility=facility_obj,
created_by=system_user, updated_by=system_user)
except Facility.DoesNotExist:
print "The requested facility does not exist"
# officers mobile contacts
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/mobile/0028_officer_mobile_contacts.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='MOBILE')
for record in records:
conact = record.get('contact')
contact, created = Contact.objects.get_or_create(
contact=conact,
contact_type=email_type
)
# officer mobiles linked
file_path = os.path.join(
settings.BASE_DIR,
'data/new_data/mobile/0029_officer_mobile_contacts_linked.json'
)
with open(file_path) as email_contacts:
email_data = json.load(email_contacts)
records = email_data[0].get('records')
email_type = ContactType.objects.get(name='MOBILE')
for record in records:
contact = record.get('contact').get('contact')
contact, created = Contact.objects.get_or_create(
contact=contact,
contact_type=email_type
)
officer = record.get('officer')
if officer:
officer = officer.get('name')
try:
officer_obj = Officer.objects.filter(name=officer)
print OfficerContact.objects.get_or_create(
contact=contact, officer=officer_obj[0],
created_by=system_user, updated_by=system_user)
except IndexError:
print "The requested officer does not exist"
else:
print "Officer key is missing"
|
python
|