# Copyright (c) 2011-2013 Kunal Mehta. All rights reserved.
# Use of this source code is governed by a BSD License found in README.md.
from django.conf import settings
from django.contrib.auth import authenticate, login, logout
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, \
HttpResponseNotFound, HttpResponseForbidden
from django.views.decorators.http import require_POST, require_GET
from huxley.accounts.forms import RegistrationForm
from huxley.accounts.models import HuxleyUser
from huxley.core.models import *
from huxley.shortcuts import render_template, render_json
def login_user(request):
""" Logs in a user or renders the login template. """
if request.method == 'POST':
username = request.POST.get('username')
password = request.POST.get('password')
user, error = HuxleyUser.authenticate(username, password)
if error:
return render_json({'success': False, 'error': error})
redirect = HuxleyUser.login(request, user)
return render_json({'success': True, 'redirect': redirect})
return render_template(request, 'auth.html')
def login_as_user(request, uid):
""" Logs in as a particular user (admin use only). """
try:
if not request.user.is_superuser:
return HttpResponseForbidden()
username = HuxleyUser.objects.get(id=uid).username
user = authenticate(username=username, password=settings.ADMIN_SECRET)
login(request, user)
return HttpResponseRedirect(reverse('index'))
except HuxleyUser.DoesNotExist:
return HttpResponseNotFound()
def logout_user(request):
""" Logs out the current user. Although we'll only be supporting AJAX,
we're leaving the standard logout here in case of a heinous bug that
prevents normal logout."""
logout(request)
if request.is_ajax():
return HttpResponse(reverse('login'))
else:
return HttpResponseRedirect(reverse('index'))
def register(request):
""" Registers a new user and school. """
# Registration is closed. TODO: Implement the waitlist.
#return render_template(request, 'registration_closed.html')
    if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
new_school = form.create_school()
new_user = form.create_user(new_school)
form.add_country_preferences(new_school)
form.add_committee_preferences(new_school)
if not settings.DEBUG:
new_user.email_user("Thanks for registering for BMUN 62!",
"We're looking forward to seeing %s at BMUN 62. "
"You can find information on deadlines and fees at "
"http://bmun.org/bmun/timeline/. If you have any "
"more questions, please feel free to email me at "
"[email protected]. See you soon!\n\nBest,\n\nShrey Goel"
"\nUSG of External Relations, BMUN 62" % new_school.name,
"[email protected]")
Conference.auto_country_assign(new_school)
return render_template(request, 'thanks.html')
    if request.method != 'POST':
        form = RegistrationForm()
context = {
'form': form,
'state': '',
'countries': Country.objects.filter(special=False).order_by('name'),
'committees': Committee.objects.filter(special=True)
}
return render_template(request, 'registration.html', context)
@require_POST
def change_password(request):
""" Attempts to change the user's password, or returns an error. """
if not request.user.is_authenticated():
return HttpResponse(status=401)
old = request.POST.get('oldpassword')
new = request.POST.get('newpassword')
new2 = request.POST.get('newpassword2')
success, error = request.user.change_password(old, new, new2)
return HttpResponse('OK') if success else HttpResponse(error)
def reset_password(request):
""" Reset a user's password. """
if request.method == 'POST':
username = request.POST.get('username')
new_password = HuxleyUser.reset_password(username)
if new_password:
            if not settings.DEBUG:
                # Look up the user whose password was just reset so we can
                # notify them by email.
                user = HuxleyUser.objects.get(username=username)
                user.email_user("Huxley Password Reset",
                                "Your password has been reset to %s.\nThank you for using Huxley!" % new_password,
                                from_email="[email protected]")
return render_template(request, 'password-reset-success.html')
else:
return render_template(request, 'password-reset.html', {'error': True})
return render_template(request, 'password-reset.html')
@require_GET
def validate_unique_user(request):
""" Checks that a potential username is unique. """
username = request.GET['username']
if HuxleyUser.objects.filter(username=username).exists():
return HttpResponse(status=406)
else:
return HttpResponse(status=200)
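# A minimal sketch of exercising change_password from a test, assuming these
# views are wired into the urlconf (the '/password/change/' path and the
# credentials below are hypothetical):
#
#   from django.test import Client
#   client = Client()
#   client.login(username='advisor', password='old-secret')
#   response = client.post('/password/change/', {
#       'oldpassword': 'old-secret',
#       'newpassword': 'n3w-secret',
#       'newpassword2': 'n3w-secret',
#   })
#   # change_password returns 'OK' on success, or the error string otherwise.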
import unittest
import torchtext.vocab as v
import han.encode.sentence as s
class SentenceEncoderTestCase(unittest.TestCase):
def test(self):
vocab = v.build_vocab_from_iterator([["apple", "is", "tasty"]])
sut = s.SentenceEncoder(vocab)
res = sut.forward(["apple is tasty", "tasty is apple"])
self.assertEqual(len(res), 2)
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional
import torch
from torch import Tensor
from fairseq import checkpoint_utils, utils
from fairseq.models import (
FairseqEncoderModel,
FairseqEncoderDecoderModel,
FairseqLanguageModel,
register_model,
register_model_architecture,
)
from fairseq.models.speech_to_text import S2TTransformerEncoder
from fairseq.models.speech_to_speech.modules import CTCDecoder, StackedEmbedding
from fairseq.models.text_to_speech import TTSTransformerDecoder
from fairseq.models.transformer import (
Linear,
TransformerDecoder,
TransformerModelBase,
)
logger = logging.getLogger(__name__)
class S2STransformerEncoder(S2TTransformerEncoder):
"""Based on S2T transformer encoder, with support
to incorporate target speaker embedding."""
def __init__(self, args):
super().__init__(args)
self.spk_emb_proj = None
if args.target_speaker_embed:
self.spk_emb_proj = Linear(
args.encoder_embed_dim + args.speaker_embed_dim, args.encoder_embed_dim
)
def forward(
self, src_tokens, src_lengths, tgt_speaker=None, return_all_hiddens=False
):
out = super().forward(src_tokens, src_lengths, return_all_hiddens)
if self.spk_emb_proj:
x = out["encoder_out"][0]
seq_len, bsz, _ = x.size()
tgt_speaker_emb = tgt_speaker.view(1, bsz, -1).expand(seq_len, bsz, -1)
x = self.spk_emb_proj(torch.cat([x, tgt_speaker_emb], dim=2))
out["encoder_out"][0] = x
return out
class TransformerUnitDecoder(TransformerDecoder):
"""Based on Transformer decoder, with support to decoding stacked units"""
def __init__(
self,
args,
dictionary,
embed_tokens,
no_encoder_attn=False,
output_projection=None,
):
super().__init__(
args, dictionary, embed_tokens, no_encoder_attn, output_projection
)
self.n_frames_per_step = args.n_frames_per_step
self.out_proj_n_frames = (
Linear(
self.output_embed_dim,
self.output_embed_dim * self.n_frames_per_step,
bias=False,
)
if self.n_frames_per_step > 1
else None
)
def forward(
self,
prev_output_tokens,
encoder_out: Optional[Dict[str, List[Tensor]]] = None,
incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
features_only: bool = False,
full_context_alignment: bool = False,
alignment_layer: Optional[int] = None,
alignment_heads: Optional[int] = None,
src_lengths: Optional[Any] = None,
return_all_hiddens: bool = False,
):
"""
Args:
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for teacher forcing
encoder_out (optional): output from the encoder, used for
encoder-side attention, should be of size T x B x C
incremental_state (dict): dictionary used for storing state during
:ref:`Incremental decoding`
features_only (bool, optional): only return features without
applying output layer (default: False).
full_context_alignment (bool, optional): don't apply
auto-regressive mask to self-attention (default: False).
Returns:
tuple:
- the decoder's output of shape `(batch, tgt_len, vocab)`
- a dictionary with any model-specific outputs
"""
x, extra = self.extract_features(
prev_output_tokens,
encoder_out=encoder_out,
incremental_state=incremental_state,
full_context_alignment=full_context_alignment,
alignment_layer=alignment_layer,
alignment_heads=alignment_heads,
)
if not features_only:
bsz, seq_len, d = x.size()
if self.out_proj_n_frames:
x = self.out_proj_n_frames(x)
x = self.output_layer(x.view(bsz, seq_len, self.n_frames_per_step, d))
x = x.view(bsz, seq_len * self.n_frames_per_step, -1)
if (
incremental_state is None and self.n_frames_per_step > 1
): # teacher-forcing mode in training
x = x[
:, : -(self.n_frames_per_step - 1), :
] # remove extra frames after <eos>
return x, extra
def upgrade_state_dict_named(self, state_dict, name):
if self.n_frames_per_step > 1:
move_keys = [
(
f"{name}.project_in_dim.weight",
f"{name}.embed_tokens.project_in_dim.weight",
)
]
for from_k, to_k in move_keys:
if from_k in state_dict and to_k not in state_dict:
state_dict[to_k] = state_dict[from_k]
del state_dict[from_k]
class S2STransformerMultitaskModelBase(FairseqEncoderDecoderModel):
@classmethod
def build_encoder(cls, args):
encoder = S2STransformerEncoder(args)
pretraining_path = getattr(args, "load_pretrained_encoder_from", None)
if pretraining_path is not None:
if not Path(pretraining_path).exists():
logger.warning(
f"skipped pretraining because {pretraining_path} does not exist"
)
else:
encoder = checkpoint_utils.load_pretrained_component_from_model(
component=encoder, checkpoint=pretraining_path
)
logger.info(f"loaded pretrained encoder from: {pretraining_path}")
return encoder
@classmethod
def build_multitask_decoder(cls, args, tgt_dict, in_dim):
decoder_args = args.decoder_args
decoder_args.encoder_embed_dim = in_dim
if args.decoder_type == "transformer":
base_multitask_text_transformer_decoder_arch(decoder_args)
task_decoder = TransformerDecoder(
decoder_args,
tgt_dict,
embed_tokens=TransformerModelBase.build_embedding(
decoder_args,
tgt_dict,
decoder_args.decoder_embed_dim,
),
)
elif args.decoder_type == "ctc":
task_decoder = CTCDecoder(
dictionary=tgt_dict,
in_dim=in_dim,
)
else:
raise NotImplementedError(
"currently only support multitask decoder_type 'transformer', 'ctc'"
)
return task_decoder
@classmethod
def build_model(cls, args, task):
encoder = cls.build_encoder(args)
decoder = (
cls.build_decoder(args, task.target_dictionary)
if task.args.target_is_code
else cls.build_decoder(args)
)
base_model = cls(encoder, decoder)
# set up multitask decoders
base_model.multitask_decoders = {}
for task_name, task_obj in task.multitask_tasks.items():
in_dim = (
args.encoder_embed_dim
if task_obj.args.input_from == "encoder"
else args.decoder_embed_dim
)
task_decoder = cls.build_multitask_decoder(
task_obj.args, task_obj.target_dictionary, in_dim
)
setattr(base_model, f"{task_name}_decoder", task_decoder)
decoder_model_cls = (
FairseqEncoderModel
if task_obj.args.decoder_type == "ctc"
else FairseqLanguageModel
)
base_model.multitask_decoders[task_name] = decoder_model_cls(
getattr(base_model, f"{task_name}_decoder")
)
return base_model
def forward_encoder(self, src_tokens, src_lengths, speaker=None, **kwargs):
return self.encoder(
src_tokens, src_lengths=src_lengths, tgt_speaker=speaker, **kwargs
)
@register_model("s2ut_transformer")
class S2UTTransformerModel(S2STransformerMultitaskModelBase):
"""
Direct speech-to-speech translation model with S2T Transformer encoder + Transformer discrete unit decoder
https://arxiv.org/abs/2107.05604
"""
@staticmethod
def add_args(parser):
# input
parser.add_argument(
"--conv-kernel-sizes",
type=str,
metavar="N",
help="kernel sizes of Conv1d subsampling layers",
)
parser.add_argument(
"--conv-channels",
type=int,
metavar="N",
help="# of channels in Conv1d subsampling layers",
)
# Transformer
parser.add_argument(
"--activation-fn",
type=str,
default="relu",
choices=utils.get_available_activation_fns(),
help="activation function to use",
)
parser.add_argument(
"--dropout", type=float, metavar="D", help="dropout probability"
)
parser.add_argument(
"--attention-dropout",
type=float,
metavar="D",
help="dropout probability for attention weights",
)
parser.add_argument(
"--activation-dropout",
"--relu-dropout",
type=float,
metavar="D",
help="dropout probability after activation in FFN.",
)
parser.add_argument(
"--encoder-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension",
)
parser.add_argument(
"--encoder-ffn-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension for FFN",
)
parser.add_argument(
"--encoder-layers", type=int, metavar="N", help="num encoder layers"
)
parser.add_argument(
"--encoder-attention-heads",
type=int,
metavar="N",
help="num encoder attention heads",
)
parser.add_argument(
"--encoder-normalize-before",
action="store_true",
help="apply layernorm before each encoder block",
)
parser.add_argument(
"--decoder-embed-dim",
type=int,
metavar="N",
help="decoder embedding dimension",
)
parser.add_argument(
"--decoder-ffn-embed-dim",
type=int,
metavar="N",
help="decoder embedding dimension for FFN",
)
parser.add_argument(
"--decoder-layers", type=int, metavar="N", help="num decoder layers"
)
parser.add_argument(
"--decoder-attention-heads",
type=int,
metavar="N",
help="num decoder attention heads",
)
parser.add_argument(
"--decoder-normalize-before",
action="store_true",
help="apply layernorm before each decoder block",
)
parser.add_argument(
"--share-decoder-input-output-embed",
action="store_true",
help="share decoder input and output embeddings",
)
parser.add_argument(
"--layernorm-embedding",
action="store_true",
help="add layernorm to embedding",
)
parser.add_argument(
"--no-scale-embedding",
action="store_true",
help="if True, dont scale embeddings",
)
parser.add_argument(
"--load-pretrained-encoder-from",
type=str,
metavar="STR",
help="model to take encoder weights from (for initialization)",
)
parser.add_argument(
"--encoder-freezing-updates",
type=int,
metavar="N",
help="freeze encoder for first N updates",
)
# speaker
parser.add_argument(
"--speaker-embed-dim",
type=int,
metavar="N",
help="speaker embedding dimension",
)
@classmethod
def build_decoder(cls, args, tgt_dict):
num_embeddings = len(tgt_dict)
padding_idx = tgt_dict.pad()
embed_tokens = StackedEmbedding(
num_embeddings,
args.decoder_embed_dim,
padding_idx,
num_stacked=args.n_frames_per_step,
)
return TransformerUnitDecoder(
args,
tgt_dict,
embed_tokens,
)
def forward(
self,
src_tokens,
src_lengths,
prev_output_tokens,
tgt_speaker=None,
return_all_hiddens=False,
):
encoder_out = self.encoder(
src_tokens,
src_lengths=src_lengths,
tgt_speaker=tgt_speaker,
return_all_hiddens=return_all_hiddens,
)
decoder_out = self.decoder(
prev_output_tokens,
encoder_out=encoder_out,
)
if return_all_hiddens:
decoder_out[-1]["encoder_states"] = encoder_out["encoder_states"]
decoder_out[-1]["encoder_padding_mask"] = encoder_out[
"encoder_padding_mask"
]
return decoder_out
@register_model("s2spect_transformer")
class S2SpecTTransformerModel(S2STransformerMultitaskModelBase):
"""
Speech-to-spectrogram model with S2T Transformer encoder + TTS Transformer decoder
"""
@staticmethod
def add_args(parser):
# input
parser.add_argument(
"--conv-kernel-sizes",
type=str,
metavar="N",
help="kernel sizes of Conv1d subsampling layers",
)
parser.add_argument(
"--conv-channels",
type=int,
metavar="N",
help="# of channels in Conv1d subsampling layers",
)
# Transformer
parser.add_argument(
"--activation-fn",
type=str,
default="relu",
choices=utils.get_available_activation_fns(),
help="activation function to use",
)
parser.add_argument(
"--dropout", type=float, metavar="D", help="dropout probability"
)
parser.add_argument(
"--attention-dropout",
type=float,
metavar="D",
help="dropout probability for attention weights",
)
parser.add_argument(
"--activation-dropout",
"--relu-dropout",
type=float,
metavar="D",
help="dropout probability after activation in FFN.",
)
parser.add_argument(
"--encoder-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension",
)
parser.add_argument(
"--encoder-ffn-embed-dim",
type=int,
metavar="N",
help="encoder embedding dimension for FFN",
)
parser.add_argument(
"--encoder-layers", type=int, metavar="N", help="num encoder layers"
)
parser.add_argument(
"--encoder-attention-heads",
type=int,
metavar="N",
help="num encoder attention heads",
)
parser.add_argument(
"--encoder-normalize-before",
action="store_true",
help="apply layernorm before each encoder block",
)
parser.add_argument(
"--no-scale-embedding",
action="store_true",
help="if True, dont scale embeddings",
)
parser.add_argument(
"--load-pretrained-encoder-from",
type=str,
metavar="STR",
help="model to take encoder weights from (for initialization)",
)
parser.add_argument(
"--encoder-freezing-updates",
type=int,
metavar="N",
help="freeze encoder for first N updates",
)
# speaker
parser.add_argument(
"--speaker-embed-dim",
type=int,
metavar="N",
help="speaker embedding dimension",
)
# decoder
parser.add_argument("--output-frame-dim", type=int)
# decoder prenet
parser.add_argument("--prenet-dropout", type=float)
parser.add_argument("--prenet-layers", type=int)
parser.add_argument("--prenet-dim", type=int)
# decoder postnet
parser.add_argument("--postnet-dropout", type=float)
parser.add_argument("--postnet-layers", type=int)
parser.add_argument("--postnet-conv-dim", type=int)
parser.add_argument("--postnet-conv-kernel-size", type=int)
# decoder transformer layers
parser.add_argument("--decoder-transformer-layers", type=int)
parser.add_argument("--decoder-embed-dim", type=int)
parser.add_argument("--decoder-ffn-embed-dim", type=int)
parser.add_argument("--decoder-normalize-before", action="store_true")
parser.add_argument("--decoder-attention-heads", type=int)
@classmethod
def build_decoder(cls, args):
return TTSTransformerDecoder(args, None, padding_idx=1)
def forward(
self,
src_tokens,
src_lengths,
prev_output_tokens,
tgt_speaker=None,
incremental_state=None,
target_lengths=None,
speaker=None,
return_all_hiddens=False,
):
encoder_out = self.encoder(
src_tokens,
src_lengths=src_lengths,
tgt_speaker=tgt_speaker,
return_all_hiddens=return_all_hiddens,
)
decoder_out = self.decoder(
prev_output_tokens,
encoder_out=encoder_out,
incremental_state=incremental_state,
target_lengths=target_lengths,
speaker=speaker,
)
if return_all_hiddens:
decoder_out[-1]["encoder_states"] = encoder_out["encoder_states"]
decoder_out[-1]["encoder_padding_mask"] = encoder_out[
"encoder_padding_mask"
]
return decoder_out
def base_multitask_text_transformer_decoder_arch(args):
args.dropout = getattr(args, "dropout", 0.3)
args.decoder_layerdrop = getattr(args, "decoder_layerdrop", 0.0)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", True
)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 256)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
args.max_target_positions = getattr(args, "max_target_positions", 1024)
args.no_scale_embedding = getattr(args, "no_scale_embedding", False)
args.adaptive_input = getattr(args, "adaptive_input", False)
args.quant_noise_pq = getattr(args, "quant_noise_pq", 0)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.decoder_layers = getattr(args, "decoder_layers", 2)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
# decoder layer
args.activation_dropout = getattr(args, "activation_dropout", args.dropout)
args.activation_fn = getattr(args, "activation_fn", "relu")
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", True)
args.decoder_ffn_embed_dim = getattr(args, "decoder_ffn_embed_dim", 2048)
args.attention_dropout = getattr(args, "attention_dropout", args.dropout)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 4)
def base_s2st_transformer_encoder_architecture(args):
args.encoder_freezing_updates = getattr(args, "encoder_freezing_updates", 0)
# Convolutional subsampler
args.conv_kernel_sizes = getattr(args, "conv_kernel_sizes", "5,5")
args.conv_channels = getattr(args, "conv_channels", 1024)
# Transformer
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 2048)
args.encoder_layers = getattr(args, "encoder_layers", 12)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 8)
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", True)
args.no_scale_embedding = getattr(args, "no_scale_embedding", False)
args.dropout = getattr(args, "dropout", 0.1)
args.attention_dropout = getattr(args, "attention_dropout", args.dropout)
args.activation_dropout = getattr(args, "activation_dropout", args.dropout)
args.activation_fn = getattr(args, "activation_fn", "relu")
args.speaker_embed_dim = getattr(args, "speaker_embed_dim", 256)
@register_model_architecture(
model_name="s2ut_transformer", arch_name="s2ut_transformer"
)
def s2ut_architecture_base(args):
base_s2st_transformer_encoder_architecture(args)
# decoder
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
args.decoder_ffn_embed_dim = getattr(
args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 8)
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", True)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
args.adaptive_softmax_dropout = getattr(args, "adaptive_softmax_dropout", 0)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", False
)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.adaptive_input = getattr(args, "adaptive_input", False)
args.decoder_layerdrop = getattr(args, "decoder_layerdrop", 0.0)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
args.quant_noise_pq = getattr(args, "quant_noise_pq", 0)
@register_model_architecture("s2ut_transformer", "s2ut_transformer_fisher")
def s2ut_architecture_fisher(args):
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 256)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 4)
args.dropout = getattr(args, "dropout", 0.1)
s2ut_architecture_base(args)
@register_model_architecture(
model_name="s2spect_transformer", arch_name="s2spect_transformer"
)
def s2spect_architecture_base(args):
base_s2st_transformer_encoder_architecture(args)
# decoder
args.output_frame_dim = getattr(args, "output_frame_dim", 80)
# decoder prenet
args.prenet_dropout = getattr(args, "prenet_dropout", 0.5)
args.prenet_layers = getattr(args, "prenet_layers", 2)
args.prenet_dim = getattr(args, "prenet_dim", 256)
# decoder postnet
args.postnet_dropout = getattr(args, "postnet_dropout", 0.5)
args.postnet_layers = getattr(args, "postnet_layers", 5)
args.postnet_conv_dim = getattr(args, "postnet_conv_dim", 512)
args.postnet_conv_kernel_size = getattr(args, "postnet_conv_kernel_size", 5)
# decoder transformer layers
args.decoder_transformer_layers = getattr(args, "decoder_transformer_layers", 6)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 512)
args.decoder_ffn_embed_dim = getattr(
args, "decoder_ffn_embed_dim", 4 * args.decoder_embed_dim
)
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", False)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 4)
@register_model_architecture("s2spect_transformer", "s2spect_transformer_fisher")
def s2spect_architecture_fisher(args):
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 256)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 256 * 8)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 4)
args.dropout = getattr(args, "dropout", 0.1)
# decoder
args.prenet_dim = getattr(args, "prenet_dim", 32)
s2spect_architecture_base(args)
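# A hedged CLI sketch: these models are selected via the architecture names
# registered above. The invocation follows fairseq's speech-to-speech recipes
# in spirit; the flags and data paths here are assumptions, not a verified
# command:
#
#   fairseq-train $DATA_ROOT \
#       --task speech_to_speech --target-is-code \
#       --arch s2ut_transformer_fisher ...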
from .abstract_choices_factory import AbstractChoicesFactory
from .choice import Choice
from src.round import Output
from random import randrange
class PlayerVsComChoicesFactory(AbstractChoicesFactory):
    @staticmethod
    def make_player1_choice():
player_choice = Choice('placeholder')
while not player_choice.is_valid():
entered_choice = input(Output.get_user_choice_header())
player_choice.set_choice(entered_choice)
return player_choice
    @staticmethod
    def make_player2_choice():
valid_choices = Choice.VALID_CHOICES
number_of_valid_choices = len(valid_choices)
com_entry_index = randrange(number_of_valid_choices)
com_entry = valid_choices[com_entry_index]
com_choice = Choice(com_entry)
return com_choice
from apple_health.util import parse_date, parse_float
DATE_COMPONENTS = "@dateComponents"
ACTIVE_ENERGY_BURNED = "@activeEnergyBurned"
ACTIVE_ENERGY_BURNED_GOAL = "@activeEnergyBurnedGoal"
ACTIVE_ENERGY_BURNED_UNIT = "@activeEnergyBurnedUnit"
APPLE_EXERCISE_TIME = "@appleExerciseTime"
APPLE_EXERCISE_TIME_GOAL = "@appleExerciseTimeGoal"
APPLE_STAND_HOURS = "@appleStandHours"
APPLE_STAND_HOURS_GOAL = "@appleStandHoursGoal"
class ActivitySummary:
# a.k.a. The Rings
def __init__(self, **data):
self.date = parse_date(data.get(DATE_COMPONENTS))
# Red
self.active_energy_burned: float = parse_float(
data.get(ACTIVE_ENERGY_BURNED)
)
self.active_energy_burned_goal: float = parse_float(
data.get(ACTIVE_ENERGY_BURNED_GOAL)
)
self.active_energy_burned_unit: str = data.get(
ACTIVE_ENERGY_BURNED_UNIT, "kcal"
)
# Green
self.exercise_time: float = parse_float(
data.get(APPLE_EXERCISE_TIME)
)
self.exercise_time_goal: float = parse_float(
data.get(APPLE_EXERCISE_TIME_GOAL)
)
# Blue
self.stand_hours: float = parse_float(
data.get(APPLE_STAND_HOURS)
)
self.stand_hours_goal: float = parse_float(
data.get(APPLE_STAND_HOURS_GOAL)
)
@property
def active_energy_percent(self) -> float:
if not self.active_energy_burned_goal:
return 0.0
return self.active_energy_burned / self.active_energy_burned_goal
@property
def exercise_time_percent(self) -> float:
if not self.exercise_time_goal:
return 0.0
return self.exercise_time / self.exercise_time_goal
@property
def stand_hours_percent(self) -> float:
if not self.stand_hours_goal:
return 0.0
return self.stand_hours / self.stand_hours_goal
def __repr__(self) -> str:
aep = int(100 * self.active_energy_percent)
etp = int(100 * self.exercise_time_percent)
shp = int(100 * self.stand_hours_percent)
return f"{aep}% / {etp}% / {shp}%"
# Warning: Don't edit file (autogenerated from python -m dev codegen).
ROBOCODE_GET_LANGUAGE_SERVER_PYTHON = "robocode.getLanguageServerPython" # Get a python executable suitable to start the language server.
ROBOCODE_GET_PLUGINS_DIR = "robocode.getPluginsDir" # Get the directory for plugins.
ROBOCODE_CREATE_ACTIVITY = "robocode.createActivity" # Create a Robocode Activity Package.
ROBOCODE_LIST_ACTIVITY_TEMPLATES_INTERNAL = "robocode.listActivityTemplates.internal" # Provides a list with the available activity templates.
ROBOCODE_CREATE_ACTIVITY_INTERNAL = "robocode.createActivity.internal" # Actually calls rcc to create the activity.
ROBOCODE_UPLOAD_ACTIVITY_TO_CLOUD = "robocode.uploadActivityToCloud" # Upload activity package to the cloud.
ROBOCODE_LOCAL_LIST_ACTIVITIES_INTERNAL = "robocode.localListActivities.internal" # Lists the activities currently available in the workspace.
ROBOCODE_IS_LOGIN_NEEDED_INTERNAL = "robocode.isLoginNeeded.internal" # Checks if the user is already logged in.
ROBOCODE_CLOUD_LOGIN_INTERNAL = "robocode.cloudLogin.internal" # Logs into Robocloud.
ROBOCODE_CLOUD_LIST_WORKSPACES_INTERNAL = "robocode.cloudListWorkspaces.internal" # Lists the workspaces available for the user (in the cloud).
ROBOCODE_UPLOAD_TO_NEW_ACTIVITY_INTERNAL = "robocode.uploadToNewActivity.internal" # Uploads an activity package as a new activity package in the cloud.
ROBOCODE_UPLOAD_TO_EXISTING_ACTIVITY_INTERNAL = "robocode.uploadToExistingActivity.internal" # Uploads an activity package as an existing activity package in the cloud.
ALL_SERVER_COMMANDS = [
ROBOCODE_GET_PLUGINS_DIR,
ROBOCODE_LIST_ACTIVITY_TEMPLATES_INTERNAL,
ROBOCODE_CREATE_ACTIVITY_INTERNAL,
ROBOCODE_LOCAL_LIST_ACTIVITIES_INTERNAL,
ROBOCODE_IS_LOGIN_NEEDED_INTERNAL,
ROBOCODE_CLOUD_LOGIN_INTERNAL,
ROBOCODE_CLOUD_LIST_WORKSPACES_INTERNAL,
ROBOCODE_UPLOAD_TO_NEW_ACTIVITY_INTERNAL,
ROBOCODE_UPLOAD_TO_EXISTING_ACTIVITY_INTERNAL,
]
import urllib2
import threading
from bs4 import BeautifulSoup
import re
import json
import sys
import os
import django
from stock_list import getlist, getLSEList
from extract_stock_info import get_info, getLSEInfo
from extract_stock_history import get_historical_info
from extract_sector_history import get_sector_history, get_sector_dict
from extract_stock_news import get_stock_news
from extract_NT_transactions import get_NT_transactions
import time
from pymongo import MongoClient
import warnings
import exceptions
warnings.filterwarnings("ignore", category=exceptions.RuntimeWarning, module='django.db.backends.sqlite3.base', lineno=53)
if __name__ == '__main__':
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../MADjangoProject'))
if not path in sys.path:
sys.path.insert(1, path)
del path
os.environ['DJANGO_SETTINGS_MODULE'] = 'MADjangoProject.settings'
django.setup()
from market.models import Stock, StockHistory, SectorHistory
sec_dict = get_sector_dict()
    print 'Fetching Indices...'
ALL_Stocks = getLSEList(collection=Stock)
def get_share_info():
for share in ALL_Stocks:
print 'Fetching info of ' + share['name']
            info = getLSEInfo(share['query'], share['symbol'], collection=Stock, sector_dict=sec_dict)
print 'Distributing Jobs ...'
threads = []
# callables = [get_nt]
callables = [get_share_info]
for f in callables:
t = threading.Thread(target=f)
t.setDaemon(True)
threads.append(t)
t.start()
for t in threads:
t.join()
import Tkinter as tk
class NatnetView:
def __init__(self, parent, reader):
self.parent = parent
self.reader = reader
self.setup()
def __del__(self):
self.destroy()
def destroy(self):
self.frame.grid_forget()
def setup(self):
# container
self.frame = tk.Frame(self.parent, padx=10, pady=10)
# form elements
self.host_label = tk.Label(self.frame, text="Natnet Host IP")
self.host_entry = tk.Entry(self.frame, width=25)
self.multicast_label = tk.Label(self.frame, text="Multicast IP")
self.multicast_entry = tk.Entry(self.frame, width=25)
self.port_label = tk.Label(self.frame, text="NatNet Port")
self.port_entry = tk.Entry(self.frame, width=5)
# status element
self.connection_label = tk.Label(self.frame, text='')
self.error_label = tk.Label(self.frame, text='')
# buttons
self.connect_button = tk.Button(self.frame, text='(re-)connect', command=self.onConnectButton)
self.disconnect_button = tk.Button(self.frame, text='disconnect', command=self.onDisconnectButton)
# grid/positions
self.frame.grid()
# self.file_label.grid(column=0, row=0, columnspan=3)
# self.time_label.grid(column=1, row=1)
# self.load_button.grid(column=0, row=2)
# self.startstop_button.grid(column=1, row=2)
# self.quitButton.grid(column=2, row=2)
self.host_label.grid(column=0, row=0, sticky=tk.E)
self.host_entry.grid(column=1, row=0, sticky=tk.W)
self.multicast_label.grid(column=0, row=1, sticky=tk.E)
self.multicast_entry.grid(column=1, row=1, sticky=tk.W)
self.port_label.grid(column=0, row=2, sticky=tk.E)
self.port_entry.grid(column=1, row=2, sticky=tk.W)
self.connection_label.grid(column=0, row=3, columnspan=3, padx=10, pady=10)
self.error_label.grid(column=0, row=4, columnspan=3, padx=10, pady=10)
self.connect_button.grid(column=0, row=5, sticky=tk.E)
self.disconnect_button.grid(column=1, row=5, sticky=tk.W)
# initialize
self.host_entry.insert(0, self.reader.host)
if self.reader.multicast:
self.multicast_entry.insert(0, str(self.reader.multicast))
self.port_entry.insert(0, str(self.reader.port))
self.reader.connectEvent += self.updateConnectionStatus
self.reader.connectionLostEvent += self.updateConnectionStatus
self.reader.connectionStatusUpdateEvent += self.updateConnectionStatus
self.updateConnectionStatus(self.reader)
def onConnectButton(self):
self.reader.stop()
multicast = self.multicast_entry.get()
if multicast == '':
multicast = None
self.reader.configure(host=self.host_entry.get(), port=self.port_entry.get(), multicast=multicast)
self.reader.start()
def onDisconnectButton(self):
self.reader.stop()
def updateConnectionStatus(self, reader):
        if not reader.connected:
self.connection_label.config(text="Disconnected")
self.error_label.config(text='')
return
self.connection_label.config(text=self.connectionInfo(reader))
err = reader.connection_error if reader.connection_error else ''
self.error_label.config(text=err)
def connectionInfo(self, reader):
if reader.multicast:
return 'Connected to '+str(reader.host)+'@'+str(reader.port)+' ('+reader.multicast+')'
return 'Connected to '+str(reader.host)+'@'+str(reader.port)
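# A minimal embedding sketch (the reader object is assumed to provide the
# host/port/multicast attributes and the event hooks used above):
#
#   root = tk.Tk()
#   view = NatnetView(root, reader)
#   root.mainloop()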
import gmpy2
from gmpy2 import (
mpz,
powmod,
mul,
invert,
)
B = 2 ** 20
p = mpz('13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084171')
g = mpz('11717829880366207009516117596335367088558084999998952205599979459063929499736583746670572176471460312928594829675428279466566527115212748467589894601965568')
h = mpz('3239475104050450443565264378728065788649097520952449527834792452971981976143292558073856937958553180532878928001494706097394108577585732452307673444020333')
hash_table = dict()
for x_1 in range(B + 1):
key = mul(h, invert(powmod(g, x_1, p), p)) % p
value = x_1
hash_table[key] = value
base = powmod(g, B, p)
for x_0 in range(B + 1):
target = powmod(base, x_0, p)
if target in hash_table:
break
x = x_0 * B + hash_table[target]
print('x: {}'.format(x))
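# Sanity check: the meet-in-the-middle split x = x_0 * B + x_1 (found by
# matching h * g^(-x_1) against (g^B)^(x_0) mod p) must reproduce h.
assert powmod(g, mpz(x), p) == h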
# Shared state (the Borg / monostate pattern)
class Borg:
__shared_state = {"1":"2"}
    def __init__(self):
        # Bind the instance dict to the class-level shared state first, so
        # every attribute assignment below lands in the shared dict.
        self.__dict__ = self.__shared_state
        self.x = 1
b = Borg()
b1 = Borg()
b.x = 4
print("Borg Object b:",b)
print("Borg Object b1:",b1)
print("Object state b:",b.__dict__)
print("Object state b1:",b1.__dict__)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 25 06:10:44 2018
@author: Kazuki
"""
import numpy as np
import pandas as pd
from tqdm import tqdm
import gc, os
from collections import defaultdict
import sys
sys.path.append(f'/home/{os.environ.get("USER")}/PythonLibrary')
#import lgbextension as ex
import lightgbm as lgb
from multiprocessing import cpu_count
from glob import glob
import utils, utils_cat
utils.start(__file__)
#==============================================================================
SEED = 71
LOOP = 3
NROUND = 4680
FEATURE_SIZE = 700
SUBMIT_FILE_PATH = '../output/725-1.csv.gz'
COMMENT = f'CV auc-mean(7 fold): 0.804265 + 0.00358 round: {NROUND} all+nejumi'
EXE_SUBMIT = True
param = {
'objective': 'binary',
'metric': 'auc',
'learning_rate': 0.01,
'max_depth': 6,
'num_leaves': 63,
'max_bin': 255,
'min_child_weight': 10,
'min_data_in_leaf': 150,
'reg_lambda': 0.5, # L2 regularization term on weights.
'reg_alpha': 0.5, # L1 regularization term on weights.
'colsample_bytree': 0.9,
'subsample': 0.9,
# 'nthread': 32,
'nthread': cpu_count(),
'bagging_freq': 1,
'verbose':-1,
# 'seed': SEED
}
np.random.seed(SEED)
# =============================================================================
# load train
# =============================================================================
imp = pd.read_csv('LOG/imp_801_imp_lgb_onlyMe.py-2.csv')
imp['split'] /= imp['split'].max()
imp['gain'] /= imp['gain'].max()
imp['total'] = imp['split'] + imp['gain']
imp.sort_values('total', ascending=False, inplace=True)
files = ('../feature/train_' + imp.head(FEATURE_SIZE).feature + '.f').tolist()
#files = utils.get_use_files(files, True)
X = pd.concat([
pd.read_feather(f) for f in tqdm(files, mininterval=60)
], axis=1)
y = utils.read_pickles('../data/label').TARGET
if X.columns.duplicated().sum()>0:
raise Exception(f'duplicated!: { X.columns[X.columns.duplicated()] }')
print('no dup :) ')
print(f'X.shape {X.shape}')
gc.collect()
CAT = list( set(X.columns)&set(utils_cat.ALL))
print(f'category: {CAT}')
keys = sorted([c.split('_')[0] for c in X.columns])
di = defaultdict(int)
for k in keys:
di[k] += 1
for k,v in di.items():
print(f'{k}: {v}')
dtrain = lgb.Dataset(X, y,
categorical_feature=CAT)
COL = X.columns.tolist()
X.head().to_csv(SUBMIT_FILE_PATH.replace('.csv', '_X.csv'),
index=False, compression='gzip')
del X, y; gc.collect()
# =============================================================================
# training
# =============================================================================
models = []
for i in range(LOOP):
print(f'LOOP: {i}')
gc.collect()
param.update({'seed':np.random.randint(9999)})
model = lgb.train(param, dtrain, NROUND,
categorical_feature=CAT)
# model.save_model(f'lgb{i}.model')
models.append(model)
del dtrain; gc.collect()
"""
models = []
for i in range(LOOP):
bst = lgb.Booster(model_file=f'lgb{i}.model')
models.append(bst)
imp = ex.getImp(models)
"""
# =============================================================================
# test
# =============================================================================
files = ('../feature/test_' + imp.head(FEATURE_SIZE).feature + '.f').tolist()
dtest = pd.concat([
pd.read_feather(f) for f in tqdm(files, mininterval=60)
], axis=1)[COL]
sub = pd.read_pickle('../data/sub.p')
gc.collect()
label_name = 'TARGET'
sub[label_name] = 0
for model in models:
y_pred = model.predict(dtest)
sub[label_name] += pd.Series(y_pred).rank()
sub[label_name] /= LOOP
sub[label_name] /= sub[label_name].max()
sub['SK_ID_CURR'] = sub['SK_ID_CURR'].map(int)
sub.to_csv(SUBMIT_FILE_PATH, index=False, compression='gzip')
# =============================================================================
# submission
# =============================================================================
if EXE_SUBMIT:
print('submit')
utils.submit(SUBMIT_FILE_PATH, COMMENT)
#==============================================================================
utils.end(__file__)
# flake8: noqa
'''
All step-related classes and factories
'''
from .base_steps import (
BaseStep,
BaseStepFactory,
BaseValidation,
)
from .steps import TestStep
from .outputs import OutputValueStep
from .steps_aggregator import StepsAggregator
from .validations import (
Validation,
XPathValidation,
URLValidation
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import math
# Respect seigot
class Waypoint:
way_point = 0
def __init__(self, path):
self.points = []
# self.way_point = 0
with open(path) as f:
lines = csv.reader(f)
for l in lines:
point = [float(i) for i in l]
# print(point)
self.points.append(point[0:3])
def get_next_waypoint(self):
Waypoint.way_point = Waypoint.way_point + 1
        # From the second lap onward, ideally target the waypoints the opponent has captured.
if Waypoint.way_point == len(self.points):
Waypoint.way_point = 0
print('Next Lap')
return self.points[Waypoint.way_point][0:3]
def get_current_waypoint(self):
return self.points[Waypoint.way_point][0:3]
    # Perform the following behavior only when the enemy is judged to be nearby.
    # How should we handle returning to the preset route??
    # Just head to the waypoint whose coordinates are nearest??
def get_enemy_waypoints(self):
pass
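# A minimal usage sketch ('route.csv' is a hypothetical file with one
# comma-separated x,y,theta row per waypoint):
#
#   wp = Waypoint('route.csv')
#   print(wp.get_current_waypoint())
#   print(wp.get_next_waypoint())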
import typing
from ariadne import SchemaDirectiveVisitor
from ariadne.types import GraphQLResolveInfo
from graphql import default_field_resolver
from pytimeparse import parse as parse_duration
from .utils.rate_limit import RateLimit, TooManyRequests
class DateDirective(SchemaDirectiveVisitor):
def visit_field_definition(self, field, object_type):
date_format = self.args.get("format")
original_resolver = field.resolve or default_field_resolver
def resolve_formatted_date(obj, info, **kwargs):
result = original_resolver(obj, info, **kwargs)
if result is None:
return None
if date_format:
return result.strftime(date_format)
return result.isoformat()
field.resolve = resolve_formatted_date
return field
class AuthDirective(SchemaDirectiveVisitor):
def visit_field_definition(self, field, object_type):
original_resolver = field.resolve or default_field_resolver
        def resolve_auth(obj, info, **kwargs):
            if not info.context["request"].scope["user"].is_authenticated:
                raise Exception("unauthenticated user")
            result = original_resolver(obj, info, **kwargs)
            return result
        field.resolve = resolve_auth
return field
class PermissionsDirective(SchemaDirectiveVisitor):
def visit_field_definition(self, field, object_type):
original_resolver = field.resolve or default_field_resolver
        def resolve_permissions(obj, info, **kwargs):
            # Currently mirrors the auth check; field-level permission logic
            # is not implemented yet.
            if not info.context["request"].scope["user"].is_authenticated:
                raise Exception("unauthenticated user")
            result = original_resolver(obj, info, **kwargs)
            return result
        field.resolve = resolve_permissions
        return field
def visit_object(self, object_type):
return object_type
class RateLimitDirective(SchemaDirectiveVisitor):
def visit_field_definition(self, field, object_type):
max_ = self.args.get("max", 10)
window = parse_duration(self.args.get("window", "10m"))
message = self.args.get("message", "You are doing that too often.")
original_resolver = field.resolve or default_field_resolver
def resolve_rate_limited(obj: typing.Any, info: GraphQLResolveInfo, **kwargs):
if info.context["request"]["user"].is_authenticated:
client = info.context["request"].scope["user"].id
else:
ip_address, port = info.context["request"]["client"]
client = ip_address
try:
with RateLimit(
resource=info.field_name,
client=client,
max_requests=max_,
expire=window,
):
result = original_resolver(obj, info, **kwargs)
return result
except TooManyRequests:
raise TooManyRequests(message)
field.resolve = resolve_rate_limited
return field
directives = {
"date": DateDirective,
"auth": AuthDirective,
"permissions": PermissionsDirective,
"rateLimit": RateLimitDirective,
}
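# A hedged wiring sketch: ariadne's make_executable_schema accepts this
# mapping through its `directives` keyword. The SDL must declare matching
# @date, @auth, @permissions and @rateLimit directives; type_defs and query
# below are assumed to exist elsewhere:
#
#   from ariadne import make_executable_schema
#   schema = make_executable_schema(type_defs, query, directives=directives)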
import random
import math
import operator
from collections import Counter, defaultdict
import twokenize
import peewee
from models import database, SMS, Contact
class NaiveBayes(object):
def __init__(self):
self.doccounts = Counter()
self.classcounts = Counter()
self.wordcounts = defaultdict(lambda: Counter())
self.vocab = set()
self.priors = {}
self._condprobs = defaultdict(lambda: dict())
def calculate_probs(self):
for c in self.doccounts:
self.priors[c] = (1.0 * self.doccounts[c]) / \
sum(self.doccounts.values())
    def get_condprob(self, word, class_):
        # P(word | class) with add-one (Laplace) smoothing, memoized in
        # self._condprobs.
        if not self._condprobs[word].get(class_):
            num = self.wordcounts[class_].get(word, 0) + 1.0
            denom = len(self.vocab) + 1.0 + \
                sum(self.wordcounts[class_].values())
            self._condprobs[word][class_] = num / denom
        return self._condprobs[word][class_]
def classify(self, words):
if not self.priors:
self.calculate_probs()
score = {}
for c in self.priors:
score[c] = math.log(self.priors[c])
for w in words:
score[c] += math.log(self.get_condprob(w, c))
return max(score.iteritems(), key=operator.itemgetter(1))[0]
def add_example(self, klass, words):
self.doccounts[klass] += 1
self.vocab.update(words)
self.classcounts[klass] += len(words)
self.wordcounts[klass].update(words)
def split_set(s, SIZE):
a = set(random.sample(s, int(SIZE * len(s))))
b = s - a
return a, b
def split_me_not_me(TRAIN_SIZE=0.9):
train, test = {}, {}
not_me = SMS.select().where(from_me=False)
me = SMS.select().where(from_me=True)
not_me = set(not_me)
me = set(me)
train['me'], test['me'] = split_set(me, TRAIN_SIZE)
train['not_me'], test['not_me'] = split_set(not_me, TRAIN_SIZE)
return train, test
def recipient_is(name, TRAIN=0.9):
#: TRAIN = percent of the data to have in training set
train = {}
test = {}
person = Contact.get(name=name)
recipient = set(SMS.select().where(contact=person).where(from_me=False))
not_recipient = set(SMS.select().where(contact__ne=person)
.where(from_me=False))
train[person.name], test[person.name] = split_set(recipient, TRAIN)
train['not_' + person.name], test['not_' + person.name] = \
split_set(not_recipient, TRAIN)
return train, test
def people_with_many_texts(n, TRAIN=0.9):
# TRAIN = percent of data to have in training set
contacts = peewee.RawQuery(Contact, '''SELECT * from sms, contact
where from_me=0 and contact.id=contact_id GROUP BY contact_id
HAVING count(*) >= ?;''', n)
data = {}
for c in contacts:
data[c.name] = set(SMS.select().where(contact=c))
train = {}
test = {}
for c in data:
train[c], test[c] = split_set(data[c], TRAIN)
print 'There are %d people with >= %d texts.' % (len(data), n)
return train, test
def tokenize(words):
return twokenize.tokenize(words)
def build_classifier(train):
n = NaiveBayes()
for klass in train:
for sms in train[klass]:
n.add_example(klass, tokenize(sms.text))
n.calculate_probs()
# print 'PRIORS ARE', n.priors
print 'EXPECTED ACCURACY:', max(n.priors.values())
return n
def run_test(classifier, test):
correct = 0
incorrect = 0
for klass in test:
for sms in test[klass]:
classification = classifier.classify(tokenize(sms.text))
if classification == klass:
correct += 1
else:
incorrect += 1
accuracy = correct / float(correct + incorrect)
print 'Classified %d correctly and %d incorrectly for an accuracy of %f.' \
% (correct, incorrect, accuracy)
return accuracy
def run_naive_bayes(train, test):
classifier = build_classifier(train)
run_test(classifier, test)
def interactive(classifier):
try:
while True:
print 'CLASSIFY YOUR MESSAGE:'
text = raw_input('enter a text: ')
print 'result:', classifier.classify(tokenize(text))
print
except KeyboardInterrupt:
database.close()
if __name__ == '__main__':
database.connect()
train, test = split_me_not_me(0.9)
print 'ME AND NOT ME:'
run_naive_bayes(train, test)
threshold = 200
print
print 'PEOPLE WITH OVER %d TEXTS:' % threshold
run_naive_bayes(*people_with_many_texts(threshold))
print
# train, test = split_me_not_me(1.0)
# train, test = people_with_many_texts(threshold)
# classifier = build_classifier(train)
# interactive(classifier)
database.close()
from LucidDynamodb import DynamoDb
from LucidDynamodb.exceptions import (
TableNotFound
)
import logging
logging.basicConfig(level=logging.INFO)
if __name__ == "__main__":
try:
db = DynamoDb()
db.delete_table(table_name='dev_jobs')
logging.info("Table deleted successfully")
table_names = db.read_all_table_names()
logging.info(f"Table names: {table_names}")
except TableNotFound as e:
logging.error(f"Table delete operation failed {e}")
"""
dineshsonachalam@macbook examples % python 14-delete-a-table.py
INFO:botocore.credentials:Found credentials in environment variables.
INFO:root:Table deleted successfully
INFO:root:Table names: ['CertMagic', 'dev_test', 'kp-config-v1', 'test-1']
"""
from .queries import *
import Transformation
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
def calc_cov_ellipse(a, b, d):
s = np.array([[a, b], [b, d]])
(w, v) = np.linalg.eig(s)
angle = np.degrees(np.arctan2(v[1, 0], v[0, 0]))
return 2*np.sqrt(w[0]), 2*np.sqrt(w[1]), angle
class SubPlot:
def __init__(self, range, offset, rowspan=1, colspan=1):
self.range = range
self.offset = offset
self.rowspan = rowspan
self.colspan = colspan
def plot2grid(self, pos):
plt.subplot2grid(self.range,
[self.offset[0] + pos[0], self.offset[1] + pos[1]],
rowspan=self.rowspan, colspan=self.colspan)
def _plot_covariance(sub_plot, t_mu_cvec, t_cov, ts_cvec):
std = np.sqrt(np.diag(t_cov))
if ts_cvec is not None:
tnp_dev = ts_cvec - t_mu_cvec
if ts_cvec is None:
max_abs = 2.5 * std
else:
max_abs = np.max(np.abs(tnp_dev), axis=1)
for irow in range(6):
for icol in range(6):
sub_plot.plot2grid([irow, icol])
plt.xlim([-max_abs[icol], max_abs[icol]])
plt.ylim([-max_abs[irow], max_abs[irow]])
if ts_cvec is not None:
plt.plot(tnp_dev[icol, :], tnp_dev[irow, :], '.k')
ax = plt.gca()
plt.setp(ax.get_xticklabels(), visible=(irow == 5))
plt.setp(ax.get_yticklabels(), visible=(icol == 0))
# plot covariance ellipse
if icol != irow:
plt.plot(std[icol] * 2, 0., '.r')
plt.plot(0., std[irow] * 2, '.r')
width, height, angle = calc_cov_ellipse(t_cov[icol, icol], t_cov[irow, icol], t_cov[irow, irow])
ellipse = mpl.patches.Ellipse(xy=[0., 0.], width=width * 2, height=height * 2, angle=angle)
ax = plt.gca()
ax.add_artist(ellipse)
def plot_transformation_covariance(title, corners_f_image, t_mu_cvec, t_cov, ts_cvec):
fig = plt.figure()
fig.suptitle(title)
# The image representation
plt.subplot2grid([6, 8], [2, 0], rowspan=2, colspan=2)
plt.xlim([0, 920])
plt.ylim([700, 0])
# show the corners in the image
for i in range(4):
plt.plot(corners_f_image[i*2], corners_f_image[i*2+1], '.r')
# The covariances.
tnp = ts_cvec
means = t_mu_cvec
tnp_dev = tnp - means
std = np.sqrt(np.diag(t_cov))
np_std = np.std(tnp, axis=1)
maxs = np.max(tnp, axis=1)
mins = np.min(tnp, axis=1)
max_abs = np.max(np.abs(tnp_dev), axis=1)
means_label = ("t_world_xxx mu, std:\nroll {0[0]}, {1[0]}\npitch {0[1]}, {1[1]}\nyaw {0[2]}, {1[2]}\n" + \
"x {0[3]}, {1[3]}\ny {0[4]}, {1[4]}\nz {0[5]}, {1[5]}").format(means, std)
plt.subplot2grid([6, 8], [0, 0])
plt.axis([0, 1, 0, 1])
plt.text(0, 0.75, means_label, verticalalignment='top')
ax = plt.gca()
ax.set_axis_off()
_plot_covariance(SubPlot([6, 8], [0, 2]), t_mu_cvec, t_cov, ts_cvec)
plt.show()
def _plot_view(sub_plot, corners_f_images):
sub_plot.plot2grid([0, 0])
plt.xlim([0, 920])
plt.ylim([700, 0])
# show the corners in the image
for i in range(len(corners_f_images)):
for r in range(4):
plt.plot(corners_f_images[i][r*2], corners_f_images[i][r*2+1], '.r')
def _plot_std_values(sub_plot, cov):
sub_plot.plot2grid([0, 0])
std = np.sqrt(np.diag(cov))
cov_label = ("std:\nroll {:5f}\npitch {:5f}\nyaw {:5f}\n" +
"x {:5f}\ny {:5f}\nz {:5f}").format(std[0], std[1], std[2],
std[3], std[4], std[5])
plt.axis([0, 1, 0, 1])
plt.text(0, 0.75, cov_label, verticalalignment='top')
ax = plt.gca()
ax.set_axis_off()
def plot_view_and_covariance(title, corners_f_images, com, do_show=True):
fig = plt.figure()
fig.suptitle(title)
_plot_view(SubPlot([6, 8], [2, 0], rowspan=2, colspan=2), corners_f_images)
_plot_std_values(SubPlot([6, 8], [0, 0]), com.cov)
_plot_covariance(SubPlot([6, 8], [0, 2]), com.mu, com.cov, com.samples)
if do_show:
        plt.show()
from django.db import models
from django.utils.translation import gettext_lazy as _
from common.models import CommonData, ErrorMessages
from jobs.models import JobOffer, Profile
class Comment(CommonData):
model_name = 'Comment'
profile: Profile = models.ForeignKey(
to=Profile,
on_delete=models.PROTECT,
error_messages=ErrorMessages.get_field(
model=model_name, field='profile_id')
)
job_offer: JobOffer = models.ForeignKey(
to=JobOffer,
on_delete=models.PROTECT,
error_messages=ErrorMessages.get_field(
model=model_name, field='job_offer_id')
)
description: str = models.TextField(
verbose_name=_('Description'),
error_messages=ErrorMessages.get_field(
model=model_name, field='description')
)
def __str__(self):
name: str = self.description[:30]
return f'{self.profile} - {self.job_offer} : {name}'
#!/usr/bin/env python
# -*- coding: utf-8 -*-#
# MIT License
#
# Copyright (c) 2019 Pim Witlox
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
from datetime import timedelta, datetime
from random import shuffle
from dateutil import parser
class Scheduler(object):
"""
Simple Scheduler Mechanism
"""
logger = logging.getLogger(__name__)
def __init__(self, storage, staleness):
"""
Our simplistic CronJob scheduler
:param storage: storage class
:param staleness: amount of seconds of non-communication to declare a node as stale
"""
self.storage = storage
self.staleness = staleness
def active_nodes(self):
for node in self.storage.cluster_state():
if datetime.utcnow() - parser.parse(node.time) < timedelta(seconds=self.staleness):
yield node
else:
node.state = 'disconnected'
yield node
def check_cluster_state(self):
"""
check cluster state
:return False if invalid otherwise True
"""
left = list(self.storage.cluster_state())
right = list(self.active_nodes())
inactive_nodes = [i for i in left + right if i not in left or i not in right]
for job in self.storage.cluster_jobs:
if not job.assigned_to:
self.logger.info("detected unassigned job ({0})".format(job.command))
self.re_balance()
return False
            # Jobs store the node ip (see re_balance), so compare against ips.
            if job.assigned_to in [n.ip for n in inactive_nodes]:
self.logger.warning("detected job ({0}) on inactive node".format(job.command))
self.re_balance()
return False
return True
def re_balance(self):
"""
Redistribute CronJobs over the cluster
"""
def partition(lst, keys):
"""
divide a list over a given set of keys
:param lst: list to split in roughly equals chunks
:param keys: keys for the chunks
:return: dictionary of keys with list chunks
"""
shuffle(lst)
return {keys[i]: lst[i::len(keys)] for i in range(len(keys))}
def first_key_by_value(dct, jb):
"""
find the first key in a dictionary where jb is in the values
:param dct: dictionary to analyse
:param jb: value to search for
:return: key or None
"""
for n, jbs in dct.items():
if jb in jbs:
return n
return None
nodes = [n for n in self.active_nodes()]
jobs = list(self.storage.cluster_jobs)
partitions = partition(jobs, nodes)
for job in jobs:
node = first_key_by_value(partitions, job)
if not node:
self.logger.error("could not find node assignment for job {0}".format(job))
else:
self.logger.info("assigning job {0} to node {1}".format(job, node.ip))
job.assigned_to = node.ip
self.storage.cluster_jobs = jobs
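# A minimal usage sketch (the storage object is assumed to expose
# cluster_state() and a mutable cluster_jobs collection, per the calls above):
#
#   scheduler = Scheduler(storage, staleness=30)
#   if not scheduler.check_cluster_state():
#       pass  # re_balance() was already triggered internally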
from hello.hello import print_hello
from world.world import print_world
def main():
print_hello()
print_world()
return
if __name__ == '__main__':
main()
from django.contrib.sites.models import Site
from .settings import CMS_TEMPLATES
from django.contrib.auth.models import User
# Default page settings. Not used for installation.
# Content for adding a page.
# Still under development.
title = 'Django CMS setup'
description = 'Open Source programming at its best'
template = CMS_TEMPLATES[0][0]
language = 'en'
menu_title = title
slug = title.lower().replace(" ", "-")
meta_description = description
created_by = User.objects.get(id=1).get_full_name()
in_navigation = True
published = True
site = Site.objects.get(id=1)
xframe_options = 3
page_title = title
image_path = 'site_server/static/site-images/'
# Content can be added here for initial setup
blogs = {
'Blog1': {
'title':'Manage',
'subtitle': 'This content was added with the setup_content_server.py script',
'abstract': '<section class="features-section-8 relative background-light"><div class="container"><div class="row section-separator"><div class="col-md-12"><p class="mb-4"><img alt="" class="img-fluid" src="/static/images/background-1.jpg" width="96%" /></p><p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Reiciendis, eius mollitia suscipit, quisquam doloremque distinctio perferendis et doloribus unde architecto optio laboriosam porro adipisci sapiente officiis nemo accusamus ad praesentium? Esse minima nisi et. Dolore perferendis, enim praesentium omnis, iste doloremque quia officia optio deserunt molestiae voluptates soluta architecto tempora.</p><p>Molestiae cupiditate inventore animi, maxime sapiente optio, illo est nemo veritatis repellat sunt doloribus nesciunt! Minima laborum magni reiciendis qui voluptate quisquam voluptatem soluta illo eum ullam incidunt rem assumenda eveniet eaque sequi deleniti tenetur dolore amet fugit perspiciatis ipsa, odit. Nesciunt dolor minima esse vero ut ea, repudiandae suscipit!</p><h2 class="mb-3 mt-5">Molestiae cupiditate inventore animi, maxime sapiente optio</h2><p>Temporibus ad error suscipit exercitationem hic molestiae totam obcaecati rerum, eius aut, in. Exercitationem atque quidem tempora maiores ex architecto voluptatum aut officia doloremque. Error dolore voluptas, omnis molestias odio dignissimos culpa ex earum nisi consequatur quos odit quasi repellat qui officiis reiciendis incidunt hic non? Debitis commodi aut, adipisci.</p><p>Quisquam esse aliquam fuga distinctio, quidem delectus veritatis reiciendis. Nihil explicabo quod, est eos ipsum. Unde aut non tenetur tempore, nisi culpa voluptate maiores officiis quis vel ab consectetur suscipit veritatis nulla quos quia aspernatur perferendis, libero sint. Error, velit, porro. Deserunt minus, quibusdam iste enim veniam, modi rem maiores.</p><p>Odit voluptatibus, eveniet vel nihil cum ullam dolores laborum, quo velit commodi rerum eum quidem pariatur! Quia fuga iste tenetur, ipsa vel nisi in dolorum consequatur, veritatis porro explicabo soluta commodi libero voluptatem similique id quidem? Blanditiis voluptates aperiam non magni. Reprehenderit nobis odit inventore, quia laboriosam harum excepturi ea.</p><p>Adipisci vero culpa, eius nobis soluta. Dolore, maxime ullam ipsam quidem, dolor distinctio similique asperiores voluptas enim, exercitationem ratione aut adipisci modi quod quibusdam iusto, voluptates beatae iure nemo itaque laborum. Consequuntur et pariatur totam fuga eligendi vero dolorum provident. Voluptatibus, veritatis. Beatae numquam nam ab voluptatibus culpa, tenetur recusandae!</p><p>Voluptas dolores dignissimos dolorum temporibus, autem aliquam ducimus at officia adipisci quasi nemo a perspiciatis provident magni laboriosam repudiandae iure iusto commodi debitis est blanditiis alias laborum sint dolore. Dolores, iure, reprehenderit. Error provident, pariatur cupiditate soluta doloremque aut ratione. Harum voluptates mollitia illo minus praesentium, rerum ipsa debitis, inventore?</p><div class="tag-widget post-tag-container mb-5 mt-5"> </div></div></div></div></section>',
'image':'one.jpg',
'image_path': image_path,
},
'Blog2': {
'title': 'Django CMS',
'subtitle': 'Now point and click and edit me',
'abstract': '<section class="features-section-8 relative background-light"><div class="container"><div class="row section-separator"><div class="col-md-12"><p class="mb-4"><img alt="" class="img-fluid" src="/static/images/background-2.jpg" width="96%" /></p><p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Reiciendis, eius mollitia suscipit, quisquam doloremque distinctio perferendis et doloribus unde architecto optio laboriosam porro adipisci sapiente officiis nemo accusamus ad praesentium? Esse minima nisi et. Dolore perferendis, enim praesentium omnis, iste doloremque quia officia optio deserunt molestiae voluptates soluta architecto tempora.</p><p>Molestiae cupiditate inventore animi, maxime sapiente optio, illo est nemo veritatis repellat sunt doloribus nesciunt! Minima laborum magni reiciendis qui voluptate quisquam voluptatem soluta illo eum ullam incidunt rem assumenda eveniet eaque sequi deleniti tenetur dolore amet fugit perspiciatis ipsa, odit. Nesciunt dolor minima esse vero ut ea, repudiandae suscipit!</p><h2 class="mb-3 mt-5">Molestiae cupiditate inventore animi, maxime sapiente optio</h2><p>Temporibus ad error suscipit exercitationem hic molestiae totam obcaecati rerum, eius aut, in. Exercitationem atque quidem tempora maiores ex architecto voluptatum aut officia doloremque. Error dolore voluptas, omnis molestias odio dignissimos culpa ex earum nisi consequatur quos odit quasi repellat qui officiis reiciendis incidunt hic non? Debitis commodi aut, adipisci.</p><p>Quisquam esse aliquam fuga distinctio, quidem delectus veritatis reiciendis. Nihil explicabo quod, est eos ipsum. Unde aut non tenetur tempore, nisi culpa voluptate maiores officiis quis vel ab consectetur suscipit veritatis nulla quos quia aspernatur perferendis, libero sint. Error, velit, porro. Deserunt minus, quibusdam iste enim veniam, modi rem maiores.</p><p>Odit voluptatibus, eveniet vel nihil cum ullam dolores laborum, quo velit commodi rerum eum quidem pariatur! Quia fuga iste tenetur, ipsa vel nisi in dolorum consequatur, veritatis porro explicabo soluta commodi libero voluptatem similique id quidem? Blanditiis voluptates aperiam non magni. Reprehenderit nobis odit inventore, quia laboriosam harum excepturi ea.</p><p>Adipisci vero culpa, eius nobis soluta. Dolore, maxime ullam ipsam quidem, dolor distinctio similique asperiores voluptas enim, exercitationem ratione aut adipisci modi quod quibusdam iusto, voluptates beatae iure nemo itaque laborum. Consequuntur et pariatur totam fuga eligendi vero dolorum provident. Voluptatibus, veritatis. Beatae numquam nam ab voluptatibus culpa, tenetur recusandae!</p><p>Voluptas dolores dignissimos dolorum temporibus, autem aliquam ducimus at officia adipisci quasi nemo a perspiciatis provident magni laboriosam repudiandae iure iusto commodi debitis est blanditiis alias laborum sint dolore. Dolores, iure, reprehenderit. Error provident, pariatur cupiditate soluta doloremque aut ratione. Harum voluptates mollitia illo minus praesentium, rerum ipsa debitis, inventore?</p><div class="tag-widget post-tag-container mb-5 mt-5"> </div></div></div></div></section>',
'image': 'two.jpg',
'image_path': image_path,
},
'Blog3': {
'title': 'Django Blog',
'subtitle': 'All content is a blog post and comments can be enabled',
'abstract': '<section class="features-section-8 relative background-light"><div class="container"><div class="row section-separator"><div class="col-md-12"><p class="mb-4"><img alt="" class="img-fluid" src="/static/images/background-4.jpg" width="96%" /></p><p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Reiciendis, eius mollitia suscipit, quisquam doloremque distinctio perferendis et doloribus unde architecto optio laboriosam porro adipisci sapiente officiis nemo accusamus ad praesentium? Esse minima nisi et. Dolore perferendis, enim praesentium omnis, iste doloremque quia officia optio deserunt molestiae voluptates soluta architecto tempora.</p><p>Molestiae cupiditate inventore animi, maxime sapiente optio, illo est nemo veritatis repellat sunt doloribus nesciunt! Minima laborum magni reiciendis qui voluptate quisquam voluptatem soluta illo eum ullam incidunt rem assumenda eveniet eaque sequi deleniti tenetur dolore amet fugit perspiciatis ipsa, odit. Nesciunt dolor minima esse vero ut ea, repudiandae suscipit!</p><h2 class="mb-3 mt-5">Molestiae cupiditate inventore animi, maxime sapiente optio</h2><p>Temporibus ad error suscipit exercitationem hic molestiae totam obcaecati rerum, eius aut, in. Exercitationem atque quidem tempora maiores ex architecto voluptatum aut officia doloremque. Error dolore voluptas, omnis molestias odio dignissimos culpa ex earum nisi consequatur quos odit quasi repellat qui officiis reiciendis incidunt hic non? Debitis commodi aut, adipisci.</p><p>Quisquam esse aliquam fuga distinctio, quidem delectus veritatis reiciendis. Nihil explicabo quod, est eos ipsum. Unde aut non tenetur tempore, nisi culpa voluptate maiores officiis quis vel ab consectetur suscipit veritatis nulla quos quia aspernatur perferendis, libero sint. Error, velit, porro. Deserunt minus, quibusdam iste enim veniam, modi rem maiores.</p><p>Odit voluptatibus, eveniet vel nihil cum ullam dolores laborum, quo velit commodi rerum eum quidem pariatur! Quia fuga iste tenetur, ipsa vel nisi in dolorum consequatur, veritatis porro explicabo soluta commodi libero voluptatem similique id quidem? Blanditiis voluptates aperiam non magni. Reprehenderit nobis odit inventore, quia laboriosam harum excepturi ea.</p><p>Adipisci vero culpa, eius nobis soluta. Dolore, maxime ullam ipsam quidem, dolor distinctio similique asperiores voluptas enim, exercitationem ratione aut adipisci modi quod quibusdam iusto, voluptates beatae iure nemo itaque laborum. Consequuntur et pariatur totam fuga eligendi vero dolorum provident. Voluptatibus, veritatis. Beatae numquam nam ab voluptatibus culpa, tenetur recusandae!</p><p>Voluptas dolores dignissimos dolorum temporibus, autem aliquam ducimus at officia adipisci quasi nemo a perspiciatis provident magni laboriosam repudiandae iure iusto commodi debitis est blanditiis alias laborum sint dolore. Dolores, iure, reprehenderit. Error provident, pariatur cupiditate soluta doloremque aut ratione. Harum voluptates mollitia illo minus praesentium, rerum ipsa debitis, inventore?</p><div class="tag-widget post-tag-container mb-5 mt-5"> </div></div></div></div></section>',
'image': 'three.jpg',
'image_path': image_path,
},
}
# Useful when adding images from the front end
image_sizes = {
'Small': {
'width': '400',
'height': '300',
},
'Medium': {
'width': '800',
'height': '600',
},
'Large': {
'width': '1024',
'height': '768',
}
}
developer = {
'first_name': 'Jody',
'last_name': 'Beggs',
}
AllowedSearchDomains = {
'nationalgeographic.com': {
'class_names': '',
'id_names': 'article__body'
},
'en.wikipedia.org': {
'class_names': '',
'id_names': 'mw-content-text'
},
'spaceplace.nasa.gov': {
'class_names': '',
'id_names': 'bodyContent'
},
'www.britannica.com': {
'class_names': '',
'id_names': 'ref1'
},
'www.space.com': {
'class_names': 'content-wrapper',
'id_names': ''
},
'www.sciencealert.com': {
'class_names': 'responsive-articlepage',
'id_names': ''
},
'spacecenter.org': {
'class_names': 'single-post format-standard',
'id_names': ''
},
'www.livescience.com': {
'class_names': 'content-wrapper',
'id_names': ''
},
'phys.org': {
'class_names': 'news-article',
'id_names': ''
},
'www.dw.com': {
'class_names': '',
'id_names': 'bodyContent'
},
'www.sun.org': {
'class_names': 'white-field',
'id_names': ''
},
'lco.global': {
'class_names': 'section maincontent',
'id_names': ''
},
'edition.cnn.com': {
'class_names': 'pg-rail-tall__body',
'id_names': ''
},
'www.bbc.com': {
'class_names': 'column--primary',
'id_names': ''
},
'www.nytimes.com': {
'class_names': 'StoryBodyCompanionColumn',
'id_names': ''
},
}
|
python
|
from .shared import replace_gender
#TODO At some point, I want to be able to pass only part of the subject tree
# to child snippets.
class Snippet(object):
    ''' The base snippet class that all snippets will extend.
    Responsible for listing required and optional subject data, validating a
    passed subject, and generating a bit of text in which zero or more tokens
    are filled in dynamically. '''
# Data names, in xpath format, that are recognized by this snippet
supportedData = []
# Tokens that this snippet will replace
ownTokens = []
    def __init__(self, subject, ancestorTokens=None, children=None):
        ''' Sets up a new snippet instance. '''
        # Use None defaults to avoid sharing mutable default arguments
        # between instances.
        ancestorTokens = [] if ancestorTokens is None else ancestorTokens
        children = {} if children is None else children
        # Each snippet has a list of tokens that it will replace. This base class
        # simply adds the parent's supported tokens to that list. Any tokens that
        # will be replaced at this level should be appended to the tokens list in
        # the child class's constructor.
        self.ancestorTokens = ancestorTokens
        self.children = children
# Each instance has a subject
self.subject = subject
# Each instance has a dict of tokens that are overridden by child snippets.
for token, child in children.items():
# If the token is actually supported, copy it to the instance
if token in self.ownTokens:
self.children[token] = child
else:
raise ValueError("Token, {}, not supported.".format(token))
    # Declared as a classmethod since it is called to check compatibility
    # before construction.
    @classmethod
    def is_compatible(cls, subject):
''' Returns boolean whether the subject is compatible with this snippet. '''
raise NotImplementedError("Abstract method, 'is_compatible' not implemented in child class.")
def generate_text(self):
raise NotImplementedError("Abstract method, 'generate_text' not implemented in child class.")
def render(self):
''' Primary method for actually constructing the final text.
Replaces tokens with text from child snippets if they are provided, or
        the default snippet handler if not. Replacement happens in the order
        specified in the snippet's ownTokens list. '''
# Generate text
self.generate_text()
# Replace tokens
for token in self.ownTokens:
if '/' + token in self.text: # Only replace the token if it actually needs replacing.
if token in self.children:
# This is for nesting snippets. I don't think this line has ever actually been tested.
self.text = self.text.replace('/' + token, self.children[token].render())
else:
self.text = self.text.replace('/' + token, getattr(self, 'token_' + token)())
#TODO Figure out the best way to handle the gender pronouns. For now they
        # are handled here in the base class. Idea: gender could be a separate
# snippet, and depending which token is being replaced, it will return a
# different pronoun. Or it could be a bunch of different snippets
self.text = replace_gender(self.text, self.subject['gender'])
# Return fully rendered text
return self.text
def get_all_tokens(self):
''' Returns a list of all tokens safe to return to this snippet. A safe token
        is any token that is replaced directly by this snippet or guaranteed to be
replaced by one of its ancestors.'''
return self.ownTokens + self.ancestorTokens
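# A minimal sketch of a concrete snippet (hypothetical, not part of the
# original module), showing the intended extension points: declare the tokens
# you replace in ownTokens, build self.text in generate_text, and provide a
# token_<name> handler for each token so render() can fill it in.
class NameSnippet(Snippet):
    ownTokens = ['name']

    @classmethod
    def is_compatible(cls, subject):
        return 'name' in subject

    def generate_text(self):
        self.text = "Say hello to /name."

    def token_name(self):
        return self.subject['name']

# Usage sketch, assuming replace_gender passes text through for a known gender:
# NameSnippet({'name': 'Ada', 'gender': 'female'}).render() -> "Say hello to Ada."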
|
python
|
#!/usr/bin/env python3
###############################################################
#### Assembled by: Ian M. Pendleton ######################
#### www.pendletonian.com ######################
###############################################################
# Updated December 17, 2019
### This program is designed to take all of the bonded atoms
### to the metal center and find the sterimol parameters of those
### bonded atoms. This program assumes that the vector of interest
### is from the metal center toward the phosphine (or whatever).
############################
### Be sure to adjust the metal center label and the distance cutoff in the variables
### section below. Default is to ignore hydrides (no sterimol information)
##module list###
import sys
import os
import argparse
import numpy as np
from openbabel import pybel
from sterimoltools import *
from tqdm import tqdm
#### variables ####
directory=os.path.dirname(os.path.abspath(__file__))
### User variables ###
METAL = 27 #atomic number of metal center in question for calculating tau
HYDRO_BOOL = 0 #consider hydrogens bound to the metal center or not?
DISTANCE_THRESHHOLD = 2.8 #Sets the cutoff for considering an atom "bound" that Open Babel doesn't see as bound
########File Handling - Program operates on all available XYZ files in directory#########
obatom = None
bondedlist={}
##output bonded atoms ###
def bite_angles(mol1, d_cutoff):
    ''' Calculates bite angles for 1-M-2 and 3-M-4 ligands, where atoms 1-4 are
    determined based on d_cutoff.
    Rigid and ungeneralized; careful on use!
    :param mol1: pybel molecule object (typically generated from the readfile function)
    :param d_cutoff: value of the distance cutoff in angstroms
    :returns: tuple (1-M-2 angle, 3-M-4 angle)
    '''
y=[]
for atom in mol1:
if atom.atomicnum == METAL:
global obatom
obatom = atom.OBAtom
for atom2 in mol1:
N = atom2.OBAtom
if N.GetDistance(obatom) < d_cutoff:
if atom2.atomicnum != METAL:
if atom2.atomicnum != 1:
y.append(N)
#TODO: generalize to find relevant(tm) angles -- hard generalization...
return (y[0].GetAngle(obatom, y[1]), y[2].GetAngle(obatom, y[3]))
def atomsbonded(mol1, d_cutoff):
    ''' Finds atoms proximal to the metal center and returns them as a list.
    :param mol1: pybel molecule object (typically generated from the readfile function)
    :param d_cutoff: value of the distance cutoff in angstroms
    :returns: list of atom numbers (from the specified input file) [M, atom1, atom2, ... n]
    '''
y=[]
for atom in mol1:
if atom.atomicnum == METAL:
global obatom
obatom = atom.OBAtom
y.append(atom.idx)
for atom2 in mol1:
N = atom2.OBAtom
if N.GetDistance(obatom) < d_cutoff:
if atom2.atomicnum != METAL:
if atom2.atomicnum != 1:
y.append(atom2.idx)
return y
def run_sterimol(file, atom1, atom2):
radii = 'bondi'
file_Params = calcSterimol(file, radii, atom1, atom2, True)
    lval = file_Params.lval
    B1 = file_Params.B1
    B5 = file_Params.newB5
with open('sterimol_values.csv', 'a') as f:
        print(file, ', L:,', "%.2f" % lval, ", B1:,", "%.2f" % B1, ", B5:,", "%.2f" % B5, "\n", file=f)
        # print(file.ljust(22), "%.2f" % lval, "L", "%.2f" % B1, "B1", "%.2f" % B5, "B5", file=f)
def main_pipeline(mol_obj, d_cutoff, file):
# Generate the bite angles (angle of ligands on specified metal)
#TODO: generalize for all metal bonded angles
angle_1, angle_2 = bite_angles(mol_obj, d_cutoff) # hard coded for specific 2 angle return (add more above!)
with open('biteangle_values.csv', 'a') as myfile:
        print(file, ", 1-M-2:,", angle_1, ", 3-M-4:,", angle_2, "\n", file=myfile)
bonded_atom_list = atomsbonded(mol_obj, d_cutoff)
    # Generate pairs from the original metal center (first list entry) and
    # run sterimol on each pair.
    for bonded_idx in bonded_atom_list[1:]:
        run_sterimol(file, bonded_atom_list[0], bonded_idx)
if __name__ == "__main__":
lst=os.listdir(directory)
lst.sort()
xyz_list = []
for file in lst:
if file.endswith(".xyz"):
xyz_list.append(file)
for file in xyz_list:
if os.stat(file).st_size == 0:
            print(file, "0 0")
else:
molecule_obj = next(pybel.readfile("xyz", file))
main_pipeline(molecule_obj, DISTANCE_THRESHHOLD, file)
print("Operation completed successfully, please check output files")
|
python
|
#!/usr/bin/env python
from setuptools import setup
from setuptools.command.install import install as _install
class install(_install):
def pre_install_script(self):
pass
def post_install_script(self):
pass
def run(self):
self.pre_install_script()
_install.run(self)
self.post_install_script()
if __name__ == '__main__':
setup(
name = 'task-mapper',
version = '1.0',
description = 'Task Mapper',
long_description = 'distribute tasks performed on many files on either threads or processes',
author = "Pontus Pohl",
author_email = "[email protected]",
license = '',
url = '',
scripts=['task-mapper/scripts/task-mapper'],
py_modules = [],
classifiers = [
            'Development Status :: 3 - Alpha',
'Environment :: Python',
'Intended Audience :: Envac',
'Programming Language :: Python',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Topic :: System :: Monitoring',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3'
],
entry_points = {},
package_data = {},
install_requires = [],
dependency_links = [],
zip_safe=True,
cmdclass={'install': install},
)
|
python
|
bg_black = "\u001b[48;5;0m"
bg_gray = "\u001b[48;5;8m"
bg_red = "\u001b[48;5;9m"
bg_green = "\u001b[48;5;10m"
bg_yellow = "\u001b[48;5;11m"
bg_blue = "\u001b[48;5;12m"
bg_purple = "\u001b[48;5;13m"
bg_cyan = "\u001b[48;5;14m"
bg_white = "\u001b[48;5;15m"
def customColor(number):
    """Print the ANSI escape that sets an arbitrary 256-color background."""
    print(f"\u001b[48;5;{number}m")
|
python
|
# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache-2.0
import logging
import salt.exceptions
import salt.utils.data
import saltext.vmware.utils.common as utils_common
import saltext.vmware.utils.esxi as utils_esxi
from salt.defaults import DEFAULT_TARGET_DELIM
from saltext.vmware.utils.connect import get_service_instance
log = logging.getLogger(__name__)
try:
from pyVmomi import vmodl, vim, VmomiSupport
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
__virtualname__ = "vmware_esxi"
def __virtual__():
if not HAS_PYVMOMI:
return False, "Unable to import pyVmomi module."
return __virtualname__
def get_lun_ids(service_instance=None):
"""
Return a list of LUN (Logical Unit Number) NAA (Network Addressing Authority) IDs.
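    .. code-block:: bash
        salt '*' vmware_esxi.get_lun_ids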
"""
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(service_instance=service_instance, get_all_hosts=True)
ids = []
for host in hosts:
for datastore in host.datastore:
for extent in datastore.info.vmfs.extent:
ids.append(extent.diskName)
return ids
def _get_capability_attribs(host):
ret = {}
for attrib in dir(host.capability):
if attrib.startswith("_") or attrib.lower() == "array":
continue
val = getattr(host.capability, attrib)
# Convert all pyvmomi str[], bool[] and int[] to list.
if isinstance(val, list):
val = list(val)
ret.update({utils_common.camel_to_snake_case(attrib): val})
return ret
def get_capabilities(service_instance=None):
"""
Return ESXi host's capability information.
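    .. code-block:: bash
        salt '*' vmware_esxi.get_capabilities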
"""
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(service_instance=service_instance, get_all_hosts=True)
capabilities = {}
for host in hosts:
capabilities[host.name] = _get_capability_attribs(host)
return capabilities
def power_state(
datacenter_name=None, cluster_name=None, host_name=None, state=None, timeout=600, force=True
):
"""
Manage the power state of the ESXi host.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname whose power state needs to be managed (optional).
state
Sets the ESXi host to this power state. Valid values: "reboot", "standby", "poweron", "shutdown".
timeout
Timeout when transitioning power state to standby / poweron. Default: 600 seconds
force
Force power state transition. Default: True
.. code-block:: bash
salt '*' vmware_esxi.power_state datacenter_name=dc1 cluster_name=cl1 host_name=host1 state=shutdown
"""
ret = None
task = None
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
if state == "reboot":
task = h.RebootHost_Task(force)
elif state == "standby":
task = h.PowerDownHostToStandBy_Task(timeout, force)
elif state == "poweron":
task = h.PowerUpHostFromStandBy_Task(timeout)
elif state == "shutdown":
task = h.ShutdownHost_Task(force)
if task:
utils_common.wait_for_task(task, h.name, "PowerStateTask")
ret = True
except (vmodl.fault.NotSupported, salt.exceptions.VMwareApiError) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def manage_service(
service_name,
datacenter_name=None,
cluster_name=None,
host_name=None,
state=None,
startup_policy=None,
service_instance=None,
):
"""
    Manage the state of the service running on the ESXi host.
service_name
Service that needs to be managed.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname whose power state needs to be managed (optional)
state
Sets the service running on the ESXi host to this state. Valid values: "start", "stop", "restart".
startup_policy
Sets the service startup policy. If unspecified, no changes are made. Valid values "on", "off", "automatic".
- on: Start and stop with host
- off: Start and stop manually
- automatic: Start automatically if any ports are open, and stop when all ports are closed
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional)
.. code-block:: bash
salt '*' vmware_esxi.manage_service sshd datacenter_name=dc1 cluster_name=cl1 host_name=host1 state=restart startup_policy=on
"""
log.debug("Running vmware_esxi.manage_service")
ret = None
task = None
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_service = h.configManager.serviceSystem
if not host_service:
continue
if state:
if state == "start":
host_service.StartService(id=service_name)
elif state == "stop":
host_service.StopService(id=service_name)
elif state == "restart":
host_service.RestartService(id=service_name)
else:
raise salt.exceptions.SaltException("Unknown state - {}".format(state))
if startup_policy is not None:
if startup_policy is True:
startup_policy = "on"
elif startup_policy is False:
startup_policy = "off"
host_service.UpdateServicePolicy(id=service_name, policy=startup_policy)
ret = True
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def list_services(
service_name=None,
datacenter_name=None,
cluster_name=None,
host_name=None,
state=None,
startup_policy=None,
service_instance=None,
):
"""
    List the state of services running on matching ESXi hosts.
service_name
Filter by this service name. (optional)
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
state
Filter by this service state. Valid values: "running", "stopped"
startup_policy
Filter by this service startup policy. Valid values "on", "off", "automatic".
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.list_services
"""
log.debug("Running vmware_esxi.list_services")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_service = h.configManager.serviceSystem
ret[h.name] = {}
if not host_service:
continue
if startup_policy is not None:
# salt converts command line input "on" and "off" to True and False. Handle explicitly.
if startup_policy is True:
startup_policy = "on"
elif startup_policy is False:
startup_policy = "off"
services = host_service.serviceInfo.service
for service in services or []:
if service_name and service.key != service_name:
continue
if startup_policy and service.policy != startup_policy:
continue
if state and state == "running" and not service.running:
continue
if state and state == "stopped" and service.running:
continue
ret[h.name][service.key] = {
"state": "running" if service.running else "stopped",
"startup_policy": service.policy,
}
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def get_acceptance_level(
datacenter_name=None,
cluster_name=None,
host_name=None,
acceptance_level=None,
service_instance=None,
):
"""
    Get acceptance level on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
acceptance_level
Filter by this acceptance level. Valid values: "community", "partner", "vmware_accepted", "vmware_certified". (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_acceptance_level
Returns:
.. code-block:: json
{
"host1": "partner",
"host2": "partner"
}
"""
log.debug("Running vmware_esxi.get_acceptance_level")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_config_manager = h.configManager.imageConfigManager
if not host_config_manager:
continue
host_acceptance_level = host_config_manager.HostImageConfigGetAcceptance()
if acceptance_level and host_acceptance_level != acceptance_level:
continue
ret[h.name] = host_acceptance_level
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_acceptance_level(
acceptance_level,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set acceptance level on matching ESXi hosts.
acceptance_level
Set to this acceptance level. Valid values: "community", "partner", "vmware_accepted", "vmware_certified".
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.set_acceptance_level
Returns:
.. code-block:: json
{
"host1": "partner",
"host2": "partner"
}
"""
log.debug("Running vmware_esxi.set_acceptance_level")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_config_manager = h.configManager.imageConfigManager
if not host_config_manager:
continue
host_config_manager.UpdateHostImageAcceptanceLevel(newAcceptanceLevel=acceptance_level)
ret[h.name] = acceptance_level
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def get_advanced_config(
datacenter_name=None,
cluster_name=None,
host_name=None,
config_name=None,
service_instance=None,
):
"""
    Get advanced config on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
config_name
Filter by this config_name. (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_advanced_config
"""
log.debug("Running vmware_esxi.get_advanced_config")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
config_manager = h.configManager.advancedOption
ret[h.name] = {}
if not config_manager:
continue
for opt in config_manager.QueryOptions(config_name):
ret[h.name][opt.key] = opt.value
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_advanced_configs(
config_dict,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set multiple advanced configurations on matching ESXi hosts.
config_dict
Set the configuration key to the configuration value. Eg: {"Annotations.WelcomeMessage": "Hello"}
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
        salt '*' vmware_esxi.set_advanced_configs config_dict='{"Annotations.WelcomeMessage": "Hello"}'
Returns:
.. code-block:: json
{
"host1": {
"Annotations.WelcomeMessage": "HelloDemo"
            }
}
"""
log.debug("Running vmware_esxi.set_advanced_configs")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
config_manager = h.configManager.advancedOption
ret[h.name] = {}
if not config_manager:
continue
supported_configs = {}
for opt in config_manager.supportedOption:
if opt.key not in config_dict:
continue
supported_configs[opt.key] = opt.optionType
advanced_configs = []
for opt in config_dict:
opt_type = supported_configs[opt]
val = config_dict[opt]
if isinstance(opt_type, vim.option.BoolOption) and not isinstance(val, bool):
val = val.lower() == "true"
elif isinstance(opt_type, vim.option.LongOption):
val = VmomiSupport.vmodlTypes["long"](val)
elif isinstance(opt_type, vim.option.IntOption):
val = VmomiSupport.vmodlTypes["int"](val)
advanced_configs.append(vim.option.OptionValue(key=opt, value=val))
ret[h.name][opt] = config_dict[opt]
config_manager.UpdateOptions(changedValue=advanced_configs)
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_advanced_config(
config_name,
config_value,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set a single advanced configuration on matching ESXi hosts.
config_name
Name of the advanced configuration to be set.
config_value
Set the advanced configuration to this value.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.set_advanced_config config_name=Annotations.WelcomeMessage config_value=Hello
Returns:
.. code-block:: json
{
"host1": {
"Annotations.WelcomeMessage": "HelloDemo"
            }
}
"""
log.debug("Running vmware_esxi.set_advanced_config")
return set_advanced_configs(
config_dict={config_name: config_value},
datacenter_name=datacenter_name,
cluster_name=cluster_name,
host_name=host_name,
service_instance=service_instance,
)
def get_dns_config(
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Get DNS configuration on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_dns_config
"""
log.debug("Running vmware_esxi.get_dns_config")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
dns_config = h.config.network.dnsConfig
if not dns_config:
continue
ret[h.name] = {}
ret[h.name]["dhcp"] = dns_config.dhcp
ret[h.name]["virtual_nic"] = dns_config.virtualNicDevice
ret[h.name]["host_name"] = dns_config.hostName
ret[h.name]["domain_name"] = dns_config.domainName
ret[h.name]["ip"] = list(dns_config.address)
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def connect(host, service_instance=None):
"""
Connect an ESXi instance to a vCenter instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
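    .. code-block:: bash
        salt '*' vmware_esxi.connect host=host1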
"""
log.debug(f"Connect ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.reconnect_host(host, service_instance)
return {"state": state}
def disconnect(host, service_instance=None):
"""
Disconnect an ESXi instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
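    .. code-block:: bash
        salt '*' vmware_esxi.disconnect host=host1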
"""
log.debug(f"Disconnect ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.disconnect_host(host, service_instance)
return {"state": state}
def remove(host, service_instance=None):
"""
Remove an ESXi instance from a vCenter instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
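    .. code-block:: bash
        salt '*' vmware_esxi.remove host=host1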
"""
log.debug(f"Remove ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.remove_host(host, service_instance)
return {"state": state}
def move(host, cluster_name, service_instance=None):
"""
Move an ESXi instance to a different cluster.
host
Name of ESXi instance in vCenter.
cluster_name
Name of cluster to move host to.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
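    .. code-block:: bash
        salt '*' vmware_esxi.move host=host1 cluster_name=cl1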
"""
log.debug(f"Move ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.move_host(host, cluster_name, service_instance)
return {"state": state}
def add(
host,
root_user,
password,
cluster_name,
datacenter_name,
verify_host_cert=True,
connect=True,
service_instance=None,
):
"""
Add an ESXi instance to a vCenter instance.
host
IP address or hostname of ESXi instance.
root_user
Username with root privilege to ESXi instance.
password
Password to root user.
cluster_name
Name of cluster ESXi host is being added to.
    datacenter_name
Datacenter that contains cluster that ESXi instance is being added to.
verify_host_cert
Validates the host's SSL certificate is signed by a CA, and that the hostname in the certificate matches the host. Defaults to True.
connect
Specifies whether host should be connected after being added. Defaults to True.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
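    .. code-block:: bash
        salt '*' vmware_esxi.add host=host1 root_user=root password=secret cluster_name=cl1 datacenter_name=dc1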
"""
log.debug(f"Adding ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.add_host(
host,
root_user,
password,
cluster_name,
datacenter_name,
verify_host_cert,
connect,
service_instance,
)
return {"state": state}
def list_pkgs(
pkg_name=None,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    List the packages installed on matching ESXi hosts.
Note: Appropriate filters are recommended for large installations.
pkg_name
Filter by this package name. (optional)
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.list_pkgs
"""
log.debug("Running vmware_esxi.list_pkgs")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_pkg_manager = h.configManager.imageConfigManager
if not host_pkg_manager:
continue
ret[h.name] = {}
pkgs = host_pkg_manager.FetchSoftwarePackages()
for pkg in pkgs:
if pkg_name and pkg.name != pkg_name:
continue
ret[h.name][pkg.name] = {
"version": pkg.version,
"vendor": pkg.vendor,
"summary": pkg.summary,
"description": pkg.description,
"acceptance_level": pkg.acceptanceLevel,
"maintenance_mode_required": pkg.maintenanceModeRequired,
"creation_date": pkg.creationDate,
}
return ret
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
def get(
datacenter_name=None,
cluster_name=None,
host_name=None,
key=None,
default="",
delimiter=DEFAULT_TARGET_DELIM,
service_instance=None,
):
"""
    Get configuration information for matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
key
Attempt to retrieve the named value from ESXi host configuration data, if the named value is not
available return the passed default. The default return is an empty string.
Follows the grains.get filter semantics. (optional)
The value can also represent a value in a nested dict using a ":" delimiter
for the dict. This means that if a dict in ESXi host configuration looks like this:
{'vsan': {'health': 'good'}}
        To retrieve the value associated with the health key in the vsan dict, this
        key can be passed:
vsan:health
delimiter
Specify an alternate delimiter to use when traversing a nested dict.
This is useful for when the desired key contains a colon. (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get dc1 cl1
"""
log.debug("Running vmware_esxi.get")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
ret[h.name] = {}
ret[h.name]["vsan"] = {}
vsan_manager = h.configManager.vsanSystem
if vsan_manager:
vsan = vsan_manager.QueryHostStatus()
ret[h.name]["vsan"]["cluster_uuid"] = vsan.uuid
ret[h.name]["vsan"]["node_uuid"] = vsan.nodeUuid
ret[h.name]["vsan"]["health"] = vsan.health
ret[h.name]["datastores"] = {}
for store in h.datastore:
ret[h.name]["datastores"][store.name] = {}
ret[h.name]["datastores"][store.name]["capacity"] = store.summary.capacity
ret[h.name]["datastores"][store.name]["free_space"] = store.summary.freeSpace
ret[h.name]["nics"] = {}
for nic in h.config.network.vnic:
ret[h.name]["nics"][nic.device] = {}
ret[h.name]["nics"][nic.device]["ip_address"] = nic.spec.ip.ipAddress
ret[h.name]["nics"][nic.device]["subnet_mask"] = nic.spec.ip.subnetMask
ret[h.name]["nics"][nic.device]["mac"] = nic.spec.mac
ret[h.name]["nics"][nic.device]["mtu"] = nic.spec.mtu
ret[h.name]["cpu_model"] = h.summary.hardware.cpuModel
ret[h.name]["num_cpu_cores"] = h.summary.hardware.numCpuCores
ret[h.name]["num_cpu_pkgs"] = h.summary.hardware.numCpuPkgs
ret[h.name]["num_cpu_threads"] = h.summary.hardware.numCpuThreads
ret[h.name]["memory_size"] = h.summary.hardware.memorySize
ret[h.name]["overall_memory_usage"] = h.summary.quickStats.overallMemoryUsage
ret[h.name]["product_name"] = h.config.product.name
ret[h.name]["product_version"] = h.config.product.version
ret[h.name]["product_build"] = h.config.product.build
ret[h.name]["product_os_type"] = h.config.product.osType
ret[h.name]["host_name"] = h.summary.config.name
ret[h.name]["system_vendor"] = h.hardware.systemInfo.vendor
ret[h.name]["system_model"] = h.hardware.systemInfo.model
ret[h.name]["bios_release_date"] = h.hardware.biosInfo.releaseDate
ret[h.name]["bios_release_version"] = h.hardware.biosInfo.biosVersion
ret[h.name]["uptime"] = h.summary.quickStats.uptime
ret[h.name]["in_maintenance_mode"] = h.runtime.inMaintenanceMode
ret[h.name]["system_uuid"] = h.hardware.systemInfo.uuid
for info in h.hardware.systemInfo.otherIdentifyingInfo:
ret[h.name].update(
{
utils_common.camel_to_snake_case(
info.identifierType.key
): info.identifierValue
}
)
ret[h.name]["capabilities"] = _get_capability_attribs(host=h)
if key:
ret[h.name] = salt.utils.data.traverse_dict_and_list(
ret[h.name], key, default, delimiter
)
return ret
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
|
python
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id$
"""Internal use: Topic-specific extensions for TCPROS support"""
import socket
import threading
import time
try:
from xmlrpc.client import ServerProxy # Python 3.x
except ImportError:
from xmlrpclib import ServerProxy # Python 2.x
from rospy.core import logwarn, logerr, logdebug, rospyerr
import rospy.exceptions
import rospy.names
import rospy.impl.registration
import rospy.impl.transport
from rospy.impl.tcpros_base import TCPROSTransport, TCPROSTransportProtocol, \
get_tcpros_server_address, start_tcpros_server,\
DEFAULT_BUFF_SIZE, TCPROS
class TCPROSSub(TCPROSTransportProtocol):
"""
Subscription transport implementation for receiving topic data via
peer-to-peer TCP/IP sockets
"""
def __init__(self, resolved_name, recv_data_class, queue_size=None, \
buff_size=DEFAULT_BUFF_SIZE, tcp_nodelay=False):
"""
ctor.
@param resolved_name: resolved subscription name
@type resolved_name: str
@param recv_data_class: class to instantiate to receive
messages
@type recv_data_class: L{rospy.Message}
@param queue_size: maximum number of messages to
deserialize from newly read data off socket
@type queue_size: int
@param buff_size: recv buffer size
@type buff_size: int
@param tcp_nodelay: If True, request TCP_NODELAY from publisher
@type tcp_nodelay: bool
"""
super(TCPROSSub, self).__init__(resolved_name, recv_data_class, queue_size, buff_size)
self.direction = rospy.impl.transport.INBOUND
self.tcp_nodelay = tcp_nodelay
def get_header_fields(self):
"""
@return: dictionary of subscriber fields
@rtype: dict
"""
return {'topic': self.resolved_name,
'message_definition': self.recv_data_class._full_text,
'tcp_nodelay': '1' if self.tcp_nodelay else '0',
'md5sum': self.recv_data_class._md5sum,
'type': self.recv_data_class._type,
'callerid': rospy.names.get_caller_id(),
'node_type': "rospy"}
# Separate method for easier testing
def _configure_pub_socket(sock, is_tcp_nodelay):
"""
Configure socket options on a new publisher socket.
@param sock: socket.socket
@type sock: socket.socket
@param is_tcp_nodelay: if True, TCP_NODELAY will be set on outgoing socket if available
@param is_tcp_nodelay: bool
"""
# #956: low latency, TCP_NODELAY support
if is_tcp_nodelay:
if hasattr(socket, 'TCP_NODELAY'):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
else:
logwarn("WARNING: cannot enable TCP_NODELAY as its not supported on this platform")
#TODO:POLLING: TCPROSPub currently doesn't actually do anything -- not until polling is implemented
class TCPROSPub(TCPROSTransportProtocol):
"""
Publisher transport implementation for publishing topic data via
peer-to-peer TCP/IP sockets.
"""
def __init__(self, resolved_name, pub_data_class, is_latch=False, headers=None):
"""
ctor.
@param resolved_name: resolved topic name
@type resolved_name: str
@param pub_data_class: class to instance to receive messages
@type pub_data_class: L{rospy.Message} class
@param is_latch: If True, Publisher is latching
@type is_latch: bool
"""
# very small buffer size for publishers as the messages they receive are very small
super(TCPROSPub, self).__init__(resolved_name, None, queue_size=None, buff_size=128)
self.pub_data_class = pub_data_class
self.direction = rospy.impl.transport.OUTBOUND
self.is_latch = is_latch
self.headers = headers if headers else {}
def get_header_fields(self):
base = {'topic': self.resolved_name,
'type': self.pub_data_class._type,
'latching': '1' if self.is_latch else '0',
'message_definition': self.pub_data_class._full_text,
'md5sum': self.pub_data_class._md5sum,
'callerid': rospy.names.get_caller_id(),
'node_type': "rospy"}
# this implementation allows the user to override builtin
# fields. this could potentially enable some interesting
# features... or it could be really bad.
if self.headers:
base.update(self.headers)
return base
def robust_connect_subscriber(conn, dest_addr, dest_port, pub_uri, receive_cb, resolved_topic_name):
"""
Keeps trying to create connection for subscriber. Then passes off to receive_loop once connected.
"""
# kwc: this logic is not very elegant. I am waiting to rewrite
# the I/O loop with async i/o to clean this up.
# timeout is really generous. for now just choosing one that is large but not infinite
interval = 0.5
while conn.socket is None and not conn.done and not rospy.is_shutdown():
try:
conn.connect(dest_addr, dest_port, pub_uri, timeout=60.)
except rospy.exceptions.TransportInitError as e:
# if the connection was closed intentionally
# because of an unknown error, stop trying
if conn.protocol is None:
conn.done = True
break
rospyerr("unable to create subscriber transport: %s. Will try again in %ss", e, interval)
interval = interval * 2
time.sleep(interval)
# check to see if publisher state has changed
conn.done = not check_if_still_publisher(resolved_topic_name, pub_uri)
if not conn.done:
conn.receive_loop(receive_cb)
def check_if_still_publisher(resolved_topic_name, pub_uri):
try:
s = ServerProxy(pub_uri)
code, msg, val = s.getPublications(rospy.names.get_name())
if code == 1:
return len([t for t in val if t[0] == resolved_topic_name]) > 0
else:
return False
    except Exception:
return False
class TCPROSHandler(rospy.impl.transport.ProtocolHandler):
"""
ROS Protocol handler for TCPROS. Accepts both TCPROS topic
connections as well as ROS service connections over TCP. TCP server
socket is run once start_server() is called -- this is implicitly
called during init_publisher().
"""
def __init__(self):
"""ctor"""
self.tcp_nodelay_map = {} # { topic : tcp_nodelay}
def set_tcp_nodelay(self, resolved_name, tcp_nodelay):
"""
@param resolved_name: resolved topic name
@type resolved_name: str
@param tcp_nodelay: If True, sets TCP_NODELAY on publisher's
socket (disables Nagle algorithm). This results in lower
latency publishing at the cost of efficiency.
@type tcp_nodelay: bool
"""
self.tcp_nodelay_map[resolved_name] = tcp_nodelay
def shutdown(self):
"""
stops the TCP/IP server responsible for receiving inbound connections
"""
pass
def create_transport(self, resolved_name, pub_uri, protocol_params):
"""
Connect to topic resolved_name on Publisher pub_uri using TCPROS.
@param resolved_name str: resolved topic name
@type resolved_name: str
@param pub_uri: XML-RPC URI of publisher
@type pub_uri: str
@param protocol_params: protocol parameters to use for connecting
@type protocol_params: [XmlRpcLegal]
@return: code, message, debug
@rtype: (int, str, int)
"""
        # Validate protocol params = [TCPROS, address, port]
        if not isinstance(protocol_params, list) or len(protocol_params) != 3:
            return 0, "ERROR: invalid TCPROS parameters", 0
        if protocol_params[0] != TCPROS:
            return 0, "INTERNAL ERROR: protocol id is not TCPROS: %s" % protocol_params[0], 0
        _id, dest_addr, dest_port = protocol_params
sub = rospy.impl.registration.get_topic_manager().get_subscriber_impl(resolved_name)
#Create connection
protocol = TCPROSSub(resolved_name, sub.data_class, \
queue_size=sub.queue_size, buff_size=sub.buff_size,
tcp_nodelay=sub.tcp_nodelay)
conn = TCPROSTransport(protocol, resolved_name)
        conn.set_endpoint_id(pub_uri)
t = threading.Thread(name=resolved_name, target=robust_connect_subscriber, args=(conn, dest_addr, dest_port, pub_uri, sub.receive_callback,resolved_name))
# don't enable this just yet, need to work on this logic
#rospy.core._add_shutdown_thread(t)
# Attach connection to _SubscriberImpl
if sub.add_connection(conn): #pass tcp connection to handler
# since the thread might cause the connection to close
# it should only be started after the connection has been added to the subscriber
# https://github.com/ros/ros_comm/issues/544
t.start()
return 1, "Connected topic[%s]. Transport impl[%s]"%(resolved_name, conn.__class__.__name__), dest_port
else:
# _SubscriberImpl already closed or duplicate subscriber created
conn.close()
return 0, "ERROR: Race condition failure creating topic subscriber [%s]"%(resolved_name), 0
def supports(self, protocol):
"""
@param protocol: name of protocol
@type protocol: str
@return: True if protocol is supported
@rtype: bool
"""
return protocol == TCPROS
def get_supported(self):
"""
Get supported protocols
"""
return [[TCPROS]]
def init_publisher(self, resolved_name, protocol):
"""
Initialize this node to receive an inbound TCP connection,
i.e. startup a TCP server if one is not already running.
@param resolved_name: topic name
        @type resolved_name: str
@param protocol: negotiated protocol
parameters. protocol[0] must be the string 'TCPROS'
@type protocol: [str, value*]
@return: (code, msg, [TCPROS, addr, port])
@rtype: (int, str, list)
"""
if protocol[0] != TCPROS:
return 0, "Internal error: protocol does not match TCPROS: %s"%protocol, []
start_tcpros_server()
addr, port = get_tcpros_server_address()
return 1, "ready on %s:%s"%(addr, port), [TCPROS, addr, port]
def topic_connection_handler(self, sock, client_addr, header):
"""
Process incoming topic connection. Reads in topic name from
handshake and creates the appropriate L{TCPROSPub} handler for the
connection.
@param sock: socket connection
@type sock: socket.socket
@param client_addr: client address
@type client_addr: (str, int)
@param header: key/value pairs from handshake header
@type header: dict
@return: error string or None
@rtype: str
"""
if rospy.core.is_shutdown_requested():
return "Node is shutting down"
for required in ['topic', 'md5sum', 'callerid']:
            if required not in header:
return "Missing required '%s' field"%required
else:
resolved_topic_name = header['topic']
md5sum = header['md5sum']
tm = rospy.impl.registration.get_topic_manager()
topic = tm.get_publisher_impl(resolved_topic_name)
if not topic:
return "[%s] is not a publisher of [%s]. Topics are %s"%(rospy.names.get_caller_id(), resolved_topic_name, tm.get_publications())
elif not topic.data_class or topic.closed:
return "Internal error processing topic [%s]"%(resolved_topic_name)
elif md5sum != rospy.names.TOPIC_ANYTYPE and md5sum != topic.data_class._md5sum:
data_class = topic.data_class
actual_type = data_class._type
# check to see if subscriber sent 'type' header. If they did, check that
# types are same first as this provides a better debugging message
if 'type' in header:
requested_type = header['type']
if requested_type != actual_type:
return "topic types do not match: [%s] vs. [%s]"%(requested_type, actual_type)
else:
# defaults to actual type
requested_type = actual_type
return "Client [%s] wants topic [%s] to have datatype/md5sum [%s/%s], but our version has [%s/%s] Dropping connection."%(header['callerid'], resolved_topic_name, requested_type, md5sum, actual_type, data_class._md5sum)
else:
#TODO:POLLING if polling header is present, have to spin up receive loop as well
# #1334: tcp_nodelay support from subscriber option
if 'tcp_nodelay' in header:
                    tcp_nodelay = header['tcp_nodelay'].strip() == '1'
else:
tcp_nodelay = self.tcp_nodelay_map.get(resolved_topic_name, False)
_configure_pub_socket(sock, tcp_nodelay)
protocol = TCPROSPub(resolved_topic_name, topic.data_class, is_latch=topic.is_latch, headers=topic.headers)
transport = TCPROSTransport(protocol, resolved_topic_name)
transport.set_socket(sock, header['callerid'])
transport.remote_endpoint = client_addr
transport.write_header()
topic.add_connection(transport)
class QueuedConnection(object):
"""
    Wraps a Transport instance and behaves like one, but queues the data
    written to it and relays it asynchronously to the wrapped instance.
"""
def __init__(self, connection, queue_size):
"""
ctor.
@param connection: the wrapped transport instance
@type connection: Transport
@param queue_size: the maximum size of the queue, zero means infinite
@type queue_size: int
"""
super(QueuedConnection, self).__init__()
self._connection = connection
self._queue_size = queue_size
self._lock = threading.Lock()
self._cond_data_available = threading.Condition(self._lock)
self._connection.set_cleanup_callback(self._closed_connection_callback)
self._queue = []
self._error = None
self._thread = threading.Thread(target=self._run)
self._thread.start()
def _closed_connection_callback(self, connection):
with self._lock:
self._cond_data_available.notify()
def __getattr__(self, name):
if name.startswith('__'):
raise AttributeError(name)
return getattr(self._connection, name)
def write_data(self, data):
with self._lock:
# if there was previously an error within the dispatch thread raise it
if self._error:
error = self._error
self._error = None
raise error
# pop oldest data if queue limit is reached
if self._queue_size > 0 and len(self._queue) == self._queue_size:
del self._queue[0]
self._queue.append(data)
self._cond_data_available.notify()
# effectively yields the rest of the thread quantum
time.sleep(0)
return True
def _run(self):
while not self._connection.done:
queue = []
with self._lock:
# wait for available data
while not self._queue and not self._connection.done:
self._cond_data_available.wait()
# take all data from queue for processing outside of the lock
if self._queue:
queue = self._queue
self._queue = []
# relay all data
for data in queue:
try:
self._connection.write_data(data)
except Exception as e:
with self._lock:
self._error = e
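# --- illustrative usage sketch, not part of rospy ---
# QueuedConnection only relies on the small Transport surface it touches: a
# `done` flag, `set_cleanup_callback(cb)` and `write_data(data)`. The
# _FakeTransport below is a hypothetical stand-in used to demonstrate the
# asynchronous relaying behaviour.
class _FakeTransport(object):
    def __init__(self):
        self.done = False
        self.sent = []
        self._cleanup_cb = None
    def set_cleanup_callback(self, cb):
        self._cleanup_cb = cb
    def write_data(self, data):
        self.sent.append(data)
if __name__ == '__main__':
    fake = _FakeTransport()
    queued = QueuedConnection(fake, queue_size=10)
    queued.write_data(b'payload')  # returns immediately; relayed by the worker thread
    time.sleep(0.2)                # give the dispatch thread time to relay
    fake.done = True
    fake._cleanup_cb(fake)         # wake the dispatch thread so it can observe `done` and exit
    print(fake.sent)               # expected: [b'payload']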
|
python
|
a = int(input())
b = int(input())
if a <= b:
    for i in range(a, b + 1):
        for n in range(1, 11):
            print(f'{i} x {n} = {i * n}')
        print('-' * 10)
else:
    print('No multiplication tables in that interval!')
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 6 07:51:51 2018
@author: tuheenahmmed
"""
def getWordScore(word, n):
"""
Returns the score for a word. Assumes the word is a valid word.
The score for a word is the sum of the points for letters in the
word, multiplied by the length of the word, PLUS 50 points if all n
letters are used on the first turn.
Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)
word: string (lowercase letters)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: int >= 0
"""
    score = 0
    if not word:  # a word is a string, so test for emptiness rather than == []
        return score
    for letter in word:
        if letter in SCRABBLE_LETTER_VALUES:
            score += SCRABBLE_LETTER_VALUES[letter]
    score *= len(word)
    if len(word) == n:
        score += 50
    return score
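if __name__ == '__main__':
    # Illustrative check with a stand-in letter-value table; in the problem
    # set, SCRABBLE_LETTER_VALUES is defined alongside this function.
    SCRABBLE_LETTER_VALUES = {'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1}
    print(getWordScore('bad', 7))  # (3 + 1 + 2) * 3 = 18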
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
"""
Снежинка / Snowflake
"""
# Original: http://www.cyberforum.ru/pascalabc/thread994987.html
# uses graphABC;
# const k=8;
# var x,y:integer;
# procedure snow (x0,y0,r,n:integer);
# const t=2*pi/k;
# var i,x,y:integer;
# begin
# for i:=1 to k do
# begin
# x:=x0+round(r*cos(i*t));
# y:=y0-round(r*sin(i*t));
# line(x0,y0,x,y);
# if n>1 then snow(x,y,r div 5,n-1);
# end;
# end;
# begin
# SetWindowSize(500,500);
# SetWindowCaption('Фракталы: что-то похожее на снежинку');
# x:=windowwidth div 2;
# y:=windowheight div 2;
# snow(x,y,180,4);
# end.
from math import *
def draw_snowflake(draw_by_image, width, height, count):
def draw(x0, y0, r, n):
t = 2 * pi / count
for i in range(count):
x = x0 + r * cos(i * t)
y = y0 - r * sin(i * t)
draw_by_image.line((x0, y0, x, y), 'black')
if n > 1:
draw(x, y, r // 5, n-1)
x = width // 2
y = height // 2
draw(x, y, 180, 4)
if __name__ == '__main__':
from PIL import Image, ImageDraw
img = Image.new("RGB", (500, 500), "white")
    # Number of repetitions
count = 8
draw_snowflake(ImageDraw.Draw(img), img.width, img.height, count)
img.save('img.png')
|
python
|
##
# Copyright (c) 2007-2016 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from caldavclientlibrary.browser.baseshell import BaseShell
from caldavclientlibrary.browser.command import Command
from caldavclientlibrary.client.account import CalDAVAccount
from getpass import getpass
from caldavclientlibrary.protocol.url import URL
import caldavclientlibrary.browser.commands
import atexit
import getopt
import sys
import urlparse
class Shell(BaseShell):
def __init__(self, server, path, user, pswd, logging, noHostRedirect=False, afunix=None):
super(Shell, self).__init__("caldav_client")
self.prefix = self.wd = "/"
self.server = server
self.user = user
self.pswd = pswd
self.registerCommands()
# Create the account
ssl = server.startswith("https://")
server = server[8:] if ssl else server[7:]
self.account = CalDAVAccount(server, ssl=ssl, afunix=afunix, user=self.user, pswd=self.pswd, root=path, principal=None, logging=logging, noHostRedirect=noHostRedirect)
atexit.register(self.saveHistory)
def registerCommands(self):
module = caldavclientlibrary.browser.commands
for item in module.__all__:
mod = __import__("caldavclientlibrary.browser.commands." + item, globals(), locals(), ["Cmd", ])
cmd_class = mod.Cmd
if type(cmd_class) is type and issubclass(cmd_class, Command):
self.registerCommand(cmd_class())
def setWD(self, newwd):
# Check that the new one exists
resource = (newwd if newwd.endswith("/") else newwd + "/")
if not self.account.session.testResource(URL(url=resource)):
return False
self.prefix = self.wd = newwd
return True
def setUserPswd(self, user, pswd):
self.user = user
self.pswd = pswd
self.account.setUserPswd(user, pswd)
def usage():
return """Usage: shell [OPTIONS]
Options:
-l start with HTTP logging on.
--server=HOST url of the server, including http/https scheme and port [REQUIRED].
--unix=PATH path to unix socket to connect to server [OPTIONAL]
--user=USER user name to login as - will be prompted if not present [OPTIONAL].
--pswd=PSWD password for user - will be prompted if not present [OPTIONAL].
--no-host-redirect Don't allow the hostname to change when an HTTP redirect occurs [OPTIONAL]
"""
def runit():
logging = False
server = None
afunix = None
user = None
pswd = None
noHostRedirect = False
opts, _ignore_args = getopt.getopt(sys.argv[1:], 'lh', ["help", "server=", "unix=", "user=", "pswd=", "no-host-redirect"])
for name, value in opts:
if name == "-l":
logging = True
elif name == "--server":
server = value
elif name == "--unix":
afunix = value
elif name == "--user":
user = value
elif name == "--pswd":
pswd = value
elif name == "--no-host-redirect":
noHostRedirect = True
else:
print usage()
raise SystemExit()
if not server or not (server.startswith("http://") or server.startswith("https://")):
print usage()
raise SystemExit()
splits = urlparse.urlsplit(server)
server = splits.scheme + "://" + splits.netloc
path = splits.path
if not path:
path = "/"
if not user:
user = raw_input("User: ")
if not pswd:
pswd = getpass("Password: ")
shell = Shell(server, path, user, pswd, logging, noHostRedirect=noHostRedirect, afunix=afunix)
shell.run()
if __name__ == '__main__':
runit()
|
python
|
'''Author: Brandon Trabucco, Copyright 2019
Helper functions to display and run a simple game'''
from game_engine.colors import *
from game_engine.tiles import *
from game_engine.stacks import *
from game_engine.drawable import Drawable
import random
random.seed(12345)
#####################################
# let's make a game board to play on #
#####################################
class Board(Drawable):
def __init__(self, name, tiles):
super(Board, self).__init__(len(tiles), len(tiles[0]))
assert(all([len(t) == self.width for t in tiles]))
self.name = name
self.tiles = tiles
self.entities = []
def place_tile(self, tile, x, y):
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return False
self.tiles[y][x].place_tile(tile)
return True
def remove_tile(self, x, y):
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return Null()
return self.tiles[y][x].remove_tile()
def add_entity(self, e):
if not (isinstance(e, Entity)):
return -1
self.entities.append(e)
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return -1
self.tiles[e.y][e.x].place_tile(e)
return len(self.entities) - 1
def in_front_of(self, which_entity):
e = self.entities[which_entity]
dx, dy = [(0, -1), (1, 0), (0, 1), (-1, 0)][e.z]
if not (e.x + dx >= 0 and e.x + dx < self.width and e.y + dy >= 0 and e.y + dy < self.height):
return Null()
return self.tiles[e.y + dy][e.x + dx].first_tile
def break_in_front_of(self, which_entity):
e = self.entities[which_entity]
dx, dy = [(0, -1), (1, 0), (0, 1), (-1, 0)][e.z]
return self.remove_tile(e.x + dx, e.y + dy)
def face_entity(self, which_entity, dx, dy):
e = self.entities[which_entity]
e.face(dx, dy)
def shift_entity(self, which_entity, dx, dy):
e = self.entities[which_entity]
if isinstance(self.tiles[e.y][e.x].first_tile, Entity):
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return False
self.tiles[e.y][e.x].remove_tile()
e.move(dx, dy)
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return False
self.tiles[e.y][e.x].place_tile(e)
return True
def shift_destination(self, which_entity, dx, dy):
e = self.entities[which_entity]
x, y = e.x + dx, e.y + dy
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return Null()
return self.tiles[y][x].first_tile
def move_entity(self, which_entity, x, y):
e = self.entities[which_entity]
dx, dy = x - e.x, y - e.y
self.shift_entity(which_entity, dx, dy)
def move_destination(self, which_entity, x, y):
e = self.entities[which_entity]
dx, dy = x - e.x, y - e.y
return self.shift_destination(which_entity, dx, dy)
def draw(self, canvas):
for x in range(self.width):
for y in range(self.height):
self.tiles[y][x].draw(canvas, x, y)
def undraw(self, canvas):
for x in range(self.width):
for y in range(self.height):
self.tiles[y][x].undraw(canvas)
class House(Board):
def __init__(self, name, height, width, offset):
tiles = [[None for x in range(width + 2 * offset)]
for y in range(height + 2 * offset)]
for x in range(width + 2 * offset):
for y in range(height + 2 * offset):
next_stack = Indoor()
if (x < offset or x >= width + offset or
y < offset or y >= height + offset ):
next_stack = Forest()
if random.random() > 0.6:
next_stack.remove_tile()
elif (x == offset or x == width + offset - 1 or
y == offset or y == height + offset - 1 ):
next_stack = Building()
tiles[y][x] = next_stack
super(House, self).__init__(name, tiles)
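if __name__ == '__main__':
    # Quick smoke-test sketch; it assumes the game_engine star-imports above
    # provide the Indoor/Forest/Building stacks used by House, and that
    # Drawable exposes the width/height attributes used throughout Board.
    house = House('cabin', 5, 5, 2)
    print(house.name, house.width, house.height)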
|
python
|
import rospy
from geometry_msgs.msg import Twist
from std_msgs.msg import String
import time
import sys, select, termios, tty
import donkeycar as dk
# import keyboard # using module keyboard
from pynput import keyboard
#############################################################################################
# keyboard
moveBindings = {
'w': (1, 0),
'a': (0, -1),
's': (-1.2, 0),
'd': (0, 1),
'wa': (1, -1),
'wd': (1, 1),
'sa': (-1.2, -1),
'sd': (-1.2, 1)
}
speedBindings={
'z': (1.1, 1),
'x': (.9, 1),
'c': (1, 1.1),
'v': (1, .9)
}
msg = '''
Reading from the keyboard and Publishing to Twist!
---------------------------
Moving around:
w
a s d
anything else : stop
z/x : increase/decrease only linear speed by 10%
c/v : increase/decrease only angular speed by 10%
CTRL-C to quit
'''
class KeyboardRun:
key_w = False
key_a = False
key_s = False
key_d = False
key_z = False
key_x = False
key_c = False
key_v = False
key_esc = False
cmd_controller_pub_str = 'keyboard'
def __init__(self):
# settings = termios.tcgetattr(sys.stdin)
rospy.init_node('keyboard_node')
self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
self.cmd_controller_pub = rospy.Publisher('cmd_controller', String, queue_size=1)
self.speed = rospy.get_param("~speed", 0.19)
self.turn = rospy.get_param("~turn", 0.7)
self.x = 0
self.y = 0
self.z = 0
self.th = 0
self.status = 0
self.switch = False
def run(self):
self.listener = keyboard.Listener(
on_press=self.on_press,
on_release=self.on_release)
self.listener.start()
self.switch = True
try:
print(msg)
print(self.vels(self.speed, self.turn))
            while self.listener.is_alive():
if self.key_w == True and self.key_a == False and self.key_d == False:
self.x, self.th = moveBindings['w']
elif self.key_s == True and self.key_a == False and self.key_d == False:
self.x, self.th = moveBindings['s']
elif self.key_w == True and self.key_a == True:
self.x, self.th = moveBindings['wa']
elif self.key_w == True and self.key_d == True:
self.x, self.th = moveBindings['wd']
elif self.key_s == True and self.key_a == True:
self.x, self.th = moveBindings['sa']
elif self.key_s == True and self.key_d == True:
self.x, self.th = moveBindings['sd']
elif self.key_d == True:
self.x, self.th = moveBindings['d']
elif self.key_a == True:
self.x, self.th = moveBindings['a']
elif self.key_z == True:
self.speed *= speedBindings['z'][0]
self.turn *= speedBindings['z'][0]
print(self.vels(self.speed, self.turn))
if (self.status == 14):
print(msg)
self.status = (self.status + 1) % 15
elif self.key_x == True:
self.speed *= speedBindings['x'][0]
self.turn *= speedBindings['x'][0]
print(self.vels(self.speed, self.turn))
if (self.status == 14):
print(msg)
self.status = (self.status + 1) % 15
elif self.key_c == True:
self.speed *= speedBindings['c'][0]
self.turn *= speedBindings['c'][0]
print(self.vels(self.speed, self.turn))
if (self.status == 14):
print(msg)
self.status = (self.status + 1) % 15
elif self.key_v == True:
self.speed *= speedBindings['v'][0]
self.turn *= speedBindings['v'][0]
print(self.vels(self.speed, self.turn))
if (self.status == 14):
print(msg)
self.status = (self.status + 1) % 15
                elif self.key_esc == True:
print('exit')
self.switch = False
else:
self.x = 0
self.y = 0
self.z = 0
self.th = 0
twist = Twist()
twist.linear.x = self.x * self.speed
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = self.th * self.turn
self.cmd_controller_pub.publish(self.cmd_controller_pub_str)
self.cmd_vel_pub.publish(twist)
print('twist value : ',twist.linear.x,twist.angular.z)
time.sleep(0.1)
except Exception as e:
print(e)
def on_press(self, key):
try:
print('alphanumeric key {0} pressed'.format(key.char))
_key = key.char
if _key == 'w':
self.key_w = True
elif _key == 'a':
self.key_a = True
elif _key == 's':
self.key_s = True
elif _key == 'd':
self.key_d = True
elif _key == 'z':
self.key_z = True
elif _key == 'x':
self.key_x = True
elif _key == 'c':
self.key_c = True
elif _key == 'v':
self.key_v = True
        except AttributeError:
            # special (non-alphanumeric) key: key.char does not exist here.
            # Latch the esc flag polled by run(); the original never set it.
            if key == keyboard.Key.esc:
                self.key_esc = True
            print('special key {0} pressed'.format(key))
def on_release(self, key):
try:
print('{0} released'.format(key))
_key = key.char
if _key == 'w':
self.key_w = False
elif _key == 'a':
self.key_a = False
elif _key == 's':
self.key_s = False
elif _key == 'd':
self.key_d = False
elif _key == 'z':
self.key_z = False
elif _key == 'x':
self.key_x = False
elif _key == 'c':
self.key_c = False
elif _key == 'v':
self.key_v = False
        except AttributeError:
            # special (non-alphanumeric) key: key.char does not exist here
            print('special key {0} released'.format(key))
def vels(self, speed, turn):
return "currently:\tspeed %s\tturn %s " % (speed, turn)
if __name__=="__main__":
cfg = dk.load_config()
    keyboardRun = KeyboardRun()
keyboardRun.run()
|
python
|
from os import getcwd
from re import findall
from re import match
def parseStep(line):
state = match(r"(on|off)", line).groups()[0]
x1, x2, y1, y2, z1, z2 = map(int, findall(r"(-?\d+)", line))
return state, (x1, x2), (y1, y2), (z1, z2)
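# e.g. parseStep("on x=10..12,y=10..12,z=10..12")
#      -> ('on', (10, 12), (10, 12), (10, 12))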
def main():
with open(f"{getcwd()}/2021/day22/input.txt") as file:
file = file.readlines()
steps = list(map(parseStep, file))
    # lists of each axis' critical values (values that lie on the boundary of regions)
critX = []
critY = []
critZ = []
for step in steps:
_, x, y, z = step
critX.append(x[0])
critX.append(x[1] + 1)
critY.append(y[0])
critY.append(y[1] + 1)
critZ.append(z[0])
critZ.append(z[1] + 1)
steps.reverse()
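    # reversed so that, during the sweep below, the first matching step found
    # for a region is the most recently applied one -- that step decides the
    # region's final on/off state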
critX.sort()
critY.sort()
critZ.sort()
# now we iterate over all regions, adding the volume of each "on" region
total = 0
for x1, x2 in zip(critX, critX[1:]):
print(f"Tallying x={x1} .. {x2}")
xRegs = [v for v in steps if v[1][0] <= x1 <= v[1][1]]
for y1, y2 in zip(critY, critY[1:]):
yRegs = [v for v in xRegs if v[2][0] <= y1 <= v[2][1]]
for z1, z2 in zip(critZ, critZ[1:]):
if next((s == "on" for s, _, _, z in yRegs if z[0] <= z1 <= z[1]), False):
total += (x2 - x1) * (y2 - y1) * (z2 - z1)
print(f"{total} cubes are left on")
if __name__ == "__main__":
main()
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
" Help code for sl training "
import traceback
import numpy as np
import torch
import torch.nn as nn
from pysc2.lib.actions import RAW_FUNCTIONS as F
from alphastarmini.core.arch.agent import Agent
from alphastarmini.core.sl.feature import Feature
from alphastarmini.core.sl.label import Label
from alphastarmini.lib.hyper_parameters import StarCraft_Hyper_Parameters as SCHP
from alphastarmini.lib.hyper_parameters import Label_Size as LS
from alphastarmini.lib.hyper_parameters import Arch_Hyper_Parameters as AHP
from alphastarmini.lib.sc2 import raw_actions_mapping_protoss as RAMP
debug = False
def obs2feature(obs):
s = Agent.get_state_and_action_from_pickle(obs)
feature = Feature.state2feature(s)
print("feature:", feature) if debug else None
print("feature.shape:", feature.shape) if debug else None
print("begin a:") if debug else None
func_call = obs['func_call']
action = Agent.func_call_to_action(func_call).toTenser()
#tag_list = agent.get_tag_list(obs)
print('action.get_shape:', action.get_shape()) if debug else None
logits = action.toLogits()
    print('logits:', logits) if debug else None
label = Label.action2label(logits)
print("label:", label) if debug else None
print("label.shape:", label.shape) if debug else None
return feature, label
def obs2feature_numpy(obs):
s = Agent.get_state_and_action_from_pickle_numpy(obs)
feature = Feature.state2feature_numpy(s)
print("feature:", feature) if debug else None
print("feature.shape:", feature.shape) if debug else None
print("begin a:") if debug else None
func_call = obs['func_call']
action = Agent.func_call_to_action(func_call).toArray()
#tag_list = agent.get_tag_list(obs)
print('action.get_shape:', action.get_shape()) if debug else None
logits = action.toLogits_numpy()
    print('logits:', logits) if debug else None
label = Label.action2label_numpy(logits)
print("label:", label) if debug else None
print("label.shape:", label.shape) if debug else None
return feature, label
def obsToTensor(obs, final_index_list, seq_len, index=0):
    # `index` is assumed to be the absolute offset of this window within the
    # replay; the original body referenced an undefined `index` name
feature_list = []
label_list = []
for value in obs:
feature, label = obs2feature(value)
feature_list.append(feature)
label_list.append(label)
features = torch.cat(feature_list, dim=0)
print("features.shape:", features.shape) if debug else None
labels = torch.cat(label_list, dim=0)
print("labels.shape:", labels.shape) if debug else None
is_final = torch.zeros([features.shape[0], 1])
# consider is_final
print('begin', index) if debug else None
print('end', index + seq_len) if debug else None
for j in final_index_list:
print('j', j) if debug else None
if j >= index and j < index + seq_len:
if debug:
print('in it!')
print('begin', index)
print('end', index + seq_len)
print('j', j)
is_final[j - index, 0] = 1
else:
pass
one_traj = torch.cat([features, labels, is_final], dim=1)
print("one_traj.shape:", one_traj.shape) if debug else None
return one_traj
def get_mask_by_raw_action_id(raw_action_id):
need_args = F[raw_action_id].args
    # mask slots: [action_type, delay, queued, unit_tags, target_unit_tag, world];
    # action type and delay are always enabled
    action_mask = [1, 1, 0, 0, 0, 0]
for arg in need_args:
print("arg:", arg) if debug else None
if arg.name == 'queued':
action_mask[2] = 1
elif arg.name == 'unit_tags':
action_mask[3] = 1
elif arg.name == 'target_unit_tag':
action_mask[4] = 1
elif arg.name == 'world':
action_mask[5] = 1
print('action_mask:', action_mask) if debug else None
return action_mask
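# e.g. get_mask_by_raw_action_id(F.raw_move_camera.id.value) should yield
# [1, 1, 0, 0, 0, 1], assuming raw_move_camera takes only a 'world' argument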
def get_one_way_mask_in_SL(action_type_gt, device):
    # only consider the ground truth; action_type_gt is a one-hot embedding
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
mask_list = []
for raw_action_id in ground_truth_raw_action_id:
mask_list.append(get_mask_by_raw_action_id(raw_action_id.item()))
mask_tensor = torch.tensor(mask_list)
mask_tensor = mask_tensor.to(device)
return mask_tensor
def get_two_way_mask_in_SL(action_type_gt, action_pred, device, strict_comparsion=True):
# consider the ground truth and the predicted
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
action_pred = action_pred.reshape(-1)
mask_list = []
mask_list_2 = []
print('ground_truth.shape', ground_truth_raw_action_id.shape) if debug else None
print('ground_truth', ground_truth_raw_action_id) if debug else None
print('action_pred.shape', action_pred.shape) if debug else None
print('action_pred', action_pred) if debug else None
for raw_action_id, action_id in zip(ground_truth_raw_action_id, action_pred):
mask_raw = get_mask_by_raw_action_id(raw_action_id.item())
mask_predict = get_mask_by_raw_action_id(action_id.item())
if strict_comparsion:
if raw_action_id.item() == action_id.item():
mask_list.append(mask_raw)
mask_list_2.append(mask_predict)
else:
zero_mask = [1, 1, 0, 0, 0, 0]
mask_list.append(zero_mask)
mask_list_2.append(zero_mask)
else:
mask_list.append(mask_raw)
mask_list_2.append(mask_predict)
mask_tensor = torch.tensor(mask_list)
mask_tensor_2 = torch.tensor(mask_list_2)
print('mask_tensor', mask_tensor) if debug else None
print('mask_tensor_2', mask_tensor_2) if debug else None
mask_tensor_return = mask_tensor * mask_tensor_2
print('mask_tensor_return', mask_tensor_return) if debug else None
mask_tensor_return = mask_tensor_return.to(device)
return mask_tensor_return
def get_move_camera_weight_in_SL(action_type_gt, action_pred, device,
decrease_smart_opertaion=False, only_consider_small=False):
# consider the ground truth and the predicted
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
mask_list = []
MOVE_CAMERA_ID = F.raw_move_camera.id
Smart_pt_id = F.Smart_pt.id
Smart_unit_id = F.Smart_unit.id
    # Note: in SC2 replays, move_camera makes up roughly 50% of all actions.
    # We assume every other action has the same happening rate, so the
    # move_camera weight is 1 and the non-move_camera weight is
    # MAX_ACTIONS / 2. / alpha (alpha is set to 40 below).
    MOVE_CAMERA_WEIGHT = 1.  # 1. / LS.action_type_encoding * 2.
    alpha = 40.
    NON_MOVE_CAMERA_WEIGHT = LS.action_type_encoding / 2. / alpha
    SMALL_IMPORTANT_WEIGHT = NON_MOVE_CAMERA_WEIGHT * 5
    # Note: human replays contain many Smart_pt and Smart_unit operations.
    # These actions carry little signal (it is hard to filter unit types for
    # selection for them), so we also choose to decrease their weight.
if decrease_smart_opertaion:
# TODO: change these ids to Func.id
SMART_WEIGHT = 1.5
else:
SMART_WEIGHT = NON_MOVE_CAMERA_WEIGHT
print('ground_truth_raw_action_id', ground_truth_raw_action_id) if debug else None
for raw_action_id in ground_truth_raw_action_id:
aid = raw_action_id.item()
if not only_consider_small:
if aid == MOVE_CAMERA_ID:
mask_list.append([MOVE_CAMERA_WEIGHT])
elif aid == Smart_pt_id:
mask_list.append([SMART_WEIGHT])
elif aid == Smart_unit_id:
mask_list.append([SMART_WEIGHT])
else:
# func_name = F[aid].name
# select, _, _ = RAMP.SMALL_MAPPING.get(func_name, [None, None, 1])
if aid in RAMP.SMALL_LIST:
mask_list.append([SMALL_IMPORTANT_WEIGHT])
else:
mask_list.append([NON_MOVE_CAMERA_WEIGHT])
else:
if aid in RAMP.SMALL_LIST:
mask_list.append([SMALL_IMPORTANT_WEIGHT])
else:
mask_list.append([1.])
mask_tensor = torch.tensor(mask_list)
print('mask_tensor', mask_tensor) if debug else None
# also use predict value to weight
# not used first
if False:
mask_list_2 = []
for action_id in action_pred:
if action_id.item() == MOVE_CAMERA_ID:
mask_list_2.append([MOVE_CAMERA_WEIGHT])
else:
mask_list_2.append([NON_MOVE_CAMERA_WEIGHT])
mask_tensor_2 = torch.tensor(mask_list_2)
mask_tensor = mask_tensor * mask_tensor_2
mask_tensor = mask_tensor.to(device)
return mask_tensor
def get_selected_units_accuracy(ground_truth, predict, select_units_num, action_equal_mask,
device, strict_comparsion=True, use_strict_order=False):
    all_num, correct_num, gt_num, pred_num = 0, 0, 1, 0  # gt_num starts at 1, presumably to avoid a zero denominator downstream
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
size = ground_truth.shape[0]
NONE_INDEX = AHP.max_entities - 1
for i in range(size):
ground_truth_sample = ground_truth[i]
ground_truth_new = torch.nonzero(ground_truth_sample, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.cpu().detach().numpy().tolist()
print('ground_truth units', ground_truth_new) if debug else None
predict_sample = predict[i].reshape(-1)
print('predict_sample units', predict_sample) if debug else None
select_units_num_sample = select_units_num[i].item()
print('select_units_num_sample units', select_units_num_sample) if debug else None
for j in range(select_units_num_sample):
pred = predict_sample[j].item()
gt = ground_truth_new[j]
if gt != NONE_INDEX: # the last index is the None index
gt_num += 1
if use_strict_order:
if pred == gt and pred != NONE_INDEX:
correct_num += 1
else:
if pred in ground_truth_new and pred != NONE_INDEX:
correct_num += 1
pred_num += 1
all_num += AHP.max_selected
print('get_selected_units_accuracy', [correct_num, gt_num, pred_num, all_num])
return [correct_num, gt_num, pred_num, all_num]
def get_target_unit_accuracy(ground_truth, predict, action_equal_mask, device,
strict_comparsion=True, remove_none=True):
right_num, all_num = 0, 0
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
print('ground_truth target_unit', ground_truth)
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth_new target_unit', ground_truth_new) if debug else None
predict_new = predict.reshape(-1)
print('predict_new target_unit', predict_new)
NONE_ID = AHP.max_entities - 1
if remove_none:
effect_index = (ground_truth_new != NONE_ID).nonzero(as_tuple=True)[0]
ground_truth_new = ground_truth_new[effect_index]
predict_new = predict_new[effect_index]
right_num, all_num = get_right_and_all_num(ground_truth_new, predict_new)
print('get_target_unit_accuracy', [right_num, all_num])
return [right_num, all_num]
def get_location_accuracy(ground_truth, predict, action_equal_mask, device, strict_comparsion=True):
all_nums = ground_truth.shape[0]
    effect_nums = 0  # counts cases where the location argument is applied in both ground_truth and predict
correct_nums = 0
distance_loss = 0.
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
ground_truth = ground_truth.reshape(ground_truth.shape[0], -1)
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth location', ground_truth_new) if debug else None
output_map_size = SCHP.world_size
for i, idx in enumerate(ground_truth_new):
row_number = idx // output_map_size
col_number = idx - output_map_size * row_number
gt_location_y = row_number
gt_location_x = col_number
print("gt_location_y, gt_location_x", gt_location_y, gt_location_x) if debug else None
[predict_x, predict_y] = predict[i]
print("predict_x, predict_y", predict_x, predict_y) if debug else None
x_diff_square = (predict_x.item() - gt_location_x.item()) ** 2
y_diff_square = (predict_y.item() - gt_location_y.item()) ** 2
print('x_diff_square', x_diff_square) if debug else None
print('y_diff_square', y_diff_square) if debug else None
            # pos(output_map_size-1, output_map_size-1) is treated as a flag meaning the
            # location argument is not applied for this action; e.g., we will hardly ever
            # choose or see a point at pos(output_map_size-1, output_map_size-1)
            if not (gt_location_y.item() == output_map_size - 1 and gt_location_x.item() == output_map_size - 1):  # the last index is the None index
if not (predict_x.item() == 0 and predict_y.item() == 0):
effect_nums += 1
diff_square = x_diff_square + y_diff_square
distance_loss += diff_square
if diff_square == 0:
correct_nums += 1
print('get_location_accuracy', [correct_nums, effect_nums, all_nums, distance_loss]) if debug else None
return [correct_nums, effect_nums, all_nums, distance_loss]
def get_accuracy(ground_truth, predict, device, return_important=False):
accuracy = 0.
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth action_type', ground_truth_new) if debug else None
predict_new = predict.reshape(-1)
print('predict_new', predict_new) if debug else None
# shape: [batch_size]
action_equal_mask = (ground_truth_new == predict_new)
    # count how many actions are move_camera; its id is 168 in raw actions
MOVE_CAMERA_ID = 168
#camera_num_action_type = torch.sum(MOVE_CAMERA_ID == ground_truth_new)
move_camera_index = (ground_truth_new == MOVE_CAMERA_ID).nonzero(as_tuple=True)[0]
non_camera_index = (ground_truth_new != MOVE_CAMERA_ID).nonzero(as_tuple=True)[0]
short_important_list = []
for j in RAMP.SMALL_MAPPING.keys():
aid = F[j].id.value
print('aid', aid) if debug else None
short_index = (ground_truth_new == aid).nonzero(as_tuple=True)[0]
print('short_index', short_index) if debug else None
short_important_list.append(short_index)
short_important_index = torch.cat(short_important_list)
print('short_important_index', short_important_index) if debug else None
print('move_camera_index', move_camera_index) if debug else None
print('non_camera_index', non_camera_index) if debug else None
print('for any type action') if debug else None
right_num, all_num = get_right_and_all_num(ground_truth_new, predict_new)
print('for move_camera action') if debug else None
camera_ground_truth_new = ground_truth_new[move_camera_index]
camera_predict_new = predict_new[move_camera_index]
camera_right_num, camera_all_num = get_right_and_all_num(camera_ground_truth_new, camera_predict_new)
print('for non-camera action') if debug else None
non_camera_ground_truth_new = ground_truth_new[non_camera_index]
non_camera_predict_new = predict_new[non_camera_index]
non_camera_right_num, non_camera_all_num = get_right_and_all_num(non_camera_ground_truth_new, non_camera_predict_new)
print('for short-important action') if debug else None
short_important_ground_truth_new = ground_truth_new[short_important_index]
short_important_predict_new = predict_new[short_important_index]
short_important_right_num, short_important_all_num = get_right_and_all_num(short_important_ground_truth_new, short_important_predict_new)
acc_list = [right_num, all_num, camera_right_num, camera_all_num, non_camera_right_num,
non_camera_all_num, short_important_right_num, short_important_all_num]
return acc_list, action_equal_mask
def get_right_and_all_num(gt, pred):
acc_num_action_type = torch.sum(pred == gt)
print('acc_num_action_type', acc_num_action_type) if debug else None
right_num = acc_num_action_type.item()
print('right_num', right_num) if debug else None
all_num = gt.shape[0]
print('all_num', all_num) if debug else None
accuracy = right_num / (all_num + 1e-9)
print('accuracy', accuracy) if debug else None
return right_num, all_num
|
python
|
from ..catalogs.in_memory import Catalog
from .dataframe import DataFrameAdapter
import dask.dataframe
import pandas
class ExcelReader(Catalog):
"""
Read the sheets in an Excel file.
    This maps the Excel file, which may contain one or more spreadsheets,
onto a "Catalog" of tabular structures.
Examples
--------
Given a file path
>>> ExcelReader.from_file("path/to/excel_file.xlsx")
Given a file object
>>> file = open("path/to/excel_file.xlsx")
>>> ExcelReader.from_file(file)
Given a pandas.ExcelFile object
>>> import pandas
>>> ef = pandas.ExcelFile(file)
>>> ExcelReader.from_file(ef)
"""
@classmethod
def from_file(cls, file):
if isinstance(file, pandas.ExcelFile):
excel_file = file
else:
excel_file = pandas.ExcelFile(file)
mapping = {
sheet_name: DataFrameAdapter(
dask.dataframe.from_pandas(
excel_file.parse(sheet_name),
npartitions=1, # TODO Be smarter about this.
)
)
for sheet_name in excel_file.sheet_names
}
return cls(mapping)
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tool to manage local 3D Beacon install
.. currentmodule:: bio3dbeacon
.. moduleauthor:: Ian Sillitoe <[email protected]>
"""
from .version import __version__, __release__ # noqa
|
python
|
import mido
import cv2
# Color library: a dictionary mapping RGB values to Launchpad MK2 velocity values
from ClearLaunchpad import RemoveNotes, ClearScreen
from FirstMido import FillNotes
cap = cv2.imread("Velocity2RGB.png")
Complete = cap.copy()
while(True):
Mat = cv2.inRange(cap, (0, 0, 0), (254, 254, 254))
cv2.imshow("Mat", Mat)
contours, hierarchy = cv2.findContours(Mat, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
for i, c in enumerate(contours):
if hierarchy[0][i][2] == -1 or hierarchy[0][i][2] > 1:
if cv2.contourArea(c) < 60000 and cv2.contourArea(c) > 1000:
try:
cX = int(cv2.moments(c)["m10"] / cv2.moments(c)["m00"])
except ZeroDivisionError:
cX = 0
try:
cY = int(cv2.moments(c)["m01"] / cv2.moments(c)["m00"])
except ZeroDivisionError:
cY = 0
                cv2.circle(Complete, (cX, cY), 0, (255, 255, 255), -1)  # cv2.circle draws on Complete in place
print(cX,cY)
cv2.imshow("Final", Complete)
k = cv2.waitKey(32)
if k == 32:
break
# cv2.imread returns a plain ndarray, not a capture object, so there is nothing to release
cv2.destroyAllWindows()
# Each centre on the x axis is spaced 45 units apart, starting with 20 as the first centre point (left to right); min of 20, max of 335.
# On the y axis the first centre is 21 and also moves by 45 units (we can go with 20 and it will still be the same); min of 20, max of 756.
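# Hypothetical helper sketched from the spacing notes above (not part of the
# original script): map a detected centre to its (column, row) index on the
# 45-unit grid anchored at (20, 21).
def center_to_grid(cx, cy, x0=20, y0=21, step=45):
    return round((cx - x0) / step), round((cy - y0) / step)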
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2012, Rui Carmo
Description: In-process job management
License: MIT (see LICENSE.md for details)
"""
import os, sys, logging, time, traceback, multiprocessing, gc
from cPickle import loads, dumps
from Queue import PriorityQueue, Empty
from threading import Thread, Semaphore
from uuid import uuid4
from functools import partial
from collections import defaultdict
log = logging.getLogger(__name__)
default_priority = 0
max_workers = multiprocessing.cpu_count() * 2
class Pool:
"""Represents a thread pool"""
def __init__(self, workers = max_workers, rate_limit = 1000):
self.max_workers = workers
self.mutex = Semaphore()
self.results = {}
self.retries = defaultdict(int)
self.queue = PriorityQueue()
self.threads = []
self.rate_limit = rate_limit
def _tick(self):
time.sleep(1.0/self.rate_limit)
# clean up finished threads
self.threads = [t for t in self.threads if t.isAlive()]
return (not self.queue.empty()) or (len(self.threads) > 0)
def _loop(self):
"""Handle task submissions"""
def run_task(priority, f, uuid, retries, args, kwargs):
"""Run a single task"""
try:
t.name = getattr(f, '__name__', None)
result = f(*args, **kwargs)
except Exception as e:
# Retry the task if applicable
if log:
log.error(traceback.format_exc())
if retries > 0:
with self.mutex:
self.retries[uuid] += 1
# re-queue the task with a lower (i.e., higher-valued) priority
self.queue.put((priority+1, dumps((f, uuid, retries - 1, args, kwargs))))
self.queue.task_done()
return
result = e
with self.mutex:
self.results[uuid] = dumps(result)
self.retries[uuid] += 1
self.queue.task_done()
while self._tick():
# spawn more threads to fill free slots
log.warn("Running %d/%d threads" % (len(self.threads),self.max_workers))
if len(self.threads) < self.max_workers:
log.debug("Queue Length: %d" % self.queue.qsize())
try:
priority, data = self.queue.get(True, 1.0/self.rate_limit)
except Empty:
continue
f, uuid, retries, args, kwargs = loads(data)
t = Thread(target=run_task, args=[priority, f, uuid, retries, args, kwargs])
t.setDaemon(True)
self.threads.append(t)
t.start()
log.debug("Exited loop.")
for t in self.threads:
t.join()
def stop(self):
"""Flush the job queue"""
self.queue = PriorityQueue()
def start(self, daemonize=False):
"""Pool entry point"""
self.results = {}
self.retries = defaultdict(int)
if daemonize:
            t = Thread(target=self._loop)  # bound method: passing self again via args would raise a TypeError
t.setDaemon(True)
t.start()
return
else:
self._loop()
default_pool = Pool()
class Deferred(object):
"""Allows lookup of task results and status"""
def __init__(self, pool, uuid):
self.uuid = uuid
self.pool = pool
self._result = None
@property
def result(self):
if self._result is None:
with self.pool.mutex:
if self.uuid in self.pool.results.keys():
self._result = loads(self.pool.results[self.uuid])
return self._result
@property
def retries(self):
return self.pool.retries[self.uuid]
def task(func=None, pool=None, max_retries=0, priority=default_priority):
"""Task decorator - setus up a .delay() attribute in the task function"""
if func is None:
        return partial(task, pool=pool, max_retries=max_retries, priority=priority)  # preserve priority across the partial application
if pool is None:
pool = default_pool
def delay(*args, **kwargs):
uuid = str(uuid4()) # one for each task
pool.queue.put((priority,dumps((func, uuid, max_retries, args, kwargs))))
return Deferred(pool, uuid)
func.delay = delay
func.pool = pool
return func
def start(daemonize = False):
default_pool.start(daemonize = daemonize)
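if __name__ == '__main__':
    # Minimal demonstration sketch: queue one task, drain the pool with a
    # blocking start(), then read the result back through the Deferred handle.
    @task(max_retries=1)
    def double(x):
        return x * 2
    d = double.delay(21)
    start()          # blocks until the queue drains
    print d.result   # expected: 42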
|
python
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import cirq
import cirq.testing
def assert_dirac_notation_numpy(vec, expected, decimals=2):
assert cirq.dirac_notation(np.array(vec), decimals=decimals) == expected
def assert_dirac_notation_python(vec, expected, decimals=2):
assert cirq.dirac_notation(vec, decimals=decimals) == expected
def assert_valid_density_matrix(matrix, num_qubits=None, qid_shape=None):
if qid_shape is None and num_qubits is None:
num_qubits = 1
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
matrix, num_qubits=num_qubits, qid_shape=qid_shape, dtype=matrix.dtype
),
matrix,
)
def test_quantum_state():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
density_matrix_1 = np.outer(state_vector_1, np.conj(state_vector_1))
state = cirq.QuantumState(state_vector_1)
assert state.data is state_vector_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
np.testing.assert_array_equal(state.state_vector(), state_vector_1)
np.testing.assert_array_equal(state.state_tensor(), state_tensor_1)
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), state_vector_1)
state = cirq.QuantumState(state_tensor_1, qid_shape=(2, 2))
assert state.data is state_tensor_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
np.testing.assert_array_equal(state.state_vector(), state_vector_1)
np.testing.assert_array_equal(state.state_tensor(), state_tensor_1)
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), state_vector_1)
state = cirq.QuantumState(density_matrix_1, qid_shape=(2, 2))
assert state.data is density_matrix_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
assert state.state_vector() is None
assert state.state_tensor() is None
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), density_matrix_1)
def test_quantum_state_quantum_state():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
quantum_state = cirq.QuantumState(state_vector_1)
state = cirq.quantum_state(quantum_state)
assert state is quantum_state
assert state.data is quantum_state.data
assert state.dtype == np.complex128
state = cirq.quantum_state(quantum_state, copy=True)
assert state is not quantum_state
assert state.data is not quantum_state.data
assert state.dtype == np.complex128
state = cirq.quantum_state(quantum_state, dtype=np.complex64)
assert state is not quantum_state
assert state.data is not quantum_state.data
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='qid shape'):
state = cirq.quantum_state(quantum_state, qid_shape=(4,))
def test_quantum_state_computational_basis_state():
state = cirq.quantum_state(7, qid_shape=(3, 4))
np.testing.assert_allclose(state.data, cirq.one_hot(index=7, shape=(12,), dtype=np.complex64))
assert state.qid_shape == (3, 4)
assert state.dtype == np.complex64
state = cirq.quantum_state((0, 1, 2, 3), qid_shape=(1, 2, 3, 4), dtype=np.complex128)
np.testing.assert_allclose(
state.data, cirq.one_hot(index=(0, 1, 2, 3), shape=(1, 2, 3, 4), dtype=np.complex64)
)
assert state.qid_shape == (1, 2, 3, 4)
assert state.dtype == np.complex128
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state(7)
with pytest.raises(ValueError, match='out of range'):
_ = cirq.quantum_state(7, qid_shape=(2, 2))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state((0, 1, 2, 3))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.quantum_state((0, 1, 2, 3), qid_shape=(2, 2, 2, 2))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state((0, 0, 1, 1), qid_shape=(1, 1, 2, 2))
def test_quantum_state_state_vector_state_tensor():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
state = cirq.quantum_state(state_vector_1, dtype=np.complex64)
np.testing.assert_array_equal(state.data, state_vector_1)
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex64
state = cirq.quantum_state(state_tensor_1, qid_shape=(2, 2))
assert state.data is state_tensor_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state(state_tensor_1)
with pytest.raises(ValueError, match='not compatible'):
_ = cirq.quantum_state(state_tensor_1, qid_shape=(2, 3))
def test_quantum_state_density_matrix():
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
state = cirq.quantum_state(density_matrix_1, qid_shape=(4,), copy=True)
assert state.data is not density_matrix_1
np.testing.assert_array_equal(state.data, density_matrix_1)
assert state.qid_shape == (4,)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='not compatible'):
_ = cirq.quantum_state(density_matrix_1, qid_shape=(8,))
def test_quantum_state_product_state():
q0, q1, q2 = cirq.LineQubit.range(3)
product_state_1 = cirq.KET_PLUS(q0) * cirq.KET_PLUS(q1) * cirq.KET_ONE(q2)
state = cirq.quantum_state(product_state_1)
np.testing.assert_allclose(state.data, product_state_1.state_vector())
assert state.qid_shape == (2, 2, 2)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='qid shape'):
_ = cirq.quantum_state(product_state_1, qid_shape=(2, 2))
def test_density_matrix():
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex64)
state = cirq.density_matrix(density_matrix_1)
assert state.data is density_matrix_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='square'):
_ = cirq.density_matrix(state_vector_1)
def test_infer_qid_shape():
computational_basis_state_1 = [0, 0, 0, 1]
computational_basis_state_2 = [0, 1, 2, 3]
computational_basis_state_3 = [0, 1, 2, 4]
computational_basis_state_4 = 9
computational_basis_state_5 = [0, 1, 2, 4, 5]
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex64)
state_vector_2 = cirq.one_hot(shape=(24,), dtype=np.complex64)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
state_tensor_2 = np.reshape(state_vector_2, (1, 2, 3, 4))
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
density_matrix_2 = np.eye(24, dtype=np.complex64) / 24
q0, q1 = cirq.LineQubit.range(2)
product_state_1 = cirq.KET_PLUS(q0) * cirq.KET_PLUS(q1)
assert cirq.qis.infer_qid_shape(
computational_basis_state_1,
state_vector_1,
state_tensor_1,
density_matrix_1,
product_state_1,
) == (2, 2)
assert cirq.qis.infer_qid_shape(
product_state_1,
density_matrix_1,
state_tensor_1,
state_vector_1,
computational_basis_state_1,
) == (2, 2)
assert cirq.qis.infer_qid_shape(
computational_basis_state_1,
computational_basis_state_2,
computational_basis_state_4,
state_tensor_2,
) == (1, 2, 3, 4)
assert cirq.qis.infer_qid_shape(
state_vector_2, density_matrix_2, computational_basis_state_4
) == (24,)
assert cirq.qis.infer_qid_shape(state_tensor_2, density_matrix_2) == (1, 2, 3, 4)
assert cirq.qis.infer_qid_shape(computational_basis_state_4) == (10,)
assert cirq.qis.infer_qid_shape(15, 7, 22, 4) == (23,)
with pytest.raises(ValueError, match='No states were specified'):
_ = cirq.qis.infer_qid_shape()
with pytest.raises(ValueError, match='Failed'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1, computational_basis_state_5)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(state_tensor_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(density_matrix_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1, computational_basis_state_2)
with pytest.raises(ValueError, match='Failed'):
_ = cirq.qis.infer_qid_shape(state_vector_1, computational_basis_state_4)
with pytest.raises(ValueError, match='Failed to infer'):
_ = cirq.qis.infer_qid_shape(state_vector_1, state_vector_2)
with pytest.raises(ValueError, match='Failed to infer'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_3, state_tensor_2)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_zero_state(global_phase):
zero_state = global_phase * np.array([1, 0])
bloch = cirq.bloch_vector_from_state_vector(zero_state, 0)
desired_simple = np.array([0, 0, 1])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_one_state(global_phase):
one_state = global_phase * np.array([0, 1])
bloch = cirq.bloch_vector_from_state_vector(one_state, 0)
desired_simple = np.array([0, 0, -1])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_plus_state(global_phase):
sqrt = np.sqrt(0.5)
plus_state = global_phase * np.array([sqrt, sqrt])
bloch = cirq.bloch_vector_from_state_vector(plus_state, 0)
desired_simple = np.array([1, 0, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_minus_state(global_phase):
sqrt = np.sqrt(0.5)
    minus_state = global_phase * np.array([-1.0j * sqrt, 1.0j * sqrt])  # apply the parametrized phase like the other tests
bloch = cirq.bloch_vector_from_state_vector(minus_state, 0)
desired_simple = np.array([-1, 0, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_iplus_state(global_phase):
sqrt = np.sqrt(0.5)
iplus_state = global_phase * np.array([sqrt, 1j * sqrt])
bloch = cirq.bloch_vector_from_state_vector(iplus_state, 0)
desired_simple = np.array([0, 1, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_iminus_state(global_phase):
sqrt = np.sqrt(0.5)
iminus_state = global_phase * np.array([sqrt, -1j * sqrt])
bloch = cirq.bloch_vector_from_state_vector(iminus_state, 0)
desired_simple = np.array([0, -1, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_simple_th_zero():
sqrt = np.sqrt(0.5)
# State TH|0>.
th_state = np.array([sqrt, 0.5 + 0.5j])
bloch = cirq.bloch_vector_from_state_vector(th_state, 0)
desired_simple = np.array([sqrt, sqrt, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_equal_sqrt3():
sqrt3 = 1 / np.sqrt(3)
test_state = np.array([0.888074, 0.325058 + 0.325058j])
bloch = cirq.bloch_vector_from_state_vector(test_state, 0)
desired_simple = np.array([sqrt3, sqrt3, sqrt3])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_multi_pure():
plus_plus_state = np.array([0.5, 0.5, 0.5, 0.5])
bloch_0 = cirq.bloch_vector_from_state_vector(plus_plus_state, 0)
bloch_1 = cirq.bloch_vector_from_state_vector(plus_plus_state, 1)
desired_simple = np.array([1, 0, 0])
np.testing.assert_array_almost_equal(bloch_1, desired_simple)
np.testing.assert_array_almost_equal(bloch_0, desired_simple)
def test_bloch_vector_multi_mixed():
sqrt = np.sqrt(0.5)
# Bell state 1/sqrt(2)(|00>+|11>)
phi_plus = np.array([sqrt, 0.0, 0.0, sqrt])
bloch_0 = cirq.bloch_vector_from_state_vector(phi_plus, 0)
bloch_1 = cirq.bloch_vector_from_state_vector(phi_plus, 1)
zero = np.zeros(3)
np.testing.assert_array_almost_equal(bloch_0, zero)
np.testing.assert_array_almost_equal(bloch_1, zero)
rcnot_state = np.array([0.90612745, -0.07465783j, -0.37533028j, 0.18023996])
bloch_mixed_0 = cirq.bloch_vector_from_state_vector(rcnot_state, 0)
bloch_mixed_1 = cirq.bloch_vector_from_state_vector(rcnot_state, 1)
true_mixed_0 = np.array([0.0, -0.6532815, 0.6532815])
true_mixed_1 = np.array([0.0, 0.0, 0.9238795])
np.testing.assert_array_almost_equal(true_mixed_0, bloch_mixed_0)
np.testing.assert_array_almost_equal(true_mixed_1, bloch_mixed_1)
def test_bloch_vector_multi_big():
five_qubit_plus_state = np.array([0.1767767] * 32)
desired_simple = np.array([1, 0, 0])
for qubit in range(0, 5):
bloch_i = cirq.bloch_vector_from_state_vector(five_qubit_plus_state, qubit)
np.testing.assert_array_almost_equal(bloch_i, desired_simple)
def test_bloch_vector_invalid():
with pytest.raises(ValueError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5]), 0)
with pytest.raises(IndexError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5, 0.5]), -1)
with pytest.raises(IndexError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5, 0.5]), 2)
def test_density_matrix_from_state_vector():
test_state = np.array(
[
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
]
)
full_rho = cirq.density_matrix_from_state_vector(test_state)
np.testing.assert_array_almost_equal(full_rho, np.outer(test_state, np.conj(test_state)))
rho_one = cirq.density_matrix_from_state_vector(test_state, [1])
true_one = np.array([[0.5 + 0.0j, 0.5 + 0.0j], [0.5 + 0.0j, 0.5 + 0.0j]])
np.testing.assert_array_almost_equal(rho_one, true_one)
rho_two_zero = cirq.density_matrix_from_state_vector(test_state, [0, 2])
true_two_zero = np.array(
[
[0.25 + 0.0j, -0.25 + 0.0j, -0.25 + 0.0j, 0.25 + 0.0j],
[-0.25 + 0.0j, 0.25 + 0.0j, 0.25 + 0.0j, -0.25 + 0.0j],
[-0.25 + 0.0j, 0.25 + 0.0j, 0.25 + 0.0j, -0.25 + 0.0j],
[0.25 + 0.0j, -0.25 + 0.0j, -0.25 + 0.0j, 0.25 + 0.0j],
]
)
np.testing.assert_array_almost_equal(rho_two_zero, true_two_zero)
# two and zero will have same single qubit density matrix.
rho_two = cirq.density_matrix_from_state_vector(test_state, [2])
true_two = np.array([[0.5 + 0.0j, -0.5 + 0.0j], [-0.5 + 0.0j, 0.5 + 0.0j]])
np.testing.assert_array_almost_equal(rho_two, true_two)
rho_zero = cirq.density_matrix_from_state_vector(test_state, [0])
np.testing.assert_array_almost_equal(rho_zero, true_two)
def test_density_matrix_invalid():
bad_state = np.array([0.5, 0.5, 0.5])
good_state = np.array([0.5, 0.5, 0.5, 0.5])
with pytest.raises(ValueError):
_ = cirq.density_matrix_from_state_vector(bad_state)
with pytest.raises(ValueError):
_ = cirq.density_matrix_from_state_vector(bad_state, [0, 1])
with pytest.raises(IndexError):
_ = cirq.density_matrix_from_state_vector(good_state, [-1, 0, 1])
with pytest.raises(IndexError):
_ = cirq.density_matrix_from_state_vector(good_state, [-1])
def test_dirac_notation():
sqrt = np.sqrt(0.5)
exp_pi_2 = 0.5 + 0.5j
assert_dirac_notation_numpy([0, 0], "0")
assert_dirac_notation_python([1], "|⟩")
assert_dirac_notation_numpy([sqrt, sqrt], "0.71|0⟩ + 0.71|1⟩")
assert_dirac_notation_python([-sqrt, sqrt], "-0.71|0⟩ + 0.71|1⟩")
assert_dirac_notation_numpy([sqrt, -sqrt], "0.71|0⟩ - 0.71|1⟩")
assert_dirac_notation_python([-sqrt, -sqrt], "-0.71|0⟩ - 0.71|1⟩")
assert_dirac_notation_numpy([sqrt, 1j * sqrt], "0.71|0⟩ + 0.71j|1⟩")
assert_dirac_notation_python([sqrt, exp_pi_2], "0.71|0⟩ + (0.5+0.5j)|1⟩")
assert_dirac_notation_numpy([exp_pi_2, -sqrt], "(0.5+0.5j)|0⟩ - 0.71|1⟩")
assert_dirac_notation_python([exp_pi_2, 0.5 - 0.5j], "(0.5+0.5j)|0⟩ + (0.5-0.5j)|1⟩")
assert_dirac_notation_numpy([0.5, 0.5, -0.5, -0.5], "0.5|00⟩ + 0.5|01⟩ - 0.5|10⟩ - 0.5|11⟩")
assert_dirac_notation_python([0.71j, 0.71j], "0.71j|0⟩ + 0.71j|1⟩")
def test_dirac_notation_partial_state():
sqrt = np.sqrt(0.5)
exp_pi_2 = 0.5 + 0.5j
assert_dirac_notation_numpy([1, 0], "|0⟩")
assert_dirac_notation_python([1j, 0], "1j|0⟩")
assert_dirac_notation_numpy([0, 1], "|1⟩")
assert_dirac_notation_python([0, 1j], "1j|1⟩")
assert_dirac_notation_numpy([sqrt, 0, 0, sqrt], "0.71|00⟩ + 0.71|11⟩")
assert_dirac_notation_python([sqrt, sqrt, 0, 0], "0.71|00⟩ + 0.71|01⟩")
assert_dirac_notation_numpy([exp_pi_2, 0, 0, exp_pi_2], "(0.5+0.5j)|00⟩ + (0.5+0.5j)|11⟩")
assert_dirac_notation_python([0, 0, 0, 1], "|11⟩")
def test_dirac_notation_precision():
sqrt = np.sqrt(0.5)
assert_dirac_notation_numpy([sqrt, sqrt], "0.7|0⟩ + 0.7|1⟩", decimals=1)
assert_dirac_notation_python([sqrt, sqrt], "0.707|0⟩ + 0.707|1⟩", decimals=3)
def test_to_valid_state_vector():
with pytest.raises(ValueError, match='Computational basis state is out of range'):
cirq.to_valid_state_vector(2, 1)
np.testing.assert_almost_equal(
cirq.to_valid_state_vector(np.array([1.0, 0.0, 0.0, 0.0], dtype=np.complex64), 2),
np.array([1.0, 0.0, 0.0, 0.0]),
)
np.testing.assert_almost_equal(
cirq.to_valid_state_vector(np.array([0.0, 1.0, 0.0, 0.0], dtype=np.complex64), 2),
np.array([0.0, 1.0, 0.0, 0.0]),
)
np.testing.assert_almost_equal(cirq.to_valid_state_vector(0, 2), np.array([1.0, 0.0, 0.0, 0.0]))
np.testing.assert_almost_equal(cirq.to_valid_state_vector(1, 2), np.array([0.0, 1.0, 0.0, 0.0]))
v = cirq.to_valid_state_vector([0, 1, 2, 0], qid_shape=(3, 3, 3, 3))
assert v.shape == (3**4,)
assert v[6 + 9] == 1
v = cirq.to_valid_state_vector([False, True, False, False], num_qubits=4)
assert v.shape == (16,)
assert v[4] == 1
v = cirq.to_valid_state_vector([0, 1, 0, 0], num_qubits=2)
assert v.shape == (4,)
assert v[1] == 1
v = cirq.to_valid_state_vector(np.array([1, 0], dtype=np.complex64), qid_shape=(2, 1))
assert v.shape == (2,)
assert v[0] == 1
def test_to_valid_state_vector_creates_new_copy():
state = np.array([1.0, 0.0, 0.0, 0.0], dtype=np.complex64)
out = cirq.to_valid_state_vector(state, 2)
assert out is not state
def test_invalid_to_valid_state_vector():
with pytest.raises(ValueError, match="Please specify"):
_ = cirq.to_valid_state_vector(np.array([1]))
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(np.array([1.0, 0.0], dtype=np.complex64), 2)
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(-1, 2)
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(5, 2)
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector('0000', 2)
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector('not an int', 2)
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
_ = cirq.to_valid_state_vector(0, 5, qid_shape=(1, 2, 3))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.to_valid_state_vector([3], qid_shape=(3,))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.to_valid_state_vector([-1], qid_shape=(3,))
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector([], qid_shape=(3,))
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector([0, 1], num_qubits=3)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.to_valid_state_vector([1, 0], qid_shape=(2, 1))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.to_valid_state_vector(np.array([1, 0], dtype=np.int64), qid_shape=(2, 1))
def test_validate_normalized_state():
cirq.validate_normalized_state_vector(cirq.testing.random_superposition(2), qid_shape=(2,))
cirq.validate_normalized_state_vector(
np.array([0.5, 0.5, 0.5, 0.5], dtype=np.complex64), qid_shape=(2, 2)
)
with pytest.raises(ValueError, match='invalid dtype'):
cirq.validate_normalized_state_vector(
np.array([1, 1], dtype=np.complex64), qid_shape=(2, 2), dtype=np.complex128
)
with pytest.raises(ValueError, match='incorrect size'):
cirq.validate_normalized_state_vector(
np.array([1, 1], dtype=np.complex64), qid_shape=(2, 2)
)
with pytest.raises(ValueError, match='not normalized'):
cirq.validate_normalized_state_vector(
np.array([1.0, 0.2, 0.0, 0.0], dtype=np.complex64), qid_shape=(2, 2)
)
def test_validate_density_matrix():
cirq.validate_density_matrix(cirq.testing.random_density_matrix(2), qid_shape=(2,))
with pytest.raises(ValueError, match='dtype'):
cirq.to_valid_density_matrix(
np.array([[1, 0], [0, 0]], dtype=np.complex64), qid_shape=(2,), dtype=np.complex128
)
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[1, 0]]), qid_shape=(2,))
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(np.array([[1, 0.1], [0, 0]]), qid_shape=(2,))
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.array([[1, 0], [0, 0.1]]), qid_shape=(2,))
with pytest.raises(ValueError, match='positive semidefinite'):
cirq.to_valid_density_matrix(
np.array([[1.1, 0], [0, -0.1]], dtype=np.complex64), qid_shape=(2,)
)
def test_to_valid_density_matrix_from_density_matrix():
assert_valid_density_matrix(np.array([[1, 0], [0, 0]]))
assert_valid_density_matrix(np.array([[0.5, 0], [0, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.5], [0.5, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.2], [0.2, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.5j], [-0.5j, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.2 - 0.2j], [0.2 + 0.2j, 0.5]]))
assert_valid_density_matrix(np.eye(4) / 4.0, num_qubits=2)
assert_valid_density_matrix(np.diag([1, 0, 0, 0]), num_qubits=2)
assert_valid_density_matrix(np.ones([4, 4]) / 4.0, num_qubits=2)
assert_valid_density_matrix(np.diag([0.2, 0.8, 0, 0]), num_qubits=2)
assert_valid_density_matrix(
np.array([[0.2, 0, 0, 0.2 - 0.3j], [0, 0, 0, 0], [0, 0, 0, 0], [0.2 + 0.3j, 0, 0, 0.8]]),
num_qubits=2,
)
assert_valid_density_matrix(np.array([[1, 0, 0]] + [[0, 0, 0]] * 2), qid_shape=(3,))
assert_valid_density_matrix(
np.array([[0, 0, 0], [0, 0.5, 0.5j], [0, -0.5j, 0.5]]), qid_shape=(3,)
)
assert_valid_density_matrix(np.eye(9) / 9.0, qid_shape=(3, 3))
assert_valid_density_matrix(np.eye(12) / 12.0, qid_shape=(3, 4))
assert_valid_density_matrix(np.ones([9, 9]) / 9.0, qid_shape=(3, 3))
assert_valid_density_matrix(np.diag([0.2, 0.8, 0, 0]), qid_shape=(4,))
def test_to_valid_density_matrix_from_density_matrix_tensor():
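    # A density matrix supplied as a rank-2n tensor is flattened back to a
    # square matrix, e.g. shape (2,) * 6 becomes (8, 8) for three qubits.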
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
cirq.one_hot(shape=(2, 2, 2, 2, 2, 2), dtype=np.complex64), num_qubits=3
),
cirq.one_hot(shape=(8, 8), dtype=np.complex64),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
cirq.one_hot(shape=(2, 3, 4, 2, 3, 4), dtype=np.complex64), qid_shape=(2, 3, 4)
),
cirq.one_hot(shape=(24, 24), dtype=np.complex64),
)
def test_to_valid_density_matrix_not_square():
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[1], [0]]), num_qubits=1)
def test_to_valid_density_matrix_size_mismatch_num_qubits():
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[[1, 0], [0, 0]], [[0, 0], [0, 0]]]), num_qubits=2)
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.eye(4) / 4.0, num_qubits=1)
def test_to_valid_density_matrix_not_hermitian():
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(np.array([[0.5, 0.5j], [0.5, 0.5j]]), num_qubits=1)
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(
np.array(
[[0.2, 0, 0, -0.2 - 0.3j], [0, 0, 0, 0], [0, 0, 0, 0], [0.2 + 0.3j, 0, 0, 0.8]]
),
num_qubits=2,
)
def test_to_valid_density_matrix_mismatched_qid_shape():
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
cirq.to_valid_density_matrix(np.eye(4) / 4, num_qubits=1, qid_shape=(2, 2))
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
cirq.to_valid_density_matrix(np.eye(4) / 4, num_qubits=2, qid_shape=(4,))
with pytest.raises(ValueError, match='Both were None'):
cirq.to_valid_density_matrix(np.eye(4) / 4)
def test_to_valid_density_matrix_not_unit_trace():
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.array([[1, 0], [0, -0.1]]), num_qubits=1)
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.zeros([2, 2]), num_qubits=1)
def test_to_valid_density_matrix_not_positive_semidefinite():
with pytest.raises(ValueError, match='positive semidefinite'):
cirq.to_valid_density_matrix(
np.array([[0.6, 0.5], [0.5, 0.4]], dtype=np.complex64), num_qubits=1
)
def test_to_valid_density_matrix_wrong_dtype():
with pytest.raises(ValueError, match='dtype'):
cirq.to_valid_density_matrix(
np.array([[1, 0], [0, 0]], dtype=np.complex64), num_qubits=1, dtype=np.complex128
)
def test_to_valid_density_matrix_from_state_vector():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([1, 0], dtype=np.complex64), num_qubits=1
),
np.array([[1, 0], [0, 0]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([np.sqrt(0.3), np.sqrt(0.7)], dtype=np.complex64),
num_qubits=1,
),
np.array([[0.3, np.sqrt(0.3 * 0.7)], [np.sqrt(0.3 * 0.7), 0.7]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([np.sqrt(0.5), np.sqrt(0.5) * 1j], dtype=np.complex64),
num_qubits=1,
),
np.array([[0.5, -0.5j], [0.5j, 0.5]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([0.5] * 4, dtype=np.complex64), num_qubits=2
),
0.25 * np.ones((4, 4)),
)
def test_to_valid_density_matrix_from_state_vector_tensor():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array(np.full((2, 2), 0.5), dtype=np.complex64), num_qubits=2
),
0.25 * np.ones((4, 4)),
)
def test_to_valid_density_matrix_from_state_invalid_state():
with pytest.raises(ValueError, match="Invalid quantum state"):
cirq.to_valid_density_matrix(np.array([1, 0, 0]), num_qubits=2)
def test_to_valid_density_matrix_from_computational_basis():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=0, num_qubits=1), np.array([[1, 0], [0, 0]])
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=1, num_qubits=1), np.array([[0, 0], [0, 1]])
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=2, num_qubits=2),
np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=0, num_qubits=0), np.array([[1]])
)
def test_to_valid_density_matrix_from_state_invalid_computational_basis():
with pytest.raises(ValueError, match="out of range"):
cirq.to_valid_density_matrix(-1, num_qubits=2)
def test_one_hot():
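    # cirq.one_hot returns an all-zeros array of the given shape and dtype with
    # a single 1 at `index` (defaulting to the first entry).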
result = cirq.one_hot(shape=4, dtype=np.int32)
assert result.dtype == np.int32
np.testing.assert_array_equal(result, [1, 0, 0, 0])
np.testing.assert_array_equal(
cirq.one_hot(shape=[2, 3], dtype=np.complex64), [[1, 0, 0], [0, 0, 0]]
)
np.testing.assert_array_equal(
cirq.one_hot(shape=[2, 3], dtype=np.complex64, index=(0, 2)), [[0, 0, 1], [0, 0, 0]]
)
np.testing.assert_array_equal(
cirq.one_hot(shape=5, dtype=np.complex128, index=3), [0, 0, 0, 1, 0]
)
def test_eye_tensor():
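    # cirq.eye_tensor(half_shape) is the identity on the prod(half_shape)-
    # dimensional space, reshaped to the tensor shape half_shape + half_shape.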
assert np.all(cirq.eye_tensor((), dtype=int) == np.array(1))
assert np.all(cirq.eye_tensor((1,), dtype=int) == np.array([[1]]))
assert np.all(cirq.eye_tensor((2,), dtype=int) == np.array([[1, 0], [0, 1]])) # yapf: disable
assert np.all(
cirq.eye_tensor((2, 2), dtype=int)
== np.array([[[[1, 0], [0, 0]], [[0, 1], [0, 0]]], [[[0, 0], [1, 0]], [[0, 0], [0, 1]]]])
) # yapf: disable
assert np.all(
cirq.eye_tensor((2, 3), dtype=int)
== np.array(
[
[[[1, 0, 0], [0, 0, 0]], [[0, 1, 0], [0, 0, 0]], [[0, 0, 1], [0, 0, 0]]],
[[[0, 0, 0], [1, 0, 0]], [[0, 0, 0], [0, 1, 0]], [[0, 0, 0], [0, 0, 1]]],
]
)
) # yapf: disable
assert np.all(
cirq.eye_tensor((3, 2), dtype=int)
== np.array(
[
[[[1, 0], [0, 0], [0, 0]], [[0, 1], [0, 0], [0, 0]]],
[[[0, 0], [1, 0], [0, 0]], [[0, 0], [0, 1], [0, 0]]],
[[[0, 0], [0, 0], [1, 0]], [[0, 0], [0, 0], [0, 1]]],
]
)
) # yapf: disable
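
# --- Minimal usage sketch, not one of the original test cases: it only uses
# call patterns the tests above themselves demonstrate, to show how the
# cirq state-validation helpers fit together.
def _sketch_state_validation():
    import numpy as np
    import cirq

    # Computational basis index 2 on two qubits -> |10> as a length-4 vector.
    v = cirq.to_valid_state_vector(2, num_qubits=2)
    assert v[2] == 1
    # The same family of helpers promotes a state vector to |psi><psi|.
    rho = cirq.to_valid_density_matrix(v, num_qubits=2)
    assert rho.shape == (4, 4)
    np.testing.assert_almost_equal(np.trace(rho), 1)
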
#!/usr/bin/python
#
# Copyright 2019 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon sdk
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.14.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import polyaxon_sdk
from polyaxon_sdk.api.git_accesses_v1_api import GitAccessesV1Api # noqa: E501
from polyaxon_sdk.rest import ApiException
class TestGitAccessesV1Api(unittest.TestCase):
"""GitAccessesV1Api unit test stubs"""
def setUp(self):
self.api = polyaxon_sdk.api.git_accesses_v1_api.GitAccessesV1Api() # noqa: E501
def tearDown(self):
pass
def test_create_git_access(self):
"""Test case for create_git_access
Create git access # noqa: E501
"""
pass
def test_delete_git_access(self):
"""Test case for delete_git_access
Delete git access # noqa: E501
"""
pass
def test_get_git_access(self):
"""Test case for get_git_access
Get git access # noqa: E501
"""
pass
def test_list_git_access_names(self):
"""Test case for list_git_access_names
List git access names # noqa: E501
"""
pass
def test_list_git_accesses(self):
"""Test case for list_git_accesses
List git accesses # noqa: E501
"""
pass
def test_patch_git_access(self):
"""Test case for patch_git_access
Patch git access # noqa: E501
"""
pass
def test_update_git_access(self):
"""Test case for update_git_access
Update git access # noqa: E501
"""
pass
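
# --- Minimal usage sketch, assuming only the swagger-codegen conventions
# visible in this file (no-argument construction, ApiException for errors);
# the method argument signatures are not shown here, so none are called.
def _sketch_git_accesses_api():
    api = polyaxon_sdk.api.git_accesses_v1_api.GitAccessesV1Api()
    # One generated client method per test stub above.
    for name in ('create_git_access', 'delete_git_access', 'get_git_access',
                 'list_git_access_names', 'list_git_accesses',
                 'patch_git_access', 'update_git_access'):
        assert hasattr(api, name), name
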
if __name__ == '__main__':
unittest.main()
grk_tws = [['G946'],
['G5206'],
['G3428', 'G3429', 'G3430', 'G3431', 'G3432'],
['G3841'],
['G1041', 'G2379'],
['G281'],
['G32', 'G743', 'G2465'],
['G32', 'G743', 'G218', 'G1472', 'G2025', 'G3462', 'G5545', 'G5548'],
['G500'],
['G651', ' G652', ' G2491', ' G5376', ' G5570'],
['G322', 'G606', 'G1299', 'G1303', 'G1935', 'G2525', 'G2749', 'G4287', 'G4384', 'G4929', 'G5021', 'G5087'],
['G2787'],
['G2643'],
['G2435'],
['G831', 'G1413', 'G1849', 'G1850', 'G2003', 'G2715', 'G5247'],
['G907'],
['G544', 'G569', 'G571', 'G3982', 'G4100', 'G4102', 'G4103', 'G4135'],
['G4100', ' G4103'],
['G25', 'G27', 'G5207'],
['G4415'],
['G273', 'G274', 'G298', 'G338', 'G410', 'G423'],
['G987', 'G988', 'G989'],
['G1757', 'G2127', 'G2128', 'G2129', 'G3106', 'G3107', 'G3108', 'G6050'],
['G129', 'G130', 'G131', 'G1420'],
['G212', 'G213', 'G2620', 'G2744', 'G2745', 'G2746', 'G3166'],
['G4430', 'G4954', 'G4983', 'G5559'],
['G254', 'G331', 'G332', 'G1195', 'G1196', 'G1198', 'G1199', 'G1210', 'G1397', 'G1398', 'G1401', 'G1402', 'G2611', 'G2615', 'G3734', 'G3784', 'G3814', 'G4019', 'G4029', 'G4385', 'G4886', 'G4887', 'G5265'],
['G313', ' G509', ' G1080', ' G3824'],
['G80', 'G81', 'G2385', 'G2455', 'G2500', 'G4613', 'G5360', 'G5569'],
['G154', 'G363', 'G1458', 'G1528', 'G1941', 'G1951', 'G2028', 'G2046', 'G2564', 'G2821', 'G2822', 'G2840', 'G2919', 'G3004', 'G3106', 'G3333', 'G3343', 'G3603', 'G3686', 'G3687', 'G4316', 'G4341', 'G4377', 'G4779', 'G4867', 'G5455', 'G5537', 'G5581'],
['G1543', ' G2760'],
['G730', 'G815', 'G1025', 'G1064', 'G1471', 'G3439', 'G3515', 'G3516', 'G3808', 'G3812', 'G3813', 'G3816', 'G5040', 'G5041', 'G5042', 'G5043', 'G5044', 'G5206', 'G5207', 'G5388'],
['G3323', 'G5547'],
['G5546'],
['G1577'],
['G203', 'G1986', 'G4059', 'G4061'],
['G2511', 'G2512', 'G2513', 'G3689'],
['G1263', 'G1291', 'G1296', 'G1297', 'G1299', 'G1690', 'G1778', 'G1781', 'G1785', 'G2003', 'G2004', 'G2008', 'G2036', 'G2753', 'G3056', 'G3726', 'G3852', 'G3853', 'G4367', 'G4483', 'G4487', 'G5506'],
['G1653', 'G3356', 'G3627', 'G4697', 'G4834', 'G4835'],
['G176', 'G843', 'G2607', 'G2613', 'G2631', 'G2632', 'G2633', 'G2917', 'G2919', 'G2920', 'G5272', 'G6048'],
['G1843', 'G3670', 'G3671'],
['G4893'],
['G1457', 'G5048'],
['G204', 'G1137', 'G2776', 'G3037'],
['G802', 'G1242', 'G4934'],
['G4716'],
['G388', ' G4362', ' G4717', ' G4957'],
['G331', 'G332', 'G685', 'G1944', 'G2551', 'G2652', 'G2653', 'G2671', 'G2672', 'G6035'],
['G2250', 'G2962'],
['G1249'],
['G169', 'G1139', 'G1140', 'G1141', 'G1142', 'G4190', 'G4151', 'G4152', 'G4189'],
['G1139'],
['G3100', 'G3101', 'G3102'],
['G2304', ' G2999'],
['G2634', 'G2904', 'G2961', 'G2963'],
['G138', 'G140', 'G1586', 'G1588', 'G1589', 'G1951', 'G4400', 'G4401', 'G4758', 'G4899', 'G5500'],
['G126', 'G165', 'G166'],
['G2134', 'G2135'],
['G2099'],
['G92', 'G113', 'G459', 'G932', 'G987', 'G988', 'G1426', 'G2549', 'G2551', 'G2554', 'G2555', 'G2556', 'G2557', 'G2559', 'G2560', 'G2635', 'G2636', 'G4151', 'G4189', 'G4190', 'G4191', 'G5337'],
['G1869', 'G5229', 'G5251', 'G5311', 'G5312'],
['G3867', ' G3870', ' G3874', ' G4389'],
['G1680', 'G3640', 'G4102', 'G6066'],
['G4103'],
['G571'],
['G2299'],
['G1184', 'G3685', 'G4380', 'G5485', 'G5486'],
['G870', 'G1167', 'G1168', 'G1169', 'G1630', 'G1719', 'G2124', 'G2125', 'G2962', 'G5398', 'G5399', 'G5400', 'G5401'],
['G2842', 'G2844', 'G3352', 'G4790'],
['G40', ' G4130', ' G4137', ' G4151'],
['G2907', 'G4559', 'G4560', 'G4561'],
['G453', 'G454', 'G781', 'G801', 'G877', 'G878', 'G3471', 'G3472', 'G3473', 'G3474', 'G3912'],
['G165', 'G166', 'G1336'],
['G859', 'G863', 'G5483'],
['G646', 'G657', 'G863', 'G1459', 'G2641', ''],
['G1096', 'G4138'],
['G1482', 'G1484', 'G1672'],
['G334', 'G1390', 'G1394', 'G1431', 'G1434', 'G1435', 'G3311', 'G5486'],
['G1392', 'G1740', 'G4888'],
['G1391', 'G1741', 'G2620', 'G2744', 'G2745', 'G2746', 'G2755', 'G2811'],
['G112', 'G516', 'G932', 'G935', 'G1096', 'G1140', 'G2098', 'G2124', 'G2128', 'G2150', 'G2152', 'G2153', 'G2299', 'G2304', 'G2305', 'G2312', 'G2313', 'G2314', 'G2315', 'G2316', 'G2317', 'G2318', 'G2319', 'G2320', 'G3361', 'G3785', 'G4151', 'G5207', 'G5377', 'G5463', 'G5537', 'G5538'],
['G516', 'G2124', 'G2150', 'G2152', 'G2153', 'G2316', 'G2317'],
['G3962'],
['G14', 'G15', 'G18', 'G19', 'G515', 'G744', 'G865', 'G979', 'G1380', 'G2095', 'G2097', 'G2106', 'G2107', 'G2108', 'G2109', 'G2114', 'G2115', 'G2133', 'G2140', 'G2162', 'G2163', 'G2174', 'G2293', 'G2565', 'G2567', 'G2570', 'G2573', 'G2887', 'G2986', 'G3140', 'G3617', 'G3776', 'G4147', 'G4632', 'G4674', 'G4851', 'G5223', 'G5224', 'G5358', 'G5542', 'G5543', 'G5544'],
['G2097', ' G2098', ' G4283'],
['G2143', 'G5485', 'G5543'],
['G338', 'G1777', 'G3784', 'G5267'],
['G86'],
['G674', 'G1282', 'G1271', 'G2133', 'G2588', 'G2589', 'G4641', 'G4698', 'G5590'],
['G932', 'G2032', 'G3321', 'G3770', 'G3771', 'G3772'],
['G1444', 'G1445', 'G1446', 'G1447'],
['G86', 'G439', 'G440', 'G1067', 'G3041', 'G4442', 'G4443', 'G4447', 'G4448', 'G5020', 'G5394', 'G5457'],
['G748', 'G749'],
['G37', 'G38', 'G39', 'G40', 'G41', 'G42', 'G1859', 'G2150', 'G2412', 'G2413', 'G3741', 'G3742', ''],
['G40', 'G3741'],
['G39', 'G40', 'G3485', 'G5117'],
['G40', 'G4151'],
['G820', 'G1391', 'G1392', 'G1784', 'G2151', 'G2570', 'G3170', 'G4411', 'G4586', 'G5091', 'G5092', 'G5093', 'G5399'],
['G91', 'G560', 'G1679', 'G1680', 'G2070'],
['G2316', 'G3624'],
['G858', 'G4236', 'G4239', 'G4240', 'G5011', 'G5012', 'G5013', 'G5391'],
['G505', 'G5272', 'G5273'],
['G504', 'G179'],
['G1519', ' G2962', ' G5547'],
['G2816', 'G2817', 'G2819', 'G2820'],
['G92', 'G93', 'G458', 'G3892', 'G4189'],
['G121'],
['G1783', 'G1793', 'G5241'],
['G935', 'G2474', 'G2475'],
['G2205', 'G3863'],
['G2424', ' G5547'],
['G2450', 'G2451', 'G2452', 'G2453', 'G2454'],
['G144', 'G350', 'G968', 'G1106', 'G1252', 'G1341', 'G1345', 'G1348', 'G1349', 'G2917', 'G2919', 'G2920', 'G2922', 'G2923', 'G4232'],
['G2250', 'G2920', 'G2962'],
['G1342', 'G1344', 'G1345', 'G1346', 'G1347', 'G1738'],
['G1344', 'G1345', 'G1347'],
['G932', ' G2316', ' G3772'],
['G935', ' G2453'],
['G721', 'G2316'],
['G2354', 'G2355', 'G2870', 'G2875'],
['G2078', 'G2250'],
['G2316', 'G3551', 'G3565'],
['G198', 'G222', 'G227', 'G806', 'G590'],
['G203', 'G634', 'G962'],
['G1203', 'G2962'],
['G1173', ' G2960'],
['G2316', 'G2962'],
['G25', 'G26', 'G5360', 'G5361', 'G5362', 'G5363', 'G5365', 'G5367', 'G5368', 'G5369', 'G5377', 'G5381', 'G5382', 'G5383', 'G5388'],
['G3168', 'G3172'],
['G3131'],
['G1653', 'G1655', 'G1656', 'G2433', 'G2436', 'G3628', 'G3629', 'G3741', 'G4698'],
['G1247', 'G1248', 'G1249', 'G2023', 'G2038', 'G2418', 'G3008', 'G3009', 'G3010', 'G3011', 'G3930', 'G5256', 'G5257', 'G5524'],
['G880', 'G1213', 'G1229', 'G1411', 'G1569', 'G1718', 'G1770', 'G1839', 'G2285', 'G2296', 'G2297', 'G3167', 'G3902', 'G4591', 'G4592', 'G5059', ''],
['G5310'],
['G3464', 'G4666', 'G4669'],
['G2564', 'G3686', 'G3687', 'G5122'],
['G3850', 'G3942'],
['G3957'],
['G4166'],
['G4005'],
['G2316', 'G2992'],
['G599', 'G622', 'G684', 'G853', 'G1311', 'G2704', 'G4881', 'G5356'],
['G5330'],
['G1411', 'G1415', 'G1756', 'G1849', 'G1850', 'G2478', 'G2479', 'G2904', 'G3168'],
['G154', 'G1162', 'G1189', 'G1783', 'G2065', 'G2171', 'G2172', 'G3870', 'G4335', 'G4336'],
['G4309'],
['G748', 'G749', 'G2405', 'G2406', 'G2407', 'G2409', 'G2420'],
['G1843', 'G1860', 'G1861', 'G1862', 'G3670', 'G4279'],
['G2495', 'G4394', 'G4395', 'G4396', 'G4397', 'G4398', 'G5578'],
['G2434', ' G2435'],
['G5567', 'G5568'],
['G48', 'G49', 'G53', 'G54', 'G1506', 'G2511', 'G2512', 'G2513', 'G2514'],
['G4461'],
['G487', 'G3083'],
['G604', 'G1259', 'G2433', 'G2643', 'G2644'],
['G59', 'G629', 'G1805', 'G3084', 'G3085', ''],
['G2640', 'G3005', 'G3062'],
['G278', 'G3338', 'G3340', 'G3341'],
['G600', 'G2675'],
['G386', ' G1454', ' G1815'],
['G601', 'G602', 'G5537'],
['G1341', 'G1342', 'G1343', 'G1344', 'G1345', 'G1346', 'G2118'],
['G1188'],
['G4315', 'G4521'],
['G4523'],
['G40'],
['G4991', 'G4992'],
['G37', 'G38'],
['G39'],
['G1139', 'G1140', 'G1141', 'G1142', 'G1228', 'G4190', 'G4566', 'G4567'],
['G803', 'G804', 'G806', 'G1295', 'G1508', 'G4982', 'G4991', 'G5198'],
['G4990'],
['G1122'],
['G37', 'G38', 'G40', 'G873'],
['G364', 'G880', 'G1213', 'G1229', 'G1718', 'G1730', 'G1732', 'G1770', 'G3902', 'G4102', 'G4591', 'G4592', 'G4953', 'G4973', 'G5280'],
['G264', 'G265', 'G266', 'G268', 'G361', 'G3781', 'G3900', 'G4258'],
['G3816', 'G5043', 'G5207'],
['G2316', 'G5207'],
['G444', 'G5207'],
['G2316', 'G5043', 'G5207'],
['G5590'],
['G4151', 'G4152', 'G4153', 'G5326', 'G5427'],
['G2642', 'G2991', 'G3034', 'G3035', 'G3036', 'G3037', 'G4074', 'G4348', 'G5586'],
['G656', 'G752', 'G4864'],
['G4633', 'G4634', 'G4636', 'G4638'],
['G1493', 'G2411', 'G3485'],
['G551', 'G1598', 'G3985', 'G3986', 'G3987'],
['G1242', 'G1263', 'G1303', 'G1957', 'G3140', 'G3141', 'G3142', 'G3143', 'G4303', 'G4828', 'G6020'],
['G1263', 'G1957', 'G3140', 'G3141', 'G3142', 'G3143', 'G4303', 'G4828', 'G6020'],
['G5075', ' G5076'],
['G1427', ' G1733'],
['G458', 'G459', 'G3845', 'G3847', 'G3848', 'G3928'],
['G264', 'G3900'],
['G225', 'G226', 'G227', 'G228', 'G230', 'G1103', 'G3303', 'G3483', 'G3689', 'G4103', 'G4137'],
['G1679', 'G3872', 'G3982', 'G4006', 'G4100', 'G4276'],
['G543', ' G544', ' G570', ' G571'],
['G203', 'G564'],
['G167', 'G169', 'G2839', 'G2840', 'G3394'],
['G763', 'G764', 'G765'],
['G462', 'G2839'],
['G91', 'G93', 'G94'],
['G111', ' G459'],
['G106'],
['G93', 'G94', 'G458'],
['G3716', 'G3717'],
['G2171'],
['G1012', 'G1013', 'G2307', 'G2308', 'G2309', 'G2596'],
['G4678', 'G4679', 'G4680', 'G4920', 'G5428', 'G5429', 'G5430'],
['G267', 'G1263', 'G2649', 'G3140', 'G3141', 'G3142', 'G3144', 'G4828', 'G4901', 'G5575', 'G5576', 'G5577'],
['G3759'],
['G3056', 'G4487'],
['G3056', 'G4487'],
['G2041'],
['G1093', 'G2886', 'G2889', 'G3625'],
['G1391', 'G1479', 'G2151', 'G2318', 'G2323', 'G2356', 'G3000', 'G3511', 'G4352', 'G4353', 'G4573', 'G4574', 'G4576'],
['G514', 'G515', 'G516', 'G2425', 'G2661', 'G2735'],
['G2372', 'G3709', 'G3949', 'G3950'],
['G2205', 'G2206', 'G2207', 'G6041'],
['G2'],
['G6'],
['G8'],
['G7'],
['G11'],
['G76'],
['G138'],
['G406'],
['G452'],
['G491'],
['G625'],
['G207'],
['G688', 'G690'],
['G689'],
['G108'],
['G773'],
['G896'],
['G897'],
['G903'],
['G912'],
['G921'],
['G918'],
['G1263'],
['G954'],
['G958'],
['G960'],
['G963'],
['G965'],
['G2541'],
['G2542', ' G5376'],
['G2533'],
['G2535'],
['G2580'],
['G2581', 'G5478'],
['G2584'],
['G5466'],
['G2791'],
['G1138', 'G4172'],
['G2857', ' G2858'],
['G2881', ' G2882'],
['G2883'],
['G2912', ' G2914'],
['G2953', ' G2954'],
['G2956', ' G2957'],
['G1154'],
['G1158'],
['G1138'],
['G2401'],
['G124', 'G125'],
['G1639'],
['G1648'],
['G1662'],
['G2243'],
['G1665'],
['G1802'],
['G2179', ' G2180', ' G2181'],
['G2187'],
['G2269'],
['G128'],
['G2166'],
['G2096'],
['G1043'],
['G1045'],
['G1053', ' G1054'],
['G1056', 'G1057'],
['G1048'],
['G1068'],
['G1115'],
['G1671'],
['G1672', 'G1673', 'G1674', 'G1675', 'G1676'],
['G2264', ' G2265', ' G2267'],
['G2266'],
['G2264'],
['G1478'],
['G5617'],
['G1138', 'G3624'],
['G2430'],
['G2464'],
['G2268'],
['G2466'],
['G2384'],
['G2385'],
['G2385'],
['G2385'],
['G2496'],
['G2498'],
['G2408'],
['G2410'],
['G2414', 'G2415', 'G2419'],
['G2421'],
['G2403'],
['G2492'],
['G2493'],
['G2491', ' G3138'],
['G2491'],
['G910 G2491'],
['G2495'],
['G2445'],
['G2496'],
['G2446'],
['G2501'],
['G2500', 'G2501'],
['G2424'],
['G2502'],
['G2455', ' G2469'],
['G2455'],
['G2453'],
['G2748', 'G5493'],
['G2984'],
['G2976'],
['G3017', 'G3018', 'G3019', 'G3020'],
['G3091'],
['G3082'],
['G3109', ' G3110'],
['G1217'],
['G3128'],
['G444', 'G2316'],
['G3136'],
['G3137'],
['G3094', ' G3137'],
['G3137'],
['G3017', ' G3156'],
['G3370'],
['G3318'],
['G3413'],
['G3434'],
['G3475'],
['G3735', 'G1636'],
['G3497'],
['G3493'],
['G3486'],
['G3508'],
['G3481'],
['G3478', ' G3479', ' G3480'],
['G3535', 'G3536'],
['G3575'],
['G3972', ' G4569'],
['G2786', ' G4074', ' G4613'],
['G5328'],
['G5376'],
['G5374', ' G5375'],
['H776 H6429 H06430'],
['G4949', 'G5403'],
['G4091', ' G4194'],
['G4193', ' G4195'],
['G4478'],
['G4460'],
['G4471'],
['G2063', 'G2281'],
['G4497'],
['G4502'],
['G4514', ' G4516'],
['G4503'],
['G4540', 'G4541', 'G4542'],
['G4546'],
['G4545'],
['G4564'],
['G4549'],
['G1056', 'G1082', 'G2281', 'G3041', 'G5085'],
['G4589'],
['G4590'],
['G4605', 'G4606'],
['G4609', ' G4610'],
['G4826'],
['G2208', ' G2581', ' G4613'],
['G3735', 'G4614'],
['G4670'],
['G4672'],
['G4736'],
['G4947', 'G4948'],
['G5018', ' G5019'],
['G2331', ' G2332'],
['G2381'],
['G5095'],
['G5103'],
['G5174'],
['G5190'],
['G5183', 'G5184'],
['G3774'],
['G2195'],
['G4524'],
['G2199'],
['G2194'],
['G2197'],
['G2216'],
['G1427', 'G2474', 'G5443'],
['G12', ' G5421'],
['G1458', 'G2147', 'G2596', 'G2724'],
['G1492', 'G1921', 'G3670'],
['G2941'],
['G3560', 'G3867', 'G5537'],
['G476', 'G480', 'G2189', 'G2190', 'G4567', 'G5227'],
['G2346', 'G2347', 'G2552', 'G2553', 'G2561', 'G3804', 'G4777', 'G4778', 'G5003'],
['G165', 'G1074'],
['G1654'],
['G2368', 'G2379'],
['G639', 'G1568', 'G1569', 'G1605', 'G1611', 'G1839', 'G2284', 'G2285', 'G2296', 'G2297', 'G2298', 'G3167', 'G4023', 'G4423', 'G4592', 'G5059'],
['G4243'],
['G23', 'G1758', 'G2371', 'G2372', 'G3164', 'G3709', 'G3710', 'G3711', 'G3947', 'G3949', 'G5520'],
['G928', 'G3600', 'G4928'],
['G3696', 'G3833'],
['G2868', 'G4700', 'G5077', 'G5522'],
['G1577', 'G1997', 'G3831', 'G4863', 'G4864', 'G4871', 'G4905'],
['G3307'],
['G4105'],
['G1349', 'G1556', 'G1557', 'G1558', 'G2917', 'G3709'],
['G2124'],
['G513'],
['G1173', 'G1403'],
['G2915', 'G2916'],
['G692', 'G4723'],
['G2894', 'G3426', 'G4553', 'G4711'],
['G142', 'G399', 'G430', 'G503', 'G941', 'G1080', 'G1627', 'G2592', 'G3114', 'G3140', 'G4064', 'G4160', 'G4722', 'G4828', 'G4901', 'G5041', 'G5088', 'G5297', 'G5342', 'G5409', 'G5576'],
['G715'],
['G2226', 'G2341', 'G2342', 'G2934', 'G4968', 'G5074'],
['G154', 'G1871', 'G4319', 'G4434', 'G6075'],
['G3860', 'G4273'],
['G2250'],
['G5610'],
['G3376'],
['G5438'],
['G4521'],
['G1763', 'G2094'],
['G3470'],
['G2210'],
['G631', 'G1591', 'G1813'],
['G662', 'G2292', 'G3618', 'G3954', 'G3955', 'G5111', 'G5112'],
['G976', 'G2222'],
['G1120', 'G2578', 'G2827', 'G4781', 'G4794'],
['G5115'],
['G106', 'G740', 'G4286'],
['G2382'],
['G1709', 'G1720', 'G4157'],
['G5260'],
['G3565'],
['G3566'],
['G5470', 'G5474', 'G5475'],
['G4', 'G916', 'G922', 'G1117', 'G2347', 'G2599', 'G2655', 'G5413'],
['G3646'],
['G1779', 'G1780', 'G2290', 'G4916', 'G5027'],
['G2574'],
['G161', 'G162', 'G163', 'G164', 'G2221'],
['G1544'],
['G726'],
['G582', 'G583'],
['G892'],
['G716', 'G4480'],
['G5502'],
['G749', 'G750', 'G754', 'G4410', 'G4413', 'G5506'],
['G749'],
['G4175', 'G4177', 'G4847'],
['G294', 'G1463', 'G1562', 'G1737', 'G1742', 'G1746', 'G1902', 'G2066', 'G2439', 'G2440', 'G3608', 'G4016', 'G4470', 'G4616', 'G4683', 'G4749', 'G5509', 'G6005'],
['G302', 'G2174', 'G3870', 'G3874', 'G3875', 'G3888', 'G3890', 'G3931'],
['G5506'],
['G264', 'G2038', 'G2716', 'G3429', 'G3431', 'G3860', 'G3872', 'G3908', 'G4102', 'G4160', 'G4203'],
['G2844', 'G3353', 'G4791', 'G4898', 'G4904'],
['G1080', 'G1722', 'G2602', 'G2845', 'G4815'],
['G1340', 'G2292', 'G3954', 'G3982', 'G4006', 'G5287'],
['G950', 'G951', 'G1991', 'G2964', 'G3315', 'G4300', 'G4972'],
['G355', 'G1159', 'G2618', 'G2654', 'G2719', 'G5315', 'G5723'],
['G1848'],
['G853', 'G862', 'G1311', 'G1312', 'G2585', 'G2704', 'G4550', 'G4595', 'G5349', 'G5351', 'G5356'],
['G1010', 'G4824', 'G4892'],
['G1010', 'G1011', 'G1012', 'G1106', 'G4823', 'G4824', 'G4825'],
['G2114', 'G2115', 'G2174', 'G2292', 'G2293', 'G2294', 'G3870', 'G3874', 'G3954', 'G4389', 'G4837', 'G5111'],
['G2681', 'G833', 'G933', 'G4259'],
['G1016', 'G1151', 'G2353', 'G2934', 'G3447', 'G3448', 'G4165', 'G5022'],
['G2041', 'G2602', 'G2675', 'G2936', 'G2937', 'G2939', 'G4160', 'G5480'],
['G2226', 'G2937', 'G2938'],
['G156', 'G1462', 'G2556', 'G2557', 'G4467'],
['G1238', 'G4735', 'G4737'],
['G310', 'G349', 'G863', 'G994', 'G995', 'G1916', 'G2019', 'G2799', 'G2805', 'G2896', 'G2905', 'G2906', 'G2929', 'G4377', 'G5455'],
['G2665'],
['G609', 'G851', 'G1581', 'G2407', 'G5257', ''],
['G2217', 'G4652', 'G4653', 'G4655', 'G4656'],
['G336', 'G337', 'G520', 'G581', 'G599', 'G615', 'G622', 'G684', 'G1634', 'G1935', 'G2079', 'G2253', 'G2286', 'G2287', 'G2288', 'G2289', 'G2348', 'G2837', 'G2966', 'G3498', 'G3499', 'G3500', 'G4430', 'G4880', 'G4881', 'G5053', 'G5054'],
['G538', 'G539', 'G1386', 'G1387', 'G1388', 'G1389', 'G1818', 'G3884', 'G4105', 'G4106', 'G4108', 'G5422', 'G5423'],
['G312', 'G394', 'G518', 'G669', 'G1107', 'G1213', 'G1229', 'G1335', 'G1344', 'G1555', 'G1718', 'G1732', 'G1834', 'G2097', 'G2511', 'G2605', 'G2607', 'G3140', 'G3670', 'G3724', 'G3822', 'G3853', 'G3870', 'G3955', 'G5319', 'G5419'],
['G1378'],
['G1456', 'G1457'],
['G733', 'G2839', 'G2840', 'G3392', 'G3435', 'G4696', 'G5351'],
['G325', 'G525', 'G629', 'G859', 'G1080', 'G1325', 'G1560', 'G1659', 'G1807', 'G1929', 'G2673', 'G3086', 'G3860', 'G4506', 'G4991', 'G5088', 'G5483'],
['G1074', 'G1085', 'G4690'],
['G953'],
['G2047', 'G2048'],
['G2048', 'G2049', 'G2050', 'G3443'],
['G5056', 'G5087'],
['G3645'],
['G946', 'G947', 'G948', 'G4767', 'G5723', 'G3404'],
['G2068', 'G2666', 'G2719', 'G5315'],
['G350', 'G1252', 'G1253', 'G1381', 'G2924'],
['G149', 'G819', 'G3680', 'G3856'],
['G818', 'G819', 'G820', 'G987', 'G2617'],
['G506', 'G543', 'G544', 'G545', 'G3847', 'G3876'],
['G1287', 'G1290', 'G4650'],
['G4436'],
['G630', 'G647', 'G863'],
['G1319', 'G1322', 'G2085'],
['G3678', 'G3688', 'G5268'],
['G4058'],
['G1797', 'G1798', 'G3677'],
['G3178', 'G3182', 'G3183', 'G3184', 'G3630', 'G3632'],
['G906', 'G4657'],
['G105'],
['G1093', 'G1919', 'G2709', 'G2886', 'G3625', 'G3749', 'G4578', 'G5517'],
['G1087', 'G3187', 'G4244', 'G4245', 'G4850'],
['G430', 'G907', 'G1526', 'G2005', 'G2076', 'G2553', 'G2594', 'G3114', 'G3306', 'G4722', 'G5278', 'G5281', 'G5297', 'G5342'],
['G1398', 'G1402', 'G2615'],
['G866', 'G1937', 'G2205', 'G2206', 'G3713', 'G3788', 'G4123', 'G4124', 'G4190', 'G5354', 'G5355', 'G5366'],
['G93', 'G458', 'G2038', 'G2040', 'G2555'],
['G3927'],
['G3799', 'G4383', 'G4750'],
['G5578'],
['G1965', 'G3144', 'G5571', 'G5575', 'G5576', 'G5577'],
['G1085', 'G3614', 'G3624', 'G3965'],
['G3042'],
['G777', 'G3521', 'G3522', 'G3523'],
['G256', 'G540', 'G1080', 'G2495', 'G3737', 'G3962', 'G3964', 'G3966', 'G3967', 'G3970', 'G3971', 'G3995', 'G4245', 'G4269', 'G4613'],
['G26', 'G755', 'G1062', 'G1173', 'G1403', 'G1456', 'G1858', 'G1859', 'G2165', 'G3521', 'G4910'],
['G1456', 'G1858', 'G1859'],
['G3653', 'G4808', 'G4810'],
['G439', 'G440', 'G1067', 'G2741', 'G4442', 'G4443', 'G4447', 'G4448', 'G4451', 'G5394', 'G5457'],
['G4416', 'G5207'],
['G536'],
['G231', 'G1903'],
['G34', 'G4167', 'G4168'],
['G2627', 'G4132', 'G4215', 'G4216'],
['G832', 'G834', 'G836'],
['G4228', 'G5286'],
['G241', 'G245', 'G526', 'G915', 'G1854', 'G3581', 'G3927', 'G3941'],
['G4267', ' G4268'],
['G1608', 'G4202', 'G4203'],
['G2310', 'G2311', 'G2602'],
['G242', 'G305', 'G393', 'G985', 'G1530', 'G1816', 'G4077', 'G4855', 'G5453'],
['G3030'],
['G425', 'G525', 'G558', 'G572', 'G629', 'G630', 'G859', 'G1344', 'G1432', 'G1657', 'G1658', 'G1659', 'G1849', 'G2010', 'G3032', 'G3089', 'G3955', 'G4174', 'G4506', 'G5483', 'G5486'],
['G1081', 'G2590', 'G2592', 'G2593', 'G3703', 'G5052', 'G5352', 'G6013'],
['G2575'],
['G2374', 'G4439', 'G4440'],
['G328', 'G1241', 'G2224', 'G4024'],
['G122', 'G2055', 'G2056', 'G5131'],
['G5552', 'G5553', 'G5554', 'G5557'],
['G2636', 'G2637', 'G5397'],
['G445', 'G446', 'G746', 'G1481', 'G2232', 'G2233', 'G2230', 'G4232'],
['G248', 'G2590', 'G3450', 'G4621', 'G4719'],
['G288', 'G4718'],
['G1690', 'G4726', 'G4727', 'G4959'],
['G5463', 'G5464'],
['G710', 'G1188', 'G1448', 'G1451', 'G1764', 'G2021', 'G2092', 'G2176', 'G2902', 'G4084', 'G4474', 'G4475', 'G5495', 'G5496', 'G5497'],
['G519'],
['G917', 'G1419', 'G1421', 'G1422', 'G1423', 'G1425', 'G2205', 'G2532', 'G2553', 'G2872', 'G2873', 'G3425', 'G3433', 'G4053', 'G4183', 'G4456', 'G4457', 'G4641', 'G4642', 'G4643', 'G4645', 'G4912', 'G4927'],
['G2788', 'G2789', 'G2790'],
['G2326', 'G6013'],
['G5244'],
['G346', 'G755', 'G2775', 'G2776', 'G4719'],
['G1295', 'G1743', 'G2322', 'G2323', 'G2386', 'G2390', 'G2392', 'G2511', 'G3647', 'G4982', 'G4991', 'G5198', 'G5199'],
['G2816', 'G2818', 'G2820', 'G4789'],
['G40', 'G4172'],
['G2781', 'G3192', 'G3193'],
['G2768'],
['G2462'],
['G2460'],
['G5610'],
['G3609', 'G3613', 'G3614', 'G3624'],
['G2322', 'G3609', 'G3614', 'G3615', 'G3616', 'G3623', 'G3624'],
['G2617', 'G5014'],
['G1493', 'G1494', 'G1495', 'G1496', 'G1497', 'G2712'],
['G1504', 'G5179', 'G5481'],
['G1096', 'G2596', 'G3401', 'G3402', 'G4160'],
['G2368', 'G2369', 'G2370', 'G2379', 'G3031'],
['G1830'],
['G1256', 'G1299', 'G1319', 'G1321', 'G1378', 'G1781', 'G1785', 'G2322', 'G2727', 'G2753', 'G3559', 'G3560', 'G3614', 'G3615', 'G3624', 'G3811', 'G3852', 'G3853', 'G4264', 'G4367', 'G4822'],
['G4587'],
['G1252', 'G1328', 'G1329', 'G1381', 'G1955', 'G2058', 'G3177', 'G4793'],
['G2453'],
['G20', 'G21', 'G2167', 'G2744', 'G3685', 'G4640', 'G5463', 'G5479'],
['G2454'],
['G350', 'G1252', 'G1348', 'G2919', 'G2922', 'G2923'],
['G4773'],
['G1085', 'G5449'],
['G935', 'G936'],
['G932'],
['G2705', 'G5368', 'G5370'],
['G50', 'G56', 'G1097', 'G1107', 'G1108', 'G1231', 'G1492', 'G1921', 'G1922', 'G1987', 'G2467', 'G2589', 'G3877', 'G4267', 'G4894'],
['G75', 'G2038', 'G2040', 'G2041', 'G2872', 'G2873', 'G4704', 'G4866', 'G4904', 'G5389'],
['G3449', 'G4944', 'G5088', 'G5604', 'G5605'],
['G2985', 'G3088'],
['G3087'],
['G1785', 'G3548', 'G3551', 'G4747'],
['G111', 'G459', 'G1832', 'G3545'],
['G113', 'G266', 'G458', 'G459'],
['G3097'],
['G3014', 'G3015'],
['G1121', 'G1989', 'G1992'],
['G681', 'G796', 'G1645', 'G2985', 'G3088', 'G5338', 'G5457', 'G5458', 'G5460', 'G5462'],
['G1503', 'G1504', 'G2509', 'G2531', 'G2596', 'G3664', 'G3665', 'G3666', 'G3667', 'G3668', 'G3669', 'G3697', 'G4833', 'G5108', 'G5613', 'G5615', 'G5616', 'G5618', 'G5619'],
['G3023'],
['G200'],
['G3751'],
['G2624', 'G2819', 'G2975', 'G3091'],
['G865', 'G866', 'G5358', 'G5366', 'G5367', 'G5369', 'G5377', 'G5381', 'G5382'],
['G5011', 'G5012', 'G5014'],
['G766', 'G1937', 'G1938', 'G1939', 'G1971', 'G2237', 'G3715', 'G3806'],
['G3097'],
['G758', 'G3980', 'G4755'],
['G1392', 'G3170'],
['G2012', 'G3621', 'G3623'],
['G3312', 'G3316'],
['G3191', 'G4304'],
['G4235', 'G4236', 'G4239', 'G4240'],
['G3089', 'G5080'],
['G1010', 'G3196', 'G3609'],
['G3422'],
['G32', 'G652'],
['G972', 'G1411', 'G1413', 'G1414', 'G1415', 'G1498', 'G1752', 'G1754', 'G2159', 'G2478', 'G2479', 'G2900', 'G2904', 'G3168', 'G3173', 'G5082'],
['G1271', 'G1374', 'G3328', 'G3525', 'G3540', 'G3563', 'G4993', 'G5590'],
['G1592', 'G1701', 'G1702', 'G1703', 'G2301', 'G2606', 'G3456', 'G5512'],
['G4110', 'G4111'],
['G2354', 'G2875', 'G3602', 'G3996', 'G3997'],
['G4052', 'G4129'],
['G3466'],
['G246', 'G1074', 'G1085', 'G1484'],
['G1069', 'G2087', 'G4040', 'G4139'],
['G3376', 'G3561'],
['G937', 'G2104', 'G2903'],
['G332', 'G3660', 'G3727', 'G3728'],
['G191', 'G544', 'G3980', 'G3982', 'G4198', 'G5083', 'G5084', 'G5218', 'G5219', 'G5255', 'G5292', 'G5293', 'G5442'],
['G1081', 'G1085'],
['G1637', 'G3464'],
['G65', 'G1636', 'G1637', 'G2565'],
['G1722', 'G5308', 'G5310', 'G5311'],
['G2616', 'G2669'],
['G1299', 'G2525', 'G2680', 'G3724', 'G4270', 'G4282', 'G4309', 'G5021', 'G5500'],
['G1296', 'G1345', 'G1378', 'G1379', 'G2937', 'G3862'],
['G1983', 'G1984', 'G1985'],
['G2638', 'G2983'],
['G1484', 'G1494'],
['G833', 'G933', 'G4232'],
['G5404'],
['G991', 'G1519', 'G2983', 'G4299', 'G4382', 'G4383'],
['G420', 'G463', 'G1933', 'G3114', 'G3115', 'G3116', 'G5278', 'G5281'],
['G3966'],
['G269', 'G425', 'G1514', 'G1515', 'G1516', 'G1517', 'G1518', 'G2272'],
['G246', 'G1074', 'G1085', 'G1218', 'G1484', 'G2560', 'G2992', 'G3793'],
['G195', 'G197', 'G199', 'G739', 'G1295', 'G2005', 'G2675', 'G2676', 'G2677', 'G3647', 'G5046', 'G5047', 'G5048', 'G5050', 'G5052'],
['G1375', 'G1376', 'G1377', 'G1559', 'G2347'],
['G3115', ' G4343', ' G5281'],
['G654', 'G1294', 'G3344', 'G3346', 'G3859', 'G4106'],
['G1330', 'G1338', 'G1574', 'G2660', 'G3572', 'G4044', 'G4138'],
['G5519'],
['G4769'],
['G12', 'G999', 'G5421'],
['G3061', 'G3148', 'G4127'],
['G1189', 'G1793', 'G2065', 'G3870'],
['G728'],
['G722', 'G723'],
['G1139', 'G2192', 'G2697', 'G2722', 'G2932', 'G2933', 'G2935', 'G4047', 'G5224', 'G5564'],
['G1229', 'G1256', 'G2097', 'G2605', 'G2782', 'G2783', 'G2784', 'G2980', 'G3853', 'G3955', 'G4283', 'G4296'],
['G1229', 'G1256', 'G2097', 'G2605', 'G2782', 'G2783', 'G2784', 'G2980', 'G3955', 'G4283', 'G4296'],
['G927', 'G1784', 'G2472', 'G4185', 'G4186', 'G5092', 'G5093'],
['G747', 'G758', 'G1413', 'G2232', 'G3175'],
['G1198', 'G1199', 'G1200', 'G1201', 'G1202', 'G1210', 'G2252', 'G3612', 'G4788', 'G4869', 'G5083', 'G5084', 'G5438', 'G5439'],
['G518', 'G591', 'G1229', 'G1861', 'G2097', 'G2605', 'G2782', 'G2784', 'G2980', 'G3142', 'G3853', 'G4135'],
['G952', 'G953'],
['G147', 'G1281', 'G2585', 'G2770', 'G2771', 'G3408', 'G4297', 'G4298', 'G4851', 'G5539', 'G5622', 'G5623', 'G5624'],
['G2137'],
['G4204'],
['G4098'],
['G212', 'G1391', 'G1392', 'G2744', 'G2745', 'G2746', 'G3173', 'G5187', 'G5229', 'G5243', 'G5244', 'G5308', 'G5309', 'G5426', 'G5450'],
['G3850', 'G3942'],
['G1885'],
['G653', 'G2042', 'G3863', 'G3893', 'G3947', 'G3948', 'G3949', 'G4292'],
['G4908', 'G5428'],
['G5229', 'G5448'],
['G1349', 'G1556', 'G1557', 'G2849', 'G3811', 'G5097'],
['G4209', 'G4210', 'G4211'],
['G683', 'G4261'],
['G938'],
['G762', 'G4570'],
['G1693', 'G2830', 'G3710', 'G5433'],
['G305', 'G386', 'G393', 'G450', 'G1096', 'G1326', 'G1453', 'G1525', 'G1817', 'G1825', 'G1892', 'G1999', 'G4891', ''],
['G270', 'G2325', 'G2327'],
['G3893', 'G4955'],
['G298', 'G299', 'G1649', 'G1651', 'G1969', 'G2008', 'G3679'],
['G308', 'G324', 'G353', 'G354', 'G568', 'G588', 'G618', 'G1183', 'G1209', 'G1523', 'G1653', 'G1926', 'G2210', 'G2865', 'G2983', 'G3028', 'G3335', 'G3336', 'G3549', 'G3858', 'G3880', 'G3970', 'G4327', 'G4355', 'G4356', 'G4687', 'G4732', 'G5264', 'G5274', 'G5562'],
['G2063', 'G2563'],
['G757', 'G936', 'G2231', 'G4821'],
['G96', 'G114', 'G483', 'G550', 'G579', 'G580', 'G593', 'G683', 'G720', 'G1609', 'G3868'],
['G21', 'G2165', 'G2620', 'G2744', 'G2745', 'G4796', 'G4913', 'G5463'],
['G189', 'G191', 'G312', 'G518', 'G987', 'G1225', 'G1310', 'G1426', 'G1834', 'G2036', 'G2162', 'G2163', 'G3004', 'G3056', 'G3140', 'G3141', 'G3377'],
['G410', 'G423', 'G819', 'G3059', 'G3679', 'G3680', 'G3681', 'G5195', 'G5196', 'G5484'],
['G372', 'G373', 'G425', 'G1515', 'G1879', 'G1954', 'G1981', 'G2270', 'G2663', 'G2664', 'G2681', 'G2838', 'G3062', 'G4520'],
['G344', 'G360', 'G390', 'G1877', 'G1880', 'G1994', 'G5290'],
['G127', 'G1788', 'G2125', 'G2412', 'G5399', 'G5401'],
['G469', 'G514', 'G591', 'G2603', 'G3405', 'G3406', 'G3408'],
['G1746', 'G2066', 'G2067', 'G2440', 'G4749', 'G4016', 'G5511'],
['G2563', 'G4463', 'G4464'],
['G933', 'G934', 'G937'],
['G2679', 'G2692', 'G3639', 'G4485'],
['G746', 'G752', 'G755', 'G757', 'G758', 'G932', 'G936', 'G1018', 'G1203', 'G1299', 'G1778', 'G1785', 'G1849', 'G2232', 'G2233', 'G2525', 'G2583', 'G2888', 'G2961', 'G3545', 'G3841', 'G4165', 'G4173', 'G4291'],
['G413', 'G1377', 'G1601', 'G1530', 'G1532', 'G1632', 'G1998', 'G2027', 'G2701', 'G3729', 'G4063', 'G4370', 'G4390', 'G4890', 'G4936', 'G5143', 'G5240', 'G5295', 'G5302', 'G5343'],
['G4526'],
['G2413'],
['G266', 'G334', 'G1049', 'G1435', 'G1494', 'G2378', 'G2380', 'G3646', 'G4376', 'G5485'],
['G4547', 'G5266'],
['G4464'],
['G974', 'G975'],
['G2696', 'G4972', 'G4973'],
['G4615', 'G4687', 'G4690', 'G4701', 'G4703'],
['G327', 'G1567', 'G1934', 'G2052', 'G2212'],
['G724', 'G1949', 'G2638', 'G2902', 'G2983', 'G4815', 'G4884'],
['G192', 'G193', 'G1466', 'G1467', 'G1468', 'G4997'],
['G782', 'G375', 'G630', 'G649', 'G652', 'G657', 'G1026', 'G1032', 'G1544', 'G1599', 'G1821', 'G3333', 'G3343', 'G3936', 'G3992', 'G4311', 'G4341', 'G4369', 'G4842', 'G4882'],
['G2191', 'G2062', 'G3789'],
['G1249', 'G1401', 'G1402', 'G2324', 'G3407', 'G3411', 'G3610', 'G3816', 'G4983', 'G5257'],
['G1247', 'G1248', 'G1398', 'G1402', 'G1438', 'G1983', 'G2064', 'G2212', 'G2323', 'G2999', 'G3000', 'G3009', 'G4337', 'G4342', 'G4754', 'G5087', 'G5256'],
['G1097'],
['G644', 'G1982', 'G2683', 'G4639'],
['G127', 'G149', 'G152', 'G153', 'G422', 'G808', 'G818', 'G819', 'G821', 'G1788', 'G1791', 'G1870', 'G2617', 'G3856', 'G5195'],
['G4165', 'G4262', 'G4263'],
['G750', 'G4165', 'G4166'],
['G2375'],
['G5429'],
['G693', 'G694', 'G695', 'G696', 'G1406'],
['G27', 'G79'],
['G337', 'G615', 'G1315', 'G2380', 'G2695', 'G4968', 'G4969', 'G5407'],
['G337', 'G615', 'G1315', 'G2380', 'G2695', 'G4968', 'G4969', 'G5407'],
['G987', 'G988', 'G1228', 'G1426', 'G2636', 'G2637', 'G3059', 'G3060', 'G6022'],
['G2871', 'G4967', 'G4969', 'G5408'],
['G879', 'G1852', 'G1853', 'G2518', 'G2837', 'G5258'],
['G64', 'G1029', 'G2339', 'G2340', 'G3802', 'G3803', 'G3985', 'G4625'],
['G5510'],
['G3095', 'G3096', 'G3097', 'G5331', 'G5332', 'G5333'],
['G4687', 'G4703', 'G5300', 'G5452', 'G6037'],
['G3057'],
['G2563', 'G3586', 'G4464'],
['G483', 'G4644', 'G4645'],
['G596'],
['G461', 'G772', 'G950', 'G1411', 'G1412', 'G1743', 'G1765', 'G1840', 'G1849', 'G1991', 'G2479', 'G2480', 'G2901', 'G2904', 'G3619', 'G3756', 'G4599', 'G4732', 'G4733', 'G4741'],
['G485', 'G2052', 'G2054', 'G3055', 'G3163', 'G5379'],
['G4608'],
['G3794'],
['G679', 'G4348', 'G4350', 'G4417', 'G4624', 'G4625'],
['G3037', 'G4349', 'G4625'],
['G350', 'G1379', 'G1396', 'G1777', 'G3663', 'G5292', 'G5293'],
['G5226', 'G5293'],
['G91', 'G941', 'G971', 'G2210', 'G2346', 'G2347', 'G3804', 'G3958', 'G4310', 'G4778', 'G4841', 'G5004', 'G5723'],
['G2303'],
['G4216', 'G4563', 'G4951'],
['G3162', 'G4501'],
['G583', 'G5411'],
['G5057', 'G5058'],
['G1317', 'G1321', 'G1322', 'G2085', 'G2605', 'G2727', 'G3100', 'G2312', 'G2567', 'G3811', 'G4994'],
['G1320', 'G2567', 'G3547', 'G5572'],
['G586', 'G1181', 'G1183'],
['G1629', 'G1630', 'G2258', 'G4422', 'G4426', 'G5401'],
['G727', 'G1888', 'G2417', 'G2812', 'G3027'],
['G173', 'G174', 'G4647', 'G5146'],
['G248'],
['G968', 'G2362'],
['G744', 'G530', 'G1074', 'G1208', 'G1441', 'G1597', 'G1626', 'G1909', 'G2034', 'G2119', 'G2121', 'G2235', 'G2250', 'G2540', 'G3379', 'G3461', 'G3568', 'G3763', 'G3764', 'G3819', 'G3956', 'G3999', 'G4178', 'G4181', 'G4183', 'G4218', 'G4277', 'G4287', 'G4340', 'G4455', 'G5119', 'G5151', 'G5305', 'G5550', 'G5551', 'G5610'],
['G86', 'G2750', 'G3418', 'G3419', 'G5028'],
['G1100', 'G1258', 'G1447', 'G2084'],
['G928', 'G929', 'G930', 'G931', 'G2558', 'G2851', 'G3600'],
['G3862', ' G3970'],
['G2662', 'G3961'],
['G1611'],
['G1719', 'G1790', 'G5141', 'G5156', 'G5425'],
['G178', 'G1382', 'G1383', 'G2919', 'G3984', 'G3986', 'G4451'],
['G1429', 'G5443'],
['G2346', 'G2347'],
['G1323', 'G2778', 'G5411'],
['G387', 'G1298', 'G1613', 'G1776', 'G2346', 'G2347', 'G2350', 'G2360', 'G2553', 'G2873', 'G3636', 'G3926', 'G3930', 'G3986', 'G4423', 'G4660', 'G5015', 'G5016', 'G5182'],
['G4536', 'G4537', 'G4538'],
['G5509'],
['G344', 'G387', 'G402', 'G576', 'G654', 'G665', 'G868', 'G1294', 'G1578', 'G1612', 'G1624', 'G1994', 'G2827', 'G3179', 'G3313', 'G3329', 'G3344', 'G3346', 'G4762', 'G5077', 'G5157', 'G5290', 'G6060'],
['G50', 'G145', 'G191', 'G801', 'G1097', 'G1107', 'G1108', 'G1271', 'G1921', 'G1922', 'G1987', 'G1990', 'G2657', 'G3129', 'G3539', 'G3563', 'G3877', 'G4441', 'G4907', 'G4908', 'G4920', 'G5424', 'G5428', 'G5429', 'G6063'],
['G255', 'G512', 'G888', 'G889', 'G890'],
['G945', 'G1432', 'G1500', 'G2755', 'G2756', 'G2757', 'G2758', 'G2761', 'G3150', 'G3151', 'G3152', 'G3153', 'G3154', 'G3155'],
['G2665'],
['G288', 'G290', 'G1009', 'G1092'],
['G289', 'G290'],
['G3932', 'G3933'],
['G3701', 'G3705', 'G3706'],
['G2906', 'G5456', 'G5586'],
['G1330', 'G1704', 'G3716', 'G4043', 'G4198', 'G4748'],
['', 'G4686', 'G4753', 'G4754', 'G4757', 'G4758', 'G4961'],
['G684', 'G1287', 'G2049', 'G2673', 'G4199'],
['G69', 'G70', 'G991', 'G1127', 'G1492', 'G2334', 'G2892', 'G3525', 'G3708', 'G3906', 'G4337', 'G4648', 'G5083', 'G5438'],
['G4444'],
['G504', 'G4215', 'G4222', 'G5202', 'G5204'],
['G4077', 'G5421'],
['G4621'],
['G1098', 'G3631', 'G3820', 'G3943'],
['G3025', 'G5276'],
['G4425', 'G4617'],
['G4680'],
['G3074'],
['G1064', 'G2836', 'G3388'],
['G518', 'G1024', 'G3050', 'G3054', 'G3055', 'G3056', 'G4086', 'G4487', 'G4935', 'G5023', 'G5542'],
['G1125'],
['G91', 'G92', 'G93', 'G95', 'G264', 'G824', 'G983', 'G984', 'G1536', 'G1626', 'G1651', 'G1727', 'G1908', 'G2556', 'G2558', 'G2559', 'G2607', 'G3076', 'G3077', 'G3762', 'G4122', 'G5195', 'G5196'],
['G106', 'G2219', 'G2220'],
['G2086', 'G2201', 'G2218', 'G4805'],
]
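
# --- Hedged helper sketch: grk_tws and eng_tws (defined next) appear to be
# parallel lists pairing groups of Strong's numbers with English translation
# words. Entries carry stray leading spaces, empty strings, and occasional
# multi-number cells such as 'G910 G2491', so the helper normalizes them.
# The name strongs_to_english is illustrative, not from the source.
def strongs_to_english():
    mapping = {}
    for strongs, words in zip(grk_tws, eng_tws):
        cleaned = [w.strip() for w in words if w.strip()]
        for cell in strongs:
            for num in cell.split():  # handles multi-number and empty cells
                mapping.setdefault(num, set()).update(cleaned)
    return mapping
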
eng_tws = [['abomination', ' abominations', ' abominable', ' foul', ' disgusting thing'],
['adoption', ' adopt', ' adopted'],
['adultery', ' adulterous', ' adulterer', ' adulteress', ' adulterers', ' adulteresses'],
['Almighty'],
['altar', ' altars'],
['amen', ' truly'],
['angel', ' angels', ' archangel'],
['anoint', ' anointed', ' anointing', ' sons of fresh olive oil'],
['antichrist', ' antichrists'],
['apostle', ' apostles', ' apostleship'],
['appoint', ' appoints', ' appointed', ' appointment', ' ordered', ' desired', ' directed', ' selected', ' reserved'],
['ark'],
['ark of the covenant', ' ark of Yahweh'],
['atonement', ' atone', ' atones', ' atoned'],
['atonement lid'],
['authority', ' authorities'],
['baptize', ' baptized', ' baptism'],
['believe', ' believes', ' believed', ' believer', ' belief', ' unbeliever', ' unbelievers', ' unbelief'],
['believer'],
['beloved', ' treasured'],
['birthright'],
['blameless'],
['blasphemy', ' blaspheme', ' blasphemes', ' blasphemed', ' blasphemous', ' blasphemies'],
['bless', ' blessed', ' blessing', ' blessings'],
['blood'],
['boast', ' boasts', ' boastful'],
['body', ' bodies'],
['bind', ' binding', ' bond', ' bonds', ' bound', ' is bound', ' tie', ' tying', ' tie up', ' tied', ' tied up', ' put on', ' puts on', ' gird up', ' fetters', ' chains'],
['born again', ' born of God', ' new birth'],
['brother', ' brothers'],
['call', ' calls', ' calling', ' called', ' invite'],
['centurion', ' centurions'],
['children', ' child'],
['Christ', ' Messiah'],
['Christian'],
['church', ' churches', ' Church'],
['circumcise', ' circumcised', ' circumcision', ' uncircumcised', ' uncircumcision'],
['clean', ' cleans', ' cleaned', ' cleanse', ' cleansed', ' cleansing', ' wash', ' washing', ' washed', ' washes', ' unclean'],
['command', ' commands', ' commanded', ' commandment', ' commandments'],
['compassion', ' compassionate', ' deal gently with'],
['condemn', ' condemns', ' condemned', ' condemnation', ' sentence of condemnation', ' sentenced to death'],
['confess', ' confessed', ' confesses', ' confession'],
['conscience', ' consciences'],
['consecrate', ' consecrated', ' consecration'],
['cornerstone', ' cornerstones'],
['covenant', ' covenants', ' new covenant', ' agreed'],
['covenant faithfulness', ' covenant loyalty', ' loving kindness', ' unfailing love', ' steadfast love'],
['cross'],
['crucify', ' crucified', ' nailing to a cross'],
['curse', ' cursed', ' curses', ' cursing', ' speaks evil'],
['daughter of Zion'],
['day of the Lord', ' day of Yahweh'],
['deacon', ' deacons'],
['demon', ' evil spirit', ' unclean spirit', ' demonic'],
['demon-possessed', ' possessed by a demon', ' possessed by demons'],
['disciple', ' disciples'],
['discipline', ' disciplines', ' disciplined', ' self-discipline', ' no discipline'],
['divine', ' divine nature'],
['dominion', ' subjugate', ' control'],
['elect', ' choose', ' chose', ' chosen', ' chosen one', ' chosen ones', ' chosen people', ' Chosen One'],
['ephod'],
['eternity', ' everlasting', ' eternal', ' forever'],
['eunuch', ' eunuchs'],
['evangelist', ' evangelists'],
['evil', ' wicked', ' wickedness', ' wickedly'],
['exalt', ' exalted', ' exalts', ' exaltation', ' high position'],
['exhort', ' exhortation', ' urge'],
['faith'],
['faithful', ' faithfulness', ' unfaithful', ' unfaithfulness', ' breach of faith', ' show self faithful', ' faithful one', ' faithful people'],
['faithless', ' faithlessness'],
['god', ' false god', ' gods', ' goddess', ' idol', ' idols', ' idolater', ' idolaters', ' idolatrous', ' idolatry', ' disgusting figure', ' goat idols'],
['favor', ' favors', ' favorable', ' favoritism'],
['fear', ' fears', ' afraid'],
['fellowship', ' be participants with', ' associate with', ' be allied with'],
['filled with the Holy Spirit'],
['flesh', ' meat', ' humans', ' living creatures'],
['fool', ' fools', ' foolish', ' folly', ' out of mind', ' unwise', ' stupid', ' doing what is repulsive', ' blame'],
['forever'],
['forgive', ' forgives', ' forgiven', ' forgiveness', ' pardon', ' pardoned'],
['forsake', ' forsakes', ' forsaken', ' forsook', ' abandon', ' abandoned', ' give up', ' has left', ' have left'],
['fulfill', ' fulfilled', ' carried out'],
['Gentile', ' Gentiles'],
['gift', ' gifts'],
['glorify'],
['glory', ' glorious', ' glorify', ' glorified', ' glorifies'],
['God'],
['godly', ' godliness', ' ungodly', ' godless', ' ungodliness', ' godlessness', ' godless actions', ' godly life'],
['God the Father', ' heavenly Father', ' Father'],
['good', ' goodness', ' pleasing'],
['good news', ' gospel'],
['grace', ' gracious', ' kind'],
['guilt', ' guilty', ' held accountable'],
['Hades', ' Sheol'],
['heart', ' hearts', ' inner self', ' myself'],
['heaven', ' sky', ' skies', ' heavens', ' heavenly', ' overhead', ' in midair'],
['Hebrew', ' Hebrews'],
['hell', ' lake of fire', ' handed down to Tartarus'],
['high priest'],
['holy', ' holiness', ' unholy', ' sacred'],
['Holy One'],
['holy place'],
['Holy Spirit', ' Spirit of God', ' Spirit of the Lord', ' Spirit'],
['honor', ' honors'],
['hope', ' hoped', ' hopes'],
['house of God', ' Yahwehs house'],
['humble', ' humbles', ' humbled', ' humility'],
['hypocrite', ' hypocrites', ' hypocrisy'],
['image of God'],
['in Christ', ' in Jesus', ' in the Lord', ' in him'],
['inherit', ' inheritance', ' heritage'],
['iniquity', ' iniquities'],
['innocent', ' innocence', ' guiltless'],
['intercede', ' intercedes', ' intercession'],
['Israel', ' Israelites'],
['jealous', ' jealousy'],
['Jesus', ' Jesus Christ', ' Christ Jesus'],
['Jew', ' Jewish', ' Jews'],
['judge', ' judges', ' judgment', ' judgments', ' decision'],
['judgment day', ' that day'],
['just', ' justice', ' unjust', ' unjustly', ' injustice', ' justly', ' justify', ' justification'],
['justify'],
['kingdom of God', ' kingdom of heaven'],
['King of the Jews', ' king of the Jews'],
['lamb', ' Lamb of God'],
['lament', ' laments', ' lamentation', ' wail'],
['last day', ' last days', ' latter days'],
['law of Moses', ' Gods law', ' law of Yahweh', ' the law', ' expert in the law'],
['life', ' live', ' lived', ' lives', ' living', ' alive'],
['lord', ' lords', ' Lord', ' master', ' masters', ' sir', ' sirs'],
['lordgod'],
['Lords Supper'],
['Lord Yahweh', ' Yahweh God'],
['love', ' loves', ' loving', ' loved'],
['majesty'],
['manna'],
['mercy', ' merciful'],
['to minister', ' ministry', ' offer as a priest', ' performs service', ' attending to needs'],
['miracle', ' miracles'],
['Most High'],
['myrrh'],
['name', ' names', ' named'],
['Nazirite', ' Nazirites', ' Nazirite vow'],
['newcovenant'],
['parable', ' parables'],
['Passover'],
['pastor', ' pastors'],
['Pentecost', ' Festival of Weeks'],
['people of God', ' my people'],
['perish', ' perished', ' perishing', ' perishable'],
['Pharisee', ' Pharisees'],
['power', ' powers', ' powerful', ' powerfully', ' be mastered'],
['pray', ' prayer', ' prayers', ' prayed'],
['predestine', ' predestined'],
['priest', ' priests', ' priesthood'],
['promise', ' promises', ' promised'],
['Promised Land'],
['prophet', ' prophets', ' prophecy', ' prophesy', ' seer', ' prophetess'],
['propitiation'],
['psalm', ' psalms'],
['pure', ' purify', ' purity', ' purification', ' sincere'],
['Rabbi'],
['ransom', ' ransomed'],
['reconcile', ' reconciles', ' reconciled', ' reconciliation'],
['redeem', ' redeems', ' redemption', ' redeemer'],
['remnant'],
['repent', ' repents', ' repented', ' repentance'],
['restore', ' restores', ' restored', ' restoration', ' bring back'],
['resurrection'],
['reveal', ' reveals', ' revealed', ' revelation'],
['righteous', ' righteousness', ' unrighteous', ' unrighteousness', ' upright', ' uprightness', ' do what is right'],
['right hand', ' right hands'],
['Sabbath'],
['Sadducee', ' Sadducees'],
['saint', ' saints', ' Gods holy people', ' holy ones', ' holy people'],
['salvation'],
['sanctify', ' sanctifies', ' sanctification'],
['sanctuary'],
['Satan', ' devil', ' evil one'],
['save', ' saves', ' saved', ' safe', ' salvation'],
['Savior', ' savior'],
['scribe', ' scribes'],
['set apart'],
['sign', ' signs', ' proof', ' remind', ' reminds', ' reminded', ' reminder', ' reminders', ' reminding'],
['sin', ' sins', ' sinned', ' sinful', ' sinner', ' sinning'],
['son', ' sons'],
['Son of God', ' Son'],
['Son of Man', ' son of man'],
['sons of God', ' children of God'],
['soul', ' souls'],
['spirit', ' spirits', ' spiritual', ' ghost'],
['stone', ' stones', ' stoning'],
['synagogue'],
['tabernacle'],
['temple'],
['tempt', ' temptation'],
['test', ' tests', ' tested', ' testing', ' testing in the fire'],
['testimony', ' testify', ' testified', ' witness', ' witnesses', ' eyewitness', ' eyewitnesses'],
['tetrarch'],
['the twelve', ' the eleven'],
['transgress', ' transgresses', ' transgression'],
['trespass', ' trespasses', ' trespassed'],
['true', ' truth', ' truths', ' truly'],
['trust', ' trusts', ' trusted', ' trustworthy', ' trustworthiness'],
['unbeliever'],
['uncircumcised'],
['unclean'],
['unfaithful'],
['ungodly'],
['unholy'],
['unjust'],
['unlawful'],
['unleavened bread'],
['unrighteous'],
['upright'],
['vow', ' vows', ' vowed'],
['will of God', ' his will', ' he wishes'],
['wise', ' wisdom'],
['witness'],
['woe'],
['word of God', ' words of God', ' word of Yahweh', ' word of the Lord', ' word of truth', ' scripture', ' scriptures'],
['wordoftruth'],
['works', ' deeds', ' work', ' acts'],
['world', ' worldly'],
['worship', ' worshiping'],
['worthy', ' worth', ' unworthy', ' worthless', ' value', ' valuation', ' full price'],
['wrath', ' fury'],
['Yahweh'],
['Yahweh of hosts', ' God of hosts', ' host of heaven', ' host of the heavens', ' Lord of hosts'],
['zeal', ' zealous'],
['Zion', ' Mount Zion'],
['Aaron'],
['Abel'],
['Abiathar'],
['Abijah'],
['Abimelech'],
['Abner'],
['Abraham', ' Abram'],
['Absalom'],
['Adam'],
['Adonijah'],
['Ahab'],
['Ahasuerus'],
['Ahaz'],
['Ahaziah'],
['Ahijah'],
['Ai'],
['Amalek', ' Amalekite', ' Amalekites'],
['Amaziah'],
['Ammon', ' Ammonite', ' Ammonites'],
['Amnon'],
['Amorite', ' Amorites'],
['Amos'],
['Amoz'],
['Andrew'],
['Annas'],
['Antioch'],
['Apollos'],
['Aquila'],
['Arabah'],
['Arabia', ' Arabian', ' Arabians'],
['Aram', ' Aramean', ' Arameans', ' Aramaic', ' Aram of Damascus'],
['Ararat'],
['Artaxerxes'],
['Asa'],
['Asaph'],
['Ashdod', ' Azotus'],
['Asher'],
['Asherah', ' Asherah pole', ' Asherah poles', ' Ashtoreth', ' Ashtoreths'],
['Ashkelon'],
['Asia'],
['Assyria', ' Assyrian', ' Assyrians', ' Assyrian Empire'],
['Athaliah'],
['Azariah'],
['Baal'],
['Baasha'],
['Babel'],
['Babylon', ' Babylonia', ' Babylonian', ' Babylonians'],
['Balaam'],
['Barabbas'],
['Barnabas'],
['Bartholomew'],
['Baruch'],
['Bashan'],
['Bathsheba'],
['Beelzebul'],
['Beersheba'],
['Benaiah'],
['Benjamin', ' Benjamite', ' Benjamites'],
['Berea'],
['Bethany'],
['Bethel'],
['Bethlehem'],
['Beth Shemesh'],
['Bethuel'],
['Boaz'],
['Caesar'],
['Caesarea', ' Caesarea Philippi'],
['Caiaphas'],
['Cain'],
['Caleb'],
['Cana'],
['Canaan', ' Canaanite', ' Canaanites'],
['Capernaum'],
['Carmel', ' Mount Carmel'],
['Chaldea', ' Chaldean', ' Chaldeans'],
['Kerethites'],
['Cilicia'],
['city of David'],
['Colossae', ' Colossians'],
['Corinth', ' Corinthians'],
['Cornelius'],
['Crete', ' Cretan', ' Cretans'],
['Cush'],
['Cyprus'],
['Cyrene'],
['Cyrus'],
['Damascus'],
['Dan'],
['Daniel'],
['Darius'],
['David'],
['Delilah'],
['Eden', ' garden of Eden'],
['Edom', ' Edomite', ' Edomites', ' Idumea'],
['Egypt', ' Egyptian', ' Egyptians'],
['Ekron', ' Ekronites'],
['Elam', ' Elamites'],
['Eleazar'],
['Eliakim'],
['Elijah'],
['Elisha'],
['Elizabeth'],
['En Gedi'],
['Enoch'],
['Ephesus', ' Ephesian', ' Ephesians'],
['Ephraim', ' Ephraimite', ' Ephraimites'],
['Ephrath', ' Ephrathah', ' Ephrathite', ' Ephrathites'],
['Esau'],
['Esther'],
['Ethiopia', ' Ethiopian'],
['Euphrates River', ' the River'],
['Eve'],
['Ezekiel'],
['Ezra'],
['Gabriel'],
['Gad'],
['Galatia', ' Galatians'],
['Galilee', ' Galilean', ' Galileans'],
['Gath', ' Gittite', ' Gittites'],
['Gaza'],
['Gerar'],
['Geshur', ' Geshurites'],
['Gethsemane'],
['Gibeah'],
['Gibeon', ' Gibeonite', ' Gibeonites'],
['Gideon'],
['Gilead', ' Gileadite', ' Gileadites'],
['Gilgal'],
['Girgashites'],
['Golgotha'],
['Goliath'],
['Gomorrah'],
['Goshen'],
['Greece'],
['Greek', ' Grecian'],
['Habakkuk'],
['Hagar'],
['Haggai'],
['Ham'],
['Hamath', ' Hamathites', ' Lebo Hamath'],
['Hamor'],
['Hananiah'],
['Hannah'],
['Haran'],
['Hebron'],
['Herod', ' Herod Antipas'],
['Herodias'],
['Herod', ' Herod the Great'],
['Hezekiah'],
['Hilkiah'],
['Hittite', ' Hittites'],
['Hivite', ' Hivites'],
['Horeb'],
['Hosea'],
['Hoshea'],
['house of David'],
['Iconium'],
['Isaac'],
['Isaiah'],
['Ishmael', ' Ishmaelite', ' Ishmaelites'],
['Issachar'],
['Israel', ' Israelite', ' Israelites', ' Jacob'],
['James (brother of Jesus)'],
['James (son of Alphaeus)'],
['James (son of Zebedee)'],
['Japheth'],
['Jebus', ' Jebusite', ' Jebusites'],
['Jehoiachin'],
['Jehoiada'],
['Jehoiakim'],
['Jehoram', ' Joram'],
['Jehoshaphat'],
['Jehu'],
['Jephthah'],
['Jeremiah'],
['Jericho'],
['Jeroboam'],
['Jerusalem'],
['Jesse'],
['Jethro', ' Reuel'],
['Jezebel'],
['Jezreel', ' Jezreelite'],
['Joab'],
['Joash'],
['Job'],
['Joel'],
['John Mark'],
['John (the apostle)'],
['John (the Baptist)'],
['Jonah'],
['Jonathan'],
['Joppa'],
['Joram'],
['Jordan River', ' Jordan'],
['Joseph (NT)'],
['Joseph (OT)'],
['Joshua'],
['Josiah'],
['Jotham'],
['Judah'],
['Judas Iscariot'],
['Judas son of James'],
['Judea'],
['Kadesh', ' Kadesh-Barnea', ' Meribah Kadesh'],
['Kedar'],
['Kedesh'],
['Kidron Valley'],
['Israel', ' kingdom of Israel'],
['Judah', ' kingdom of Judah'],
['Korah', ' Korahite', ' Korahites'],
['Laban'],
['Lamech'],
['Lazarus'],
['Leah'],
['Lebanon'],
['Leviathan'],
['Levi', ' Levite', ' Levites', ' Levitical'],
['Lot'],
['Luke'],
['Lystra'],
['Maacah'],
['Macedonia'],
['Maker'],
['Malachi'],
['Manasseh'],
['man of God'],
['Martha'],
['Mary', ' the mother of Jesus'],
['Mary Magdalene'],
['Mary (sister of Martha)'],
['Matthew', ' Levi'],
['Mede', ' Medes', ' Media'],
['the sea', ' the Great Sea', ' the western sea', ' Mediterranean Sea'],
['Melchizedek'],
['Memphis'],
['Meshech'],
['Mesopotamia', ' Aram Naharaim'],
['Micah'],
['Michael'],
['Midian', ' Midianite', ' Midianites'],
['Miriam'],
['Mishael'],
['Mizpah'],
['Moab', ' Moabite', ' Moabitess'],
['Molech', ' Moloch'],
['Mordecai'],
['Moses'],
['Mount Hermon'],
['Mount of Olives'],
['Naaman'],
['Nahor'],
['Nahum'],
['Naphtali'],
['Nathan'],
['Nazareth', ' Nazarene'],
['Nebuchadnezzar'],
['Negev'],
['Nehemiah'],
['Nile River', ' River of Egypt', ' the Nile'],
['Nineveh', ' Ninevite'],
['Noah'],
['Obadiah'],
['Omri'],
['Paddan Aram'],
['Paran'],
['Paul', ' Saul'],
['Peor', ' Mount Peor', ' Baal Peor'],
['Perizzite'],
['Persia', ' Persians'],
['Peter', ' Simon Peter', ' Cephas'],
['Pharaoh', ' king of Egypt'],
['Philip', ' the evangelist'],
['Philippi', ' Philippians'],
['Philip', ' the apostle'],
['Philistia'],
['Philistines'],
['Phinehas'],
['Phoenicia', ' Syrophoenician'],
['Pilate'],
['Pontus'],
['Potiphar'],
['Priscilla', ' Prisca'],
['Rabbah'],
['Rachel'],
['Rahab'],
['Ramah'],
['Ramoth'],
['Rebekah'],
['Sea of Reeds', ' Red Sea'],
['Rehoboam'],
['Reuben'],
['Rimmon'],
['Rome', ' Roman'],
['Ruth'],
['Salt Sea', ' Dead Sea'],
['Samaria', ' Samaritan'],
['Samson'],
['Samuel'],
['Sarah', ' Sarai'],
['Saul'],
['Sea of Galilee', ' Sea of Kinnereth', ' lake of Gennesaret', ' Sea of Tiberias'],
['Sennacherib'],
['Seth'],
['Sharon', ' Plain of Sharon'],
['Sheba'],
['Shechem'],
['Shem'],
['Shiloh'],
['Shimei'],
['Shinar'],
['Sidon', ' Sidonians'],
['Silas', ' Silvanus'],
['Simeon'],
['Simon the Zealot'],
['Sinai', ' Mount Sinai'],
['Sodom'],
['Solomon'],
['Stephen'],
['Succoth'],
['Syria', ' Ashur'],
['Tamar'],
['Tarshish'],
['Tarsus'],
['Terah'],
['Thessalonica', ' Thessalonian', ' Thessalonians'],
['Thomas'],
['Timothy'],
['Tirzah'],
['Titus'],
['Troas'],
['Tubal'],
['Tychicus'],
['Tyre', ' Tyrians'],
['Ur'],
['Uriah'],
['Uzziah'],
['Vashti'],
['Zacchaeus'],
['Zadok'],
['Zebedee'],
['Zebulun'],
['Zechariah (NT)'],
['Zechariah (OT)'],
['Zedekiah'],
['Zephaniah'],
['Zerubbabel'],
['Zoar'],
['twelve tribes of Israel', ' twelve tribes of the children of Israel', ' twelve tribes'],
['abyss', ' bottomless pit'],
['acacia'],
['accuse', ' accuses', ' accused', ' accusing', ' accuser', ' accusers', ' accusation', ' accusations'],
['acknowledge', ' acknowledges', ' acknowledged', ' admit', ' admitted'],
['acquit', ' acquits', ' acquitted'],
['administration', ' administrator', ' administrators', ' administered', ' administering'],
['admonish', ' warned', ' aware', ' have been a witness against'],
['adversary', ' adversaries', ' enemy', ' enemies'],
['afflicted', ' afflict', ' afflicting', ' affliction', ' afflictions'],
['age', ' ages', ' aged'],
['alarm', ' alarms', ' alarmed'],
['alms'],
['altar of incense', ' incense altar'],
['amazed', ' amazement', ' astonished', ' marvel', ' marveled', ' marveling', ' marvelous', ' wonder', ' wonders', ' dumbfounded', ' confusion'],
['ambassador', ' ambassadors', ' representative', ' representatives'],
['anger', ' angered', ' angry'],
['anguish'],
['archer', ' archers'],
['armor', ' armory'],
['arrogant', ' arrogantly', ' arrogance'],
['ash', ' ashes', ' dust'],
['assembly', ' assemblies', ' assemble', ' assembled', ' congregation', ' meeting'],
['assign', ' assigned', ' assigning', ' assignment', ' assignments', ' reassign'],
['astray', ' go astray', ' went astray', ' lead astray', ' led astray', ' stray', ' strayed', ' strays'],
['avenge', ' avenges', ' avenged', ' avenging', ' avenger', ' revenge', ' vengeance'],
['awe', ' awesome'],
['ax', ' axes'],
['banquet'],
['barley'],
['barren', ' dry'],
['basket', ' baskets', ' basketfuls'],
['bear', ' bears', ' bearing', ' bearer', ' carry', ' carried'],
['bear', ' bears'],
['beast', ' beasts', ' animal', ' animals'],
['beg', ' begged', ' begging', ' beggar', ' needy'],
['betray', ' betrays', ' betrayed', ' betraying', ' betrayer', ' betrayers'],
['day', ' days'],
['hour', ' hours', ' moment', ' immediately', ' for a while'],
['month', ' months', ' monthly'],
['watch (biblical time)', ' watches'],
['week', ' weeks'],
['year', ' years'],
['blemish', ' blemishes', ' unblemished', ' defect'],
['bloodshed'],
['blot out', ' blots out', ' blotted out', ' wipe out', ' wipes out', ' wiped out'],
['bold', ' boldly', ' boldness', ' emboldened'],
['Book of Life'],
['bow', ' bows', ' bowed', ' bowing', ' bow down', ' bows down', ' bowed down', ' bowing down', ' knelt', ' are bent', ' have bent', ' bend', ' bend the knee', ' will bend'],
['bow and arrow', ' bows and arrows', ' a bow', ' arrows'],
['bread'],
['breastplate', ' breastplates', ' breastpiece'],
['breath', ' breathe', ' breathes', ' breathed', ' breathing'],
['bribe', ' bribes', ' bribed', ' bribery', ' payoffs'],
['bride', ' brides', ' bridal'],
['bridegroom', ' bridegrooms'],
['bronze'],
['burden', ' burdens', ' burdened', ' burdensome', ' heavy', ' utterances'],
['burnt offering', ' burnt offerings', ' offering by fire'],
['bury', ' buries', ' buried', ' burying', ' burial'],
['camel', ' camels'],
['captive', ' captives', ' captivate', ' captivated', ' captivity', ' catch', ' captured'],
['cast out', ' casting out', ' driving out', ' throw out', ' throwing out'],
['caught up', ' caught up with', ' catch up with'],
['cedar', ' cedars', ' cedarwood'],
['census'],
['chaff'],
['chariot', ' chariots', ' charioteers'],
['cherub', ' cherubim', ' cherubs'],
['chief', ' chiefs', ' officers'],
['chief priests'],
['chronicles'],
['citizen', ' citizens', ' citizenship'],
['clan', ' clans'],
['clothe', ' clothed', ' clothes', ' clothing', ' unclothed', ' garments'],
['comfort', ' comforts', ' comforted', ' comforting', ' comforter', ' comforters', ' uncomforted'],
['commander', ' commanders', ' leader', ' leaders'],
['commit', ' commits', ' committed', ' committing', ' commitment'],
['companion', ' companions', ' fellow worker', ' fellow workers', ' friend'],
['conceive', ' conceives', ' conceived', ' conception'],
['concubine', ' concubines'],
['confidence', ' confident', ' confidently'],
['confirm', ' confirms', ' confirmed', ' confirmation', ' guaranteed'],
['consume', ' consumes', ' consumed', ' consuming'],
['contempt', ' contemptible', ' not worth listening to'],
['corrupt', ' corrupts', ' corrupted', ' corrupting', ' corruption', ' corruptly', ' incorruptibility', ' depraved'],
['council', ' councils'],
['advice', ' advise', ' advised', ' advisor', ' advisors', ' counsel', ' counselor', ' counselors', ' counsels'],
['courage', ' courageous', ' encourage', ' encouraged', ' encouragement', ' take courage', ' discourage', ' discouraged', ' discouragement', ' discouraging', ' bravest'],
['court', ' courts', ' courtyard', ' courtyards'],
['cow', ' cows', ' bull', ' bulls', ' calf', ' calves', ' cattle', ' heifer', ' ox', ' oxen'],
['create', ' creates', ' created', ' creation', ' creator'],
['creature', ' creatures'],
['crime', ' crimes', ' criminal', ' criminals'],
['crown', ' crowns', ' crowned'],
['cry', ' cries', ' cried', ' crying', ' cry out', ' cries out', ' cried out', ' outcry', ' outcries', ' shouted', ' shouts'],
['cupbearer', ' cupbearers'],
['curtain', ' curtains'],
['cut off', ' cuts off', ' cutting off'],
['cypress'],
['darkness'],
['die', ' dies', ' died', ' dead', ' deadly', ' deadness', ' death', ' deaths', ' deathly'],
['deceive', ' deceives', ' deceived', ' deceiving', ' deceit', ' deceiver', ' deceivers', ' deceitful', ' deceitfully', ' deceitfulness', ' deception', ' deceptive', ' illusions'],
['declare', ' declares', ' declared', ' declaring', ' declaration', ' declarations', ' proclaim', ' proclaimed', ' proclaims', ' proclaiming', ' proclamation', ' proclamations', ' announce', ' announces', ' make clear'],
['decree', ' decrees', ' decreed'],
['dedicate', ' dedicates', ' dedicated', ' dedication', ' established', ' devoted'],
['deer', ' doe', ' does', ' fawns', ' roebuck', ' roebucks'],
['defile', ' defiles', ' defiled', ' defiling', ' be defiled', ' are defiled', ' was defiled', ' were defiled'],
['delight', ' delights', ' delighted', ' delightful', ' took pleasure'],
['deliver', ' delivers', ' delivered', ' delivering', ' deliverer', ' deliverance', ' hand over', ' handed over', ' turn over', ' released', ' rescue', ' rescues', ' rescued', ' be rescued', ' permit to escape', ' defended'],
['descend', ' descends', ' descended', ' descending', ' descendant', ' descendants', ' clans descended'],
['desecrate', ' desecrated', ' desecrating'],
['desert', ' deserts', ' wilderness', ' wildernesses'],
['desolate', ' desolation', ' desolations', ' alone', ' left all alone', ' deserted'],
['destiny'],
['destroy', ' destroys', ' destroyed', ' destroyer', ' destroyers', ' destroying', ' destruction'],
['detest', ' detested', ' detestable'],
['devastate', ' devastated', ' devastating', ' devastation', ' devastations'],
['devour', ' devours', ' devoured', ' devouring'],
['discern', ' discerned', ' discerning', ' discernment', ' distinguish', ' distinguishing'],
['disgrace', ' disgraces', ' disgraced', ' disgraceful'],
['dishonor', ' dishonors', ' dishonored', ' dishonorable'],
['disobey', ' disobeys', ' disobeyed', ' disobedience', ' disobedient'],
['disperse', ' dispersion', ' scatter', ' scattered', ' scatters', ' distributed'],
['divination', ' diviner', ' soothsaying', ' soothsayer'],
['divorce'],
['doctrine', ' beliefs'],
['donkey', ' mule'],
['doom'],
['doorpost'],
['dove', ' pigeon'],
['dream'],
['drink offering'],
['drunk', ' drunkard'],
['dung', ' manure'],
['eagle', ' eagles'],
['earth', ' earthen', ' earthly'],
['elder', ' elders', ' older'],
['endure', ' endures', ' endured', ' enduring', ' endurance'],
['enslave', ' enslaves', ' enslaved', ' bondage'],
['envy', ' covet', ' greedy'],
['evildoer', ' evildoers', ' evildoing'],
['exile', ' exiles', ' exiled'],
['exult', ' exults', ' exulting', ' exultant'],
['face', ' faces', ' faced', ' facing', ' facial', ' facedown', ' before'],
['false prophet', ' false prophets'],
['corrupt witness', ' false report', ' false testimony', ' false witness', ' false witnesses'],
['family', ' families'],
['famine', ' famines'],
['fast', ' fasts', ' fasted', ' fasting', ' fastings'],
['ancestor', ' ancestors', ' father', ' fathers', ' fathered', ' fathering', ' forefather', ' forefathers', ' grandfather'],
['feast', ' feasts', ' feasting'],
['fellowship offering', ' fellowship offerings'],
['festival', ' festivals'],
['fig', ' figs'],
['fir', ' firs'],
['fire', ' fires', ' firebrands', ' firepans', ' fireplaces', ' firepot', ' firepots'],
['firstborn'],
['firstfruits'],
['fishermen', ' fishers'],
['flocks', ' flock', ' flocking', ' herd', ' herds'],
['flood'],
['flute', ' flutes', ' pipe', ' pipes'],
['footstool'],
['alien', ' alienates', ' alienated', ' foreign', ' foreigner', ' foreigners'],
['foreknew', ' foreknowledge'],
['sexual immorality', ' immorality', ' immoral', ' fornication'],
['found', ' founded', ' founder', ' foundation', ' foundations'],
['fountain', ' fountains', ' source', ' sources', ' spring', ' springs'],
['frankincense'],
['free', ' frees', ' freed', ' freeing', ' freedom', ' freely', ' freeman', ' freewill', ' liberty'],
['freewill offering', ' freewill offerings'],
['fruit', ' fruits', ' fruitful', ' unfruitful'],
['furnace'],
['gate', ' gates', ' gate bars', ' gatekeeper', ' gatekeepers', ' gateposts', ' gateway', ' gateways'],
['generation'],
['giant', ' giants'],
['gird', ' girded', ' wrapped around', ' tied up', ' belt', ' tuck in belt', ' tucked in belt', ' put belt around', ' put belt on', ' had sashes around', ' fastening waistband around'],
['glean', ' gleans', ' gleaned', ' gleanings'],
['goat', ' goats', ' goatskins', ' scapegoat', ' kids'],
['gold', ' golden'],
['gossip', ' gossips', ' gossiper', ' talk nonsense'],
['govern', ' government', ' governments', ' governor', ' governors', ' provincial governors', ' proconsul', ' proconsuls'],
['grain', ' grains', ' grainfields'],
['grain offering', ' grain offerings'],
['grape', ' grapes', ' grapevine'],
['groan', ' groans', ' groaned', ' groaning', ' groanings'],
['guilt offering', ' guilt offerings'],
['hail', ' hails', ' hailstones', ' hailstorm'],
['hand', ' hands', ' handed', ' handing', ' by the hand of', ' lay a hand on', ' lays his hand on', ' from the hand of'],
['hang', ' hangs', ' hanged', ' hanging', ' hangings', ' hung'],
['hard', ' harder', ' hardest', ' harden', ' hardens', ' hardened', ' hardening', ' hardness'],
['harp', ' harps', ' harpist', ' harpists'],
['harvest', ' harvests', ' harvested', ' harvesting', ' harvester', ' harvesters'],
['haughty', ' lofty'],
['head', ' heads', ' forehead', ' foreheads', ' baldhead', ' headfirst', ' headbands', ' headscarves', ' beheaded'],
['cure', ' cured', ' heal', ' heals', ' healed', ' healing', ' healings', ' healer', ' health', ' healthy', ' unhealthy'],
['heir', ' heirs'],
['high place', ' high places'],
['holy city', ' holy cities'],
['honey', ' honeycomb'],
['hoof', ' hoofs', ' hooves'],
['horn', ' horns', ' horned'],
['horror', ' horrors', ' horrible', ' horribly', ' horrified', ' horrifying', ' source of horror', ' object of horror'],
['horse', ' horses', ' warhorse', ' warhorses', ' horseback'],
['horseman', ' horsemen'],
['hour', ' hours'],
['house', ' houses', ' housetop', ' housetops', ' housekeepers', ' dwelling'],
['household', ' household members', ' members of household', ' households'],
['humiliate', ' humiliated', ' humiliation'],
['idol'],
['image', ' images', ' carved image', ' carved images', ' cast metal images', ' figure', ' figures', ' carved figure', ' carved figures', ' cast metal figure', ' cast metal figures', ' statue'],
['imitate', ' imitator', ' imitators'],
['incense', ' incenses'],
['inquire', ' inquires', ' inquired', ' inquiries'],
['instruct', ' instructs', ' instructed', ' instructing', ' instruction', ' instructions', ' instructors'],
['integrity'],
['interpret', ' interprets', ' interpreted', ' interpreting', ' interpretation', ' interpretations', ' interpreter'],
['Jewish authorities', ' Jewish leader'],
['joy', ' joyful', ' joyfully', ' joyfulness', ' enjoy', ' enjoys', ' enjoyed', ' enjoying', ' enjoyment', ' rejoice', ' gladness', ' rejoices', ' rejoiced', ' rejoicing'],
['Judaism', ' Jewish religion'],
['judge', ' judges'],
['kin', ' kinfolk', ' kindred', ' kinsfolk', ' kinsman', ' kinsmen'],
['kind', ' kinds', ' kindness', ' kindnesses'],
['king', ' kings', ' kingship', ' kingly'],
['kingdom', ' kingdoms'],
['kiss', ' kisses', ' kissed', ' kissing'],
['know', ' knows', ' knew', ' knowing', ' knowledge', ' known', ' make known', ' makes known', ' made known', ' unknown', ' seeing', ' saw'],
['labor', ' labors', ' labored', ' laborer', ' laborers', ' work', ' worked', ' hard work'],
['labor pains', ' in labor', ' birth pains', ' pains of childbirth', ' labors in pain together'],
['lamp', ' lamps', ' torch', ' torches'],
['lampstand', ' lampstands'],
['law', ' laws', ' lawgiver', ' lawbreaker', ' lawbreakers', ' lawsuit', ' lawyer', ' principle', ' principled', ' principles'],
['lawful', ' lawfully', ' unlawful', ' not lawful', ' lawless', ' lawlessness'],
['lawless'],
['learned men', ' astrologers'],
['leopard', ' leopards'],
['leper', ' lepers', ' leprosy', ' leprous'],
['epistle', ' letter', ' letters'],
['light', ' lights', ' lighting', ' lightning', ' daylight', ' sunlight', ' twilight', ' enlighten', ' enlightened'],
['like', ' liken', ' likeness', ' likenesses', ' likewise', ' alike', ' unlike', ' as if'],
['lions', ' lion', ' lioness', ' lionesses'],
['livestock'],
['locust', ' locusts'],
['loins', ' were descendants', ' waist'],
['lots', ' casting lots'],
['lover', ' lovers'],
['lowly', ' lowliest', ' lowliness'],
['lust', ' lusts', ' lusted', ' lusting', ' lustful', ' passions', ' desires'],
['lute', ' lyre', ' lyres'],
['magic', ' magical', ' magician', ' magicians', ' who talk with spirits', ' who talks with spirits'],
['magistrate', ' magistrates'],
['magnify'],
['manager', ' managers', ' steward', ' stewards', ' stewardship'],
['meal offering'],
['mediator'],
['meditate', ' meditates', ' meditation', ' thoughts'],
['meek', ' meekness'],
['melt', ' melted', ' melting', ' melts', ' molten'],
['member', ' members', ' body parts'],
['memorial', ' memorial offering'],
['messenger', ' messengers'],
['might', ' mighty', ' mightier', ' mightily', ' mighty works', ' mighty host'],
['mind', ' minds', ' minded', ' mindful', ' likeminded'],
['mock', ' mocks', ' mocked', ' mocking', ' mocker', ' mockers', ' mockery', ' ridicule', ' ridiculed', ' scoff at', ' scoffed at', ' taunting song', ' laughingstock'],
['mold', ' molds', ' molded', ' molding', ' molder', ' moldy'],
['mourn', ' mourns', ' mourned', ' mourning', ' mourner', ' mourners', ' mournful', ' mournfully', ' weep', ' weeping'],
['multiply', ' multiplies', ' multiplied', ' multiplying', ' multiplication', ' increase'],
['mystery', ' mysteries', ' hidden truth', ' hidden truths'],
['nation', ' nations'],
['neighbor', ' neighbors', ' neighborhood', ' neighboring'],
['new moon', ' new moons'],
['noble', ' nobles', ' nobleman', ' noblemen', ' nobility', ' royal official'],
['oak', ' oaks'],
['oath', ' oaths', ' swear', ' swears', ' swearing', ' swear by', ' swears by'],
['obey', ' obeys', ' obeyed', ' obeying', ' obedience', ' obedient', ' obediently'],
['offspring'],
['oil'],
['olive', ' olives'],
['on high', ' in the highest'],
['oppress', ' oppresses', ' oppressed', ' oppressing', ' oppression', ' oppressive', ' oppressor', ' oppressors'],
['ordain', ' ordained', ' ordinary', ' ordination', ' planned long ago', ' set up', ' prepared', ' bring about', ' marked out'],
['ordinance', ' ordinances', ' regulation offering', ' regulations', ' requirements', ' strict law', ' permanent things', ' customs'],
['oversee', ' oversees', ' overseen', ' overseer', ' overseers'],
['overtake', ' overtakes', ' overtaken', ' overtook'],
['pagan', ' pagans'],
['palace', ' palaces'],
['palm', ' palms'],
['pardon'],
['partial', ' be partial', ' partiality'],
['patient', ' patiently', ' patience', ' impatient'],
['patriarch', ' patriarchs'],
['peace', ' peaceful', ' peacefully', ' peaceable', ' peacemakers'],
['peace offering', ' peace offerings'],
['people group', ' peoples', ' the people', ' a people', ' people'],
['perfect', ' perfected', ' perfecter', ' perfection', ' perfectly', ' complete'],
['persecute', ' persecuted', ' persecuting', ' persecution', ' persecutions', ' persecutor', ' persecutors', ' chase', ' pursuers'],
['persevere', ' perseverance'],
['perverse', ' perversely', ' perversion', ' perversions', ' perversities', ' pervert', ' perverts', ' perverted', ' perverting', ' malicious', ' maliciously', ' devious', ' dishonest', ' crooked ways', ' distortion'],
['pierce', ' pierces', ' pierced', ' piercing'],
['pig', ' pigs', ' pork', ' swine'],
['column', ' columns', ' pillar', ' pillars'],
['pit', ' pits', ' pitfall', ' trenches'],
['plague', ' plagues'],
['plea', ' pleas', ' plead', ' pleads', ' pleaded', ' pleading', ' pleadings'],
['pledge', ' pledged', ' pledges'],
['plow', ' plows', ' plowed', ' plowing', ' plowers', ' plowman', ' plowmen', ' plowshares', ' unplowed'],
['pomegranate', ' pomegranates'],
['possess', ' possesses', ' possessed', ' possessing', ' possession', ' possessions', ' dispossess', ' owned title'],
['praise', ' praises', ' praised', ' praising', ' praiseworthy'],
['preach', ' preached', ' preaching', ' preacher'],
['precious', ' valuable', ' expensive', ' fine'],
['prey', ' prey on'],
['prince', ' princes', ' princess', ' princesses', ' officials', ' high officials'],
['prison', ' prisoner', ' prisoners', ' prisons', ' imprison', ' imprisons', ' imprisoned', ' imprisonment', ' imprisonments'],
['proclaim'],
['profane', ' profaned', ' profaning'],
['profit', ' profits', ' profitable', ' unprofitable', ' useful', ' progress', ' benefit'],
['prosper', ' prospered', ' prospering', ' prosperity', ' prosperous'],
['prostitute', ' prostituted', ' prostitutes', ' harlot', ' whored'],
['prostrate', ' prostrated'],
['proud', ' proudly', ' pride', ' prideful'],
['proverb', ' proverbs'],
['province', ' provinces', ' provincial'],
['provoke', ' provokes', ' provoked', ' provoking', ' provocation'],
['prudence', ' prudent', ' prudently'],
['puffed up', ' puffs up'],
['punish', ' punishes', ' punished', ' punishing', ' punishment', ' unpunished'],
['purple'],
['push', ' pushed', ' pushing'],
['qualify', ' qualified', ' disqualified'],
['queen', ' queens'],
['quench', ' quenched', ' unquenchable'],
['rage', ' rages', ' raged', ' raging', ' enrage'],
['raise', ' raises', ' raised', ' rise', ' risen', ' arise', ' arose', ' got up', ' stir up', ' stirring up', ' stirred up'],
['reap', ' reaps', ' reaped', ' reaper', ' reapers', ' reaping'],
['rebel', ' rebels', ' rebelled', ' rebelling', ' rebellion', ' rebellious', ' rebelliousness'],
['rebuke', ' rebukes', ' rebuked'],
['receive', ' receives', ' received', ' receiving', ' receiver', ' welcome', ' welcomed', ' taken up', ' acceptance'],
['reed', ' reeds'],
['refuge', ' refugee', ' refugees', ' shelter', ' shelters', ' sheltered', ' sheltering'],
['reign', ' reigns', ' reigned', ' reigning'],
['reject', ' rejects', ' rejected', ' rejecting', ' rejection'],
['rejoice'],
['renown', ' renowned', ' famous'],
['report', ' reports', ' reported', ' reputation'],
['reproach', ' reproaches', ' reproached', ' reproaching', ' reproachfully', ' insult', ' insults', ' insulted'],
['rest', ' rests', ' rested', ' resting', ' restless', ' relief'],
['return', ' returns', ' returned', ' returning', ' turning back', ' return back'],
['revere', ' revered', ' reverence', ' reverences', ' reverent', ' respect'],
['reward', ' rewards', ' rewarded', ' rewarding', ' rewarder', ' prize', ' deserve', ' future', ' payment', ' wages'],
['robe', ' robes', ' robed'],
['rod', ' rods'],
['royal', ' royalty', ' kings'],
['ruin', ' ruins', ' ruined'],
['rule', ' rules', ' ruled', ' ruler', ' rulers', ' ruling', ' rulings', ' overrules', ' overruled'],
['run', ' runs', ' ran', ' runner', ' runners', ' running', ' rushed', ' quickly went', ' spilling over', ' be spilled', ' flows', ' leap', ' climb', ' moving swiftly', ' flow'],
['sackcloth'],
['sacred'],
['sacrifice', ' sacrifices', ' sacrificed', ' sacrificing', ' offering', ' offerings'],
['sandal', ' sandals'],
['scepter', ' scepters'],
['scroll', ' scrolls'],
['sea cow'],
['seal', ' seals', ' sealed', ' sealing', ' unsealed'],
['seed', ' semen'],
['seek', ' seeks', ' seeking', ' sought', ' look for', ' searches for', ' seek advice'],
['seize', ' seizes', ' seized', ' seizure'],
['selah'],
['self-control', ' self-controlled', ' controlled self'],
['send', ' sends', ' sent', ' sending', ' send out', ' sends out', ' sent out', ' sending out'],
['serpent', ' serpents', ' snake', ' snakes', ' viper', ' vipers', ' reptile'],
['servant', ' servants', ' hired servant', ' hired servants', ' female servant', ' female servants', ' servant girl', ' servant girls', ' slave', ' slaves', ' slave girl', ' slaved', ' slavery', ' maidservants', ' serve', ' serves', ' served', ' serving', ' service', ' services', ' eyeservice'],
['serve'],
['had relations with', ' lovemaking', ' sleep with', ' sleeps with', ' slept with', ' sleeping with'],
['shadow', ' shadows', ' overshadow', ' overshadowed', ' shade'],
['shame', ' shames', ' shamed', ' shameful', ' shamefully', ' shameless', ' shamelessly', ' ashamed', ' unashamed', ' causing to mock'],
['ewe', ' ewes', ' ram', ' rams', ' sheep', ' sheepfold', ' sheepfolds', ' sheepshearers', ' sheepskins'],
['shepherd', ' shepherds', ' shepherded', ' shepherding', ' chief shepherd'],
['shield', ' shields', ' shielded'],
['shrewd', ' shrewdly'],
['siege', ' besiege', ' besieged', ' besiegers', ' besieging', ' siegeworks'],
['silver'],
['sin offering', ' sin offerings'],
['sister', ' sisters'],
['skull'],
['slay', ' slain', ' kill', ' killed', ' murder', ' murdered', ' murders'],
['slander', ' slanders', ' slandered', ' slanderers', ' slandering', ' slanderous'],
['slaughter', ' slaughters', ' slaughtered', ' slaughtering'],
['asleep', ' fall asleep', ' fell asleep', ' fallen asleep', ' sleep', ' sleeps', ' slept', ' sleeping', ' sleeper', ' sleepless', ' sleepy'],
['snare', ' snares', ' ensnare', ' ensnares', ' ensnared', ' entrap', ' trap', ' traps', ' trapped'],
['snow', ' snowed', ' snowing'],
['sorcerer', ' sorcerers', ' sorceress', ' sorcery', ' sorceries', ' witchcraft', ' someone who talked with the dead'],
['plant', ' plants', ' planted', ' planting', ' implanted', ' replanted', ' transplanted', ' sow', ' sows', ' sowed', ' sown', ' sowing'],
['spear', ' spears', ' spearmen'],
['splendor', ' splendid'],
['staff', ' staffs', ' clubs'],
['statute', ' statutes'],
['stiff-necked', ' stubborn', ' stubbornly', ' stubbornness'],
['storehouse', ' storehouses'],
['strength', ' strengthen', ' strengthens', ' strengthened', ' strengthening', ' strong', ' valor', ' influence'],
['strife', ' disputes', ' quarrel', ' arguing', ' conflict', ' conflicts'],
['strong drink', ' strong drinks'],
['stronghold', ' strongholds', ' fortifications', ' fortified', ' fortress', ' fortresses'],
['stumble', ' stumbles', ' stumbled', ' stumbling', ' reeling'],
['stumbling block', ' stumbling blocks', ' stone of stumbling'],
['subject', ' subjects', ' subjected', ' subject to', ' be subject to', ' subjection', ' be subjected', ' are subjected', ' was subjected', ' were subjected', ' in subjection to', ' subdue', ' forced to become slaves'],
['submit', ' submits', ' submitted', ' submitting', ' submission', ' in submission'],
['suffer', ' suffers', ' suffered', ' suffering', ' sufferings'],
['sulfur', ' sulfurous'],
['sweep', ' sweeps', ' swept', ' sweeping', ' pursued'],
['sword', ' swords', ' swordsmen'],
['tax', ' taxes', ' taxed', ' taxing', ' taxation', ' taxpayers', ' tax collector', ' tax collectors'],
['tax collector'],
['teach', ' teaches', ' taught', ' teaching', ' teachings', ' untaught'],
['teacher', ' teachers', ' Teacher'],
['Ten Commandments'],
['tent', ' tents', ' tentmakers'],
['tenth', ' tenths', ' tithe', ' tithes'],
['tent of meeting'],
['terror', ' terrorize', ' terrorized', ' terrors', ' terrify', ' terrified', ' terrifying', ' frightened', ' panic', ' terrifying events'],
['thief', ' thieves', ' rob', ' robs', ' robbed', ' robber', ' robbers', ' robbery', ' robbing', ' bandits', ' violent'],
['thorn', ' thornbush', ' thornbushes', ' thorns', ' thistle', ' thistles'],
['thresh', ' threshes', ' threshed', ' threshing'],
['threshold', ' thresholds'],
['throne', ' thrones', ' enthroned'],
['time', ' timely', ' times', ' untimely', ' date'],
['grave', ' gravediggers', ' graves', ' tomb', ' tombs', ' burial place'],
['tongue', ' tongues', ' language'],
['torment', ' tormented', ' tormenting', ' tormentors', ' agony', ' torturers'],
['tradition', ' traditions'],
['trample', ' tramples', ' trampled', ' trampling'],
['trance'],
['tremble', ' trembles', ' trembled', ' trembling', ' staggering'],
['trial', ' trials', ' proving'],
['tribe', ' tribes', ' tribal', ' tribesmen'],
['tribulation', ' distresses'],
['tribute', ' contribution', ' fined'],
['trouble', ' troubles', ' troubled', ' troubling', ' troublemaker', ' troublesome', ' disturbing', ' upset', ' shaken', ' hardship'],
['trumpet', ' trumpets', ' trumpeters'],
['tunic', ' tunics'],
['turn', ' turns', ' turn away', ' turns away', ' turn back', ' turns back', ' turned', ' turned away', ' turned back', ' turning', ' turning away', ' direct'],
['understand', ' understands', ' understood', ' understanding', ' thinking'],
['unprofitable'],
['vain', ' vanity', ' futile', ' empty', ' useless', ' meaningless'],
['veil', ' veils', ' veiled', ' unveiled'],
['vine', ' vines'],
['vineyard', ' vineyards'],
['virgin', ' virgins', ' virginity'],
['vision', ' visions', ' envision'],
['voice', ' voices'],
['walk', ' walks', ' walked', ' walking'],
['soldier', ' soldiers', ' warrior', ' warriors', ' army'],
['waste', ' wastes', ' wasted', ' wasting', ' wasteland', ' wastelands', ' becomes weak', ' devastates'],
['watch', ' watches', ' watched', ' watching', ' watchman', ' watchmen', ' watchful', ' take heed', ' beware', ' watch out', ' guard'],
['watchtower', ' watchtowers', ' tower'],
['water', ' waters', ' watered', ' watering'],
['cistern', ' cisterns', ' well', ' wells'],
['wheat'],
['wine', ' wines', ' wineskin', ' wineskins', ' new wine'],
['winepress'],
['winnow', ' winnows', ' winnowed', ' winnowing', ' sift', ' sifting'],
['wise men'],
['wolf', ' wolves', ' wild dogs'],
['womb', ' wombs'],
['word', ' words', ' speech'],
['it is written'],
['wrong', ' wrongs', ' wronged', ' wrongly', ' wrongfully', ' wrongdoer', ' wrongdoing', ' mistreat', ' mistreated', ' hurt', ' hurts', ' hurting', ' hurtful', ' injury', ' harm', ' harmful'],
['yeast', ' leaven', ' leavens', ' leavened', ' unleavened'],
['yoke', ' yokes', ' yoked'],
]
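# Per-language translationWords lists follow. Each inner list appears to group
# the surface forms of one term: the first form is the bare headword, and each
# following variant carries a single leading space (presumably a word-boundary
# convention relied on by whatever code consumes these lists). asm, ben, guj,
# and hin are the ISO 639-3 codes for Assamese, Bengali, Gujarati, and Hindi;
# the first three are still empty placeholders.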
asm_tws = []
ben_tws = []
guj_tws = []
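# Minimal illustrative sketch, not part of the original pipeline: a helper one
# might use to normalize these lists before matching. The name
# clean_term_groups and its cleanup rules (strip whitespace, drop blank and
# duplicated variants like those corrected in the data above) are assumptions.
def clean_term_groups(tws):
    """Return term groups with whitespace stripped and blanks/duplicates removed."""
    cleaned_groups = []
    for group in tws:
        seen = set()
        cleaned = []
        for form in group:
            form = form.strip()
            if form and form not in seen:
                seen.add(form)
                cleaned.append(form)
        if cleaned:
            cleaned_groups.append(cleaned)
    return cleaned_groups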
hin_tws = [['घृणा', ' घिनौने', ' घिनौना'],
['लेपालक', ' गोद लेना', ' दत्तक'],
['व्यभिचार', ' व्यभिचारी', ' व्यभिचारिणी', ' व्यभिचारिणियों'],
['सर्वशक्तिमान'],
['वेदी', ' वेदियों'],
['आमीन', ' सच में'],
['स्वर्गदूत', ' स्वर्गदूतों', ' प्रधान स्वर्गदूत'],
['अभिषेक करना', ' अभिषिक्त', ' अभिषेक'],
['मसीह का विरोधी', ' मसीह के विरोधी'],
['प्रेरित', ' प्रेरितों', ' प्रेरिताई'],
['ठहराए', ' नियुक्त करना', ' नियुक्त किया'],
['जहाज़'],
['वाचा का सन्दूक', ' यहोवा का सन्दूक'],
['प्रायश्चित', ' प्रायश्चित करना', ' प्रायश्चित किया जाए', ' प्रायश्चित किया'],
['प्रायश्चित का ढकना'],
['अधिकारी', ' अधिकारियों'],
['बपतिस्मा देना', ' बपतिस्मा लिया', ' बपतिस्मा'],
['विश्वास', ' विश्वास करे', ' विश्वास किया'],
['विश्वासी'],
['प्रिय'],
['पहिलौठे का अधिकार'],
['निर्दोष'],
['निन्दा', ' निन्दा की', ' निन्दा करना', ' निन्दक'],
['आशीष', ' धन्य', ' आशीर्वाद'],
['लहू'],
['घमण्ड', ' घमण्ड करना', ' घमण्डी'],
['देह', ' शरीरों'],
['बांधना', ' बन्धन', ' बाँधा'],
['नए सिरे से जन्म लेना', ' परमेश्\u200dवर से जन्मा है', ' नए जन्म'],
['भाई', ' भाइयों'],
['बुलाना', ' पुकारे', ' पुकारना', ' कहलाता'],
['सूबेदार', ' सूबेदारों'],
['बच्चे', ' बालक'],
['मसीह', ' मसीहा'],
['मसीही'],
['कलीसिया', ' कलीसियाओं'],
['खतना करना', ' खतना किया', ' खतना'],
['शुद्ध', ' शुद्ध करेगा', ' शुद्ध किया', ' शुद्ध करना', ' शुद्ध होने', ' धुलाई', ' धोया'],
['आज्ञा', ' आज्ञाएँ', ' आज्ञा दी', ' आज्ञाएं'],
['तरस', ' दयालु'],
['दोष लगाना', ' दोषी', ' निन्दा', ' दण्ड की आज्ञा'],
['मान लेगा', ' मानकर', ' अंगीकार'],
['विवेक'],
['पवित्र करना', ' पवित्र ठहरेगा', ' संस्कार'],
['कोने का पत्थर', ' प्रधान'],
['वाचा', ' वाचाओं'],
['करुणा'],
['क्रूस'],
['क्रूस पर चढ़ा', ' क्रूस पर चढ़ाया'],
['श्राप', ' श्रापित', ' श्राप दे', ' कोसता है'],
['सिय्योन की बेटी'],
['प्रभु का दिन', ' यहोवा का दिन'],
['सेवक', ' सेवकों'],
['दुष्टात्मा', ' दुष्ट आत्मा', ' अशुद्ध आत्मा'],
['दुष्टात्माएँ थीं'],
['चेला', ' चेले'],
['ताड़ना', ' ताड़ना देता', ' ताड़ना करते', ' आत्म संयम'],
['ईश्वरीय'],
['प्रभुता'],
['चुना हुआ', ' चुने हुए', ' चुनना', ' चुने लोग'],
['एपोद'],
['सनातन', ' अनन्त', ' अनंत काल'],
['खोजे', ' खोजों'],
['सुसमाचार प्रचारक', ' सुसमाचार सुनानेवाले'],
['बुराई', ' दुष्ट', ' दुष्टता'],
['ऊँचा', ' ऊँचा किया', ' बढ़ाता', ' आनन्द'],
['समझा', ' उपदेश'],
['विश्वास'],
['विश्वासयोग्य', ' विश्वासयोग्यता'],
['भटकनेवाली', ' विश्वासघात'],
['देव', ' देवों', ' देवी'],
['अनुग्रह', ' पक्ष', ' पक्षपात'],
['डर', ' भय', ' डरना'],
['सहभागिता'],
['पवित्र आत्मा से भर गए'],
['माँस'],
['मूर्ख', ' मूर्ख लोग', ' मूर्खता'],
['सर्वदा'],
['क्षमा कर', ' क्षमा करता', ' क्षमा किया', ' क्षमा'],
['छोड़ना', ' छोड़ देता', ' छोड़ दिया', ' त्याग कर'],
['पूरा कर', ' पूरा हुआ'],
['अन्यजाति', ' अन्यजातियों'],
['दान', ' भेंटों'],
['महिमा करे', ' महिमा होती है'],
['महिमा', ' महिमामय'],
['परमेश्\u200dवर'],
['धर्मी', ' भक्ति'],
['परमेश्वर पिता', ' स्वर्गीय पिता', ' पिता'],
['अच्छा', ' भलाई'],
['शुभ समाचार', ' सुसमाचार'],
['अनुग्रह', ' अनुग्रहकारी'],
['दोष', ' दोषी ठहरा'],
['अधोलोक', ' अथाह-कुण्ड'],
['हृदय', ' मन'],
['स्वर्ग', ' आकाश', ' आकाशमण्डल', ' स्वर्गीय'],
['इब्रानी', ' इब्रानियों'],
['नरक', ' आग की झील'],
['महायाजक'],
['पवित्र', ' पवित्रता'],
['पवित्र'],
['पवित्रस्\u200dथान'],
['पवित्र आत्मा', ' परमेश्वर की आत्मा', ' प्रभु की आत्मा', ' आत्मा'],
['आदर', ' आदर करते हैं', ' आदर करना'],
['आशा', ' आशाएं'],
['परमेश्\u200dवर के भवन', ' यहोवा के भवन'],
['दीन', ' विनम्र', ' नम्र बनाया', ' नम्रता'],
['कपटी', ' कपटियों', ' कपट'],
['परमेश्\u200dवर का प्रतिरूप', ' स्वरूप'],
['मसीह में', ' यीशु में', ' प्रभु में', ' उसमें'],
['अधिकारी होना', ' वंश', ' भाग', ' वारिस'],
['अधर्म', ' अधर्मों'],
['निर्दोष'],
['प्रार्थना किया', ' मध्यस्थता की', ' मध्यस्थता'],
['इस्राएल', ' इस्राएली'],
['जलन', ' ईर्ष्या'],
['यीशु', ' यीशु मसीह', ' मसीह यीशु'],
['यहूदी', ' यहूदियों का', ' यहूदियों'],
['न्यायी', ' न्याय करता', ' न्याय', ' निर्णय'],
['दण्ड के दिन'],
['सच्चा', ' न्याय', ' न्याय से'],
['धर्मी ठहराएगा', ' धार्मिकता'],
['परमेश्\u200dवर का राज्य', ' स्वर्ग का राज्य'],
['यहूदियों का राजा'],
['मेम्\u200dना', ' परमेश्\u200dवर का मेम्\u200dना'],
['विलाप', ' विलाप करना', ' विलाप किया'],
['आखरी दिन', ' अन्तिम दिनों', ' अन्त के दिनों'],
['व्यवस्था', ' मूसा की व्यवस्था', ' परमेश्वर की व्यवस्था', ' यहोवा की व्यवस्था'],
['जीवन', ' जीना', ' रहते थे', ' जीवते', ' जीवित'],
['प्रभु', ' प्रभुओं', ' गुरु', ' स्वामी', ' स्वामियों', ' श्रीमान', ' महोदय'],
['प्रभु'],
['प्रभु भोज'],
['प्रभु यहोवा', ' यहोवा परमेश्\u200dवर'],
['प्रेम', ' प्रेम करता है', ' प्रिय', ' प्रेम किया'],
['महामहिमन्'],
['मन्ना'],
['दया', ' दयालु'],
['सेवा करना', ' सेवकाई'],
['चमत्कार', ' आश्चर्यकर्मों', ' अद्भुत', ' आश्चर्य के कामों', ' चिन्ह', ' चिन्हों'],
['परमप्रधान'],
['गन्धरस'],
['नाम', ' नाम पर'],
['नाज़ीर', ' नाज़ीरों', ' नाज़ीर शपथ'],
['नई वाचा'],
['दृष्टान्त', ' दृष्टान्तों'],
['फसह'],
['रखवाला', ' रखवाले'],
['पिन्तेकुस्त', ' सप्ताहों का पर्व'],
['परमेश्\u200dवर की प्रजा', ' मेरी प्रजा'],
['नाश हो', ' नाश हुए', ' नाश हो रहे', ' नाशवान'],
['फरीसी', ' फरीसियों'],
['सामर्थ्य', ' शक्तियाँ'],
['प्रार्थना कर', ' प्रार्थना', ' प्रार्थनाओं', ' प्रार्थना की'],
['पहले से ठहराना', ' पहले से ठहराया'],
['याजक', ' याजकों', ' याजक पद'],
['प्रतिज्ञा', ' प्रतिज्ञाएं', ' प्रतिज्ञा किया'],
['प्रतिज्ञा का देश'],
['भविष्यद्वक्ता', ' भविष्यवाणी', ' भविष्यद्वाणी', ' भविष्यद्वक्तिन'],
['प्रायश्चित'],
['भजन'],
['शुद्ध', ' शुद्धि', ' शुद्धिकरण'],
['रब्बी'],
['छुटकारे के लिये', ' छुड़ा लिया'],
['मेल करना', ' मेल-मिलाप', ' मेल मिलाप कर लिया', ' मिलाप'],
['छुड़ा ले', ' छुटकारा', ' छुटकारा दिलानेवाला'],
['बचे हुए'],
['मन फिराकर', ' पश्चाताप', ' फिराया', ' फिराव'],
['ज्यों का त्यों करना', ' दृढ़ करना', ' पुनः स्थापन'],
['जी उठने'],
['प्रकट करना', ' प्रगट किया', ' प्रकाशन'],
['धर्मी', ' धार्मिक'],
['दाहिना हाथ'],
['सब्त'],
['सदूकी', ' सदूकियों'],
['संत', ' पवित्र जन'],
['उद्धार'],
['पवित्र करना', ' पवित्रता'],
['पवित्रस्थान'],
['शैतान', ' दुष्ट'],
['बचाना', ' बचाता है', ' उद्धार', ' सुरक्षा'],
['उद्धारकर्ता', ' बचाने वाला'],
['शास्त्री', ' शास्त्रियों'],
['पृथक करना'],
['चिन्ह', ' प्रमाण', ' स्मरण कराने वाली बात'],
['पाप', ' पापो', ' पाप करना', ' पापमय', ' पापी', ' पाप करते रहना'],
['पुत्र', ' पुत्रों'],
['परमेश्वर का पुत्र', ' पुत्र'],
['मनुष्य का पुत्र'],
['परमेश्वर की सन्तान'],
['प्राण'],
['आत्मा', ' आत्मिक'],
['पत्थर'],
['आराधनालय'],
['निवासस्थान'],
['मन्दिर'],
['परीक्षा करने', ' परीक्षा'],
['परीक्षा', ' परीक्षण'],
['गवाही', ' गवाही देना'],
['चौथाई देश के राजा'],
['बारहों', ' ग्यारहों'],
['टालना', ' उल्लंघन', ' अपराध'],
['अपराध', ' अपराधों', ' विश्वासघात किया'],
['सच्चा', ' सच्चाई', ' सत्य'],
['विश्वास', ' भरोसा', ' विश्वसनीय', ' भरोसेमंद', ' विश्वसनीयता'],
['अविश्वासी', ' अविश्वासियों', ' अविश्वास'],
['खतनारहित', ' खतनाहीन'],
['अशुद्ध'],
['विश्वासघाती', ' विश्वासघात किया'],
['अभक्त', ' अधर्मी', ' अभक्ति'],
['अपवित्र'],
['अधर्मी', ' अन्याय से', ' अन्याय'],
['उचित नहीं', ' अधर्म'],
['अख़मीरी रोटी'],
['अधर्मी', ' अधर्म'],
['सीधे', ' सिधाई'],
['मन्नत', ' मन्नतें', ' शपथ खाई'],
['परमेश्\u200dवर की इच्छा'],
['बुद्धिमान', ' बुद्धि'],
['साक्षी', ' गवाहों', ' गवाह', ' बातों के देखनेवाले'],
['हाय'],
['परमेश्\u200dवर के वचन', ' परमेश्वर के वचनों', ' यहोवा के वचन', ' प्रभु का वचन', ' पवित्रशास्त्र'],
['सत्य का वचन'],
['काम', ' कर्म', ' कार्य', ' कृत्य'],
['संसार', ' सांसारिक'],
['उपासना'],
['योग्य', ' मूल्यवान', ' अयोग्य', ' निकम्मा'],
['क्रोध', ' रोष'],
['यहोवा'],
['सेनाओं के यहोवा', ' सेनाओं के परमेश्\u200dवर', ' आकाश के गण', ' आकाश का सारा तारागण', ' सेनाओं का प्रभु'],
['धुन', ' उत्तेजित'],
['सिय्योन', ' सिय्योन पर्वत'],
['हारून'],
['हाबिल'],
['एब्यातार'],
['अबिय्याह'],
['अबीमेलेक'],
['अब्नेर'],
['अब्राहम', ' अब्राम'],
['अबशालोम'],
['आदम'],
['अदोनिय्याह'],
['आहाब'],
['क्षयर्ष'],
['अहाज'],
['अहज्याह'],
['अहिय्याह'],
['आई'],
['अमालेक', ' अमालेकी', ' अमालेकियों'],
['अमस्याह'],
['अम्मोन', ' अम्मोनी', ' अम्मोनियों'],
['अम्नोन'],
['एमोरी', ' एमोरियों'],
['आमोस'],
['आमोस'],
['अन्द्रियास'],
['हन्ना'],
['अन्ताकिया'],
['अप्पुल्लोस'],
['अक्विला'],
['अराबा'],
['अरब', ' अरबी', ' अरबियों'],
['अराम', ' अरामी', ' अरामियों', ' अरामी भाषा'],
['अरारात'],
['अर्तक्षत्र'],
['आसा'],
['आसाप'],
['अश्दोद', ' अज़ोतस'],
['अशेर'],
['अशेरा', ' अशेरा के लिए मूरत', ' अशेरा नामक मूर्तियों', ' अश्तोरेत'],
['अश्कलोन'],
['आसिया'],
['अश्शूर', ' अश्शूरी', ' अश्शूरियों', ' अश्शूर राज्य'],
['अतल्याह'],
['अजर्याह'],
['बाल'],
['बाशा'],
['बाबेल'],
['बाबेल', ' बाबेली'],
['बिलाम'],
['बरअब्बा'],
['बरनबास'],
['बरतुल्मै'],
['बारूक'],
['बाशान'],
['बतशेबा'],
['बालजबूल (शैतान)'],
['बेर्शेबा'],
['बनायाह'],
['बिन्यामीन', ' बिन्यामीनी', ' बिन्यामीनियों'],
['बिरीया'],
['बैतनिय्याह'],
['बेतेल'],
['बैतलहम', ' एप्रात'],
['बेतशेमेश'],
['बतूएल'],
['बोआज़'],
['कैसर'],
['कैसरिया', ' कैसरिया फिलिप्पी'],
['कैफा'],
['कैन'],
['कालेब'],
['काना'],
['कनान', ' कनानी', ' कनानियों'],
['कफरनहूम'],
['कर्मेल', ' कर्मेल पहाड़'],
['कसदी', ' कसदियों'],
['करेतियों'],
['किलिकिया'],
['दाऊद के नगर'],
['कुलुस्से', ' कुलुस्सियों'],
['कुरिन्थुस', ' कुरिन्थवासी'],
['कुरनेलियुस'],
['क्रेते', ' क्रेतेवासी', ' क्रेतेवासियों'],
['कूश'],
['साइप्रस'],
['कुरेनी'],
['कुस्रू'],
['दमिश्क'],
['दान'],
['दानिय्येल'],
['दारा'],
['दाऊद'],
['दलीला'],
['अदन', ' अदन की वाटिका'],
['एदोम', ' एदोमी', ' एदोमियों', ' इदूमिया'],
['मिस्र', ' मिस्री', ' मिस्रियों'],
['एक्रोन', ' एक्रोनी'],
['एलाम', ' एलाम लोग'],
['एलीआजार'],
['एलयाकीम'],
['एलिय्याह'],
['एलीशा'],
['एलीशिबा'],
['एनगदी'],
['हनोक'],
['इफिसुस', ' इफिसुस वासी', ' इफिसियों'],
['एप्रैम', ' एप्रैमी', ' एप्रैमियों'],
['एप्रात', ' एप्राती'],
['एसाव'],
['एस्तेर'],
['कूश', ' कूशी'],
['फरात महानद', ' महानद'],
['हव्वा'],
['यहेजकेल'],
['एज्रा'],
['जिब्राईल'],
['गाद'],
['गलातिया', ' गलातियों'],
['गलील', ' गलीली', ' गलीलियों'],
['गत', ' गतवासी', ' गती'],
['गाज़ा'],
['गरार'],
['गशूर', ' गशूरियों'],
['गतसमनी'],
['गिबा'],
['गिबोन', ' गिबोनी', ' गिबोनियों'],
['गिदोन'],
['गिलाद', ' गिलादी', ' गिलादियों'],
['गिलगाल'],
['गिर्गाशियों'],
['गुलगुता'],
['गोलियत'],
['अमोरा'],
['गोशेन'],
['यूनान', ' यूनानी'],
['हबक्कूक'],
['हाजिरा'],
['हाग्गै'],
['हाम'],
['हामात', ' हमाती', ' लीबो हामात'],
['हामोर'],
['हनन्याह'],
['हन्ना'],
['हारान'],
['हेब्रोन'],
['हेरोदेस अन्तिपास'],
['हेरोदियास'],
['हेरोदेस महान'],
['हिजकिय्याह'],
['हिल्किय्याह'],
['हित्ती', ' हित्तियों'],
['हिव्वी', ' हिव्वियों'],
['होरेब'],
['होशे'],
['होशे'],
['दाऊद के घराने'],
['इकुनियुम'],
['इसहाक'],
['यशायाह'],
['इश्माएल', ' इश्माएली', ' इश्माएलियों'],
['इस्साकार'],
['इस्राएल', ' इस्राएली', ' इस्राएलियों', ' याकूब'],
['याकूब (हलफईस का पुत्र)'],
['याकूब (जब्दी का पुत्र)'],
['येपेत'],
['यबूस', ' यबूसी', ' यबूसियों'],
['यहोयाकीन'],
['यहोयादा'],
['यहोयाकीम'],
['यहोराम', ' योराम'],
['यहोशापात'],
['येहू'],
['यिप्तह'],
['यिर्मयाह'],
['यरीहो'],
['यारोबाम'],
['यरूशलेम'],
['यिशै'],
['यित्रो', ' रूएल'],
['ईजेबेल'],
['यिज्रैल', ' यिज्रैली'],
['योआब'],
['योआश'],
['अय्यूब'],
['योएल'],
['यूहन्ना मरकुस'],
['यूहन्ना (प्रेरित)'],
['यूहन्ना (बपतिस्मा देनेवाला)'],
['योना'],
['योनातान'],
['याफा'],
['योराम'],
['यरदन नदी', ' यरदन'],
['यूसुफ (नया नियम)'],
['यूसुफ (पुराना नियम)'],
['यहोशू'],
['योशिय्याह'],
['योताम'],
['यहूदा'],
['यहूदा इस्करियोती'],
['याकूब का पुत्र यहूदा'],
['यहूदिया'],
['कादेश', ' कादेशबर्ने', ' कादेश के मरीबोत'],
['केदार'],
['केदेश'],
['किद्रोन नाले'],
['इस्राएल के राज्य'],
['यहूदा', ' यहूदा का राज्य'],
['कोरह', ' कोरे', ' कोरहियों'],
['लाबान'],
['लेमेक'],
['लाज़र'],
['लिआ:'],
['लबानोन'],
['लिव्यातान'],
['लेवी', ' लेवीय', ' लेवियों'],
['लूत'],
['लूका'],
['लुस्त्रा'],
['माका'],
['मकिदुनिया'],
['सृजनहार'],
['मलाकी'],
['मनश्शे'],
['परमेश्\u200dवर का जन'],
['मार्था'],
['मरियम', ' यीशु की माता'],
['मरियम मगदलीनी'],
['मत्ती', ' लेवी'],
['मादियों', ' मादे'],
['समुद्र', ' महासमुद्र', ' पश्चिम के समुद्र', ' भूमध्य सागर'],
['मेलिकिसिदक'],
['नोप'],
['मेशेक'],
['मेसोपोटामिया', ' अरम्नहरैम'],
['मीका'],
['मीकाएल'],
['मिद्यान', ' मिद्यानी', ' मिद्यानियों'],
['मिर्याम'],
['मीशाएल'],
['मिस्पा'],
['मोआब', ' मोआबी', ' मोआबिन'],
['मोलेक', ' मोलोक'],
['मोर्दकै'],
['मूसा'],
['हेर्मोन पर्वत'],
['जैतून के पहाड़'],
['नामान'],
['नाहोर'],
['नहूम'],
['नप्ताली'],
['नातान'],
['नासरत', ' नासरियों'],
['नबूकदनेस्सर'],
['दक्षिण देश'],
['नहेम्याह'],
['नील नदी', ' मिस्र की नदी'],
['नीनवे', ' नीनवे के लोगों'],
['नूह'],
['ओबद्याह'],
['ओम्री'],
['पद्दनराम'],
['पारान'],
['पौलुस', ' शाऊल'],
['पोर', ' पोर पर्वत', ' बालपोर'],
['परिज्जी'],
['फारस', ' फारसियों'],
['पतरस', ' शमौन पतरस', ' कैफा'],
['फ़िरौन', ' मिस्र के राजा'],
['फिलिप्पुस', ' सुसमाचार प्रचारक'],
['फिलिप्पी', ' फिलिप्पियों'],
['फिलिप्पुस', ' प्रेरित'],
['पलिश्तीन'],
['पलिश्तियों'],
['पीनहास'],
['फीनीके'],
['पिलातुस'],
['पुन्तुस'],
['पोतीपर'],
['प्रिस्किल्ला'],
['रब्बा'],
['राहेल'],
['राहाब'],
['रामाह'],
['रामोत'],
['रिबका'],
['नड़ सागर', ' लाल समुद्र'],
['रहूबियाम'],
['रूबेन'],
['रिम्मोन'],
['रोम', ' रोमी'],
['रूत'],
['खारा ताल', ' मृत सागर'],
['सामरिया', ' सामरी'],
['शिमशोन'],
['शमूएल'],
['सारा', ' सारै'],
['शाऊल (पुराना नियम)'],
['गलील सागर', ' किन्नेरेत की सागर', ' गन्नेसरत की झील', ' तिबिरियुस की झील'],
['सन्हेरीब'],
['शेत'],
['शारोन', ' शारोन का मैदान'],
['शीबा'],
['शेकेम'],
['शेम'],
['शीलो'],
['शिमी'],
['शिनार'],
['सीदोन', ' सीदोनियों'],
['सीलास', ' सिल्वानुस'],
['शमौन'],
['शमौन कनानी'],
['सीनै', ' सीनै पर्वत'],
['सदोम'],
['सुलैमान'],
['स्तिफनुस'],
['सुक्कोत'],
['सीरिया'],
['तामार'],
['तर्शीश'],
['तरसुस'],
['तेरह'],
['थिस्सलुनीके', ' थिस्सलुनीकियों'],
['थोमा'],
['तीमुथियुस'],
['तिर्सा'],
['तीतुस'],
['त्रोआस'],
['तूबल'],
['तुखिकुस'],
['सोर', ' सोर के लोग'],
['ऊर'],
['ऊरिय्याह'],
['उज्जियाह', ' अजर्याह'],
['वशती'],
['जक्कई'],
['सादोक'],
['जब्दी'],
['जबूलून'],
['जकर्याह (नया नियम)'],
['जकर्याह (पुराना नियम)'],
['सिदकिय्याह'],
['सपन्याह'],
['जरुब्बाबेल'],
['सोअर'],
['इस्राएल के बारह गोत्र', ' बारह गोत्र'],
['अथाह कुण्ड', ' अथाह गड्ढे'],
['बबूल'],
['दोष लगाना', ' दोष लगाता है', ' दोषी', ' दोष लगा रहे है', ' आरोप लगाने वाला', ' आरोप लगाने वाले', ' आरोप', ' आरोपों'],
['मानना', ' मान लेता है', ' मानकर', ' मान लेता', ' मान लिया'],
['निर्दोष', ' निर्दोष ठहराना', ' छूट जाता'],
['प्रशासन', ' प्रशासक', ' प्रशासित'],
['चिताना'],
['विरोधी', ' द्रोहियों', ' बैरी', ' शत्रुओं'],
['दुःखित', ' क्लेश देना', ' मारेगा', ' दुःख', ' क्लेश'],
['आयु', ' युगों', ' वृद्ध'],
['व्याकुल', ' ललकारने', ' घबरा गया'],
['दान'],
['धूप की वेदी', ' धूप वेदी'],
['चकित', ' विस्मय', ' अचम्भा किया', ' अचम्भा', ' अचम्भा करके', ' अचम्भे में आ जाना', ' आश्चर्यकर्मों', ' चमत्कार', ' चमत्कारों'],
['दूत', ' दूतों', ' प्रतिनिधि', ' राजदूत'],
['क्रोध', ' क्रोधित हुआ', ' क्रोधित'],
['पीड़ा'],
['धनुर्धारी', ' धनुर्धारियों'],
['हथियार', ' शस्त्रों का घर'],
['अहंकारी', ' अभिमान करके', ' अहंकार'],
['राख', ' धूल'],
['मण्डली', ' सभाओं', ' इकट्ठा करना', ' इकट्ठा किया'],
['नियुक्त करना', ' बांटा', ' ठहराए', ' भाग', ' भागों', ' फिर दे देना'],
['भटक', ' भटक जाते हैं', ' भटक गए', ' भटका देना', ' भटका दिया', ' भटकना'],
['बदला लेना', ' पलटा लेनेवाला', ' पलटा लिया', ' बदला लेने', ' बदला', ' पलटा लेना'],
['भय', ' भययोग्य'],
['कुल्हाड़ा', ' कुल्हाड़े'],
['भोज'],
['जौ'],
['उजाड़'],
['टोकरी', ' टोकरियाँ', ' टोकरियाँ भरकर'],
['सहना', ' सह लेता है', ' उठाए', ' ढोनेवाला'],
['रीछ', ' रीछनियों'],
['पशु', ' पशुओं'],
['विनती', ' विनती की', ' कंगाल'],
['पकड़वाना', ' पकड़वानेवाले', ' पकड़वाया', ' विश्वासघात किया', ' पकड़नेवाला'],
['दिन', ' दिनों'],
['पहर', ' घंटे'],
['महीने', ' महीनों', ' महीने के'],
['पहर (बाइबल का समय)'],
['सप्ताह', ' सप्ताहों'],
['वर्ष', ' वर्षों'],
['दोष', ' निर्दोष'],
['लहू बहाना'],
['मिटा दे', ' मिटा देता', ' मिटाया जाता', ' मिटा डाले', ' मिटा', ' मिट गए'],
['निडर', ' निडर होकर', ' साहस', ' साहसी'],
['जीवन की पुस्तक'],
['दण्डवत्', ' झुक गया', ' दण्डवत् किया', ' झुकने', ' दण्डवत् करना', ' दण्डवत् करे', ' दण्डवत् करते रहे'],
['धनुष और तीर'],
['रोटी'],
['झिलम', ' झिलमें', ' चपरास'],
['श्वांस', ' श्वांस फूंकना', ' साँस लेता है', ' श्वांस फूँक दिया', ' सांस लेना'],
['घूस', ' घूस दिया', ' घूस लेते हैं'],
['दुल्हन', ' वधू', ' विवाह'],
['दुल्हा', ' दुल्हे'],
['पीतल'],
['बोझ', ' बोझ से दबे', ' भारी'],
['होमबलि', ' हवन', ' अग्निदान'],
['मिट्टी देना', ' दबा देता है', ' गाड़े गए', ' मिट्टी दे', ' मिट्टी देने'],
['ऊँट', ' ऊँटों'],
['बन्दी बनाना', ' बन्दी', ' वश में करना', ' मोहित', ' बँधुआई'],
['निकालना', ' निकाल दिया', ' बाहर निकालने', ' फेंक देना', ' फेंककर'],
['उठा लिया', ' जा लिया', ' दौड़ गया'],
['देवदारू', ' देवदारों', ' देवदारू की लकड़ी'],
['नाम लिखाई'],
['भूसी'],
['रथ', ' रथों', ' रथियों'],
['करूब', ' करूबों'],
['प्रधान', ' प्रधानों'],
['प्रधान याजकों'],
['इतिहास'],
['निवासी', ' निवासियों', ' नागरिकता'],
['कुल', ' कुलों'],
['पहनाना', ' पहनना', ' वस्त्र', ' अंगरखा', ' उतारना'],
['शान्ति', ' शान्ति दी', ' शान्तिदायक', ' शान्ति देनेवाला', ' शान्ति देनेवाले', ' शान्ति नहीं मिली'],
['सेनापति', ' सरदारों'],
['करना', ' करता है', ' किया है', ' करते', ' प्रतिज्ञा'],
['साथी', ' संगी'],
['गर्भ धारण', ' गर्भवती', ' गर्भवती', ' गर्भवती होना'],
['रखैल', ' रखैलियों'],
['भरोसा', ' भरोसा करना', ' आत्मविश्वास से'],
['दृढ़ करने', ' दृढ़ करता', ' पक्की की', ' प्रमाण देने'],
['भस्म कर देगा', ' खाए', ' भस्म किया', ' भस्म करती जाएगी'],
['तुच्छ जाने', ' तुच्छ'],
['बिगड़कर', ' नाश होती है', ' बिगड़ गए', ' भ्रष्ट', ' सड़ाहट'],
['महासभा', ' सभाओं'],
['सम्मति', ' सम्मति दी', ' मंत्री', ' सम्मति देनेवालों', ' युक्ति', ' युक्ति करनेवाला', ' मंत्रियों'],
['हियाव', ' हियाव बांधे', ' प्रोत्साहित', ' प्रोत्साहन', ' ढाढ़स बाँधो', ' उदास', ' निरुत्साहित', ' उदास करना', ' निरुत्साहित करना'],
['आँगन', ' आँगनों'],
['गाय', ' गायें', ' बैल', ' बैलों', ' बछड़ा', ' बछड़ों', ' पशुओं', ' बछिया'],
['उत्\u200dपन्\u200dन', ' सर्जन करना', ' सृष्टि की', ' सृष्टि', ' सृजनहार'],
['प्राणी', ' प्राणियों'],
['दोष', ' अपराधों', ' कुकर्मी', ' कुटिल जन'],
['मुकुट', ' मुकुट', ' मुकुट पहनाना', ' मुकुट रखा'],
['पुकार', ' चिल्लाहट', ' पुकारकर', ' रोना', ' दोहाई', ' जय-जयकार'],
['पिलानेहारा', ' पिलानेवाले'],
['परदा', ' परदे'],
['नाश किया', ' नाश करे', ' काटकर'],
['सनौबर'],
['अंधियारा'],
['मरे', ' मर जाए', ' मर गया', ' मरे हुओं', ' प्राणनाशक', ' मरी हुई', ' मृत्यु', ' मरेंगे', ' मृत्यु के भय'],
['धोखा', ' धोखा दिया', ' छलता', ' छली', ' धोखेबाज', ' धोखेबाजों', ' छलपूर्ण', ' धूर्तता से', ' छल में'],
['वर्णन', ' वाणी', ' ठहराया', ' कहता है', ' घोषणाओं'],
['आज्ञा', ' आज्ञाओं', ' आदेश दी'],
['समर्पण करे', ' समर्पण करना', ' समर्पित', ' समर्पण'],
['हिरन', ' हिरनी', ' हिरनियाँ', ' मृगनी', ' मृग', ' मृगों'],
['अशुद्ध', ' अशुद्ध कर डाला', ' अपवित्र करने', ' अपवित्र किए', ' अशुद्ध हुई', ' अशुद्ध हो गया', ' अशुद्ध हो गए'],
['आनन्द', ' प्रसन्\u200dन', ' सुखी', ' मनोहर'],
['छुड़ाना', ' छुड़ाया', ' छुड़ाया जाना', ' छुटकारा दिलाने वाला', ' छुटकारा'],
['उतरेगा', ' ढलान', ' उतर गया', ' उतरते', ' वंश'],
['अशुद्ध करना', ' अशुद्ध किया', ' अपवित्र'],
['जंगल', ' छोड़कर', ' सुनसान', ' छोड़ देना', ' जंगलों'],
['उजड़', ' उजाड़', ' उजड़े'],
['ठहराया', ' नियुक्त', ' दशा', ' पहले से ठहराया'],
['नाश', ' नाश करता', ' नाश किया', ' नाश करनेवाला', ' नाश करनेवाले', ' सत्यानाश करना'],
['घृणा', ' घृणा करना', ' घृणित'],
['नाश होगा', ' उजाड़ दिया', ' नाश करता', ' उजाड़'],
['भस्म हो जाएगा', ' भस्म करती', ' भस्म हुए', ' भस्म करनेवाली'],
['पहचान', ' जान सकता', ' समझदार', ' विवेक-शक्ति'],
['अपमान', ' अपवित्र ठहराती', ' अपवित्र ठहराया', ' लज्जा की बात'],
['निरादर', ' अपमान करता', ' अनादर किया', ' नीच'],
['अवज्ञा', ' अवज्ञा किया', ' आज्ञा का उल्लंघन', ' अवज्ञाकारी'],
['छितरा', ' तितर-बितर होकर'],
['भावी कहने', ' भावी कहनेवालों', ' भूत सिद्धिवालों', ' ज्योतिषी'],
['त्यागना'],
['धर्मोपदेश'],
['गदहा', ' खच्चर'],
['दण्ड'],
['चौखट'],
['पण्डुकी', ' कबूतर'],
['स्वप्न'],
['अर्घ'],
['मतवाले', ' पियक्कड़'],
['गोबर', ' खाद'],
['उकाब', ' उकाबों'],
['पृथ्वी', ' मिट्टी का', ' पार्थिव'],
['प्राचीन', ' प्राचीनों'],
['बना रहेगा', ' धीरज धरेगा', ' सहते', ' स्थिर रहता', ' धीरज'],
['सेवा करना', ' दास बनाना', ' दासत्व', ' बन्धन', ' वश में', ' बाँधा'],
['डाह', ' लालच'],
['कुकर्मी', ' कुकर्मियों', ' बुराई'],
['बँधुआई', ' बन्धुओं', ' बन्दी करके'],
['प्रफुल्लित', ' मगन', ' प्रसन्\u200dन', ' आनन्द करनेवाले'],
['मुँह', ' के सामने', ' चेहरे', ' मुँह के बल गिरे'],
['झूठा भविष्यद्वक्ता', ' झूठे भविष्यद्वक्ताओं'],
['अधर्मी साक्षी', ' अन्यायी साक्षी', ' झूठी गवाही', ' झूठा साक्षी', ' झूठे गवाह'],
['परिवार', ' घरानों'],
['अकाल'],
['उपवास', ' उपवास किया', ' उपवास करना'],
['मूलपिता', ' पूर्वजों', ' पिता', ' बापदादे', ' जन्मा', ' जन्माता', ' पूर्वज', ' पुरखाओं'],
['पर्व', ' पर्वों', ' भोज'],
['मेलबलि', ' मेलबलियों'],
['पर्व'],
['अंजीर', ' अंजीरों'],
['सनौबर'],
['आग', ' लुकटियों', ' करछों', ' चिमनियों', ' भट्ठा', ' अंगीठियाँ'],
['पहलौठे'],
['पहली उपज'],
['मछुए', ' पकड़नेवाले'],
['भेड़-बकरियों', ' झुण्ड', ' मण्डली', ' गाय-बैलों'],
['जल-प्रलय', ' बाढ़ें', ' जल में डूब कर', ' बाढ़', ' जल से डूब जाएगा'],
['बाँसुरी', ' सीटी बजाने का यंत्र'],
['पाँवों की चौकी'],
['परदेशी', ' फूट करा', ' अलग किए', ' पराई', ' परदेशियों'],
['पहले से जान लिया', ' पूर्व ज्ञान'],
['व्यभिचार', ' अनैतिक', ' परस्त्रीगमन'],
['मिला', ' नींव डाली', ' प्रतिष्ठापक', ' नींव', ' नीवें'],
['सोता', ' सोते', ' झरना', ' झरने', ' उमड़ता'],
['लोबान'],
['स्वतंत्र', ' छुड़ाता', ' छूट गया', ' मुक्त', ' स्वतंत्रता', ' सेंत-मेंत', ' स्वतंत्र मनुष्य', ' स्वेच्छा'],
['स्वेच्छाबलि', ' स्वेच्छाबलियों'],
['फल', ' फलों', ' फलवन्त', ' निष्फल'],
['भट्ठा'],
['फाटक', ' फाटकों', ' बेंड़ों', ' द्वारपाल', ' चौकीदारों', ' द्वार के खम्भे', ' द्वार'],
['पीढ़ी'],
['रपाई', ' दानव'],
['बाँधे', ' बांधा हुआ'],
['बीनने', ' बीनता', ' बीना हुआ', ' इकट्ठा करना'],
['बकरा', ' बकरियों', ' बकरियों की खालें', ' बलि का बकरा', ' बच्चे'],
['सोना', ' सोने'],
['कानाफूसी', ' गपशप', ' कानाफूसी करनेवाला'],
['शासन करना', ' प्रभुता', ' प्रधानताएँ', ' राज्यपाल', ' राज्यपालों', ' हाकिम', ' सूबेदारों'],
['अन्न', ' किनकों', ' खेतों'],
['अन्नबलि', ' अन्नबलियों'],
['अंगूर', ' दाख', ' दाख की लता'],
['कराहते', ' रोना', ' कराहना'],
['दोषबलि', ' दोषबलियों'],
['ओलों', ' ओले गिरना', ' ओला-वृष्टि', ' ओलों का तूफ़ान'],
['हाथ', ' हाथों', ' सौंपना', ' के द्वारा', ' पर हाथ रखना', ' पर हाथ लगाता', ' दाहिना हाथ', ' दाहिने हाथ', ' के हाथ से'],
['लटका हुआ', ' लटकाए', ' लटकाया गया', ' लटकाकर', ' पर्दों', ' लटका दिया'],
['कठिन', ' कड़ा', ' सबसे कठिन', ' कठोर', ' कठोर कर लेता है', ' हठीला', ' कठोर रहेगा', ' कठोरता'],
['वीणा', ' वीणाओं', ' बजानेवाला', ' वीणा बजानेवाले'],
['उपज', ' फसल', ' लवनी', ' कटनी', ' लवनेवाला', ' लवनेवालों'],
['घमण्ड'],
['सिर', ' सिरों', ' माथे', ' माथों', ' चन्दुए', ' टोपियाँ', ' गुलूबंद', ' सिर कटवा दिया'],
['चंगा', ' चंगा किया', ' चंगा करना', ' चंगा हो गया', ' चंगा करने', ' चंगा करनेवाला', ' सेहत', ' बीमार'],
['उत्तराधिकारी', ' वारिस'],
['ऊँचे स्थान', ' ऊँचे स्थानों'],
['पवित्र नगर', ' पवित्र नगरों'],
['मधु', ' मधु का छत्ता'],
['खुर', ' खुरों', ' टापों'],
['सींग', ' सींगों', ' सींग वाले'],
['भय', ' चकित', ' बुरे', ' बुरी तरह से', ' भयातुर', ' भयानक'],
['घोड़ा', ' घोड़े', ' युद्ध का घोडा', ' युद्ध के घोड़ों', ' सवार होकर'],
['सवार', ' सवारों'],
['घड़ी', ' घंटे'],
['घर', ' घरों', ' छत के ऊपर', ' छतों', ' भण्डार', ' भण्डारों', ' घर का कारबार करनेवाले'],
['कुटुम्ब', ' घराना'],
['लज्जित करना', ' अपमानित', ' दीनता'],
['मूर्ति', ' मूरतें', ' मूर्तिपूजक', ' मूर्तिपूजकों', ' मूर्ति-पूजक', ' मूर्ति पूजा'],
['मूरत', ' मूरतों', ' मूर्ति खोदकर', ' खोदी हुई मूरतें', ' धातु की मूरतें ढालकर', ' मूर्ति', ' खुदी हुई मूरत', ' खुदी हुई मूर्तियों', ' धातु की खुदी हुई मूरत', ' ढली हुई मूर्तियाँ'],
['अनुकरण करना', ' के समान चाल चलने', ' जैसी चाल चलो'],
['धूप'],
['पूछना', ' जांच करना', ' जाँच-पड़ताल', ' पूछ-ताछ'],
['निर्देश', ' निर्देश दिए', ' निर्देश देते रहना', ' अनुदेश', ' निर्देशक'],
['खराई'],
['भेद करना', ' अनुवाद करना', ' समझा दिया', ' व्याख्या', ' अर्थ बताना', ' व्याख्याओं', ' फल का बतानेवाला'],
['यहूदियों', ' यहूदी'],
['आनन्द', ' आनन्दित', ' आनन्द से', ' आनन्दमय', ' आनंद लिया', ' सुख-विलास'],
['यहूदी धर्म', ' यहूदी मत'],
['न्यायी', ' न्याय'],
['कुटुम्बी', ' भाइयों', ' जन्म-भूमि', ' कुटुम्बियों'],
['प्रकार', ' भांति-भांति', ' करुणा', ' उपकार'],
['राजा', ' राजाओं', ' राज्य', ' राज्यों', ' राजसी'],
['राज्य', ' राज्यों'],
['चुम्बन', ' चुम्बनों', ' चूमा', ' चूमना'],
['जानना', ' जानता है', ' जानता था', ' ज्ञान', ' ज्ञात', ' प्रकट करना', ' ज्ञात करता है', ' अज्ञात', ' पहले से जानना', ' पूर्वज्ञान'],
['परिश्रम', ' परिश्रम करे', ' परिश्रम किया', ' मजदूर', ' मजदूरों'],
['प्रसव', ' जच्चा की सी', ' जच्चा की सी पीड़ाएँ'],
['दीपक', ' दीपकों'],
['दीवट', ' दीवटों'],
['व्यवस्था', ' व्यवस्थाएं', ' व्यवस्था देनेवाला', ' अपराधी', ' अपराधियों', ' मुकदमा', ' वकील', ' सिद्धांत', ' सैद्धांतिक', ' सिद्धांत'],
['उचित', ' व्यवस्था की रीति पर', ' उचित नहीं'],
['अधर्मी', ' अधर्म'],
['ज्योतिषी', ' भावी बतानेवालों'],
['चीता', ' चीतों'],
['कोढ़ी', ' कोढ़ियों', ' कोढ़', ' कोढ़'],
['पत्री', ' पत्र', ' चिट्ठियाँ'],
['उजियाला', ' ज्योतियों', ' बिजली', ' बिजलियाँ', ' उजियाले', ' सूर्य के उजियाले', ' सांझ', ' प्रकाशित', ' ज्योतिर्मय'],
['के समान', ' एक मन', ' सदृश करना', ' समानता', ' समता', ' वैसे ही', ' बराबर', ' से अलग'],
['सिंहों', ' सिंह', ' सिंहनी'],
['पशु'],
['टिड्डी', ' टिड्डियाँ'],
['कमर'],
['चिट्ठियाँ', ' चिट्ठियाँ डालकर'],
['प्रेमी', ' यारों'],
['दीन', ' छोटा होगा', ' दीनता'],
['लालच', ' अभिलाषाओं', ' मोहित', ' लालसा करना', ' लुचपन'],
['वीणा', ' तार वाला बाजा', ' सारंगियाँ'],
['जादू', ' जादू टोना', ' तांत्रिक', ' जादूगरों'],
['न्यायाधीश', ' हाकिमों'],
['बड़ा ठहराएगा'],
['प्रबंधक', ' प्रबंधकों', ' भण्डारी', ' भण्डारियों', ' भंडारीपन'],
['अन्नबलि'],
['मध्यस्थ'],
['ध्यान', ' ध्यान करता'],
['नम्र', ' नम्रता'],
['पिघलना', ' पिघल गया', ' पिघलाई', ' पिघल जाता', ' ढालकर'],
['अंग', ' अंगों'],
['स्मरण दिलानेवाले', ' दान स्मरण के लिये'],
['दूत', ' दूतों'],
['सामर्थ्य', ' सामर्थी', ' बहुत सामर्थी', ' पराक्रम से'],
['मन', ' मनों', ' मन में लेना', ' सुधि लेना', ' सुधि दिला', ' याद दिलाता है', ' स्मरण किए गए', ' स्मरण', ' स्मरण दिलाने वाला', ' स्मरण दिलाना', ' एक मन'],
['निन्दा', ' ठट्ठा करता', ' उपहास करके', ' उपहास', ' ठट्ठा करनेवाला', ' ठट्टा करनेवालों', ' हँसी उड़ाने', ' कलंक लगाते', ' उपहास करने लगे', ' घृणा करते', ' ठट्ठे में उड़ाया'],
['ढालना', ' साँचा', ' ढालकर', ' बना रहा था', ' बनानेवाला', ' फफूंदी'],
['शोक करना', ' विलाप कर रहा है', ' विलाप करने लगे', ' शोक करता हुआ', ' शोक करनेवाला', ' विलाप करनेवालों', ' उदासी', ' शोक'],
['बढ़ाएगा', ' बढ़ाता रहता', ' बढ़ गए', ' बढ़ने लगी', ' बढ़ती करेगा'],
['भेद', ' भेदों'],
['जाति', ' जातियों'],
['पड़ोसी', ' पड़ोसियों', ' पडोस', ' आस पास के'],
['नया चाँद', ' नये चाँद'],
['प्रतिष्ठित', ' रईसों', ' धनी मनुष्य', ' प्रधान लोग'],
['बांज वृक्ष', ' बांज वृक्षों'],
['शपथ', ' शपथ खा', ' शपथ खाए', ' शपथ खाकर', ' शपथ खाके', ' की शपथ खाता है'],
['आज्ञा मानना', ' आज्ञा दिया', ' आज्ञाकारी', ' अवज्ञा', ' अवज्ञाकारी'],
['वंश'],
['तेल'],
['जैतून'],
['ऊँचे स्थान पर', ' आकाश में'],
['अत्याचार करने', ' अंधेर करता', ' सताए हुए', ' अंधेर करते', ' अंधेर', ' ताड़ना', ' अंधेर करनेवाले', ' अत्याचार करनेवालों'],
['नियुक्त', ' ठहराना', ' सामान्य', ' नियुक्ति'],
['विधि', ' विधियों'],
['देखरेख', ' पर्यवेक्षण', ' पर्यवेक्षक', ' अध्यक्ष', ' पर्यवेक्षक'],
['आ पड़े', ' पकड़ लेना', ' आ पकड़ा', ' पकड़ लिया'],
['अन्यजाति', ' अन्यजातियां'],
['महल', ' महलों'],
['खजूर', ' हथेलियों'],
['क्षमा कर', ' क्षमा'],
['आंशिक', ' पक्ष करना', ' पक्षपात'],
['धीरजवन्त', ' धीरज से', ' सहनशीलता', ' अधीर'],
['कुलपति', ' कुलपतियों'],
['शान्ति', ' शांतिपूर्ण', ' शांतिपूर्वक', ' शंतियोग्य', ' शांति बनाने वाले'],
['मेलबलि', ' मेलबलियों'],
['जाति', ' लोगों', ' लोग', ' प्रजा'],
['सिद्ध', ' सिद्ध कराई', ' सिद्ध करनेवाले', ' सिद्धता', ' खरे'],
['सताएँ', ' सताए जाते', ' सताता', ' उपद्रव', ' उत्पीड़न', ' सतानेवाला', ' पीछा करनेवाले'],
['धीरज धरना ', ' धीरज'],
['टेढ़ी', ' कुटिलता', ' विकृति', ' अन्याय', ' उलट फेर', ' बिगाड़ने', ' टेढ़ी-मेढ़ी', ' उलट-पुलट कर दिया', ' बहकाते'],
['छेदना', ' बेधाता', ' बेधा', ' भेदता हुआ'],
['सूअर', ' सूअरों', ' सूअर का माँस', ' सुअर'],
['स्तंभ', ' लाटें', ' खम्भा', ' खम्भों'],
['गड्ढा', ' गड्ढे', ' फंदों'],
['मरी', ' विपत्तियों'],
['गिड़गिड़ाना', ' विनती', ' मुकद्दमा', ' वाद विवाद करना', ' गिड़गिड़ाकर', ' निवेदन करता', ' निवेदन'],
['रेहन', ' का वचन दिया', ' प्रतिज्ञाओं'],
['हल', ' हल चलाना', ' हल चलाया', ' हल जोतने', ' हलवाहों', ' जोतनेवाला', ' किसान', ' हल की फाल', ' अजोत'],
['अनार', ' अनारों'],
['अधिकार में लेना', ' मोल लिया', ' कब्ज़ा था', ' अधिकार में रखना', ' अधिकार', ' सम्पति', ' निकाल देना'],
['स्तुति', ' भजन', ' स्तुति की', ' स्तुति करते', ' बड़ाई की बात'],
['प्रचार करना', ' प्रचार किया', ' प्रचार', ' प्रचारक'],
['अनमोल'],
['आहेर', ' आहेर करना'],
['राजकुमार', ' राजकुमारी', ' राजकुमारी', ' राजकुमारियां'],
['बन्दीगृह', ' बन्दी', ' बन्दी', ' बन्दीगृह', ' कैद में', ' कैद में', ' बन्दी बनाना', ' बन्दी बनना', ' बन्दी बनना'],
['प्रचार करना', ' प्रचार करना', ' प्रचार', ' घोषणा', ' उद्घोषणा', ' उद्घोषणा'],
['अशुद्ध करना', ' अपवित्र', ' अशुद्ध करना'],
['लाभ', ' लाभ', ' लाभदायक'],
['समृद्ध होना', ' समृद्धि', ' समृद्ध', ' समृद्धि', ' समृद्ध'],
['वैश्या', ' वेश्यावृत्ति', ' व्यभिचारिणी', ' व्यभिचारिणी', ' व्यभिचारिणी', ' व्यभिचारिणी '],
['दण्डवत् करना', ' दण्डवत् किया'],
['घमण्ड', ' घमण्ड से', ' बड़ाई', ' घमण्ड भरी'],
['नीतिवचन', ' नीतिवचन'],
['प्रांत', ' प्रांत', ' प्रादेशिक'],
['भड़काना', ' भड़काना', ' भड़काना', ' भड़काना', ' भड़काना'],
['फूल जाना', ' फूल जाना'],
['दण्ड देना', ' दंडित करने', ' दंडित करने', ' दंडित करने', ' दंड', ' अदंडित'],
['बैंगनी'],
['धक्का देना', ' धक्का दिया', ' धक्का'],
['योग्य करना', ' योग्य', ' निकम्मा ठहरा'],
['रानी', ' रानियाँ'],
['बुझाना', ' बुझेगी', ' नहीं बुझती'],
['क्रोध', ' क्रोधित', ' भड़का', ' क्रोध भड़काता'],
['खड़ा करना', ' उठाना', ' उठाया', ' खड़ा होना', ' उठना', ' उठा', ' उठा था'],
['लवनी', ' लवनेवाला', ' काटा', ' लवनेवाले', ' लवनेवालों', ' लवती'],
['बलवा ', ' बलवा', ' बलवा किया', ' विद्रोह', ' विद्रोह', ' बलवाई', ' विद्रोह शीलता'],
['झिड़कना', ' घुड़कने', ' डांटा'],
['नरकट', ' कांसे'],
['शरण', ' शरणार्थी', ' शरणार्थियों', ' मण्डप', ' मण्डपों', ' आड़', ' आड़ देना'],
['राज करना', ' राज्य करता है', ' राज्य करता', ' राज्य कर रहा है'],
['परित्याग', ' परित्याग', ' परित्याग', ' परित्याग', ' परित्याग'],
['आनन्द', ' आनन्द मनाना', ' आनन्दित हुआ', ' आनन्द करें'],
['कीर्ति', ' कीर्तिमान'],
['समाचार', ' समाचारों', ' समाचार दिया'],
['निन्दा', ' अपमान', ' निन्दा', ' निन्दा', ' नामधराई'],
['विश्राम करना', ' विश्राम', ' विश्राम किया', ' विश्राम', ' विश्राम रहित'],
['लौट आना', ' लौट आना', ' लौटकर', ' लौट रहे'],
['श्रद्धा', ' भय माना', ' भय मानकर', ' सम्मानों', ' भक्तियुक्त'],
['प्रतिफल', ' प्रतिफल', ' प्रतिफल देना', ' पुरस्कृत', ' प्रतिफल देने बाला'],
['चोगा', ' वस्त्र', ' चोगा पहनाना'],
['लाठी', ' छड़ें'],
['राजकीय', ' राजसी गौरव'],
['नष्ट करना', ' नष्ट करना', ' बर्बाद'],
['शासन', ' नियम', ' शासित', ' शासक', ' शासकों', ' निर्णयों', ' फैसलों', ' अतिरंजित'],
['दौड़ना', ' दौड़ना', ' दौड़ना', ' दौड़ना', ' दौड़ना'],
['टाट'],
['पवित्र'],
['बलिदान करना', ' बलि', ' बलिदान किये', ' बलिदान करना', ' भेंट', ' भेंट की वस्तुओं'],
['जूता', ' जूतियां'],
['राजदण्ड', ' राजदण्डों'],
['पुस्तक', ' दस्तावेज़ों'],
['मेढ़ों\xa0और समुद्री गाय'],
['मुहर', ' मुहर', ' मुहर लगाना', ' खुली'],
['बीज', ' वीर्य'],
['ढूँढ़े', ' ढूँढ़ते हैं', ' खोजना', ' मांगा'],
['बन्दी बनाना', ' बन्दी', ' जब्त', ' कब्जा'],
['सेला'],
['संयम', ' संयम', ' आत्म नियंत्रित', ''],
['भेजना', ' भेजा जाता', ' भेजा गया', ' भेजा जाना', ' बाहर भेजना', ' बाहर भेजना', ' बाहर भेजा गया', ' बाहर भेजा जाना'],
['सर्प', ' साँपों', ' साँप', ' साँप', ' साँप', ' साँपों'],
['सेवा करना', ' दास बनाना', ' दास बना दिया', ' सेवक', ' सेवकों', ' दास', ' दासों', ' सेवा करना', ' दासत्व', ' दासी'],
['सेवा करना', ' सेवा करना', ' सेवा किया', ' सेवा कर रहा है', ' सेवा', ' सेवा', ' मजबूर होकर काम'],
['के साथ संबंध थे', ' प्यार में रत होना', ' साथ सोना', ' साथ सोता है', ' के साथ सोया', ' के साथ सोना'],
['छाया', ' छाया', ' आच्छादन', ' आच्छादन'],
['नामधराई', ' लज्जा', ' शर्म', ' शर्मपूर्ण', ' शर्मपूर्णता से', ' शर्महीनता', ' शर्महीनता से', 'लज्जित', ' लज्जित नहीं'],
['भेड़', ' भेड़ें', ' मेंढ़ा', ' मेंढ़ा', ' भेड़', ' भेड़शाला', ' भेड़शाला', ' भेड़-बकरी', ' भेड़-काज'],
['चरवाहे', ' चरवाहा', ' चरवाहा', ' चरवाही'],
['ढाल', ' ढाल', ' ढाल'],
['धूर्त ', ' चतुरता'],
['घेर', ' घेर लेना', ' घेर लिया', ' घेर करनेवाला', ' घेर लेना', ' मोर्चा बांधना'],
['चाँदी'],
['पापबलि', ' पापबलि'],
['बहन', ' बहनों'],
['खोपड़ी'],
['घात करना', ' घात किए गए'],
['बदनामी', ' बदनामी', ' बदनाम', ' निंदक', ' निंदा', ' नृशंस'],
['वध करना', ' वध करना', ' वध करना', ' वध करना'],
['नींद', ' सो जाना', ' सो गए थे', ' सोना', ' सोना', ' “उसे नींद आ गई”', ' सोना', ' सोना', ' नींद ना आना', ' नींद'],
['फंदा', ' फंदे', ' फंसाना', ' फंसाना', ' फँसना', ' फंसाना', ' जाल', ' जालें', ' फंस गए'],
['हिम', ' हिम पड़ा', ' बर्फ गिरने के समय'],
['जादूगर', ' जादूगर', ' जादूगर', ' जादूगर', ' जादूगर', ' जादू टोना'],
['पौधे', ' पौधे', ' लगाए गए', ' रोपण', ' प्रत्यारोपित', ' पुनर्नामित', ' प्रत्यारोपित', ' बोना', ' बोआ', ' बोया', ' बोया', ' बुवाई'],
['भाला', ' भाले', ' भाला धारण करनेवाला सिपाही'],
['वैभव'],
['लाठी', ' लाठी'],
['विधि', ' विधियां'],
['हठीले', ' हठीला', ' कठोर होकर', ' कठोरता'],
['भण्डार', ' भण्डार'],
['बल', ' बलवन्त करना', ' दृढ़ किया', ' मजबूत', ' हियाव बाँधा'],
['कलह'],
['दाखमधु', ' दाखमधु'],
['गढ़', ' गढ़ों', ' मजबूत गढ़', ' दृढ़', ' मजबूत गढ़', ' गढ़ों'],
['ठोकर', ' ठोकर खाए', ' ठोकर खाया', ' ठोकर खाता'],
['ठोकर', ' ठोकर का कारण', ' ठोकर के कारण', ' ठोकर का पत्थर'],
['अधीन', ' अधीन', ' अधीन', ' अधीन', ' अधीन', ' अधीनता', ' अधीन', ' अधीन', ' अधीन था', ' अधीन थे', ' अधीन'],
['अधीन होना', ' अधीन रहना', ' अधीन हुआ', ' अधीन रहना', ' अधीनता', 'अधीनता में'],
['दुःख उठाए', ' दुःख उठाना', ' दुःख उठाया', ' दुःख उठता', ' दुःख उठाता'],
['गन्धक', ' गन्धक'],
['झाड़ना', ' उड़ा ले जाना', ' झाड़ा-बुहारा', ' झाड़न'],
['तलवार', ' तलवारें', ' तलवार रखनेवाले'],
['कर', ' करों', ' कर लगाया', ' कर लगाना', ' करदाताओं'],
['चुंगी लेनेवाला', ' चुंगी लेनेवालों'],
['सिखाना', ' सिखाता है', ' पढ़ाया गया', ' शिक्षा', ' शिक्षायें', ' अशिक्षित'],
['गुरु', ' गुरूओं', ' उपदेशक'],
['दस आज्ञाएँ'],
['तम्बू', ' तम्बूओं', ' तम्बू बनाने वाला'],
['दशमांश', ' दसवें अंश', ' दशमांश', ' दसवां'],
['मिलापवाला तम्बू'],
['डर', ' डराएँगे', ' दहशत', ' भयंकर', ' डराएँ', ' घबरा गए', ' भयानक'],
['चोर', ' चोर', ' लूटने', ' लूटने', ' लूटने', ' डाकू', ' लुटेरे', ' डकैती', ' लूट'],
['कटीले', ' झड़बेरी', ' झाड़ियों', ' काँटों', ' झाड़ी', ' ऊँटकटारे'],
['दाँवना', ' दाँवना', ' दाएँ हुए', ' दाँवने'],
['डेवढ़ी', ' डेवढ़ियों'],
['सिंहासन', ' सिंहासनों', ' विराजमान'],
['समय', ' समयानुकूल', ' समय', ' असाधारण'],
['कब्र', ' मिट्टी देनेवाले', ' कब्रें', ' कब्र', ' कब्रों', ' कब्रिस्तान'],
['जीभ', ' जीभों'],
['दुःख देने', ' सताया', ' अंधेर करना', ' दुःख देनेवालों'],
['परम्परा', ' परम्पराओं'],
['रौंदे', ' रौंदेगा', ' रौंदा', ' रौंदना'],
['बेसुध होकर'],
['कांपना', ' काँप उठना', ' काँपकर', ' थरथराते हुए'],
['परीक्षा', ' दुःख'],
['गोत्र', ' गोत्रों', ' गोत्रों', ' भाइयों'],
['क्लेश'],
['tribute'],
['दुःख', ' क्लेश', ' परेशान होना', ' सताना', ' सतानेवाले', ' उपद्रवी'],
['तुरही', ' तुरहियां', ' तुरही फूँकनेवालों'],
['कुर्ता', ' अंगरखों'],
['मुड़', ' मुड़ता', ' लौटना', ' पीछे मुड़ता है', ' वापस मुड़ता है', ' वापस मुड़ना', ' वापस मुड़ा', ' मुड़ जाना', ' वापस मुड़ा', 'मोड़', ' मुड़ कर दूर जा रहा है', ' लौटता है', ' वापस लौटाया', ' वापस लौट रहा है', ' वापस लौट जाता है'],
['समझना', ' समझता है', ' समझ लिया', ' समझ'],
['निष्फल'],
['व्यर्थ', ' अनर्थ'],
['परदा', ' घूँघटों', ' परदा पड़ा', ' उघाड़े'],
['दाखलता', ' दाखलताओं'],
['दाख की बारी', ' दाख की बारियों'],
['कुँवारी', ' कुमारियों', ' कुँवारीपन'],
['दर्शन', ' दर्शनों', ' दर्शन'],
['शब्द', ' स्वर'],
['चले', ' चलता', ' चला', ' चलता'],
['सैनिक', ' सिपाहियों', ' योद्धा', ' शूरवीरों'],
['सत्यानाश', ' नाश', ' नाश हो गया', ' नाश कर', ' उजाड़', ' खण्डहरों'],
['चौकस', ' ताकता', ' देखा', ' देख रहा था', ' द्वारपाल', ' पहरुओं', ' जागते रहो'],
['गुम्मट', ' पहरे की मिनारों', ' गुम्मट'],
['पानी', ' जल', ' पानी पिलाया', ' पानी देना'],
['गड्ढे', ' कुएँ', ' कुआँ', ' कुओं'],
['गेहूँ'],
['दाखरस', ' कुण्ड', ' कुण्डों', ' दाखरस', ' मशक', ' मशकों', ' नई दाखरस'],
['दाखरस के कुण्ड'],
['फटकना', ' फटकता', ' फटका', ' फटकेगा', ' फटके', ' छानना'],
['बुद्धिमानों'],
['भेड़िया', ' भेड़िए', ' जंगली कुत्ते'],
['गर्भ'],
['वचन', ' शब्द'],
['लिखा गया'],
['गलत', ' गलतियाँ', ' गलत करना', ' गलत तरीके से', ' गलत तरीके से', ' गलत करनेवाले', ' गलत', ' दुर्व्यवहार', ' दुर्व्यवहार', ' सताया हुआ', ' दर्द', ' चोट पहुँचाना', ' दर्दनाक'],
['ख़मीर', ' ख़मीरी', ' ख़मीर', ' ख़मीर बनाना', ' अख़मीरी'],
['जूआ', ' जूए', ' जूए में'],
]
kan_tws = []
mal_tws = []
mar_tws = []
pun_tws = []
odi_tws = []
tam_tws = []
tel_tws = []
urd_tws = []
heb_tws = []
|
python
|
import os
# os is only used for finding a dynamic absolute path to the I/O files
absolute_path = os.path.dirname(os.path.abspath(__file__))
inn = absolute_path + '/rom.in'
outt = absolute_path + '/rom.out'
# Open input files
fin = open(inn)
fout = open(outt, 'w')
# Read all lines; the first line gives the number of equations
lines = fin.readlines()
# Create an array sized to the number of equations in the file
nums = [None] * int(lines[0])
# Load the array with the equation strings
for i in range(len(nums)):
    nums[i] = lines[i + 1].strip()
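# (Assumed input format, inferred from the parsing below: each line holds one
#  equation such as 'XIV+LX=' — two Roman numerals joined by '+' and
#  terminated by '='.)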
# Loop through each equation
for i in range(len(nums)):
# Init/reset values after each iteration of equation
romanValue = 0
first = 0
second = 0
print(nums[i], end='')
fout.write(nums[i])
# Loop through each character of the equation
for j in range(len(nums[i])):
# Get sum
romanSum = first + second
currentChar = nums[i][j]
# When current character is not the last, assign next character
if currentChar != '=':
nextChar = nums[i][j + 1]
# While not on the last character go through each character and increment a variable based on the values of the input
while currentChar != '=':
if currentChar == 'M':
romanValue += 1000
elif currentChar == 'D':
romanValue += 500
elif currentChar == 'C' and nextChar == 'M':
romanValue += 900
j += 1
elif currentChar == 'C' and nextChar == 'D':
romanValue += 400
j += 1
elif currentChar == 'C':
romanValue += 100
elif currentChar == 'L':
romanValue += 50
elif currentChar == 'X' and nextChar == 'C':
romanValue += 90
j += 1
elif currentChar == 'X' and nextChar == 'L':
romanValue += 40
j += 1
elif currentChar == 'X':
romanValue += 10
elif currentChar == 'V':
romanValue += 5
elif currentChar == 'I' and nextChar == 'X':
romanValue += 9
j += 1
elif currentChar == 'I' and nextChar == 'V':
romanValue += 4
j += 1
elif currentChar == 'I':
romanValue += 1
# When the plus is reached
else:
# Get value for first and second part of the sum equation
if first == 0:
first = romanValue
else:
second = romanValue
romanValue = 0
j += 1
currentChar = nums[i][j]
if currentChar != '=':
nextChar = nums[i][j + 1]
# Once the sum of the equation is determined, turn it back into a roman numeral
while romanSum > 0:
if romanSum > 1000:
romanSum = 0
print('CONCORDIA CUM VERITATE', end='')
fout.write('CONCORDIA CUM VERITATE')
elif romanSum == 1000:
romanSum -= 1000
print('M', end='')
fout.write('M')
elif romanSum - 900 >= 0:
romanSum -= 900
print('CM', end='')
fout.write('CM')
elif romanSum - 500 >= 0:
romanSum -= 500
print('D', end='')
fout.write('D')
elif romanSum - 400 >= 0:
romanSum -= 400
print('CD', end='')
fout.write('CD')
elif romanSum - 100 >= 0:
romanSum -= 100
print('C', end='')
fout.write('C')
elif romanSum - 90 >= 0:
romanSum -= 90
print('XC', end='')
fout.write('XC')
elif romanSum - 50 >= 0:
romanSum -= 50
print('L', end='')
fout.write('L')
elif romanSum - 40 >= 0:
romanSum -= 40
print('XL', end='')
fout.write('XL')
elif romanSum - 10 >= 0:
romanSum -= 10
print('X', end='')
fout.write('X')
elif romanSum - 9 >= 0:
romanSum -= 9
print('IX', end='')
fout.write('IX')
elif romanSum - 5 >= 0:
romanSum -= 5
print('V', end='')
fout.write('V')
elif romanSum - 4 >= 0:
romanSum -= 4
print('IV', end='')
fout.write('IV')
else:
romanSum -= 1
print('I', end='')
fout.write('I')
print()
fout.write('\n')
fin.close()
fout.close()
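# A minimal, idiomatic sketch of the same two conversions (not part of the
# original solution; shown for comparison, assuming the standard value table):
ROMAN_VALUES = [(1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'), (100, 'C'),
                (90, 'XC'), (50, 'L'), (40, 'XL'), (10, 'X'), (9, 'IX'),
                (5, 'V'), (4, 'IV'), (1, 'I')]

def roman_to_int(s):
    # Subtract a symbol's value when it precedes a larger one (e.g. 'IX' -> 9).
    single = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
    total = 0
    for cur, nxt in zip(s, s[1:] + ' '):
        value = single[cur]
        total += -value if nxt != ' ' and value < single[nxt] else value
    return total

def int_to_roman(n):
    # Greedily emit the largest symbol that still fits.
    out = []
    for value, symbol in ROMAN_VALUES:
        while n >= value:
            out.append(symbol)
            n -= value
    return ''.join(out)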
|
python
|
import factory
from karp.domain.model import Entry, Resource
class ResourceFactory(factory.Factory):
class Meta:
model = Resource
    entity_id = factory.Sequence(lambda n: n)  # assumption: sequential integer ids (illustrative completion)
|
python
|
#!/usr/bin/env python3
# Copyright 2020 Christian Henning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @title :hnets/structured_hmlp_examples.py
# @author :ch
# @contact :[email protected]
# @created :05/02/2020
# @version :1.0
# @python_version :3.6.10
"""
Example Instantiations of a Structured Chunked MLP - Hypernetwork
-----------------------------------------------------------------
The module :mod:`hnets.structured_hmlp_examples` provides helpers for example
instantiations of :class:`hnets.structured_mlp_hnet.StructuredHMLP`.
Functions in this module typically take a given main network and produce the
constructor arguments ``chunk_shapes``, ``num_per_chunk`` and ``assembly_fct``
of class :class:`hnets.structured_mlp_hnet.StructuredHMLP`.
Note:
These examples should be used with care. They are meant as inspiration and
might not cover all possible usecases.
.. autosummary::
hnets.structured_hmlp_examples.resnet_chunking
hnets.structured_hmlp_examples.wrn_chunking
"""
import math
import numpy as np
import torch
from warnings import warn
from mnets.resnet import ResNet
from mnets.wide_resnet import WRN
def resnet_chunking(net, gcd_chunking=False):
r"""Design a structured chunking for a ResNet.
A resnet as implemented in class :class:`mnets.resnet.ResNet` consists
roughly of 5 parts:
- An input convolutional layer with weight shape ``[C_1, C_in, 3, 3]``
- 3 blocks of ``2*n`` convolutional layers each where the first layer has
shape ``[C_i, C_j, 3, 3]`` with :math:`i \in \{2, 3, 4\}` and
:math:`j \equiv i-1` and the remaining ``2*n-1`` layers have a weight
shape of ``[C_i, C_i, 3, 3]``.
- A final fully connected layer of shape ``[n_classes, n_hidden]``.
Each layer may additionally have a bias vector and (if batch normalization
is used) a scale and shift vector.
For instance, if a resnet with biases and batchnorm is used and the first
layer will be produced as one structured chunk, then the first chunk shape
(see return value ``chunk_shapes``) will be:
``[[C_1, C_in, 3, 3], [C_1], [C_1], [C_1]]``.
This function will chunk layer wise (i.e., a chunk always comprises up to
4 elements: weights tensor, bias vector, batchnorm scale and shift). By
default, layers with the same shape are grouped together. Hence, the
standard return value contains 8 chunk shapes (input layer, first layer of
each block, remaining layers of each block (which all have the same shape)
and the fully-connected output layer). Therefore, the return value
``num_per_chunk`` would be as follows:
``[1, 1, 2*n-1, 1, 2*n-1, 1, 2*n-1, 1]``.
Args:
net (mnets.resnet.ResNet): The network for which the structured chunking
should be devised.
gcd_chunking (bool): If ``True``, the layers within the 3 resnet blocks
will be produced by 4 chunks. Therefore, the greatest common divisor
(gcd) of the feature sizes ``C_1, C_2, C_3, C_4`` is computed and
the 6 middle ``chunk_shapes`` produced by default are replaced by 4
chunk shapes ``[[C_gcd, C_i, 3, 3], [C_gcd]]`` (assuming no
batchnorm is used). Note, the first and last entry of
``chunk_shapes`` will remain unchanged by this option.
Hence, ``len(num_per_chunk) = 6`` in this case.
Returns:
(tuple): Tuple containing the following arguments that can be passed
to the constructor of class
:class:`hnets.structured_mlp_hnet.StructuredHMLP`.
- **chunk_shapes** (list)
- **num_per_chunk** (list)
- **assembly_fct** (func)
"""
if not isinstance(net, ResNet):
        raise ValueError('Function expects resnet as argument "net".')
if net._use_context_mod:
raise NotImplementedError('This function doesn\'t handle context-mod ' +
'layers yet!')
if net._param_shapes_meta is not None:
warn('Note, at the time of implementation of this function, the ' +
'resnet attribute "param_shapes_meta" was not yet implemented. ' +
'Hence, this function implementation should be updated.')
has_bn = net._use_batch_norm
has_bias = net.has_bias
n = net._n
filter_sizes = net._filter_sizes
num_layers = 6*n + 2
factor = 1
sub = 0
if has_bias:
factor += 1
if has_bn:
factor += 2
sub = 2
assert len(net.param_shapes) == factor * num_layers - sub
if gcd_chunking:
# Note, each of the `6*n` layers in the middle can be made up of
# several chunks. We know that 1 layer has `C1` as input channel
# dimension, 2n layers have `C2` and `C3` as input channel dimension and
# 2n-1 layers have `C4` as input channel dimension. Though, depending on
# the gcd, multiple chunks are required to produce the weights of 1
# layer.
num_per_chunk = [1, None, None, None, None, 1]
else:
num_per_chunk = [1, 1, 2*n-1, 1, 2*n-1, 1, 2*n-1, 1]
chunk_shapes = []
assembly_fct = None
# Note, if batchnorm is used, then the first 2 * (6*n+1) weights belong to
# batch normalization.
bn_start = 0
w_start = 2 * (6*n+1) if has_bn else 0
### First layer ###
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[:2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
### Resnet blocks ###
c_div_gcd = None
if gcd_chunking:
gcd = math.gcd(filter_sizes[0], filter_sizes[1])
gcd = math.gcd(gcd, filter_sizes[2])
gcd = math.gcd(gcd, filter_sizes[3])
# The first block is made up of layers requiring `C1//gcd` chunks each,
# and so on ...
fsl = filter_sizes
c_div_gcd = [fsl[1] // gcd, fsl[2] // gcd, fsl[3] // gcd]
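        # Worked example: with filter sizes (16, 16, 32, 64) the gcd is 16 and
        # c_div_gcd == [1, 2, 4], i.e. each layer of the third block is
        # assembled from 4 chunks.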
for i, fs in enumerate(filter_sizes):
if i == 0:
#n_layers = 1
n_chunks = c_div_gcd[0]
elif i == 1:
#n_layers = 2 * n
n_chunks = c_div_gcd[0] * (2*n-1) + c_div_gcd[1]
elif i == 2:
#n_layers = 2 * n
n_chunks = c_div_gcd[1] * (2*n-1) + c_div_gcd[2]
else:
#n_layers = 2 * n - 1
n_chunks = c_div_gcd[2] * (2*n-1)
num_per_chunk[1+i] = n_chunks
cs = []
cs.append([gcd, fs, *net._kernel_size])
if has_bias:
cs.append([gcd])
if has_bn:
cs.extend([[gcd], [gcd]])
chunk_shapes.append(cs)
bn_start += 2 * (6*n)
w_start += (2 if has_bias else 1) * (6*n)
else:
for i in range(3): # For each resnet block
# FIXME If two consecutive filter sizes are identical, we could
# add one chunk shape for this block rather than 2.
# First layer of block.
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[bn_start:bn_start+2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
# Remaining 2*n-1 layers of block.
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[bn_start:bn_start+2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
for ii in range(2*n-2):
assert len(cs[0]) == 4
assert np.all(np.equal(net.param_shapes[w_start], cs[0]))
if has_bias:
assert len(cs[1]) == 1
assert np.all(np.equal(net.param_shapes[w_start+1], cs[1]))
if has_bn:
o = 2 if has_bias else 1
assert len(cs[o]) == 1 and len(cs[o+1]) == 1
assert np.all(np.equal(net.param_shapes[bn_start], cs[o]))
assert np.all(np.equal(net.param_shapes[bn_start+1],
cs[o+1]))
bn_start += 2
w_start += 2 if has_bias else 1
### Final layer ###
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
# No batchnorm for last layer!
chunk_shapes.append(cs)
assert len(chunk_shapes) == len(num_per_chunk)
assembly_fct = lambda x : _resnet_chunking_afct(x, net, chunk_shapes,
num_per_chunk, gcd_chunking, c_div_gcd)
return chunk_shapes, num_per_chunk, assembly_fct
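# A hedged usage sketch (constructor arguments are illustrative assumptions,
# not taken from this module):
#
#     net = ResNet(in_shape=(32, 32, 3), num_classes=10, no_weights=True)
#     chunk_shapes, num_per_chunk, assembly_fct = resnet_chunking(net)
#     hnet = StructuredHMLP(net.param_shapes, chunk_shapes=chunk_shapes,
#                           num_per_chunk=num_per_chunk,
#                           assembly_fct=assembly_fct)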
def _resnet_chunking_afct(list_of_chunks, net, chunk_shapes, num_per_chunk,
gcd_chunking, c_div_gcd):
"""The ``assembly_fct`` function required by function
:func:`resnet_chunking`.
"""
assert len(list_of_chunks) == np.sum(num_per_chunk)
has_bn = net._use_batch_norm
has_bias = net.has_bias
n = net._n
bn_weights = []
layer_weights = []
cind = 0
### First layer ###
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Resnet blocks ###
if gcd_chunking:
# Number of layers per channel size.
n_per_c = [1, 2*n, 2*n, 2*n-1]
layer_ind = 0
for i, n_layer in enumerate(n_per_c):
for l in range(n_layer):
# Out of how many chunks does this layer consist?
n_c = c_div_gcd[layer_ind // (2*n)]
layer_ind += 1
chunks = list_of_chunks[cind:cind+n_c]
cind += n_c
layer_weights.append(torch.cat([c[0] for c in chunks], dim=0))
if has_bias:
layer_weights.append(torch.cat([c[1] for c in chunks],
dim=0))
if has_bn:
bn_weights.append(torch.cat([c[-2] for c in chunks], dim=0))
bn_weights.append(torch.cat([c[-1] for c in chunks], dim=0))
else:
for i in range(3): # For each block.
# First layer in block.
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
# Remaining layers in block.
for _ in range(2*n-1):
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Last layer ###
# No batchnorm for last layer!
layer_weights.append(list_of_chunks[-1][0])
if has_bias:
layer_weights.append(list_of_chunks[-1][1])
return bn_weights + layer_weights
def wrn_chunking(net, ignore_bn_weights=True, ignore_out_weights=True,
gcd_chunking=False):
r"""Design a structured chunking for a Wide-ResNet (WRN).
This function is in principle similar to function :func:`resnet_chunking`,
but with the goal to provide a chunking scheme that is identical to the one
proposed in (accessed August 18th, 2020):
Sacramento et al., "Economical ensembles with hypernetworks", 2020
https://arxiv.org/abs/2007.12927
Therefore, a WRN as implemented in class :class:`mnets.wide_resnet.WRN`
is required. For instance, a `WRN-28-10-B(3,3)` can be instantiated as
follows, using batchnorm but no biases in all convolutional layers:
.. code-block:: python
wrn = WRN(in_shape=(32, 32, 3), num_classes=10, n=4, k=10,
num_feature_maps=(16, 16, 32, 64), use_bias=False,
use_fc_bias=True, no_weights=False, use_batch_norm=True)
We denote channel sizes by ``[C_in, C_1, C_2, C_3, C_4]``, where ``C_in`` is
the number of input channels and the remaining ``C_1, C_2, C_3, C_4`` denote
the channel size per convolutional group. The widening factor is denoted by
``k``.
In general, there will be up to 11 `layer groups`, which will be realized
by separate hypernetworks (cmp table S1 in
`Sacramento et al. <https://arxiv.org/pdf/2007.12927.pdf>`_):
- ``0``: Input layer weights. If the network's convolutional layers have
biases and batchnorm layers while ``ignore_bn_weights=False``, then this
hypernet will produce weights of shape
``[[C_1, C_in, 3, 3], [C_1], [C_1], [C_1]]``. However, without
convolutional bias terms and with ``ignore_bn_weights=True``, the hypernet
will only produce weights of shape ``[[C_1, C_in, 3, 3]]``. This
specification applies to all layer groups generating convolutional layers.
- ``1``: This layer group will generate the weights of the first
convolutional layer in the first convolutional group, e.g.,
``[[k*C_2, C_1, 3, 3]]``. Let's define
``r = max(k*C_2/C_1, C_1/k*C_2)``. If ``r=1`` or ``r=2`` or
``gcd_chunking=True``, then this group is merged with layer group ``2``.
- ``2``: The remaining convolutional layer of the first convolutional group.
If ``r=1``, ``r=2`` or ``gcd_chunking=True``, then all convolutional
layers of the first group are generated. However, if biases or batch norm
weights have to be generated, then this form of chunking leads to
redundancy. Imagine bias terms are used and that the first layer in this
convolutional group has weights ``[[160, 16, 3, 3], [160]]``, while the
remaining layers have shape ``[[160, 160, 3, 3], [160]]``. If that's the
case, the hypernetwork output will be of shape
``[[160, 16, 3, 3], [160]]``, meaning that 10 chunks have to be produced
for each except the first layer. However, this means that per
convolutional layer 10 bias vectors are generated, while only one is
needed and therefore the other 9 will go to waste.
- ``3``: Same as ``1`` for the first layer in the second convolutional
group.
- ``4`` (labelled as ``3`` in the paper): Same as ``2`` for all
convolutional layers (potentially excluding the first) in the second
convolutional group.
- ``5``: Same as ``1`` for the first layer in the third convolutional
group.
- ``6`` (labelled as ``4`` in the paper): Same as ``2`` for all
convolutional layers (potentially excluding the first) in the third
convolutional group.
- ``7`` (labelled as ``5`` in the paper): If existing, this hypernetwork
produces the 1x1 convolutional layer realizing the residual connection
connecting the first and second residual block in the first convolutional
group.
- ``8`` (labelled as ``6`` in the paper): Same as ``7`` but for the first
residual connection in the second convolutional group.
- ``9`` (labelled as ``7`` in the paper): Same as ``7`` but for the first
residual connection in the third convolutional group.
- ``10``: This hypernetwork will produce the weights of the fully connected
output layer, if ``ignore_out_weights=False``.
Thus, the WRN weights would maximally be produced by 11 different sub-
hypernetworks.
Note:
There is currently an implementation mismatch, such that the
implementation provided here does not 100% mimic the architecture
described in
`Sacramento et al. <https://arxiv.org/pdf/2007.12927.pdf>`_.
To be specific, given the ``wrn`` generated above, the hypernetwork
output for layer group ``2`` will be of shape ``[160, 160, 3, 3]``,
while the paper expects a vertical chunking with a hypernet output of
shape ``[160, 80, 3, 3]``.
Args:
net (mnets.wide_resnet.WRN): The network for which the structured
chunking should be devised.
ignore_bn_weights (bool): If ``True``, even if the given ``net`` has
batchnorm weights, they will be ignored by this function.
ignore_out_weights (bool): If ``True``, output weights (layer group
``10``) will be ignored by this function.
gcd_chunking (bool): If ``True``, layer groups ``1``, ``3`` and ``5``
are ignored. Instead, the greatest common divisor (gcd) of input and
output feature size in a convolutional group is computed and weight
tensors within a convolutional group (i.e., layer groups ``2``,
``4`` and ``6``) are chunked according to this value. However, note
that this will cause the generation of unused bias and batchnorm
weights if existing (cp. description of layer group ``2``).
Returns:
(tuple): Tuple containing the following arguments that can be passed
to the constructor of class
:class:`hnets.structured_mlp_hnet.StructuredHMLP`.
- **chunk_shapes** (list)
- **num_per_chunk** (list)
- **assembly_fct** (func)
"""
if not isinstance(net, WRN):
        raise ValueError('Function expects WRN as argument "net".')
if net._use_context_mod:
raise NotImplementedError('This function doesn\'t handle context-mod ' +
'layers yet!')
assert net.param_shapes_meta is not None
has_bn = net.batchnorm_layers is not None and len(net.batchnorm_layers) > 0
has_conv_bias = net._use_bias
has_fc_bias = net._use_fc_bias
n = net._n
filter_sizes = net._filter_sizes
#n_conv_layers = 1 + 6 * n + np.sum(net._group_has_1x1)
### Group parameter shapes accoding to their meaning ###
bn_shapes = None
if has_bn:
bn_shapes = net.param_shapes[:2*len(net.batchnorm_layers)]
assert len(net.batchnorm_layers) == 6 * n + 1
for i, meta in enumerate(net.param_shapes_meta[:len(bn_shapes)]):
assert meta['name'].startswith('bn_')
if i % 2 == 1:
assert meta['layer'] == net.param_shapes_meta[i-1]['layer']
elif i > 1:
assert meta['layer'] > net.param_shapes_meta[i-2]['layer']
conv_1x1_shapes = []
pind = 0 if bn_shapes is None else len(bn_shapes)
for g_has_1x1 in net._group_has_1x1:
if g_has_1x1:
conv_1x1_shapes.append(net.param_shapes[pind])
pind += 1
assert len(conv_1x1_shapes[-1]) == 4 and \
conv_1x1_shapes[-1][-1] == 1
else:
conv_1x1_shapes.append(None)
conv_layers = []
conv_biases = [] if has_conv_bias else None
for i in range(2*(1+6*n) if has_conv_bias else 1+6*n):
shape = net.param_shapes[pind]
meta = net.param_shapes_meta[pind]
if has_conv_bias and i % 2 == 1:
assert meta['name'] == 'bias'
conv_biases.append(shape)
else:
assert meta['name'] == 'weight'
conv_layers.append(shape)
pind += 1
    assert pind == len(net.param_shapes) - (2 if has_fc_bias else 1)
assert net.has_fc_out and net.mask_fc_out
if has_fc_bias:
fc_w_shape = net.param_shapes[-2]
fc_b_shape = net.param_shapes[-1]
else:
fc_w_shape = net.param_shapes[-1]
fc_b_shape = None
### Decide on chunking strategy ###
use_lg_135 = [True, True, True] # Use layer group 1, 3 or 5?
conv_group_gcd = [-1, -1, -1]
for i in range(1, 4):
fs_prev = filter_sizes[i-1]
fs_curr = filter_sizes[i]
# In this case, we always chunk.
if max(fs_prev, fs_curr) / min(fs_prev, fs_curr) in [1, 2]:
use_lg_135[i-1] = False
conv_group_gcd[i-1] = min(fs_prev, fs_curr)
elif gcd_chunking:
use_lg_135[i-1] = False
conv_group_gcd[i-1] = math.gcd(fs_prev, fs_curr)
### Prepare chunking for each layer group ###
layer_groups = [True] * 11
# Which layer group actually exist?
if not use_lg_135[0]:
layer_groups[1] = False
if not use_lg_135[1]:
layer_groups[3] = False
if not use_lg_135[2]:
layer_groups[5] = False
# 7, 8, 9 are the 1x1 layer groups.
for i, val in enumerate(net._group_has_1x1):
if not val:
layer_groups[7+i] = False
if ignore_out_weights:
layer_groups[-1] = False
chunk_shapes = []
num_per_chunk = []
# Layer group 0.
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[0])
if has_conv_bias:
chunk_shapes[-1].append(conv_biases[0])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend(bn_shapes[:2])
# Layer groups 1 to 6.
for g in range(3): # For each conv group.
# Input layer to convolutional group.
if layer_groups[1+2*g]:
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[1+2*n*g])
if has_conv_bias:
chunk_shapes[-1].append(conv_biases[1+2*n*g])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend(bn_shapes[2*(1+2*n*g):2*(1+2*n*g)+2])
# Remaining layers of convolutional group.
fs_prev = filter_sizes[g]
fs_curr = filter_sizes[g+1]
assert not has_conv_bias or np.all(np.equal([a[0] for a in \
conv_biases[1+2*n*g:1+2*n*(g+1)]], fs_curr))
assert not has_bn or np.all(np.equal([a[0] for a in \
bn_shapes[2*(1+2*n*g):2*(1+2*n*(g+1))]], fs_curr))
if layer_groups[1+2*g]:
num_per_chunk.append(2*n-1) # 1 chunk per conv layer.
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[1+2*n*g+1])
else:
gcd = conv_group_gcd[g]
num_per_chunk.append(fs_prev//gcd + (2*n-1) * fs_curr//gcd)
chunk_shapes.append([[fs_curr, gcd, 3, 3]])
if has_conv_bias:
chunk_shapes[-1].append([fs_curr])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend([[fs_curr], [fs_curr]])
# Layer group 7 - 9.
for i in range(7, 10):
if layer_groups[i]:
num_per_chunk.append(1)
chunk_shapes.append([conv_1x1_shapes[i-7]])
# Layer group 10.
if not ignore_out_weights:
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(fc_w_shape)
if has_fc_bias:
chunk_shapes[-1].append(fc_b_shape)
### Get assembly function ###
assembly_fct = lambda x : _wrn_chunking_afct(x, chunk_shapes, num_per_chunk,
layer_groups, conv_group_gcd, has_conv_bias, has_fc_bias, has_bn,
ignore_bn_weights, ignore_out_weights, n, filter_sizes)
return chunk_shapes, num_per_chunk, assembly_fct
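# A hedged usage sketch, continuing the `wrn` example from the docstring above
# (the StructuredHMLP arguments shown are assumptions):
#
#     chunk_shapes, num_per_chunk, assembly_fct = wrn_chunking(wrn,
#         ignore_bn_weights=True, ignore_out_weights=True, gcd_chunking=False)
#     hnet = StructuredHMLP(wrn.param_shapes, chunk_shapes=chunk_shapes,
#                           num_per_chunk=num_per_chunk,
#                           assembly_fct=assembly_fct)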
def _wrn_chunking_afct(list_of_chunks, chunk_shapes, num_per_chunk,
layer_groups, conv_group_gcd, has_conv_bias, has_fc_bias,
has_bn, ignore_bn_weights, ignore_out_weights, n,
filter_sizes):
"""The ``assembly_fct`` function required by function :func:`wrn_chunking`.
"""
assert len(list_of_chunks) == np.sum(num_per_chunk)
bn_weights = []
conv_layer_weights = []
res_1x1_layer_weights = []
last_layer_weights = []
cind = 0
### First layer ###
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Resnet blocks ###
for g in range(3): # For each block.
# First layer in block.
if layer_groups[1+2*g]:
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
# Remaining layers in block.
fs_prev = filter_sizes[g]
fs_curr = filter_sizes[g+1]
if layer_groups[1+2*g]:
for _ in range(2*n-1): # 1 chunk per layer
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
else:
num_chunks_first = fs_prev // conv_group_gcd[g]
num_chunks_rem = fs_curr // conv_group_gcd[g]
# Important: Bias and batchnorm weights are always taken from the
# first chunk of a layer (corresponding weights in remaining layers
# are ignored). Weight tensors are concatenated across chunks.
n_per_l = [num_chunks_first] + [num_chunks_rem] * (2*n-1)
for n_c in n_per_l:
chunks = list_of_chunks[cind:cind+n_c]
cind += n_c
conv_layer_weights.append(torch.cat([c[0] for c in chunks],
dim=1))
if has_conv_bias:
conv_layer_weights.append(chunks[0][1])
if not ignore_bn_weights and has_bn:
bn_weights.append(chunks[0][-2])
bn_weights.append(chunks[0][-1])
### 1x1 residual connections ###
for i in range(3):
if layer_groups[7+i]:
res_1x1_layer_weights.append(list_of_chunks[cind][0])
cind += 1
### Last layer ###
# No batchnorm for last layer!
if not ignore_out_weights:
last_layer_weights.append(list_of_chunks[-1][0])
if has_fc_bias:
last_layer_weights.append(list_of_chunks[-1][1])
return bn_weights + res_1x1_layer_weights + conv_layer_weights + \
last_layer_weights
if __name__ == '__main__':
pass
|
python
|
import os
import pandas as pd
import pytz
from dateutil import parser
fp = "C:\\Users\\Robert\\Documents\\Uni\\SOLARNET\\HomogenizationCampaign\\rome\\"
file = os.path.join(fp, "data.csv")
data = pd.read_csv(file, delimiter=" ")
print(data)
converted_data = []
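# Assumption about the FITS filename layout: characters [-15:-7] hold the
# observation date as YYYYMMDD and characters [9:14] the observation type.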
for fits_file, ut in zip(data.file, data.UT):
time = parser.parse(fits_file[-15:-7] +"T" + ut)
time = pytz.utc.localize(time)
    obs_type = fits_file[9:14]  # avoid shadowing the built-in name `type`
    if obs_type != "CaIIK":
        print(fits_file)
    converted_data.append([fits_file, time, obs_type, 1])
converted_data = pd.DataFrame(converted_data, columns=["file", "date", "type", "quality"])
converted_data.to_csv(os.path.join(fp, "converted_ds.csv"))
|
python
|
from test.common_test_util import expected_result
from test.hquery.hquery_test_util import query_html_doc
def test_escapes_work_in_string_literals():
assert query_html_doc('', '"foo bar"') == expected_result("""
foo
bar""")
assert query_html_doc('', "'foo bar'") == expected_result("""
foo
bar""")
assert query_html_doc('', '`foo bar`') == expected_result("""
foo
bar""")
|
python
|
import argparse
import sys
parser = argparse.ArgumentParser(description='Extract gold entities conll file.')
parser.add_argument('--input_file')
args = parser.parse_args()
reading = 0
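# `reading` tracks the position inside a CoNLL block: 0 = sentence lines,
# 1 = the (skipped) middle section, 2 = tab-separated gold-entity lines.
# A blank line advances the state; a blank line after state 2 flushes a block.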
golds = []
sentences = []
with open(args.input_file, 'r') as i_file:
for line in i_file:
line = line.strip()
if line and reading == 0:
sentences.append(line)
elif line and reading == 2:
parts = line.split("\t")
golds.append(parts[1])
if not line and (reading == 0 or reading == 1):
reading += 1
elif not line:
reading = 0
print("\t".join(golds))
golds = []
sentences = []
if len(golds) > 0:
    print("\t".join(golds), end="")
|
python
|
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.contrib.auth.models import User
from django.http import Http404
# Create your models here.
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
pic = models.ImageField(upload_to='profile_photo/', blank=True, default='profile_photo/defaultprofile_Wk2PTL2.jpg')
bio = models.CharField(max_length=265, blank=True)
contact_info = models.CharField(max_length=255, blank=True)
def __str__(self):
return self.bio
@classmethod
def get_profiles(cls):
profile = cls.objects.all()
return profile
class Projects(models.Model):
image = models.ImageField(upload_to='project_folder')
title = models.CharField(max_length=255)
description = models.TextField()
link = models.CharField(max_length=200)
post_date = models.DateTimeField(auto_now_add=True)
profile = models.ForeignKey(Profile, on_delete=models.CASCADE, default='1')
author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
def __str__(self):
return f'{self.profile.user.username}'
class Meta:
ordering = ['-post_date']
@classmethod
def get_project_by_id(cls, id):
try:
proj = Projects.objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404()
return proj
@classmethod
def get_projects(cls):
project = cls.objects.all()
return project
@classmethod
def search_by_title(cls, search_term):
projects = cls.objects.filter(title__icontains=search_term)
return projects
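        # Illustrative query: Projects.search_by_title('portfolio') returns a
        # QuerySet of projects whose titles contain 'portfolio'
        # (case-insensitive, via the icontains lookup).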
class Reviews(models.Model):
design = models.PositiveSmallIntegerField(default=0)
usability = models.PositiveSmallIntegerField(default=0)
content = models.PositiveSmallIntegerField(default=0)
author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
project = models.ForeignKey(Projects, on_delete=models.CASCADE, default='project_folder/responsive.jpg')
def __str__(self):
return f'{self.design}'
#
# class Comment(models.Model):
# number = models.IntegerField(default=0)
# comment = models.CharField(max_length=200)
# date = models.DateTimeField(auto_now_add=True)
# author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
# project = models.ForeignKey(Projects, on_delete=models.CASCADE, default='project_folder/responsive.jpg')
#
# def __str__(self):
# return f'{self.username}'
#
# class Meta:
# ordering = ['-date']
#
# @classmethod
# def get_all_comments(cls):
# comments = Comment.objects.all()
# return comments
|
python
|
import torch
import torch.utils.data
import os
import numpy as np
from PIL import Image
from utils.util import *
def loaderAndResize(path):
return Image.open(path).resize((128, 128))
def loader(path):
return Image.open(path)
class GlassandAttrFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, list_wo_g, list_w_g,
transform = None, list_reader = list_reader_all,
loader = loader):
self.face_img_root = face_img_root
self.face_list_wo_g = list_reader(list_wo_g)
self.face_list_w_g = list_reader(list_w_g)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
none_occ_attr_list = self.face_list_wo_g[index][1 : ]
face_name = self.face_list_wo_g[index][0]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
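        # The following picks a fixed subset of what is assumed to be a
        # CelebA-style 40-attribute annotation; values come in as +/-1 and are
        # mapped to {0, 1} below.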
none_occ_attr = [int(none_occ_attr_list[0]), int(none_occ_attr_list[16]),
int(none_occ_attr_list[22]), int(none_occ_attr_list[24]),
int(none_occ_attr_list[30]), int(none_occ_attr_list[20]),
int(none_occ_attr_list[39])]
for i in range(len(none_occ_attr)):
if none_occ_attr[i] == -1:
none_occ_attr[i] = 0
idx2 = np.random.randint(0, len(self.face_list_w_g))
occ_attr_list = self.face_list_w_g[idx2][1 : ]
occ_name = self.face_list_w_g[idx2][0]
occ_img = self.loader(os.path.join(self.face_img_root, occ_name))
occ_attr = [int(occ_attr_list[0]), int(occ_attr_list[16]),
int(occ_attr_list[22]), int(occ_attr_list[24]),
int(occ_attr_list[30]), int(occ_attr_list[20]),
int(occ_attr_list[39])]
for i in range(len(occ_attr)):
if occ_attr[i] == -1:
occ_attr[i] = 0
if self.transform is not None:
occ_img = self.transform(occ_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_img,
'occ_attr': torch.from_numpy(np.array(occ_attr)),
'none_occ_attr': torch.from_numpy(np.array(none_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_list_wo_g)
class RandomFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, img_list,
transform = None, list_reader = list_reader_2,
loader = loader):
self.face_img_root = face_img_root
self.face_list, self.label = list_reader(img_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_name = self.face_list[index]
face_label_ori = self.label[index]
face_label_des = 1
img = self.loader(os.path.join(self.face_img_root, face_name))
if self.transform is not None:
img = self.transform(img)
sample = {'img': img,
'label_ori': face_label_ori,
'label_des': face_label_des,
}
return sample
def __len__(self):
return len(self.face_list)
class GlassFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, list_wo_g, list_w_g,
transform = None, list_reader = list_reader,
loader = loader):
self.face_img_root = face_img_root
self.face_list_wo_g = list_reader(list_wo_g)
self.face_list_w_g = list_reader(list_w_g)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_name = self.face_list_wo_g[index]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
occ_name = self.face_list_w_g[np.random.randint(0, len(self.face_list_w_g))]
occ_img = self.loader(os.path.join(self.face_img_root, occ_name))
if self.transform is not None:
occ_img = self.transform(occ_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_img
}
return sample
def __len__(self):
return len(self.face_list_wo_g)
class OccFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, face_name_list, occ_img_root,
occ_name_list, transform = None, list_reader = list_reader_all,
loader = loader):
self.face_img_root = face_img_root
self.face_list = list_reader(face_name_list)
self.occ_img_root = occ_img_root
self.occ_list = list_reader(occ_name_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
none_occ_attr_list = self.face_list[index][1 : ]
face_name = self.face_list[index][0]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
none_occ_attr = [int(none_occ_attr_list[0]), int(none_occ_attr_list[16]),
int(none_occ_attr_list[22]), int(none_occ_attr_list[24]),
int(none_occ_attr_list[30]), int(none_occ_attr_list[20]),
int(none_occ_attr_list[39])]
for i in range(len(none_occ_attr)):
if none_occ_attr[i] == -1:
none_occ_attr[i] = 0
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_img_root, occ_name))
if occ_name[0] == 'm':
occ_type = occ_name.split()[0]
else:
occ_type = occ_name.split('_')[0]
print(occ_type)
occ_face_img = process_image(none_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_face_img = self.transform(occ_face_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_face_img,
'occ_attr': torch.from_numpy(np.array(none_occ_attr)),
'none_occ_attr': torch.from_numpy(np.array(none_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_list)
class OccFaceImageMixLoader(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
index1 = np.random.randint(0, len(self.face_wo_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
sample = {'face_wo_occ_img': face_wo_occ_img,
'occ_img_syn': occ_img_syn,
'face_wo_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
'name': self.face_w_occ_list[index1][0],
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
class OccFaceImageMixLoader_test(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = 8
top_y = 8
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
index1 = np.random.randint(0, len(self.face_wo_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
sample = {'face_wo_occ_img': face_wo_occ_img,
'occ_img_syn': occ_img_syn,
'face_wo_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
'name': self.face_w_occ_list[index1][0],
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
class OccFaceImageMixLoaderV2(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
####################
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
####################
index1 = np.random.randint(0, len(self.face_wo_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
####################
index2 = np.random.randint(0, len(self.face_wo_occ_list))
face_w_occ_attr_adv_list = self.face_wo_occ_list[index2][1 : ]
face_wo_occ_img_adv = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index2][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img_adv = face_wo_occ_img_adv.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr_adv = [int(face_w_occ_attr_adv_list[0]), int(face_w_occ_attr_adv_list[16]),
int(face_w_occ_attr_adv_list[22]), int(face_w_occ_attr_adv_list[24]),
int(face_w_occ_attr_adv_list[30]), int(face_w_occ_attr_adv_list[20]),
int(face_w_occ_attr_adv_list[39])]
###################
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
face_wo_occ_img_adv = self.transform(face_wo_occ_img_adv)
sample = {
'face_w_syn_occ_img': occ_img_syn,
'face_w_syn_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_syn_occ_img_GT': face_wo_occ_img,
'face_wo_occ_img_adv': face_wo_occ_img_adv,
'face_wo_occ_attr_adv': torch.from_numpy(np.array(face_w_occ_attr_adv)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
|
python
|
#!/usr/bin/env python
import pytest
from pytest import approx
import igrf13
time = "2010-07-12"
def test_igrf13():
mag = igrf13.igrf(time, 65, 85, 0, model=12)
assert mag.north.item() == approx(9295.100256)
assert mag.east.item() == approx(2560.199706)
assert mag.down.item() == approx(59670.251893)
assert mag.total.item() == approx(60444.126863)
assert mag.incl.item() == approx(80.821738)
assert mag.decl.item() == approx(15.399442)
# def test_igrf11():
#
# mag = igrf11.igrf(time, 65, 85, 0, model=11)
#
# assert mag.north.item() == approx(9301.523160)
# assert mag.east.item() == approx(2563.450424)
# assert mag.down.item() == approx(59666.132881)
# assert mag.total.item() == approx(60441.186489)
#
# assert mag.incl.item() == approx(80.814513)
# assert mag.decl.item() == approx(15.407924)
if __name__ == "__main__":
pytest.main([__file__])
|
python
|
import logging
import sdk_cmd
import sdk_tasks
import shakedown
from tests import config
log = logging.getLogger(__name__)
def broker_count_check(count, service_name=config.SERVICE_NAME):
def fun():
try:
if len(sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'broker list', json=True)) == count:
return True
        except Exception:
            pass
return False
shakedown.wait_for(fun)
def restart_broker_pods(service_name=config.SERVICE_NAME):
for i in range(config.DEFAULT_BROKER_COUNT):
pod_name = '{}-{}'.format(config.DEFAULT_POD_TYPE, i)
task_name = '{}-{}'.format(pod_name, config.DEFAULT_TASK_NAME)
broker_id = sdk_tasks.get_task_ids(service_name, task_name)
restart_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'pod restart {}'.format(pod_name), json=True)
assert len(restart_info) == 2
assert restart_info['tasks'][0] == task_name
sdk_tasks.check_tasks_updated(service_name, task_name, broker_id)
sdk_tasks.check_running(service_name, config.DEFAULT_BROKER_COUNT)
def replace_broker_pod(service_name=config.SERVICE_NAME):
pod_name = '{}-0'.format(config.DEFAULT_POD_TYPE)
task_name = '{}-{}'.format(pod_name, config.DEFAULT_TASK_NAME)
broker_0_id = sdk_tasks.get_task_ids(service_name, task_name)
sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'pod replace {}'.format(pod_name))
sdk_tasks.check_tasks_updated(service_name, task_name, broker_0_id)
sdk_tasks.check_running(service_name, config.DEFAULT_BROKER_COUNT)
# wait till all brokers register
broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=service_name)
def create_topic(topic_name, service_name=config.SERVICE_NAME):
# Get the list of topics that exist before we create a new topic
topic_list_before = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic list', json=True)
create_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic create {}'.format(topic_name), json=True)
log.info(create_info)
assert ('Created topic "%s".\n' % topic_name in create_info['message'])
if '.' in topic_name or '_' in topic_name:
assert ("topics with a period ('.') or underscore ('_') could collide." in create_info['message'])
topic_list_after = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic list', json=True)
new_topics = set(topic_list_after) - set(topic_list_before)
assert topic_name in new_topics
topic_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic describe {}'.format(topic_name), json=True)
assert len(topic_info) == 1
assert len(topic_info['partitions']) == config.DEFAULT_PARTITION_COUNT
def delete_topic(topic_name, service_name=config.SERVICE_NAME):
delete_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic delete {}'.format(topic_name), json=True)
assert len(delete_info) == 1
assert delete_info['message'].startswith('Output: Topic {} is marked for deletion'.format(topic_name))
topic_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic describe {}'.format(topic_name), json=True)
assert len(topic_info) == 1
assert len(topic_info['partitions']) == config.DEFAULT_PARTITION_COUNT
def assert_topic_lists_are_equal_without_automatic_topics(expected, actual):
"""Check for equality in topic lists after filtering topics that start with
an underscore."""
filtered_actual = list(filter(lambda x: not x.startswith('_'), actual))
assert expected == filtered_actual
|
python
|
# -*- coding: utf-8 -*-
""" testunit 基础类
@Time : 2020/4/10 上午1:03
@File : testbase.py
@author : pchaos
@license : Copyright(C), pchaos
@Contact : p19992003#gmail.com
"""
import unittest
import datetime
import QUANTAXIS as qa
from .testbase import TestingBase
class qaTestingBase(TestingBase):
"""unittest base class for QA
"""
@classmethod
def userInit(cls):
"""用户初始化
"""
cls.code = '000300'
dateStart = datetime.date(2005, 3, 1)
dateEnd = datetime.date(2017, 3, 31)
cls.dataFrame = qa.QA_fetch_index_day_adv(cls.code, start=dateStart, end=dateEnd)
@classmethod
def userEnd(cls):
"""class结束,用户释放资源
"""
if cls.dataFrame is not None:
cls.dataFrame = None
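# A minimal, illustrative subclass sketch (the test method and the shape of
# cls.dataFrame are assumptions about TestingBase's unittest wiring):
#
#     class TestHS300(qaTestingBase):
#         def test_fetched_data(self):
#             self.assertGreater(len(self.dataFrame.data), 0)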
|
python
|
# -*- coding: utf-8 -*-
import pytest
import os
import csv
import tempfile
from datetime import datetime
from nart.writer.builtins.csvwriter import CSVWriter
from nart.model.nartdata import NartData
from nart.model.nartitem import NartItem
@pytest.fixture
def csvwriter_fixture():
"""
    Creates a filepath for the CSV repo and removes it after the test finishes.
    :return: str. The filepath of the CSV repo.
"""
fd, filepath = tempfile.mkstemp()
os.close(fd)
yield filepath
if os.path.exists(filepath):
os.remove(filepath)
def test_csvwriter_success(csvwriter_fixture):
"""
    Success-path test for CSVWriter.
    :param csvwriter_fixture: the fixture providing a temporary filepath.
"""
filepath = csvwriter_fixture
rank1 = NartItem(1, 'test1')
rank2 = NartItem(2, 'test2')
keywords = NartData(datetime.now(), [rank1, rank2])
writer = CSVWriter(path=filepath, append_if_exist=True)
writer.write(keywords)
assert os.path.exists(writer.path)
with open(writer.path, mode='r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
reader_count = 0
for row in reader:
reader_count = reader_count + 1
assert row[1] == 'test1'
assert row[2] == 'test2'
assert reader_count == 1
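def test_csvwriter_append_example(csvwriter_fixture):
    """
    Hedged sketch (an assumption, not from the original suite): with
    append_if_exist=True, a second write() should append a second row.
    """
    writer = CSVWriter(path=csvwriter_fixture, append_if_exist=True)
    writer.write(NartData(datetime.now(), [NartItem(1, 'a')]))
    writer.write(NartData(datetime.now(), [NartItem(1, 'b')]))
    with open(writer.path, mode='r', encoding='utf-8') as csvfile:
        assert len(list(csv.reader(csvfile))) == 2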
|
python
|
import torchvision.transforms as transforms
config = {
'params': {
"backbone": {
"kernel_size": 3,
"output_dim": 128,
"input_dim": 3,
"stride": 2,
"padding": 1,
"out_img_size": 16
},
"primary_capsules": {
"kernel_size": 1,
"stride": 1,
"input_dim": 128,
"caps_dim": 36,
"num_caps": 32,
"padding": 0,
"out_img_size": 16
},
"capsules": [{
"type": "CONV",
"num_caps": 32,
"caps_dim": 36,
"kernel_size": 3,
"stride": 2,
"matrix_pose": True,
"out_img_size": 7
}, {
"type": "CONV",
"num_caps": 32,
"caps_dim": 36,
"kernel_size": 3,
"stride": 1,
"matrix_pose": True,
"out_img_size": 5
}, {
"type": "FC",
"num_caps": 20,
"caps_dim": 36,
"matrix_pose": True
}],
"class_capsules": {
"num_caps": 100,
"caps_dim": 36,
"matrix_pose": True
}
},
"transform_train":
transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465),
(0.2023, 0.1994, 0.2010)),
]),
"transform_test":
transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465),
(0.2023, 0.1994, 0.2010)),
])
}
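# Hedged usage sketch (an assumption, not part of the original config): the
# composed train transform applies directly to a 32x32 PIL image (e.g. a
# CIFAR sample), yielding a normalized 3x32x32 tensor.
if __name__ == '__main__':
    import numpy as np
    from PIL import Image
    dummy = Image.fromarray(np.zeros((32, 32, 3), dtype=np.uint8))
    x = config['transform_train'](dummy)
    print(x.shape)  # expected: torch.Size([3, 32, 32])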
|
python
|
"""
This file is part of tendril
See the COPYING, README, and INSTALL files for more information
"""
import os
import imp
dirname, fname = os.path.split(os.path.abspath(__file__))
def import_(filename):
(path, name) = os.path.split(filename)
(name, ext) = os.path.splitext(name)
(f, filename, data) = imp.find_module(name, [path])
return imp.load_module(name, f, filename, data)
def get_test_object(testst, offline=False):
if '.' in testst:
modname, clsname = testst.rsplit('.', 1)
elif ':' in testst:
modname, clsname = testst.split(':')
clsname = 'Test' + clsname
else:
modname = testst
clsname = 'Test' + testst
try:
mod = import_(os.path.join(dirname, modname))
cls = getattr(mod, clsname)
instance = cls(offline=offline)
return instance
except ImportError:
raise ValueError("Test Unrecognized :" + testst)
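def _demo_get_test_object():
    """Hedged usage sketch; 'voltage' is a hypothetical sibling module. The
    colon form loads voltage.py and instantiates voltage.TestVoltage, while
    the bare form 'voltage' would look for a class named Testvoltage."""
    return get_test_object('voltage:Voltage', offline=True)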
|
python
|
# Copyright (C) 2021, Raffaello Bonghi <[email protected]>
# All rights reserved
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import launch
from launch.actions import IncludeLaunchDescription
from launch.substitutions import Command, LaunchConfiguration
from launch.launch_description_sources import PythonLaunchDescriptionSource
import launch_ros
import os
def generate_launch_description():
pkg_share = launch_ros.substitutions.FindPackageShare(package='nanosaur_description').find('nanosaur_description')
default_rviz_config_path = os.path.join(pkg_share, 'rviz/urdf.rviz')
joint_state_publisher_node = launch_ros.actions.Node(
package='joint_state_publisher',
executable='joint_state_publisher',
name='joint_state_publisher',
condition=launch.conditions.UnlessCondition(LaunchConfiguration('gui'))
)
joint_state_publisher_gui_node = launch_ros.actions.Node(
package='joint_state_publisher_gui',
executable='joint_state_publisher_gui',
name='joint_state_publisher_gui',
condition=launch.conditions.IfCondition(LaunchConfiguration('gui'))
)
rviz_node = launch_ros.actions.Node(
package='rviz2',
executable='rviz2',
name='rviz2',
output='screen',
arguments=['-d', LaunchConfiguration('rvizconfig')],
)
return launch.LaunchDescription([
launch.actions.DeclareLaunchArgument(name='gui', default_value='True',
description='Flag to enable joint_state_publisher_gui'),
launch.actions.DeclareLaunchArgument(name='rvizconfig', default_value=default_rviz_config_path,
description='Absolute path to rviz config file'),
# Nanosaur description launch
# https://answers.ros.org/question/306935/ros2-include-a-launch-file-from-a-launch-file/
IncludeLaunchDescription(
PythonLaunchDescriptionSource(
[pkg_share, '/launch/description.launch.py'])),
joint_state_publisher_node,
joint_state_publisher_gui_node,
rviz_node
])
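# Hedged usage note (assumes this file is installed as display.launch.py in
# the nanosaur_description package):
#   ros2 launch nanosaur_description display.launch.py gui:=False \
#       rvizconfig:=/path/to/custom.rviz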
# EOF
|
python
|
import logging
from typing import TYPE_CHECKING, Optional
from web3.types import BlockIdentifier
from rotkehlchen.assets.asset import Asset
from rotkehlchen.constants.assets import A_ALETH, A_ETH, A_WETH
from rotkehlchen.constants.ethereum import SADDLE_ALETH_POOL
from rotkehlchen.constants.misc import EXP18
from rotkehlchen.errors.price import PriceQueryUnsupportedAsset
from rotkehlchen.inquirer import Inquirer
from rotkehlchen.interfaces import CurrentPriceOracleInterface
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.types import Price
if TYPE_CHECKING:
from rotkehlchen.chain.ethereum.manager import EthereumManager
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
class SaddleOracle(CurrentPriceOracleInterface):
"""
Provides logic to use saddle as oracle for certain assets
"""
def __init__(self, eth_manager: 'EthereumManager'):
super().__init__(oracle_name='saddle')
self.eth_manager = eth_manager
def rate_limited_in_last(
self,
seconds: Optional[int] = None, # pylint: disable=unused-argument
) -> bool:
return False
def get_price(
self,
from_asset: Asset,
to_asset: Asset,
block_identifier: BlockIdentifier,
) -> Price:
"""
        NOTE: This function can only be used for ALETH at the moment. The
        reason is how Saddle pools are engineered and the lack of an
        automated way to discover them; ALETH was chosen because this is the
        only place where its price can be queried.
        The code queries the pool for the ALETH -> ETH swap rate and then,
        if needed, multiplies by the ETH price to obtain the ALETH price.
"""
log.debug(f'Querying saddle for price of {from_asset} to {to_asset}')
if from_asset != A_ALETH:
raise PriceQueryUnsupportedAsset(
f'{from_asset} is not a valid asset for the Saddle oracle',
)
aleth_eth_price = SADDLE_ALETH_POOL.call(
ethereum=self.eth_manager,
method_name='calculateSwap',
arguments=[1, 0, 1000000000000000000],
block_identifier=block_identifier,
)
aleth_eth_price /= EXP18
if to_asset not in (A_WETH, A_ETH):
eth_price = Inquirer().find_price(A_ETH, to_asset)
return aleth_eth_price * eth_price
return aleth_eth_price
def query_current_price(self, from_asset: Asset, to_asset: Asset) -> Price:
"""At the moment until more pools get implemented this function is limited to ALETH
Refer to the docstring of `get_price`.
"""
return self.get_price(
from_asset=from_asset,
to_asset=to_asset,
block_identifier='latest',
)
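# Hedged usage sketch (an assumption, not part of the original module): with
# a connected EthereumManager the oracle resolves the current ALETH price;
# any other from_asset raises PriceQueryUnsupportedAsset.
def _demo_saddle_oracle(eth_manager: 'EthereumManager') -> Price:
    oracle = SaddleOracle(eth_manager)
    return oracle.query_current_price(from_asset=A_ALETH, to_asset=A_ETH)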
|
python
|
from .version import __version__ # scTenifoldXct.__version__
from scTenifoldXct.core import scTenifoldXct
from scTenifoldXct.visualization import get_Xct_pairs, plot_XNet
from scTenifoldXct.merge import merge_scTenifoldXct
|
python
|
# Copyright 2014-2017 Lionheart Software LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import operator
import urllib.request, urllib.parse, urllib.error
import logging
from . import exceptions
PINBOARD_API_ENDPOINT = "https://api.pinboard.in/v1/"
PINBOARD_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
PINBOARD_ALTERNATE_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
PINBOARD_DATE_FORMAT = "%Y-%m-%d"
class Bookmark(object):
def __init__(self, payload, token):
self.description = payload['description']
self.extended = payload['extended']
self.url = payload['href']
self.meta = payload['meta']
self.hash = payload['hash']
self.shared = payload['shared'] == "yes"
self.toread = payload['toread'] == "yes"
self.tags = payload['tags'].split(' ')
self.time = Pinboard.datetime_from_string(payload['time'])
self.token = token
    def __eq__(self, other):
        # two bookmarks are equal if they point at the same URL (same hash)
        return other.hash == self.hash
    def __ne__(self, other):
        # ...but differ if their content signature (meta) differs
        return other.meta != self.meta
def __gt__(self, other):
return self.time > other.time
def __lt__(self, other):
return self.time < other.time
def __ge__(self, other):
return self.time >= other.time
def __le__(self, other):
return self.time <= other.time
@property
def pinboard(self):
return Pinboard(self.token)
def __repr__(self):
parse_result = urllib.parse.urlparse(self.url)
return "<Bookmark description=\"{}\" url=\"{}\">".format(self.description, parse_result.netloc)
def save(self, update_time=False):
params = {
'url': self.url,
'description': self.description,
'extended': self.extended,
'tags': self.tags,
'shared': "yes" if self.shared else "no",
'toread': "yes" if self.toread else "no",
}
if update_time:
params['dt'] = self.time
return self.pinboard.posts.add(**params)
def delete(self):
return self.pinboard.posts.delete(url=self.url)
class Tag(object):
def __init__(self, key, value):
self.name = key
self.count = int(value)
def __repr__(self):
return "<Tag name=\"{}\" count={}>".format(self.name, self.count)
class Pinboard(object):
DATE_FIELDS = ["dt", "date", "update_time", "created_at", "updated_at"]
BOOLEAN_FIELDS = ["replace", "shared", "toread"]
SPACE_DELIMITED_FIELDS = ["tag", "tags"]
def __init__(self, token):
self.token = token
def __getattr__(self, k):
return PinboardCall(self.token, k)
@staticmethod
def date_from_string(value):
return datetime.datetime.strptime(value, PINBOARD_DATE_FORMAT).date()
@staticmethod
def string_from_date(d):
return d.strftime(PINBOARD_DATE_FORMAT)
@staticmethod
def datetime_from_string(value):
try:
return datetime.datetime.strptime(value, PINBOARD_DATETIME_FORMAT)
except ValueError:
return datetime.datetime.strptime(value, PINBOARD_ALTERNATE_DATETIME_FORMAT)
@staticmethod
def string_from_datetime(dt):
return dt.strftime(PINBOARD_DATETIME_FORMAT)
class PinboardCall(object):
def __init__(self, token, path):
self.token = token
self.components = [path]
def __getattr__(self, k):
self.components.append(k)
return self
def __getitem__(self, k):
self.components.append(k)
return self
def __call__(self, *args, **kwargs):
url = "{}{}".format(PINBOARD_API_ENDPOINT, "/".join(self.components))
parse_response = kwargs.get('parse_response', True)
if 'parse_response' in kwargs:
del kwargs['parse_response']
params = kwargs.copy()
for field in Pinboard.DATE_FIELDS:
if field in kwargs:
try:
params[field] = Pinboard.string_from_datetime(kwargs[field])
                except (AttributeError, TypeError):
                    # value was not a datetime; pass it through unchanged
                    params[field] = kwargs[field]
for field in Pinboard.BOOLEAN_FIELDS:
if field in kwargs:
if isinstance(kwargs[field], bool):
params[field] = "yes" if kwargs[field] else "no"
else:
params[field] = kwargs[field]
for field in Pinboard.SPACE_DELIMITED_FIELDS:
if field in kwargs:
if isinstance(kwargs[field], list):
params[field] = ' '.join(kwargs[field])
else:
params[field] = kwargs[field]
params['format'] = "json"
params['auth_token'] = self.token
if 'meta' in params:
params['meta'] = 1 if kwargs['meta'] else 0
query_string = urllib.parse.urlencode(params)
final_url = "{}?{}".format(url, query_string)
try:
request = urllib.request.Request(final_url)
opener = urllib.request.build_opener(urllib.request.HTTPSHandler)
response = opener.open(request)
except urllib.error.HTTPError as e:
error_mappings = {
401: exceptions.PinboardAuthenticationError,
403: exceptions.PinboardForbiddenError,
500: exceptions.PinboardServerError,
503: exceptions.PinboardServiceUnavailable,
}
if e.code in error_mappings:
Error = error_mappings[e.code]
raise Error(e.url, e.code, e.msg, e.hdrs, e.fp)
raise
else:
if parse_response:
json_response = json.load(response)
for field in Pinboard.DATE_FIELDS:
if field in json_response:
json_response[field] = Pinboard.datetime_from_string(json_response[field])
if self.components == ["posts", "all"]:
return [Bookmark(k, self.token) for k in json_response]
elif self.components in [["posts", "get"], ["posts", "recent"]]:
json_response['posts'] = [Bookmark(k, self.token) for k in json_response['posts']]
elif self.components == ["posts", "dates"]:
json_response['dates'] = {Pinboard.date_from_string(k): int(v) \
for k, v in list(json_response['dates'].items())}
elif self.components == ["posts", "update"]:
return json_response['update_time']
elif self.components == ["tags", "get"]:
tags = [Tag(k, v) for k, v in list(json_response.items())]
tags.sort(key=operator.attrgetter('name'))
return tags
elif self.components == ["notes", "list"]:
for note in json_response['notes']:
for field in Pinboard.DATE_FIELDS:
if field in note:
note[field] = Pinboard.datetime_from_string(note[field])
elif 'result_code' in json_response:
if json_response['result_code'] == "done":
return True
else:
raise exceptions.PinboardError(json_response['result_code'])
return json_response
else:
return response
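# Hedged usage sketch (an assumption; the token below is a placeholder): the
# __getattr__/__call__ chaining on PinboardCall turns attribute access into
# API paths, so pb.posts.recent(...) requests /v1/posts/recent.
def _demo_pinboard():
    pb = Pinboard("username:XXXXXXXXXXXXXXXXXXXX")  # placeholder API token
    recent = pb.posts.recent(count=5)  # parsed into Bookmark objects
    tags = pb.tags.get()               # parsed into Tag objects sorted by name
    return recent, tags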
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# check_apcaccess.py - a script for checking an APC UPS
# using the apcaccess utility
#
# 2016 By Christian Stankowic
# <info at stankowic hyphen development dot net>
# https://github.com/stdevel
#
# Enhanced and error corrections by Chris Johnston 2017
# Tested on BX1300G & RS 1500G but should work on most APC UPS
# <HireChrisJohnston at g mail>
#
#> Added detection of TONBATT
#> Corrected misc errors
#> Enhanced charting & messaging
from optparse import OptionParser, OptionGroup
import os
import subprocess
import logging
import re
#set logger
LOGGER = logging.getLogger("check_apcaccess")
#global variables
ups_info={}
state=0
def check_value(val, desc, warn, crit, reverse=False):
#compares value to thresholds and sets codes
LOGGER.debug("Comparing '{0}' ({1}) to warning/critical thresholds {2}/{3} (reverse: {4})".format(val, desc, warn, crit, reverse))
snip=""
	if not reverse:
if val > crit:
#critical
snip="{0} *Critical* ({1})".format(desc, val)
set_code(2)
elif val > warn:
#warning
snip="{0} -Warning ({1})".format(desc, val)
set_code(1)
else: snip="{0}: {1}".format(desc, val)
else:
if val < crit:
#critical
snip="{0} *Critical* ({1})".format(desc, val)
set_code(2)
elif val < warn:
#warning
snip="{0} -Warning ({1})".format(desc, val)
set_code(1)
else: snip="{0}: {1}".format(desc, val)
return snip
def set_code(code):
	#set result code (keep the worst state seen so far)
	global state
	if code > state: state = code
def get_return_str():
#get return string
if state == 3: return "UNKNOWN"
elif state == 2: return "CRITICAL"
elif state == 1: return "WARNING"
else: return "OK"
def get_value(key, isFloat=False):
#get value from apcaccess information
if isFloat:
temp = re.findall(r'[-+]?[0-9]*\.?[0-9]*', ups_info[key])
return float(temp[0])
else: return ups_info[key]
def calc_consumption():
#calculate power consumption
load = get_value('LOADPCT', True)
out = get_value('NOMPOWER', True)
power_cons = int(out*(load/100))
LOGGER.debug("MATH says, based on the information provided, it is assumed that the power consumption might be ~{0} watts".format(power_cons))
return power_cons
def check_ups():
#check UPS
global state
#get _all_ the values
starttime = get_value('STARTTIME')
status = get_value('STATUS')
battv = get_value('BATTV', True)
LOGGER.debug("BattV: {0}".format(battv))
load = get_value('LOADPCT', True)
batt = get_value('BCHARGE', True)
xfers = get_value('NUMXFERS')
tot_onbat = get_value('CUMONBATT')
on_bat = get_value('TONBATT')
linev = get_value('LINEV')
if options.time_warn and options.time_crit: time = get_value('TIMELEFT', True)
power_cons = calc_consumption()
#Check if line level is high
curr_line_level = get_value('LINEV', True)
if options.line_level > curr_line_level:
snip_line_level = " Line Level low {2} {0} for {1}".format(status,on_bat,linev)
set_code(1)
else: snip_line_level = status
#check Batt V
snip_battv = check_value(battv, "Voltage", options.battv_warn, options.battv_crit, True) +'v'
#check load
snip_load = check_value(load, "Load", options.load_warn, options.load_crit)+ '%'
#check battery charge
snip_batt = check_value(batt, "Charge", options.bat_warn, options.bat_crit, True) +'%'
#check battery time (optional)
	if options.time_warn and options.time_crit:
snip_time = check_value(time, "Time Left", options.time_warn, options.time_crit, True) + 'min'
else: snip_time=""
#check power consumption (optional)
	if options.consum_warn and options.consum_crit:
snip_consum = check_value(power_cons, "Power consumption", options.consum_warn, options.consum_crit) +'w'
else: snip_consum=""
# get detail
snip_detail ="(Total On Battery: " + tot_onbat + " / #Xfers: " + xfers + " since "+starttime+")"
#get performance data
if options.show_perfdata:
#initialize perfdata
perfdata=" |"
#power consumption
		if options.consum_warn and options.consum_crit: perfdata = "{0} 'Consumption'={1}w;{2};{3};;".format(perfdata, power_cons, float(options.consum_warn), float(options.consum_crit))
else: perfdata = "{0} 'Consumption'={1}w;;;".format(perfdata, power_cons)
#voltage
perfdata = "{0} 'Voltage'={1}v;{2};{3};{4};{5}".format(perfdata, battv, float(options.battv_warn), float(options.battv_crit), 11.0, 27.3)
#load
perfdata = "{0} 'Load'={1}%;{2};{3};{4};{5}".format(perfdata, load, float(options.load_warn), float(options.load_crit), 0.0, 100.0)
#battery charge
perfdata = "{0} 'Battery_Charge'={1}%;{2};{3};{4};{5}".format(perfdata, batt, float(options.bat_warn), float(options.bat_crit), 0.0, 100.0)
#battery time left only if user specified the warning and critical values
		if options.time_warn and options.time_crit:
perfdata = "{0} 'Battery_Time_Left'={1};{2};{3};;".format(perfdata, time, float(options.time_warn), float(options.time_crit))
else: perfdata=""
#return result
snips = [x for x in [snip_line_level,snip_battv, snip_batt, snip_load,snip_consum,snip_time,snip_detail ] if x != ""]
print "{0}: {1}{2}".format(get_return_str(), str(", ".join(snips)), perfdata)
exit(state)
def run_cmd(cmd=""):
#run the command, it's tricky!
output = subprocess.Popen("LANG=C {0}".format(cmd), shell=True, stdout=subprocess.PIPE).stdout.read()
LOGGER.debug("Output of '{0}' => '{1}".format(cmd, output))
return output
def get_apcaccess_data():
#get output of apcaccess
global ups_info
raw_data = run_cmd("apcaccess -h" + options.host)
raw_data = raw_data.splitlines()
for line in raw_data:
#parse lines to key/value dict
key=line[:line.find(":")].strip()
value=line[line.find(":")+1:].strip()
LOGGER.debug("Found key '{0}' with value '{1}'".format(key, value))
ups_info[key]=value
if __name__ == "__main__":
#define description, version and load parser
	desc='''%prog is used to check an APC UPS using the apcaccess utility.
https://github.com/HireChrisJohnston/nagios-apcupsd'''
parser = OptionParser(description=desc,version="%prog version 1.0.0")
gen_opts = OptionGroup(parser, "Generic options")
mon_opts = OptionGroup(parser, "Monitoring options")
thres_opts = OptionGroup(parser, "Threshold options")
parser.add_option_group(gen_opts)
parser.add_option_group(mon_opts)
parser.add_option_group(thres_opts)
#-d / --debug
gen_opts.add_option("-d", "--debug", dest="debug", default=False, action="store_true", help="enable debugging outputs")
#-P / --enable-perfdata
mon_opts.add_option("-P", "--enable-perfdata", dest="show_perfdata", default=False, action="store_true", help="enables performance data (default: no)")
#-w / --battv-warning
thres_opts.add_option("-w", "--battv-warning", dest="battv_warn", default=24, type=float, metavar="VOLTS", action="store", help="Defines battery voltage warning threshold (default: 24)")
#-W / --battv-critical
thres_opts.add_option("-W", "--battv-critical", dest="battv_crit", default=23.3, type=float, metavar="VOLTS", action="store", help="Defines battery voltage critical threshold (default: 23.3)")
#-c / --temp-critical
#thres_opts.add_option("-c", "--temp-critical", dest="temp_crit", default=55, type=float, metavar="TEMP", action="store", help="Defines temprature critical threshold(defalt: 55)")
#-l / --load-warning
thres_opts.add_option("-l", "--load-warning", dest="load_warn", default=50, type=int, metavar="PERCENT", action="store", help="Defines load warning threshold in percent (default: 50%)")
#-L / --load-critical
thres_opts.add_option("-L", "--load-critical", dest="load_crit", default=80, type=int, metavar="PERCENT", action="store", help="Defines load critical threshold in percent (default: 80%)")
#-b / --battery-warning
thres_opts.add_option("-b", "--battery-warning", dest="bat_warn", default=30, type=int, metavar="PERCENT", action="store", help="Defines battery load warning threshold in percent (default: 30%)")
#-B / --battery-critical
thres_opts.add_option("-B", "--battery-critical", dest="bat_crit", default=15, type=int, metavar="PERCENT", action="store", help="Defines battery load critical threshold in percent (default: 15%)")
#-t / --time-warning
thres_opts.add_option("-t", "--time-warning", dest="time_warn", type=int, metavar="TIME", action="store", help="Defines battery time left warning threshold in minutes (default: empty). If defined you must also define time-critical")
#-T / --time-critical
thres_opts.add_option("-T", "--time-critical", dest="time_crit", type=int, metavar="TIME", action="store", help="Defines battery time left critical threshold in minutes (default: empty). If defined you must also define time-warning")
#-u / --consumption-warning
thres_opts.add_option("-u", "--consumption-warning", dest="consum_warn", type=int, metavar="WATTS", action="store", help="Defines power consumption warning threshold in watts (default: empty)")
#-U / --consumption-critical
thres_opts.add_option("-U", "--consumption-critical", dest="consum_crit", type=int, metavar="WATTS", action="store", help="Defines power consumption critical threshold in watts (default: empty)")
#-H / --host
gen_opts.add_option("-H", "--host", dest="host", type="string", action="store", default="127.0.0.1", help="host of appcupsd")
#-X / --line-level
gen_opts.add_option("-X", "--line-level", dest="line_level", type="int", action="store", default="110", help="Volts of power outlet to detect no power if less than the line level")
#parse arguments
(options, args) = parser.parse_args()
#set logger level
if options.debug:
logging.basicConfig(level=logging.DEBUG)
LOGGER.setLevel(logging.DEBUG)
else:
logging.basicConfig()
LOGGER.setLevel(logging.INFO)
#debug outputs
LOGGER.debug("OPTIONS: {0}".format(options))
#get information
get_apcaccess_data()
#check UPS
check_ups()
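#Hedged usage examples (assumptions about the environment, not from above):
# ./check_apcaccess.py -H 127.0.0.1 -P
# ./check_apcaccess.py -H ups.example.org -t 10 -T 5 -u 300 -U 400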
|
python
|
from tiltfile_runner import run_tiltfile_func
from unittest.mock import Mock
import unittest
import pytest
import yaml
class DockerLocalTest(unittest.TestCase):
def test_delegates_to_local_resource_for_build(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context")
local_resource.assert_any_call(
"my_image_build", "docker build -t my_image -f Dockerfile ././path/to/build/context")
def test_delegates_to_local_resource_for_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context")
local_resource.assert_called_with("my_image",
"docker run --rm my_image",
resource_deps=["my_image_build"])
    def test_adds_optional_resource_deps_to_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
runtime_deps=["something", "else"])
local_resource.assert_called_with(
"my_image",
"docker run --rm my_image",
resource_deps=["something", "else", "my_image_build"])
def test_adds_optional_env_vars_to_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
env_vars={
"DOG": 1,
"CAT": "two"
})
local_resource.assert_called_with(
"my_image",
'docker run --rm -e DOG="1" -e CAT="two" my_image',
resource_deps=["my_image_build"])
def test_adds_optional_run_command_array(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
run_cmd=["sh", "echo", "hi"])
local_resource.assert_called_with(
"my_image",
'docker run --rm my_image sh echo hi',
resource_deps=["my_image_build"])
def test_overrides_dockerfile_for_build(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
dockerfile="another.Dockerfile",
context="././path/to/build/context")
local_resource.assert_any_call(
"my_image_build", "docker build -t my_image -f another.Dockerfile ././path/to/build/context")
class DockerRemoteTest(unittest.TestCase):
def test_delegates_to_docker_build_for_build(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="./path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my-image", "./path/to/build/context", dockerfile="Dockerfile")
def test_overrides_dockerfile_for_build(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
dockerfile="another.Dockerfile",
build_context="./path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my-image", "./path/to/build/context", dockerfile="another.Dockerfile")
def test_uses_repository_instead_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my.aws/repo", "././path/to/build/context", dockerfile="Dockerfile")
def test_generates_k8_yaml_job_with_defaults_for_image(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context")
expected_spec = yaml.safe_load("""
apiVersion: batch/v1
kind: Job
metadata:
name: my-image
spec:
parallelism: 1
completions: 1
backoffLimit: 0
template:
metadata:
annotations:
sidecar.istio.io/inject: "false"
spec:
containers:
- name: main
image: my-image
readinessProbe:
exec:
command:
- 'false'
initialDelaySeconds: 120
periodSeconds: 120
resources:
requests:
cpu: 1
memory: 2056Mi
limits:
cpu: 1
memory: 2056Mi
restartPolicy: Never
""")
assert k8s_yaml.call_count == 1
print(k8s_yaml.call_args[0][0])
assert yaml.safe_load(k8s_yaml.call_args[0][0]) == expected_spec
def test_can_overwrite_resource_requirements(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
cpu="2000m",
memory="4Gi",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"resources"] == yaml.safe_load("""
requests:
cpu: 2000m
memory: 4Gi
limits:
cpu: 2000m
memory: 4Gi
""")
def test_includes_image_repo_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"image"] == "my.aws/repo"
def test_defines_k8_job_namespace_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["metadata"]["namespace"] == "somewhere"
def test_creates_dependent_k8s_resource_for_yaml(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
runtime_deps=["a", "b"])
assert k8s_resource.call_count == 1
k8s_resource.assert_called_with("my-image", resource_deps=["a", "b"])
def test_passes_env_vars_to_container_spec(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
env_vars={
"DOG": 1,
"CAT": "two"
})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"env"] == yaml.safe_load("""
- name: DOG
value: 1
- name: CAT
value: two
""")
def test_creates_specified_readiness_probe(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe={"httpGet": {
"path": "/health"
}})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"readinessProbe"] == yaml.safe_load("""
httpGet:
path: /health
""")
def test_passes_command_array_to_container_spec(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
run_cmd=["bloop", "--something", "--another-thing"])
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0]["args"] == [
"bloop", "--something", "--another-thing"
]
def test_adds_custom_pod_annotations(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
annotations={
'custom': 'annotation'
})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["metadata"]["annotations"] == {
"sidecar.istio.io/inject": "false",
'custom': 'annotation'
}
def test_errors_if_resource_name_contains_invalid_char(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
with pytest.raises(Exception):
run_tiltfile_func(
"docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my_image",
build_context="././path/to/build/context",
)
class DockerTaskTest(unittest.TestCase):
def test_delegates_to_local_resource_for_build(self):
local_resource = Mock()
k8s_yaml = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={
'local_resource': local_resource,
'k8s_yaml': k8s_yaml
},
ref="my-image",
build_context="././path/to/build/context",
run_remote=False)
local_resource.assert_any_call(
"my-image_build", "docker build -t my-image -f Dockerfile ././path/to/build/context")
        assert k8s_yaml.call_count == 0
def test_strips_out_non_local_args(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={'local_resource': local_resource},
ref="my-image",
build_context="././path/to/build/context",
run_remote=False,
namespace="dave",
docker_repo="test",
readiness_probe="1234")
local_resource.assert_any_call(
"my-image_build", "docker build -t my-image -f Dockerfile ././path/to/build/context")
def test_runs_on_remote(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
run_remote=True,
build_context="././path/to/build/context",
readiness_probe=None)
assert k8s_yaml.call_count == 1
|
python
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File : embedding.py
# Author : Jiayuan Mao
# Email : [email protected]
# Date : 10/03/2018
#
# This file is part of NSCL-PyTorch.
# Distributed under terms of the MIT license.
import torch
import torch.nn as nn
__all__ = ['LearnedPositionalEmbedding']
class LearnedPositionalEmbedding(nn.Embedding):
"""This module learns positional embeddings up to a fixed maximum size.
Padding symbols are ignored, but it is necessary to specify whether padding
is added on the left side (left_pad=True) or right side (left_pad=False).
Adapted from: https://github.com/pytorch/fairseq/blob/master/fairseq/modules/learned_positional_embedding.py.
"""
def __init__(self, num_embeddings, embedding_dim, padding_idx=0, left_pad=False):
super().__init__(num_embeddings, embedding_dim, padding_idx)
self.left_pad = left_pad
def forward(self, input, incremental_state=None):
"""Input is expected to be of size [bsz x seqlen]."""
if incremental_state is not None:
# positions is the same for every token when decoding a single step
positions = input.data.new(1, 1).fill_(self.padding_idx + input.size(1))
else:
positions = make_positions(input.data, self.padding_idx, self.left_pad)
return super().forward(positions)
def max_positions(self):
"""Maximum number of supported positions."""
return self.num_embeddings - self.padding_idx - 1
def make_positions(tensor, padding_idx, left_pad):
"""Replace non-padding symbols with their position numbers.
Position numbers begin at padding_idx+1.
Padding symbols are ignored, but it is necessary to specify whether padding
is added on the left side (left_pad=True) or right side (left_pad=False).
"""
max_pos = padding_idx + 1 + tensor.size(1)
if not hasattr(make_positions, 'range_buf'):
make_positions.range_buf = tensor.new()
make_positions.range_buf = make_positions.range_buf.type_as(tensor)
if make_positions.range_buf.numel() < max_pos:
torch.arange(padding_idx + 1, max_pos, out=make_positions.range_buf)
mask = tensor.ne(padding_idx)
positions = make_positions.range_buf[:tensor.size(1)].expand_as(tensor)
if left_pad:
positions = positions - mask.size(1) + mask.long().sum(dim=1).unsqueeze(1)
return tensor.clone().masked_scatter_(mask, positions[mask])
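# Hedged usage sketch (an assumption, not part of the original module): a
# batch of right-padded token ids (padding_idx=0) maps to positional
# embeddings of shape [bsz, seqlen, embedding_dim].
def _demo_learned_positional_embedding():
    emb = LearnedPositionalEmbedding(num_embeddings=64, embedding_dim=16,
                                     padding_idx=0, left_pad=False)
    tokens = torch.tensor([[5, 6, 7, 0, 0],
                           [3, 4, 0, 0, 0]])  # 0 marks padding
    return emb(tokens)  # -> shape torch.Size([2, 5, 16])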
|
python
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.plant_heating_and_cooling_equipment import HeatPumpWaterToWaterEquationFitHeating
log = logging.getLogger(__name__)
class TestHeatPumpWaterToWaterEquationFitHeating(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_heatpumpwatertowaterequationfitheating(self):
pyidf.validation_level = ValidationLevel.error
obj = HeatPumpWaterToWaterEquationFitHeating()
# alpha
var_name = "Name"
obj.name = var_name
# node
var_source_side_inlet_node_name = "node|Source Side Inlet Node Name"
obj.source_side_inlet_node_name = var_source_side_inlet_node_name
# node
var_source_side_outlet_node_name = "node|Source Side Outlet Node Name"
obj.source_side_outlet_node_name = var_source_side_outlet_node_name
# node
var_load_side_inlet_node_name = "node|Load Side Inlet Node Name"
obj.load_side_inlet_node_name = var_load_side_inlet_node_name
# node
var_load_side_outlet_node_name = "node|Load Side Outlet Node Name"
obj.load_side_outlet_node_name = var_load_side_outlet_node_name
# real
var_rated_load_side_flow_rate = 0.0001
obj.rated_load_side_flow_rate = var_rated_load_side_flow_rate
# real
var_rated_source_side_flow_rate = 0.0001
obj.rated_source_side_flow_rate = var_rated_source_side_flow_rate
# real
var_rated_heating_capacity = 0.0001
obj.rated_heating_capacity = var_rated_heating_capacity
# real
var_rated_heating_power_consumption = 0.0001
obj.rated_heating_power_consumption = var_rated_heating_power_consumption
# real
var_heating_capacity_coefficient_1 = 10.1
obj.heating_capacity_coefficient_1 = var_heating_capacity_coefficient_1
# real
var_heating_capacity_coefficient_2 = 11.11
obj.heating_capacity_coefficient_2 = var_heating_capacity_coefficient_2
# real
var_heating_capacity_coefficient_3 = 12.12
obj.heating_capacity_coefficient_3 = var_heating_capacity_coefficient_3
# real
var_heating_capacity_coefficient_4 = 13.13
obj.heating_capacity_coefficient_4 = var_heating_capacity_coefficient_4
# real
var_heating_capacity_coefficient_5 = 14.14
obj.heating_capacity_coefficient_5 = var_heating_capacity_coefficient_5
# real
var_heating_compressor_power_coefficient_1 = 15.15
obj.heating_compressor_power_coefficient_1 = var_heating_compressor_power_coefficient_1
# real
var_heating_compressor_power_coefficient_2 = 16.16
obj.heating_compressor_power_coefficient_2 = var_heating_compressor_power_coefficient_2
# real
var_heating_compressor_power_coefficient_3 = 17.17
obj.heating_compressor_power_coefficient_3 = var_heating_compressor_power_coefficient_3
# real
var_heating_compressor_power_coefficient_4 = 18.18
obj.heating_compressor_power_coefficient_4 = var_heating_compressor_power_coefficient_4
# real
var_heating_compressor_power_coefficient_5 = 19.19
obj.heating_compressor_power_coefficient_5 = var_heating_compressor_power_coefficient_5
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].name, var_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].source_side_inlet_node_name, var_source_side_inlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].source_side_outlet_node_name, var_source_side_outlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].load_side_inlet_node_name, var_load_side_inlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].load_side_outlet_node_name, var_load_side_outlet_node_name)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_load_side_flow_rate, var_rated_load_side_flow_rate)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_source_side_flow_rate, var_rated_source_side_flow_rate)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_heating_capacity, var_rated_heating_capacity)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_heating_power_consumption, var_rated_heating_power_consumption)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_1, var_heating_capacity_coefficient_1)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_2, var_heating_capacity_coefficient_2)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_3, var_heating_capacity_coefficient_3)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_4, var_heating_capacity_coefficient_4)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_5, var_heating_capacity_coefficient_5)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_1, var_heating_compressor_power_coefficient_1)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_2, var_heating_compressor_power_coefficient_2)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_3, var_heating_compressor_power_coefficient_3)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_4, var_heating_compressor_power_coefficient_4)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_5, var_heating_compressor_power_coefficient_5)
|
python
|
#!/usr/bin/python
"""
Runs every day as crontab task to pull down previous day's log from
Google app engine, and uploads it to S3.
"""
import os
import sys
import shutil
import subprocess
import string
from pytz import timezone
from datetime import datetime, timedelta
settings = {
'appcfg' : '<path to gae sdk>/bin/appcfg.sh',
'email' : '<gae account>',
'pwd' : '<gae account password>',
'outdir' : '/tmp/sortbox',
'repo' : '[email protected]:mustpax/sortbox.git',
'bucket' : '<S3 bucket for logs>',
'access_key' : '',
'secret_key' : '',
}
outdir = settings['outdir']
sortboxdir = os.path.join(outdir, 'sortbox')
logdir = os.path.join(outdir, 'logs')
pacific_tz = timezone('US/Pacific')
def cleanup():
"""
Deletes tmp dir.
"""
if os.path.exists(outdir):
print "Deleted %s" % outdir
shutil.rmtree(outdir)
def clone_repo():
"""
Clones the remote sortbox repository.
"""
cleanup()
subprocess.call("git clone %s %s" % (settings['repo'], sortboxdir), shell=True)
def build_war():
def touch(fname, times=None):
"""
Equivalent to unix touch command
"""
with file(fname, 'a'):
os.utime(fname, times)
os.chdir(sortboxdir)
# Switch to prod branch
subprocess.call("git checkout prod", shell=True)
# Create secret.conf
secret = os.path.join(sortboxdir, 'conf', 'secret.conf')
touch(secret)
print "Make all"
# Build all
subprocess.call("make all", shell=True)
war_path = os.path.join(outdir, "sortbox.war")
print "Build war file"
# Build war file
subprocess.call("play war -o %s" % war_path, shell=True)
if not os.path.exists(war_path):
print "Failed to create war file"
exit(2)
def export_log():
"""
Exports logs from the last 2 days from GAE
"""
os.chdir(outdir)
if not os.path.exists(logdir):
os.mkdir(logdir)
target = os.path.join(logdir, "raw.txt")
    # Export log for the last 2 days
subprocess.call("echo %s | %s --num_days=1 --email=%s --severity=1 request_logs sortbox.war %s" \
% (settings['pwd'], settings['appcfg'], settings['email'], target), shell=True)
logfile = os.path.join(logdir, 'raw.txt')
if not os.path.exists(logfile):
print "Failed to download log file"
exit(2)
print "Saved exported log as %s" % logfile
def format_date(date):
format = "%d/%b/%Y"
return date.strftime(format)
def preprocess_log():
os.chdir(logdir)
today = format_date(datetime.now(pacific_tz))
# Remove entries from the 1st day
subprocess.call("grep -va %s raw.txt > log.tmp.txt" % today, shell=True)
# Replace null byte delimiters with new line character
subprocess.call("tr '\\0' '\n' < log.tmp.txt > log.tmp2.txt", shell=True);
# Remove all lines that starts with ':'
subprocess.call("sed '/^:/d' log.tmp2.txt > log.txt", shell=True);
print "Saved preprocessed log as %s" % os.path.join(logdir, 'log.txt')
def upload_log():
"""
Uploads log file to S3.
"""
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from itertools import takewhile
yesterday = datetime.now(pacific_tz) - timedelta(1)
logfile = "log_%s.txt" % string.replace(format_date(yesterday), '/', '_')
conn = S3Connection(settings['access_key'], settings['secret_key'])
bucket = conn.create_bucket(settings['bucket'])
k = bucket.get_key(logfile)
if not k:
k = Key(bucket)
k.key = logfile
os.chdir(logdir)
k.set_contents_from_filename('log.txt')
bucket.set_acl('public-read', k)
print "Uploaded log file as %s to S3" % k.name
else:
print "Log file already uploaded."
def pull_log():
now = datetime.now()
print "Start log export: ", now
clone_repo()
build_war()
export_log()
preprocess_log()
upload_log()
cleanup()
def main():
import time
start = time.time()
pull_log()
duration = time.time() - start
print "Finished in %d second(s)." % duration
if __name__ == "__main__":
main()
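# Hedged deployment note (paths are placeholders, not from the original):
# a daily crontab entry for this script could look like:
#   0 1 * * * /usr/bin/python /opt/sortbox/pull_gae_logs.py >> /var/log/sortbox_pull.log 2>&1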
|
python
|
#!/usr/bin/python
# Copyright (c) 2017, 2018 Michael De La Rue
# Copyright (c) 2017, 2018 Will Thames
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: rds_instance_facts
version_added: "2.6"
short_description: obtain facts about one or more RDS instances
description:
- obtain facts about one or more RDS instances
options:
db_instance_identifier:
description:
- The RDS instance's unique identifier.
required: false
aliases:
- id
filters:
description:
- A filter that specifies one or more DB instances to describe.
See U(https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html)
requirements:
- "python >= 2.7"
- "boto3"
author:
- "Will Thames (@willthames)"
- "Michael De La Rue (@mikedlr)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Get facts about an instance
- rds_instance_facts:
db_instance_identifier: new-database
register: new_database_facts
# Get all RDS instances
- rds_instance_facts:
'''
RETURN = '''
instances:
description: List of RDS instances
returned: always
type: complex
contains:
allocated_storage:
description: Gigabytes of storage allocated to the database
returned: always
type: int
sample: 10
auto_minor_version_upgrade:
description: Whether minor version upgrades happen automatically
returned: always
type: bool
sample: true
availability_zone:
description: Availability Zone in which the database resides
returned: always
type: str
sample: us-west-2b
backup_retention_period:
description: Days for which backups are retained
returned: always
type: int
sample: 7
ca_certificate_identifier:
description: ID for the CA certificate
returned: always
type: str
sample: rds-ca-2015
copy_tags_to_snapshot:
description: Whether DB tags should be copied to the snapshot
returned: always
type: bool
sample: false
db_instance_arn:
description: ARN of the database instance
returned: always
type: str
sample: arn:aws:rds:us-west-2:111111111111:db:helloworld-rds
db_instance_class:
description: Instance class of the database instance
returned: always
type: str
sample: db.t2.small
db_instance_identifier:
description: Database instance identifier
returned: always
type: str
sample: helloworld-rds
db_instance_port:
description: Port used by the database instance
returned: always
type: int
sample: 0
db_instance_status:
description: Status of the database instance
returned: always
type: str
sample: available
db_name:
description: Name of the database
returned: always
type: str
sample: management
db_parameter_groups:
description: List of database parameter groups
returned: always
type: complex
contains:
db_parameter_group_name:
description: Name of the database parameter group
returned: always
type: str
sample: psql-pg-helloworld
parameter_apply_status:
description: Whether the parameter group has been applied
returned: always
type: str
sample: in-sync
db_security_groups:
description: List of security groups used by the database instance
returned: always
type: list
sample: []
db_subnet_group:
description: list of subnet groups
returned: always
type: complex
contains:
db_subnet_group_description:
description: Description of the DB subnet group
returned: always
type: str
sample: My database subnet group
db_subnet_group_name:
description: Name of the database subnet group
returned: always
type: str
sample: my-subnet-group
subnet_group_status:
description: Subnet group status
returned: always
type: str
sample: Complete
subnets:
description: List of subnets in the subnet group
returned: always
type: complex
contains:
subnet_availability_zone:
description: Availability zone of the subnet
returned: always
type: complex
contains:
name:
description: Name of the availability zone
returned: always
type: str
sample: us-west-2c
subnet_identifier:
description: Subnet ID
returned: always
type: str
sample: subnet-abcd1234
subnet_status:
description: Subnet status
returned: always
type: str
sample: Active
vpc_id:
description: VPC id of the subnet group
returned: always
type: str
sample: vpc-abcd1234
dbi_resource_id:
description: AWS Region-unique, immutable identifier for the DB instance
returned: always
type: str
sample: db-AAAAAAAAAAAAAAAAAAAAAAAAAA
domain_memberships:
description: List of domain memberships
returned: always
type: list
sample: []
endpoint:
description: Database endpoint
returned: always
type: complex
contains:
address:
description: Database endpoint address
returned: always
type: str
sample: helloworld-rds.ctrqpe3so1sf.us-west-2.rds.amazonaws.com
hosted_zone_id:
description: Route53 hosted zone ID
returned: always
type: str
sample: Z1PABCD0000000
port:
description: Database endpoint port
returned: always
type: int
sample: 5432
engine:
description: Database engine
returned: always
type: str
sample: postgres
engine_version:
description: Database engine version
returned: always
type: str
sample: 9.5.10
iam_database_authentication_enabled:
description: Whether database authentication through IAM is enabled
returned: always
type: bool
sample: false
instance_create_time:
description: Date and time the instance was created
returned: always
type: str
sample: '2017-10-10T04:00:07.434000+00:00'
kms_key_id:
description: KMS Key ID
returned: always
type: str
sample: arn:aws:kms:us-west-2:111111111111:key/abcd1234-0000-abcd-1111-0123456789ab
latest_restorable_time:
description: Latest time to which a database can be restored with point-in-time restore
returned: always
type: str
sample: '2018-05-17T00:03:56+00:00'
license_model:
description: License model
returned: always
type: str
sample: postgresql-license
master_username:
description: Database master username
returned: always
type: str
sample: dbadmin
monitoring_interval:
description: Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance
returned: always
type: int
sample: 0
multi_az:
description: Whether Multi-AZ is on
returned: always
type: bool
sample: false
option_group_memberships:
description: List of option groups
returned: always
type: complex
contains:
option_group_name:
description: Option group name
returned: always
type: str
sample: default:postgres-9-5
status:
description: Status of option group
returned: always
type: str
sample: in-sync
pending_modified_values:
description: Modified values pending application
returned: always
type: complex
contains: {}
performance_insights_enabled:
description: Whether performance insights are enabled
returned: always
type: bool
sample: false
preferred_backup_window:
description: Preferred backup window
returned: always
type: str
sample: 04:00-05:00
preferred_maintenance_window:
description: Preferred maintenance window
returned: always
type: str
sample: mon:05:00-mon:05:30
publicly_accessible:
description: Whether the DB is publicly accessible
returned: always
type: bool
sample: false
read_replica_db_instance_identifiers:
description: List of database instance read replicas
returned: always
type: list
sample: []
storage_encrypted:
description: Whether the storage is encrypted
returned: always
type: bool
sample: true
storage_type:
description: Storage type of the Database instance
returned: always
type: str
sample: gp2
tags:
description: Tags used by the database instance
returned: always
type: complex
contains: {}
vpc_security_groups:
description: List of VPC security groups
returned: always
type: complex
contains:
status:
description: Status of the VPC security group
returned: always
type: str
sample: active
vpc_security_group_id:
description: VPC Security Group ID
returned: always
type: str
sample: sg-abcd1234
'''
from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, boto3_tag_list_to_ansible_dict, AWSRetry, camel_dict_to_snake_dict
try:
import botocore
except ImportError:
pass # handled by AnsibleAWSModule
def instance_facts(module, conn):
instance_name = module.params.get('db_instance_identifier')
filters = module.params.get('filters')
params = dict()
if instance_name:
params['DBInstanceIdentifier'] = instance_name
if filters:
params['Filters'] = ansible_dict_to_boto3_filter_list(filters)
paginator = conn.get_paginator('describe_db_instances')
try:
results = paginator.paginate(**params).build_full_result()['DBInstances']
except is_boto3_error_code('DBInstanceNotFound'):
results = []
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, "Couldn't get instance information")
for instance in results:
try:
instance['Tags'] = boto3_tag_list_to_ansible_dict(conn.list_tags_for_resource(ResourceName=instance['DBInstanceArn'],
aws_retry=True)['TagList'])
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, "Couldn't get tags for instance %s" % instance['DBInstanceIdentifier'])
return dict(changed=False, instances=[camel_dict_to_snake_dict(instance, ignore_list=['Tags']) for instance in results])
def main():
argument_spec = dict(
db_instance_identifier=dict(aliases=['id']),
filters=dict(type='dict')
)
module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
conn = module.client('rds', retry_decorator=AWSRetry.jittered_backoff(retries=10))
module.exit_json(**instance_facts(module, conn))
if __name__ == '__main__':
main()
|
python
|
# window.py
#
# Copyright 2020 Herpiko Dwi Aguno
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import time
from gi.repository import Gtk
from gi.repository import Gio
from gi.repository import GLib
import threading
@Gtk.Template(resource_path='/org/blankon/blankonWelcome/window.ui')
class BlankonWelcomeWindow(Gtk.ApplicationWindow):
__gtype_name__ = 'BlankonWelcomeWindow'
SkipBackButton = Gtk.Template.Child()
NextButton = Gtk.Template.Child()
Stacks = Gtk.Template.Child()
SpinnerBox = Gtk.Template.Child()
WelcomeBox = Gtk.Template.Child()
MainBox = Gtk.Template.Child()
SeeingBox = Gtk.Template.Child()
HearingBox = Gtk.Template.Child()
TypingBox = Gtk.Template.Child()
PointingBox = Gtk.Template.Child()
SeeingButton = Gtk.Template.Child()
HearingButton = Gtk.Template.Child()
TypingButton = Gtk.Template.Child()
PointingButton = Gtk.Template.Child()
SeeingMagnifierSwitch = Gtk.Template.Child()
SeeingLargeTextSwitch = Gtk.Template.Child()
SeeingHighContrastSwitch = Gtk.Template.Child()
currentView = "welcome"
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.Stacks.set_visible_child(self.WelcomeBox)
self.NextButton.connect("clicked", self.a11y)
self.SkipBackButton.connect("clicked", self.do_skip_back)
self.SeeingButton.connect("clicked", self.show_seeing_box)
self.HearingButton.connect("clicked", self.show_hearing_box)
self.TypingButton.connect("clicked", self.show_typing_box)
self.PointingButton.connect("clicked", self.show_pointing_box)
self.SeeingMagnifierSwitch.connect("state-set", self.toggle_magnifier)
self.SeeingLargeTextSwitch.connect("state-set", self.toggle_large_text)
self.SeeingHighContrastSwitch.connect("state-set", self.toggle_high_contrast)
# Set default values
setting = Gio.Settings.new("org.gnome.desktop.interface")
current_value = setting.get_value("gtk-theme")
print(current_value)
if ("Contrast" in current_value.get_string()):
self.SeeingHighContrastSwitch.set_active(True)
def do_skip_back(self, button):
if self.currentView == "welcome" or self.currentView == "a11y":
self.NextButton.hide()
self.SkipBackButton.hide()
self.Stacks.set_visible_child(self.SpinnerBox)
# Use threading to avoid blocking UI
thread = threading.Thread(target=self.send_analytic)
thread.daemon = True
thread.start()
else:
self.Stacks.set_visible_child(self.MainBox)
self.SkipBackButton.set_label("Finish")
self.currentView = "a11y"
def a11y(self, button):
self.NextButton.hide()
self.Stacks.set_visible_child(self.MainBox)
self.SkipBackButton.set_label("Finish")
self.currentView = "a11y"
def show_seeing_box(self, button):
self.SkipBackButton.show()
self.currentView = "seeing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.SeeingBox)
def show_hearing_box(self, button):
self.SkipBackButton.show()
self.currentView = "hearing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.HearingBox)
def show_typing_box(self, button):
self.SkipBackButton.show()
self.currentView = "typing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.TypingBox)
def show_pointing_box(self, button):
self.SkipBackButton.show()
self.currentView = "pointing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.PointingBox)
def toggle_magnifier(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.a11y.applications")
bool_value = GLib.Variant("b", state)
setting.set_value("screen-magnifier-enabled", bool_value)
def toggle_high_contrast(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.interface")
default_value = setting.get_default_value("gtk-theme")
current_value = setting.get_value("gtk-theme")
high_contrast_value = GLib.Variant("s", "HighContrast")
print(state)
print(default_value)
print(current_value)
if (state):
print(high_contrast_value)
setting.set_value("gtk-theme", high_contrast_value)
else:
setting.set_value("gtk-theme", default_value)
def toggle_large_text(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.interface")
scale_value = GLib.Variant("d", 1.0)
if (state):
scale_value = GLib.Variant("d", 1.5)
setting.set_value("text-scaling-factor", scale_value)
def send_analytic(self):
print("Send analytic data...")
time.sleep(1)
print("Data sent")
self.close()
|
python
|
'''
Module for performing stable matching.
It solves an instance of the stable marriage problem and is used
as a utility for Text-Media Matching.
'''
class StableMatcher:
    ''' Implements stable matching using the Gale-Shapley algorithm. '''
def __init__(
self,
media_preference_for_sentence,
sentence_preference_for_media,
set_size):
self.set_size = set_size
self.media_preference_for_sentence = media_preference_for_sentence
self.sentence_preference_for_media = sentence_preference_for_media
def get_matching(self):
''' returns the matching as a list of 2-tuples'''
return self.__gale_shapley_matching()
def __media_rank_in_sentence_preference(self, sentence_index, media_index):
return self.media_preference_for_sentence[sentence_index].index(
media_index)
def __sentence_has_better_preference(
self, sentence_index, unmatched_index):
return self.__media_rank_in_sentence_preference(
sentence_index, unmatched_index) < \
self.__media_rank_in_sentence_preference(
sentence_index,
self.media_matched_for_sentence[sentence_index])
def __gale_shapley_matching(self):
        '''
        Finds the stable matching between the text and media.
        Given two (n, n) preference matrices for the sets of sentences
        and media, runs the Gale-Shapley matching algorithm.
        Returns: a list of tuples, where each tuple (x, y) means
            x = index of sentence
            y = index of media
        '''
# Make the matching optimal for the Media
# -1 denotes it is currently unmatched
self.sentence_matched_for_media = [-1] * self.set_size
self.media_matched_for_sentence = [-1] * self.set_size
self.count_of_unmatched_media = self.set_size
while self.count_of_unmatched_media > 0:
unmatched_media_index = -1 # no index found currently
for i in range(self.set_size):
if self.sentence_matched_for_media[i] == -1:
unmatched_media_index = i
break
for i in self.sentence_preference_for_media[unmatched_media_index]:
# the sentence is unmatched
if self.media_matched_for_sentence[i] == -1:
# we can match the sentence directly
self.media_matched_for_sentence[i] = unmatched_media_index
self.sentence_matched_for_media[unmatched_media_index] = i
self.count_of_unmatched_media -= 1
break
if self.__sentence_has_better_preference(
i,
unmatched_media_index):
# i prefers the current media better
# unmatch media currently matched for sentence i
self.sentence_matched_for_media[
self.media_matched_for_sentence[i]] = -1
self.sentence_matched_for_media[unmatched_media_index] = i
self.media_matched_for_sentence[i] = unmatched_media_index
break
matchings = [(self.sentence_matched_for_media[i], i)
for i in range(self.set_size)]
return matchings
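# Minimal usage sketch (illustrative data): two sentences and two media items,
# each ranking the other set by index 0..n-1.
if __name__ == '__main__':
    media_pref_for_sentence = [[0, 1], [1, 0]]   # sentence i's ranking of media
    sentence_pref_for_media = [[0, 1], [0, 1]]   # media j's ranking of sentences
    matcher = StableMatcher(media_pref_for_sentence, sentence_pref_for_media, 2)
    print(matcher.get_matching())  # -> [(0, 0), (1, 1)] as (sentence, media) pairs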
|
python
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
csv_files = os.listdir(os.getcwd())
csv_files = [f for f in csv_files if "Line" in f and ".csv" in f]
# Function to determine significance
def isSignificant(xval,yval, xthr = 1, ythr = 2):
if abs(xval) >= xthr and abs(yval) >= ythr:
return True
else:
return False
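# e.g. isSignificant(1.5, 2.3) -> True; isSignificant(0.5, 2.3) -> False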
# Read Entrez -> Name map
entrezToName = pd.read_csv("EntrezToNameMap.csv", header=0)
for csv_file in csv_files:
print("Processing file {}".format(csv_file))
df = pd.read_csv(csv_file, header=0)
df = df.rename(columns={"Unnamed: 0":"gename"})
x = df['log2FoldChange'].values
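    # A small epsilon keeps padj values of 0 from producing -inf under log10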
y = df['padj'].values + 1e-5
y = -np.log10(y)
significant_idx = [i for i in range(len(x)) if isSignificant(x[i],y[i])]
nonsignificant_idx = [i for i in range(len(x)) if not isSignificant(x[i],y[i])]
# Plot Volcano Plot
plt.figure(figsize=(8,8))
plt.scatter(x[significant_idx], y[significant_idx], c='red', alpha=0.35, label='Significant')
plt.scatter(x[nonsignificant_idx], y[nonsignificant_idx], c='blue', alpha=0.35, label='Nonsignificant')
plt.vlines(-1, 0, 5, linestyles='dashed')
plt.vlines(1, 0, 5, linestyles='dashed')
plt.hlines(2, min(x), max(x), linestyles='dashed')
plt.xlabel('Log2 Fold Change')
plt.ylabel('-log10 (adjusted p-value)')
plt.legend()
plt.savefig(csv_file.replace(".csv","_volcanoPlot.pdf"))
# Save names of significant differentially expressed genes
tmp_df = df.iloc[significant_idx,:].reset_index(drop=True)
final_df = pd.merge(entrezToName, tmp_df, on="gename")
final_df['keggGeneName'] = ["cge:" + str(id) for id in list(final_df['geneid'])] # Required for pathway analysis with ROntoTools
final_df.to_csv(csv_file.replace(".csv","_SignificantGenes.csv"), index=False)
|
python
|
x, y = map(int, input().split(" "))
if x == 0 and y == 0:
print("origem")
elif x > 0 and y > 0:
print("1 quadrante")
elif x < 0 and y > 0:
print("2 quadrante")
elif x < 0 and y < 0:
print("3 quadrante")
elif x > 0 and y < 0:
print("4 quadrante")
elif x == 0 and y != 0:
print("Eixo y")
elif x != 0 and y == 0:
print("Eixo x")
else:
print("ta errado")
|
python
|
from django.conf.urls import url
from web.views import get_index, fetch
urlpatterns = [
url(r'^$', get_index),
url(r'^fetch/$', fetch),
]
|
python
|
#!/usr/bin/env python
def part1(numbers, cards):
for number in numbers:
for card in cards:
mark(card, number)
if has_won(card):
return score(number, card)
def part2(numbers, cards):
for number in numbers:
iter_cards = cards.copy()
for card in iter_cards:
mark(card, number)
if has_won(card) and len(cards) > 1:
cards.remove(card)
elif has_won(card) and len(cards) == 1:
return score(number, card)
def score(number, card):
result = 0
for line in card:
for cell in line:
if cell != 'X':
result += cell
return result * number
def mark(card, value):
for y, line in enumerate(card):
for x, number in enumerate(line):
if number == value:
card[y][x] = 'X'
def has_won(card):
for line in card:
if all([cell == 'X' for cell in line]):
return True
for col in range(0, len(card[0])):
if all([line[col] == 'X' for line in card]):
return True
return False
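# Worked example (illustrative): on the 2x2 card [[1, 2], [3, 4]], marking 1 and
# 2 completes the first row, so has_won() is True; score(2, card) then sums the
# unmarked cells (3 + 4) and multiplies by the last number drawn: 7 * 2 = 14.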
def parse():
with open("../input/input04.txt") as f:
numbers = [int(word) for word in f.readline().split(',')]
cards = []
for card in f.read().split("\n\n"):
new_card = [line.split() for line in card.strip().split('\n')]
for y, line in enumerate(new_card):
for x, cell in enumerate(line):
new_card[y][x] = int(cell)
cards.append(new_card)
return (numbers, cards)
if __name__ == '__main__':
(numbers, cards) = parse()
print("part1 =", part1(numbers, cards))
print("part2 =", part2(numbers, cards))
|
python
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from consts.district_type import DistrictType
from consts.event_type import EventType
from datetime import datetime
from database.district_query import DistrictsInYearQuery
from database.event_query import DistrictEventsQuery
from google.appengine.ext import ndb
from database.team_query import DistrictTeamsQuery
from helpers.district_helper import DistrictHelper
from helpers.event_helper import EventHelper
from helpers.model_to_dict import ModelToDict
from models import team
from models.district import District
from models.district_team import DistrictTeam
from models.event import Event
from models.event_team import EventTeam
from models.team import Team
class ApiDistrictControllerBase(ApiBaseController):
def _set_district(self, district, year):
self.district_abbrev = district
self.year = year
@property
def _validators(self):
return [("district_id_validator", "{}{}".format(self.year, self.district_abbrev))]
class ApiDistrictListController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_list_controller_{}" # year
CACHE_VERSION = 3
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictListController, self).__init__(*args, **kw)
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.year)
@property
def _validators(self):
'''
No validators for this endpoint
'''
return []
def _track_call(self, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/list', year)
def _render(self, year=None):
all_districts = DistrictsInYearQuery(self.year).fetch()
districts = list()
for district in all_districts:
dictionary = dict()
dictionary["key"] = district.abbreviation
dictionary["name"] = district.display_name
districts.append(dictionary)
return json.dumps(districts, ensure_ascii=True)
class ApiDistrictEventsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_events_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictEventsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/events', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
events = DistrictEventsQuery('{}{}'.format(self.year, self.district_abbrev)).fetch()
events = [ModelToDict.eventConverter(event) for event in events]
return json.dumps(events, ensure_ascii=True)
class ApiDistrictRankingsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_rankings_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 61
def __init__(self, *args, **kw):
super(ApiDistrictRankingsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/rankings', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
if self.year < 2009:
return json.dumps([], ensure_ascii=True)
events_future = DistrictEventsQuery(District.renderKeyName(self.year, district_abbrev)).fetch_async()
        district_teams_future = DistrictTeamsQuery("{}{}".format(self.year, district_abbrev)).fetch_async()
events = events_future.get_result()
if not events:
return json.dumps([], ensure_ascii=True)
EventHelper.sort_events(events)
team_totals = DistrictHelper.calculate_rankings(events, district_teams_future.get_result(), self.year)
rankings = []
current_rank = 1
for key, points in team_totals:
point_detail = {}
point_detail["rank"] = current_rank
point_detail["team_key"] = key
point_detail["event_points"] = {}
for event in points["event_points"]:
event_key = event[0].key_name
point_detail["event_points"][event_key] = event[1]
event_details = Event.get_by_id(event_key)
point_detail["event_points"][event[0].key_name]['district_cmp'] = True if event_details.event_type_enum == EventType.DISTRICT_CMP else False
if "rookie_bonus" in points:
point_detail["rookie_bonus"] = points["rookie_bonus"]
else:
point_detail["rookie_bonus"] = 0
point_detail["point_total"] = points["point_total"]
rankings.append(point_detail)
current_rank += 1
return json.dumps(rankings)
class ApiDistrictTeamsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_teams_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictTeamsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/teams', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
district_teams = DistrictTeamsQuery('{}{}'.format(self.year, self.district_abbrev)).fetch()
district_teams_dict = [ModelToDict.teamConverter(team) for team in district_teams]
return json.dumps(district_teams_dict, ensure_ascii=True)
|
python
|
# coding=utf-8
from sys import exit
from pytun import *
from scapy.all import *
from MANGLE import *
from FenrirFangs import *
from Autoconf import *
import socket
import select
import time
from struct import *
from binascii import hexlify,unhexlify
class FENRIR:
def __init__(self):
if os.geteuid() != 0:
exit("You need root privileges to play with sockets !")
self.isRunning = False
self.tap = None
self.s = None
self.MANGLE = None
self.hostip = '10.0.0.5'
self.hostmac = '\x5c\x26\x0a\x13\x77\x8a'
#self.hostmac = '\x00\x1d\xe6\xd8\x6f\x02'
self.hostmacStr = '5c:26:0a:13:77:8a'
#self.hostmacStr = "00:1d:e6:d8:6f:02"
self.verbosity = 3
self.scksnd1 = None
self.scksnd2 = None
self.Autoconf = Autoconf()
self.FenrirFangs = FenrirFangs(self.verbosity) #FenrirFangs instance
self.pktsCount = 0
self.LhostIface = 'em1'
self.switchIface = 'eth0'
def createTap(self):
self.tap = TunTapDevice(flags=IFF_TAP|IFF_NO_PI, name='FENRIR')
self.tap.addr = "10.0.0.42"
self.tap.netmask = '255.0.0.0'
self.tap.mtu = 1500
self.tap.hwaddr = '\x00\x11\x22\x33\x44\x55'
self.hwaddrStr = "00:11:22:33:44:55"
self.tap.persist(True)
self.tap.up()
def downTap(self):
if self.tap != None:
self.tap.down()
def bindAllIface(self):
self.s = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.ntohs(0x0003))
def setAttribute(self, attributeName, attributeValue):
if attributeName == "host_ip":
self.hostip = attributeValue
elif attributeName == "host_mac":
self.hostmac = attributeValue
tempStr = hexlify(attributeValue).decode('ascii')
self.hostmacStr = tempStr[:2] + ":" + tempStr[2:4] + ":" + tempStr[4:6] + ":" + tempStr[6:8] + ":" + tempStr[8:10] + ":" + tempStr[-2:]
elif attributeName == "verbosity":
if attributeValue >= 0 and attributeValue <= 3:
self.verbosity = attributeValue
self.FenrirFangs.changeVerbosity(self.verbosity)
else:
return False
elif attributeName == "netIface":
self.switchIface = str(attributeValue)
self.Autoconf.sockNetwork = self.switchIface
elif attributeName == "hostIface":
self.LhostIface = str(attributeValue)
self.Autoconf.ifaceHost = self.LhostIface
else:
return False
def chooseIface(self,pkt) :
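        # Frames addressed to the TAP's MAC go to FENRIR; frames for the host MAC
        # (or broadcast/STP frames not sent by the host) go to the host interface;
        # everything else is forwarded out of the switch interface.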
if pkt[Ether].dst == self.hwaddrStr :
return 'FENRIR'
elif pkt[Ether].dst == self.hostmacStr or ((pkt[Ether].dst == 'ff:ff:ff:ff:ff:ff' or pkt[Ether].dst == '01:80:c2:00:00:03') and pkt[Ether].src != self.hostmacStr) :
#elif pkt[Ether].dst == 'f8:ca:b8:31:c0:2c' or ((pkt[Ether].dst == 'ff:ff:ff:ff:ff:ff' or pkt[Ether].dst == '01:80:c2:00:00:03') and pkt[Ether].src != 'f8:ca:b8:31:c0:2c') :
return self.LhostIface
else :
return self.switchIface
def sendeth2(self, raw, interface):
self.scksnd1 = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)
self.scksnd2 = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)
self.scksnd1.bind((self.LhostIface, 0))
self.scksnd2.bind((self.switchIface, 0))
if interface == self.LhostIface:
# This is a dirty hotfix for the fragmentation problem; will be fixed later
try:
self.scksnd1.send(raw)
except:
pass
else :
try:
self.scksnd2.send(raw)
except:
pass
return
def initAutoconf(self):
self.hostip, self.hostmacStr = self.Autoconf.startAutoconf()
def initMANGLE(self, stop_event):
self.bindAllIface()
inputs = [self.s, self.tap]
last_mangled_request = []
        mycount = 1 ## DECOMMISSIONED: keeps the legacy NBT-NS/LLMNR/SMB branches below disabled
self.MANGLE = MANGLE(self.hostip, self.tap.addr, self.hostmacStr, self.hwaddrStr, self.verbosity) # MANGLE instance init # ip host, ip rogue, mac host, mac rogue
while(not stop_event.is_set()):
try:
inputready,outputready,exceptready = select.select(inputs, [], [])
except select.error, e:
break
except socket.error, e:
break
for socketReady in inputready :
roundstart_time = time.time()
### FROM NETWORK ###
if socketReady == self.s :
packet = self.s.recvfrom(1600)
raw_pkt = packet[0]
                    if raw_pkt not in last_mangled_request: # skip packets we have already mangled and re-injected
self.pktsCount += 1
pkt = Ether(packet[0])
if self.FenrirFangs.checkRules(pkt) == True:
if 'IP' in pkt and pkt[IP].dst != '224.0.0.252' and pkt[IP].dst != '10.0.0.255':
self.MANGLE.pktRewriter(pkt, pkt[IP].src, self.MANGLE.rogue, pkt[Ether].src, self.MANGLE.mrogue)
last_mangled_request.append(str(pkt))
#print("PKT in rules")
self.tap.write(str(pkt))
break
elif 'ARP' in pkt and (pkt[Ether].src == self.tap.hwaddr or pkt[ARP].pdst == self.hostip or pkt[ARP].psrc == self.hostip) :
epkt = pkt
elif 'IP' in pkt and (pkt[Ether].src == self.tap.hwaddr or pkt[IP].dst == self.hostip or pkt[IP].src == self.hostip or pkt[IP].dst == '224.0.0.252') :
epkt = pkt
elif 'EAPOL' in pkt :
epkt = pkt
elif 'BOOTP' in pkt :
epkt = pkt
else:
break
                        ##### NBT-NS (legacy branch, disabled by mycount above)
                        if not mycount and 'IP' in epkt and (epkt[IP].dst == '10.0.0.255' and epkt[IP].dport == 137) :
                            print "---------- UDP Packet NBT-NS"
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
                        ##### LLMNR
                        elif not mycount and 'IP' in epkt and (epkt[IP].dst == '224.0.0.252' and epkt[IP].dport == 5355) :
                            print "---------- UDP Packet LLMNR"
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
                        ##### end LLMNR / NBT-NS
                        elif not mycount and 'IP' in epkt and epkt[IP].dport == 445 :
                            print "---------- SMB packet (port 445)"
                            self.MANGLE.pktRewriter(epkt, epkt[IP].src, self.MANGLE.rogue, epkt[Ether].src, self.MANGLE.mrogue)
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
else :
mangled_request = self.MANGLE.Fenrir_Address_Translation(epkt)
ifaceToBeUsed = self.chooseIface(mangled_request)
if ifaceToBeUsed == 'FENRIR' :
self.tap.write(str(mangled_request))
else :
#mangled_request.show2()
last_mangled_request.append(str(mangled_request))
self.sendeth2(str(mangled_request), ifaceToBeUsed)
else :
last_mangled_request.remove(raw_pkt)
### FROM FENRIR ###
elif socketReady == self.tap :
self.pktsCount += 1
                    buf = self.tap.read(self.tap.mtu) # read a packet coming from the rogue (TAP) interface
                    epkt = Ether(buf) # same as above: rebuild a scapy frame from the raw bytes
if epkt not in last_mangled_request:
mangled_request = self.MANGLE.Fenrir_Address_Translation(epkt)
ifaceToBeUsed = self.chooseIface(mangled_request)
                        ########### begin LLMNR
#print str(mangled_request.summary()) + " ----------- IN tap socket loop (after MANGLE)"
if 'LLMNRQuery' in mangled_request :
print("IN")
mangled_request[LLMNRQuery].an.rdata = '10.0.0.5'
del mangled_request[IP].chksum
if 'UDP' in mangled_request:
del mangled_request[UDP].chksum
mangled_request = mangled_request.__class__(str(mangled_request))
#ls(mangled_request)
                        ########### end LLMNR
#print(ifaceToBeUsed)
if ifaceToBeUsed == 'FENRIR':
self.tap.write(str(mangled_request))
last_mangled_request.append(mangled_request)
else :
#mangled_request.show2()
###
if 'IP' in mangled_request and 1 == 2:
print("before frag")
frags=fragment(mangled_request, fragsize=500)
print("after frags")
for frag in frags:
frag = frag.__class__(str(frag))
last_mangled_request.append(str(frag))
self.sendeth2(str(frag), ifaceToBeUsed)
#send(frag, iface=ifaceToBeUsed)
else:
if 'IP' in mangled_request:
del mangled_request[IP].len
#mangled_request = mangled_request.__class__(str(mangled_request))
#if 'TCP' in mangled_request:
# new_mangled_request = self.MANGLE.changeSessID(mangled_request)
# mangled_request = new_mangled_request
last_mangled_request.append(str(mangled_request))
#if 'TCP' in mangled_request:
# #print("[[[")
# print(str(mangled_request[TCP].seq) + " : " + str(mangled_request[IP].len))
# print("]]]")
self.sendeth2(str(mangled_request), ifaceToBeUsed)
###
# last_mangled_request.append(str(mangled_request))
# self.sendeth2(str(mangled_request), ifaceToBeUsed)
else:
self.tap.write(str(epkt))
last_mangled_request.remove(epkt)
else :
exit('WTH')
|
python
|
from click.testing import CliRunner
import unittest
from mock import patch, Mock, PropertyMock
from floyd.cli.version import upgrade
class TestFloydVersion(unittest.TestCase):
"""
Tests cli utils helper functions
"""
def setUp(self):
self.runner = CliRunner()
@patch('floyd.cli.version.pip_upgrade')
@patch('floyd.cli.version.conda_upgrade')
@patch('floyd.cli.utils.sys')
def test_floyd_upgrade_with_standard_python(self, mock_sys, conda_upgrade, pip_upgrade):
mock_sys.version = '2.7.13 (default, Jan 19 2017, 14:48:08) \n[GCC 6.3.0 20170118]'
self.runner.invoke(upgrade)
conda_upgrade.assert_not_called()
pip_upgrade.assert_called_once()
@patch('floyd.cli.version.pip_upgrade')
@patch('floyd.cli.version.conda_upgrade')
@patch('floyd.cli.utils.sys')
def test_floyd_upgrade_with_anaconda_python(self, mock_sys, conda_upgrade, pip_upgrade):
mock_sys.version = '3.6.3 |Anaconda, Inc.| (default, Oct 13 2017, 12:02:49) \n[GCC 7.2.0]'
self.runner.invoke(upgrade)
pip_upgrade.assert_not_called()
conda_upgrade.assert_called_once()
|
python
|
# -*- coding: utf-8 -*-
"""
EnigmaLight Plugin by Speedy1985, 2014
https://github.com/speedy1985
Parts of the code are from DonDavici (c) 2012 and other plugins:
all credits to the coders :-)
EnigmaLight is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
EnigmaLight is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
"""
from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT, RT_VALIGN_CENTER
from threading import Thread, Timer
from Screens.Standby import TryQuitMainloop
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, getConfigListEntry
from Components.Label import Label
from Components.Pixmap import Pixmap
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.HelpMenu import HelpableScreen
from EL_Check import EL_Screen_Check
from __common__ import EnigmaLight_log as log, showMessage, validIP, testDaemonConnectivity, setSymbolic
from __init__ import getCrashFilePath, _ # _ is translation
from EL_PathSelector import EL_Screen_PathSelector
from threading import currentThread
from EL_ThreadHelper import callOnMainThread
import os
#===============================================================================
#
#===============================================================================
class EL_Screen_Settings(Screen, ConfigListScreen, HelpableScreen):
_hasChanged = False
_session = None
skins = None
def __init__(self, session):
log("",self,"Settings Opened succesfull..")
Screen.__init__(self, session)
HelpableScreen.__init__(self)
self.cfglist = []
ConfigListScreen.__init__(self, self.cfglist, session, on_change = self._changed)
self._session = session
self._hasChanged = False
self._hasNetworkChanged = False
self._binTypeChanged = False
self._restartBinary = False
self.controller = None
self.selected = None
self["txt_green"] = Label()
self["btn_green"] = Pixmap()
self["statusbar"] = Pixmap()
self["txt_statusbar"] = Label()
self["txt_statusbar_info"] = Label()
self["help"] = StaticText()
self["setupActions"] = ActionMap(["SetupActions", "ColorActions", "EL_Settings"],
{
"green": self.keySave,
"red": self.keyCancel,
"cancel": self.keyCancel,
"ok": self.ok,
"left": self.keyLeft,
"right": self.keyRight,
"bouquet_up": self.keyBouquetUp,
"bouquet_down": self.keyBouquetDown,
}, -2)
self["txt_green"].setText(_("Save"))
self.arm_box = False
arch = os.popen("uname -m").read()
if 'armv7l' in arch:
self.arm_box = True
self.createSetup()
log("",self,"Finisch layout...")
self["config"].onSelectionChanged.append(self.updateHelp)
self.onLayoutFinish.append(self.finishLayout)
#===========================================================================
#
#===========================================================================
def finishLayout(self):
log("",self,"Layout finisched..")
self.setTitle(_("Settings"))
if not config.plugins.enigmalight.showstatusbar.getValue():
self["statusbar"].hide()
self["txt_statusbar"].hide()
self["txt_statusbar_info"].hide()
else:
self["statusbar"].show()
self["txt_statusbar"].show()
self["txt_statusbar_info"].show()
#===========================================================================
#
#===========================================================================
def setController(self, controller):
self.controller = controller
self.controller.setSession(self.session)
#==========================================================================
    # Functions for use from other threads (UI updates are marshalled onto the main thread)
#==========================================================================
def handleFromThread(self,func,*args):
if args:
callOnMainThread(func,args[0])
else:
callOnMainThread(func)
def printWithThread(self,res):
print "%s :: {%s}" %(res, currentThread().getName())
def setStatusBarInfo(self,text):
#self.printWithThread("setStatusBarInfo())")
self["txt_statusbar_info"].setText(text)
def setStatusBarTxt(self,text):
#self.printWithThread("setStatusBarTxt()")
self["txt_statusbar"].setText(text)
def showStatusBar(self,value):
if value:
self["statusbar"].hide()
self["txt_statusbar_info"].hide()
self["txt_statusbar"].hide()
else:
self["statusbar"].show()
self["txt_statusbar_info"].show()
self["txt_statusbar"].show()
#===========================================================================
#
#===========================================================================
def createSetup(self):
log("",self)
self.cfglist = []
# GENERAL SETTINGS
self.cfglist.append(getConfigListEntry(_("[ General Settings ]"), config.plugins.enigmalight.about, _(" ")))
if self.arm_box:
self.cfglist.append(getConfigListEntry(_('- Type of EnigmaLight binary:'),config.plugins.enigmalight.bintype_arm, _(" ")))
        #self.cfglist.append(getConfigListEntry(_('- Type of EnigmaLight binary:'),config.plugins.enigmalight.bintype, _("Here you can select the type of enigmalight; most receivers can use the FPU version, but some can't. In that case use the normal version")))
self.configfilepath = getConfigListEntry(_("- Configuration File"), config.plugins.enigmalight.configfilepath, _("Select your configfile, default /etc/enigmalight.conf will be used "))
self.cfglist.append(self.configfilepath)
self.cfglist.append(getConfigListEntry(_('- Run EnigmaLight as server when lights are off:'),config.plugins.enigmalight.server, _("Run EnigmaLight as Server for Boblight or other clients ")))
#self.cfglist.append(getConfigListEntry(_('- Check for update, press OK\r'),config.plugins.enigmalight.clickOK, _("Press OK to check for update.. "))),
self.cfglist.append(getConfigListEntry(_('- Show message when turn on/off lights:'),config.plugins.enigmalight.message_onoff, _("Show a messagebox when you turn on/off the lights ")))
self.cfglist.append(getConfigListEntry(_('- Enable lights on boot:'),config.plugins.enigmalight.autostart, _("Automatic turn on lights on boot ")))
        self.cfglist.append(getConfigListEntry(_('- Cluster Leds:'),config.plugins.enigmalight.cluster, _("Cluster [X] LEDs as one LED.\nBy default each LED has its own color; with this option you can bundle/cluster 2 -> 10 LEDs.")))
        self.cfglist.append(getConfigListEntry(_('- Delay:'), config.plugins.enigmalight.delay, _("Some TVs are slower than the lights. With this option you can delay the output by 1 -> 20 frames.")))
        self.cfglist.append(getConfigListEntry(_('- Interval:'), config.plugins.enigmalight.interval, _("How fast EnigmaLight will run.\n0.01 = 15 -> 40fps | 0.10 = 10fps | 0.20 = 5fps")))
self.cfglist.append(getConfigListEntry(_('- 3D Mode:'), config.plugins.enigmalight.m_3dmode, _("Turn on/off 3D Mode, SBS or TAB")))
self.cfglist.append(getConfigListEntry(_('- Default lightmode:'),config.plugins.enigmalight.mode, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Standby Mode:'),config.plugins.enigmalight.standbymode, _("Turn off lights or use moodlamp in standby ")))
self.cfglist.append(getConfigListEntry(_('- Color order:'), config.plugins.enigmalight.color_order, _(" Set the order as given in enigmalight.conf.")))
self.cfglist.append(getConfigListEntry(_("[ Blackbars ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Remove Blackbars top and bottom:'),config.plugins.enigmalight.blackbar_h, _("Remove horizontal blackbars from lights.")))
self.cfglist.append(getConfigListEntry(_('- Remove Blackbars left and right:'),config.plugins.enigmalight.blackbar_v, _("Remove vertical blackbars from lights.")))
        self.cfglist.append(getConfigListEntry(_('- Delay before remove:'), config.plugins.enigmalight.blackbar_f, _("Counts from 0 to the given number;\nif the blackbars are still there, they are removed.\nIf enigmalight runs at 10fps and you want to wait 10s before removal, set it to 100")))
#getConfigListEntry(_('Switch on/off lights when TV turns on/off:'), config.plugins.enigmalight.hdmicec_enabled),
#Network
self.cfglist.append(getConfigListEntry(_("[ Network Settings ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Enable network mode (connect with other daemon):'), config.plugins.enigmalight.network_onoff, _("Use enigmalight as client and connect with other daemon over network (not for local use)")))
if config.plugins.enigmalight.network_onoff.value is True:
self.cfglist.append(getConfigListEntry(_('- Host ipaddress:'), config.plugins.enigmalight.address, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Daemon port:'), config.plugins.enigmalight.port, _(" ")))
#Timer
self.cfglist.append(getConfigListEntry(_("[ Timer Settings ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Use Timer:'), config.plugins.enigmalight.timer_onoff, _("Turn on/off lights @ given time ")))
if config.plugins.enigmalight.timer_onoff.value is True:
self.cfglist.append(getConfigListEntry(_('- Don\'t turn lights off/on in standby:'), config.plugins.enigmalight.timer_standby_onoff, _("Disable timer function in standbymode ")))
self.cfglist.append(getConfigListEntry(_("- Enable lights:"), config.plugins.enigmalight.time_start, _("Time when lights go on ")))
self.cfglist.append(getConfigListEntry(_("- Disable lights:"), config.plugins.enigmalight.time_end, _("Time when lights go off ")))
#server
self.cfglist.append(getConfigListEntry(_("[ Remote ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_("- Use remoteserver:"), config.plugins.enigmalight.remote_server, _("Control EnigmaLight from browser")))
if config.plugins.enigmalight.remote_server.value:
self.cfglist.append(getConfigListEntry(_("- Remoteserver Port:"), config.plugins.enigmalight.remote_port, _("Show status at bottomscreen fps, cpu usage and currentmode")))
#Debug
self.cfglist.append(getConfigListEntry(_("[ Misc ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_("- Show statusbar on bottom of screen:"), config.plugins.enigmalight.showstatusbar, _("Show status at bottomscreen fps, currentmode and other info")))
if config.plugins.enigmalight.showstatusbar.getValue():
self.cfglist.append(getConfigListEntry(_("- Remove statusbar from tuningscreen:"), config.plugins.enigmalight.showstatusbar_tuning, _("Remove the statusbar from colortuning screen")))
self.cfglist.append(getConfigListEntry(_("- Show errormessages:"), config.plugins.enigmalight.message_error_onoff, _("Turn on if you want to see error information")))
self.cfglist.append(getConfigListEntry(_("- Debug-Logging > /tmp/enigmalight_gui.log:"), config.plugins.enigmalight.EnableEventLog, ""))
# self.cfglist.append(getConfigListEntry(_("- Log folder path:"), config.plugins.enigmalight.logfolderpath, _("Default log wil be saved at /tmp/enigmalight_gui.log")))
# self.cfglist.append(self.logfolderpath)
self["config"].list = self.cfglist
#self["config"].l.setList(self.cfglist)
#===========================================================================
#
#===========================================================================
def _changed(self):
self._hasChanged = True
self.controller.changeValue(self["config"].getCurrent()[1])
if self["config"].getCurrent()[1] == config.plugins.enigmalight.address or self["config"].getCurrent()[1] == config.plugins.enigmalight.port or self["config"].getCurrent()[1] == config.plugins.enigmalight.network_onoff:
self._hasNetworkChanged = True
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.EnableEventLog:
self._hasNetworkChanged = False
self.saveAll()
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.remote_server or self["config"].getCurrent()[1] == config.plugins.enigmalight.remote_port:
if config.plugins.enigmalight.remote_server.value:
self.controller.StartServer()
else:
self.controller.StopServer()
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.bintype_arm:
self.saveAll()
self._binTypeChanged = True
#===========================================================================
#
#===========================================================================
def updateHelp(self):
cur = self["config"].getCurrent()
self["help"].text = cur and cur[2] or " "
#===========================================================================
#
#===========================================================================
def ok(self):
cur = self["config"].getCurrent()
if cur == self.configfilepath:
self.session.openWithCallback(self.savePathConfig,EL_Screen_PathSelector,self.configfilepath[1].value, "configfile", "Select configfile")
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.clickOK:
EL_Screen_Check(self.session).checkForUpdate(self.controller)
self.controller.setStatusBarInfo(_("Check for update..."))
self.controller.checkedForUpdates = True
#===========================================================================
#
#===========================================================================
def savePathConfig(self, pathValue, myType):
log("",self)
log("",self,"pathValue: " + str(pathValue))
log("",self,"type: " + str(myType))
if pathValue is not None:
if myType == "configfile":
self.configfilepath[1].value = pathValue
self._restartBinary = True
if pathValue != None:
message = self.session.openWithCallback(self.restartEnigmaLight,MessageBox,_("To reload the configfile EnigmaLight needs a restart, restart now ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("Reload configfile ?"))
config.plugins.enigmalight.save()
def restartEnigmaLight(self,answer):
log("",self)
#first kill enigmalight
if answer:
self.controller.killEnigmalight(None,self.KillEnigmaLightDone)
def restartEnigma2(self,answer):
log("",self)
#first kill enigmalight
if answer:
self.session.open(TryQuitMainloop, 3)
def KillEnigmaLightDone(self):
log("",self)
setSymbolic() #set new symbolic if needed
self.controller.Control("grabber","start")
self.close(None)
#===========================================================================
#
#===========================================================================
def keySave(self):
log("",self)
#check ip if network is true, before save
if config.plugins.enigmalight.network_onoff.getValue():
#check ip
if not validIP(str(config.plugins.enigmalight.address.getText())):
showMessage(self.session,_("Ip address %s is not accepted, check your input and try again.") %(str(config.plugins.enigmalight.address.getText())),"W")
else:
#check connection
if not testDaemonConnectivity(config.plugins.enigmalight.address.getText(),config.plugins.enigmalight.port.value):
showMessage(self.session,_("Enigmalight can't connect with %s:%s,\ncheck your input and try again.") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())),"W")
else:
showMessage(self.session,_("Test Connection with %s:%s, succesfull!") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())),"I")
self.saveAll()
message = self.session.openWithCallback(self.startClient,MessageBox,_("Do you want to (re)start the client and connect with %s:%s ?") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())), MessageBox.TYPE_YESNO)
message.setTitle(_("(Re)start client ?"))
else:
self.saveAll()
if self._hasNetworkChanged:
self._hasNetworkChanged = False
if self.controller.lightsEnabled:
self.controller.killEnigmalight(None,None)
message = self.session.openWithCallback(self.startGrabber,MessageBox,_("Do you want to (re)start the client ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("(Re)start client ?"))
elif self._binTypeChanged:
message = self.session.openWithCallback(self.restartEnigmaLight,MessageBox,_("Type of enigmalight has changed, Start this type of Enigmalight ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("Start ?"))
else:
self.close(None)
#===========================================================================
#
#===========================================================================
def startClient(self, answer):
log("",self)
if answer is True:
self.controller.killEnigmalight(None,self.controller.switchtoNetwork())
else:
self.close()
def startGrabber(self, answer):
log("",self)
if answer is True:
self.controller.Control("grabber","start")
else:
self.close()
#===========================================================================
#
#===========================================================================
def keyLeft(self):
log("",self)
ConfigListScreen.keyLeft(self)
self.createSetup()
#===========================================================================
#
#===========================================================================
def keyRight(self):
log("",self)
ConfigListScreen.keyRight(self)
self.createSetup()
#===========================================================================
#
#===========================================================================
def keyBouquetUp(self):
log("",self)
self["config"].instance.moveSelection(self["config"].instance.pageUp)
#===========================================================================
#
#===========================================================================
def keyBouquetDown(self):
log("",self)
self["config"].instance.moveSelection(self["config"].instance.pageDown)
|
python
|
#! /usr/bin/env python3
import os, sys, time, re
pid = os.getpid()
os.write(1, ("About to fork (pid:%d)\n" % pid).encode())
rc = os.fork()
if rc < 0:
os.write(2, ("fork failed, returning %d\n" % rc).encode())
sys.exit(1)
elif rc == 0: # child
os.write(1, ("Child: My pid==%d. Parent's pid=%d\n" %
(os.getpid(), pid)).encode())
args = ["wc", "p3-exec.py"]
for dir in re.split(":", os.environ['PATH']): # try each directory in the path
program = "%s/%s" % (dir, args[0])
os.write(1, ("Child: ...trying to exec %s\n" % program).encode())
try:
os.execve(program, args, os.environ) # try to exec program
except FileNotFoundError: # ...expected
pass # ...fail quietly
os.write(2, ("Child: Could not exec %s\n" % args[0]).encode())
sys.exit(1) # terminate with error
else: # parent (forked ok)
os.write(1, ("Parent: My pid=%d. Child's pid=%d\n" %
(pid, rc)).encode())
childPidCode = os.wait()
os.write(1, ("Parent: Child %d terminated with exit code %d\n" %
childPidCode).encode())
|
python
|
import sys
import math
import random
class leds:
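    # Apparent BLE packet layout (inferred from usage, not a documented spec):
    # every command starts with 0x18 followed by an opcode and its payload.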
def __init__(self, call):
self.call = call
def show_next(self, color, index):
data = [0x18, 0x05, 0x05, 0x02]
if(color == "white"):
data[2] = 0x01
elif(color == "red"):
data[2] = 0x02
elif(color == "yellow"):
data[2] = 0x03
elif(color == "green"):
data[2] = 0x04
elif(color == "blue"):
data[2] = 0x05
elif(color == "purple"):
data[2] = 0x06
elif(color == "black"):
data[2] = 0x07
if(index == "random"):
data[3] = random.randint(1, 7)
else:
data[3] = index
self.call.blewrite(data)
self.call.blewait()
def show_previous(self, color, index):
data = [0x18, 0x04, 0x05, 0x02]
if(color == "white"):
data[2] = 0x01
elif(color == "red"):
data[2] = 0x02
elif(color == "yellow"):
data[2] = 0x03
elif(color == "green"):
data[2] = 0x04
elif(color == "blue"):
data[2] = 0x05
elif(color == "purple"):
data[2] = 0x06
elif(color == "black"):
data[2] = 0x07
if(index == "random"):
data[3] = random.randint(1, 7)
else:
data[3] = index
self.call.blewrite(data)
self.call.blewait()
def show_all(self, color, index):
data = [0x18, 0x02, 0x05, 0x02]
if(color == "white"):
data[2] = 0x01
elif(color == "red"):
data[2] = 0x02
elif(color == "yellow"):
data[2] = 0x03
elif(color == "green"):
data[2] = 0x04
elif(color == "blue"):
data[2] = 0x05
elif(color == "purple"):
data[2] = 0x06
elif(color == "black"):
data[2] = 0x07
if(index == "random"):
data[3] = random.randint(1, 7)
else:
data[3] = int(index)
self.call.blewrite(data)
self.call.blewait()
def show_single(self, index, r, g, b):
data = [0x18, 0x08, 0x00, 0x00, 0x00, 0x00]
data[2] = int(index)-1
data[3] = r
data[4] = g
data[5] = b
self.call.blewrite(data)
self.call.blewait()
def color(self, value):
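        # Converts between color representations (illustrative):
        #   color((255, 0, 0)) -> '#ff0000'    color('#ff0000') -> [255, 0, 0]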
digit = list(map(str, range(10))) + list("abcdef")
if isinstance(value, tuple):
string = '#'
for i in value:
a1 = i // 16
a2 = i % 16
string += digit[a1] + digit[a2]
return string
elif isinstance(value, str):
a1 = digit.index(value[1]) * 16 + digit.index(value[2])
a2 = digit.index(value[3]) * 16 + digit.index(value[4])
a3 = digit.index(value[5]) * 16 + digit.index(value[6])
return [a1, a2, a3]
def trun_ring(self, buf, col):
arr = self.color(col)
buf.append(arr[0])
buf.append(arr[1])
buf.append(arr[2])
return buf
def show_ring(self, led1, led2, led3, led4, led5, led6, led7, led8, led9, led10, led11, led12):
data = [0x18, 0x07]
data = self.trun_ring(data, led1)
data = self.trun_ring(data, led2)
data = self.trun_ring(data, led3)
data = self.trun_ring(data, led4)
data = self.trun_ring(data, led5)
data = self.trun_ring(data, led6)
data = self.trun_ring(data, led7)
data = self.trun_ring(data, led8)
data = self.trun_ring(data, led9)
data = self.trun_ring(data, led10)
data = self.trun_ring(data, led11)
data = self.trun_ring(data, led12)
self.call.blewrite(data)
self.call.blewait()
def clear(self):
data = [0x18, 0x03, 0x00, 0x00, 0x00]
self.call.blewrite(data)
self.call.blewait()
def show_animation(self, mode):
data = [0x18, 0x06, 0x00]
if(mode == "spoondrift"):
data[2] = 0x01
elif(mode == "meteor"):
data[2] = 0x02
elif(mode == "rainbow"):
data[2] = 0x03
elif(mode == "firefly"):
data[2] = 0x04
elif(mode == "colorwipe"):
data[2] = 0x05
elif(mode == "breathe"):
data[2] = 0x06
elif(mode == "random"):
data[2] = random.randint(1, 6)
self.call.blewrite(data)
self.call.blewait()
def show_all_hex(self, color):
self.show_ring(color, color, color, color, color, color,
color, color, color, color, color, color)
def show_single_hex(self, index, color):
if(math.isinf(index)):
index = 0
elif(math.isnan(index)):
index = 0
else:
index = int(index)
if(index == 0):
data = [0x18, 0x03]
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
else:
if(index > 0):
index = index - 1
index = index % 12
if(index < 0):
index = 13 + index
index = index % 12
data = [0x18, 0x08, index]
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
|
python
|
#!/usr/bin/python
from UcsSdk import *
import time
# This script shows how to monitor UCS Manager events and define your own callback to take specific action on the respective events.
ucsm_ip = '0.0.0.0'
user = 'username'
password = 'password'
def callback_all(mce):
print 'Received a New Event with ClassId: ' + str(mce.mo.classId)
print "ChangeList: ", mce.changeList
print "EventId: ", mce.eventId
def callback_lsServer(mce):
print 'Received a New Service Profile Event: ' + str(mce.mo.classId)
print "ChangeList: ", mce.changeList
print "EventId: ", mce.eventId
try:
handle = UcsHandle()
handle.Login(ucsm_ip,user, password)
# Add an event handle "ev_all" to montitor the events generated by UCS Manager for any of the ClassIds
ev_all = handle.AddEventHandler()
# Get the list of active event handles.
handle.GetEventHandlers()
# Remove an event handle "ev_all"
handle.RemoveEventHandler(ev_all)
# Use your own callback method to take specific action on respective events.
ev_all_callback = handle.AddEventHandler(callBack = callback_all)
handle.RemoveEventHandler(ev_all_callback)
# Add an event handle to filter events based on classId = lsServer
ev_lsServer = handle.AddEventHandler(classId = "LsServer", callBack = callback_lsServer)
handle.RemoveEventHandler(ev_lsServer)
# loop that keeps the script running for us to get events/callbacks
while True:
time.sleep(5)
handle.Logout()
except Exception, err:
print "Exception:", str(err)
import traceback, sys
print '-'*60
traceback.print_exc(file=sys.stdout)
print '-'*60
handle.Logout()
|
python
|
import yaml
import sys
import os
import time
import re
import copy
import pprint
"""
For each possible rule path
"""
class ParserError(ValueError):
    pass
class Context(object):
def __init__(self,level,name,parent=None):
self.level = level
self.name = name
self.parent = parent
if self.parent:
self.url = self.parent.url+'.'+self.name
else:
self.url = self.name
def debug(self,msg):
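        # Debug output is disabled; drop this early return to re-enable it.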
return
print "{}{}: {}".format(" "*self.level,self.name,msg)
class State(object):
def __init__(self,s, pos=0):
self.s = s
self.parent = None
self.store = {}
self.result = None
self.current_node = []
self.root = self.current_node
self.pos = pos
@property
def line(self):
return len(self.s[:self.pos].split("\n"))
@property
def col(self):
return len(self.s[:self.pos].split("\n")[-1])+1
def copy(self,):
state = State(self.s, self.pos)
state.parent = self
state.store = copy.deepcopy(self.store)
state.current_node = self.current_node
state.root = self.root
return state
@property
def value(self):
return self.s[self.pos:]
def advance(self, n):
old_pos = self.pos
self.pos += n
return old_pos
def go_to(self, pos):
old_pos = self.pos
self.pos = pos
return old_pos
from collections import defaultdict
encountered_contexts = defaultdict(dict)
class Iterator(object):
def __init__(self,generator, parent=None):
self.generator = generator
self.parent = parent
self.list = []
self.pos = 0
def __iter__(self):
return self
def get(self,pos):
if self.parent:
return self.parent.get(pos)
while pos >= len(self.list):
value = next(self.generator)
self.list.append(value)
return self.list[pos]
def next(self):
self.pos+=1
return self.get(self.pos-1)
def copy(self):
if self.parent:
return Iterator(None,parent=self.parent)
return Iterator(None,parent=self)
def parser(name, url):
"""
    Decorator factory for parser rules; the memoizing/logging wrapper
    defined inside is currently bypassed.
"""
def dec(f):
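        # NOTE: returning f immediately makes this decorator a no-op; the
        # decorated_function wrapper below is effectively dead code.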
return f
def decorated_function(state, context, *args, **kwargs):
if False and url is not None and url in encountered_contexts[state.pos]:
# print url,state.pos
return encountered_contexts[state.pos][url].copy()
print("{}{} {}:{}".format(" "*context.level,context.name,state.line,state.col))
new_context = Context(context.level+1,name,context)
result = f(state, new_context, *args, **kwargs)
if url is not None:
encountered_contexts[state.pos][url] = Iterator(result)
return encountered_contexts[state.pos][url]
return result
return decorated_function
return dec
class ParserGenerator(object):
"""
    Generating an abstract syntax tree is done implicitly by each rule
"""
def __init__(self, grammar):
self.grammar = grammar
self.parsers = {}
def compile_regex(self, regex, url):
compiled_regex = re.compile('^{}'.format(regex))
@parser('regex', url)
def regex_parser(state, context):
context.debug(regex)
match = compiled_regex.match(state.value)
if match:
s = match.group(0)
context.debug("match!")
new_state = state.copy()
new_state.result = s
new_state.advance(len(s))
yield new_state
else:
raise ParserError("Regex not matched: {}".format(regex))
return regex_parser
    def compile_ref(self, key, url):
        @parser('ref', url)
        def ref_parser(state, context):
            new_state = state.copy()
            new_state.result = state.store.get(key)
            yield new_state
        return ref_parser
def compile_ast_list(self, props, url):
name = props.get('name')
rule_parser = self._compile_rule(props['value'], url+'.ast-list')
@parser('ast-list', url)
def ast_list_parser(state, context):
l = []
current_node = state.current_node
state.current_node = l
try:
for new_state in rule_parser(state, context):
if isinstance(current_node,dict) and name:
new_current_node = new_state.current_node
new_state.current_node = current_node.copy()
new_state.current_node[name] = new_current_node
yield new_state
finally:
state.current_node = current_node
return ast_list_parser
def compile_ast_prop(self, props, url):
name = props.get('name')
value_parser = self._compile_rule(props['value'], url+'.ast-prop')
@parser('ast-prop', url)
def ast_prop_parser(state, context):
for new_state in value_parser(state, context):
current_node = new_state.current_node
if isinstance(current_node,dict):
current_node[name] = new_state.result
yield new_state
return ast_prop_parser
def compile_ast_node(self, props, url):
"""
Create a new AST node.
* If the current node is a list, appends the new node to it
* If the current node is a dict, puts the new node in the key given by name (if provided)
* If none of these things match, does nothing
"""
rule_parser = self._compile_rule(props['value'], url+'.ast-node')
name = props.get('name')
@parser('ast-node', url)
def ast_node_parser(state, context):
d = {}
d.update(props.get('props',{}))
current_node = state.current_node
state.current_node = d
try:
for new_state in rule_parser(state, context):
new_current_node = new_state.current_node
if isinstance(current_node,list):
new_state.current_node = current_node[:]
new_state.current_node.append(new_current_node)
elif isinstance(current_node,dict):
new_state.current_node = current_node.copy()
if name:
new_state.current_node[name] = new_current_node
else:
new_state.current_node.update(new_current_node)
yield new_state
finally:
state.current_node = current_node
return ast_node_parser
def compile_repeat(self, rule, url):
rule_parser = self._compile_rule(rule, url+'.repeat')
@parser('repeat', url)
def repeat_parser(state, context):
cnt=0
current_state = state
states_to_repeat=[state]
states_to_yield = []
productions = []
while states_to_repeat or states_to_yield or productions:
if states_to_repeat:
current_state=states_to_repeat.pop()
states_to_yield.append(current_state)
try:
production=rule_parser(current_state, context)
new_state = next(production)
#if the production does not advance the state, we reject it...
if new_state.pos == current_state.pos:
continue
productions.append(production)
states_to_repeat.append(new_state)
except (ParserError, StopIteration) as e :
continue
elif states_to_yield:
state_to_yield = states_to_yield.pop()
cnt +=1
if state_to_yield != state:
yield state_to_yield
elif productions:
production = productions[-1]
try:
new_state = next(production)
states_to_yield.append(new_state)
except (ParserError,StopIteration):
productions.pop()
if cnt==0:
raise ParserError("Not matched!")
return repeat_parser
def compile_optional(self, rule, url):
rule_parser = self._compile_rule(rule, url+'.optional')
@parser('optional', url)
def optional_parser(state, context):
try:
for new_state in rule_parser(state, context):
yield new_state
except ParserError as me:
pass
yield state
return optional_parser
def compile_store(self, args, url):
name = args['name']
value = args['value']
value_parser = self._compile_rule(value, url+'.store')
@parser('store', url)
def store_parser(state, context):
for ns in value_parser(state, context):
new_state = state.copy()
new_state.result = ns.result
yield new_state
return store_parser
def compile_literal(self, value, url):
if isinstance(value, dict):
value = self._compile_rule(value, url+'.literal')
@parser('literal', url)
def literal_parser(state, context):
context.debug(value)
if callable(value):
v = value(state, context)
else:
v = value
found_value = state.value[:len(v)]
if found_value != v:
raise ParserError("Expected {}, but found '{}'".format(value, found_value))
context.debug(v)
new_state = state.copy()
new_state.advance(len(v))
new_state.result = v
yield new_state
return literal_parser
def compile_python_code(self, code, url):
gv = globals().copy()
gv['url'] = url
exec(code,gv,gv)
return gv['parser']
def compile_or(self, alternatives, url):
alternative_parsers = []
for i,alternative in enumerate(alternatives):
alternative_parsers.append((alternative,self._compile_rule(alternative, url+'.or.{}'.format(i))))
@parser('or', url)
def or_parser(state, context):
"""
Pass in context object that contains information about the following things:
* Which rule has called this one?
*
"""
found = False
alternative_productions = []
for params,alternative_parser in alternative_parsers:
try:
alternative_productions.append(alternative_parser(state, context))
except ParserError as me:
continue
i = 0
while alternative_productions:
production = alternative_productions[i%len(alternative_productions)]
try:
new_state = next(production)
found = True
yield new_state
i+=1
except (ParserError,StopIteration):
alternative_productions.remove(production)
if not found:
raise ParserError("No alternative matched!")
return or_parser
def compile_sequence(self, rules, url):
"""
Increase the level by one for each element in the sequence
"""
parsers = []
for i,rule in enumerate(rules):
ps = self._compile_rule(rule, url+'.seq.{}'.format(i))
if ps is None:
raise AttributeError
parsers.append(ps)
@parser('sequence', url)
def sequence_parser(state, context):
"""
* Execute the first parser on the state
* For each returned state, execute the second parser
* For each returned state, execute the third parser...
"""
def parse_sequence(state, parsers):
parser = parsers.pop(0)
for new_state in parser(state, context):
if parsers:
try:
for new_new_state in parse_sequence(new_state, parsers[:]):
yield new_new_state
except ParserError:
continue
else:
yield new_state
for new_state in parse_sequence(state, parsers[:]):
yield new_state
return sequence_parser
def compile(self, debug=True):
self.parsers = {}
return self._compile_rule('start', '')
def _compile_rule(self, name_or_rule, url):
"""
Takes a YAML grammar as input and returns a Python parser function that can be
called with a Stream instance and a state as arguments.
"""
name = None
if isinstance(name_or_rule,(str,unicode)):
name = name_or_rule
if name in self.parsers:
return self.parsers[name]
rule = self.grammar[name]
else:
rule = name_or_rule
if name:
new_url = url+'.'+name
else:
new_url = url
def parse_subrule(rule, name=None):
rule_name = rule.keys()[0]
args = rule.values()[0]
if rule_name == '$python':
result = self.compile_python_code(args, url+'.{}'.format(name))
if name:
self.parsers[name] = result
return result
try:
func = getattr(self,'compile_{}'.format(rule_name.replace('-','_')))
except AttributeError:
raise ParserError("Unknown rule: {}".format(rule_name))
subparser = func(args, new_url)
@parser(rule_name, None)
def subrule_parser(state, context):
for result in subparser(state, context):
yield result
if name:
@parser(name, None)
def name_parser(state, context):
for result in subrule_parser(state, context):
yield result
self.parsers[name] = name_parser
return name_parser
return subrule_parser
#this allows definition of recursive parsing rules via a simple function call
if name:
#this will lead to infinite recursion if the parser is not replaced!
@parser(name, url)
def subrule_parser(state, context):
for result in self.parsers[name](state, context):
yield result
self.parsers[name] = subrule_parser
if isinstance(rule,(list,tuple)):
sequence_parser = self.compile_sequence(rule, new_url)
if name:
@parser(name, None)
def subrule_parser(state, context):
for result in sequence_parser(state, context):
yield result
self.parsers[name] = subrule_parser
return subrule_parser
return sequence_parser
elif isinstance(rule,dict) and len(rule) == 1:
return parse_subrule(rule, name=name)
elif isinstance(rule,(str,unicode)):
new_new_url = new_url+'.'+rule
ps = self._compile_rule(rule, new_new_url)
@parser(name, None)
def subrule_parser(state, context):
for result in ps(state, context):
yield result
self.parsers[name] = subrule_parser
return subrule_parser
raise ParserError("Unknown rule: {}".format(name or name_or_rule or '(no name given)'))
if __name__ == '__main__':
import sys
sys.setrecursionlimit(100000)
if len(sys.argv) < 3:
sys.stderr.write("Usage: {} [grammar filename] [code filename]\n".format(os.path.basename(__file__)))
exit(-1)
grammar_filename = sys.argv[1]
code_filename = sys.argv[2]
with open(grammar_filename,'r') as grammar_file:
grammar = yaml.load(grammar_file.read())
with open(code_filename,'r') as code_file:
code = code_file.read()
parser_generator = ParserGenerator(grammar)
parser = parser_generator.compile()
state = State(code)
start = time.time()
results = parser(state, Context(0,'root',None))
for result in results:
print result.line,result.col
if result.value.strip():
print "Parsing failed in line {}, column {}:\n\n{}...".format(result.line,result.col,result.value[:20])
else:
print "Parsing succeeded!"
pprint.pprint(result.current_node)
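# A minimal example grammar for this generator (a sketch; the exact set of
# supported keys is defined by the compile_* methods above, here using the
# list form for sequences plus the repeat/or/literal rules):
#
#   start:
#     - repeat:
#         or:
#           - literal: "a"
#           - literal: "b"
#
# Saved as grammar.yml and run via `python thisfile.py grammar.yml input.txt`,
# this would match strings such as "abba".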
|
python
|
from typing import ClassVar, List, Optional, Tuple
from urllib.parse import quote as http_quote
from .base import BaseProtocol
from ..types import IPAddressType
# HTTP 1.1 only
class HTTPProtocol(BaseProtocol[IPAddressType]):
ports: ClassVar[Tuple[int, ...]] = (80,)
_TYPES: ClassVar[List[bytes]] = [
b"*/*; q=0.300", b"text/html; q=0.999", b"application/xhtml+xml; q=1.000",
b"application/xml; q=0.900", b"text/xml; q=0.900", b"application/json; q=0.650",
b"application/pdf; q=0.800", b"image/*; q=0.700", b"image/png; q=0.775",
b"image/gif; q=0.750", b"image/jpeg; q=0.725", b"text/*; q=0.500", b"video/*; q=0.100",
b"audio/*; q=0.200", b"application/rtf; q=0.675", b"text/markdown; q=0.600",
b"text/plain; q=0.400", b"application/atom+xml; q=0.900"
]
_CHARSETS: ClassVar[List[bytes]] = [
b"*; q=0.200", b"utf-8; q=1.000", b"us-ascii; q=0.100",
b"utf-16le; q=0.900", b"utf-16; q=0.850", b"utf-16be; q=0.800",
b"utf-32le; q=0.700", b"utf-32; q=0.650", b"utf-32be; q=0.600",
b"iso-8859-15; q=0.500", b"windows-1252; q=0.400", b"iso-8859-1; q=0.300"
]
_LANGS: ClassVar[List[bytes]] = [
b"*; q=0.100", b"en-US; q=1.000", b"en; q=0.900", b"en-GB; q=0.850", b"en-CA; q=0.950",
b"en-AU; q=0.800", b"de-DE; q=0.600", b"de; q=0.500", b"de-CH; q=0.550", b"de-AT; q=0.450",
b"es; q=0.300", b"es-MX; q=0.350", b"es-ES; q=0.250", b"pt; q=0.200", b"fr; q=0.200"
]
_ENCS: ClassVar[List[bytes]] = [b"identity; q=1.000", b"*; q=0.000"]
assert _TYPES and _CHARSETS and _LANGS and _ENCS
__slots__ = ()
    def pull_data(self, length_hint: Optional[int] = None) -> bytes:
res = bytearray(b"GET / HTTP/1.1\r\nHost: ")
if self._dst.host is not None:
res += http_quote(self._dst.host).encode("ascii")
res += b"\r\nUser-Agent: tcpreq (TCP research scan)\r\n"
if length_hint is None or length_hint - len(res) <= 2:
return res + b"\r\n"
# Caching
res += b"Cache-Control: max-age=3600, max-stale=1600, no-transform\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Pragma
res += b"Pragma: no-cache\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Referer
res += b"Referer: about:blank\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Accept, Accept-Charset, Accept-Language, Accept-Encoding
for name, vals in ((b"Accept: ", self._TYPES), (b"Accept-Charset: ", self._CHARSETS),
(b"Accept-Language: ", self._LANGS), (b"Accept-Encoding: ", self._ENCS)):
rem = length_hint - (len(res) + len(name))
ret = True
for idx, v in enumerate(vals):
rem -= len(v) + 2 # Add 2 for next separator (", ")
if rem <= 2:
break
else:
ret = False
res += name
res += b", ".join(vals[:idx + 1])
res += b"\r\n"
if ret:
break
return res + b"\r\n"
# No need to implement push_data: HTTP is stateless (except for cookies), response can be ignored
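# Usage sketch (hypothetical; assumes a concrete subclass instance `proto`
# whose destination has a host set). pull_data keeps the request close to
# length_hint by dropping optional headers and truncating the Accept-* lists:
#   req = proto.pull_data(length_hint=128)  # roughly bounded by 128 bytes
#   req.endswith(b"\r\n\r\n")               # the request is always terminated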
|
python
|
'''
Read the start time and the end time of a game (in hours). Then compute the duration of the game, knowing that it may start on one day and end on the next, with a minimum duration of 1 hour and a maximum of 24 hours.
| Input Sample | Output Samples |
| ------------ | ----------------------- |
| 16 2 | O JOGO DUROU 10 HORA(S) |
| ------------ | ----------------------- |
| 0 0 | O JOGO DUROU 24 HORA(S) |
| ------------ | ----------------------- |
| 2 16 | O JOGO DUROU 14 HORA(S) |
'''
hora = input().split()
inicio = int(hora[0])
fim = int(hora[1])
# First attempt, kept for reference (incorrect when the game crosses midnight):
'''
result = fim - inicio
if result == 0:
print("O JOGO DUROU 24 HORA(S)")
else:
print("O JOGO DUROU {} HORA(S)".format(result))
'''
if inicio < fim:
result = fim - inicio
else:
result = (24 - inicio) + fim
print("O JOGO DUROU {} HORA(S)".format(result))
|
python
|
# stdlib imports
import logging
from datetime import datetime, timedelta
# third party imports
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import (Column, Integer, Float, String,
DateTime, ForeignKey, Boolean)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy_utils import database_exists, create_database
# declarative_base() dynamically creates the base class our table classes inherit from
Base = declarative_base()
TIMEFMT = '%Y-%m-%dT%H:%M:%S'
MYSQL_TIMEOUT = 30
# association algorithm - any peak with:
# time > origin - TMIN and time < origin + TMAX
# AND
# distance < DISTANCE
TMIN = 60
TMAX = 180
DISTANCE = 500
P_TRAVEL_TIME = 4.2
class IncorrectDataTypesException(Exception):
pass
class IncompleteConstructorException(Exception):
pass
def get_session(url='sqlite:///:memory:', create_db=True):
"""Get a SQLAlchemy Session instance for input database URL.
:param url:
SQLAlchemy URL for database, described here:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls.
:param create_db:
Boolean indicating whether to create database from scratch.
:returns:
Sqlalchemy Session instance.
"""
    # Create the database if it does not exist (when allowed)
if not database_exists(url):
if create_db:
create_database(url)
else:
msg = ('Database does not exist, will not create without '
'create_db turned on.')
logging.error(msg)
return None
connect_args = {}
if 'mysql' in url.lower():
connect_args = {'connect_timeout': MYSQL_TIMEOUT}
engine = create_engine(url, echo=False, connect_args=connect_args)
Base.metadata.create_all(engine)
# create a session object that we can use to insert and
# extract information from the database
Session = sessionmaker(bind=engine, autoflush=False)
session = Session()
return session
class Event(Base):
"""Class representing the "event" table in the database.
"""
EVENT = {'eventid': String(64),
'netid': String(32),
'time': DateTime(),
'lat': Float(),
'lon': Float(),
'depth': Float(),
'magnitude': Float(),
'locstring': String(1024),
'lastrun': DateTime(),
}
__tablename__ = 'event'
id = Column(Integer, primary_key=True)
eventid = Column(EVENT['eventid'], index=True)
netid = Column(EVENT['netid'])
time = Column(EVENT['time'])
lat = Column(EVENT['lat'])
lon = Column(EVENT['lon'])
depth = Column(EVENT['depth'])
magnitude = Column(EVENT['magnitude'])
locstring = Column(EVENT['locstring'])
lastrun = Column(EVENT['lastrun'])
queued_events = relationship("Queued", back_populates="event",
cascade="all, delete, delete-orphan")
@property
def is_running(self):
        for queue in self.queued_events:
if queue.is_running:
return True
return False
@property
def age_in_days(self):
return (datetime.utcnow() - self.time) / timedelta(days=1)
def __init__(self, **kwargs):
"""Instantiate an Event object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
eventid (str): Event ID of the form "us2020abcd".
netid (str): The network code at the beginning of the eventid.
time (datetime): Origin time, UTC.
lat (float): Origin latitude.
lon (float): Origin longitude.
depth (float): Origin depth.
magnitude (float): Origin magnitude.
locstring (str): Description of earthquake location.
lastrun (datetime): Set this to something like datetime(1900,1,1).
Returns:
Event: Instance of the Event object.
"""
validate_inputs(self.EVENT, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return (f'Event: {self.eventid}')
class Queued(Base):
"""Class representing the "queued" table in the database.
"""
__tablename__ = 'queued'
QUEUED = {'event_id': Integer(),
'run_time': DateTime(),
}
id = Column(Integer, primary_key=True)
event_id = Column(QUEUED['event_id'], ForeignKey('event.id'))
run_time = Column(QUEUED['run_time'])
event = relationship("Event", back_populates="queued_events")
running_events = relationship("Running",
back_populates="queued_event",
cascade="all, delete, delete-orphan")
def __init__(self, **kwargs):
"""Instantiate a Queued object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
event_id (int): ID of an existing (committed) Event object.
run_time (datetime): Time (UTC) when event is scheduled to be run.
Returns:
Queued: Instance of the Queued object.
"""
validate_inputs(self.QUEUED, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
@property
def is_running(self):
return len(self.running_events) > 0
def __repr__(self):
return (f'Queued: {self.event.eventid} {self.run_time}')
class Running(Base):
"""Class representing the "running" table in the database.
"""
__tablename__ = 'running'
RUNNING = {'queued_id': Integer(),
'start_time': DateTime(),
'success': Boolean(),
}
id = Column(Integer, primary_key=True)
queued_id = Column(RUNNING['queued_id'], ForeignKey('queued.id'))
start_time = Column(RUNNING['start_time'])
success = Column(RUNNING['success'])
queued_event = relationship("Queued", back_populates="running_events")
def __init__(self, **kwargs):
"""Instantiate a Running object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
queued_id (int): ID of an existing (committed) Queued object.
start_time (datetime): Time (UTC) when event began running.
success (bool): Indicates whether the event has finished running successfully.
Returns:
Running: Instance of the Running object.
"""
validate_inputs(self.RUNNING, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
@property
def minutes_running(self):
# return running time in minutes
return (datetime.utcnow() - self.start_time) / timedelta(seconds=60)
def __repr__(self):
msg = (f'Running: {self.queued_event.event.eventid} '
f'started at {self.start_time}')
return (msg)
def validate_inputs(defdict, kwdict):
"""Validate all init() inputs against the python types of table columns.
Args:
defdict (dict): Dictionary containing the column
names/SQLAlchemy types.
kwdict (dict): Dictionary containing the init() kwargs.
Raises:
IncompleteConstructorException: Not all kwargs are set.
IncorrectDataTypesException: At least one of the kwargs is
of the wrong type.
"""
# first check that all required parameters are being set
if not set(defdict.keys()) <= set(kwdict.keys()):
        msg = ('In the constructor, all of the following values must be set: '
               f'{str(list(defdict.keys()))}')
raise IncompleteConstructorException(msg)
errors = []
for key, value in kwdict.items():
ktype = defdict[key].python_type
if not isinstance(value, ktype):
errors.append(f'{key} must be of type {ktype}')
    if errors:
msg = '\n'.join(errors)
raise IncorrectDataTypesException(msg)
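# Usage sketch (a minimal example against the default in-memory SQLite
# database; all field values below are hypothetical):
if __name__ == '__main__':
    session = get_session()
    event = Event(eventid='us2020abcd', netid='us',
                  time=datetime(2020, 1, 1, 12, 0, 0),
                  lat=34.05, lon=-118.25, depth=10.0, magnitude=5.4,
                  locstring='Somewhere, CA', lastrun=datetime(1900, 1, 1))
    session.add(event)
    session.commit()
    queued = Queued(event_id=event.id, run_time=datetime.utcnow())
    session.add(queued)
    session.commit()
    print(event, queued, event.is_running)  # Event: us2020abcd ... False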
|
python
|
# Copyright (c) Xidian University and Xi'an University of Posts & Telecommunications. All Rights Reserved
import random
from .nasbench_101_cell import Cell as Cell_101
from .nasbench_201_cell import Cell as Cell_201
from gnn_lib.data import Data
from nas_lib.utils.utils_data import nas2graph
from nas_lib.utils.predictive_comparision import convert_arch_to_seq
from nas_lib.utils.utils_data import nasbench2graph_reverse
def build_datasets(args):
if args.search_space == "nasbench_101":
from nas_lib.data.nasbench_101 import NASBench101
return NASBench101(args.search_space)
elif args.search_space == 'nasbench_201':
from nas_lib.data.nasbench_201 import NASBench201
return NASBench201(args)
elif args.search_space == 'darts':
from nas_lib.data.darts import DataSetDarts
return DataSetDarts(args)
else:
raise ValueError("This architecture datasets does not support!")
def dataset_split(args, nas_dataset, budget=None):
total_keys = nas_dataset.total_keys
total_archs = nas_dataset.total_archs
if budget:
train_keys = random.sample(total_keys, budget)
else:
train_keys = random.sample(total_keys, args.search_budget)
test_keys = [key for key in total_keys if key not in train_keys]
train_data = []
test_data = []
flag = args.search_space == 'nasbench_101'
for k in train_keys:
arch = total_archs[k]
if args.search_space == 'nasbench_101':
cell_inst = Cell_101(matrix=arch['matrix'], ops=arch['ops'])
elif args.search_space == 'nasbench_201':
cell_inst = Cell_201(matrix=arch[0][0], ops=arch[0][1])
else:
raise NotImplementedError()
train_data.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100-arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100-arch[5]) * 0.01
}
)
for k in test_keys:
arch = total_archs[k]
if args.search_space == 'nasbench_101':
cell_inst = Cell_101(matrix=arch['matrix'], ops=arch['ops'])
elif args.search_space == 'nasbench_201':
cell_inst = Cell_201(matrix=arch[0][0], ops=arch[0][1])
else:
raise NotImplementedError()
test_data.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100-arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100-arch[5]) * 0.01
}
)
return train_data, test_data
def dataset_all(args, nas_dataset):
total_keys = nas_dataset.total_keys
total_archs = nas_dataset.total_archs
all_archs = []
flag = args.search_space == 'nasbench_101'
for k in total_keys:
arch = total_archs[k]
if args.search_space == 'nasbench_101':
cell_inst = Cell_101(matrix=arch['matrix'], ops=arch['ops'])
edge_index, node_f = nas2graph(args.search_space, (arch['matrix'], arch['ops']))
g_data = Data(edge_index=edge_index.long(), x=node_f.float())
seminas_vec = convert_arch_to_seq(arch['o_matrix'], arch['o_ops'])
edge_index_reverse, node_f_reverse = nasbench2graph_reverse((arch['matrix'], arch['ops']), reverse=True)
g_data_reverse = Data(edge_index=edge_index_reverse.long(), x=node_f_reverse.float())
if len(seminas_vec) < 27:
padding = 27 - len(seminas_vec)
seminas_vec = seminas_vec + [0 for _ in range(padding)]
all_archs.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100 - arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100 - arch[5]) * 0.01,
'g_data': g_data,
'arch_k': k,
'seminas_vec': seminas_vec,
'edge_idx': edge_index,
'node_f': node_f,
'edge_idx_reverse': edge_index_reverse,
'node_f_reverse': node_f_reverse,
'g_data_reverse': g_data_reverse
}
)
elif args.search_space == 'nasbench_201':
cell_inst = Cell_201(matrix=arch[0][0], ops=arch[0][1])
edge_index, node_f = nas2graph(args.search_space, (arch[0][0], arch[0][1]))
edge_index_reverse, node_f_reverse = nas2graph(args.search_space, (arch[0][0], arch[0][1]), reverse=True)
g_data_reverse = Data(edge_index=edge_index_reverse.long(), x=node_f_reverse.float())
all_archs.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100 - arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100 - arch[5]) * 0.01,
'g_data': Data(edge_index=edge_index.long(), x=node_f.float()),
'arch_k': k,
'edge_idx': edge_index,
'node_f': node_f,
'edge_idx_reverse': edge_index_reverse,
'node_f_reverse': node_f_reverse,
'g_data_reverse': g_data_reverse
}
)
else:
raise NotImplementedError()
return all_archs
def split_data_from_all_data(all_data, idxs, train_data, budget, last_budget):
train_data_new = []
counter = 0
while len(train_data_new) < (budget - last_budget):
if idxs[last_budget+counter] < len(all_data):
train_data_new.append(all_data.pop(idxs[last_budget+counter]))
counter += 1
else:
counter += 1
continue
train_data.extend(train_data_new)
return train_data, all_data
def dataset_split_idx(all_data, budget=None):
idxs = list(range(len(all_data)))
random.shuffle(idxs)
train_data = [all_data[k] for k in idxs[:budget]]
test_data = [all_data[kt] for kt in idxs[budget:]]
return train_data, test_data
def dataset_split_idx_predictive_comparison(all_data, budget=None):
idxs = list(range(len(all_data)))
random.shuffle(idxs)
train_data = [all_data[k] for k in idxs[:int(budget)]]
test_data = [all_data[kt] for kt in idxs[int(budget):]]
return train_data, test_data
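# Usage sketch (hypothetical; assumes an `args` namespace providing the
# fields used above, e.g. search_space, seq_len and search_budget):
#   dataset = build_datasets(args)
#   all_archs = dataset_all(args, dataset)
#   train_data, test_data = dataset_split_idx(all_archs, budget=100)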
|
python
|
'''
Utility module.
'''
import yaml
import numpy as np
swap = lambda x1, x2: (x2, x1) if x1 > x2 else (x1, x2)
square = lambda x: x**2
def read_params(file) -> dict:
'''
Read yaml file.
Args:
file (str): Path to the yaml file.
Returns:
dict: Contents of the yaml file.
'''
with open(file, 'r') as yaml_file:
parameters = yaml.full_load(yaml_file)
return parameters
def dim_number(params: dict) -> int:
    '''
    Gets the number of dimensions of the optimized function.
    Args:
        params (dict): Algorithm parameters.
    Returns:
        int: Number of dimensions of the function.
    '''
function = params['function']
if function == 1 or function == 2:
return 1
elif function == 3 or function == 4:
return 2
    return None  # unknown function id
def chromosome_length(params: dict) -> int:
'''
Calculates a chromosome's length to be generated.
Args:
params (dict): Algorithm parameters.
    Returns:
int: Length of a chromosome.
'''
lower_bound = params['searchDomain']['lowerBound']
upper_bound = params['searchDomain']['upperBound']
precision = float(params['searchDomain']['precision'])
length = (upper_bound - lower_bound) / precision
length = int(np.ceil(np.log2(length)))
return length
def roulette_wheel(cum_probs: np.ndarray) -> int:
'''
Randomly selects an index given cumulative probabilities.
Args:
cum_probs (np.ndarray): Cumulative probabilities.
Returns:
int: Selected index.
'''
index = None
r = np.random.uniform()
for i, prob in enumerate(cum_probs):
if r <= prob:
index = i
break
return index
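# Example (a minimal sketch; the fitness values are hypothetical):
if __name__ == '__main__':
    fitness = np.array([1.0, 3.0, 6.0])
    cum_probs = np.cumsum(fitness / fitness.sum())  # [0.1, 0.4, 1.0]
    print(roulette_wheel(cum_probs))  # index 2 is selected ~60% of the time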
|
python
|
# -*- coding: utf-8 -*-
"""
Example code showing how to control Thorlabs TDC Motors using PyAPT
V1.2
20141125 V1.0 First working version
20141201 V1.0a Updated to short notation
20150324 V1.1 Added more descriptions
20150417 V1.2 Implemented motor without serial
Michael Leung
[email protected]
"""
# Import APTMotor class from PyAPT
from PyAPT import APTMotor
import time
# Create object corresponding to the motor.
Motor1 = APTMotor(83828393, HWTYPE=31) # The number should correspond to the serial number.
# Use help(APTMotor) to obtain the full list of supported hardware (HW).
# Note: You can control multiple motors by creating more APTMotor Objects
# Obtain current position of motor
print(Motor1.getPos())
# You can control multiple motors by creating more APTMotor Objects
# Serial numbers can be added later by using setSerialNumber and initializeHardwareDevice
# This functionality is particularly useful in the GUI setup.
Motor2 = APTMotor()
Motor2.setSerialNumber(83828393)
Motor2.initializeHardwareDevice()
print(Motor2.getPos())
# Move motor forward by 1mm, wait half a second, and return to original position.
# mRel is move relative. mAbs is move absolute (go to position xxx)
Motor1.mRel(1) # advance 1mm
time.sleep(.5)
Motor1.mRel(-1) # retract 1mm
time.sleep(1)
# Move motor forward by 1mm, wait half a second, and return to original position, at a velocity of 0.5mm/sec
motVel = 0.5 #motor velocity, in mm/sec
Motor1.mcRel(1, motVel) # advance 1mm
time.sleep(.5)
Motor1.mcRel(-1, motVel) # retract 1mm
# Clean up APT object, free up memory
Motor1.cleanUpAPT()
|
python
|
'''
Complete the given code so that it prints the sum of the squares of the elements of the list numbers.
numbers = [1, 78, 23, -65, 99, 9089, 34, -32, 0, -67, 1, 11, 111]
'''
numbers = [1, 78, 23, -65, 99, 9089, 34, -32, 0, -67, 1, 11, 111]
numbers2 = []
for n in numbers:
    numbers2.append(n ** 2)
print(sum(numbers2))
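# Equivalent one-liner using a generator expression:
# print(sum(n ** 2 for n in numbers))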
|
python
|
'''
Copyright (C) 2015 Ryan Gonzalez
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
g_backup = globals().copy()
__version__ = '0.8'
__all__ = ['overload', 'RuntimeModule', 'switch', 'tail_recurse', 'copyfunc',
'set_docstring', 'annotate', 'safe_unpack', 'modify_function',
'assign', 'fannotate', 'compare_and_swap', 'is_main',
'call_if_main', 'run_main']
import sys, inspect, types, functools
def _targspec(func, specs, attr='__orig_arg__'):
if hasattr(func, '__is_overload__') and func.__is_overload__:
return getattr(func, attr)
return specs(func)
def set_docstring(doc):
'''A simple decorator to set docstrings.
:param doc: The docstring to tie to the function.
Example::
@set_docstring('This is a docstring')
def myfunc(x):
pass'''
def wrap(f):
f.__doc__ = doc
return f
return wrap
_modify_function_doc = '''
Creates a copy of a function, changing its attributes.
:param globals: Will be added to the function's globals.
:param name: The new function name. Set to ``None`` to use the function's original name.
:param code: The new function code object. Set to ``None`` to use the function's original code object.
:param defaults: The new function defaults. Set to ``None`` to use the function's original defaults.
:param closure: The new function closure. Set to ``None`` to use the function's original closure.
.. warning:: This function can be potentially dangerous.
'''
def copyfunc(f):
    '''Copies a function.
:param f: The function to copy.
:return: The copied function.
.. deprecated:: 0.4
Use :func:`modify_function` instead.
'''
return modify_function(f)
if sys.version_info.major == 3:
@set_docstring(_modify_function_doc)
def modify_function(f, globals={}, name=None, code=None, defaults=None,
closure=None):
if code is None: code = f.__code__
if name is None: name = f.__name__
if defaults is None: defaults = f.__defaults__
if closure is None: closure = f.__closure__
newf = types.FunctionType(code, dict(f.__globals__, **globals), name=name,
argdefs=defaults, closure=closure)
newf.__dict__.update(f.__dict__)
return newf
argspec = inspect.getfullargspec
ofullargspec = inspect.getfullargspec
def _fullargspec(func):
return _targspec(func, ofullargspec)
inspect.getfullargspec = _fullargspec
def _exec(m,g): exec(m,g)
else:
@set_docstring(_modify_function_doc)
def modify_function(f, globals={}, name=None, code=None, defaults=None,
closure=None):
if code is None: code = f.func_code
if name is None: name = f.__name__
if defaults is None: defaults = f.func_defaults
if closure is None: closure = f.func_closure
newf = types.FunctionType(code, dict(f.func_globals, **globals),
name=name, argdefs=defaults, closure=closure)
newf.__dict__.update(f.__dict__)
return newf
argspec = inspect.getargspec
eval(compile('def _exec(m,g): exec m in g', '<exec>', 'exec'))
def _gettypes(args):
return tuple(map(type, args))
oargspec = inspect.getargspec
def _argspec(func):
return _targspec(func, oargspec)
inspect.getargspec = _argspec
try:
import IPython
except ImportError:
IPython = None
else:
# Replace IPython's argspec
oipyargspec = IPython.core.oinspect.getargspec
def _ipyargspec(func):
return _targspec(func, oipyargspec, '__orig_arg_ipy__')
IPython.core.oinspect.getargspec = _ipyargspec
class overload(object):
'''Simple function overloading in Python.'''
@classmethod
    def argc(cls, argc=None):
'''Overloads a function based on the specified argument count.
:param argc: The argument count. Defaults to ``None``. If ``None`` is given, automatically compute the argument count from the given function.
.. note::
Keyword argument counts are NOT checked! In addition, when the argument count is automatically calculated, the keyword argument count is also ignored!
Example::
@overload.argc()
def func(a):
print 'Function 1 called'
@overload.argc()
def func(a, b):
print 'Function 2 called'
func(1) # Calls first function
func(1, 2) # Calls second function
func() # Raises error
'''
# Python 2 UnboundLocalError fix
argc = {'argc': argc}
def wrap(f):
if argc['argc'] is None:
argc['argc'] = len(argspec(f).args)
try:
st = inspect.stack()[1][0]
oldf = dict(st.f_globals, **st.f_locals)[f.__name__]
except KeyError: pass
else:
if hasattr(oldf, '__pyext_overload_basic__'):
globls = oldf.__globals__ if sys.version_info.major == 3\
else oldf.func_globals
globls['overloads'][argc['argc']] = f
return oldf
@functools.wraps(f)
def newf(*args, **kwargs):
if len(args) not in overloads:
raise TypeError(
"No overload of function '%s' that takes %d args" % (
f.__name__, len(args)))
return overloads[len(args)](*args, **kwargs)
overloads = {}
overloads[argc['argc']] = f
newf = modify_function(newf, globals={'overloads': overloads})
newf.__pyext_overload_basic__ = None
newf.__orig_arg__ = argspec(f)
if IPython:
newf.__orig_arg_ipy__ = IPython.core.oinspect.getargspec(f)
return newf
return wrap
@classmethod
    def args(cls, *argtypes, **kw):
'''Overload a function based on the specified argument types.
:param argtypes: The argument types. If None is given, get the argument types from the function annotations(Python 3 only)
:param kw: Can only contain 1 argument, `is_cls`. If True, the function is assumed to be part of a class.
Example::
@overload.args(str)
def func(s):
print 'Got string'
@overload.args(int, str)
def func(i, s):
print 'Got int and string'
@overload.args()
def func(i:int): # A function annotation example
print 'Got int'
func('s')
func(1)
func(1, 's')
func(True) # Raises error
'''
# XXX: some of this should be moved to a utility class
# It's duplicated from overload.argc
# Python 2 UnboundLocalError fix...again!
argtypes = {'args': tuple(argtypes)}
def wrap(f):
if len(argtypes['args']) == 1 and argtypes['args'][0] is None:
aspec = argspec(f)
argtypes['args'] = tuple(map(lambda x: x[1], sorted(
aspec.annotations.items(),
key=lambda x: aspec.args.index(x[0]))))
try:
st = inspect.stack()[1][0]
oldf = dict(st.f_globals, **st.f_locals)[f.__name__]
except KeyError: pass
else:
if hasattr(oldf, '__pyext_overload_args__'):
globls = oldf.__globals__ if sys.version_info.major == 3\
else oldf.func_globals
globls['overloads'][argtypes['args']] = f
return oldf
@functools.wraps(f)
def newf(*args):
if len(kw) == 0:
cargs = args
elif len(kw) == 1 and 'is_cls' in kw and kw['is_cls']:
cargs = args[1:]
else:
raise ValueError('Invalid keyword args specified')
types = _gettypes(cargs)
if types not in overloads:
raise TypeError(\
"No overload of function '%s' that takes: %s" % (
f.__name__, types))
return overloads[types](*args)
overloads = {}
overloads[argtypes['args']] = f
newf = modify_function(newf, globals={'overloads': overloads})
newf.__pyext_overload_args__ = None
newf.__orig_arg__ = argspec(f)
if IPython:
newf.__orig_arg_ipy__ = IPython.core.oinspect.getargspec(f)
return newf
return wrap
class _RuntimeModule(object):
'Create a module object at runtime and insert it into sys.path. If called, same as :py:func:`from_objects`.'
def __call__(self, *args, **kwargs):
return self.from_objects(*args, **kwargs)
@staticmethod
@overload.argc(1)
def from_objects(name, **d):
return _RuntimeModule.from_objects(name, '', **d)
@staticmethod
@overload.argc(2)
def from_objects(name, docstring, **d):
'''Create a module at runtime from `d`.
:param name: The module name.
:param docstring: Optional. The module's docstring.
:param \*\*d: All the keyword args, mapped from name->value.
Example: ``RuntimeModule.from_objects('name', 'doc', a=1, b=2)``'''
module = types.ModuleType(name, docstring)
module.__dict__.update(d)
module.__file__ = '<runtime_module>'
sys.modules[name] = module
return module
@staticmethod
@overload.argc(2)
def from_string(name, s):
return _RuntimeModule.from_string(name, '', s)
@staticmethod
@overload.argc(3)
def from_string(name, docstring, s):
        '''Create a module at runtime from `s`.
:param name: The module name.
:param docstring: Optional. The module docstring.
:param s: A string containing the module definition.'''
g = {}
_exec(s, g)
return _RuntimeModule.from_objects(name, docstring,
**dict(filter(lambda x: x[0] not in g_backup, g.items())))
RuntimeModule = _RuntimeModule()
class CaseObject(object):
'The object returned by a switch statement. When called, it will return True if the given argument equals its value, else False. It can be called with multiple parameters, in which case it checks if its value equals any of the arguments.'
def __init__(self, value, cstyle):
self.value = value
self.did_match = False
self.cstyle = cstyle
self.did_pass = not cstyle
def __call__(self, *args):
if not self.cstyle and self.did_match: return False
if assign('res', not (self.did_pass and self.cstyle) and\
self.value in args):
self.did_match = True
return res
def quit(self):
        'Forces all other calls to return False. Equivalent of a ``break`` statement.'
self.did_pass = True
def default(self):
"Executed if ``quit`` wasn't called."
return not self.did_match and (not self.did_pass if self.cstyle else True)
def __iter__(self):
yield self
def __enter__(self):
return self
def __exit__(self, *args):
pass
def switch(value, cstyle=False):
'''A Python switch statement implementation that is used with a ``with`` statement.
:param value: The value to "switch".
:param cstyle: If ``True``, then cases will automatically fall through to the next one until ``case.quit()`` is encountered.
``with`` statement example::
with switch('x'):
if case(1): print 'Huh?'
if case('x'): print 'It works!!!'
    .. warning:: If you modify a variable named "case" in the same scope that you use the ``with`` statement version, you will get an UnboundLocalError. The solution is to use ``with switch('x') as case:`` instead of ``with switch('x'):``.'''
res = CaseObject(value, cstyle)
inspect.stack()[1][0].f_globals['case'] = res
return res
def tail_recurse(spec=None):
'''Remove tail recursion from a function.
    :param spec: A function that, when given the arguments, returns a bool indicating whether or not to exit. If ``None``, tail recursion is always called unless the function returns a value.
.. note::
        This function has a slight overhead that is noticeable when using timeit. Only use it if the function has a possibility of going over the recursion limit.
.. warning::
This function will BREAK any code that either uses any recursion other than tail recursion or calls itself multiple times. For example, ``def x(): return x()+1`` will fail.
Example::
@tail_recurse()
def add(a, b):
if a == 0: return b
return add(a-1, b+1)
add(10000000, 1) # Doesn't max the recursion limit.
'''
def _wrap(f):
class TailRecursion(Exception):
def __init__(self, args, kwargs):
self.args = args
self.kwargs = kwargs
def _newf(*args, **kwargs):
if inspect.stack()[1][3] == f.__name__:
if (spec and spec(args)) or not spec:
raise TailRecursion(args, kwargs)
while True:
try:
res = f(*args, **kwargs)
except TailRecursion as ex:
args = ex.args
kwargs = ex.kwargs
continue
else:
return res
_newf.__doc__ = f.__doc__
return _newf
return _wrap
def annotate(*args, **kwargs):
'''Set function annotations using decorators.
:param args: This is a list of annotations for the function, in the order of the function's parameters. For example, ``annotate('Annotation 1', 'Annotation 2')`` will set the annotations of parameter 1 of the function to ``Annotation 1``.
:param kwargs: This is a mapping of argument names to annotations. Note that these are applied *after* the argument list, so any args set that way will be overriden by this mapping. If there is a key named `ret`, that will be the annotation for the function's return value.
.. deprecated:: 0.5
Use :func:`fannotate` instead.
'''
def _wrap(f):
if not hasattr(f, '__annotations__'):
f.__annotations__ = {}
if 'ret' in kwargs:
f.__annotations__['return'] = kwargs.pop('ret')
f.__annotations__.update(dict(zip(argspec(f).args, args)))
f.__annotations__.update(kwargs)
return f
return _wrap
def fannotate(*args, **kwargs):
'''Set function annotations using decorators.
:param \*args: The first positional argument is used for the function's return value; all others are discarded.
:param \**kwargs: This is a mapping of argument names to annotations.
Example::
@fannotate('This for the return value', a='Parameter a', b='Parameter b')
def x(a, b):
pass
'''
def wrap(f):
if not hasattr(f, '__annotations__'):
f.__annotations__ = {}
if len(args) >= 1:
f.__annotations__['return'] = args[0]
f.__annotations__.update(kwargs)
return f
return wrap
def safe_unpack(seq, ln, fill=None):
'''Safely unpack a sequence to length `ln`, without raising ValueError. Based on Lua's method of unpacking. Empty values will be filled in with `fill`, while any extra values will be cut off.
:param seq: The sequence to unpack.
:param ln: The expected length of the sequence.
:param fill: The value to substitute if the sequence is too small. Defaults to ``None``.
Example::
s = 'a:b'
a, b = safe_unpack(s.split(':'), 2)
# a = 'a'
# b = 'b'
s = 'a'
a, b = safe_unpack(s.split(':'), 2)
# a = 'a'
# b = None'''
if len(seq) > ln:
return seq[:ln]
elif len(seq) < ln:
return seq + type(seq)([fill]*(ln-len(seq)))
else:
return seq
def assign(varname, value):
'''Assign `value` to `varname` and return it. If `varname` is an attribute and the instance name it belongs to is not defined, a NameError is raised.
This can be used to emulate assignment as an expression. For example, this::
if assign('x', 7): ...
    is equivalent to this C code::
if (x = 7) ...
.. warning::
When assigning an attribute, the instance it belongs to MUST be declared as global prior to the assignment. Otherwise, the assignment will not work.
'''
fd = inspect.stack()[1][0].f_globals
fl = inspect.stack()[1][0].f_locals
if '.' not in varname:
fd[varname] = value
else:
vsplit = list(map(str.strip, varname.split('.')))
fvars = dict(fd, **fl)
if vsplit[0] not in fvars:
raise NameError('Unknown object: %s' % vsplit[0])
base = fvars[vsplit[0]]
for x in vsplit[1:-1]:
base = getattr(base, x)
setattr(base, vsplit[-1], value)
return value
def is_main(frame=1):
"Return if the caller is main. Equilavent to ``__name__ == '__main__'``."
return inspect.stack()[frame][0].f_globals['__name__'] == '__main__'
def _call_if_main(frame, f, args):
if is_main(frame): return f(*args)
def call_if_main(f,*args):
"Call the `f` with `args` if the caller's module is main."
return _call_if_main(3,f,args)
def run_main(f,*args):
"Call `f` with the `args` and terminate the program with its return code if the caller's module is main."
sys.exit(_call_if_main(3,f,args))
def compare_and_swap(var, compare, new):
"If `var` is equal to `compare`, set it to `new`."
if assign('v', inspect.stack()[1][0].f_globals)[var] == compare:
v[var] = new
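# Usage sketch of the switch statement defined above (runs only when the
# module is executed directly):
if is_main():
    with switch(2) as case:
        if case(1): print('one')
        if case(2): print('two')  # prints 'two'
        if case.default(): print('other')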
|
python
|
# Generated by Django 2.2.24 on 2021-08-16 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('landing', '0103_policyarticle'),
]
operations = [
migrations.AddField(
model_name='section',
name='prefix',
field=models.TextField(blank=True, verbose_name='prefix text'),
),
]
|
python
|
class Config:
"""Discriminator configurations.
"""
def __init__(self, steps: int):
"""Initializer.
Args:
steps: diffusion steps.
"""
self.steps = steps
# embedding
self.pe = 128
self.embeddings = 512
self.mappers = 2
# block
self.channels = 64
self.kernels = 3
self.layers = 10
self.leak = 0.2
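# Usage sketch: configuration for a 1000-step diffusion discriminator
# (the step count is a hypothetical example):
if __name__ == '__main__':
    config = Config(steps=1000)
    print(config.steps, config.channels, config.layers)  # 1000 64 10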
|
python
|
'''
Simple program to gather all the internal and external links. NOT to be confused with inlinks and outlinks.
Internal links are those links that point to another website within the same domain
External links are those links that point to another website that does NOT share the same domain
Reference link: https://www.thepythoncode.com/article/extract-all-website-links-python
'''
# First step is to import all the basic libraries required.
import requests
from urllib.parse import urlparse, urljoin
from bs4 import BeautifulSoup
import colorama
import pandas as pd
import numpy as np
import time
#Colorama is a simple tool used to display different colors on the terminal
colorama.init()
GREEN = colorama.Fore.GREEN
GRAY = colorama.Fore.LIGHTBLACK_EX
RESET = colorama.Fore.RESET
YELLOW = colorama.Fore.YELLOW
# max_urls defines the maximum number of URLs to crawl when gathering the rest of the URLs
# The internal and external URLs are put into sets to prevent redundancy
max_urls = 10
internal_urls = set()
external_urls = set()
# Basic method to check whether a given URL is valid or not
def is_valid(url):
"""
Checks whether `url` is a valid URL.
"""
parsed = urlparse(url)
return bool(parsed.netloc) and bool(parsed.scheme)
# The function below gets all the links; as a trial we consider only the <a> tags for the time being.
def get_all_website_links(url):
"""
    Returns all URLs found on `url` that belong to the same website
"""
# all URLs of `url`
urls = set()
# domain name of the URL without the protocol
domain_name = urlparse(url).netloc
soup = BeautifulSoup(requests.get(url).content, "html.parser")
    for a_tag in soup.find_all("a"):
href = a_tag.attrs.get("href")
if href == "" or href is None:
# href empty tag
continue
href = urljoin(url, href)
parsed_href = urlparse(href)
# remove URL GET parameters, URL fragments, etc.
href = parsed_href.scheme + "://" + parsed_href.netloc + parsed_href.path
if not is_valid(href):
# not a valid URL
continue
if href in internal_urls:
# already in the set
continue
if domain_name not in href:
# external link
if href not in external_urls:
print(f"{GRAY}[!] External link: {href}{RESET}")
external_urls.add(href)
continue
print(f"{GREEN}[*] Internal link: {href}{RESET}")
urls.add(href)
internal_urls.add(href)
return urls
total_urls_visited = 0
def crawl(url, max_urls):
"""
Crawls a web page and extracts all links.
You'll find all links in `external_urls` and `internal_urls` global set variables.
    params:
        url (str): the URL to start crawling from.
        max_urls (int): maximum number of URLs to crawl, default is 10.
"""
global total_urls_visited
total_urls_visited += 1
print(f"{YELLOW}[*] Crawling: {url}{RESET}")
links = get_all_website_links(url)
for link in links:
if total_urls_visited > max_urls:
break
crawl(link, max_urls=max_urls)
Sample1 = pd.read_csv('whoisLegi.csv').sample(10)
start = time.time()
count = 1
print("Program starting, for any URLs taking more than 10s press ctrl+c")
for i in Sample1.URL:
print("URL:",count)
count +=1
try:
crawl(i, max_urls)
print("[+] Total Internal links:", len(internal_urls))
print("[+] Total External links:", len(external_urls))
print("[+] Total URLs:", len(external_urls) + len(internal_urls))
print("[+] Total crawled URLs:", max_urls)
    except:  # bare except also catches KeyboardInterrupt, so ctrl+c skips a slow URL
print("Not a valid URL\n")
continue
end = time.time()
print("Finished execution in:",end-start,"seconds")
|
python
|
from setuptools import setup
setup(
name='simplejira',
version='1.0',
description='simplejira',
author='Brandon Squizzato',
author_email='[email protected]',
url='https://www.github.com/bsquizz/simplejira',
packages=['simplejira'],
install_requires=[
'jira',
'pyyaml',
'prompter',
'python-editor',
'attrs',
'prettytable',
'cmd2',
'iso8601',
'six',
'pykerberos',
'python-dateutil',
'requests',
'pbr',
'requests-kerberos',
],
scripts=['bin/simplejira']
)
|
python
|
import requests
import jwt
import binascii
from base58 import b58decode_check
from ecdsa import SECP256k1, VerifyingKey, SigningKey
def submitTransaction(signedTransactionHex, nodeURL):
endpointURL = nodeURL + "submit-transaction"
payload = {'TransactionHex': signedTransactionHex}
response = requests.post(endpointURL, json=payload)
return response
def appendExtraData(transactionHex, derivedKey, nodeURL):
payload = {"TransactionHex": transactionHex,
"ExtraData": {"DerivedPublicKey": derivedKey}}
endpoint = nodeURL + "append-extra-data"
response = requests.post(endpoint, json=payload)
return response
def validateJWT(JWT, publicKey):
    # this method is used for public key validation
try:
rawPublicKeyHex = b58decode_check(publicKey)[3:].hex()
public_key = bytes(rawPublicKeyHex, 'utf-8')
public_key = binascii.unhexlify(public_key)
key = VerifyingKey.from_string(public_key, curve=SECP256k1)
key = key.to_pem()
decoded = jwt.decode(JWT, key, algorithms=['ES256'])
return {"isValid": True, "decodedJWT": decoded}
except Exception as e:
return {"isValid": False, "error": str(e)}
def getUserJWT(seedHex):
# returns JWT token of user that helps in public key validation in backend
private_key = bytes(seedHex, 'utf-8')
private_key = binascii.unhexlify(private_key)
key = SigningKey.from_string(private_key, curve=SECP256k1)
key = key.to_pem()
encoded_jwt = jwt.encode({}, key, algorithm="ES256")
return encoded_jwt
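# Round-trip sketch: generate a keypair, sign a JWT and validate it. The
# three-byte base58 prefix below is an assumption about the network's
# public-key encoding, not something taken from this module:
if __name__ == '__main__':
    from base58 import b58encode_check
    sk = SigningKey.generate(curve=SECP256k1)
    seedHex = sk.to_string().hex()
    publicKey = b58encode_check(b'\xcd\x14\x00' +
                                sk.get_verifying_key().to_string()).decode()
    token = getUserJWT(seedHex)
    print(validateJWT(token, publicKey))  # {'isValid': True, 'decodedJWT': {}}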
|
python
|
"""A module that provides methods for accessing the Auth API and providing the logged in user details."""
import http
import json
import logging
import typing
import fastapi
import fastapi.security
import fastapi.security.http
import requests
from pydantic import BaseModel # pylint:disable=no-name-in-module
import config
logger = logging.getLogger(__name__)
bearer_scheme = fastapi.security.HTTPBearer()
def check_auth_response(response: requests.Response):
"""Review the response from the external API and throw an error if it was forbidden or unauthorized."""
if response.status_code in [http.HTTPStatus.UNAUTHORIZED, http.HTTPStatus.FORBIDDEN]:
try:
body = response.json()
description = body['description'] if 'description' in body else None
except json.decoder.JSONDecodeError:
description = None
raise fastapi.HTTPException(
status_code=response.status_code, detail=description
)
def get_user_from_auth(auth: fastapi.security.http.HTTPAuthorizationCredentials = fastapi.Depends(bearer_scheme)):
"""Make a request to Auth API and return the response body."""
auth_response = requests.get('{}/users/@me'.format(config.AUTH_API_URL),
headers={'Authorization': '{} {}'.format(auth.scheme, auth.credentials)})
check_auth_response(auth_response)
if not auth_response: # status_code is unsuccessful
logger.error('Get User call failed unexpectedly with status {}. Response body: {}'.format(
auth_response.status_code, auth_response.text))
raise fastapi.HTTPException(status_code=http.HTTPStatus.INTERNAL_SERVER_ERROR)
return auth_response.json()
def get_current_user(auth_api_user: dict = fastapi.Depends(get_user_from_auth), account_id: typing.Optional[str] = fastapi.Header(None)):
"""Parse the provided dict into a User instance."""
return User(user_id=auth_api_user['keycloakGuid'], user_name=auth_api_user['username'], account_id=account_id)
class User(BaseModel):
"""Represents the minimal user details provided by the Auth API."""
user_id: str
user_name: str
    account_id: typing.Optional[str] = None
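# Wiring sketch (hypothetical route; assumes config.AUTH_API_URL points at a
# running Auth API):
#   app = fastapi.FastAPI()
#
#   @app.get('/whoami')
#   def whoami(user: User = fastapi.Depends(get_current_user)):
#       return {'user_id': user.user_id, 'account_id': user.account_id}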
|
python
|
"""
Exercise 7 - Sequence Slicing
Question: List slicing is important in various data manipulation activities. Let's do a few more exercises on that.
Please complete the script so that it prints out the first three items of list letters.
letters = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
Expected output:
['a', 'b', 'c']
"""
letters = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
print(letters[:3])
# ['a', 'b', 'c'] -> 1 point
|
python
|
#!/usr/bin/env python3
# (C) 2021 gomachssm
import datetime
__copyright__ = f'(C) {datetime.date.today().year} gomachssm'
__version__ = 'dummy' # get from tag, matches v([0-9]+\.[0-9]+\.[0-9]+).
__license__ = 'Apache License, Version 2.0'
__author__ = 'gomachssm'
__url__ = 'https://github.com/gomachssm/twsqlparser'
|
python
|
import os
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import standard.analysis as sa
from tools import nicename
import tools
import task
import settings
mpl.rcParams['font.size'] = 7
mpl.rcParams['pdf.fonttype'] = 42
mpl.rcParams['ps.fonttype'] = 42
mpl.rcParams['font.family'] = 'arial'
use_torch = settings.use_torch
def load_activity_tf(save_path, lesion_kwargs=None):
"""Load model activity.
Returns:
"""
import tensorflow as tf
from model import SingleLayerModel, FullModel, NormalizedMLP
    # Reload the network and analyze activity
config = tools.load_config(save_path)
config.label_type = 'sparse'
# Load dataset
train_x, train_y, val_x, val_y = task.load_data(config.data_dir)
tf.reset_default_graph()
if config.model == 'full':
CurrentModel = FullModel
elif config.model == 'singlelayer':
CurrentModel = SingleLayerModel
elif config.model == 'normmlp':
CurrentModel = NormalizedMLP
else:
raise ValueError('Unknown model type ' + str(config.model))
# Build validation model
val_x_ph = tf.placeholder(val_x.dtype, val_x.shape)
val_y_ph = tf.placeholder(val_y.dtype, val_y.shape)
model = CurrentModel(val_x_ph, val_y_ph, config=config, training=False)
# model.save_path = rootpath + model.save_path[1:]
model.save_path = save_path
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
with tf.Session(config=tf_config) as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
model.load()
if lesion_kwargs:
model.lesion_units(**lesion_kwargs)
# Validation
glo_out, glo_in, kc_in, kc_out, logits = sess.run(
[model.glo, model.glo_in, model.kc_in, model.kc, model.logits],
{val_x_ph: val_x, val_y_ph: val_y})
# results = sess.run(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))
return {'glo_in': glo_in, 'glo': glo_out,
'kc_in': kc_in, 'kc': kc_out}
def load_activity_torch(save_path, lesion_kwargs=None):
import torch
from torchmodel import get_model
# Reload the network and analyze activity
config = tools.load_config(save_path)
# Load dataset
train_x, train_y, val_x, val_y = task.load_data(config.data_dir)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
with torch.no_grad():
model = get_model(config)
model.load()
model.to(device)
model.readout()
if lesion_kwargs is not None:
for key, val in lesion_kwargs.items():
model.lesion_units(key, val)
# validation
val_data = torch.from_numpy(val_x).float().to(device)
val_target = torch.from_numpy(val_y).long().to(device)
model.eval()
results = model(val_data, val_target)
for key, val in results.items():
try:
results[key] = val.cpu().numpy()
except AttributeError:
pass
results[key] = np.array(results[key])
return results
def load_activity(save_path, lesion_kwargs=None):
if use_torch:
return load_activity_torch(save_path, lesion_kwargs)
else:
return load_activity_tf(save_path, lesion_kwargs)
def plot_activity(save_path):
results = load_activity(save_path)
save_name = save_path.split('/')[-1]
plt.figure()
plt.hist(results['glo'].flatten(), bins=100)
plt.title('Glo activity distribution')
tools.save_fig(save_path, save_name + '_pn_activity')
plt.figure()
plt.hist(results['kc'].flatten(), bins=100)
plt.title('KC activity distribution')
tools.save_fig(save_path, save_name + '_kc_activity')
def image_activity(save_path, arg, sort_columns=True, sort_rows=True):
def _image(data, zticks, name, xlabel='', ylabel=''):
rect = [0.2, 0.15, 0.6, 0.65]
rect_cb = [0.82, 0.15, 0.02, 0.65]
fig = plt.figure(figsize=(2.6, 2.6))
ax = fig.add_axes(rect)
cm = 'Reds'
im = ax.imshow(data, cmap=cm, vmin=zticks[0], vmax=zticks[1], interpolation='none')
plt.axis('tight')
ax.set_ylabel(nicename(ylabel))
ax.set_xlabel(nicename(xlabel))
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.tick_params('both', length=0)
ax.set_xticks([0, data.shape[1]])
ax.set_yticks([0, data.shape[0]])
ax = fig.add_axes(rect_cb)
cb = plt.colorbar(im, cax=ax)
cb.set_ticks(zticks)
cb.outline.set_linewidth(0.5)
cb.set_label('Activity', fontsize=7, labelpad=5)
plt.tick_params(axis='both', which='major', labelsize=7)
cb.ax.tick_params('both', length=0)
plt.axis('tight')
tools.save_fig(save_path, '_' + name, pdf=False)
dirs = tools.get_modeldirs(save_path)
for i, d in enumerate(dirs):
results = load_activity(d)
data = results[arg]
if arg == 'glo_in':
xlabel = 'PN Input'
zticks = [0, 4]
elif arg == 'glo':
xlabel = 'PN'
zticks = [0, 4]
elif arg == 'kc':
xlabel = 'KC'
zticks = [0, 1]
else:
raise ValueError('data type not recognized for image plotting: {}'.format(arg))
if sort_columns:
data = np.sort(data, axis=1)[:,::-1]
if sort_rows:
ix = np.argsort(np.sum(data, axis=1))
data = data[ix,:]
_image(data, zticks=zticks, name = 'image_' + arg + '_' + str(i), xlabel=xlabel, ylabel='Odors')
def _distribution(data, save_path, name, xlabel, ylabel, xrange=None,
title=None, density=False):
fig = plt.figure(figsize=(1.5, 1.5))
ax = fig.add_axes((0.3, 0.25, 0.6, 0.6))
plt.hist(data, bins=30, range=xrange, density=density, align='left')
plt.ticklabel_format(axis="y", style="sci", scilimits=(0, 2))
# xticks = np.linspace(xrange[0], xrange[1], 5)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if xrange is not None:
plt.xlim(xrange)
# ax.set_xticks(xticks)
plt.locator_params(axis='x', nbins=3)
plt.locator_params(axis='y', nbins=3)
if title is not None:
plt.title(title, fontsize=7)
# ax.set_yticks(np.linspace(0, yrange, 3))
# plt.ylim([0, yrange])
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
tools.save_fig(save_path, '_' + name, pdf=True)
def distribution_activity(save_path, var_names=None):
dirs = tools.get_modeldirs(save_path)
if var_names is None:
var_names = ['kc', 'glo']
elif isinstance(var_names, str):
var_names = [var_names]
for d in dirs:
results = load_activity(d)
for var_name in var_names:
data = results[var_name].flatten()
xlabel = tools.nicename(var_name)
ylabel = 'Distribution'
name = 'dist_' + var_name + '_' + tools.get_model_name(d)
figpath = tools.get_experiment_name(d)
_distribution(data, figpath, name=name, density=True,
xlabel=xlabel, ylabel=ylabel)
def sparseness_activity(save_path, var_names, activity_threshold=0.,
lesion_kwargs=None, titlekey=None, figname=None):
"""Plot the sparseness of activity.
Args:
        save_path: model path or list of model paths
        var_names: str or list of str, the activity variable(s) to plot
"""
if isinstance(save_path, str):
dirs = tools.get_modeldirs(save_path)
else:
dirs = save_path
if figname is None:
figname = ''
if isinstance(var_names, str):
var_names = [var_names]
for d in dirs:
results = load_activity(d, lesion_kwargs)
config = tools.load_config(d)
for var_name in var_names:
data = results[var_name]
xrange = [-0.05, 1.05]
if var_name == 'glo':
name = 'PN'
elif var_name == 'kc':
name = 'KC'
else:
raise ValueError('Unknown var name', var_name)
figpath = tools.get_experiment_name(d)
data1 = np.mean(data > activity_threshold, axis=1)
if titlekey is None:
title = None
else:
title = tools.nicename(titlekey) + ' '
title = title + tools.nicename(getattr(config, titlekey),
mode=titlekey)
fname = figname + 'spars_' + var_name + '_' + tools.get_model_name(d)
_distribution(data1, figpath, name=fname, density=False,
xlabel='% of Active '+name+'s', title=title,
ylabel='Number of Odors', xrange=xrange)
data2 = np.mean(data > activity_threshold, axis=0)
fname = figname + 'spars_' + var_name + '2_' + tools.get_model_name(d)
_distribution(data2, figpath, name=fname, density=False,
xlabel='% of Odors', title=title,
ylabel='Number of '+name+'s', xrange=xrange)
def plot_mean_activity_sparseness(save_path, arg, xkey,
loop_key=None, select_dict=None):
dirs = tools.get_modeldirs(save_path)
mean_sparseness = []
for i, d in enumerate(dirs):
results = load_activity(d)
data = results[arg]
activity_threshold = 0
data = np.count_nonzero(data > activity_threshold, axis=1) / data.shape[1]
mean_sparseness.append(data.mean())
for i, d in enumerate(dirs):
config = tools.load_config(d)
setattr(config, arg + '_sparse_mean', mean_sparseness[i])
tools.save_config(config, d)
    sa.plot_results(save_path, xkey=xkey, ykey=arg + '_sparse_mean',
                    ax_args={'yticks': [0, .2, .4, .6, .8]},
                    figsize=(1.5, 1.5), ax_box=(0.27, 0.25, 0.65, 0.65),
                    loop_key=loop_key,
                    select_dict=select_dict)
|
python
|
from django.shortcuts import render, reverse
from django.http import HttpResponseRedirect
# Create your views here.
def home(request):
    return render(request, 'home.html')
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Root __init__
"""
__author__ = "Samuel Marks"
__version__ = "0.0.7"
__description__ = "CLI to replace HTTP GET on GitHub API with clones"