content
stringlengths 0
894k
| type
stringclasses 2
values |
---|---|
from django.db import models
from django.contrib.auth.models import AbstractUser
from PIL import Image
class User(AbstractUser):
    """Custom user with role flags for the teacher/student split."""
    # Role flags; nothing here enforces mutual exclusivity, so a user may be
    # flagged as teacher, student, both, or neither.
    is_teacher = models.BooleanField(default=False)
    is_student = models.BooleanField(default=False)
    # NOTE(review): these override AbstractUser's name fields; unlike the
    # defaults they do not set blank=True — confirm names are meant to be required.
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
class Teacher(models.Model):
    """Teacher-specific profile; shares its primary key with the User row."""
    # primary_key=True makes the user FK double as this table's PK (no extra id column).
    user = models.OneToOneField(
        User, on_delete=models.CASCADE, primary_key=True)
    phone_number = models.CharField(max_length=100)
class Student(models.Model):
    """Student-specific profile; shares its primary key with the User row."""
    user = models.OneToOneField(
        User, on_delete=models.CASCADE, primary_key=True)
    # Free-form registration number; defaults to a sentinel instead of NULL.
    registration_number = models.CharField(
        max_length=100, default='not available')
class Profile(models.Model):
    """Per-user profile holding the avatar image."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    image = models.ImageField(default='default.jpg', upload_to='profile_pics')
    def __str__(self):
        return f'{self.user.username} Profile'
    def save(self, *args, **kwargs):
        # Persist first so the file exists on disk and self.image.path is valid.
        super(Profile, self).save(*args, **kwargs)
        # Downscale oversized avatars in place; thumbnail() preserves aspect
        # ratio, so neither dimension will exceed 300px afterwards.
        img = Image.open(self.image.path)
        if img.height > 300 or img.width > 300:
            output_size = (300, 300)
            img.thumbnail(output_size)
            img.save(self.image.path)
|
python
|
from django.urls import reverse
from rest_framework.test import APIClient
from django.test import TestCase
from people.models import Person
from owf_groups.models import OwfGroup, OwfGroupPeople
# Shared API client for the whole module; note the name shadows the popular
# "requests" HTTP library, which is intentional here but easy to misread.
requests = APIClient()
class GroupsApiTests(TestCase):
    """API tests for removing users from groups via the admin endpoint."""
    fixtures = ['resources/fixtures/default_data.json', ]

    def setUp(self):
        self.admin_user = Person.objects.get(pk=1)
        self.regular_user = Person.objects.get(pk=2)
        # create group
        self.group = OwfGroup.objects.create(name="group")
        self.group.add_user(self.regular_user)

    def test_admin_can_remove_user_from_group(self):  # TODO: John
        requests.login(email='[email protected]', password='password')
        # BUG FIX: was reverse(..., args=('0')) — ('0') is a plain string, not
        # a tuple, and pk 0 was hard-coded. Resolve the actual membership row
        # instead, mirroring the non-admin test below.
        group_people = OwfGroupPeople.objects.get(person=self.regular_user, group=self.group)
        url = reverse('admin_groups-people-detail', args=(group_people.id,))
        response = requests.delete(url, {'group_id': self.group.id, 'person_id': self.regular_user.id})
        self.assertEqual(response.status_code, 204)
        requests.logout()

    def test_nonadmin_cannot_remove_user_from_group(self):
        requests.login(email='[email protected]', password='password')
        group_people = OwfGroupPeople.objects.get(person=self.regular_user, group=self.group)
        url = reverse('admin_groups-people-detail', args=(group_people.id,))
        response = requests.delete(url)
        self.assertEqual(response.status_code, 403)
        requests.logout()
|
python
|
from django.shortcuts import render, redirect
from django.contrib import messages
from .forms import UserRegisterForm, UserUpdateForm, UserTypeForm, UserDeleteForm
from django.contrib.auth.decorators import login_required
from functions import find_special_chars
# Form for registration page
def register(request):
    """Render and process the user registration form.

    On POST: rejects names containing special characters, otherwise validates
    the registration and user-type forms, creates the user + profile, and
    redirects to the login page. On GET (or failed POST) re-renders the forms.
    """
    # BUG FIX: the original wrapped this in `while True:` with `break`s used as
    # a goto — a single pass with plain conditionals is equivalent and clearer.
    if request.method == 'POST':
        form = UserRegisterForm(request.POST)
        type_form = UserTypeForm(request.POST)
        if find_special_chars(request.POST['first_name']) or \
                find_special_chars(request.POST['last_name']):
            messages.error(request, 'Please enter a valid first and last name')
        elif form.is_valid() and type_form.is_valid():
            # Need to do it this way to create the profile instance
            user = form.save()
            user.refresh_from_db()
            profile_form = UserTypeForm(request.POST, instance=user.profile)
            profile_form.full_clean()
            profile_form.save()
            messages.success(request, 'Your account has been created! Please login.')
            return redirect('login')
    else:
        form = UserRegisterForm()
        type_form = UserTypeForm()
    context = {
        'form': form,
        'type_form': type_form,
    }
    return render(request, 'users/register.html', context)
# Form for updating profile
@login_required
def profile(request):
    """Render and process the profile-update form for the logged-in user."""
    # BUG FIX: removed the `while True:`/`break` goto pattern from the
    # original — one pass with conditionals has identical behavior.
    if request.method == 'POST':
        u_form = UserUpdateForm(request.POST, instance=request.user)
        if find_special_chars(request.POST['first_name']) or \
                find_special_chars(request.POST['last_name']):
            messages.error(request, 'Please enter a valid first and last name')
        elif u_form.is_valid():
            u_form.save()
            messages.success(request, 'Your account has been updated!')
            return redirect('profile')
    else:
        u_form = UserUpdateForm(instance=request.user)
    context = {
        'u_form': u_form,
    }
    return render(request, 'users/profile.html', context)
# View for deleting user from account settings
@login_required
def account_settings(request):
    """Show account settings; a POST deletes the requesting user's account.

    NOTE(review): the delete form is instantiated on POST but never validated
    before the account is deleted — confirm that is intentional.
    """
    if request.method == 'POST':
        form = UserDeleteForm(request.POST, instance=request.user)
        user = request.user
        user.delete()
        messages.success(request, f'Your account has been deleted')
        return redirect('home')
    form = UserDeleteForm(instance=request.user)
    return render(request, 'users/account_settings.html', {
        'title': 'Account Settings',
        'form': form,
    })
|
python
|
import os
import csv
import json
import glob
import gzip
import random
import tarfile
import zipfile
import pandas as pd
from .downloader_utils import get_corpora_dict
from .downloader_utils import get_resource_dir
def _extract_tarfile(filename, target_dir):
with tarfile.open(filename, 'r:*') as tar:
tar.extractall(target_dir)
def _shuffle_data(is_shuffle, data):
if is_shuffle:
random.shuffle(data)
def _max_count_data(max_count, data):
return data[:max_count]
def _split_train_dev_test(data, train_data=0.8, dev_data=0.1, test_data=0.1):
total = train_data+dev_data+test_data
if not total == 1.0:
err_msg = f"The total of train/dev/test data: {total} must be 1."
raise Exception(err_msg)
num_train = int(len(data) * train_data)
num_dev = int(len(data) * dev_data)
num_test = int(len(data) * test_data)
train = pd.DataFrame(data[:num_train])
dev = pd.DataFrame(data[num_train:num_train+num_dev])
test = pd.DataFrame(data[num_train+num_dev:num_train+num_dev+num_test])
return train, dev, test
def _check_correct_corpus_type(corpus_type, corpus_types):
if corpus_type not in corpus_types:
err_msg = f"{corpus_type} is not available. Choose from {corpus_types}"
raise Exception(err_msg)
def load_corpus(corpus, n=None, is_shuffle=True, corpus_type=None,
                train_data=0.8, dev_data=0.1, test_data=0.1, random_seed=1234):
    """
    Dataloader for selected corpus.
    The data is pre-processed and split into training data,
    development data and test data.
    Parameters
    ----------
    corpus : str
        The corpus
    n : int
        The number of datasets
    is_shuffle : bool
        If true, shuffle the dataset
    corpus_type : str
        Corpus-specific variant; defaults to "binary" for
        yahoo_movie_reviews and "title" for livedoor_news_corpus
    train_data : float
        Percentage of training data
    dev_data : float
        Percentage of development data
    test_data : float
        Percentage of test data
    random_seed : int
        Random seed for shuffle datasets
    Returns
    -------
    train_df : pandas.core.frame.DataFrame
        The training data
    dev_df : pandas.core.frame.DataFrame
        The development data
    test_df : pandas.core.frame.DataFrame
        The test data
    Examples
    --------
    >>> train_df, dev_df, test_df = datadownloader.load_corpus('livedoor_news_corpus')
    """
    # Dispatch to the corpus-specific loader; unknown names raise with a hint.
    if corpus == "amazon_reviews":
        return load_amazon_reviews(
            n=n, is_shuffle=is_shuffle,
            train_data=train_data, dev_data=dev_data, test_data=test_data,
            random_seed=random_seed)
    elif corpus == "yahoo_movie_reviews":
        if corpus_type is None:
            corpus_type = "binary"
        return load_yahoo_movie_reviews(
            n=n, is_shuffle=is_shuffle, corpus_type=corpus_type,
            train_data=train_data, dev_data=dev_data, test_data=test_data,
            random_seed=random_seed)
    elif corpus == "livedoor_news_corpus":
        if corpus_type is None:
            corpus_type = "title"
        return load_livedoor_news_corpus(
            n=n, is_shuffle=is_shuffle, corpus_type=corpus_type,
            train_data=train_data, dev_data=dev_data, test_data=test_data,
            random_seed=random_seed)
    elif corpus == "chABSA_dataset":
        return load_chABSA_dataset(
            n=n, is_shuffle=is_shuffle,
            train_data=train_data, dev_data=dev_data, test_data=test_data,
            random_seed=random_seed)
    else:
        err_msg = " ".join(
            [f"{corpus} does not exist.",
             f"Use datadownloader.download_corpus('{corpus}') ."]
        )
        raise Exception(err_msg)
def __count_polarity(opinions):
posinega = {"positive": 1, "negative": -1}
scores = [posinega.get(opinion["polarity"], 0) for opinion in opinions]
score = sum(scores)
if score > 0:
return "positive"
elif score < 0:
return "negative"
else:
return "neutral"
def load_chABSA_dataset(n=None, is_shuffle=True,
                        train_data=0.8, dev_data=0.1, test_data=0.1,
                        random_seed=1234):
    """
    Dataloader for chABSA dataset.
    The data is pre-processed and split into training data,
    development data and test data.
    Parameters
    ----------
    n : int
        The number of datasets
    is_shuffle : bool
        If true, shuffle the dataset
    train_data : float
        Percentage of training data
    dev_data : float
        Percentage of development data
    test_data : float
        Percentage of test data
    random_seed : int
        Random seed for shuffle datasets
    Returns
    -------
    train_df : pandas.core.frame.DataFrame
        The training data
    dev_df : pandas.core.frame.DataFrame
        The development data
    test_df : pandas.core.frame.DataFrame
        The test data
    """
    random.seed(random_seed)
    corpus = "chABSA_dataset"
    corpora_dict = get_corpora_dict()
    resource_dir = get_resource_dir()
    filename = corpora_dict[corpus]["filename"]
    filepath = os.path.join(resource_dir, filename)
    # Unpack the downloaded zip next to it; re-extracts on every call.
    with zipfile.ZipFile(filepath, 'r') as f:
        f.extractall(resource_dir)
    files = glob.glob(os.path.join(resource_dir, "chABSA-dataset", "*.json"))
    data = []
    for _file in files:
        with open(_file, "r") as f:
            _data = json.load(f)
            sentences = _data["sentences"]
            for sentence in sentences:
                sent = sentence["sentence"]
                opinions = sentence["opinions"]
                # Each sample is [label, text]; label is the aggregated polarity.
                label = __count_polarity(opinions)
                data.append([label, sent])
    _shuffle_data(is_shuffle, data)
    data = _max_count_data(n, data)
    train_df, dev_df, test_df = _split_train_dev_test(
        data, train_data=train_data, dev_data=dev_data, test_data=test_data
    )
    return train_df, dev_df, test_df
def load_amazon_reviews(n=None, is_shuffle=True,
                        train_data=0.8, dev_data=0.1, test_data=0.1,
                        random_seed=1234):
    """
    Dataloader for amazon reviews.
    The data is pre-processed and split into training data,
    development data and test data.
    Parameters
    ----------
    n : int
        The number of datasets
    is_shuffle : bool
        If true, shuffle the dataset
    train_data : float
        Percentage of training data
    dev_data : float
        Percentage of development data
    test_data : float
        Percentage of test data
    random_seed : int
        Random seed for shuffle datasets
    Returns
    -------
    train_df : pandas.core.frame.DataFrame
        The training data
    dev_df : pandas.core.frame.DataFrame
        The development data
    test_df : pandas.core.frame.DataFrame
        The test data
    """
    random.seed(random_seed)
    corpus = "amazon_reviews"
    corpora_dict = get_corpora_dict()
    resource_dir = get_resource_dir()
    filename = corpora_dict[corpus]["filename"]
    filepath = os.path.join(resource_dir, filename)
    # BUG FIX: a missing download used to be silently ignored, returning three
    # empty DataFrames; now raise with a hint, consistent with the
    # yahoo/livedoor loaders in this module.
    if not os.path.exists(filepath):
        err_msg = " ".join(
            [f"{corpus} does not exist.",
             f"Use datadownloader.download_corpus('{corpus}') ."]
        )
        raise Exception(err_msg)
    data = []
    with gzip.open(filepath, "rt") as f:
        reader = csv.reader(f, delimiter="\t", quoting=csv.QUOTE_NONE)
        next(reader)  # skip the TSV header row
        for line in reader:
            # Columns 7/13 are taken as rating/text — assumes the Amazon
            # review TSV layout (star_rating, review_body); TODO confirm.
            rating = line[7]
            text = line[13]
            data.append([rating, text])
    _shuffle_data(is_shuffle, data)
    data = _max_count_data(n, data)
    train_df, dev_df, test_df = _split_train_dev_test(
        data, train_data=train_data, dev_data=dev_data, test_data=test_data
    )
    return train_df, dev_df, test_df
def load_yahoo_movie_reviews(n=None, is_shuffle=True, corpus_type="binary",
                             train_data=0.8, dev_data=0.1, test_data=0.1,
                             random_seed=1234):
    """
    Dataloader for yahoo_movie_reviews.
    The data is pre-processed and split into training data,
    development data and test data.
    Parameters
    ----------
    n : int
        The number of datasets
    is_shuffle : bool
        If true, shuffle the dataset
    corpus_type : str
        "binary" maps ratings 1-2 to label 0 and 4-5 to label 1 (dropping 3)
        and balances the classes; "original" keeps the raw 1-5 ratings
    train_data : float
        Percentage of training data
    dev_data : float
        Percentage of development data
    test_data : float
        Percentage of test data
    random_seed : int
        Random seed for shuffle datasets
    Returns
    -------
    train_df : pandas.core.frame.DataFrame
        The training data
    dev_df : pandas.core.frame.DataFrame
        The development data
    test_df : pandas.core.frame.DataFrame
        The test data
    """
    corpus_types = ["binary", "original"]
    _check_correct_corpus_type(corpus_type, corpus_types)
    random.seed(random_seed)
    corpus = "yahoo_movie_reviews"
    corpora_dict = get_corpora_dict()
    resource_dir = get_resource_dir()
    filename = corpora_dict[corpus]["filename"]
    filepath = os.path.join(resource_dir, filename)
    if os.path.exists(filepath):
        yahoo_movie_reviews_dir = os.path.join(resource_dir, "data")
        # Extract only once; the archive unpacks into resource_dir/data.
        if not os.path.exists(yahoo_movie_reviews_dir):
            _extract_tarfile(filepath, resource_dir)
        yahoo_movie_reviews_json = os.path.join(
            yahoo_movie_reviews_dir, "yahoo-movie-reviews.json"
        )
        if not os.path.exists(yahoo_movie_reviews_json):
            err_msg = " ".join([
                f"{yahoo_movie_reviews_json} does not exist. ",
                f"Use datadownloader.download_corpus('{corpus}') ."
            ]
            )
            raise Exception(err_msg)
        data = []
        with open(yahoo_movie_reviews_json, "r") as f:
            json_load = json.load(f)
            for line in json_load:
                text = line["text"].replace("\n", "")
                rating = str(line["rating"])
                if corpus_type == "binary":
                    # 1-2 -> negative (0), 4-5 -> positive (1); rating 3 is dropped.
                    if rating in ["1", "2"]:
                        rating = 0
                        data.append([rating, text])
                    elif rating in ["4", "5"]:
                        rating = 1
                        data.append([rating, text])
                else:
                    data.append([rating, text])
        if corpus_type == "binary":
            # Balance the two classes by truncating each to the smaller count.
            label2texts = {}
            for line in data:
                label, text = line
                if label in label2texts:
                    label2texts[label].append(text)
                else:
                    label2texts[label] = [text]
            num_data = min(
                [len(label2texts[key]) for key in label2texts.keys()]
            )
            data = []
            for key in label2texts.keys():
                texts = label2texts[key][:num_data]
                for text in texts:
                    data.append([key, text])
        _shuffle_data(is_shuffle, data)
        data = _max_count_data(n, data)
        train_df, dev_df, test_df = _split_train_dev_test(
            data, train_data=train_data, dev_data=dev_data, test_data=test_data
        )
        return train_df, dev_df, test_df
    else:
        err_msg = " ".join(
            [f"{corpus} does not exist.",
             f"Use datadownloader.download_corpus('{corpus}') ."]
        )
        raise Exception(err_msg)
def load_livedoor_news_corpus(n=None, is_shuffle=True, corpus_type="title",
                              train_data=0.8, dev_data=0.1, test_data=0.1,
                              random_seed=1234):
    """
    Dataloader for livedoor news corpus.
    The data is pre-processed and split into training data,
    development data and test data.
    Parameters
    ----------
    n : int
        The number of datasets
    is_shuffle : bool
        If true, shuffle the dataset
    corpus_type : str
        "title" uses the article title (third line of each file) as the text;
        "article" concatenates the body (everything after the third line)
    train_data : float
        Percentage of training data
    dev_data : float
        Percentage of development data
    test_data : float
        Percentage of test data
    random_seed : int
        Random seed for shuffle datasets
    Returns
    -------
    train_df : pandas.core.frame.DataFrame
        The training data
    dev_df : pandas.core.frame.DataFrame
        The development data
    test_df : pandas.core.frame.DataFrame
        The test data
    """
    corpus_types = ["title", "article"]
    _check_correct_corpus_type(corpus_type, corpus_types)
    random.seed(random_seed)
    corpus = "livedoor_news_corpus"
    # The nine livedoor category directory names; each doubles as the label.
    label_names = [
        "dokujo-tsushin",
        "kaden-channel",
        "movie-enter",
        "smax",
        "topic-news",
        "it-life-hack",
        "livedoor-homme",
        "peachy",
        "sports-watch"
    ]
    corpora_dict = get_corpora_dict()
    resource_dir = get_resource_dir()
    filename = corpora_dict[corpus]["filename"]
    filepath = os.path.join(resource_dir, filename)
    if os.path.exists(filepath):
        livedoor_news_corpus_dir = os.path.join(resource_dir, "text")
        # Extract only once; the archive unpacks into resource_dir/text.
        if not os.path.exists(livedoor_news_corpus_dir):
            _extract_tarfile(filepath, resource_dir)
        dirs = glob.glob(f"{livedoor_news_corpus_dir}/*")
        data = []
        for dir_name in dirs:
            dir_basename = os.path.basename(dir_name)
            if dir_basename in label_names:
                files = glob.glob(f"{dir_name}/*")
                # NOTE(review): this loop variable shadows the outer archive
                # `filename`; harmless here since the outer name is not reused.
                for filename in files:
                    with open(filename, "r") as f:
                        article = []
                        # File layout: line 0 = URL, line 1 = date,
                        # line 2 = title, lines 3+ = body.
                        for i, line in enumerate(f):
                            line = line.strip().replace("\t", "")
                            if corpus_type == "title":
                                if i == 2:
                                    data.append([dir_basename, line])
                            if corpus_type == "article":
                                if i > 2:
                                    article.append(line)
                        if corpus_type == "article":
                            article = "".join(article)
                            data.append([dir_basename, article])
        _shuffle_data(is_shuffle, data)
        data = _max_count_data(n, data)
        train_df, dev_df, test_df = _split_train_dev_test(
            data, train_data=train_data, dev_data=dev_data, test_data=test_data
        )
        return train_df, dev_df, test_df
    else:
        err_msg = " ".join(
            [f"{corpus} does not exist.",
             f"Use datadownloader.download_corpus('{corpus}') ."]
        )
        raise Exception(err_msg)
|
python
|
import argparse
try:
from . import treedata_pb2 as proto
from . import utils
except (ValueError, ImportError):
import treedata_pb2 as proto
import utils
import represent_ast as ra
class Node:
    """Lightweight value object for one AST node: id, label, and child position."""
    def __init__(self, id, label, position):
        self.id, self.label, self.position = id, label, position
class ReadGraph:
    """Read-only view over a serialized tree proto.

    Builds a parent -> children index once at construction; label ids are
    translated back to strings through the supplied reverser.
    """
    def __init__(self, proto_t, reverser):
        self.name = proto_t.name
        self._proto = proto_t
        self._reverser = reverser
        self.root = -1
        self._ingoing = {}
        self._index_ingoing()
    def _index_ingoing(self):
        # Build the parent -> children map from the flat parallel arrays
        # (from_node[i] is assigned to parent assignment[i]).
        data = self._proto
        for i in range(len(data.assignment)):
            cid = data.from_node[i]
            pid = data.assignment[i]
            if pid not in self._ingoing:
                self._ingoing[pid] = []
            self._ingoing[pid].append(cid)
            # Root heuristic: first parent seen, then any parent at depth 0
            # wins (repeated depth-0 parents keep overwriting — last one wins).
            if self.root == -1 or data.depth[pid] == 0:
                self.root = pid
    def in_edges(self, id):
        # Children assigned to `id`; leaf nodes have no entry.
        if id not in self._ingoing:
            return []
        return self._ingoing[id]
    def node(self, id):
        # Materialize a Node with the label decoded from the 'ast' vocabulary.
        pos = self._proto.position[id]
        lix = self._proto.nodes[id]
        return Node(
            id, self._reverser.reverse('ast', lix), position=pos
        )
    def rewrite_label(self, id, label):
        # Mutates the underlying proto in place.
        self._proto.nodes[id] = self._reverser.index('ast', label)
class WriteGraph:
    """Builder for an AnnotatedTree proto; nodes are appended via write_node."""
    def __init__(self, name, indexer):
        self.proto = proto.AnnotatedTree()
        self.proto.name = name
        self._indexer = indexer
    def write_node(self, label, position=0, assign_to=-1):
        """Append a node with *label*; attach it under *assign_to* (-1 = root).

        Returns the new node's id (its index in proto.nodes).
        """
        data = self.proto
        id = len(data.nodes)
        lix = self._indexer.index('ast', label)
        data.nodes.append(lix)
        if assign_to != -1:
            depth = data.depth[assign_to] + 1
            data.from_node.append(id)
            data.assignment.append(assign_to)
        else:
            depth = 0
        data.depth.append(depth)
        data.position.append(position)
        return id
    def rewrite_label(self, id, label):
        # BUG FIX: was `self._proto.nodes[id]` but this class stores the proto
        # as `self.proto` (no underscore) — the old code raised AttributeError.
        self.proto.nodes[id] = self._indexer.index('ast', label)
    def read_graph(self):
        """Wrap the current proto in a ReadGraph (index built at call time)."""
        return ReadGraph(
            self.proto, self._indexer
        )
def find_statement_roots(graph, root):
    """Collect the ids of statement-level subtree roots below *root*.

    CompoundStmt nodes are transparent (their children are explored instead of
    being reported); every other visited node is a statement root, and its
    CompoundStmt/IfStmt children are queued for further exploration.
    """
    roots = []
    check = set(['CompoundStmt', 'IfStmt'])
    Q = graph.in_edges(root)
    while len(Q) > 0:
        id = Q.pop()
        node = graph.node(id)
        label = node.label
        if label == 'CompoundStmt':
            # Skip the block node itself; its children become candidates.
            Q.extend(graph.in_edges(id))
        else:
            roots.append(id)
            # Descend only through nested blocks and if-statements.
            for u in graph.in_edges(id):
                n2 = graph.node(u)
                if n2.label in check:
                    Q.append(u)
    return roots
def ast_to_seq(graph, root):
    """Linearize the subtree at *root* into a depth-first label sequence.

    Side effects: nested IfStmt nodes are relabeled 'ElseIfStmt' in *graph*
    (and then pruned from the traversal); CompoundStmt labels are dropped.
    "[SEP]" sentinels in the worklist pass through into the sequence verbatim.
    """
    sequence = []
    Q = [root]
    seen = set([root])
    while len(Q) > 0:
        # Pop from the front; children are prepended below, so this is a
        # depth-first, position-ordered walk.
        # NOTE(review): Q = Q[1:] copies the list each iteration (O(n) per
        # pop); fine for small trees but quadratic in the worst case.
        id = Q[0]
        Q = Q[1:]
        if id == "[SEP]":
            sequence.append(id)
            continue
        label = graph.node(id).label
        if id != root and label == 'IfStmt':
            graph.rewrite_label(id, 'ElseIfStmt')
            continue
        if label == 'CompoundStmt':
            continue
        sequence.append(label)
        neigh = sorted([u for u in graph.in_edges(id) if u not in seen], key=lambda x: graph.node(x).position)
        seen = seen.union(set(neigh))
        Q = neigh + Q
    return sequence
def ast_to_set(graph, root):
    """Collect the set of labels in the subtree at *root*.

    Mirrors ast_to_seq's filtering: nested IfStmt nodes are relabeled
    'ElseIfStmt' in *graph* and pruned, CompoundStmt labels are dropped, and
    "[SEP]" sentinels are kept as-is.
    """
    out = set([])
    Q = [root]
    while len(Q) > 0:
        id = Q.pop()
        if id == "[SEP]":
            # BUG FIX: was `sequence.append(id)` — `sequence` is undefined in
            # this function (copy-paste from ast_to_seq) and raised NameError;
            # the separator now lands in the output set like other labels.
            out.add(id)
            continue
        label = graph.node(id).label
        if id != root and label == 'IfStmt':
            graph.rewrite_label(id, 'ElseIfStmt')
            continue
        if label == 'CompoundStmt':
            continue
        out.add(label)
        neigh = graph.in_edges(id)
        Q.extend(neigh)
    return out
def transform_state(graph, cgraph, attach_root, root, set_sem=False):
    """Write the statement at *root* under *attach_root* in *cgraph*.

    With set semantics the labels come from ast_to_set (order-free, every
    position written as 0); otherwise the ordered ast_to_seq linearization
    is written with its sequence positions.
    """
    if set_sem:
        labels = list(ast_to_set(graph, root))
    else:
        labels = ast_to_seq(graph, root)
    for pos, label in enumerate(labels):
        cgraph.write_node(label, position=0 if set_sem else pos, assign_to=attach_root)
def stmt_label(label):
    """Prefix *label* with 'Stmt_' to mark it as a statement-level label."""
    return f"Stmt_{label}"
def transform_func(graph, cgraph, attach_func, root, set_sem=False):
    """Copy the function subtree at *root* into *cgraph* under *attach_func*.

    attach_func == 1 is a special case where *root* itself is the single
    statement root; otherwise statement roots are discovered and written in
    source-position order, each followed by its linearized body.
    """
    if attach_func == 1:
        state_roots = [root]
    else:
        state_roots = find_statement_roots(graph, root)
    state_roots = sorted(state_roots, key=lambda sid: graph.node(sid).position)
    # First pass: create one node per statement root so ids exist up front.
    attach_roots = [
        cgraph.write_node(graph.node(sid).label, position=pos, assign_to=attach_func)
        for pos, sid in enumerate(state_roots)
    ]
    # Second pass: flatten each statement body under its new node.
    for new_id, old_id in zip(attach_roots, state_roots):
        transform_state(graph, cgraph, new_id, old_id, set_sem=set_sem)
def attach_noop(graph):
    """Append a "[NOOP]" child to every internal node reachable from node 0.

    Leaves are left untouched. The read view is built once up front, so the
    newly written "[NOOP]" nodes themselves are not revisited.
    """
    stack = [0]
    rgraph = graph.read_graph()
    while stack:
        current = stack.pop()
        children = rgraph.in_edges(current)
        if not children:
            continue
        graph.write_node("[NOOP]", position=0, assign_to=current)
        stack.extend(children)
def rm_empty_func(graph):
    """Remove childless children of node "N0" from *graph*.

    NOTE(review): this expects a graph with string node ids ("N0") and a
    remove_node method — neither ReadGraph nor WriteGraph in this file
    provides those, so this presumably targets a different graph type
    (e.g. networkx); confirm against callers.
    """
    rm = []
    # Two phases: collect first, then delete, to avoid mutating while iterating.
    for u in graph.in_edges("N0"):
        if len(graph.in_edges(u)) == 0:
            rm.append(u)
    for r in rm:
        graph.remove_node(r)
def transform_graph(graph, indexer, set_sem=False):
    """Rebuild *graph* as a WriteGraph rooted at a synthetic PROGRAM node.

    Children of the original root whose label contains 'FunctionDecl' get
    their own node under PROGRAM; everything else is grouped under a single
    synthetic InitFunctionDecl node. Finally "[NOOP]" markers are attached.
    """
    cgraph = WriteGraph(graph.name, indexer)
    program_id = cgraph.write_node("PROGRAM", position=0)
    init_id = cgraph.write_node("InitFunctionDecl", position=0, assign_to=program_id)
    for child in graph.in_edges(graph.root):
        if child == graph.root:
            # Guard against a self-loop on the root.
            continue
        label = graph.node(child).label
        if 'FunctionDecl' in label:
            target = cgraph.write_node(label, position=0, assign_to=program_id)
        else:
            target = init_id
        transform_func(graph, cgraph, target, child, set_sem=set_sem)
    attach_noop(cgraph)
    return cgraph
def preprocess(id, data, old_index, new_indexer, set_sem=False):
    """Re-index and restructure one serialized tree; returns (id, new proto)."""
    transformed = transform_graph(ReadGraph(data, old_index), new_indexer, set_sem=set_sem)
    return id, transformed.proto
def bounded_stream(stream, bound):
    """Yield items from *stream*, stopping after the item at index *bound*.

    NOTE(review): the bound is inclusive — up to bound + 1 items are yielded;
    confirm callers expect that rather than exactly *bound* items.
    """
    for index, item in enumerate(stream):
        yield item
        if index >= bound:
            break
|
python
|
from setuptools import setup
def get_install_requires():
    """Return the install_requires list for setup().

    Backport packages are appended only when the corresponding stdlib module
    is missing — a Python 2 era check that is a no-op on any modern Python.
    """
    requires = [
        'Django>=3,<4',
        'django-jsoneditor>0.1,<0.2',
    ]
    try:
        import importlib  # noqa: F401
    except ImportError:
        requires.append('importlib')
    try:
        from collections import OrderedDict  # noqa: F401
    except ImportError:
        requires.append('ordereddict')
    return requires
# All static metadata lives in setup.cfg/pyproject; only the environment-dependent
# dependency list is computed here at install time.
setup(install_requires=get_install_requires())
|
python
|
"""
:py:class:`Model` is an abstract class representing
an AllenNLP model.
"""
import logging
import os
from typing import Dict, Union, List
import numpy
import torch
from allennlp.common.checks import ConfigurationError
from allennlp.common.params import Params
from allennlp.common.registrable import Registrable
from allennlp.data import Instance, Vocabulary
from allennlp.data.dataset import Batch
from allennlp.nn import util
from allennlp.nn.regularizers import RegularizerApplicator
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
# When training a model, many sets of weights are saved. By default we want to
# save/load this set of weights.
_DEFAULT_WEIGHTS = "best.th"
class Model(torch.nn.Module, Registrable):
    """
    This abstract class represents a model to be trained. Rather than relying completely
    on the Pytorch Module, we modify the output spec of ``forward`` to be a dictionary.
    Models built using this API are still compatible with other pytorch models and can
    be used naturally as modules within other models - outputs are dictionaries, which
    can be unpacked and passed into other layers. One caveat to this is that if you
    wish to use an AllenNLP model inside a Container (such as nn.Sequential), you must
    interleave the models with a wrapper module which unpacks the dictionary into
    a list of tensors.
    In order for your model to be trained using the :class:`~allennlp.training.Trainer`
    api, the output dictionary of your Model must include a "loss" key, which will be
    optimised during the training process.
    Finally, you can optionally implement :func:`Model.get_metrics` in order to make use
    of early stopping and best-model serialization based on a validation metric in
    :class:`~allennlp.training.Trainer`.
    """
    def __init__(self,
                 vocab: Vocabulary,
                 regularizer: RegularizerApplicator = None) -> None:
        super().__init__()
        self.vocab = vocab
        self._regularizer = regularizer
    def get_regularization_penalty(self) -> Union[float, torch.autograd.Variable]:
        """
        Computes the regularization penalty for the model.
        Returns 0 if the model was not configured to use regularization.
        """
        if self._regularizer is None:
            return 0.0
        else:
            return self._regularizer(self)
    def get_parameters_for_histogram_tensorboard_logging( # pylint: disable=invalid-name
            self) -> List[str]:
        """
        Returns the name of model parameters used for logging histograms to tensorboard.
        """
        return [name for name, _ in self.named_parameters()]
    def forward(self, *inputs) -> Dict[str, torch.Tensor]:  # pylint: disable=arguments-differ
        """
        Defines the forward pass of the model. In addition, to facilitate easy training,
        this method is designed to compute a loss function defined by a user.
        The input is comprised of everything required to perform a
        training update, `including` labels - you define the signature here!
        It is down to the user to ensure that inference can be performed
        without the presence of these labels. Hence, any inputs not available at
        inference time should only be used inside a conditional block.
        The intended sketch of this method is as follows::
            def forward(self, input1, input2, targets=None):
                ....
                ....
                output1 = self.layer1(input1)
                output2 = self.layer2(input2)
                output_dict = {"output1": output1, "output2": output2}
                if targets is not None:
                    # Function returning a scalar torch.Tensor, defined by the user.
                    loss = self._compute_loss(output1, output2, targets)
                    output_dict["loss"] = loss
                return output_dict
        Parameters
        ----------
        inputs:
            Tensors comprising everything needed to perform a training update, `including` labels,
            which should be optional (i.e have a default value of ``None``). At inference time,
            simply pass the relevant inputs, not including the labels.
        Returns
        -------
        output_dict: ``Dict[str, torch.Tensor]``
            The outputs from the model. In order to train a model using the
            :class:`~allennlp.training.Trainer` api, you must provide a "loss" key pointing to a
            scalar ``torch.Tensor`` representing the loss to be optimized.
        """
        raise NotImplementedError
    def forward_on_instance(self, instance: Instance) -> Dict[str, numpy.ndarray]:
        """
        Takes an :class:`~allennlp.data.instance.Instance`, which typically has raw text in it,
        converts that text into arrays using this model's :class:`Vocabulary`, passes those arrays
        through :func:`self.forward()` and :func:`self.decode()` (which by default does nothing)
        and returns the result. Before returning the result, we convert any ``torch.autograd.Variables``
        or ``torch.Tensors`` into numpy arrays and remove the batch dimension.
        """
        return self.forward_on_instances([instance])[0]
    def forward_on_instances(self,
                             instances: List[Instance]) -> List[Dict[str, numpy.ndarray]]:
        """
        Takes a list of :class:`~allennlp.data.instance.Instance`s, converts that text into
        arrays using this model's :class:`Vocabulary`, passes those arrays through
        :func:`self.forward()` and :func:`self.decode()` (which by default does nothing)
        and returns the result. Before returning the result, we convert any
        ``torch.autograd.Variables`` or ``torch.Tensors`` into numpy arrays and separate the
        batched output into a list of individual dicts per instance. Note that typically
        this will be faster on a GPU (and conditionally, on a CPU) than repeated calls to
        :func:`forward_on_instance`.
        Parameters
        ----------
        instances : List[Instance], required
            The instances to run the model on. The device is chosen automatically
            from the model's parameters via :func:`_get_prediction_device`.
        Returns
        -------
        A list of the models output for each instance.
        """
        cuda_device = self._get_prediction_device()
        dataset = Batch(instances)
        dataset.index_instances(self.vocab)
        model_input = dataset.as_tensor_dict(cuda_device=cuda_device, for_training=False)
        outputs = self.decode(self(**model_input))
        instance_separated_output: List[Dict[str, numpy.ndarray]] = [{} for _ in dataset.instances]
        # Iterate over a snapshot of items() because outputs is mutated in the loop.
        for name, output in list(outputs.items()):
            if isinstance(output, torch.autograd.Variable):
                output = output.data.cpu().numpy()
            outputs[name] = output
            # Split the batched output back into one entry per instance.
            for instance_output, batch_element in zip(instance_separated_output, output):
                instance_output[name] = batch_element
        return instance_separated_output
    def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
        """
        Takes the result of :func:`forward` and runs inference / decoding / whatever
        post-processing you need to do your model. The intent is that ``model.forward()`` should
        produce potentials or probabilities, and then ``model.decode()`` can take those results and
        run some kind of beam search or constrained inference or whatever is necessary. This does
        not handle all possible decoding use cases, but it at least handles simple kinds of
        decoding.
        This method `modifies` the input dictionary, and also `returns` the same dictionary.
        By default in the base class we do nothing. If your model has some special decoding step,
        override this method.
        """
        # pylint: disable=no-self-use
        return output_dict
    def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        """
        Returns a dictionary of metrics. This method will be called by
        :class:`allennlp.training.Trainer` in order to compute and use model metrics for early
        stopping and model serialization. We return an empty dictionary here rather than raising
        as it is not required to implement metrics for a new model. A boolean `reset` parameter is
        passed, as frequently a metric accumulator will have some state which should be reset
        between epochs. This is also compatible with :class:`~allennlp.training.Metric`s. Metrics
        should be populated during the call to ``forward``, with the
        :class:`~allennlp.training.Metric` handling the accumulation of the metric until this
        method is called.
        """
        # pylint: disable=unused-argument,no-self-use
        return {}
    def _get_prediction_device(self) -> int:
        """
        This method checks the device of the model parameters to determine the cuda_device
        this model should be run on for predictions. If there are no parameters, it returns -1.
        Returns
        -------
        The cuda device this model should run on for predictions.
        """
        devices = {util.get_device_of(param) for param in self.parameters()}
        if len(devices) > 1:
            devices_string = ", ".join(str(x) for x in devices)
            raise ConfigurationError(f"Parameters have mismatching cuda_devices: {devices_string}")
        elif len(devices) == 1:
            return devices.pop()
        else:
            return -1
    @classmethod
    def from_params(cls, vocab: Vocabulary, params: Params) -> 'Model':
        # Pop the registered model type and delegate to that subclass's from_params.
        choice = params.pop_choice("type", cls.list_available())
        model = cls.by_name(choice).from_params(vocab, params)
        return model
    @classmethod
    def _load(cls,
              config: Params,
              serialization_dir: str,
              weights_file: str = None,
              cuda_device: int = -1) -> 'Model':
        """
        Instantiates an already-trained model, based on the experiment
        configuration and some optional overrides.
        """
        weights_file = weights_file or os.path.join(serialization_dir, _DEFAULT_WEIGHTS)
        # Load vocabulary from file
        vocab_dir = os.path.join(serialization_dir, 'vocabulary')
        vocab = Vocabulary.from_files(vocab_dir)
        model_params = config.get('model')
        # The experiment config tells us how to _train_ a model, including where to get pre-trained
        # embeddings from. We're now _loading_ the model, so those embeddings will already be
        # stored in our weights. We don't need any pretrained weight file anymore, and we don't
        # want the code to look for it, so we remove it from the parameters here.
        remove_pretrained_embedding_params(model_params)
        model = Model.from_params(vocab, model_params)
        model_state = torch.load(weights_file, map_location=util.device_mapping(cuda_device))
        model.load_state_dict(model_state)
        # Force model to cpu or gpu, as appropriate, to make sure that the embeddings are
        # in sync with the weights
        if cuda_device >= 0:
            model.cuda(cuda_device)
        else:
            model.cpu()
        return model
    @classmethod
    def load(cls,
             config: Params,
             serialization_dir: str,
             weights_file: str = None,
             cuda_device: int = -1) -> 'Model':
        """
        Instantiates an already-trained model, based on the experiment
        configuration and some optional overrides.
        Parameters
        ----------
        config: Params
            The configuration that was used to train the model. It should definitely
            have a `model` section, and should probably have a `trainer` section
            as well.
        serialization_dir: str = None
            The directory containing the serialized weights, parameters, and vocabulary
            of the model.
        weights_file: str = None
            By default we load the weights from `best.th` in the serialization
            directory, but you can override that value here.
        cuda_device: int = -1
            By default we load the model on the CPU, but if you want to load it
            for GPU usage you can specify the id of your GPU here
        Returns
        -------
        model: Model
            The model specified in the configuration, loaded with the serialized
            vocabulary and the trained weights.
        """
        # Peek at the class of the model.
        model_type = config["model"]["type"]
        # Load using an overridable _load method.
        # This allows subclasses of Model to override _load.
        # pylint: disable=protected-access
        return cls.by_name(model_type)._load(config, serialization_dir, weights_file, cuda_device)
def remove_pretrained_embedding_params(params: Params):
    """
    Recursively delete every ``'pretrained_file'`` entry from ``params``.

    A serialized model already contains its (possibly pretrained) embedding
    weights, so the original embedding file is no longer needed at load time
    and the loading code must not go looking for it.
    """
    if 'pretrained_file' in params.keys():
        del params['pretrained_file']
    # Recurse into every nested parameter group.
    for sub_params in params.values():
        if isinstance(sub_params, Params):
            remove_pretrained_embedding_params(sub_params)
|
python
|
# Importing Specific Items
# Fill in the blanks so that the program below prints 90.0.
# Do you find this version easier to read than preceding ones?
# Why wouldn't programmers always use this form of import?
# Answer: `from math import degrees, pi` binds only those two names, so the
# calls need no `math.` prefix (easier to read), but it risks name collisions
# and obscures where each name came from in larger programs.
from math import degrees, pi

angle = degrees(pi / 2)
print(angle)
|
python
|
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert RoBERTa checkpoint."""
import argparse
import pytorch_lightning as pl
import torch
from transformers import LongformerForQuestionAnswering, LongformerModel
class LightningModel(pl.LightningModule):
    """Thin Lightning wrapper matching the checkpoint layout: a Longformer
    encoder plus a linear span-prediction head (start/end logits for QA)."""

    def __init__(self, model):
        super().__init__()
        self.model = model
        # Two output labels: start-position logits and end-position logits.
        self.num_labels = 2
        hidden_size = self.model.config.hidden_size
        self.qa_outputs = torch.nn.Linear(hidden_size, self.num_labels)

    def forward(self):
        # Lightning requires a forward(); it is never called during conversion.
        pass
def convert_longformer_qa_checkpoint_to_pytorch(
    longformer_model: str, longformer_question_answering_ckpt_path: str, pytorch_dump_folder_path: str
):
    """Re-package a PyTorch-Lightning Longformer-QA checkpoint as a standalone
    ``LongformerForQuestionAnswering`` model directory.

    Args:
        longformer_model: model identifier of the base Longformer to load.
        longformer_question_answering_ckpt_path: path to the Lightning checkpoint.
        pytorch_dump_folder_path: directory the converted model is saved into.
    """
    # Rebuild the Lightning wrapper around a freshly loaded base model, then
    # restore the checkpointed weights into it.
    base_model = LongformerModel.from_pretrained(longformer_model)
    wrapper = LightningModel(base_model)
    checkpoint = torch.load(longformer_question_answering_ckpt_path, map_location=torch.device("cpu"))
    wrapper.load_state_dict(checkpoint["state_dict"])

    # Instantiate the target QA architecture and copy both the encoder and
    # the QA head weights across.
    qa_model = LongformerForQuestionAnswering.from_pretrained(longformer_model)
    qa_model.longformer.load_state_dict(wrapper.model.state_dict())
    qa_model.qa_outputs.load_state_dict(wrapper.qa_outputs.state_dict())
    qa_model.eval()

    # Persist in the standard transformers format.
    qa_model.save_pretrained(pytorch_dump_folder_path)
    print(f"Conversion successful. Model saved under {pytorch_dump_folder_path}")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        "--longformer_model",
        default=None,
        type=str,
        required=True,
        help="model identifier of longformer. Should be either `longformer-base-4096` or `longformer-large-4096`.",
    )
    parser.add_argument(
        "--longformer_question_answering_ckpt_path",
        default=None,
        type=str,
        required=True,
        # Fixed user-facing typo: was "Path the official ...".
        help="Path to the official PyTorch Lightning Checkpoint.",
    )
    parser.add_argument(
        "--pytorch_dump_folder_path", default=None, type=str, required=True, help="Path to the output PyTorch model."
    )
    args = parser.parse_args()
    convert_longformer_qa_checkpoint_to_pytorch(
        args.longformer_model, args.longformer_question_answering_ckpt_path, args.pytorch_dump_folder_path
    )
|
python
|
from .base import Widget
class PasswordInput(Widget):
    """ This widget behaves like a TextInput but does not reveal what text is entered.

    Args:
        id (str): An identifier for this widget.
        style (:obj:`Style`): An optional style object. If no style is provided then
            a new one will be created for the widget.
        factory (:obj:`module`): A python module that is capable to return an
            implementation of this class with the same name. (optional & normally not needed)
        initial (str): The initial text that is displayed before the user inputs anything.
        placeholder (str): The text that is displayed if no input text is present.
        readonly (bool): Whether a user can write into the text input, defaults to `False`.
    """
    # Minimum width hint used by the layout system.
    MIN_WIDTH = 100

    def __init__(self, id=None, style=None, factory=None,
                 initial=None, placeholder=None, readonly=False):
        super().__init__(id=id, style=style, factory=factory)

        # Create a platform specific implementation of a PasswordInput
        self._impl = self.factory.PasswordInput(interface=self)

        # Assigning through the properties below also pushes the values
        # down into the platform implementation.
        self.value = initial
        self.placeholder = placeholder
        self.readonly = readonly

    @property
    def readonly(self):
        """ Whether a user can write into the password input.

        Returns:
            ``True`` if the user can only read,
            ``False`` if the user can read and write into the input.
        """
        return self._readonly

    @readonly.setter
    def readonly(self, value):
        self._readonly = value
        self._impl.set_readonly(value)

    @property
    def placeholder(self):
        """ The placeholder text displayed before the user has input anything.

        Returns:
            The placeholder text (str) of the widget.
        """
        return self._placeholder

    @placeholder.setter
    def placeholder(self, value):
        # Normalize None to '' so the implementation always receives a string.
        if value is None:
            self._placeholder = ''
        else:
            self._placeholder = str(value)
        self._impl.set_placeholder(self._placeholder)
        self._impl.rehint()

    @property
    def value(self):
        """ The value of the text input field.

        Returns:
            The text as a ``str`` of the password input widget.
        """
        return self._impl.get_value()

    @value.setter
    def value(self, value):
        # Normalize None to '' before handing the text to the implementation.
        if value is None:
            v = ''
        else:
            v = str(value)
        self._impl.set_value(v)
        self._impl.rehint()

    def clear(self):
        """ Clears the input field of the widget.
        """
        self.value = ''
|
python
|
import os
import alignfaces as af
# Build one database directory name per (expression, mouth-state) pair,
# e.g. "NIM-ha-o".  The two-letter expression codes and one-letter mouth
# codes follow the NIM face set's naming scheme (e.g. "ha" presumably
# "happy", "o" presumably "open mouth" - TODO confirm against dataset docs).
expression = ["an", "ca", "di", "fe", "ha", "ne", "sa", "sp"]
mouth = ["o", "c"]
databases = []
for ex in expression:
    for mo in mouth:
        databases.append("NIM-" + ex + "-" + mo)
num_dirs = len(expression) * len(mouth)
# Every database directory stores .bmp images.
file_postfixes = ["bmp"] * num_dirs
# All database directories live next to this script.
my_project_path = os.path.dirname(os.path.abspath(__file__))

# --- Workflow step 1 (run once, then re-comment): detect landmarks and
# --- inspect them manually.
# for dbase, pf in zip(databases, file_postfixes):
#     my_faces_path = my_project_path + os.path.sep + dbase + os.path.sep
#     af.get_landmarks(my_faces_path, "", pf, start_fresh=True)
#     af.plot_faces_with_landmarks_one_by_one(my_faces_path)

# exclude_files_with_bad_landmarks looks specifically for bad-landmarks.csv.
# so bad-landmarks-strict.csv will be ignored. that's good.
# bad-landmarks.csv ignore poor fits to mouth.
# i will not be using the mouth in my GPA alignment so that's good.
# for dbase, pf in zip(databases, file_postfixes):
#     my_faces_path = my_project_path + os.path.sep + dbase + os.path.sep
#     af.exclude_files_with_bad_landmarks(my_faces_path)

# AFTER DONE WITH MANUALLY RECORDING BAD LANDMARKS FOR EACH FOLDER:
# DO COMMENTED ABOVE
# UNCOMMENT BELOW AND RUN
# total0, total1, total2 = 0, 0, 0
# with open('table-DLIB-failures-NIM.csv', 'w') as writer:
#     writer.write("Database,n,Failed Face Detections,Inaccurate Landmarks\n")
#     for dbase, pf in zip(databases, file_postfixes):
#         my_faces_path = my_project_path + os.path.sep + dbase + os.path.sep
#         numbers = af.landmarks_report(my_faces_path, file_prefix="", file_postfix=pf)
#         n0 = numbers['num_total_images']
#         n1 = numbers['num_failed_detections']
#         n2 = numbers['num_detected_but_removed']
#         writer.write('%s,%d,%d,%d\n' % (dbase, n0, n1, n2))
#         total0 += n0
#         total1 += n1
#         total2 += n2
#     writer.write('%s,%d,%d,%d\n' % ("All", total0, total1, total2))
#
#
# Final step: align every database with generalized Procrustes analysis
# anchored on the eyes only (the mouth is deliberately excluded; see the
# notes above), then recompute landmarks on the aligned output.
include_features = ["left_eye", "right_eye"]
for dbase, pf in zip(databases, file_postfixes):
    my_faces_path = my_project_path + os.path.sep + dbase + os.path.sep
    aligned_path = af.align_procrustes(my_faces_path, "", pf,
                                       color_of_result="grayscale",
                                       include_features=include_features)
    af.get_landmarks(aligned_path, "", pf, start_fresh=True)
# END
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
|
python
|
import unittest
import json
from lambda_function import get_user, get_users
class TestTwitterUpdates(unittest.TestCase):
    """Smoke tests for the Twitter-updates Lambda handlers.

    NOTE(review): these assume a fixed backing data set (10 users, first id
    '284068570') - confirm the fixture before running against live data.
    """

    def test_get_users(self):
        """get_users returns HTTP 200 and the expected page of user ids."""
        users_resp = get_users()
        self.assertEqual(200, users_resp['statusCode'])
        users = users_resp['body']
        self.assertEqual(10, len(users))
        self.assertEqual('284068570', users[0])

    def test_get_user_success(self):
        """A known user id resolves with HTTP 200."""
        user = get_user('284068570')
        self.assertEqual(200, user["statusCode"])

    # Renamed from `test_get_user_notfoud` (typo); still discovered by the
    # `test_` prefix, so no external behavior changes.
    def test_get_user_notfound(self):
        """An unknown user id yields HTTP 404."""
        user = get_user('123')
        self.assertEqual(404, user["statusCode"])
# Allow running this test module directly (e.g. `python test_lambda.py`).
if __name__ == '__main__':
    unittest.main()
|
python
|
# Copyright 2015 Google
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import csv
import sys
# Usage: python <script> <logfile> <outfile>
script, logfile, outfile = sys.argv

# Splits a classic syslog line into timestamp / application / source /
# optional pid / free-text details.
SYSLOG_RE = re.compile(r"""
(\w{3}\ {1,2}\d{1,2}\ \d{2}:\d{2}:\d{2}) # Timestamp
\s([\w-]*) # Application
\s([\w-]*) # Source
(\[\w+\])? # Optional pid
:
(.*) # Details
""", re.VERBOSE)

# Parse each non-empty line; lines that do not match the pattern are dropped.
records = []
with open(logfile, 'r') as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        match = SYSLOG_RE.search(line)
        if match:
            groups = match.groups()
            records.append({
                'timestamp': groups[0],
                'app': groups[1],
                'source': groups[2],
                'pid': groups[3],
                'details': groups[4],
                'raw': line,
            })

# Fixes vs. the original:
# * csv.writer under Python 3 needs a text-mode file opened with newline=''
#   (the old 'wb' mode raised TypeError on the first write).
# * An input with no parseable lines no longer crashes on records[0].
with open(outfile, 'w', newline='') as csvfile:
    logwriter = csv.writer(csvfile)
    if records:
        logwriter.writerow(records[0].keys())
        for record in records:
            logwriter.writerow(record.values())
|
python
|
import cv2
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from python_util.image_processing.image_stats import get_scaling_factor
def load_image_paths(image_list):
    """Read ``image_list`` (one image path per line) and return the paths
    with trailing whitespace and newlines stripped."""
    with open(image_list) as list_file:
        return [line.rstrip() for line in list_file]
def scale_image(image, fixed_height=None, scaling_factor=1.0):
    """Resize ``image`` by the factor chosen by ``get_scaling_factor``.

    Returns:
        (resized image, applied scaling factor); the image is returned
        unchanged when the factor is exactly 1.0.
    """
    height, width = image.shape[:2]
    factor = get_scaling_factor(height, width, scaling_factor, fixed_height=fixed_height)
    if factor == 1.0:
        return image, factor
    # INTER_AREA when shrinking; INTER_CUBIC when enlarging
    # (swap to INTER_LINEAR if cubic turns out too slow).
    interpolation = cv2.INTER_AREA if factor < 1.0 else cv2.INTER_CUBIC
    resized = cv2.resize(image, None, fx=factor, fy=factor, interpolation=interpolation)
    return resized, factor
def load_and_scale_image(path_to_image, fixed_height, scaling_factor):
    """Load an image from disk, rescale it, and derive a normalized
    grayscale copy.

    Returns:
        (scaled BGR image, grayscale image with values in [0, 1], applied factor)

    Raises:
        ValueError: if the file cannot be read as an image.
    """
    image = cv2.imread(path_to_image)
    # cv2.imread signals failure by returning None instead of raising,
    # which previously surfaced as a cryptic AttributeError downstream.
    if image is None:
        raise ValueError(f"Could not read image: {path_to_image}")
    image, sc = scale_image(image, fixed_height, scaling_factor)
    image_grey = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) / 255.0
    return image, image_grey, sc
def load_graph(path_to_pb):
    """Deserialize a frozen-graph protobuf file and import it into a new
    ``tf.Graph``, which is returned."""
    # Parse the serialized GraphDef from disk...
    with tf.gfile.GFile(path_to_pb, "rb") as pb_file:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(pb_file.read())
    # ...then import it into a fresh graph under an empty name scope so
    # tensor names stay exactly as exported.
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name="",
            op_dict=None,
            producer_op_list=None
        )
    return graph
def get_net_output(image, pb_graph: tf.Graph, gpu_device="0"):
    """Run the frozen graph on a single image and return the network output.

    Feeds tensor ``inImg:0``, fetches ``output:0``, and returns the first
    (only) batch element of the result.
    """
    # The network expects a 4D NHWC batch: add a channel axis for
    # grayscale input, then a leading batch axis.
    if len(image.shape) == 2:
        image = np.expand_dims(image, axis=-1)
    batch = np.expand_dims(image, axis=0)

    session_conf = tf.ConfigProto()
    session_conf.gpu_options.visible_device_list = gpu_device
    if gpu_device == "" or gpu_device is None:
        # Empty/None device string: force CPU execution.
        import os
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    with tf.Session(graph=pb_graph, config=session_conf) as sess:
        input_tensor = sess.graph.get_tensor_by_name('inImg:0')
        output_tensor = sess.graph.get_tensor_by_name('output:0')
        return sess.run(output_tensor, feed_dict={input_tensor: batch})[0]
def apply_threshold(net_output, threshold):
    """Binarize ``net_output``: values strictly above the threshold map to
    255, everything else to 0 (returned as uint8).

    For uint8 inputs the threshold is interpreted as a fraction of 255.
    """
    effective = threshold * 255 if net_output.dtype == np.uint8 else threshold
    return ((net_output > effective) * 255).astype(np.uint8)
def _plot_image_with_net_output(image, net_output):
    """Blend a single-channel network output over a BGR image and return
    the result converted to RGB order (ready for matplotlib)."""
    overlay = np.uint8(cv2.cvtColor(net_output, cv2.COLOR_GRAY2BGR))
    overlay = cv2.cvtColor(overlay, cv2.COLOR_BGR2HLS)
    blended = cv2.addWeighted(image, 0.9, overlay, 0.4, 0)
    return cv2.cvtColor(blended, cv2.COLOR_BGR2RGB)
def plot_net_output(net_output, image=None):
    """Display each class channel of a network output with matplotlib.

    Args:
        net_output: 4D array (batch, height, width, classes); only batch
            element 0 is shown. Values are scaled by 255, so they are
            presumably in [0, 1] - TODO confirm with callers.
        image: optional BGR image blended under each channel.
    """
    num_classes = net_output.shape[-1]
    for cl in range(num_classes):
        # Convert channel `cl` of the first batch element to 8-bit grey.
        net_output_2d = net_output[0, :, :, cl]
        net_output_2d = net_output_2d * 255
        net_output_2d = np.uint8(net_output_2d)
        if image is not None:
            image_plot = _plot_image_with_net_output(image, net_output_2d)
            plt.imshow(image_plot)
        else:
            image_plot = net_output_2d
            plt.imshow(image_plot, cmap="gray")
        plt.show()
|
python
|
# Copyright (C) 2019 Intel Corporation. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
import os
import subprocess
# Prefixes of `dmidecode` output lines that are kept when dumping the
# BIOS and base-board sections (see handle_hw_info / dump_excute).
BIOS_INFO_KEY = ['BIOS Information', 'Vendor:', 'Version:', 'Release Date:', 'BIOS Revision:']
BASE_BOARD_KEY = ['Base Board Information', 'Manufacturer:', 'Product Name:', 'Version:']
def check_dmi():
    """Return True when SMBIOS/DMI tables are exposed under sysfs,
    i.e. this tool is running on a native OS."""
    dmi_path = "/sys/firmware/dmi"
    return os.path.exists(dmi_path)
def print_yel(msg, warn=False):
    """Print ``msg`` in yellow; with ``warn=True``, prefix the plain message
    with a yellow 'Warning' tag instead of coloring the whole line."""
    if not warn:
        print("\033[1;33m{0}\033[0m".format(msg))
    else:
        print("\033[1;33mWarning\033[0m:"+msg)
def print_red(msg, err=False):
    """Print ``msg`` in red; with ``err=True``, prefix the plain message
    with a red 'Error' tag instead of coloring the whole line."""
    if not err:
        print("\033[1;31m{0}\033[0m".format(msg))
    else:
        print("\033[1;31mError\033[0m:"+msg)
def decode_stdout(resource):
    """Read one line from ``resource``'s stdout pipe and decode it as ASCII."""
    return resource.stdout.readline().decode('ascii')
def handle_hw_info(line, hw_info):
    """Return True when ``line`` begins with any key listed in ``hw_info``.

    A key may span one, two, or three whitespace-separated tokens, so the
    first 1-3 tokens of ``line`` are each compared against every key.
    """
    tokens = line.split()
    candidates = (" ".join(tokens[0:1]), " ".join(tokens[0:2]), " ".join(tokens[0:3]))
    for key in hw_info:
        if key in candidates:
            return True
    return False
def handle_pci_dev(line):
    """Return True for lspci lines worth keeping: memory-region (BAR)
    lines, and device-header lines shaped like 'BB:DD.F <description>'."""
    if "Region" in line and "Memory at" in line:
        return True
    if line == '\n':
        return False
    first_token = line.split()[0]
    # Device headers carry a bus:device.function address, e.g. '00:1f.0'.
    return first_token[2:3] == ':' and first_token[5:6] == '.'
def cmd_excute(cmd):
    """Spawn ``cmd`` through the shell and return the Popen handle with
    stdout/stderr captured as pipes (caller is responsible for reaping)."""
    return subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        close_fds=True,
    )
def dump_excute(cmd, desc, config):
    """Execute ``cmd`` and write its filtered output into ``config``,
    wrapped in <desc> ... </desc> tags.

    Args:
        cmd: shell command whose stdout is captured (via cmd_excute).
        desc: section tag name; also selects the per-section line filter
            (PCI_DEVICE / BIOS_INFO / BASE_BOARD_INFO keep only relevant lines).
        config: writable text file object the section is printed into.
    """
    val_dmi = check_dmi()
    print("\t<{0}>".format(desc), file=config)
    # Without /sys/firmware/dmi, dmidecode output would be meaningless:
    # emit an (empty) closing tag and bail out.  NOTE(review): this closing
    # tag has an extra leading tab compared to the normal path - confirm
    # whether downstream parsing depends on that.
    if not val_dmi and "dmidecode" in cmd:
        print("\t\t</{0}>".format(desc), file=config)
        return
    res = cmd_excute(cmd)
    while True:
        line = res.stdout.readline().decode('ascii')
        if not line:  # EOF reached
            break
        if desc == "PCI_DEVICE":
            # Drop the trailing "(prog-if ...)" qualifier from device headers.
            if "prog-if" in line:
                line = line.rsplit('(', 1)[0] + '\n'
            ret = handle_pci_dev(line)
            if not ret:
                continue
        if desc == "BIOS_INFO":
            ret = handle_hw_info(line, BIOS_INFO_KEY)
            if not ret:
                continue
        if desc == "BASE_BOARD_INFO":
            ret = handle_hw_info(line, BASE_BOARD_KEY)
            if not ret:
                continue
        print("\t{}".format(line.strip()), file=config)
    print("\t</{0}>".format(desc), file=config)
|
python
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['EntitlementArgs', 'Entitlement']
# Generated by the Pulumi SDK Generator - edit with care; regeneration
# overwrites manual changes.
@pulumi.input_type
class EntitlementArgs:
    def __init__(__self__, *,
                 account_id: pulumi.Input[str],
                 customer_id: pulumi.Input[str],
                 offer: pulumi.Input[str],
                 association_info: Optional[pulumi.Input['GoogleCloudChannelV1AssociationInfoArgs']] = None,
                 commitment_settings: Optional[pulumi.Input['GoogleCloudChannelV1CommitmentSettingsArgs']] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ParameterArgs']]]] = None,
                 purchase_order_id: Optional[pulumi.Input[str]] = None,
                 request_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Entitlement resource.
        :param pulumi.Input[str] offer: The offer resource name for which the entitlement is to be created. Takes the form: accounts/{account_id}/offers/{offer_id}.
        :param pulumi.Input['GoogleCloudChannelV1AssociationInfoArgs'] association_info: Association information to other entitlements.
        :param pulumi.Input['GoogleCloudChannelV1CommitmentSettingsArgs'] commitment_settings: Commitment settings for a commitment-based Offer. Required for commitment based offers.
        :param pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ParameterArgs']]] parameters: Extended entitlement parameters. When creating an entitlement, valid parameter names and values are defined in the Offer.parameter_definitions. The response may include the following output-only Parameters: - assigned_units: The number of licenses assigned to users. - max_units: The maximum assignable units for a flexible offer. - num_units: The total commitment for commitment-based offers.
        :param pulumi.Input[str] purchase_order_id: Optional. This purchase order (PO) information is for resellers to use for their company tracking usage. If a purchaseOrderId value is given, it appears in the API responses and shows up in the invoice. The property accepts up to 80 plain text characters.
        :param pulumi.Input[str] request_id: Optional. You can specify an optional unique request ID, and if you need to retry your request, the server will know to ignore the request if it's complete. For example, you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if it received the original operation with the same request ID. If it did, it will ignore the second request. The request ID must be a valid [UUID](https://tools.ietf.org/html/rfc4122) with the exception that zero UUID is not supported (`00000000-0000-0000-0000-000000000000`).
        """
        # Required inputs are always recorded; optional inputs only when given.
        pulumi.set(__self__, "account_id", account_id)
        pulumi.set(__self__, "customer_id", customer_id)
        pulumi.set(__self__, "offer", offer)
        if association_info is not None:
            pulumi.set(__self__, "association_info", association_info)
        if commitment_settings is not None:
            pulumi.set(__self__, "commitment_settings", commitment_settings)
        if parameters is not None:
            pulumi.set(__self__, "parameters", parameters)
        if purchase_order_id is not None:
            pulumi.set(__self__, "purchase_order_id", purchase_order_id)
        if request_id is not None:
            pulumi.set(__self__, "request_id", request_id)

    @property
    @pulumi.getter(name="accountId")
    def account_id(self) -> pulumi.Input[str]:
        """
        Required. The account the entitlement is created under (NOTE(review):
        undocumented by the generator; presumably the accounts/{account_id}
        segment referenced by `offer` - confirm against the API docs).
        """
        return pulumi.get(self, "account_id")

    @account_id.setter
    def account_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_id", value)

    @property
    @pulumi.getter(name="customerId")
    def customer_id(self) -> pulumi.Input[str]:
        """
        Required. Identifier of the customer the entitlement is created for
        (NOTE(review): undocumented by the generator - confirm semantics).
        """
        return pulumi.get(self, "customer_id")

    @customer_id.setter
    def customer_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "customer_id", value)

    @property
    @pulumi.getter
    def offer(self) -> pulumi.Input[str]:
        """
        The offer resource name for which the entitlement is to be created. Takes the form: accounts/{account_id}/offers/{offer_id}.
        """
        return pulumi.get(self, "offer")

    @offer.setter
    def offer(self, value: pulumi.Input[str]):
        pulumi.set(self, "offer", value)

    @property
    @pulumi.getter(name="associationInfo")
    def association_info(self) -> Optional[pulumi.Input['GoogleCloudChannelV1AssociationInfoArgs']]:
        """
        Association information to other entitlements.
        """
        return pulumi.get(self, "association_info")

    @association_info.setter
    def association_info(self, value: Optional[pulumi.Input['GoogleCloudChannelV1AssociationInfoArgs']]):
        pulumi.set(self, "association_info", value)

    @property
    @pulumi.getter(name="commitmentSettings")
    def commitment_settings(self) -> Optional[pulumi.Input['GoogleCloudChannelV1CommitmentSettingsArgs']]:
        """
        Commitment settings for a commitment-based Offer. Required for commitment based offers.
        """
        return pulumi.get(self, "commitment_settings")

    @commitment_settings.setter
    def commitment_settings(self, value: Optional[pulumi.Input['GoogleCloudChannelV1CommitmentSettingsArgs']]):
        pulumi.set(self, "commitment_settings", value)

    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ParameterArgs']]]]:
        """
        Extended entitlement parameters. When creating an entitlement, valid parameter names and values are defined in the Offer.parameter_definitions. The response may include the following output-only Parameters: - assigned_units: The number of licenses assigned to users. - max_units: The maximum assignable units for a flexible offer. - num_units: The total commitment for commitment-based offers.
        """
        return pulumi.get(self, "parameters")

    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ParameterArgs']]]]):
        pulumi.set(self, "parameters", value)

    @property
    @pulumi.getter(name="purchaseOrderId")
    def purchase_order_id(self) -> Optional[pulumi.Input[str]]:
        """
        Optional. This purchase order (PO) information is for resellers to use for their company tracking usage. If a purchaseOrderId value is given, it appears in the API responses and shows up in the invoice. The property accepts up to 80 plain text characters.
        """
        return pulumi.get(self, "purchase_order_id")

    @purchase_order_id.setter
    def purchase_order_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "purchase_order_id", value)

    @property
    @pulumi.getter(name="requestId")
    def request_id(self) -> Optional[pulumi.Input[str]]:
        """
        Optional. You can specify an optional unique request ID, and if you need to retry your request, the server will know to ignore the request if it's complete. For example, you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if it received the original operation with the same request ID. If it did, it will ignore the second request. The request ID must be a valid [UUID](https://tools.ietf.org/html/rfc4122) with the exception that zero UUID is not supported (`00000000-0000-0000-0000-000000000000`).
        """
        return pulumi.get(self, "request_id")

    @request_id.setter
    def request_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_id", value)
class Entitlement(pulumi.CustomResource):
    # Overload: construct the resource from individual keyword arguments.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_id: Optional[pulumi.Input[str]] = None,
                 association_info: Optional[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1AssociationInfoArgs']]] = None,
                 commitment_settings: Optional[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1CommitmentSettingsArgs']]] = None,
                 customer_id: Optional[pulumi.Input[str]] = None,
                 offer: Optional[pulumi.Input[str]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1ParameterArgs']]]]] = None,
                 purchase_order_id: Optional[pulumi.Input[str]] = None,
                 request_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Creates an entitlement for a customer. Possible error codes: * PERMISSION_DENIED: The customer doesn't belong to the reseller. * INVALID_ARGUMENT: * Required request parameters are missing or invalid. * There is already a customer entitlement for a SKU from the same product family. * INVALID_VALUE: Make sure the OfferId is valid. If it is, contact Google Channel support for further troubleshooting. * NOT_FOUND: The customer or offer resource was not found. * ALREADY_EXISTS: * The SKU was already purchased for the customer. * The customer's primary email already exists. Retry after changing the customer's primary contact email. * CONDITION_NOT_MET or FAILED_PRECONDITION: * The domain required for purchasing a SKU has not been verified. * A pre-requisite SKU required to purchase an Add-On SKU is missing. For example, Google Workspace Business Starter is required to purchase Vault or Drive. * (Developer accounts only) Reseller and resold domain must meet the following naming requirements: * Domain names must start with goog-test. * Domain names must include the reseller domain. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The ID of a long-running operation. To get the results of the operation, call the GetOperation method of CloudChannelOperationsService. The Operation metadata will contain an instance of OperationMetadata.
        Auto-naming is currently not supported for this resource.
        Note - this resource's API doesn't support deletion. When deleted, the resource will persist
        on Google Cloud even though it will be deleted from Pulumi state.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['GoogleCloudChannelV1AssociationInfoArgs']] association_info: Association information to other entitlements.
        :param pulumi.Input[pulumi.InputType['GoogleCloudChannelV1CommitmentSettingsArgs']] commitment_settings: Commitment settings for a commitment-based Offer. Required for commitment based offers.
        :param pulumi.Input[str] offer: The offer resource name for which the entitlement is to be created. Takes the form: accounts/{account_id}/offers/{offer_id}.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1ParameterArgs']]]] parameters: Extended entitlement parameters. When creating an entitlement, valid parameter names and values are defined in the Offer.parameter_definitions. The response may include the following output-only Parameters: - assigned_units: The number of licenses assigned to users. - max_units: The maximum assignable units for a flexible offer. - num_units: The total commitment for commitment-based offers.
        :param pulumi.Input[str] purchase_order_id: Optional. This purchase order (PO) information is for resellers to use for their company tracking usage. If a purchaseOrderId value is given, it appears in the API responses and shows up in the invoice. The property accepts up to 80 plain text characters.
        :param pulumi.Input[str] request_id: Optional. You can specify an optional unique request ID, and if you need to retry your request, the server will know to ignore the request if it's complete. For example, you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if it received the original operation with the same request ID. If it did, it will ignore the second request. The request ID must be a valid [UUID](https://tools.ietf.org/html/rfc4122) with the exception that zero UUID is not supported (`00000000-0000-0000-0000-000000000000`).
        """
        ...
    # Overload: construct the resource from a fully-populated EntitlementArgs.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: EntitlementArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates an entitlement for a customer. Possible error codes: * PERMISSION_DENIED: The customer doesn't belong to the reseller. * INVALID_ARGUMENT: * Required request parameters are missing or invalid. * There is already a customer entitlement for a SKU from the same product family. * INVALID_VALUE: Make sure the OfferId is valid. If it is, contact Google Channel support for further troubleshooting. * NOT_FOUND: The customer or offer resource was not found. * ALREADY_EXISTS: * The SKU was already purchased for the customer. * The customer's primary email already exists. Retry after changing the customer's primary contact email. * CONDITION_NOT_MET or FAILED_PRECONDITION: * The domain required for purchasing a SKU has not been verified. * A pre-requisite SKU required to purchase an Add-On SKU is missing. For example, Google Workspace Business Starter is required to purchase Vault or Drive. * (Developer accounts only) Reseller and resold domain must meet the following naming requirements: * Domain names must start with goog-test. * Domain names must include the reseller domain. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The ID of a long-running operation. To get the results of the operation, call the GetOperation method of CloudChannelOperationsService. The Operation metadata will contain an instance of OperationMetadata.
        Auto-naming is currently not supported for this resource.
        Note - this resource's API doesn't support deletion. When deleted, the resource will persist
        on Google Cloud even though it will be deleted from Pulumi state.

        :param str resource_name: The name of the resource.
        :param EntitlementArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two typed overloads above: callers pass either
        # a single EntitlementArgs object or plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(EntitlementArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand its fields into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_id: Optional[pulumi.Input[str]] = None,
                 association_info: Optional[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1AssociationInfoArgs']]] = None,
                 commitment_settings: Optional[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1CommitmentSettingsArgs']]] = None,
                 customer_id: Optional[pulumi.Input[str]] = None,
                 offer: Optional[pulumi.Input[str]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GoogleCloudChannelV1ParameterArgs']]]]] = None,
                 purchase_order_id: Optional[pulumi.Input[str]] = None,
                 request_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Shared implementation behind both ``__init__`` overloads: validates
        resource options, enforces required inputs, and registers the
        resource with the Pulumi engine.
        """
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no provider id): build the property
            # bag and enforce the required inputs. `opts.urn` set means the
            # values will be recovered from an existing URN instead.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = EntitlementArgs.__new__(EntitlementArgs)

            if account_id is None and not opts.urn:
                raise TypeError("Missing required property 'account_id'")
            __props__.__dict__["account_id"] = account_id
            __props__.__dict__["association_info"] = association_info
            __props__.__dict__["commitment_settings"] = commitment_settings
            if customer_id is None and not opts.urn:
                raise TypeError("Missing required property 'customer_id'")
            __props__.__dict__["customer_id"] = customer_id
            if offer is None and not opts.urn:
                raise TypeError("Missing required property 'offer'")
            __props__.__dict__["offer"] = offer
            __props__.__dict__["parameters"] = parameters
            __props__.__dict__["purchase_order_id"] = purchase_order_id
            __props__.__dict__["request_id"] = request_id
            # Output-only properties start as None; the provider fills them in.
            __props__.__dict__["create_time"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["provisioned_service"] = None
            __props__.__dict__["provisioning_state"] = None
            __props__.__dict__["suspension_reasons"] = None
            __props__.__dict__["trial_settings"] = None
            __props__.__dict__["update_time"] = None
        super(Entitlement, __self__).__init__(
            'google-native:cloudchannel/v1:Entitlement',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Entitlement':
        """
        Get an existing Entitlement resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Every property starts as None; the engine populates them from the
        # provider's recorded state for `id`.
        __props__ = EntitlementArgs.__new__(EntitlementArgs)

        __props__.__dict__["association_info"] = None
        __props__.__dict__["commitment_settings"] = None
        __props__.__dict__["create_time"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["offer"] = None
        __props__.__dict__["parameters"] = None
        __props__.__dict__["provisioned_service"] = None
        __props__.__dict__["provisioning_state"] = None
        __props__.__dict__["purchase_order_id"] = None
        __props__.__dict__["suspension_reasons"] = None
        __props__.__dict__["trial_settings"] = None
        __props__.__dict__["update_time"] = None
        return Entitlement(resource_name, opts=opts, __props__=__props__)
    # --- Resource outputs below: read-only values resolved by the provider ---

    @property
    @pulumi.getter(name="associationInfo")
    def association_info(self) -> pulumi.Output['outputs.GoogleCloudChannelV1AssociationInfoResponse']:
        """
        Association information to other entitlements.
        """
        return pulumi.get(self, "association_info")

    @property
    @pulumi.getter(name="commitmentSettings")
    def commitment_settings(self) -> pulumi.Output['outputs.GoogleCloudChannelV1CommitmentSettingsResponse']:
        """
        Commitment settings for a commitment-based Offer. Required for commitment based offers.
        """
        return pulumi.get(self, "commitment_settings")

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time at which the entitlement is created.
        """
        return pulumi.get(self, "create_time")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name of an entitlement in the form: accounts/{account_id}/customers/{customer_id}/entitlements/{entitlement_id}.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def offer(self) -> pulumi.Output[str]:
        """
        The offer resource name for which the entitlement is to be created. Takes the form: accounts/{account_id}/offers/{offer_id}.
        """
        return pulumi.get(self, "offer")

    @property
    @pulumi.getter
    def parameters(self) -> pulumi.Output[Sequence['outputs.GoogleCloudChannelV1ParameterResponse']]:
        """
        Extended entitlement parameters. When creating an entitlement, valid parameter names and values are defined in the Offer.parameter_definitions. The response may include the following output-only Parameters: - assigned_units: The number of licenses assigned to users. - max_units: The maximum assignable units for a flexible offer. - num_units: The total commitment for commitment-based offers.
        """
        return pulumi.get(self, "parameters")

    @property
    @pulumi.getter(name="provisionedService")
    def provisioned_service(self) -> pulumi.Output['outputs.GoogleCloudChannelV1ProvisionedServiceResponse']:
        """
        Service provisioning details for the entitlement.
        """
        return pulumi.get(self, "provisioned_service")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        Current provisioning state of the entitlement.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="purchaseOrderId")
    def purchase_order_id(self) -> pulumi.Output[str]:
        """
        Optional. This purchase order (PO) information is for resellers to use for their company tracking usage. If a purchaseOrderId value is given, it appears in the API responses and shows up in the invoice. The property accepts up to 80 plain text characters.
        """
        return pulumi.get(self, "purchase_order_id")

    @property
    @pulumi.getter(name="suspensionReasons")
    def suspension_reasons(self) -> pulumi.Output[Sequence[str]]:
        """
        Enumerable of all current suspension reasons for an entitlement.
        """
        return pulumi.get(self, "suspension_reasons")

    @property
    @pulumi.getter(name="trialSettings")
    def trial_settings(self) -> pulumi.Output['outputs.GoogleCloudChannelV1TrialSettingsResponse']:
        """
        Settings for trial offers.
        """
        return pulumi.get(self, "trial_settings")

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        The time at which the entitlement is updated.
        """
        return pulumi.get(self, "update_time")
|
python
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Created 2015
@author: giangnguyen
"""
import pyspark
import sys
if __name__ == "__main__":
    # NOTE(review): this script is Python 2 only (print statements and the
    # `reduce` builtin); it will not run under Python 3.
    print >> sys.stderr, """
Usage: run with example jar:
spark-submit --jars
/opt/cloudera/parcels/CDH/jars/avro-1.7.6-cdh5.3.1.jar,
/opt/cloudera/parcels/CDH/jars/avro-mapred-1.7.6-cdh5.3.1-hadoop2.jar,
/opt/cloudera/parcels/CDH/jars/spark-examples-1.2.0-cdh5.3.1-hadoop2.5.0-cdh5.3.1.jar
spark_get_avro.py
"""
    print '\n', len(sys.argv), sys.argv, '\n'

    # Build Spark/SQL contexts.
    sc = pyspark.SparkContext()  # not necessary in an interactive session
    sqc = pyspark.sql.SQLContext(sc)

    # Dead code kept by the author: argument-driven path/schema selection.
    '''
    conf = None
    if len(sys.argv) == 1:
        # path = '/user/jobs/scored/2015/02/02/02/EVENTS_SCORED/part-m-00000.avro'
    elif len(sys.argv) == 2:
        path = sys.argv[1]
    elif len(sys.argv) == 3:
        schema_rdd = sc.textFile(sys.argv[2], 1).collect()
        conf = {"avro.schema.input.key": reduce(lambda x, y: x + y, schema_rdd)}
    '''
    # Hard-coded input: the Avro data file plus its reader schema.
    path = 'users.avro'
    schema_file = 'user.avsc'
    schema_rdd = sc.textFile(schema_file, 1).collect()
    conf = None
    # Concatenate the schema lines into a single string for the Avro reader.
    conf = {"avro.schema.input.key": reduce(lambda x, y: x + y, schema_rdd)}
    print '\n', conf, '\n'

    # Read the Avro file through the Hadoop input format; each AvroKey is
    # converted to a Python object by the bundled example converter.
    avro_rdd = sc.newAPIHadoopFile(
        path,
        "org.apache.avro.mapreduce.AvroKeyInputFormat",
        "org.apache.avro.mapred.AvroKey",
        "org.apache.hadoop.io.NullWritable",
        keyConverter="org.apache.spark.examples.pythonconverters.AvroWrapperToJavaConverter",
        conf=conf)
    data = avro_rdd.map(lambda x: x[0]).collect()
    # NOTE(review): `data` is a plain list after collect(), so `data.count()`
    # raises TypeError (list.count requires an argument); presumably
    # `len(data)` or counting the RDD before collect() was intended.
    print data.count()
    '''
    lines = 0
    for k in output:
        lines += 1
        print k
        if lines >= 3:
            sys.exit()
    '''
    sc.stop()
|
python
|
from arekit.common.experiment.api.enums import BaseDocumentTag
from arekit.common.experiment.api.ops_doc import DocumentOperations
class CustomDocOperations(DocumentOperations):
    """Document operations backed by an in-memory list of documents.

    Documents are supplied after construction through `set_docs` and are
    identified by their position in that list.
    """

    def __init__(self, exp_ctx, text_parser):
        super(CustomDocOperations, self).__init__(exp_ctx, text_parser)
        # Both are populated later by set_docs().
        self.__docs = None
        self.__doc_ids = None

    def set_docs(self, docs):
        """Store `docs` and assign each document its list index as id."""
        assert(isinstance(docs, list))
        self.__docs = docs
        self.__doc_ids = list(range(len(docs)))

    def iter_tagget_doc_ids(self, tag):
        """Return ids of documents for `tag`; only Annotate is supported."""
        assert(isinstance(tag, BaseDocumentTag))
        assert(tag == BaseDocumentTag.Annotate)
        return self.__doc_ids

    def get_doc(self, doc_id):
        """Return the document registered under `doc_id`."""
        return self.__docs[doc_id]
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
"""
@author: Kirill Python
@contact: https://vk.com/python273
@license Apache License, Version 2.0, see LICENSE file
Copyright (C) 2017
"""
# Package distribution metadata.
# NOTE(review): distutils is deprecated (removed from the stdlib in
# Python 3.12) — consider migrating this to setuptools.setup.
setup(
    name='vk_api',
    version='8.3.1',
    author='python273',
    author_email='[email protected]',
    url='https://github.com/python273/vk_api',
    description='Module for writing scripts for vk.com (vkontakte)',
    download_url='https://github.com/python273/vk_api/archive/master.zip',
    license='Apache License, Version 2.0, see LICENSE file',
    packages=['vk_api', 'jconfig'],
    # enum34 presumably backports `enum` for the Python 2.7 targets listed
    # in the classifiers below — verify it is still needed.
    install_requires=['requests', 'enum34'],
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Programming Language :: Python :: Implementation :: CPython',
    ]
)
|
python
|
import pytest
import torch
from src.defs import layers
def test_flatten():
    """flatten() must preserve element count and element order.

    NOTE(review): the original test only printed the tensors and asserted
    nothing, so it could never fail. Minimal shape-agnostic invariants are
    asserted now; tighten to an exact expected shape once the intended
    flatten contract (full flatten vs. keep-batch-dim) is confirmed.
    """
    x = torch.arange(12).view(2, 1, 3, 2)
    y = layers.flatten(x)
    # Flattening may change the shape but never the number of elements...
    assert y.numel() == x.numel()
    # ...nor the order of the values.
    assert torch.equal(y.reshape(-1), x.reshape(-1))
    print('Before flattening: ', x)
    print('After flattening: ', y)
|
python
|
#!/usr/bin/env python3
import unittest
from typing import Tuple, Union

import numpy as np
import torch
from torch import Tensor

from reagent.ope.estimators.types import Distribution, TypeWrapper, Values
class TestTypes(unittest.TestCase):
    """Tests TypeWrapper over int/float/tuple/ndarray/tensor payloads:
    indexing, hashing, and (in)equality semantics."""

    TestType = Union[int, Tuple[int], float, Tuple[float], np.ndarray, Tensor]
    TestClass = TypeWrapper[TestType]

    def setUp(self) -> None:
        # Fixture list indexed by wrapped int-like values in the tests below.
        self._test_list = [0, 1, 2, 3, 5]

    def test_int_type(self):
        # A wrapped int is usable as a list index and hashes like the raw int.
        int_val = TestTypes.TestClass(3)
        self.assertEqual(self._test_list[int_val], 3)
        self.assertEqual(hash(int_val), hash(3))
        int_val_other = TestTypes.TestClass(3)
        self.assertEqual(int_val, int_val_other)
        int_val_other = TestTypes.TestClass(4)
        self.assertNotEqual(int_val, int_val_other)

    def test_float_type(self):
        # A wrapped float truncates to int when used as an index.
        float_val = TestTypes.TestClass(3.2)
        self.assertEqual(self._test_list[float_val], 3)
        self.assertEqual(hash(float_val), hash(3.2))
        float_val_other = TestTypes.TestClass(3.2)
        self.assertEqual(float_val, float_val_other)
        float_val_other = TestTypes.TestClass(4.3)
        self.assertNotEqual(float_val, float_val_other)

    def test_tuple_int_type(self):
        # Tuples are hashable but cannot serve as list indices.
        tuple_int_val = TestTypes.TestClass((1, 2, 3))
        with self.assertRaises(ValueError):
            self._test_list[tuple_int_val] = 1
        self.assertEqual(hash(tuple_int_val), hash((1, 2, 3)))
        tuple_int_val_other = TestTypes.TestClass((1, 2, 3))
        self.assertEqual(tuple_int_val, tuple_int_val_other)
        tuple_int_val_other = TestTypes.TestClass((2, 3, 1))
        self.assertNotEqual(tuple_int_val, tuple_int_val_other)

    def test_tuple_float_type(self):
        tuple_float_val = TestTypes.TestClass((1.1, 2.2, 3.3))
        with self.assertRaises(ValueError):
            self._test_list[tuple_float_val] = 1
        self.assertEqual(hash(tuple_float_val), hash((1.1, 2.2, 3.3)))
        tuple_float_val_other = TestTypes.TestClass((1.1, 2.2, 3.3))
        self.assertEqual(tuple_float_val, tuple_float_val_other)
        tuple_float_val_other = TestTypes.TestClass((2.2, 3.3, 1.1))
        self.assertNotEqual(tuple_float_val, tuple_float_val_other)

    def test_ndarray_type(self):
        # Scalar ndarrays behave like their scalar value (index, hash,
        # equality with a plain-int wrapper).
        ndarray_val = TestTypes.TestClass(np.array(3))
        self.assertEqual(self._test_list[ndarray_val], 3)
        self.assertEqual(hash(ndarray_val), hash((3,)))
        ndarray_val_other = TestTypes.TestClass(np.array(3))
        self.assertEqual(ndarray_val, ndarray_val_other)
        int_val_other = TestTypes.TestClass(3)
        self.assertEqual(ndarray_val, int_val_other)
        ndarray_val_other = TestTypes.TestClass(np.array(4))
        self.assertNotEqual(ndarray_val, ndarray_val_other)
        # Multi-element arrays hash over their flattened contents.
        ndarray_val = TestTypes.TestClass(np.array(((1, 2), (3, 4))))
        with self.assertRaises(ValueError):
            self._test_list[ndarray_val] = 1
        self.assertEqual(hash(ndarray_val), hash((1, 2, 3, 4)))
        ndarray_val_other = TestTypes.TestClass(((1, 2), (3, 4)))
        self.assertEqual(ndarray_val, ndarray_val_other)
        # NOTE(review): np.ndarray((1, 2, 3, 4)) allocates an *uninitialized*
        # array of shape (1, 2, 3, 4) — np.array(...) was presumably intended.
        # The assertNotEqual below passes either way; confirm the intent.
        ndarray_val_other = TestTypes.TestClass(np.ndarray((1, 2, 3, 4)))
        self.assertNotEqual(ndarray_val, ndarray_val_other)

    def test_tensor_type(self):
        tensor_val = TestTypes.TestClass(torch.tensor(3))
        self.assertEqual(self._test_list[tensor_val], 3)
        self.assertEqual(hash(tensor_val), hash((3,)))
        tensor_val_other = TestTypes.TestClass(torch.tensor(3))
        self.assertEqual(tensor_val, tensor_val_other)
        # Unlike ndarray, comparing a tensor wrapper to an int wrapper raises.
        int_val_other = TestTypes.TestClass(3)
        with self.assertRaises(TypeError):
            _ = tensor_val == int_val_other
        tensor_val_other = TestTypes.TestClass(torch.tensor(4))
        self.assertNotEqual(tensor_val, tensor_val_other)
        tensor_val = TestTypes.TestClass(torch.tensor(((1, 2), (3, 4))))
        with self.assertRaises(ValueError):
            self._test_list[tensor_val] = 1
        self.assertEqual(hash(tensor_val), hash((1, 2, 3, 4)))
        # Same contents but different shape compare unequal.
        tensor_val_other = TestTypes.TestClass(torch.tensor((1, 2, 3, 4)))
        self.assertNotEqual(tensor_val, tensor_val_other)
class TestValues(unittest.TestCase):
    """Tests Values containers (indexing, sort, unzip, copy, conversion)
    over list/dict/ndarray/tensor backings with wrapped keys."""

    TestIntType = TypeWrapper[int]
    TestTupleFloatType = TypeWrapper[Tuple[float]]

    class TestIntKeyValues(Values[TestIntType]):
        # Int keys can be synthesized from a position.
        def _new_key(self, k: int):
            return TestValues.TestIntType(k)

    class TestTupleFloatKeyValues(Values[TestTupleFloatType]):
        # Tuple keys cannot be synthesized from an int position.
        def _new_key(self, k: int):
            raise TypeError(
                f"value {k} invalid for " f"{TestValues.TestTupleFloatType.__name__}"
            )

    def setUp(self) -> None:
        # The same value set [2.2, 4.4, 1.1, 3.3] backed four different ways.
        self._int_float_values = TestValues.TestIntKeyValues([2.2, 4.4, 1.1, 3.3])
        self._tuple_float_float_values = TestValues.TestTupleFloatKeyValues(
            {
                TestValues.TestTupleFloatType((1.0, 2.0)): 2.2,
                TestValues.TestTupleFloatType((3.0, 4.0)): 4.4,
                TestValues.TestTupleFloatType((5.0, 6.0)): 1.1,
                TestValues.TestTupleFloatType((7.0, 8.0)): 3.3,
            }
        )
        self._int_array_values = TestValues.TestIntKeyValues(
            np.array((2.2, 4.4, 1.1, 3.3))
        )
        self._int_tensor_values = TestValues.TestIntKeyValues(
            torch.tensor((2.2, 4.4, 1.1, 3.3))
        )

    def test_indexing(self):
        # Both raw ints and wrapped keys index into the container.
        self.assertEqual(self._int_float_values[2], 1.1)
        self.assertEqual(self._int_float_values[TestValues.TestIntType(2)], 1.1)
        self.assertEqual(
            self._tuple_float_float_values[TestValues.TestTupleFloatType((3.0, 4.0))],
            4.4,
        )

    def test_sort(self):
        # sort() returns keys/values ordered by descending value.
        keys, values = self._int_float_values.sort()
        self.assertEqual(
            keys,
            [
                TestValues.TestIntType(1),
                TestValues.TestIntType(3),
                TestValues.TestIntType(0),
                TestValues.TestIntType(2),
            ],
        )
        self.assertEqual(values, [4.4, 3.3, 2.2, 1.1])
        keys, values = self._tuple_float_float_values.sort()
        self.assertEqual(
            keys,
            [
                TestValues.TestTupleFloatType((3.0, 4.0)),
                TestValues.TestTupleFloatType((7.0, 8.0)),
                TestValues.TestTupleFloatType((1.0, 2.0)),
                TestValues.TestTupleFloatType((5.0, 6.0)),
            ],
        )
        self.assertEqual(values, [4.4, 3.3, 2.2, 1.1])
        keys, values = self._int_array_values.sort()
        self.assertEqual(
            keys,
            [
                TestValues.TestIntType(1),
                TestValues.TestIntType(3),
                TestValues.TestIntType(0),
                TestValues.TestIntType(2),
            ],
        )
        # Sorted values keep the backing container's type (ndarray here).
        self.assertTrue(np.array_equal(values, np.array([4.4, 3.3, 2.2, 1.1])))
        keys, values = self._int_tensor_values.sort()
        self.assertEqual(
            keys,
            [
                TestValues.TestIntType(1),
                TestValues.TestIntType(3),
                TestValues.TestIntType(0),
                TestValues.TestIntType(2),
            ],
        )
        self.assertTrue(torch.equal(values, torch.tensor([4.4, 3.3, 2.2, 1.1])))

    def test_unzip(self):
        # items/values expose keys and values in insertion order.
        items = self._int_float_values.items
        values = self._int_float_values.values
        self.assertEqual(
            items,
            [
                TestValues.TestIntType(0),
                TestValues.TestIntType(1),
                TestValues.TestIntType(2),
                TestValues.TestIntType(3),
            ],
        )
        self.assertEqual(values, [2.2, 4.4, 1.1, 3.3])
        items = self._tuple_float_float_values.items
        values = self._tuple_float_float_values.values
        self.assertEqual(
            items,
            [
                TestValues.TestTupleFloatType((1.0, 2.0)),
                TestValues.TestTupleFloatType((3.0, 4.0)),
                TestValues.TestTupleFloatType((5.0, 6.0)),
                TestValues.TestTupleFloatType((7.0, 8.0)),
            ],
        )
        self.assertEqual(values, [2.2, 4.4, 1.1, 3.3])
        items = self._int_array_values.items
        values = self._int_array_values.values
        self.assertEqual(
            items,
            [
                TestValues.TestIntType(0),
                TestValues.TestIntType(1),
                TestValues.TestIntType(2),
                TestValues.TestIntType(3),
            ],
        )
        self.assertTrue(np.array_equal(values, np.array([2.2, 4.4, 1.1, 3.3])))
        items = self._int_tensor_values.items
        values = self._int_tensor_values.values
        self.assertEqual(
            items,
            [
                TestValues.TestIntType(0),
                TestValues.TestIntType(1),
                TestValues.TestIntType(2),
                TestValues.TestIntType(3),
            ],
        )
        self.assertTrue(torch.equal(values, torch.tensor([2.2, 4.4, 1.1, 3.3])))

    def test_copy(self):
        # copy() must be deep enough that mutating the copy does not
        # affect the original container.
        copy = self._int_float_values.copy()
        for i, c in zip(self._int_float_values, copy):
            self.assertEqual(i, c)
        copy[1] = 2.1
        self.assertNotEqual(copy[1], self._int_float_values[1])
        copy = self._tuple_float_float_values.copy()
        for i, c in zip(self._tuple_float_float_values, copy):
            self.assertEqual(i, c)
        key = TestValues.TestTupleFloatType((3.0, 4.0))
        copy[key] = 2.1
        self.assertNotEqual(copy[key], self._tuple_float_float_values[key])
        copy = self._int_array_values.copy()
        for i, c in zip(self._int_array_values, copy):
            self.assertEqual(i, c)
        copy[1] = 2.1
        self.assertNotEqual(copy[1], self._int_array_values[1])
        copy = self._int_tensor_values.copy()
        for i, c in zip(self._int_tensor_values, copy):
            self.assertEqual(i, c)
        copy[1] = 2.1
        self.assertNotEqual(copy[1], self._int_tensor_values[1])

    def test_conversion(self):
        # to_tensor / to_ndarray / to_sequence round-trip between backings.
        float_list_val = [1.1, 2.2, 3.3]
        tensor_val = torch.tensor([1.1, 2.2, 3.3], dtype=torch.double)
        array_val = np.array([1.1, 2.2, 3.3], dtype=np.float64)
        self.assertTrue(
            torch.equal(
                Values.to_tensor(float_list_val, dtype=torch.double), tensor_val
            )
        )
        self.assertTrue(
            torch.equal(Values.to_tensor(tensor_val, dtype=torch.double), tensor_val)
        )
        self.assertTrue(
            torch.equal(Values.to_tensor(array_val, dtype=torch.double), tensor_val)
        )
        self.assertTrue(np.array_equal(Values.to_ndarray(float_list_val), array_val))
        self.assertTrue(
            np.array_equal(Values.to_ndarray(tensor_val, dtype=np.float64), array_val)
        )
        self.assertTrue(np.array_equal(Values.to_ndarray(array_val), array_val))
        self.assertEqual(Values.to_sequence(float_list_val), float_list_val)
        self.assertEqual(Values.to_sequence(tensor_val), float_list_val)
        self.assertEqual(Values.to_sequence(array_val), float_list_val)
class TestDistribution(unittest.TestCase):
    """Tests Distribution: value normalization, sampling frequencies, and
    greedy ranking, over tensor/ndarray/list/dict backings.

    NOTE(review): `Distribution` is not among the names imported from
    reagent.ope.estimators.types at the top of this file — without that
    import this class raises NameError at definition time; confirm/fix the
    import line.
    """

    class TestIntKeyDistribution(Distribution[int]):
        def _new_key(self, k: int):
            return k

    def setUp(self) -> None:
        # The same raw weights [1, 2, 3, 4] backed four different ways; the
        # tests below expect them normalized to [0.1, 0.2, 0.3, 0.4].
        self._tensor_distribution = TestDistribution.TestIntKeyDistribution(
            torch.tensor([1.0, 2.0, 3.0, 4.0])
        )
        self._array_distribution = TestDistribution.TestIntKeyDistribution(
            np.array([1.0, 2.0, 3.0, 4.0])
        )
        self._list_distribution = TestDistribution.TestIntKeyDistribution(
            [1.0, 2.0, 3.0, 4.0]
        )
        self._map_distribution = TestDistribution.TestIntKeyDistribution(
            {0: 1.0, 1: 2.0, 2: 3.0, 3: 4.0}
        )

    def test_values(self):
        self.assertTrue(
            torch.equal(
                self._tensor_distribution.values, torch.tensor([0.1, 0.2, 0.3, 0.4])
            )
        )
        self.assertTrue(
            np.array_equal(
                self._array_distribution.values, np.array([0.1, 0.2, 0.3, 0.4])
            )
        )
        self.assertEqual(self._list_distribution.values, [0.1, 0.2, 0.3, 0.4])
        self.assertTrue(self._map_distribution.values, [0.1, 0.2, 0.3, 0.4])

    def _test_sample(self, distribution: Distribution):
        # Empirical sampling frequencies should approximate the normalized
        # probabilities (relies on the module-level RNG seeding in __main__).
        counts = [0] * 4
        total = 100000
        for _ in range(total):
            counts[distribution.sample()] += 1
        self.assertAlmostEqual(counts[0] / total, 0.1, places=2)
        self.assertAlmostEqual(counts[1] / total, 0.2, places=2)
        self.assertAlmostEqual(counts[2] / total, 0.3, places=2)
        self.assertAlmostEqual(counts[3] / total, 0.4, places=2)

    def test_sample(self):
        # greedy(k) returns the top-k keys by descending probability.
        self._test_sample(self._tensor_distribution)
        self.assertEqual(self._tensor_distribution.greedy(4), [3, 2, 1, 0])
        self._test_sample(self._array_distribution)
        self.assertEqual(self._array_distribution.greedy(4), [3, 2, 1, 0])
        self._test_sample(self._list_distribution)
        self.assertEqual(self._list_distribution.greedy(4), [3, 2, 1, 0])
        self._test_sample(self._map_distribution)
        self.assertEqual(self._map_distribution.greedy(4), [3, 2, 1, 0])
if __name__ == "__main__":
    # Seed both RNGs so the sampling-frequency tests are reproducible.
    np.random.seed(1234)
    torch.random.manual_seed(1234)
    unittest.main()
|
python
|
#!/usr/bin/python
#TODO Update the line below
#Date last updated: 27 Mar 2018
#NO7 Web logs
#Software: Microsoft Internet Information Services 7.0
#Version: 1.0
#Date: 2011-12-11 00:00:00
#Fields: date time s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) sc-status sc-substatus sc-win32-status time-taken
#NO1 Web logs
#Software: Microsoft Internet Information Services 6.0
#Version: 1.0
#Date: 2008-02-14 15:28:35
#Fields: date time s-sitename s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) sc-status sc-substatus sc-win32-status
#NO1 HTTP Error logs
#Software: Microsoft HTTP API 1.0/2.0
#Version: 1.0
#Date: 2017-12-05 07:17:02
#Fields: date time c-ip c-port s-ip s-port cs-version cs-method cs-uri sc-status s-siteid s-reason s-queuename
#NO2 IIS Logs
#Software: Microsoft Internet Information Services 8.5
#Version: 1.0
#Date: 2017-09-17 00:00:00
#Fields: date time s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) cs(Referer) sc-status sc-substatus sc-win32-status time-taken
import signal,sys,os,time,requests,errno,tarfile
import multiprocessing as mp
from subprocess import call
from subprocess import check_output
from cStringIO import StringIO
#Define globals
g_header = {"Content-Type": "application/json"}
g_index = ""
g_stop = False
g_curlLock = mp.Lock()
g_printLock = mp.Lock()
g_numFiles = 0
g_logformat = '#Fields: date time s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) cs(Referer) sc-status sc-substatus sc-win32-status time-taken'
FNULL = open(os.devnull, 'w')
sCurrentFile = mp.Value('i', 0)
'''
#Use the dictionary below for NO1 HTTP error logs
g_iisMailField = {
"date" : 0,
"time" : 1,
"c-ip" : 2, #Source IP
"c-port" : 3,
"s-ip" : 4, #Dest IP
"s-port" : 5,
"cs-version" : 6,
"cs-method" : 7,
"cs-uri" : 8,
"sc-status" : 9
#"s-siteid" :10,
#"s-reason" :11,
#"s-queuename" :12
}
'''
#Fields: date time s-ip cs-method cs-uri-stem cs-uri-query s-port cs-username c-ip cs(User-Agent) cs(Referer) sc-status sc-substatus sc-win32-status time-taken
#Use the dictionary below for NO2 web logs
g_iisMailField = {
"date" : 0,
"time" : 1,
"s-ip" : 2,
"cs-method" : 3,
"cs-uri-stem" : 4,
"cs-uri-query" : 5,
"s-port" : 6,
"cs-username" : 7,
"c-ip" : 8,
"cs(User-Agent)" : 9,
"cs(Referer)" :10,
"sc-status" :11,
"sc-substatus" :12,
"sc-win32-status":13,
"time-taken" :14
}
#Use the dictionary below for NO7 web logs
'''
g_iisMailField = {
"date" : 0,
"time" : 1,
"s-ip" : 3,
"cs-method" : 4,
"cs-uri-stem" : 5,
"cs-uri-query" : 6,
"s-port" : 7,
"cs-username" : 8,
"c-ip" : 9,
"cs(User-Agent)" :10,
"sc-status" :11,
"sc-substatus" :12,
"sc-win32-status":13,
"time-taken" :14
}
'''
#===========================signalHandler()====================================
def signalHandler(signal, frame):
    # SIGINT handler: set the shared stop flag; workers poll g_stop and finish
    # the files they have open before exiting.
    # NOTE(review): the parameter `signal` shadows the `signal` module inside
    # this function (harmless here, but confusing).
    global g_stop
    g_stop = True
    print "Caught an interrupt signal. Cleaning up; this may take a few minutes,"
    print "as it will complete processing the logs that are currently open."

signal.signal(signal.SIGINT, signalHandler)
#===========================processFile()======================================
def processFile(filename):
    """Parse one IIS log file and bulk-index its entries into Elasticsearch.

    Verifies the file's #Fields header matches g_logformat, converts each log
    line into a bulk-API JSON action, and PUTs batches of 2000 lines to
    http://localhost:9200/<g_index>/log/_bulk.

    NOTE(review): this function is unfinished (see the author's TODOs below):
      * `logfile` is referenced before assignment in the .tar.gz check —
        presumably `filename` was intended;
      * `arg` in the IOError handler is undefined;
      * the `except IOError as e:` near the bottom has no matching `try:` in
        the current text, so the module cannot even parse until the
        restructuring the TODOs describe is completed.
    """
    global g_stop
    if g_stop: return
    global g_numFiles
    global g_curlLock
    global g_printLock
    global g_index
    global sCurrentFile
    global g_header
    global g_iisMailField
    elastic_url = "http://localhost:9200/" + str(g_index) + "/log/_bulk"
    #json_result = ''
    # Announce progress under both locks so the counter and the print stay
    # consistent across worker processes.
    with sCurrentFile.get_lock(), g_printLock:
        sCurrentFile.value += 1
        print "Processing file: {:<50} File #{} of {}".format(filename,sCurrentFile.value,g_numFiles)
        sys.stdout.flush()
    # NOTE(review): `logfile` is not defined yet — `filename` was presumably intended.
    if logfile.endswith(".tar.gz"):
        try: logfile = tarfile.open(filename,'r')
        except IOError:
            # NOTE(review): `arg` is undefined here — should be `filename`.
            print 'cannot open', arg
    #TODO Check if tar file, otherwise, simply open as a normal plaintext file
    #TODO move everything below this to an else statement; add and except for error handling
    #TODO Don't forget to close the file!
    #Opened file successfully
    #Grab the 4th line and strip the newline
    for i in range(0,4):
        line = logfile.readline().strip()
    #If it's the same as g_logformat, then this is an exchange log. Otherwise, return False (indicates error)
    if line != g_logformat:
        with g_printLock:
            print 'Unrecognized header in ' + str(filename) + '\nHEADER: ' + line + '\nSkipping ' + str(filename)
            print "EXPECTED", g_logformat
            sys.stdout.flush()
        return
    #Reset the count
    count = 0
    #Now for every entry
    for line in logfile:
        # Start a fresh bulk payload at the beginning of each batch.
        if count == 0:
            json_result = '\n'
        #Skip comment fields
        if line.startswith("#"): continue
        #Split on whitespace
        line = line.split()
        #TODO Ensure proper # of fields
        if len(line) != 15:
            with g_printLock:
                print "Missing or extra fields in line:",line
                sys.stdout.flush()
            continue
        #Empty fields are denoted as "-", but Elastic prefers the empty string
        #TODO Remove this - it's slow
        for i in range(len(line)):
            if line[i] == "-":
                line[i] = ""
        '''
        if line[5] == "-":
            line[5] = ""
        if line[7] == "-":
            line[7] = ""
        #Escape the escape character '\'
        elif line[7].__contains__("\\"): #username field
            line[7] = line[7].replace("\\","\\\\")
        '''
        #TODO Rather than hard code these fields, have the user select an existing dictionary to use or
        #create a new one, then iterate over the keys in the dictionary and parse the logs with those mappings
        #TODO Note that some fields (e.g. time,date) require different parsing, so remove them from the dictionary
        #and handle them first, then loop over and handle the rest of the dictionary
        #e.g.:
        #for item in dictionary:
        #    json_result += Key, line[value]
        # Bulk-API format: one {"index":{}} action line, then the document.
        json_result += "{\"index\":{}}" + '\n'
        json_result += "{\"time\" : \"" + str(line[g_iisMailField["date"]]) + "T" + str(line[g_iisMailField["time"]]) +"\","
        # Strip any "%zone" suffix from the server IP before indexing.
        json_result += "\"dest_ip\" : \"" + str(line[g_iisMailField["s-ip"]].split("%")[0]) +"\","
        json_result += "\"cs_method\" : \"" + str(line[g_iisMailField["cs-method"]]) +"\","
        json_result += "\"cs_uri_stem\" : \"" + str(line[g_iisMailField["cs-uri-stem"]]) +"\","
        json_result += "\"cs_uri_query\" : \"" + str(line[g_iisMailField["cs-uri-query"]]) +"\","
        json_result += "\"dest_port\" : \"" + str(line[g_iisMailField["s-port"]]) +"\","
        json_result += "\"cs_username\" : \"" + str(line[g_iisMailField["cs-username"]]) +"\","
        json_result += "\"source_ip\" :\"" + str(line[g_iisMailField["c-ip"]]) +"\","
        json_result += "\"cs(User-Agent)\" : \"" + str(line[g_iisMailField["cs(User-Agent)"]]) +"\","
        json_result += "\"cs(Referer)\" : \"" + str(line[g_iisMailField["cs(Referer)"]]) +"\","
        json_result += "\"sc_status\" : \"" + str(line[g_iisMailField["sc-status"]]) +"\","
        #json_result += "\"sc_substatus\" : \"" + str(line[g_iisMailField["sc-substatus"]]) +"\","
        #json_result += "\"sc_win32-status\" : \"" + str(line[12]) +"\","
        json_result += "\"time_taken\" : \"" + str(line[g_iisMailField["time-taken"]]) +"\"}"
        count+=1
        #Assuming we don't DOS elastic, crank up this number for increased network performance
        #2000 seems to work well
        if count == 2000:
            count = 0
            json_result += '\n'
            #Curl data
            ret = requests.put(elastic_url, data=json_result, headers=g_header)
            #print ret
            #Print to console for testing
            #TODO Remove once curl is verified to work
            # with g_printLock:
            #     print json_result
            #     sys.stdout.flush()
            # NOTE(review): `and` makes this condition always false (a status
            # cannot be both >300 and <200); the flush path below correctly
            # uses `or` — presumably `or` was intended here too.
            if (ret.status_code > 300) and (ret.status_code < 200):
                with g_printLock:
                    print "Possible error curling JSON: "
                    print "Response code is: ", ret.status_code
                    #print "Response content:", ret.text
                    sys.stdout.flush()
                logfile.close()
                return
        else:
            json_result += '\n'
    #We might reach the end of the file with fewer than 200 lines in json_result, so
    #be sure to curl that remaining data prior to returning from this function
    #I used 10 as an arbitrary # to check if it has data, as at a minimum json_result will
    # be "[\n,\n" at this point, but if it is that short string, do nothing
    if len(json_result) > 10:
        #Close the list
        json_result += '\n'
        #Curl data
        ret = requests.put(elastic_url, data=json_result, headers=g_header)
        #Print to console for testing
        #TODO Remove once curl is verified to work
        # with g_printLock:
        #     print "=========================================================="
        #     print json_result
        #     sys.stdout.flush()
        if (ret.status_code > 300) or (ret.status_code < 200):
            with g_printLock:
                print "Possible error curling JSON: "
                print "Response code is: ", ret.status_code
                #print "Response content:", ret.text
                sys.stdout.flush()
            logfile.close()
            return
        #print ret
        #print ret.text
    #json_result is empty, so be sure the close the log file prior to returning
    else:
        logfile.close()
        return
    #=============================================================================
    '''print json.dumps(data_list)# print json.dumps(data_list, sort_keys=True, indent=0)
    while (len(data_list) < 201):
        data_list.append(index_string)
        count = count + 1
        data_list.append(json_result)
        count = count + 1
        ret = 0
        if (len(data_list) == 200):
            ret = call(["curl", "-XPUT","-H","http://X.X.X.X:9200/" +str(g_index)+ "/log/_bulk", "-d", str(data_list)], stderr=FNULL,stdout=FNULL)
            if ret != 0:
                with g_printLock:
                    print "Error curling JSON: " + str(data_list) + " Return value is",ret
                    sys.stdout.flush()
                return
            count = 0
    '''
    #============================================================================
    # NOTE(review): dangling handler — there is no matching `try:` above; see
    # the docstring and the author's TODOs.
    except IOError as e:
        result = False
        if e == errno.EACCES:
            with g_printLock:
                print 'Error opening ', filename
                sys.stdout.flush()
        else:
            with g_printLock:
                print 'Unknown error encountered while opening ',filename
                print "EXCEPTION: ",e
                sys.stdout.flush()
#==============================================================================
def usage(this_script ):
    # Print command-line usage for this script (Python 2 print statement).
    print 'Usage: ' + this_script + ' <full index name> <directory of logs to send to elasticsearch>'
#==============================================================================
def main(argv):
    """Validate arguments, collect *.log files from the given directory, and
    (when the worker pool is re-enabled) index them into Elasticsearch.

    NOTE(review): the multiprocessing pool that calls processFile() is
    commented out below, so as written this only lists and counts files.
    """
    #Conduct error and sanity checking
    #Ensure that 2 args were passed in
    this_script = str(sys.argv[0])
    if len(sys.argv) != 3:
        usage(this_script )
        sys.exit(1)
    #Ensure the index provided actually exists
    index = sys.argv[1]
    # indices = check_output(["curl", "-XGET", "http://X.X.X.X:9200/_cat/indices?v"])
    # if index not in indices:
    #     print "The index: " + str(index) + " does not exist"
    #     print "Type 'curl -XGET http://X.X.X.X:9200/_cat/indices?v' in a terminal to see all indices"
    #     print "Hint: Do NOT include a trailing backslash."
    #     sys.exit(1)
    #Ensure that the final argument is a directory
    log_dir = sys.argv[2]
    if not os.path.isdir(sys.argv[2]):
        print 'ERROR: ' + str(sys.argv[2]) + ' is not a directory.'
        usage(this_script)
        sys.exit(1)
    #Convert the log_dir path to an absolute path, rather than a (possibly) relative path
    log_dir = os.path.abspath(log_dir)
    #Declare variables
    #suffixes = ('.log') #All files in the log_dir with these extensions will be processed
    files = [] #Will hold the list of files to send to elastic
    global g_stop #Flag used to terminate in case of SIGINT
    global g_index #Index name
    global g_numFiles
    g_index = index
    #Debugging statements
    #print "log_dir: " + str(log_dir)
    #print "os.listdir(log_dir)[0:5]: " + str(os.listdir(log_dir)[5])
    #print "os.path.join(log_dir,f):" + str(os.path.join(log_dir,os.listdir(log_dir)[5]))
    #Populate list of log files
    for f in os.listdir(log_dir):
        path = os.path.join(log_dir,f)
        if os.path.isfile(path) and f.endswith(".log"):
            files.append(path)
    #Sort the files (Useful in case I need to terminate early.)
    files.sort()
    g_numFiles = len(files)
    print 'About to process ' + str(len(files)) + ' files'
    #for f in files:
    #    print f
    sys.stdout.flush()
    '''
    #Parallelize!
    #Only use half the cores - save the other half for Elastic
    n = mp.cpu_count()
    pool = mp.Pool(n/2)
    pool.map(processFile, files)
    pool.close()
    pool.join()
    '''
    if g_stop:
        print "Program received interrupt signal. Exiting."
    else:
        print 'Done!'
#==============================================================================
if __name__ == '__main__':
    main(sys.argv)
|
python
|
import os
import sys
import time
import argparse
import re
from tqdm import tqdm
from colorama import Fore, Back, Style
from google.cloud import translate
'''
* @desc load input file function
* @param string file_path - input file path and name
* @param string split_on - split each line with. Default: "\n"
* @return list
'''
def load_file(file_path, split_on='\n'):
    """Read a text file and return its contents split on `split_on`.

    :param str file_path: path of the input file
    :param str split_on: separator to split the raw text on (default: newline)
    :return: list of string chunks

    Fixes: the original comment promised utf-8 but open() relied on the
    platform default encoding, and the handle leaked if read() raised; a
    context manager with an explicit encoding addresses both.
    """
    with open(file_path, "r", encoding="utf-8") as fp:
        return fp.read().split(split_on)
'''
* @desc split sentence into chunks
* @param string s - Sentence to split
* @param integer n - number of desired chunks
* @return list of chunks
'''
def chunks(s, n):
    """Split sentence *s* into exactly *n* whitespace-delimited chunks.

    The words of *s* are distributed as evenly as possible: the first
    ``len(words) % n`` chunks receive one extra word.  This fixes the old
    behaviour where, with fewer words than chunks, every word was lumped
    into the first chunk; it also removes the fragile ``words[j+1]``
    reach-ahead and the per-pass join/split state rebuilding.

    Args:
        s: Sentence to split.
        n: Number of desired chunks (must be >= 1).

    Returns:
        list[str]: Exactly *n* chunks (empty strings pad the tail when
        *s* has fewer than *n* words).
    """
    words = s.split()
    # Base words per chunk, plus how many leading chunks get one extra word.
    base, extra = divmod(len(words), n)
    result = []
    start = 0
    for i in range(n):
        size = base + (1 if i < extra else 0)
        result.append(" ".join(words[start:start + size]))
        start += size
    return result
'''
* @desc translate a text with GCP Translate API
* @param string text - Text to translate
* @param integer project_id - GCP project ID where API is enable
* @param integer source_language - Source language of file
* @param integer target_language - Target language
* @return list with translated sentences
'''
def translate_text(text="YOUR_TEXT_TO_TRANSLATE", project_id="YOUR_PROJECT_ID",source_language="en-US",target_language="fr"):
    """Translate *text* with the GCP Cloud Translation API.

    Args:
        text: Text to translate.
        project_id: GCP project ID where the Translation API is enabled.
        source_language: Source language code of the text.
        target_language: Target language code.

    Returns:
        The translated text of the first result, or None when the API
        returns no translations.
    """
    service = translate.TranslationServiceClient()
    parent_path = f"projects/{project_id}/locations/global"
    # Detail on supported types can be found here:
    # https://cloud.google.com/translate/docs/supported-formats
    request_body = {
        "parent": parent_path,
        "contents": [text],
        "mime_type": "text/plain",  # mime types: text/plain, text/html
        "source_language_code": source_language,
        "target_language_code": target_language,
    }
    reply = service.translate_text(request=request_body)
    # Return the translation of the first (and only) input text provided.
    for item in reply.translations:
        return item.translated_text
##### Main #####
if __name__ == '__main__':
    # Script parser and arguments management.
    parser = argparse.ArgumentParser()
    parser.add_argument("-p", "--project_id", help="GCP Project ID", type=str)
    parser.add_argument("-s", "--source_language", help="Set source language. Default: en-US", type=str, default="en-US")
    parser.add_argument("-t", "--target_language", help="Set target language. Default: fr", type=str, default="fr")
    parser.add_argument("ifile", help="Specify SRT file you want to translate", type=str)
    args = parser.parse_args()
    project_id = ""
    # The script needs the GCP project ID, supplied either with the
    # project_id parameter or the TRANSLATE_PROJECT_ID environment variable.
    if not args.project_id and not os.getenv('TRANSLATE_PROJECT_ID'):
        parser.print_help()
        print("\n" + Fore.GREEN + "project_id " + Fore.RESET + "parameter is required. Please set it or set TRANSLATE_PROJECT_ID env variable.\n")
        sys.exit(2)
    elif args.project_id:
        project_id = args.project_id
    else:
        project_id = os.getenv('TRANSLATE_PROJECT_ID')
    # work variables
    _len = _i = 0
    _sentence = list()
    _work_list = list()
    to_split = list()
    translated = list()
    # Read each line of input file
    for d in tqdm(load_file(args.ifile), desc="Processing SRT file", colour="blue"):
        # Accumulate text lines into a sentence; non-text lines (indexes,
        # timestamps, blanks) pass through into _work_list unchanged.
        if d and re.match('[a-zA-Z-]+',d):
            # add fake line in the _work_list to replace it with (chunked) translated sentence.
            _work_list.append("my_line" + str(_i))
            _sentence.append(d)
            _i += 1
            if not d.endswith(".") and not d.endswith("?") and not d.endswith('"') and not d.endswith('!'):
                # continue the loop if it is not the end of the sentence
                continue
            else:
                # Translate the sentence
                _len = len(_sentence)
                _translated = translate_text(" ".join(_sentence), project_id, args.source_language, args.target_language)
                # If there is more than one item (parts of sentence) in the _sentence list,
                # replace fake lines with the translated sentence split back into chunks.
                if _len > 1:
                    _to_split = chunks(_translated, _len)
                    for index, value in enumerate(_to_split):
                        for i, v in enumerate(_work_list):
                            if v == "my_line" + str(index):
                                _work_list[i] = value
                else:
                    for i, v in enumerate(_work_list):
                        if v == "my_line0":
                            _work_list[i] = _translated
                # Merge _work_list into the (final) translated list and
                # clear temporary lists and vars.
                translated += _work_list
                _work_list.clear()
                _sentence.clear()
                _i = 0
        else:
            _work_list.append(d)
    # get filename without extension
    ofile = args.ifile.split(".")
    ofile.pop()
    ofile = ".".join(ofile)
    # Write the translated subtitles. A context manager guarantees the file
    # is flushed and closed — the old code's `output.close` was missing
    # parentheses, so the handle was never actually closed.
    with open(ofile + "-translated.srt", 'w') as output:
        output.write('\n'.join(translated))
    # Ending message
    print(Fore.BLUE + "\nDone.")
    print("Output File: " + ofile + "-translated.srt\n" + Fore.RESET)
|
python
|
# Generated by Django 3.2.3 on 2021-06-01 05:05
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the 'voters' field from the
    # 'project' model in the reviews app. Depends on 0003_vote, which
    # introduced the Vote model.
    dependencies = [
        ('reviews', '0003_vote'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='project',
            name='voters',
        ),
    ]
|
python
|
from flask import render_template, request, jsonify, make_response, url_for, redirect, Blueprint
from blueprints.hotdog.static.forms.hotdog_form import FileUploadForm
from blueprints.hotdog.model.pretrained_resnet import Hotdog_Model_Resnet
from PIL import Image
from torch.nn.functional import softmax
from torch import topk
# Flask blueprint for the hotdog classifier pages; static assets are served
# from the blueprint's own static folder under a namespaced URL path.
hotdog_app = Blueprint("hotdog_app", __name__,
                       template_folder="templates",
                       static_folder="static",
                       static_url_path='/blueprints/hotdog/static')
# Model created as a global variable to cache it across requests.
model = Hotdog_Model_Resnet()
# eval() puts the network in inference mode (disables dropout/batch-norm updates).
model.model.eval()
@hotdog_app.route('/hotdog',methods=['GET','POST'])
def hotdog():
    """Serve the upload form on GET; classify the uploaded image on POST."""
    form = FileUploadForm()
    if request.method != 'POST':
        return render_template('/hotdog_app.html', form=form)
    # Decode the upload and normalise it for the pretrained network.
    image = Image.open(request.files['file']).convert('RGB')
    batch = model.transform(image).unsqueeze(0)
    logits = model.model(batch)
    probabilities = softmax(logits[0], 0)
    top_prob, top_class = topk(probabilities, 1)
    if top_class[0].item() == model.HOTDOG_CLASS:
        verdict = 'Hotdog'
    else:
        verdict = "Not Hotdog"
    return make_response(jsonify({'message': verdict}), 200)
@hotdog_app.errorhandler(413)
def too_large(e):
    # 413 Payload Too Large handler; triggered when the request body exceeds
    # the app's configured maximum content length.
    return make_response(jsonify({'message': "File too large, please make it smaller first"}), 413)
|
python
|
""" Keeps track of configured datastore indexes. """
import json
import logging
import time
from kazoo.client import NoNodeError
from kazoo.protocol.states import KazooState
from tornado import gen
from tornado.ioloop import IOLoop
from tornado.locks import Event as AsyncEvent
from appscale.common.async_retrying import retry_children_watch_coroutine
from appscale.common.datastore_index import DatastoreIndex
from appscale.datastore.zkappscale.tornado_kazoo import AsyncKazooLock
logger = logging.getLogger('appscale-admin')
class IndexInaccessible(Exception):
  """ Indicates that an index is not currently accessible. """
class ProjectIndexManager(object):
  """ Keeps track of composite index definitions for a project. """

  def __init__(self, project_id, zk_client, index_manager, datastore_access):
    """ Creates a new ProjectIndexManager.

    Args:
      project_id: A string specifying a project ID.
      zk_client: A KazooClient.
      index_manager: An IndexManager used for checking lock status.
      datastore_access: A DatastoreDistributed object.
    """
    self.project_id = project_id
    self.indexes_node = '/appscale/projects/{}/indexes'.format(self.project_id)
    self.active = True
    self.update_event = AsyncEvent()
    # Maps index IDs to the time their definition was first seen; used to
    # wait out the ZooKeeper session timeout before backfilling.
    self._creation_times = {}
    self._index_manager = index_manager
    self._zk_client = zk_client
    self._ds_access = datastore_access
    self._zk_client.DataWatch(self.indexes_node, self._update_indexes_watch)
    # Since this manager can be used synchronously, ensure that the indexes
    # are populated for this IOLoop iteration.
    try:
      encoded_indexes = self._zk_client.get(self.indexes_node)[0]
    except NoNodeError:
      encoded_indexes = '[]'
    self.indexes = [DatastoreIndex.from_dict(self.project_id, index)
                    for index in json.loads(encoded_indexes)]

  @property
  def indexes_pb(self):
    """ The project's index definitions as protobuf objects.

    Raises:
      IndexInaccessible when the ZooKeeper connection is not active.
    """
    if self._zk_client.state != KazooState.CONNECTED:
      raise IndexInaccessible('ZooKeeper connection is not active')
    return [index.to_pb() for index in self.indexes]

  @gen.coroutine
  def apply_definitions(self):
    """ Populate composite indexes that are not marked as ready yet. """
    try:
      yield self.update_event.wait()
      self.update_event.clear()
      if not self._index_manager.admin_lock.is_acquired or not self.active:
        return
      logger.info(
        'Applying composite index definitions for {}'.format(self.project_id))
      for index in self.indexes:
        if index.ready:
          continue
        # Wait until all clients have either timed out or received the new
        # index definition. This prevents entities from being added without
        # entries while the index is being rebuilt.
        creation_time = self._creation_times.get(index.id, time.time())
        consensus = creation_time + (self._zk_client._session_timeout / 1000.0)
        yield gen.sleep(max(consensus - time.time(), 0))
        yield self._ds_access.update_composite_index(
          self.project_id, index.to_pb())
        logger.info('Index {} is now ready'.format(index.id))
        self._mark_index_ready(index.id)
      # Consistency fix: use the module-level logger (was logging.info).
      logger.info(
        'All composite indexes for {} are ready'.format(self.project_id))
    finally:
      # Re-schedule so the next update_event wakes this coroutine again.
      IOLoop.current().spawn_callback(self.apply_definitions)

  def delete_index_definition(self, index_id):
    """ Removes a definition from a project's list of configured indexes.

    Args:
      index_id: An integer specifying an index ID.
    """
    try:
      encoded_indexes, znode_stat = self._zk_client.get(self.indexes_node)
    except NoNodeError:
      # If there are no index definitions, there is nothing to do.
      return
    node_version = znode_stat.version
    indexes = [DatastoreIndex.from_dict(self.project_id, index)
               for index in json.loads(encoded_indexes)]
    encoded_indexes = json.dumps([index.to_dict() for index in indexes
                                  if index.id != index_id])
    # Passing the version makes this a compare-and-set: the write fails if
    # another writer changed the node since the read above.
    self._zk_client.set(self.indexes_node, encoded_indexes,
                        version=node_version)

  def _mark_index_ready(self, index_id):
    """ Updates the index metadata to reflect the new state of the index.

    Args:
      index_id: An integer specifying an index ID.
    """
    try:
      encoded_indexes, znode_stat = self._zk_client.get(self.indexes_node)
      node_version = znode_stat.version
    except NoNodeError:
      # If for some reason the index no longer exists, there's nothing to do.
      return
    existing_indexes = [DatastoreIndex.from_dict(self.project_id, index)
                        for index in json.loads(encoded_indexes)]
    for existing_index in existing_indexes:
      if existing_index.id == index_id:
        existing_index.ready = True
    indexes_dict = [index.to_dict() for index in existing_indexes]
    self._zk_client.set(self.indexes_node, json.dumps(indexes_dict),
                        version=node_version)

  @gen.coroutine
  def _update_indexes(self, encoded_indexes):
    """ Handles changes to the list of a project's indexes.

    Args:
      encoded_indexes: A string containing index node data.
    """
    encoded_indexes = encoded_indexes or '[]'
    self.indexes = [DatastoreIndex.from_dict(self.project_id, index)
                    for index in json.loads(encoded_indexes)]
    # Mark when indexes are defined so they can be backfilled later.
    self._creation_times.update(
      {index.id: time.time() for index in self.indexes
       if not index.ready and index.id not in self._creation_times})
    self.update_event.set()

  def _update_indexes_watch(self, encoded_indexes, znode_stat):
    """ Handles updates to the project's indexes node.

    Args:
      encoded_indexes: A string containing index node data.
      znode_stat: A kazoo.protocol.states.ZnodeStat object.
    """
    if not self.active:
      # Returning False tells kazoo to cancel this watch.
      return False
    IOLoop.current().add_callback(self._update_indexes, encoded_indexes)
class IndexManager(object):
  """ Keeps track of configured datastore indexes. """
  # The node which keeps track of admin lock contenders.
  ADMIN_LOCK_NODE = '/appscale/datastore/index_manager_lock'

  def __init__(self, zk_client, datastore_access, perform_admin=False):
    """ Creates a new IndexManager.

    Args:
      zk_client: A kazoo.client.KazooClient object.
      datastore_access: A DatastoreDistributed object.
      perform_admin: A boolean specifying whether or not to perform admin
        operations.
    """
    self.projects = {}
    self._wake_event = AsyncEvent()
    self._zk_client = zk_client
    self.admin_lock = AsyncKazooLock(self._zk_client, self.ADMIN_LOCK_NODE)
    # TODO: Refactor so that this dependency is not needed.
    self._ds_access = datastore_access
    self._zk_client.ensure_path('/appscale/projects')
    self._zk_client.ChildrenWatch('/appscale/projects', self._update_projects)
    # Since this manager can be used synchronously, ensure that the projects
    # are populated for this IOLoop iteration.
    project_ids = self._zk_client.get_children('/appscale/projects')
    self._update_projects_sync(project_ids)
    if perform_admin:
      IOLoop.current().spawn_callback(self._contend_for_admin_lock)

  def _update_projects_sync(self, new_project_ids):
    """ Updates the list of the deployment's projects.

    Args:
      new_project_ids: A list of strings specifying current project IDs.
    """
    for project_id in new_project_ids:
      if project_id not in self.projects:
        self.projects[project_id] = ProjectIndexManager(
          project_id, self._zk_client, self, self._ds_access)
        if self.admin_lock.is_acquired:
          IOLoop.current().spawn_callback(
            self.projects[project_id].apply_definitions)
    # Robustness fix: iterate over a snapshot of the keys — deleting from
    # the dict while iterating a live keys() view raises RuntimeError on
    # Python 3 (on Python 2, keys() already returned a copy).
    for project_id in list(self.projects.keys()):
      if project_id not in new_project_ids:
        self.projects[project_id].active = False
        del self.projects[project_id]

  def _update_projects(self, project_ids):
    """ Watches for changes to the list of existing projects.

    Args:
      project_ids: A list of strings specifying current project IDs.
    """
    persistent_update_projects = retry_children_watch_coroutine(
      '/appscale/projects', self._update_projects_sync)
    IOLoop.instance().add_callback(persistent_update_projects, project_ids)

  def _handle_connection_change(self):
    """ Notifies the admin lock holder when the connection changes. """
    IOLoop.current().add_callback(self._wake_event.set)

  @gen.coroutine
  def _contend_for_admin_lock(self):
    """
    Waits to acquire an admin lock that gives permission to apply index
    definitions. The lock is useful for preventing many servers from writing
    the same index entries at the same time. After acquiring the lock, the
    individual ProjectIndexManagers are responsible for mutating state whenever
    a project's index definitions change.
    """
    while True:
      # Set up a callback to get notified if the ZK connection changes.
      self._wake_event.clear()
      self._zk_client.add_listener(self._handle_connection_change)
      yield self.admin_lock.acquire()
      try:
        for project_index_manager in self.projects.values():
          IOLoop.current().spawn_callback(
            project_index_manager.apply_definitions)
        # Release the lock if the kazoo client gets disconnected.
        yield self._wake_event.wait()
      finally:
        self.admin_lock.release()
|
python
|
from territory import Territory
from player import Player
from continent import Continent
class BoardUtils:
    """Stateless helpers for evaluating the board state."""

    @staticmethod
    def get_continent_ratio(source: Continent, player: Player):
        """Return the ratio of *player*'s troops to all other troops on *source*.

        Returns float("inf") when no enemy troops are present on the continent.
        """
        owned = sum(t.troops for t in source.territories if t.ruler == player)
        hostile = sum(t.troops for t in source.territories if t.ruler != player)
        return owned / hostile if hostile != 0 else float("inf")
|
python
|
# coding: utf-8
# Copyright (c) Henniggroup.
# Distributed under the terms of the MIT License.
"""
This package contains modules to generate various input files for running VASP calculations and parse the relevant outputs which are required for our defect-related analysis.
Currently, only io interfacing with VASP is provided,
but users are welcome to add similar functionality for other codes.
"""
|
python
|
class Solution:
    def largestOverlap(self, A: List[List[int]], B: List[List[int]]) -> int:
        """Return the maximum overlap of 1-cells between A and any shift of B.

        For every pair of 1-cells (one from A, one from B) the translation
        vector between them is counted; the most common vector is the shift
        producing the largest overlap.  This runs in O(|A1| * |B1|) instead
        of the original's extra (2n-1)^2 factor from scanning every shift.
        """
        from collections import Counter  # local import: snippet has no import section
        n = len(A)
        ones_a = [(i, j) for i in range(n) for j in range(n) if A[i][j] == 1]
        ones_b = [(i, j) for i in range(n) for j in range(n) if B[i][j] == 1]
        shifts = Counter((ax - bx, ay - by)
                         for ax, ay in ones_a
                         for bx, by in ones_b)
        # No overlap is possible when either image has no 1s.
        return max(shifts.values()) if shifts else 0
|
python
|
# Test script for the array module. Uses MicroPython's uarray when present,
# falling back to CPython's array; printed output is compared against an
# expected-output file, so the code below must not change behaviour.
try:
    import uarray as array
except ImportError:
    try:
        import array
    except ImportError:
        print("SKIP")
        raise SystemExit
# basic construction, len() and indexing (including negative indexes)
a = array.array('B', [1, 2, 3])
print(a, len(a))
i = array.array('I', [1, 2, 3])
print(i, len(i))
print(a[0])
print(i[-1])
a = array.array('l', [-1])
print(len(a), a[0])
# equality across signed/unsigned typecodes holding the same values
a1 = array.array('l', [1, 2, 3])
a2 = array.array('L', [1, 2, 3])
print(a2[1])
print(a1 == a2)
# Empty arrays
print(len(array.array('h')))
print(array.array('i'))
# bool operator acting on arrays
print(bool(array.array('i')))
print(bool(array.array('i', [1])))
# containment, with incorrect type
print('12' in array.array('B', b'12'))
print([] in array.array('B', b'12'))
# bad typecode
try:
    array.array('X')
except ValueError:
    print("ValueError")
# equality (CPython requires both sides are array)
print(bytes(array.array('b', [0x61, 0x62, 0x63])) == b'abc')
print(array.array('b', [0x61, 0x62, 0x63]) == b'abc')
print(array.array('B', [0x61, 0x62, 0x63]) == b'abc')
print(array.array('b', [0x61, 0x62, 0x63]) != b'abc')
print(array.array('b', [0x61, 0x62, 0x63]) == b'xyz')
print(array.array('b', [0x61, 0x62, 0x63]) != b'xyz')
print(b'abc' == array.array('b', [0x61, 0x62, 0x63]))
print(b'abc' == array.array('B', [0x61, 0x62, 0x63]))
print(b'abc' != array.array('b', [0x61, 0x62, 0x63]))
print(b'xyz' == array.array('b', [0x61, 0x62, 0x63]))
print(b'xyz' != array.array('b', [0x61, 0x62, 0x63]))
# equality between every signed typecode and its unsigned counterpart
compatible_typecodes = []
for t in ["b", "h", "i", "l", "q"]:
    compatible_typecodes.append((t, t))
    compatible_typecodes.append((t, t.upper()))
for a, b in compatible_typecodes:
    print(array.array(a, [1, 2]) == array.array(b, [1, 2]))
# subclassing array.array keeps comparison behaviour
class X(array.array):
    pass
print(bytes(X('b', [0x61, 0x62, 0x63])) == b'abc')
print(X('b', [0x61, 0x62, 0x63]) == b'abc')
print(X('b', [0x61, 0x62, 0x63]) != b'abc')
print(X('b', [0x61, 0x62, 0x63]) == array.array('b', [0x61, 0x62, 0x63]))
print(X('b', [0x61, 0x62, 0x63]) != array.array('b', [0x61, 0x62, 0x63]))
# other comparisons: <, <=, >, >= for all unsigned typecodes
for typecode in ["B", "H", "I", "L", "Q"]:
    a = array.array(typecode, [1, 1])
    print(a < a)
    print(a <= a)
    print(a > a)
    print(a >= a)
    al = array.array(typecode, [1, 0])
    ab = array.array(typecode, [1, 2])
    print(a < al)
    print(a <= al)
    print(a > al)
    print(a >= al)
    print(a < ab)
    print(a <= ab)
    print(a > ab)
    print(a >= ab)
|
python
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Resolve the installed distribution's version; fall back to a stub string
# when the metadata lookup fails (e.g. not installed via setuptools).
try:
    from pkg_resources import get_distribution
    version = get_distribution("pythia").version
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are no
    # longer swallowed; any metadata/import failure still falls back cleanly.
    version = "Could not get version information via pkg_resources."
__version__ = version
def copyright():
    """Return the journal package's copyright notice string."""
    return "journal: Copyright (c) 1998-2005 Michael A.G. Aivazis"
# End of file
|
python
|
import sys
import mariadb
import time
def export_to_db(dbconfig, temperature, weatherCode, windSpeed, windDirection):
    """Insert one weather measurement row into the configured MariaDB table.

    Args:
        dbconfig: Mapping with connection settings ('username', 'password',
            'host', 'port', 'databaseName') plus 'measurementsTableName'
            and 'sensorId'.
        temperature, weatherCode, windSpeed, windDirection: Measurement
            values stored as Value1..Value4.
    """
    # Connect to MariaDB Platform
    try:
        conn = mariadb.connect(
            user=dbconfig['username'],
            password=dbconfig['password'],
            host=dbconfig['host'],
            port=dbconfig['port'],
            database=dbconfig['databaseName']
        )
    except mariadb.Error as e:
        print(f"Error connecting to MariaDB: {e}")
        return
    # Fix: guarantee the connection is closed even when statement
    # construction or execution raises something other than mariadb.Error
    # (the old code leaked the connection in that case).
    try:
        cursor = conn.cursor()
        cDate = time.strftime('%Y-%m-%d %H:%M:%S')
        # The table name cannot be a bound parameter; it comes from local
        # configuration, not user input.
        sql = "INSERT INTO " + dbconfig['measurementsTableName'] + \
            " (SensorID, Date, Value1, Value2, Value3, Value4) VALUES (%s, %s, %s, %s, %s, %s)"
        # NOTE(review): MariaDB Connector/Python documents the qmark ('?')
        # paramstyle; confirm '%s' placeholders work with the deployed version.
        var = (str(dbconfig['sensorId']),
               cDate,
               temperature,
               weatherCode,
               windSpeed,
               windDirection)
        try:
            cursor.execute(sql, var)
            conn.commit()
        except mariadb.Error as e:
            print(f"Error: {e}")
    finally:
        conn.close()
|
python
|
# Modules
import discord
from json import loads, dumps
from discord.ext import commands
from assets.prism import Tools, Constants
# Main Command Class
class Warn(commands.Cog):
    """Moderation cog: lets staff warn members and stores warnings on disk."""

    def __init__(self, bot):
        self.bot = bot
        self.desc = "Warns a member on the server"
        self.usage = "warn [user] [reason]"

    @commands.command()
    @commands.has_permissions(manage_messages = True)
    async def warn(self, ctx, user: discord.Member = None, *, reason: str = None):
        """Warn *user* with an optional *reason* (max 115 chars, 15 warnings per guild)."""
        if not user:
            return await ctx.send(embed = Tools.error("No user specified to warn."))
        elif user.id == ctx.author.id:
            return await ctx.send(embed = Tools.error("You cannot warn yourself."))
        elif user.id == self.bot.user.id:
            return await ctx.send(embed = Tools.error("You cannot warn me; I'm a bot."))
        elif reason and len(reason) > 115:
            return await ctx.send(embed = Tools.error("Your reason is too long."))
        else:
            if not reason:
                reason = ""
            # Fix: read/write the database with context managers so the file
            # handles are closed (the old open(...).read()/write() leaked them).
            with open("db/users", "r") as db_file:
                db = loads(db_file.read())
            if not str(user.id) in db:
                db[str(user.id)] = Constants.user_preset
            if not str(ctx.guild.id) in db[str(user.id)]["data"]["warnings"]:
                db[str(user.id)]["data"]["warnings"][str(ctx.guild.id)] = [f"{ctx.author.mention}: {reason}"]
            else:
                if len(db[str(user.id)]["data"]["warnings"][str(ctx.guild.id)]) == 15:
                    return await ctx.send(embed = Tools.error(f"{user.name} has the max amount of warnings (15)."))
                db[str(user.id)]["data"]["warnings"][str(ctx.guild.id)].append(f"{ctx.author.mention}: {reason}")
            with open("db/users", "w") as db_file:
                db_file.write(dumps(db, indent = 4))
            try:
                embed = discord.Embed(title = f"You have been warned on {ctx.guild.name}.", description = reason, color = 0x126bf1)
                embed.set_author(name = " | Warning", icon_url = self.bot.user.avatar_url)
                embed.set_footer(text = f" | Warned by {ctx.author}.", icon_url = ctx.author.avatar_url)
                await user.send(embed = embed)
            except:
                # The member's privacy settings may block DMs; note that on
                # the public warning instead.
                reason = f"{reason}\nThis is just a server warning due to privacy settings."
            embed = discord.Embed(title = f"{ctx.author.name} just warned {user.name}.", description = reason, color = 0x126bf1)
            embed.set_author(name = " | Warn", icon_url = self.bot.user.avatar_url)
            embed.set_footer(text = f" | Warned by {ctx.author}.", icon_url = ctx.author.avatar_url)
            return await ctx.send(embed = embed)
# Link to bot
def setup(bot):
    # discord.py extension entry point: registers the Warn cog with the bot.
    bot.add_cog(Warn(bot))
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
class GlObjectsConan(ConanFile):
    """Conan recipe that builds globjects from its upstream git repository."""
    name = "globjects"
    version = "2.0.0"
    description = "Cross platform C++ wrapper for OpenGL API objects"
    url = ""
    homepage = "https://github.com/cginternals/globjects"
    author = "fishbupt <[email protected]>"
    license = "MIT"
    settings = "os", "compiler", "build_type", "arch"
    # Directory created by the git clone in source().
    extracted_dir = "globjects"
    no_copy_source = True
    generators = "cmake"
    requires = ("glfw/[^3.2.1]@fishbupt/latest",
                "glbinding/[^3.0.2]@fishbupt/latest",
                "glm/0.9.9.3@fishbupt/latest")
    def source(self):
        # Clone upstream, then inject the conan setup right after project()
        # so the CMake build picks up conan-provided dependencies.
        self.run("git clone https://github.com/cginternals/globjects.git")
        tools.replace_in_file("{}/CMakeLists.txt".format(self.extracted_dir), "project(${META_PROJECT_NAME} C CXX)",
                              '''project(${META_PROJECT_NAME} C CXX)
include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
conan_basic_setup()''')
    def build(self):
        # Configure/build/install with tests disabled.
        cmake = CMake(self)
        cmake.definitions["OPTION_BUILD_TESTS"]="OFF"
        cmake.configure(source_folder=self.extracted_dir)
        cmake.build()
        cmake.install()
    def package(self):
        # Copying static and dynamic libs
        self.copy(pattern="*.a", dst="lib", src=".", keep_path=False)
        self.copy(pattern="*.lib", dst="lib", src=".", keep_path=False)
        self.copy(pattern="*.dll", dst="bin", src=".", keep_path=False)
        self.copy(pattern="*.so*", dst="lib", src=".", keep_path=False)
        self.copy(pattern="*.dylib*", dst="lib", src=".", keep_path=False)
    def package_info(self):
        # Debug builds link against the 'd'-suffixed library name.
        if self.settings.build_type == "Debug":
            self.cpp_info.libs = ["globjectsd"]
        else:
            self.cpp_info.libs = ["globjects"]
|
python
|
from .base import BaseCFObject
class Metadata(BaseCFObject):
    """Object stored under the template's top-level 'Metadata' key."""
    top_level_key = 'Metadata'
|
python
|
from django.shortcuts import render, HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout
from tracker.models import Expense
from datetime import datetime, timedelta
import json
@login_required(login_url='/signin/')
def index(request):
    """Render the expenses page with the signed-in user's expenses."""
    context = {}
    user_expenses = Expense.objects.filter(owner=request.user)
    # Materialise to a list: on Python 3, map() returns a one-shot lazy
    # iterator, which breaks if the template iterates it more than once.
    context['expenses'] = [expense.to_json() for expense in user_expenses]
    return render(request, 'tracker/expenses.html', context)
@login_required(login_url='/signin/')
def get_expenses(request):
    """Return expenses as JSON: the caller's own, or everyone's for staff.

    POST params:
        all_users: 'true' to request all users' expenses (staff only).
    """
    all_users = request.POST['all_users']
    # Check if user is an admin to fetch all saved expenses if requested.
    # Fix: materialise map() results to lists — on Python 3 json.dumps()
    # raises TypeError on a map object.
    if all_users == 'true' and request.user.is_staff:
        everyones_expenses = [e.to_json() for e in Expense.objects.all()]
        success_data = {'status': 'success', 'expenses': everyones_expenses}
        return HttpResponse(json.dumps(success_data))
    elif all_users == 'false':
        user_expenses = [e.to_json()
                         for e in Expense.objects.filter(owner=request.user)]
        success_data = {'status': 'success', 'expenses': user_expenses}
        return HttpResponse(json.dumps(success_data))
    else:
        failed_data = {'status': 'failed',
                       'message': 'You must be an admin to view everybody\'s expenses!'}
        return HttpResponse(json.dumps(failed_data))
def signin(request):
    """Show the sign-in page, or bounce already-authenticated users home."""
    if request.user.is_authenticated:
        return HttpResponseRedirect('/')
    return render(request, 'tracker/signin.html')
def logout(request):
    # End the session and send the user back to the sign-in page.
    auth_logout(request)
    return HttpResponseRedirect('/signin/')
def login(request):
    """Authenticate POSTed credentials and start a session."""
    if request.method != 'POST':
        return HttpResponseRedirect('/')
    user = authenticate(username=request.POST["inputUsername"],
                        password=request.POST["inputPassword"])
    if user is None:
        # Return an 'invalid login' error message.
        context = {'login_error': 'Your username and password didn\'t match. Please try again.'}
        return render(request, 'tracker/signin.html', context)
    auth_login(request, user)
    # Redirect to a success page.
    return HttpResponseRedirect('/')
@login_required(login_url='/signin/')
def create_new_expense(request):
    """Create an expense for the signed-in user from POSTed form fields.

    Expects 'expense_description', 'expense_amount' (float string) and
    'expense_date_time' ('YYYY-MM-DD HH:MM'); responds with JSON.
    """
    if request.user.is_authenticated and request.method == 'POST':
        try:
            description = request.POST['expense_description']
            amount = request.POST['expense_amount']
            date_time = request.POST['expense_date_time']
            # Normalise to two decimal places before saving.
            amount = round(float(amount),2)
            datetime_object = datetime.strptime(date_time, '%Y-%m-%d %H:%M')
            new_expense = Expense(owner=request.user, date_time=datetime_object, amount=amount, description=description)
            new_expense.save()
            # NOTE(review): '%-d' / '%-I' are glibc-only strftime flags and
            # raise on Windows — confirm the deployment platform.
            success_data = {'status': 'success', 'new_id': new_expense.id, 'readable_date': new_expense.date_time.strftime("%b. %-d, %Y, %-I:%M %p")}
            return HttpResponse(json.dumps( success_data ))
        except:
            # Broad except intentionally converts any parse/save failure
            # into a JSON error response.
            failed_data = {'status': 'failed', 'message': 'Failed to create expense, try again!'}
            return HttpResponse(json.dumps(failed_data))
    else:
        failed_data = {'status': 'failed', 'message': 'Failed to create expense, try again!'}
        return HttpResponse(json.dumps(failed_data))
@login_required(login_url='/signin/')
def delete_expense(request):
    """Delete an expense owned by the requesting user (POST only)."""
    if request.method == 'POST':
        try:
            expense_id = request.POST['expense_id']
            # Filtering on owner ensures users can only delete their own rows.
            expense = Expense.objects.get(id=expense_id, owner=request.user)
            expense.delete()
            return HttpResponse('success')
        except:
            # Broad except maps a missing key or missing/foreign row to a
            # plain-text error response.
            return HttpResponse('You cannot delete an expense you do not own or does not exist!')
    else:
        return HttpResponse('Failed to delete expense!')
@login_required(login_url='/signin/')
def update_expense(request):
    """Update an owned expense from POSTed fields; respond with JSON."""
    if request.method == 'POST':
        try:
            expense_id = request.POST['expense_id']
            updated_expense_description = request.POST['updated_expense_description']
            updated_expense_amount = request.POST['updated_expense_amount']
            updated_date_time = request.POST['updated_date_time']
            updated_expense_amount = round(float(updated_expense_amount),2)
            updated_date_time = datetime.strptime(updated_date_time, '%Y-%m-%d %H:%M')
        except:
            # Bug fix: previously execution fell through to the lookup below
            # with undefined locals, so the client always got the wrong
            # 'cannot update' message. Report the parse failure directly.
            failed_data = {'status': 'failed', 'message': 'Failed to fetch updated data!'}
            return HttpResponse(json.dumps(failed_data))
        try:
            current_expense = Expense.objects.get(id=expense_id, owner=request.user)
            current_expense.description = updated_expense_description
            current_expense.amount = updated_expense_amount
            current_expense.date_time = updated_date_time
            current_expense.save()
            success_data = {'status': 'success', 'updated_expense': current_expense.to_json()}
            return HttpResponse(json.dumps(success_data))
        except:
            failed_data = {'status': 'failed', 'message': 'You cannot update an expense you do not own or does not exist!'}
    else:
        failed_data = {'status': 'failed', 'message': 'Failed to update expense!'}
    return HttpResponse(json.dumps(failed_data))
@login_required(login_url='/signin/')
def get_report(request):
    """Group the user's expenses by week and return totals as JSON.

    The response maps each week's Monday date string to
    [week_end_date, total_amount, [expense dicts]].
    """
    try:
        user_expenses = Expense.objects.filter(owner=request.user)
        expenses_per_week = {}
        for exp in user_expenses:
            # Finding start and end date of week of this expense
            dt = exp.date_time
            start = dt - timedelta(days=dt.weekday())
            end = start + timedelta(days=6)
            # Keep only the date portion ('YYYY-MM-DD') of the datetime string.
            start_week_date = str(start).split(' ')[0]
            end_week_date = str(end).split(' ')[0]
            week = expenses_per_week.get(start_week_date)
            if week:
                week[1] += float(exp.amount)
                week[2].append(exp.to_json())
                expenses_per_week[start_week_date] = week
            else:
                expenses_per_week[start_week_date] = [end_week_date, float(exp.amount), [exp.to_json()]]
        success_data = {'status': 'success', 'expenses_per_week': expenses_per_week}
        return HttpResponse(json.dumps(success_data))
    except:
        # Broad except keeps the endpoint JSON-only even on unexpected errors.
        failed_data = {'status': 'failed', 'message': 'Could not get expense report!'}
        return HttpResponse(json.dumps(failed_data))
@login_required(login_url='/signin/')
def filter_expenses(request):
    """Return the user's expenses between POSTed start/end datetimes as JSON.

    POST params 'start_date' and 'end_date' use the 'YYYY-MM-DD HH:MM' format.
    """
    if request.method == 'POST':
        try:
            start_date = request.POST['start_date']
            end_date = request.POST['end_date']
            start_date = datetime.strptime(start_date, '%Y-%m-%d %H:%M').replace(tzinfo=None)
            end_date = datetime.strptime(end_date, '%Y-%m-%d %H:%M').replace(tzinfo=None)
            user_expenses = Expense.objects.filter(owner=request.user)
            expenses_between = []
            for exp in user_expenses:
                # Compare as naive datetimes so stored tz-aware values can be
                # matched against the naive form input.
                exp_date_time = exp.date_time.replace(tzinfo=None)
                if start_date <= exp_date_time and exp_date_time <= end_date:
                    expenses_between.append(exp.to_json())
            # (Removed Python-2-only debug `print len(...)` statements, which
            # are syntax errors under Python 3.)
            success_data = {'status': 'success', 'expenses_between': expenses_between}
            return HttpResponse(json.dumps(success_data))
        except:
            failed_data = {'status': 'failed', 'message': 'Failed to filter expenses!'}
            return HttpResponse(json.dumps(failed_data))
    else:
        failed_data = {'status': 'failed', 'message': 'Failed to filter expenses!'}
        # Bug fix: json.dumps was passed as a function object instead of
        # being called with the payload.
        return HttpResponse(json.dumps(failed_data))
|
python
|
# -*- coding: utf-8 -*-
from sbscraper import product
from sbscraper.transform import base
class RedMartProductTransformer(base.ProductTransformer):
    """Transforms RedMart data to :class:`~sbscraper.product.Product`."""

    API_VERSION = 'v1.5.6'

    def get_currency(self, datum):
        # RedMart only deals in SGD.
        return 'SGD'

    def get_current_price(self, datum):
        """Return the effective price (promo price while on sale) as a string."""
        pricing = datum.get('pricing', {})
        key = 'promo_price' if pricing.get('on_sale') else 'price'
        value = pricing.get(key)
        return str(value) if value else value

    def get_description(self, datum):
        return datum.get('desc')

    def get_original_price(self, datum):
        """Return the undiscounted price as a string."""
        value = datum.get('pricing', {}).get('price')
        return str(value) if value else value

    def get_title(self, datum):
        return datum.get('title')
|
python
|
# Counting Sundays
WEEKDAYS = ("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday",
            "Sunday")
MONTHS = ("January", "February", "March", "April", "May", "June", "July",
          "August", "September", "October", "November", "December")
# Day counts per month: 30-day months listed explicitly, everything else 31,
# then February fixed up to its non-leap default of 28.
_THIRTY_DAY_MONTHS = ("September", "April", "June", "November")
month_dict = {m: 30 if m in _THIRTY_DAY_MONTHS else 31 for m in MONTHS}
month_dict["February"] = 28
# Calendar cursor: the walk starts on Monday, 1 January 1900.
curr_weekday = WEEKDAYS[0]
curr_day = 1
curr_month = MONTHS[0]
curr_year = 1900
# for i in range(31):
# curr_day = i
# print(f"{curr_day}. {curr_month} {curr_year}")
for year in range(1900, 2001):
if year % 4 == 0:
days = 29
else:
days = 28
print(f"{year} has {days} days!")
|
python
|
import sys
import os
import getopt
import pygame

pygame.init()

# Base directories of the three asset packs.
_GRASS_DIR = os.path.join('sprite_art', 'Multi_Platformer_Tileset_v2', 'Grassland')
_DUNGEON_DIR = os.path.join('sprite_art', '2D Pixel Dungeon Asset Pack', 'character and tileset')
_CHAR_DIR = os.path.join('sprite_art', 'Jungle Asset Pack', 'Character', 'sprites')


def _load(*parts):
    # Small helper: join path parts and load the image through pygame.
    return pygame.image.load(os.path.join(*parts))


# Terrain / background layers.
grass_tiles = _load(_GRASS_DIR, 'Terrain', 'Grass_Tileset.png')
ground_tiles = _load(_GRASS_DIR, 'Background', 'GrassLand_Background_3.png')
background = _load(_GRASS_DIR, 'Background', 'GrassLand_Background_2.png')
further_background = _load(_GRASS_DIR, 'Background', 'GrassLand_Background_1.png')

# Six tiles that make up the large floating platform.
plat_top_left = _load(_GRASS_DIR, 'Terrain', 'top_left.png')
plat_top_middle = _load(_GRASS_DIR, 'Terrain', 'top_middle.png')
plat_top_right = _load(_GRASS_DIR, 'Terrain', 'top_right.png')
plat_bottom_left = _load(_GRASS_DIR, 'Terrain', 'bottom_left.png')
plat_bottom_middle = _load(_GRASS_DIR, 'Terrain', 'bottom_middle.png')
plat_bottom_right = _load(_GRASS_DIR, 'Terrain', 'bottom_right.png')
platform = _load(_GRASS_DIR, 'Terrain', 'platform.png')

# NPC sprites.
skeleton = _load(_DUNGEON_DIR, 'skeleton.png')
cleric = _load(_DUNGEON_DIR, 'cleric.png')
knight = _load(_DUNGEON_DIR, 'knight.png')

# Player run cycle (run2.png is not part of the loaded sequence) and its
# horizontally mirrored counterpart for running left.
_RUN_FRAMES = (0, 1, 3, 4, 5, 6, 7, 8)
Run_right = [_load(_CHAR_DIR, 'run%d.png' % i) for i in _RUN_FRAMES]
Run_left = [pygame.transform.flip(_load(_CHAR_DIR, 'run%d.png' % i), True, False)
            for i in _RUN_FRAMES]

# Jump / idle animations.
MidAir = [_load(_CHAR_DIR, 'mid_air%d.gif' % i) for i in (1, 2)]
idle = [_load(_CHAR_DIR, 'idle%d.gif' % i) for i in range(1, 13)]

pygame.display.init()
pygame.display.set_mode()

# Window and player state.
sky = (173, 216, 230)
X = 600
Y = 600
X_pos = 0
left = False
right = False
chillCount = 0
display_surface = pygame.display.set_mode((X, Y))
def redrawGameWindow():
    """Render one frame: sky, parallax backgrounds, platforms, NPCs and
    the animated player sprite, then update the display.

    Relies on module-level state: the flags (vspeed, left, right) choose
    the animation, and the counters (runCount, airTicks, chillCount)
    advance it.  Each counter is wrapped so that ``counter // 3`` stays a
    valid frame index (8 run frames, 2 mid-air frames, 12 idle frames).
    """
    global runCount
    global vspeed
    global airTicks
    global chillCount
    display_surface.fill(sky)
    # Background layers, blitted back to front for a parallax look.
    display_surface.blit(further_background, (0, Y - 520))
    display_surface.blit(background, (0, Y - 460))
    display_surface.blit(ground_tiles, (0, Y - 400))
    # Large platform assembled from six tiles (2 rows x 3 columns).
    display_surface.blit(plat_top_left, (350, 0.75 * Y - 50))
    display_surface.blit(plat_top_middle, (375, 0.75 * Y - 50))
    display_surface.blit(plat_top_right, (400, 0.75 * Y - 50))
    display_surface.blit(plat_bottom_left, (350, 0.75 * Y - 25))
    display_surface.blit(plat_bottom_middle, (375, 0.75 * Y - 25))
    display_surface.blit(plat_bottom_right, (400, 0.75 * Y - 25))
    # Single-tile platform with the three NPCs drawn on top of it.
    display_surface.blit(platform, (450, 0.75 * Y - 100))
    display_surface.blit(skeleton, (450, 0.75 * Y - 125))
    display_surface.blit(cleric, (480, 0.75 * Y - 125))
    display_surface.blit(knight, (510, 0.75 * Y - 125))
    # Wrap the animation counters; each frame is held for 3 ticks.
    if runCount + 1 >= 24:
        runCount = 0
    if airTicks + 1 >= 6:
        airTicks = 0
    if chillCount + 1 >= 36:
        chillCount = 0
    # Pick the player animation: airborne > running > idle.
    if not vspeed == 0:
        display_surface.blit(MidAir[airTicks//3], (40,325))
        airTicks += 1
    elif left:
        display_surface.blit(Run_left[runCount//3], (40,350))
        runCount += 1
    elif right:
        display_surface.blit(Run_right[runCount//3], (40,350))
        runCount += 1
    else:
        display_surface.blit(idle[chillCount//3], (40,350))
        chillCount += 1
    pygame.display.update()
# Main loop: poll input, update animation flags, redraw at ~25 FPS.
game = True
while game :
    # One frame every 40 ms.
    pygame.time.delay(40)
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            game = False
    keys = pygame.key.get_pressed()
    # UP only switches to the mid-air animation in redrawGameWindow();
    # vspeed does not move the sprite (no physics implemented yet).
    if keys[pygame.K_UP]:
        vspeed = 4
        chillCount = 0
    else:
        vspeed = 0
        airTicks = 0
    # LEFT/RIGHT select the run animation; releasing both resets to idle.
    if keys[pygame.K_LEFT]:
        left = True
        right = False
        chillCount = 0
    elif keys[pygame.K_RIGHT]:
        right = True
        left = False
        chillCount = 0
    else:
        left = False
        right = False
        runCount = 0
    # Work-in-progress scrolling / jump code kept for reference:
    # if run_bool == True:
    # runCount = runCount += 1
    # for i in range(X // 200 - 1) :
    #display_surface.blit(further_background, (X_pos, 0.5 * Y))
    #display_surface.blit(background, (X_pos, 0.5 * Y))
    #display_surface.blit(ground_tiles, (X_pos, 0.5 * Y))
    #X_pos += 200
    # if player_Y < 160:
    # for i in range(8) :
    # display_surface.blit(Run[i], (30, Y-50))
    # pygame.display.update()
    redrawGameWindow()
pygame.quit()
|
python
|
import tkinter
from tkinter import *
from tkinter import messagebox
import dbhelper
def add():
    """Store the text from the entry box as a new task.

    Shows an error dialog when the entry is empty; otherwise inserts the
    task through dbhelper, clears the entry and refreshes the listbox.
    """
    # Read once and use the idiomatic emptiness check instead of
    # len(...) == 0 with a second .get() call.
    task = addtask.get()
    if not task:
        messagebox.showerror(
            "ERROR", "No data Available\nPlease Enter Some Task")
    else:
        dbhelper.insertdata(task)
        addtask.delete(0, END)
        populate()
def populate():
    """Clear the listbox and re-insert column 1 of every row returned
    by dbhelper.show()."""
    listbox.delete(0, END)
    for record in dbhelper.show():
        listbox.insert(END, record[1])
def deletetask(event):
    """Event handler: delete the task under the listbox anchor from the
    database, then refresh the displayed list."""
    anchored = listbox.get(ANCHOR)
    dbhelper.deletebytask(anchored)
    populate()
# --- main window: fixed-size dark-themed TODO app --------------------
main = tkinter.Tk()
main.title("TODO")
main.geometry("500x600")
main.resizable(False, False)
main.configure(
    background="#1d1d1d",
)
# Header label.
tkinter.Label(
    main,
    text="Task Manager",
    background="#1d1d1d",
    foreground="#eeeeee",
    font=("Verdana 20")
).pack(pady=10)
# Row holding the task entry and the "add" button.
addframe = tkinter.Frame(
    main,
    bg="#1d1d1d",
)
addframe.pack()
addtask = tkinter.Entry(
    addframe,
    font=("Verdana"),
    background="#eeeeee",
)
addtask.pack(ipadx=20, ipady=5, side="left")
addbtn = tkinter.Button(
    addframe,
    text="ADD TASK",
    command=add,
    background="#000000",
    foreground="#eeeeee",
    relief="flat",
    font=("Verdana"),
    highlightcolor="#000000",
    activebackground="#1d1d1d",
    border=0,
    activeforeground="#eeeeee",
)
addbtn.pack(padx=20, ipadx=20, ipady=5)
tkinter.Label(
    main,
    text="Your Tasks",
    background="#1d1d1d",
    foreground="#eeeeee",
    font=("Calibri", 18),
).pack(pady=10)
# Scrollable task list.
taskframe = tkinter.Frame(
    main,
    bg="#1d1d1d",
)
taskframe.pack(fill=BOTH, expand=300)
scrollbar = Scrollbar(taskframe)
scrollbar.pack(side=RIGHT, fill=Y)
listbox = Listbox(
    taskframe,
    font=("Verdana 18 bold"),
    bg="#1d1d1d",
    fg="#eeeeee",
    selectbackground="#eeeeee",
    selectforeground="#1d1d1d",
)
listbox.pack(fill=BOTH, expand=300)
# Wire scrollbar and listbox together.
listbox.config(yscrollcommand=scrollbar.set)
scrollbar.config(command=listbox.yview)
# Double-click or the Delete key removes the anchored task.
listbox.bind("<Double-Button-1>", deletetask)
listbox.bind("<Delete>", deletetask)
populate()
tkinter.Label(
    main,
    text="TIP : Double Click On A Task to Delete",
    background="#1d1d1d",
    foreground="#FFEB3B",
    font=("Calibri 18"),
).pack(side=BOTTOM, pady=10)
main.mainloop()
|
python
|
# This file is part of the clacks framework.
#
# http://clacks-project.org
#
# Copyright:
# (C) 2010-2012 GONICUS GmbH, Germany, http://www.gonicus.de
#
# License:
# GPL-2: http://www.gnu.org/licenses/gpl-2.0.html
#
# See the LICENSE file in the project's top-level directory for details.
import string
import os
import sys
import shutil
import hashlib
import subprocess
import gnupg
import tempfile
import urllib2
import gettext
import gzip
import bz2
import logging
from urlparse import urlparse
from types import StringTypes
from sqlalchemy.orm.exc import NoResultFound
from clacks.common.utils import downloadFile
try:
# pylint: disable-msg=F0401
from debian import debfile, deb822 #@UnusedImport
except:
# pylint: disable-msg=E0611
from debian_bundle import debfile, deb822 #@UnresolvedImport @Reimport
from clacks.common import Environment
from clacks.common.utils import N_
from libinst.interface import DistributionHandler
from libinst.entities.architecture import Architecture
from libinst.entities.component import Component
from libinst.entities.file import File
from libinst.entities.release import Release
from libinst.entities.repository import Repository, RepositoryKeyring
from libinst.entities.section import Section
from libinst.entities.type import Type
from libinst.repo.deb.entities.package import DebianPackage
from libinst.repo.deb.entities.priority import DebianPriority
from libinst.repo.deb.entities.distribution import DebianDistribution
from libinst.repo.deb.entities.release import DebianRelease
# pylint: disable-msg=E0611
from pkg_resources import resource_filename #@UnresolvedImport
# Include locales
# Load the 'messages' gettext catalogue shipped with libinst.repo.deb;
# fall back to untranslated strings when no catalogue is installed.
t = gettext.translation('messages', resource_filename("libinst.repo.deb", "locale"), fallback=True)
_ = t.ugettext
class DebianHandler(DistributionHandler):
"""
Configuration keys for section **repository**
+------------------+------------+-------------------------------------------------------------+
+ Key | Format + Description |
+==================+============+=============================================================+
+ rollback | Boolean + Keep packages to be able to roll back in certain cases. |
+------------------+------------+-------------------------------------------------------------+
+ path | String + Base path to the repository. |
+------------------+------------+-------------------------------------------------------------+
"""
    def __init__(self, LibinstManager):
        # Keep references to the shared environment, a module logger and
        # the owning libinst manager (used e.g. for getPackages lookups).
        self.env = Environment.getInstance()
        self.log = logging.getLogger(__name__)
        self.manager = LibinstManager
    @staticmethod
    def getRepositoryTypes():
        """Return the Debian package types this handler supports."""
        return ['deb', 'udeb', 'dsc']
def createDistribution(self, session, name, mirror=None):
result = DebianDistribution(name)
if mirror:
result.managed = False
result.origin = mirror
else:
result.managed = True
return result
    def removeDistribution(self, session, distribution, recursive=False):
        """Delete the distribution's directory tree below the repository
        path.

        Returns True when the tree existed and was removed, None
        otherwise; filesystem errors are re-raised to the caller.
        """
        result = None
        dists_path = os.sep.join((distribution.repository.path, distribution.name))
        try:
            if os.path.exists(dists_path):
                shutil.rmtree(dists_path)
                result = True
        except:
            raise
        return result
def createRelease(self, session, distribution, name):
result = None
parent = None
if distribution:
if '/' in name:
instance = self._getRelease(session, name.rsplit('/', 1)[0])
if instance is None:
raise ValueError(N_("Parent release {parent} was not found").format(parent=parent))
else:
parent = instance
result = DebianRelease(name=name, parent=parent)
return result
    def removeRelease(self, session, release, recursive=False):
        """Delete the release's dists/ subtree from the repository.

        *release* may be a Release entity or its name.  Returns True when
        the tree existed and was removed, None otherwise; filesystem
        errors are re-raised.
        """
        result = None
        if isinstance(release, StringTypes):
            release = self._getRelease(session, release)
        # Nested releases ("parent/child") map onto nested directories.
        dists_path = os.sep.join((release.distribution.repository.path, release.distribution.name, "dists", release.name.replace('/', os.sep)))
        try:
            if os.path.exists(dists_path):
                shutil.rmtree(dists_path)
                result = True
        except:
            raise
        return result
def updateMirror(self, session, distribution=None, releases=None, components=None, architectures=None, sections=None):
result = None
if not (distribution or releases):
raise ValueError(N_("Need either a distribution or a list of releases"))
if distribution is not None and distribution.releases is None:
raise ValueError(N_("Given distribution %s contains no Releases"), distribution.name)
for release in releases if releases else distribution.releases:
if isinstance(release, StringTypes):
release = self._getRelease(session, release)
for component in components if components else distribution.components:
if isinstance(component, StringTypes):
component = self._getComponent(session, component)
packages = self.manager.getPackages(release=release, component=component)
self.log.info(N_("Searching for updates for release '{distribution}/{release}'").format(distribution=release.distribution.name, release=release.name))
# Binary
for architecture in architectures if architectures else distribution.architectures:
if isinstance(architecture, StringTypes):
architecture = self._getArchitecture(architecture)
if architecture.name in ('all', 'source'):
continue
packagelist = self.getMirrorPackageList(session, release, component, architecture)
with file(packagelist) as packages_file:
for package in deb822.Packages.iter_paragraphs(packages_file):
if 'Package' in package:
if sections and 'Section' in package and package['Section'] not in sections:
continue
if not package['Package'] in [p['name'] for p in packages]:
self.log.debug("Adding package '%s' from URL '%s'" % (package['Package'], distribution.origin + "/" + package['Filename']))
self.addPackage(
session,
distribution.origin + "/" + package['Filename'],
release=release.name,
component=component.name,
section=package['Section'],
origin=distribution.origin + "/" + package['Filename'],
updateInventory=False
)
try:
session.commit()
except:
session.rollback()
raise
else:
existing_packages = [p for p in packages if p['name'] == package['Package']]
if package['Architecture'] not in [p['arch'] for p in existing_packages]:
self.log.debug("Adding package '%s' from URL '%s'" % (package['Package'], distribution.origin + "/" + package['Filename']))
self.addPackage(
session,
distribution.origin + "/" + package['Filename'],
release=release.name,
component=component.name,
section=package['Section'],
origin=distribution.origin + "/" + package['Filename'],
updateInventory=False
)
try:
session.commit()
except:
session.rollback()
raise
elif package['Version'] not in [p['version'] for p in existing_packages]:
self.log.debug("Upgrading package '%s' from URL '%s'" % (package['Package'], distribution.origin + "/" + package['Filename']))
self.addPackage(
session,
distribution.origin + "/" + package['Filename'],
release=release.name,
component=component.name,
section=package['Section'],
origin=distribution.origin + "/" + package['Filename'],
updateInventory=False
)
try:
session.commit()
except:
session.rollback()
raise
else:
# package already present in archive
pass
os.unlink(packagelist)
result = True
# Source
if release.distribution.mirror_sources:
sourcelist = self.getMirrorSourceList(session, release, component)
for source in deb822.Sources.iter_paragraphs(file(sourcelist)):
if 'Package' in source:
if sections and 'Section' in source and source['Section'] not in sections:
continue
if not source['Package'] in [s['name'] for s in packages]:
self.log.debug("Adding source package '%s' from URL '%s'" % (source['Package'], '/'.join((distribution.origin, source['Directory'], [f['name'] for f in source['Files']][0]))))
self.addPackage(
session,
'/'.join((distribution.origin, source['Directory'], [f['name'] for f in source['Files']][0])),
release=release.name,
component=component.name,
section=source['Section'],
origin='/'.join((distribution.origin, source['Directory'], [f['name'] for f in source['Files']][0])),
updateInventory=False
)
try:
session.commit()
except:
session.rollback()
raise
else:
existing_packages = [s for s in packages if s['name'] == source['Package']]
if source['Version'] not in [p['version'] for p in existing_packages]:
self.log.debug("Upgrading source package '%s' from URL '%s'" % (source['Package'], distribution.origin + "/" + [f['name'] for f in source['Files']][0]))
self.addPackage(
session,
'/'.join((distribution.origin, source['Directory'], [f['name'] for f in source['Files']][0])),
release=release.name,
component=component.name,
section=source['Section'],
origin='/'.join((distribution.origin, source['Directory'], [f['name'] for f in source['Files']][0])),
updateInventory=False
)
try:
session.commit()
except:
session.rollback()
raise
os.unlink(sourcelist)
self._updateInventory(session, release=release, distribution=distribution)
self.log.info(N_("Done searching for updates for release '{distribution}/{release}'").format(distribution=release.distribution.name, release=release.name))
try:
session.commit()
except:
session.rollback()
raise
return result
    def getKernelPackageFilter(self):
        """Return the package-name prefix used to identify kernel packages."""
        return "linux-image"
    def addPackage(self, session, url, distribution=None, release=None, origin=None, component=None, section=None, updateInventory=True):
        """Import the package at *url* into the repository.

        The file is parsed, moved into the pool/ hierarchy, symlinked
        below dists/ and registered in the DB for *release*.  Returns the
        new DebianPackage, or None when the exact (name, arch, version)
        is already present or when removing an older version failed.
        """
        # Accept names as well as entities for distribution/release.
        if distribution:
            if isinstance(distribution, (str, unicode)):
                distribution = self._getDistribution(session, distribution)
            distribution = session.merge(distribution)
        if release:
            if isinstance(release, (str, unicode)):
                release = self._getRelease(session, release)
            release = session.merge(release)
        result, url = self._getPackageFromUrl(session, url, origin=origin, component=component, section=section)
        session.add(result)
        if release:
            # TODO: Find a better way to code this
            # Scan the release: same name/arch/version -> already present;
            # same name/arch but different version -> upgrade (or downgrade).
            present = False
            upgrade = False
            for p in release.packages:
                if p.name == result.name and p.arch.name == result.arch.name and p.version == result.version:
                    self.log.warning("Package %s | version %s | arch %s already present!" % (result.name, result.version, result.arch.name))
                    present = True
                elif p.name == result.name and p.arch.name == result.arch.name and p.version != result.version:
                    upgrade = True
                elif p.name == result.name and p.arch.name != result.arch.name and p.version == result.version:
                    pass
            if present:
                result = None
            else:
                if upgrade: # upgrade also means downgrade
                    if not self.removePackage(session, result.name, arch=result.arch, release=release):
                        result = None
            if result is not None:
                # Copy file to pool
                pool_path = os.sep.join((release.distribution.repository.path, "pool", release.distribution.type.name, result.component.name))
                # Debian pool layout: 'lib*' names are bucketed by their
                # first four characters, all others by the first one.
                if result.source is not None:
                    if result.source.startswith('lib'):
                        pool_path += os.sep + str(result.source).lower()[:4]
                    else:
                        pool_path += os.sep + str(result.source).lower()[0]
                    pool_path += os.sep + result.source.lower()
                else:
                    if result.name.startswith('lib'):
                        pool_path += os.sep + str(result.name).lower()[:4]
                    else:
                        pool_path += os.sep + str(result.name).lower()[0]
                    pool_path += os.sep + result.name.lower()
                if not os.path.exists(pool_path):
                    os.makedirs(pool_path)
                # Move the downloaded files out of their temp dir.
                basedir = os.path.dirname(url)
                for fle in result.files:
                    if not os.path.exists(pool_path + os.sep + fle.name):
                        shutil.move(basedir + os.sep + fle.name, pool_path + os.sep + fle.name)
                    if os.path.exists(basedir + os.sep + fle.name):
                        os.unlink(basedir + os.sep + fle.name)
                os.removedirs(basedir)
                # Manage DB content, update associative properties
                release.packages.append(result)
                if not result.arch in release.distribution.architectures:
                    release.distribution.architectures.append(result.arch)
                if not result.component in release.distribution.components:
                    release.distribution.components.append(result.component)
                if not result.section in release.distribution.sections:
                    release.distribution.sections.append(result.section)
                # Create symlink
                dists_path = os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), result.component.name))
                if result.type.name == 'deb':
                    dists_path += os.sep + "binary-" + result.arch.name
                elif result.type.name == 'dsc':
                    dists_path += os.sep + "source"
                if not os.path.exists(dists_path):
                    os.makedirs(dists_path)
                # Relative symlinks are created from inside dists_path.
                current_dir = os.getcwd()
                os.chdir(dists_path)
                for fle in result.files:
                    if os.path.exists(fle.name):
                        os.unlink(fle.name)
                    os.symlink(os.path.relpath(pool_path + os.sep + fle.name), fle.name)
                os.chdir(current_dir)
                if updateInventory:
                    self._updateInventory(session, release=release, distribution=distribution)
        return result
def removePackage(self, session, package, arch=None, release=None, distribution=None):
result = None
if isinstance(arch, StringTypes):
arch = self._getArchitecture(session, arch)
if isinstance(package, StringTypes) and arch is None:
for p in self._getPackages(session, package):
self.log.debug("Removing package %s/%s/%s/%s" % (distribution, release, package if isinstance(package, StringTypes) else package.name), p.arch.name)
self.removePackage(session, p, arch=p.arch, release=release, distribution=distribution)
elif isinstance(package, StringTypes):
self.log.debug("Removing package %s/%s/%s/%s" % (distribution, release, package if isinstance(package, StringTypes) else package.name), arch.name)
package = self._getPackage(session, package, arch=arch)
elif distribution is not None:
self.log.debug("Removing package %s/%s/%s/%s" % (distribution, release, package.name, package.arch.name))
if isinstance(release, StringTypes):
release = self._getRelease(session, release)
if isinstance(distribution, StringTypes):
distribution = self._getDistribution(session, distribution)
if package is not None:
if release is not None:
if package in release.packages[:]:
dists_path = os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), package.component.name))
if package.type.name == 'deb':
dists_path += os.sep + "binary-" + package.arch.name
elif package.type.name == 'dsc':
dists_path += os.sep + "source"
else:
raise ValueError(N_("Unknown package type {type}").format(type=package.type.name))
for fle in package.files:
if os.path.exists(dists_path + os.sep + fle.name):
try:
os.unlink(dists_path + os.sep + fle.name)
except:
self.log.error("Could not remove file %s" % dists_path + os.sep + fle.name)
raise
else:
self.log.error("Strange: %s does not exist!" % dists_path + os.sep + fle.name)
release.packages.remove(package)
self._updateInventory(session, release=release)
result = True
pool_path = os.sep.join((release.distribution.repository.path, "pool", release.distribution.type.name, package.component.name))
rollback_path = os.sep.join((release.distribution.repository.path, "rollback", release.distribution.type.name, package.component.name))
if not self.env.config.get('repository.rollback') == False and not os.path.exists(rollback_path):
os.makedirs(rollback_path)
try:
# Move package to rollback pool, remove row if no release is linked
if not package.releases:
if package.source is not None:
if package.source.startswith('lib'):
pool_path += os.sep + str(package.source).lower()[:4]
pool_path += os.sep + package.source.lower()
else:
pool_path += os.sep + str(package.source).lower()[0]
pool_path += os.sep + package.source.lower()
else:
if package.name.startswith('lib'):
pool_path += os.sep + str(package.name).lower()[:4]
pool_path += os.sep + package.name.lower()
else:
pool_path += os.sep + str(package.name).lower()[0]
pool_path += os.sep + package.name.lower()
for fle in package.files:
package_path = pool_path + os.sep + fle.name
if os.path.exists(package_path):
if not self.env.config.get('repository.rollback') == False:
shutil.move(package_path, rollback_path + os.sep + fle.name)
else:
os.unlink(package_path) # Remove package file
session.delete(fle)
session.delete(package)
except:
self.log.error("Could not remove file %s" % package_path)
raise
try:
os.removedirs(pool_path) # Remove leaf dirs
except OSError:
pass
except:
raise
elif distribution is not None:
if distribution.releases:
result = True
for release in distribution.releases:
self.removePackage(session, package, release=release, distribution=distribution, arch=arch)
else:
distributions = []
if package.releases is not None:
for release in package.releases:
if release.distribution not in distributions:
distributions.append(release.distribution)
if distributions:
result = True
for distribution in distributions:
result &= self.removePackage(session, package, distribution=distribution, release=release, arch=arch)
return result
def _getDistribution(self, session, name):
try:
result = session.query(DebianDistribution).filter_by(name=name).one()
except:
result = None
return result
def _getRelease(self, session, name):
try:
result = session.query(Release).filter_by(name=name).one()
except:
result = None
return result
def _getPackage(self, session, name, arch=None, version=None):
try:
result = session.query(DebianPackage).filter_by(name=name)
if arch:
result = result.filter_by(arch=arch)
if version:
result = result.filter_by(version=version)
result = result.one()
except:
result = None
return result
def _getPackages(self, session, name):
try:
result = session.query(DebianPackage).filter_by(name=name).all()
except:
result = None
return result
def getMirrorSourceList(self, session, release, component):
result = None
local_file = None
for extension in (".bz2", ".gz", ""):
try:
sources_file = "/".join((release.distribution.origin, "dists", release.name, component.name, "source", "Sources" + extension))
local_file = downloadFile(sources_file)
if sources_file.endswith(".bz2"):
with file(local_file + ".asc", 'wb', os.O_CREAT) as uncompressed_file:
uncompressed_file.writelines(bz2.BZ2File(local_file, 'rb').read())
os.unlink(local_file)
os.rename(local_file + ".asc", local_file)
elif sources_file.endswith(".gz"):
with file(local_file + ".asc", 'wb', os.O_CREAT) as uncompressed_file:
uncompressed_file.writelines(gzip.GzipFile(local_file, 'rb').read())
os.unlink(local_file)
os.rename(local_file + ".asc", local_file)
break
except urllib2.HTTPError, e:
if e.code == 404:
continue
raise
except:
raise
if not local_file:
raise ValueError(N_("Could not download a Sources file for {release}/{component}").format(release=release.name, component=component.name))
return result
result = local_file
return result
def getMirrorPackageList(self, session, release, component, architecture):
result = None
local_file = None
if architecture.name == "all":
raise ValueError(N_("Refusing to download Packages for architecture {architecture}").format(architecture=architecture.name))
for extension in (".bz2", ".gz", ""):
try:
packages_file = "/".join((release.distribution.origin, "dists", release.name, component.name, "binary-" + architecture.name, "Packages" + extension))
local_file = downloadFile(packages_file)
if packages_file.endswith(".bz2"):
with file(local_file + ".asc", 'wb', os.O_CREAT) as uncompressed_file:
uncompressed_file.writelines(bz2.BZ2File(local_file, 'rb').read())
os.unlink(local_file)
os.rename(local_file + ".asc", local_file)
elif packages_file.endswith(".gz"):
with file(local_file + ".asc", 'wb', os.O_CREAT) as uncompressed_file:
uncompressed_file.writelines(gzip.GzipFile(local_file, 'rb').read())
os.unlink(local_file)
os.rename(local_file + ".asc", local_file)
break
except urllib2.HTTPError, e:
if e.code == 404:
continue
raise
except:
raise
if not local_file:
raise ValueError(N_("Could not download a Packages file for {release}/{component}/{architecture}").format(release=release.name, component=component.name, architecture=architecture.name))
return result
result = local_file
return result
def _getPackageFromUrl(self, session, url, origin=None, component=None, section=None):
    """
    Build a DebianPackage object from a .deb or .dsc file reachable via
    *url*. http/https/ftp URLs are downloaded first; bare paths are used
    as local files.

    @return: tuple of (package object, local file path)
    """
    result = None
    o = urlparse(url)
    if o.scheme in ("http", "https", "ftp"):
        # Need to download file first
        url = downloadFile(o.geturl(), use_filename=True)
    elif o.scheme == "":
        # Local file
        pass
    else:
        raise ValueError(N_("Unknown URL '%s'!"), url)
    if url.endswith('.deb'):
        # Binary package: read all metadata from the control file.
        deb = debfile.DebFile(url)
        control = deb.debcontrol()
        if 'Package' in control:
            result = DebianPackage(control.get("Package"))
            # First description line is the summary; the remaining lines
            # (prefixed with one space, Debian convention) form the long
            # description.
            description = control.get("Description").split('\n')
            result.description = description[0]
            result.long_description = string.join(map(lambda l: ' ' + l, description[1:]), '\n')
            result.version = control.get("Version")
            result.maintainer = control.get("Maintainer")
            if section:
                result.section = self._getSection(session, section, add=True)
            elif 'Section' in control:
                result.section = self._getSection(session, control.get("Section"), add=True)
            else:
                raise ValueError(N_("Package %s has no section!"), control.get("Package"))
            if component:
                result.component = self._getComponent(session, component, add=True)
            else:
                # Derive the component from a "component/section" style
                # section name, falling back to "main".
                if "/" in result.section.name:
                    result.component = self._getComponent(session, result.section.name.split("/")[0], add=True)
                else:
                    result.component = self._getComponent(session, "main", add=True)
            result.arch = self._getArchitecture(session, control.get("Architecture"), add=True)
            result.priority = self._getPriority(session, control.get("Priority"), add=True)
            result.depends = control.get("Depends")
            result.installed_size = control.get("Installed-Size")
            result.recommends = control.get("Recommends")
            result.suggests = control.get("Suggests")
            result.provides = control.get("Provides")
            result.files.append(self._getFile(session, url, add=True))
            # The package type is taken from the file extension ("deb").
            result.type = self._getType(session, str(result.files[0].name).split('.')[-1], add=True)
            result.standards_version = deb.version
            result.source = control.get("Source")
            result.origin = origin
            # Prefer an already-known package of same name/arch/version.
            existing = self._getPackage(session, result.name, arch=result.arch, version=result.version)
            if existing is not None:
                result = existing
    elif url.endswith('.dsc'):
        # Source package: parse the .dsc, fetch and md5-verify the files
        # it references, then unpack the source to find section/component.
        c = deb822.Dsc(open(url))
        if 'Source' in c:
            result = DebianPackage(c['Source'])
            result.arch = self._getArchitecture(session, 'source', add=True)
            result.version = c['Version']
            result.maintainer = c['Maintainer']
            result.build_depends = c['Build-Depends']
            result.standards_version = c['Standards-Version']
            if 'Priority' in c:
                result.priority = self._getPriority(session, c['Priority'], add=True)
            result.files.append(self._getFile(session, url, add=True))
            result.type = self._getType(session, "dsc", add=True)
            if component:
                result.component = self._getComponent(session, component, add=True)
            if section:
                result.section = self._getSection(session, section, add=True)
            if origin is not None:
                base_url = origin[0:origin.rfind(os.path.basename(url))]
                download_dir = os.path.dirname(url)
                # Download additional files
                if origin.startswith(('http', 'ftp')):
                    result.origin = origin
                    if 'Files' in c:
                        for source_file in c['Files']:
                            self.log.debug("Downloading additional file '%s'" % (base_url + source_file['name']))
                            request = urllib2.Request(base_url + source_file['name'])
                            try:
                                fle = urllib2.urlopen(request)
                                local_file = open(download_dir + os.sep + source_file['name'], "w")
                                local_file.write(fle.read())
                                local_file.close()
                                local_url = download_dir + os.sep + source_file['name']
                                # Verify the download against the md5sum
                                # recorded in the .dsc.
                                checksum = str(source_file['md5sum'])
                                if not checksum == self._get_md5_for_file(local_url):
                                    raise Exception
                                result.files.append(self._getFile(session, local_url, add=True))
                            except urllib2.HTTPError, e:
                                print "HTTP Error:", e.code, url
                                raise
                            except urllib2.URLError, e:
                                print "URL Error:", e.reason, url
                                raise
                else:
                    # Local origin: companion files are expected to sit
                    # next to the origin path.
                    download_dir = os.path.dirname(origin)
                    if 'Files' in c:
                        for source_file in c['Files']:
                            local_url = download_dir + os.sep + source_file['name']
                            if os.path.exists(local_url):
                                checksum = str(source_file['md5sum'])
                                if not checksum == self._get_md5_for_file(local_url):
                                    raise Exception
                                result.files.append(self._getFile(session, local_url, add=True))
                # We need to extract the source package to get the target section
                for fle in result.files:
                    if fle.name.endswith('.dsc'):
                        p = subprocess.Popen(
                            "LANG=C dpkg-source -x --no-check '%s'" % fle.name,
                            shell=True,
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            cwd=download_dir)
                        unpack_result = os.waitpid(p.pid, 0)[1]
                        if unpack_result > 0:
                            self.log.error(p.stderr)
                            result = False
                        # Parse the target directory name from the
                        # dpkg-source output line.
                        dir_name = None
                        for line in p.stdout:
                            if line.startswith("dpkg-source: info: extracting %s" % result.name):
                                dir_name = line[line.rfind(" ") + 1:].strip()
                        p.stdout.close()
                        # NOTE(review): if that output line is missing,
                        # dir_name stays None and the join below fails --
                        # presumably dpkg-source always prints it; confirm.
                        if os.path.exists(download_dir + os.sep + dir_name):
                            try:
                                f = deb822.Deb822(open(os.sep.join((download_dir, dir_name, "debian", "control"))))
                                if result.section is None:
                                    result.section = self._getSection(session, f['Section'], add=True)
                                if result.component is None:
                                    if component:
                                        result.component = self._getComponent(session, component, add=True)
                                    else:
                                        if "/" in result.section.name:
                                            result.component = self._getComponent(session, result.section.name.split("/")[0], add=True)
                                        else:
                                            result.component = self._getComponent(session, "main", add=True)
                                # Clean up the unpacked source tree.
                                shutil.rmtree(os.sep.join((download_dir, dir_name)))
                            except:
                                raise
    return result, url
def _get_md5_for_file(self, filename, block_size=2 ** 20):
md5 = hashlib.md5()
f = open(filename)
while True:
data = f.read(block_size)
if not data:
break
md5.update(data)
f.close()
return md5.hexdigest()
def _getPriority(self, session, name, add=False):
    """Look up the DebianPriority named *name*; create it when *add* is
    set and it does not exist yet. Returns None when absent and not added."""
    try:
        return session.query(DebianPriority).filter_by(name=name).one()
    except NoResultFound:
        if not add:
            return None
        priority = DebianPriority(name)
        session.add(priority)
        return priority
def _getComponent(self, session, name, add=False):
    """Look up the Component named *name*; create it when *add* is set
    and it does not exist yet. Returns None when absent and not added."""
    try:
        return session.query(Component).filter_by(name=name).one()
    except NoResultFound:
        if not add:
            return None
        component = Component(name)
        session.add(component)
        return component
def _getSection(self, session, name, add=False):
    """Look up the Section named *name*; create it when *add* is set
    and it does not exist yet. Returns None when absent and not added."""
    try:
        return session.query(Section).filter_by(name=name).one()
    except NoResultFound:
        if not add:
            return None
        section = Section(name)
        session.add(section)
        return section
def _getArchitecture(self, session, name, add=False):
    """Look up the Architecture named *name*; create it when *add* is set
    and it does not exist yet. Returns None when absent and not added."""
    try:
        return session.query(Architecture).filter_by(name=name).one()
    except NoResultFound:
        if not add:
            return None
        arch = Architecture(name)
        session.add(arch)
        return arch
def _getFile(self, session, url, add=False):
    """
    Look up the File record for the basename of *url*. When *add* is set
    and no record exists, create one; if the path exists locally its
    md5sum and size are filled in as well.
    """
    result = None
    try:
        result = session.query(File).filter_by(name=os.path.basename(url)).one()
    except NoResultFound:
        if add:
            result = File(name=os.path.basename(url))
            if os.path.exists(url):
                # BUGFIX: use a context manager so the handle is closed
                # even if reading raises (the original leaked it then).
                m = hashlib.md5()
                with open(url, 'rb') as infile:
                    m.update(infile.read())
                result.md5sum = m.hexdigest()
                result.size = os.path.getsize(url)
            session.add(result)
    return result
def _getType(self, session, name, add=False):
    """Look up the Type named *name*; create it when *add* is set and it
    does not exist yet. Returns None when absent and not added."""
    try:
        return session.query(Type).filter_by(name=name).one()
    except NoResultFound:
        if not add:
            return None
        type_entry = Type(name)
        session.add(type_entry)
        return type_entry
def _getGPGEnvironment(self, session):
    """
    Prepare a temporary GnuPG home directory holding the repository's
    signing key and return its path.

    When the repository has no keyring yet, a 1024-bit RSA key is
    generated and persisted on the repository record; otherwise the
    stored keyring is imported. The caller must remove the returned
    directory when done (see _signFile).
    """
    work_dir = tempfile.mkdtemp()
    repository = session.query(Repository).filter_by(path=self.env.config.get('repository.path')).one()
    gpg = gnupg.GPG(gnupghome=work_dir)
    if not repository.keyring:
        self.log.debug("Generating GPG Key")
        input_data = gpg.gen_key_input(key_type="RSA", key_length=1024)
        key = gpg.gen_key(input_data)
        # Export including the secret key (True) so future calls can sign.
        repository.keyring = RepositoryKeyring(name=key.fingerprint, data=gpg.export_keys(key, True))
        self.log.debug("Exported key '%s' to repository" % key)
    else:
        gpg.import_keys(repository.keyring.data)
        #self.log.debug("Using existing secret key '%s'" % repository.keyring.name)
    result = work_dir
    return result
def _signFile(self, session, filename):
    """
    Write an ASCII-armored signature of *filename* to "<filename>.gpg"
    using the repository's key.

    @return: True on success, False when *filename* does not exist.
    """
    result = False
    if os.path.exists(filename):
        work_dir = self._getGPGEnvironment(session)
        gpg = gnupg.GPG(gnupghome=work_dir)
        # BUGFIX: use context managers so both handles are closed even
        # when signing raises; the original "try/except: raise" was a
        # no-op and leaked the handles on error.
        with open(filename, 'rb') as stream:
            signed_ascii_data = str(gpg.sign_file(stream))
        with open(filename + ".gpg", 'wb') as sign_file:
            sign_file.write(signed_ascii_data)
        shutil.rmtree(work_dir)
        result = True
    return result
def _updateInventory(self, session, release=None, distribution=None):
    """
    Regenerate the APT index files (Sources/Packages plus .gz/.bz2
    variants) and the signed Release file for one release - recursively
    including its child releases - or for every release of a
    distribution.

    @return: True when every index-generation step succeeded.
    """
    result = True
    if release:
        # Accept either a release name or a release object.
        if isinstance(release, (str, unicode)):
            release = self._getRelease(session, release)
        release._initDirs()
        # Create Packages.gz and Packages.bz2
        for component in release.distribution.components:
            for architecture in release.distribution.architectures:
                if architecture.name == 'source':
                    # Source architecture: dpkg-scansources + compressors.
                    p = subprocess.Popen(
                        "dpkg-scansources . > Sources",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, architecture.name)))
                    packages_result = os.waitpid(p.pid, 0)[1]
                    if packages_result > 0:
                        self.log.error(p.stderr)
                        result = False
                    p = subprocess.Popen(
                        "gzip -c Sources > Sources.gz",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, architecture.name)))
                    packages_result = os.waitpid(p.pid, 0)[1]
                    if packages_result > 0:
                        self.log.error(p.stderr)
                        result = False
                    p = subprocess.Popen(
                        "bzip2 -c Sources > Sources.bz2",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, architecture.name)))
                    packages_result = os.waitpid(p.pid, 0)[1]
                    if packages_result > 0:
                        self.log.error(p.stderr)
                        result = False
                else:
                    # Binary architecture: dpkg-scanpackages + compressors.
                    p = subprocess.Popen(
                        "dpkg-scanpackages . > Packages",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, 'binary-' + architecture.name)))
                    packages_result = os.waitpid(p.pid, 0)[1]
                    if packages_result > 0:
                        self.log.error(p.stderr)
                        result = False
                    p = subprocess.Popen(
                        "gzip -c Packages > Packages.gz",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, 'binary-' + architecture.name)))
                    gzip_result = os.waitpid(p.pid, 0)[1]
                    if gzip_result > 0:
                        self.log.error(p.stderr)
                        result = False
                    p = subprocess.Popen(
                        "bzip2 -c Packages > Packages.bz2",
                        shell=True,
                        stderr=subprocess.PIPE,
                        cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), component.name, 'binary-' + architecture.name)))
                    bzip_result = os.waitpid(p.pid, 0)[1]
                    if bzip_result > 0:
                        self.log.error(p.stderr)
                        result = False
        # Create Release files
        p = subprocess.Popen(
            "apt-ftparchive -qq -o 'APT::FTPArchive::Release::Suite=%s' -o 'APT::FTPArchive::Release::Codename=%s' release . > Release" % (release.name, release.codename if release.codename is not None else release.name),
            shell=True,
            stderr=subprocess.PIPE,
            cwd=os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep))))
        release_result = os.waitpid(p.pid, 0)[1]
        if release_result > 0:
            self.log.error(p.stderr)
            result = False
        # Sign the freshly generated Release file, if it was written.
        if os.path.exists(os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), "Release"))):
            self._signFile(session, os.sep.join((release.distribution.path, "dists", release.name.replace('/', os.sep), "Release")))
        # Recurse into child releases.
        for child in release.children:
            result = result and self._updateInventory(session, release=child.name)
    elif distribution:
        # Accept either a distribution name or a distribution object.
        if isinstance(distribution, (str, unicode)):
            distribution = self._getDistribution(session, distribution)
        for release in distribution.releases:
            result = result and self._updateInventory(session, release=release)
    return result
def renameRelease(self, session, source, target):
    """
    Rename release *source* to *target*, regenerate the inventory for
    the new name and remove the old on-disk release directory.

    @return: True when the rename completed (inventory failures are
        logged but do not fail the call, matching previous behavior).
    @raise: re-raises any exception after logging it.
    """
    result = False
    source_path = os.sep.join((source.distribution.repository.path, source.distribution.name, "dists", source.name.replace('/', os.sep)))
    try:
        source._rename(target)
        result = self._updateInventory(session, release=target)
        if not result:
            self.log.error("Updating inventory for release '%s' failed!" % target)
        if os.path.exists(source_path):
            shutil.rmtree(source_path)
        result = True
    except:
        self.log.error("Caught unknown exception %s" % sys.exc_info()[0])
        # BUGFIX: a dead "result = False" after this re-raise could never
        # execute and has been removed.
        raise
    return result
|
python
|
import io
import os
import re
from setuptools import find_packages, setup
# allow setup.py to be run from any path: chdir to the directory containing
# this setup.py so relative paths (e.g. README.md) resolve correctly
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
def read(filename):
    """Return *filename*'s UTF-8 text with Sphinx roles (":role:`~x.y`")
    rewritten as plain reST literals ("``x.y``")."""
    path = os.path.join(os.path.dirname(__file__), filename)
    text_type = type(u"")
    with io.open(path, mode="r", encoding='utf-8') as handle:
        contents = handle.read()
    return re.sub(text_type(r':[a-z]+:`~?(.*?)`'), text_type(r'``\1``'), contents)
# Package metadata for the fluentassert distribution; the long description
# is the README with Sphinx roles rewritten to plain literals.
setup(
    name='fluentassert',
    version='0.0.1',
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'pyyaml>=4.2b1',
    ],
    license='MIT License',
    description='Fluent assertions framework for Python',
    long_description=read("README.md"),
    long_description_content_type='text/markdown',
    url='https://github.com/sheikhjebran/fluentassert',
    author='Sheikh Jebran',
    author_email='[email protected]',
    python_requires='>3.5.0',
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3 :: Only",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Topic :: Software Development :: Libraries"],
    keywords='check python fluent fluent-interface assertion-library assertions testing fluent-assertions fluentcheck',
    # NOTE(review): package is named 'fluentassert' but the test suite points
    # at 'fluentcheck.tests' -- confirm this mismatch is intentional.
    test_suite='fluentcheck.tests'
)
|
python
|
from toontown.coghq.SpecImports import *
GlobalEntities = {1000: {'type': 'levelMgr',
'name': 'LevelMgr',
'comment': '',
'parentEntId': 0,
'cogLevel': 0,
'farPlaneDistance': 1500,
'modelFilename': 'phase_11/models/lawbotHQ/LB_Zone08a',
'wantDoors': 1},
1001: {'type': 'editMgr',
'name': 'EditMgr',
'parentEntId': 0,
'insertEntity': None,
'removeEntity': None,
'requestNewEntity': None,
'requestSave': None},
0: {'type': 'zone',
'name': 'UberZone',
'comment': '',
'parentEntId': 0,
'scale': 1,
'description': '',
'visibility': []},
10001: {'type': 'battleBlocker',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10011,
'pos': Point3(-1.02925205231, 87.0907745361, 11.8959827423),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'cellId': 0,
'radius': 10.0},
10006: {'type': 'battleBlocker',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10011,
'pos': Point3(-60.9065246582, -3.26905798912, 0.117109239101),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'cellId': 1,
'radius': 15.0},
10047: {'type': 'battleBlocker',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10013,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Point3(1.0, 0.20000000298, 1.0),
'cellId': 2,
'radius': 20.0},
10041: {'type': 'gagBarrel',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10033,
'pos': Point3(5.40611028671, 0.0, 0.0),
'hpr': Vec3(199.440032959, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'gagLevel': 5,
'gagLevelMax': 0,
'gagTrack': 'random',
'rewardPerGrab': 4,
'rewardPerGrabMax': 6},
10034: {'type': 'healBarrel',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10033,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(163.300750732, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'rewardPerGrab': 7,
'rewardPerGrabMax': 9},
10015: {'type': 'boardOfficeProductPallet',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1,
'mintId': 12500},
10016: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 13.6865262985, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10017: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (2)',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 27.3799991608, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10018: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (3)',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 41.0699996948, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10019: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (4)',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 54.7599983215, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10020: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (5)',
'comment': '',
'parentEntId': 10014,
'pos': Point3(0.0, 68.4499969482, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10022: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10021,
'pos': Point3(0.0, 11.766998291, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10025: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (4)',
'comment': '',
'parentEntId': 10045,
'pos': Point3(0.0, 54.7599983215, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10026: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (5)',
'comment': '',
'parentEntId': 10045,
'pos': Point3(0.0, 68.4499969482, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10036: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10035,
'pos': Point3(0.0, 13.6865262985, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10037: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (2)',
'comment': '',
'parentEntId': 10035,
'pos': Point3(0.0, 27.3799991608, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10038: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (3)',
'comment': '',
'parentEntId': 10035,
'pos': Point3(0.0, 41.0699996948, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10043: {'type': 'boardOfficeProductPallet',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10007,
'pos': Point3(-36.662399292, -39.0314712524, 0.0),
'hpr': Point3(90.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12500},
10044: {'type': 'boardOfficeProductPallet',
'name': 'copy of <unnamed> (2)',
'comment': '',
'parentEntId': 10021,
'pos': Point3(0.0, 25.4739685059, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'mintId': 12700},
10004: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10021,
'pos': Point3(0.0, -1.09804749489, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(2.0, 2.0, 2.0),
'collisionsOnly': 0,
'flattenType': 'strong',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10009: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10008,
'pos': Point3(-3.9962117672, 0.695078849792, 0.0113303475082),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.20000004768, 1.20000004768, 1.20000004768),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_E.bam'},
10010: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10008,
'pos': Point3(48.0530014038, -0.531660735607, -0.327078670263),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_C1.bam'},
10012: {'type': 'model',
'name': 'rightCrates',
'comment': '',
'parentEntId': 10007,
'pos': Point3(36.0373382568, 71.3546981812, 9.99835586548),
'hpr': Vec3(315.0, 0.0, 0.0),
'scale': Vec3(1.5, 1.5, 1.5),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_E.bam'},
10024: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10028,
'pos': Point3(-3.7328555584, 27.1218452454, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Point3(2.0, 2.0, 2.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10027: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10028,
'pos': Point3(-11.9349050522, 38.9528312683, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Point3(2.0, 2.0, 2.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10029: {'type': 'model',
'name': 'crate',
'comment': '',
'parentEntId': 10035,
'pos': Point3(0.0, 0.863602340221, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(2.0, 2.0, 2.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10030: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10023,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1,
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10031: {'type': 'model',
'name': 'copy of crate',
'comment': '',
'parentEntId': 10029,
'pos': Point3(0.0, 0.0, 5.46999979019),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Point3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10032: {'type': 'model',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10023,
'pos': Point3(0.0, -5.92218112946, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10039: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10010,
'pos': Point3(-9.23663234711, 0.821143984795, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.5, 1.5, 1.5),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_F1.bam'},
10042: {'type': 'model',
'name': 'copy of <unnamed> (2)',
'comment': '',
'parentEntId': 10023,
'pos': Point3(3.0, -11.8400001526, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cogHQ/CBMetalCrate2.bam'},
10048: {'type': 'model',
'name': 'cratesAgainstWall',
'comment': '',
'parentEntId': 10007,
'pos': Point3(-37.0983123779, 70.2133865356, 10.0),
'hpr': Vec3(225.0, 0.0, 0.0),
'scale': Vec3(1.5, 1.5, 1.5),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/cashbotHQ/crates_E.bam'},
10000: {'type': 'nodepath',
'name': 'cogs',
'comment': '',
'parentEntId': 10011,
'pos': Point3(0.0, 66.1200027466, 10.1833248138),
'hpr': Point3(270.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10002: {'type': 'nodepath',
'name': 'battle',
'comment': '',
'parentEntId': 10000,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Point3(90.0, 0.0, 0.0),
'scale': 1},
10003: {'type': 'nodepath',
'name': 'cogs2',
'comment': '',
'parentEntId': 10011,
'pos': Point3(-53.9246749878, -22.7616195679, 0.0),
'hpr': Point3(45.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10005: {'type': 'nodepath',
'name': 'battle',
'comment': '',
'parentEntId': 10003,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10007: {'type': 'nodepath',
'name': 'props',
'comment': '',
'parentEntId': 0,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10008: {'type': 'nodepath',
'name': 'topWall',
'comment': '',
'parentEntId': 10007,
'pos': Point3(0.0, 48.0299987793, 10.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10011: {'type': 'nodepath',
'name': 'cogs',
'comment': '',
'parentEntId': 0,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10013: {'type': 'nodepath',
'name': 'frontCogs',
'comment': '',
'parentEntId': 10011,
'pos': Point3(25.3957309723, -12.3005743027, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10014: {'type': 'nodepath',
'name': 'frontPalletWall',
'comment': '',
'parentEntId': 10007,
'pos': Point3(45.5494384766, 38.2237281799, 0.0),
'hpr': Point3(180.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10021: {'type': 'nodepath',
'name': 'middlePalletWallLeft',
'comment': '',
'parentEntId': 10046,
'pos': Point3(6.0, -37.9928665161, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10023: {'type': 'nodepath',
'name': 'crateIsland',
'comment': '',
'parentEntId': 10007,
'pos': Point3(-23.1813278198, 7.08758449554, 0.00999999977648),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(2.0, 2.0, 2.0)},
10028: {'type': 'nodepath',
'name': 'rewardCulDeSac',
'comment': '',
'parentEntId': 10045,
'pos': Point3(-8.26172065735, 38.377407074, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10033: {'type': 'nodepath',
'name': 'barrels',
'comment': '',
'parentEntId': 10028,
'pos': Point3(-4.75077962875, 34.1425209045, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10035: {'type': 'nodepath',
'name': 'backPalletWall',
'comment': '',
'parentEntId': 10007,
'pos': Point3(-47.6501731873, 40.006893158, 0.0),
'hpr': Point3(180.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10040: {'type': 'nodepath',
'name': 'centerCogs',
'comment': '',
'parentEntId': 10011,
'pos': Point3(-23.9375743866, 28.353269577, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10045: {'type': 'nodepath',
'name': 'middlePalletWallRight',
'comment': '',
'parentEntId': 10046,
'pos': Point3(17.4200000763, -38.2999992371, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10046: {'type': 'nodepath',
'name': 'middlePalletWall',
'comment': '',
'parentEntId': 10007,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1}}
# No scenario-specific overrides: the single empty scenario uses the
# entities defined in GlobalEntities unchanged.
Scenario0 = {}
levelSpec = {'globalEntities': GlobalEntities,
             'scenarios': [Scenario0]}
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo of pandas Series.replace.

Created on Mon Jul 17 16:17:25 2017

@author: jorgemauricio
"""
# libraries
import numpy as np
import pandas as pd
from pandas import Series, DataFrame

# create a series
ser1 = Series([1,2,3,4,1,2,3,4])

# display it (bare expression: only shows output in an interactive session)
ser1

# using replace: .replace(value to be replaced, new value)
ser1.replace(1,np.nan)

# we can also pass lists (replace 1 with 100 and 4 with 400)
ser1.replace([1,4],[100,400])

# and dictionaries too (keys are replaced by their values)
ser1.replace({4:np.nan})
|
python
|
from django.db import IntegrityError
from rest_framework.exceptions import ErrorDetail, ValidationError
from rest_framework import serializers
from ..models import Customer
from django.contrib.auth.models import User
class UserSerializer(serializers.ModelSerializer):
    """Thin ModelSerializer exposing the identity fields of Django's
    built-in User model; used by CustomerSerializer to create users."""
    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email')
class CustomerSerializer(serializers.Serializer):
    """
    Serializer for customer

    Flattens the related auth User's identity fields into the customer
    representation (source='user.*') and creates both records in create().
    """
    id = serializers.IntegerField(read_only=True)
    first_name = serializers.CharField(allow_blank=False, max_length=150, required=True, source='user.first_name')
    last_name = serializers.CharField(allow_blank=False, max_length=150, required=True, source='user.last_name')
    email = serializers.EmailField(allow_blank=False, label='Email address', max_length=254, required=True, source='user.email')
    # dob is rendered as day-month-year on output
    dob = serializers.DateField(format="%d-%m-%Y" ,required=True)

    def create(self, validated_data):
        """
        Overriding the default create method of the Model serializer.
        :param validated_data: data containing all the details of customer
        :return: returns a successfully created customer record
        :raises ValidationError: when a user with that email already exists
        """
        validated_user = validated_data.pop('user')
        # The auth username is set to the email, so the User table's
        # username uniqueness doubles as email uniqueness.
        user_data = {
            'username': validated_user.get('email'),
            'first_name': validated_user.get('first_name'),
            'last_name': validated_user.get('last_name'),
            'email': validated_user.get('email')
        }
        try:
            user = UserSerializer.create(UserSerializer(), validated_data=user_data)
        except IntegrityError:
            # Duplicate username/email -> surface as a DRF validation error.
            raise ValidationError({'email': [ErrorDetail(string='A customer with that email already exists', code='invalid')],})
        customer = Customer.objects.create(user=user,
                                           dob=validated_data.pop('dob'))
        return customer
|
python
|
# fetch mnist dataset
"""by GeoHotz"""
def fetch(url):
    """Download *url* once and cache the raw bytes in the system temp
    directory (file named after the MD5 of the URL); subsequent calls are
    served from the cache without touching the network."""
    import hashlib
    import os
    import tempfile
    fp = os.path.join(tempfile.gettempdir(), hashlib.md5(
        url.encode('utf-8')).hexdigest())
    if os.path.isfile(fp):
        with open(fp, "rb") as f:
            dat = f.read()
    else:
        # Imported lazily so cache hits work even when `requests` is not
        # installed or the network is unavailable.
        import requests
        dat = requests.get(url).content
        # important trick here: write to a .tmp file and rename, so a
        # partially written cache entry is never mistaken for a download
        with open(fp+".tmp", "wb") as f:
            f.write(dat)
        os.rename(fp+".tmp", fp)
    return dat
def mnist(url1="http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz", url2="http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz", url3="http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz", url4="http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz"):
    """Download (or load from cache) the four MNIST archives and return
    (X_train, Y_train, X_test, Y_test) as uint8 numpy arrays; image
    arrays are reshaped to (N, 28, 28)."""
    import gzip
    import numpy as np

    def decode(raw):
        # gunzip the archive and view the payload as raw bytes
        return np.frombuffer(gzip.decompress(raw), dtype=np.uint8).copy()

    # IDX format: image files carry a 16-byte header, label files 8 bytes.
    train_images = decode(fetch(url1))[0x10:].reshape((-1, 28, 28))
    train_labels = decode(fetch(url2))[8:]
    test_images = decode(fetch(url3))[0x10:].reshape((-1, 28, 28))
    test_labels = decode(fetch(url4))[8:]
    return train_images, train_labels, test_images, test_labels
|
python
|
import sys
from briefcase.platforms.linux.appimage import LinuxAppImageCreateCommand
def test_support_package_url(first_app_config, tmp_path):
    """The support-package URL query reflects the command's platform, the
    running Python minor version, and the host architecture."""
    command = LinuxAppImageCreateCommand(base_path=tmp_path)
    # Pretend to be a specific host/platform so the expectation is fixed.
    command.host_arch = 'wonky'
    command.platform = 'tester'
    expected = [
        ('platform', 'tester'),
        ('version', '3.{minor}'.format(minor=sys.version_info.minor)),
        ('arch', 'wonky'),
    ]
    assert command.support_package_url_query == expected
|
python
|
import pymongo
import pandas as pd
class mongo:
    '''
    Small convenience wrapper around pymongo covering common tasks:
    connect, create collection, insert, update, delete and CSV export.
    '''

    def __init__(self):
        '''
        Initialise with no active database handle.
        '''
        # Placeholder until connect() assigns a real Database object.
        self.db = ""

    def connect(self, connection_url, db):
        '''
        connect function to connect with mongo server

        Parameters
        ----------
        connection_url: connection url with password
        db: db name
        '''
        # Establish a connection with mongoDB
        self.client = pymongo.MongoClient(connection_url)
        # Select (lazily create) the DB
        self.db = self.client[db]

    def create_collection(self, COLLECTION_NAME):
        '''
        Reference (create) a collection.

        NOTE: MongoDB creates collections lazily - accessing
        self.db[COLLECTION_NAME] only materialises it on first insert.

        Parameters
        ----------
        COLLECTION_NAME: collection name
        '''
        try:
            self.db[COLLECTION_NAME]
            print(f"{COLLECTION_NAME} collection created")
        except Exception as e:
            # BUGFIX: corrected garbled "collectionqw" in the message.
            print(f"collection not created error : {str(e)}")

    def insert(self, collection_name, record):
        '''
        Insert a single document (dict) or several (list of dicts).

        Parameters
        ----------
        collection_name: collection name
        record: data to be inserted as dict, to insert many use list of dicts
        '''
        try:
            collection = self.db[collection_name]
            # BUGFIX: use isinstance instead of type()==, and reject
            # unsupported record types instead of silently reporting
            # success without inserting anything.
            if isinstance(record, dict):
                collection.insert_one(record)
            elif isinstance(record, list):
                collection.insert_many(record)
            else:
                raise TypeError("record must be a dict or a list of dicts")
            print("inserted successfully")
        except Exception as e:
            print(f"insert error : {str(e)}")

    def update(self, collection_name, new_dict, where_dict):
        '''
        Update every document matching *where_dict* with *new_dict*.

        Parameters
        ----------
        collection_name: collection name
        where_dict: condition as dict
        new_dict: new values
        '''
        try:
            collection = self.db[collection_name]
            collection.update_many(where_dict, {"$set": new_dict})
            print("update successfully")
        except Exception as e:
            print(f"update error : {str(e)}")

    def delete(self, collection_name, where_dict):
        '''
        Delete the first document matching *where_dict*.

        Parameters
        ----------
        collection_name: collection name
        where_dict: condition as dict
        '''
        try:
            collection = self.db[collection_name]
            collection.delete_one(where_dict)
            print("deleted successfully")
        except Exception as e:
            print(f"delete error : {str(e)}")

    def download(self, collection_name):
        '''
        Export all documents of a collection to "<collection_name>.csv"
        and return that file name.
        '''
        collection = self.db[collection_name]
        mongo_docs = collection.find()
        # Convert the mongo docs to a DataFrame
        docs = pd.DataFrame(mongo_docs)
        # BUGFIX: an empty collection yields a frame without "_id";
        # pop() would raise KeyError, so guard the drop.
        if "_id" in docs.columns:
            docs.pop("_id")
        docs.to_csv(f"{collection_name}.csv", index=False)
        return f"{collection_name}.csv"
|
python
|
# Copyright 2019, The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
# TODO(b/139939526): Move to public API.
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow_model_optimization.python.core.internal.tensor_encoding.core import core_encoder
from tensorflow_model_optimization.python.core.internal.tensor_encoding.core import encoding_stage
from tensorflow_model_optimization.python.core.internal.tensor_encoding.core import gather_encoder
from tensorflow_model_optimization.python.core.internal.tensor_encoding.testing import test_utils
# Abbreviated constants used in tests.
TENSORS = gather_encoder._TENSORS  # key prefix of the encoded-tensors dict
P1_VALS = test_utils.PlusOneEncodingStage.ENCODED_VALUES_KEY
T2_VALS = test_utils.TimesTwoEncodingStage.ENCODED_VALUES_KEY
SL_VALS = test_utils.SimpleLinearEncodingStage.ENCODED_VALUES_KEY
SIF_SIGNS = test_utils.SignIntFloatEncodingStage.ENCODED_SIGNS_KEY
SIF_INTS = test_utils.SignIntFloatEncodingStage.ENCODED_INTS_KEY
SIF_FLOATS = test_utils.SignIntFloatEncodingStage.ENCODED_FLOATS_KEY
PN_VALS = test_utils.PlusOneOverNEncodingStage.ENCODED_VALUES_KEY
class GatherEncoderTest(tf.test.TestCase, parameterized.TestCase):
  """Tests for `gather_encoder.GatherEncoder`.

  NOTE(review): most tests run through `_make_iteration_function` below,
  which simulates `num_summands` participants each encoding a value; the
  encoded values are partially decoded, summed, fully decoded, and the
  encoder state is updated from the aggregated state update tensors.
  """

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_basic_encode_decode(self):
    """Tests basic encoding and decoding works as expected."""
    x_fn = lambda: tf.random.uniform((12,))
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.PlusOneOverNEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x_fn()))
    num_summands = 3
    iteration = _make_iteration_function(encoder, x_fn, num_summands)
    state = encoder.initial_state()
    # PlusOneOverN adds 1/i in round i, and increments its state counter.
    for i in range(1, 5):
      data = self.evaluate(iteration(state))
      for j in range(num_summands):
        self.assertAllClose(
            data.x[j] + 1 / i,
            _encoded_x_field(data.encoded_x[j], [TENSORS, PN_VALS]))
      self.assertEqual((i,), data.initial_state)
      self.assertEqual((i + 1,), data.updated_state)
      state = data.updated_state

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_composite_encoder(self):
    """Tests functionality with a general, composite `Encoder`."""
    x_fn = lambda: tf.constant(1.2)
    encoder = core_encoder.EncoderComposer(
        test_utils.SignIntFloatEncodingStage())
    encoder.add_child(test_utils.TimesTwoEncodingStage(), SIF_SIGNS)
    encoder.add_child(test_utils.PlusOneEncodingStage(), SIF_INTS)
    encoder.add_child(test_utils.TimesTwoEncodingStage(), SIF_FLOATS).add_child(
        test_utils.PlusOneOverNEncodingStage(), T2_VALS)
    encoder = gather_encoder.GatherEncoder.from_encoder(
        encoder.make(), tf.TensorSpec.from_tensor(x_fn()))
    num_summands = 3
    iteration = _make_iteration_function(encoder, x_fn, num_summands)
    state = encoder.initial_state()
    # 1.2 splits to sign=1.0, int=1.0, float=0.2; children double / add one.
    for i in range(1, 5):
      data = self.evaluate(iteration(state))
      for j in range(num_summands):
        self.assertAllClose(
            2.0,
            _encoded_x_field(data.encoded_x[j], [TENSORS, SIF_SIGNS, T2_VALS]))
        self.assertAllClose(
            2.0,
            _encoded_x_field(data.encoded_x[j], [TENSORS, SIF_INTS, P1_VALS]))
        self.assertAllClose(
            0.4 + 1 / i,
            _encoded_x_field(data.encoded_x[j],
                             [TENSORS, SIF_FLOATS, T2_VALS, PN_VALS]))
        self.assertAllClose(data.x[j], data.part_decoded_x[j])
        self.assertAllClose(data.x[j] * num_summands,
                            data.summed_part_decoded_x)
        self.assertAllClose(data.x[j] * num_summands, data.decoded_x)
      self.assertEqual((i,), data.initial_state)
      self.assertEqual((i + 1,), data.updated_state)
      state = data.updated_state

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_none_state_equal_to_initial_state(self):
    """Tests that not providing state is the same as initial_state."""
    x_fn = lambda: tf.constant(1.0)
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.PlusOneOverNEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x_fn()))
    num_summands = 3
    stateful_iteration = _make_iteration_function(encoder, x_fn, num_summands)
    state = encoder.initial_state()
    stateless_iteration = _make_stateless_iteration_function(
        encoder, x_fn, num_summands)
    stateful_data = self.evaluate(stateful_iteration(state))
    stateless_data = self.evaluate(stateless_iteration())
    self.assertAllClose(stateful_data.encoded_x, stateless_data.encoded_x)
    self.assertAllClose(stateful_data.decoded_x, stateless_data.decoded_x)

  def test_python_constants_not_exposed(self):
    """Tests that only TensorFlow values are exposed to users."""
    x_fn = lambda: tf.constant(1.0)
    tensorspec = tf.TensorSpec.from_tensor(x_fn())
    # Same encoder twice: once parameterized by Python floats, once by
    # tf.Variables; only the latter should surface as user-visible params.
    encoder_py = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.SimpleLinearEncodingStage(2.0, 3.0)).add_parent(
                test_utils.PlusOneEncodingStage(), P1_VALS).add_parent(
                    test_utils.SimpleLinearEncodingStage(2.0, 3.0),
                    SL_VALS).make(), tensorspec)
    a_var = tf.compat.v1.get_variable('a_var', initializer=2.0)
    b_var = tf.compat.v1.get_variable('b_var', initializer=3.0)
    encoder_tf = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.SimpleLinearEncodingStage(a_var, b_var)).add_parent(
                test_utils.PlusOneEncodingStage(), P1_VALS).add_parent(
                    test_utils.SimpleLinearEncodingStage(a_var, b_var),
                    SL_VALS).make(), tensorspec)
    (encode_params_py, decode_before_sum_params_py,
     decode_after_sum_params_py) = encoder_py.get_params()
    (encode_params_tf, decode_before_sum_params_tf,
     decode_after_sum_params_tf) = encoder_tf.get_params()
    # Params that are Python constants -- not tf.Tensors -- should be hidden
    # from the user, and made statically available at appropriate locations.
    self.assertLen(encode_params_py, 1)
    self.assertLen(encode_params_tf, 5)
    self.assertLen(decode_before_sum_params_py, 1)
    self.assertLen(decode_before_sum_params_tf, 3)
    self.assertEmpty(decode_after_sum_params_py)
    self.assertLen(decode_after_sum_params_tf, 2)

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_decode_needs_input_shape(self):
    """Tests that mechanism for passing input shape works."""
    x_fn = lambda: tf.reshape(list(range(15)), [3, 5])
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.ReduceMeanEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x_fn()))
    iteration = _make_iteration_function(encoder, x_fn, 1)
    data = self.evaluate(iteration(encoder.initial_state()))
    # Mean of 0..14 is 7.0, broadcast back to the original 3x5 shape.
    self.assertAllEqual([[7.0] * 5] * 3, data.decoded_x)

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_commutativity_with_sum(self):
    """Tests that encoder that commutes with sum works."""
    x_fn = lambda: tf.constant([1.0, 3.0])
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(test_utils.TimesTwoEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x_fn()))
    for num_summands in [1, 3, 7]:
      iteration = _make_iteration_function(encoder, x_fn, num_summands)
      data = self.evaluate(iteration(encoder.initial_state()))
      for i in range(num_summands):
        self.assertAllClose([1.0, 3.0], data.x[i])
        self.assertAllClose(
            [2.0, 6.0], _encoded_x_field(data.encoded_x[i], [TENSORS, T2_VALS]))
        self.assertAllClose(list(data.part_decoded_x[i].values())[0],
                            list(data.encoded_x[i].values())[0])
      self.assertAllClose(np.array([2.0, 6.0]) * num_summands,
                          list(data.summed_part_decoded_x.values())[0])
      self.assertAllClose(np.array([1.0, 3.0]) * num_summands, data.decoded_x)

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_full_commutativity_with_sum(self):
    """Tests that fully commutes with sum property works."""
    spec = tf.TensorSpec((2,), tf.float32)
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(test_utils.TimesTwoEncodingStage()).make(),
        spec)
    self.assertTrue(encoder.fully_commutes_with_sum)
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.TimesTwoEncodingStage()).add_parent(
                test_utils.TimesTwoEncodingStage(), T2_VALS).make(), spec)
    self.assertTrue(encoder.fully_commutes_with_sum)
    # SignIntFloat composition contains stages that do not commute with sum.
    encoder = core_encoder.EncoderComposer(
        test_utils.SignIntFloatEncodingStage())
    encoder.add_child(test_utils.TimesTwoEncodingStage(), SIF_SIGNS)
    encoder.add_child(test_utils.PlusOneEncodingStage(), SIF_INTS)
    encoder.add_child(test_utils.TimesTwoEncodingStage(), SIF_FLOATS).add_child(
        test_utils.PlusOneOverNEncodingStage(), T2_VALS)
    encoder = gather_encoder.GatherEncoder.from_encoder(encoder.make(), spec)
    self.assertFalse(encoder.fully_commutes_with_sum)

  @tf_test_util.run_all_in_graph_and_eager_modes
  def test_state_aggregation_modes(self):
    """Tests that all state updates tensors can be aggregated."""
    x_fn = lambda: tf.random.uniform((5,))
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.StateUpdateTensorsEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x_fn()))
    iteration = _make_iteration_function(encoder, x_fn, 3)
    data = self.evaluate(iteration(encoder.initial_state()))
    expected_sum = np.sum(data.x)
    expected_min = np.amin(data.x)
    expected_max = np.amax(data.x)
    expected_stack_values = 15  # 3 values of shape 5.
    expected_state = [
        expected_sum, expected_min, expected_max, expected_stack_values
    ]
    # We are not in control of ordering of the elements in state tuple.
    self.assertAllClose(sorted(expected_state), sorted(data.updated_state))

  def test_input_tensorspec(self):
    """Tests input_tensorspec property."""
    x = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    encoder = gather_encoder.GatherEncoder.from_encoder(
        core_encoder.EncoderComposer(
            test_utils.PlusOneOverNEncodingStage()).make(),
        tf.TensorSpec.from_tensor(x))
    self.assertTrue(encoder.input_tensorspec.is_compatible_with(x))

  def test_not_fully_defined_shape_raises(self):
    """Tests tensorspec without fully defined shape."""
    encoder = core_encoder.EncoderComposer(
        test_utils.PlusOneOverNEncodingStage()).make()
    with self.assertRaisesRegex(TypeError, 'fully defined'):
      gather_encoder.GatherEncoder.from_encoder(
          encoder, tf.TensorSpec((None,), tf.float32))

  @parameterized.parameters([1.0, 'str', object])
  def test_not_an_encoder_raises(self, not_an_encoder):
    """Tests invalid type encoder argument."""
    tensorspec = tf.TensorSpec((1,), tf.float32)
    with self.assertRaisesRegex(TypeError, 'Encoder'):
      gather_encoder.GatherEncoder.from_encoder(not_an_encoder, tensorspec)

  @parameterized.parameters([1.0, 'str', object])
  def test_not_a_tensorspec_raises(self, not_a_tensorspec):
    """Tests invalid type of tensorspec argument."""
    encoder = core_encoder.EncoderComposer(
        test_utils.PlusOneOverNEncodingStage()).make()
    with self.assertRaisesRegex(TypeError, 'TensorSpec'):
      gather_encoder.GatherEncoder.from_encoder(encoder, not_a_tensorspec)
# Holds every input/intermediate/output of one simulated encode/decode
# round trip, so tests can assert on each stage individually.
TestData = collections.namedtuple('TestData', [
    'x',
    'encoded_x',
    'part_decoded_x',
    'summed_part_decoded_x',
    'decoded_x',
    'initial_state',
    'updated_state',
])
def _make_iteration_function(encoder, x_fn, num_summands):
  """Returns a tf.function utility for testing.

  The returned function simulates one round: `num_summands` values drawn
  from `x_fn` are encoded, partially decoded, summed, and fully decoded;
  the encoder state is then updated from the aggregated state update
  tensors. All stages are captured in a `TestData` tuple.
  """
  assert isinstance(encoder, gather_encoder.GatherEncoder)

  @tf.function
  def iteration(initial_state):
    x = []
    encoded_x = []
    part_decoded_x = []
    state_update_tensors = []
    encode_params, decode_before_sum_params, decode_after_sum_params = (
        encoder.get_params(initial_state))
    # Each loop iteration plays the role of one participant/summand.
    for _ in range(num_summands):
      x_value = x_fn()
      enc_x, sut = encoder.encode(x_value, encode_params)
      part_dec_x = encoder.decode_before_sum(enc_x, decode_before_sum_params)
      x.append(x_value)
      encoded_x.append(enc_x)
      part_decoded_x.append(part_dec_x)
      state_update_tensors.append(sut)
    # Element-wise sum of the partially decoded structures.
    summed_part_decoded_x = part_decoded_x[0]
    for addend in part_decoded_x[1:]:
      summed_part_decoded_x = tf.nest.map_structure(lambda x, y: x + y,
                                                    summed_part_decoded_x,
                                                    addend)
    decoded_x = encoder.decode_after_sum(summed_part_decoded_x,
                                         decode_after_sum_params, num_summands)
    aggregated_state_update_tensors = _aggregate_structure(
        state_update_tensors, encoder.state_update_aggregation_modes)
    updated_state = encoder.update_state(initial_state,
                                         aggregated_state_update_tensors)
    return TestData(x, encoded_x, part_decoded_x, summed_part_decoded_x,
                    decoded_x, initial_state, updated_state)

  return iteration
def _make_stateless_iteration_function(encoder, x_fn, num_summands):
  """Returns a tf.function utility for testing, which does not use state.

  Same round trip as `_make_iteration_function`, but `get_params` is called
  without a state argument and no state update is performed.
  """
  assert isinstance(encoder, gather_encoder.GatherEncoder)

  @tf.function
  def iteration():
    x = []
    encoded_x = []
    part_decoded_x = []
    encode_params, decode_before_sum_params, decode_after_sum_params = (
        encoder.get_params())
    for _ in range(num_summands):
      x_value = x_fn()
      enc_x, _ = encoder.encode(x_value, encode_params)
      part_dec_x = encoder.decode_before_sum(enc_x, decode_before_sum_params)
      x.append(x_value)
      encoded_x.append(enc_x)
      part_decoded_x.append(part_dec_x)
    summed_part_decoded_x = part_decoded_x[0]
    for addend in part_decoded_x[1:]:
      summed_part_decoded_x = tf.nest.map_structure(lambda x, y: x + y,
                                                    summed_part_decoded_x,
                                                    addend)
    decoded_x = encoder.decode_after_sum(summed_part_decoded_x,
                                         decode_after_sum_params, num_summands)
    dummy = tf.constant(0.0)  # Avoids having to separate TF/PY values.
    return TestData(x, encoded_x, part_decoded_x, summed_part_decoded_x,
                    decoded_x, dummy, dummy)

  return iteration
def _aggregate_one(values, mode):
  """Aggregates a list of tensors according to one aggregation mode.

  Args:
    values: A list of `Tensor` values to aggregate.
    mode: A `StateAggregationMode` determining how to aggregate.

  Returns:
    The aggregated `Tensor`.

  Raises:
    ValueError: If `mode` is not a known `StateAggregationMode`.
  """
  if mode == encoding_stage.StateAggregationMode.SUM:
    return tf.reduce_sum(tf.stack(values), axis=0)
  elif mode == encoding_stage.StateAggregationMode.MIN:
    return tf.reduce_min(tf.stack(values), axis=0)
  elif mode == encoding_stage.StateAggregationMode.MAX:
    return tf.reduce_max(tf.stack(values), axis=0)
  elif mode == encoding_stage.StateAggregationMode.STACK:
    return tf.stack(values)
  else:
    # Previously an unknown mode silently returned None; fail loudly.
    raise ValueError('Unknown StateAggregationMode: {}'.format(mode))
def _aggregate_structure(state_update_tensors, state_update_aggregation_modes):
  """Aggregates per-summand state update tensors, element-wise by mode."""
  return tuple(
      _aggregate_one([tensors[i] for tensors in state_update_tensors], mode)
      for i, mode in enumerate(state_update_aggregation_modes))
def _encoded_x_field(encoded_x, path):
"""Returns a field from `encoded_x` returned by the `encode` method.
In order to test the correctness of encoding, we also need to access the
encoded objects, which in turns depends on an implementation detail (the
specific use of `nest.flatten_with_joined_string_paths`). This dependence is
constrained to a single place in this utility.
Args:
encoded_x: The structure returned by the `encode` method.
path: A list of keys corresponding to the path in the nested dictionary
before it was flattened.
Returns:
A value from `encoded_x` corresponding to the `path`.
"""
return encoded_x['/'.join(path)]
# Standard TensorFlow test runner entry point.
if __name__ == '__main__':
  tf.test.main()
|
python
|
import json
import pathlib
import pytest
import laskea
import laskea.config as cfg
def test_generate_template_command():
    """The generated JSON template contains the default fill markers."""
    template = cfg.generate_template()
    expected_fragment = '"markers": "[[[fill ]]] [[[end]]]"'
    assert expected_fragment in template
def test_process_spoc_no_file(capsys):
    """Processing a missing path exits and reports the problem on stderr."""
    with pytest.raises(SystemExit):
        cfg.process('no-file', {})
    captured = capsys.readouterr()
    assert 'Given configuration path is no file or empty' in captured.err
    assert not captured.out
def test_process_spoc_template_file(capsys):
    """A valid fixture config is processed; status goes to stderr only."""
    config_path = pathlib.Path('tests', 'fixtures', 'basic', 'dot.laskea.json')
    options = {'quiet': laskea.QUIET, 'verbose': True}
    assert cfg.process(str(config_path), options) is None
    captured = capsys.readouterr()
    assert 'Configuration interface combined file, environment, and commandline values!' in captured.err
    assert not captured.out
def test_report_context(capsys):
    """report_context dumps a 21-line context summary to stderr."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = False
    assert cfg.report_context(command='-', transaction_mode='+', vector=['42']) is None
    captured = capsys.readouterr()
    assert not captured.out
    lines = captured.err.strip().split('\n')
    assert len(lines) == 21
    assert lines[0] == 'Command: (-)'
    assert lines[1] == '- Transaction mode: (+)'
    assert lines[2] == 'Environment(variable values):'
    assert lines[11] == 'Effective(variable values):'
    env_prefix = f'- {laskea.APP_ENV}_'
    for line in lines[3:11]:
        assert line.startswith(env_prefix)
    assert lines[-1] == "- CallVector: (['42'])"
    laskea.QUIET = saved_quiet
def test_report_context_quiet(capsys):
    """With QUIET set, report_context prints nothing at all."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = True
    assert cfg.report_context(command='-', transaction_mode='+', vector=['42']) is None
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    laskea.QUIET = saved_quiet
def test_report_sources_of_effective_configuration(capsys):
    """An empty source map renders as '{}' between the frame lines."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = False
    assert cfg.report_sources_of_effective_configuration(source_of={}, header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    expected = ['42', '# --- BEGIN ---', '{}', '# --- E N D ---']
    assert captured.err.strip().split('\n') == expected
    laskea.QUIET = saved_quiet
def test_report_sources_of_effective_configuration_quiet(capsys):
    """With QUIET set, the sources report is fully suppressed."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = True
    assert cfg.report_sources_of_effective_configuration(source_of={}, header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    laskea.QUIET = saved_quiet
def test_safe_report_configuration(capsys):
    """An empty configuration renders as '{}' between the frame lines."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = False
    assert cfg.safe_report_configuration(configuration={}, header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    expected = ['42', '# --- BEGIN ---', '{}', '# --- E N D ---']
    assert captured.err.strip().split('\n') == expected
    laskea.QUIET = saved_quiet
def test_safe_report_configuration_quiet(capsys):
    """With QUIET set, the configuration report is fully suppressed."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = True
    assert cfg.safe_report_configuration(configuration={}, header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    laskea.QUIET = saved_quiet
def test_safe_report_configuration_no_leak(capsys):
    """Secrets (remote token) are masked before being reported."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = False
    secret_config = {'remote': {'token': 'leak'}}
    masked_config = {'remote': {'token': '*************'}}
    assert cfg.safe_report_configuration(configuration=secret_config, header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    masked_body = json.dumps(masked_config, indent=2).split('\n')
    expected = ['42', '# --- BEGIN ---'] + masked_body + ['# --- E N D ---']
    assert captured.err.strip().split('\n') == expected
    laskea.QUIET = saved_quiet
def test_create_and_report_effective_configuration(capsys):
    """The effective configuration report is a 47-line framed dump on stderr."""
    quiet_flag_restore = laskea.QUIET
    laskea.QUIET = False
    assert cfg.create_and_report_effective_configuration(header='42') is None
    out, err = capsys.readouterr()
    assert not out
    lines = err.strip().split('\n')
    assert len(lines) == 47
    assert lines[:2] == ['42', '# --- BEGIN ---']
    assert lines[-1] == '# --- E N D ---'
    # Removed a leftover debug print(lines) that spammed the test output.
    laskea.QUIET = quiet_flag_restore
def test_create_and_report_effective_configuration_quiet(capsys):
    """With QUIET set, the effective configuration report is suppressed."""
    saved_quiet = laskea.QUIET
    laskea.QUIET = True
    assert cfg.create_and_report_effective_configuration(header='42') is None
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err
    laskea.QUIET = saved_quiet
def test_load_configuration_empty(capsys):
    """Loading an empty configuration warns and returns an empty mapping."""
    # Renamed from quiet_flag_restore: the saved flag is DEBUG, not QUIET.
    debug_flag_restore = laskea.DEBUG
    base_markers_restore = laskea.BASE_MARKERS
    laskea.DEBUG = False
    assert cfg.load_configuration(configuration={}) == {}
    out, err = capsys.readouterr()
    assert not out
    assert err.strip() == 'Warning: Requested load from empty configuration'
    laskea.DEBUG = debug_flag_restore
    laskea.BASE_MARKERS = base_markers_restore
def test_load_configuration_remote_token(capsys):
    """A config carrying a remote token loads silently; env values win."""
    # Renamed from quiet_flag_restore: the saved flag is DEBUG, not QUIET.
    debug_flag_restore = laskea.DEBUG
    base_markers_restore = laskea.BASE_MARKERS
    laskea.DEBUG = False
    thing = {'remote': {'token': 'leak'}}
    gnith = {'remote_base_url': 'env', 'remote_token': 'env', 'remote_user': 'env'}
    assert cfg.load_configuration(configuration=thing) == gnith
    out, err = capsys.readouterr()
    assert not out
    assert not err
    laskea.DEBUG = debug_flag_restore
    laskea.BASE_MARKERS = base_markers_restore
|
python
|
from copy import *
import re
class TaggedWord:
    """A (surface word, POS tag) pair."""

    def __init__(self, word='', tag=''):
        self.word, self.tag = word, tag

    def getWord(self):
        """Return the surface form."""
        return self.word

    def getTag(self):
        """Return the POS tag."""
        return self.tag
def replaceCharAt(str, pos, c):
    """Return a copy of *str* with the character at *pos* replaced by *c*."""
    prefix, suffix = str[:pos], str[pos + 1:]
    return prefix + c + suffix
class CorpusReaderException(Exception):
    """Raised when a corpus file cannot be parsed.

    The raise sites in this module pass an error code as a second argument,
    referencing ``CorpusReaderException.CorpusReadError``; that attribute
    did not exist, so raising would itself crash with an AttributeError.
    The nested class below supplies the expected error codes.
    """

    class CorpusReadError:
        """Symbolic error codes used as the second exception argument."""
        MISSING_TAG = 'MISSING_TAG'
        ZERO_LENGTH_WORD = 'ZERO_LENGTH_WORD'
class AbsCorpusReader:
    """Abstract corpus reader.

    Keeps shallow copies of the start/end marker lists that wrap every
    sentence, plus the handler that receives each parsed sentence.

    Parameters:
        startMarkers: List<TaggedWord> prepended to every sentence
        endMarkers: List<TaggedWord> appended to every sentence
        TH: handler object exposing handleSentence(sentence)
    """

    def __init__(self, startMarkers, endMarkers, TH):
        self.startMarkers = copy(startMarkers)
        self.endMarkers = copy(endMarkers)
        self.sentenceHandler = TH

    def parse(self, reader):
        """Parse *reader*; concrete subclasses implement this."""
        pass
class CorpusReaderSatu(AbsCorpusReader):
    """Reads a corpus with one sentence per line, tokens written as word/TAG."""

    def __init__(self, startMarkers, endMarkers, TH):
        AbsCorpusReader.__init__(self, startMarkers, endMarkers, TH)

    def parse(self, reader):
        """Parse every non-empty line of *reader* and hand each sentence,
        wrapped in start/end markers, to the sentence handler.

        Raises:
            CorpusReaderException: for a token without a '/TAG' suffix or
                with an empty word part.
        """
        for line in reader.readlines():
            line = line.strip()
            if len(line) == 0:
                continue
            # Start each sentence with a copy of the start markers.
            sentence = copy(self.startMarkers)
            lineParts = re.split("\\s+", line)
            # range() instead of xrange(): xrange does not exist on
            # Python 3 and made parse() crash with a NameError.
            for i in range(len(lineParts)):
                wordTag = lineParts[i]
                # The tag follows the LAST '/' so words containing '/' work.
                sepIndex = wordTag.rfind('/')
                if sepIndex == -1:
                    # Note: the old error-code second argument referenced
                    # CorpusReaderException.CorpusReadError before raising,
                    # which could itself raise AttributeError; the message
                    # alone is sufficient here.
                    raise CorpusReaderException(
                        "Tag is missing in '" + wordTag + "'")
                word = wordTag[:sepIndex]
                tag = wordTag[sepIndex + 1:]
                if len(word) == 0:
                    raise CorpusReaderException(
                        "Zero-length word in '" + wordTag + "'")
                if i == 0:
                    # Lower-case the sentence-initial character so that
                    # capitalization does not fragment the lexicon.
                    word = replaceCharAt(word, 0, word[0].lower())
                sentence.append(TaggedWord(word, tag))
            sentence += copy(self.endMarkers)
            self.sentenceHandler.handleSentence(sentence)
class TrainHandler:
    """Accumulates a lexicon and tag n-gram counts from tagged sentences."""

    def __init__(self):
        # word -> {tag -> count}
        self.lexicon = {}
        # space-joined tag sequences -> count
        self.unigrams = {}
        self.bigrams = {}
        self.trigrams = {}
        self.quatograms = {}

    def getBigram(self):
        return self.bigrams

    def getLexicon(self):
        return self.lexicon

    def getQuatogram(self):
        return self.quatograms

    def getTrigram(self):
        return self.trigrams

    def getUnigram(self):
        return self.unigrams

    def handleSentence(self, sentence):
        """Update all counts from one sentence (a list of TaggedWord).

        Uses range() rather than the Python-2-only xrange(), which made
        this method crash with a NameError on Python 3.
        """
        for i in range(len(sentence)):
            self.addLexiconEntry(sentence[i])
            self.addUniGram(sentence, i)
            if i > 0:
                self.addBiGram(sentence, i)
            if i > 1:
                self.addTriGram(sentence, i)
            # A quatogram spans tags [i-2, i-1, i, i+1], so it also needs
            # i > 1; previously i = 0 and i = 1 produced negative indices
            # that silently wrapped to the sentence end and counted bogus
            # 4-grams.
            if i > 1 and i < len(sentence) - 1:
                self.addQuatoGram(sentence, i)

    def addLexiconEntry(self, w):
        """Count one (word, tag) observation in the lexicon.

        Parameters:
            w: TaggedWord
        """
        word = w.getWord()
        tag = w.getTag()
        if word not in self.lexicon:
            self.lexicon[word] = {}
        if tag not in self.lexicon[word]:
            self.lexicon[word][tag] = 1
        else:
            self.lexicon[word][tag] += 1

    def addUniGram(self, sentence, index):
        """Count the tag unigram at *index*."""
        unigram = sentence[index].getTag()
        if unigram not in self.unigrams:
            self.unigrams[unigram] = 1
        else:
            self.unigrams[unigram] += 1

    def addBiGram(self, sentence, index):
        """Count the tag bigram ending at *index*; requires index >= 1."""
        bigram = sentence[index - 1].getTag() + " " + sentence[index].getTag()
        if bigram not in self.bigrams:
            self.bigrams[bigram] = 1
        else:
            self.bigrams[bigram] += 1

    def addTriGram(self, sentence, index):
        """Count the tag trigram ending at *index*; requires index >= 2."""
        trigram = (sentence[index - 2].getTag() + " "
                   + sentence[index - 1].getTag() + " "
                   + sentence[index].getTag())
        if trigram not in self.trigrams:
            self.trigrams[trigram] = 1
        else:
            self.trigrams[trigram] += 1

    def addQuatoGram(self, sentence, index):
        """Count the tag 4-gram over [index-2, index+1].

        Requires 2 <= index <= len(sentence) - 2.
        """
        quatogram = (sentence[index - 2].getTag() + " "
                     + sentence[index - 1].getTag() + " "
                     + sentence[index].getTag() + " "
                     + sentence[index + 1].getTag())
        if quatogram not in self.quatograms:
            self.quatograms[quatogram] = 1
        else:
            self.quatograms[quatogram] += 1
#MainTrainer
def writeNGrams(uniGrams, biGrams, triGrams, quatoGrams, writer):
    """Write every n-gram count map to *writer*, one "ngram count" line each.

    Maps are written in order: unigrams, bigrams, trigrams, quatograms.

    Parameters:
        uniGrams, biGrams, triGrams, quatoGrams: Map<String, Integer>
        writer: a writable text stream
    """
    for grams in (uniGrams, biGrams, triGrams, quatoGrams):
        for entry, count in grams.items():
            writer.write(entry + " " + str(count) + "\n")
    writer.flush()
def writeLexicon(lexicon, writer):
    """Write one line per word: "word tag1 n1 tag2 n2 ..." to *writer*.

    Parameters:
        lexicon: Map<String, Map<String, Integer>>
        writer: a writable text stream
    """
    for word, tag_counts in lexicon.items():
        parts = [word]
        for tag, count in tag_counts.items():
            parts.append(tag)
            parts.append(str(count))
        writer.write(" ".join(parts))
        writer.write("\n")
    writer.flush()
def Train(corpus):
    """Train lexicon and n-gram counts from a tagged corpus file.

    Reads *corpus* (a path), writes ./resource/Lexicon.trn and
    ./resource/Ngram.trn, and exits the process with status 1 on
    read, parse, or write errors.

    Parameters:
        corpus: String path to the tagged corpus
    """
    # Two start markers (trigram/quatogram context) and one end marker.
    startMarkers = [TaggedWord("<STARTTAG>", "<STARTTAG>"),
                    TaggedWord("<STARTTAG>", "<STARTTAG>")]
    endMarkers = [TaggedWord("<ENDTAG>", "<ENDTAG>")]
    trainHandler = TrainHandler()
    corpusReader = CorpusReaderSatu(startMarkers, endMarkers, trainHandler)
    try:
        # with-statement guarantees the corpus file is closed even when
        # parsing raises.
        with open(corpus, "r") as fcorpus:
            corpusReader.parse(fcorpus)
    except IOError:
        print("Could not read corpus!\n")
        exit(1)
    except CorpusReaderException:
        print("Train Error!\n")
        exit(1)
    try:
        with open("./resource/Lexicon.trn", "w") as flex:
            writeLexicon(trainHandler.getLexicon(), flex)
        # The Ngram file was previously never closed; with fixes that.
        with open("./resource/Ngram.trn", "w") as ftrain:
            writeNGrams(trainHandler.getUnigram(), trainHandler.getBigram(),
                        trainHandler.getTrigram(), trainHandler.getQuatogram(),
                        ftrain)
    except OSError:
        # Was a bare except, which also swallowed KeyboardInterrupt etc.
        print("System Can not write training data!\n")
        exit(1)
|
python
|
# Load the NuScenes v1.0-mini split; expects the dataset to be extracted
# under ../../data/nuscenes-mini (verbose=True prints loading progress).
from nuscenes.nuscenes import NuScenes
nusc = NuScenes(version='v1.0-mini', dataroot='../../data/nuscenes-mini', verbose=True)
|
python
|
import numpy

# Read an n x m integer grid from stdin, take each row's minimum, and
# print the maximum of those minima ("max of row-mins").
n, m = map(int, input().split())
grid = numpy.array([input().split() for _ in range(n)], dtype=int)
print(grid.min(axis=1).max())
|
python
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslo_concurrency import processutils
from nova.tests.unit.virt.libvirt.volume import test_volume
from nova import utils
from nova.virt.libvirt import utils as libvirt_utils
from nova.virt.libvirt.volume import nfs
class LibvirtNFSVolumeDriverTestCase(test_volume.LibvirtVolumeBaseTestCase):
    """Tests the libvirt NFS volume driver."""

    def setUp(self):
        super(LibvirtNFSVolumeDriverTestCase, self).setUp()
        # Base directory under which NFS exports are mounted.
        self.mnt_base = '/mnt'
        self.flags(nfs_mount_point_base=self.mnt_base, group='libvirt')

    def test_libvirt_nfs_driver(self):
        # Connect/disconnect round trip with the share not yet mounted:
        # expects mkdir + mount on connect and umount on disconnect.
        libvirt_driver = nfs.LibvirtNFSVolumeDriver(self.fake_conn)
        self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(self.mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")
        # connect_volume must publish the mounted device path back into
        # connection_info.
        device_path = os.path.join(export_mnt_base,
                                   connection_info['data']['name'])
        self.assertEqual(connection_info['data']['device_path'], device_path)
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'nfs', export_string, export_mnt_base),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)

    @mock.patch.object(nfs.utils, 'execute')
    @mock.patch.object(nfs.LOG, 'debug')
    @mock.patch.object(nfs.LOG, 'exception')
    def test_libvirt_nfs_driver_umount_error(self, mock_LOG_exception,
                                             mock_LOG_debug, mock_utils_exe):
        # Benign umount failures (busy / not mounted) are only logged at
        # debug level; any other failure is logged as an exception.
        export_string = '192.168.1.1:/nfs/share1'
        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver = nfs.LibvirtNFSVolumeDriver(self.fake_conn)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: device is busy.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_debug.called)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: target is busy.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_debug.called)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: not mounted.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_debug.called)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: Other error.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_exception.called)

    def test_libvirt_nfs_driver_get_config(self):
        # The generated libvirt XML must reference the mounted file path
        # with a raw driver and native I/O.
        libvirt_driver = nfs.LibvirtNFSVolumeDriver(self.fake_conn)
        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(self.mnt_base,
                                       utils.get_hash_str(export_string))
        file_path = os.path.join(export_mnt_base, self.name)
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'device_path': file_path}}
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertFileTypeEquals(tree, file_path)
        self.assertEqual('raw', tree.find('./driver').get('type'))
        self.assertEqual('native', tree.find('./driver').get('io'))

    def test_libvirt_nfs_driver_already_mounted(self):
        # When the share is already mounted, connect only probes with
        # findmnt and must not mkdir/mount again.
        libvirt_driver = nfs.LibvirtNFSVolumeDriver(self.fake_conn)
        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(self.mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")
        expected_commands = [
            ('findmnt', '--target', export_mnt_base, '--source',
             export_string),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)

    def test_libvirt_nfs_driver_with_opts(self):
        # Mount options from connection_info are passed through to mount.
        libvirt_driver = nfs.LibvirtNFSVolumeDriver(self.fake_conn)
        self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
        export_string = '192.168.1.1:/nfs/share1'
        options = '-o intr,nfsvers=3'
        export_mnt_base = os.path.join(self.mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'options': options}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'nfs', '-o', 'intr,nfsvers=3',
             export_string, export_mnt_base),
            ('umount', export_mnt_base),
        ]
        self.assertEqual(expected_commands, self.executes)
|
python
|
# coding: utf-8
from tests.util import BaseGrabTestCase
from tests.spider_sigint import BaseKeyboardInterruptTestCase
SCRIPT_TPL = '''
import sys
import logging
try:
from grab import Grab
import os
import grab
#logging.error('PATH: ' + grab.__file__)
#logging.error('PID: ' + str(os.getpid()))
g = Grab(%s)
for x in range(200):
g.go('%s')
except KeyboardInterrupt:
#logging.error('EXITING WITH CODE 13')
sys.exit(13)
else:
#logging.error('NO SIGINT')
sys.exit(0)
'''.lstrip()
class GrabKeyboardInterruptTestCase(BaseKeyboardInterruptTestCase,
                                    BaseGrabTestCase):
    # Reuses the shared keyboard-interrupt test machinery with the
    # Grab-specific child-process script template defined above.
    script_tpl = SCRIPT_TPL
|
python
|
import magma
def AXI4SlaveType(addr_width, data_width):
    """Build the magma port type for an AXI4-Lite slave interface.

    Parameters:
        addr_width: width in bits of the read/write address buses
        data_width: width in bits of the read/write data buses

    Returns a magma.Tuple mirroring this verilog port list:

        input  logic [addr_width-1:0] AWADDR,
        input  logic                  AWVALID,
        output logic                  AWREADY,
        input  logic [data_width-1:0] WDATA,
        input  logic                  WVALID,
        output logic                  WREADY,
        input  logic [addr_width-1:0] ARADDR,
        input  logic                  ARVALID,
        output logic                  ARREADY,
        output logic [data_width-1:0] RDATA,
        output logic [1:0]            RRESP,
        output logic                  RVALID,
        input  logic                  RREADY,
        output logic                  interrupt
    """
    ports = {
        'awaddr': magma.In(magma.Bits[addr_width]),
        'awvalid': magma.In(magma.Bit),
        'awready': magma.Out(magma.Bit),
        'wdata': magma.In(magma.Bits[data_width]),
        'wvalid': magma.In(magma.Bit),
        'wready': magma.Out(magma.Bit),
        'araddr': magma.In(magma.Bits[addr_width]),
        'arvalid': magma.In(magma.Bit),
        'arready': magma.Out(magma.Bit),
        'rdata': magma.Out(magma.Bits[data_width]),
        'rresp': magma.Out(magma.Bits[2]),
        'rvalid': magma.Out(magma.Bit),
        'rready': magma.In(magma.Bit),
        'interrupt': magma.Out(magma.Bit),
    }
    return magma.Tuple(**ports)
|
python
|
# Task #1, variant 9  (comments translated from Russian)
# The program names a person and asks for his pseudonym
# Gasanov A.F.
# 29.02.2016

# Quiz: Domenikos Theotokopoulos ("Доменико Теотокопули") is better known
# under the pseudonym "Эль Греко" (El Greco).  User-facing strings are kept
# in Russian on purpose.
print ("Герой нашей сегоднящней программы - Доменико Теотокопули")
psev=input("Под каким же именем мы знаем этого человека? Ваш ответ:")
if (psev)==("Эль Греко"):
    # NOTE(review): the answer is appended with no separator, so the output
    # reads oddly -- looks unintentional, confirm before changing the string.
    print ("Всё верно: - Доменико Теотокопули"+psev)
else:
    print ("Вы ошиблись, это не его псевдоним.")
# Keep the console window open until the user presses Enter.
input("нажмите Enter для выхода")
|
python
|
"""
- Double check that the targets are correct
- verify that substorms.get_next(input_idx + return_idx) == input_idx + return_idx
- verify all >= 0
- create files to test different situations?
- missing data -> mask is correct
"""
|
python
|
# BUGFIX: the usage line advertised a "[--color]" flag, but the parser below
# only recognises "--c".  The usage text now matches the implementation.
USAGE = """USAGE
$ python anipix.py [imfile1] [imfile2] [outfile] [--c]
[outfile] must be .mp4
[--c] is an optional flag to use color mode (slower)
Examples:
$ python anipix.py cameraman.png lena.png gray.mp4
$ python anipix.py peppers.png mandrill.png color.mp4 --c
"""

if __name__ == "__main__":
    from sys import argv
    import pixelanimation as pa

    # Require the two image paths and the output path; --help always wins.
    if len(argv) < 4 or "--help" in argv:
        print(USAGE)
    elif argv[3][-4:] != ".mp4":
        print("Bad input: [outfile] must be .mp4\n")
        print(USAGE)
    else:
        if "--c" in argv:
            print("Using color mode")
            pa.animate_pixels(argv[1], argv[2], argv[3], verbose=True, color=True)
        else:
            pa.animate_pixels(argv[1], argv[2], argv[3], verbose=True)
|
python
|
import numpy as np
from lmfit import Parameters
import sys
import os
sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('./Functions'))
from utils import find_minmax
from PeakFunctions import Gaussian, LogNormal
from numba import jit
# NOTE: nopython=False lets numba fall back to object mode if type inference
# fails; the loop shape is kept simple so it compiles either way.
@jit(nopython=False)
def calc_dist(q,r,dist,sumdist):
    """Average the solid-sphere form factor over a size distribution.

    q       : array of scattering vectors
    r       : array of radii on which the distribution is sampled
    dist    : (unnormalised) weight of each radius in r
    sumdist : normalisation constant, sum(dist)
    Returns an array of the same shape as q.
    """
    ffactor=np.ones_like(q)
    for i,q1 in enumerate(q):
        # Squared sphere scattering amplitude, weighted by the distribution.
        f = np.sum(16 * np.pi ** 2 * (np.sin(q1 * r) - q1 * r * np.cos(q1 * r)) ** 2 * dist / q1 ** 6)
        ffactor[i]=f / sumdist
    return ffactor
class Sphere_MultiModal:
    def __init__(self, x=0.001, dist='Gaussian', N=50, rhoc=1.0, rhosol=0.0, norm=1.0, bkg=0.0,
                 mpar={'Distributions':{'Dist':['Dist1'],'R':[1.0],'Rsig':[1.0],'Norm':[1.0]}}):
        """
        Calculates the form factor of a solid sphere with Multimodal size distribution

        x      : Array of q-values in the same reciprocal unit as R and Rsig
        R      : Mean radius of the solid spheres
        Rsig   : Width of the distribution of solid spheres
        dist   : 'Gaussian', 'LogNormal' or 'Weibull'
        N      : No. of points on which the distribution will be calculated
        rhoc   : Electron density of the particle
        rhosol : Electron density of the solvent or surrounding environment
        norm   : Overall intensity scale factor
        bkg    : Constant background added to the intensity
        mpar   : Multi-parameter dict: one R/Rsig/Norm triple per named mode
        """
        # NOTE(review): mpar is a mutable default argument; it is only read
        # here, but callers must not mutate the shared default dict.
        if type(x)==list:
            self.x=np.array(x)
        else:
            self.x=x
        self.dist=dist
        self.rhoc=rhoc
        self.rhosol=rhosol
        self.norm=norm
        self.bkg=bkg
        self.N=N
        self.__mpar__=mpar
        self.__mkeys__=list(self.__mpar__.keys())
        self.choices={'dist':['Gaussian','LogNormal','Weibull']}
        # BUGFIX: y() tests self.__fit__ but nothing ever assigned it, so a
        # direct call (as in the __main__ block below) raised AttributeError.
        # Default to False so stand-alone evaluation works and the diagnostic
        # output_params are filled; a fitting driver may set it to True.
        self.__fit__=False
        self.init_params()
        self.output_params = {'scaler_parameters': {}}

    def init_params(self):
        """Create the lmfit Parameters object mirroring the instance attributes."""
        self.params=Parameters()
        self.params.add('rhoc',value=self.rhoc,vary=0,min=-np.inf,max=np.inf,expr=None,brute_step=0.1)
        self.params.add('rhosol',value=self.rhosol,vary=0,min=-np.inf,max=np.inf,expr=None,brute_step=0.1)
        self.params.add('norm',value=self.norm,vary=0,min=-np.inf,max=np.inf,expr=None,brute_step=0.1)
        self.params.add('bkg',value=self.bkg,vary=0,min=-np.inf,max=np.inf,expr=None,brute_step=0.1)
        for mkey in self.__mkeys__:
            for key in self.__mpar__[mkey].keys():
                if key !='Dist':
                    if key == 'Rsig':
                        # Distribution widths must stay strictly positive.
                        for i in range(len(self.__mpar__[mkey][key])):
                            self.params.add('__%s_%s_%03d' % (mkey, key, i), value=self.__mpar__[mkey][key][i], vary=0,
                                            min=0.001, max=np.inf, expr=None, brute_step=0.1)
                    else:
                        for i in range(len(self.__mpar__[mkey][key])):
                            self.params.add('__%s_%s_%03d'%(mkey,key,i),value=self.__mpar__[mkey][key][i],vary=0,min=-np.inf,max=np.inf,expr=None,brute_step=0.1)

    def update_params(self):
        """Pull the current values out of self.params into working attributes."""
        self.rhoc=self.params['rhoc'].value
        self.rhosol=self.params['rhosol'].value
        self.norm=self.params['norm'].value
        self.bkg=self.params['bkg'].value
        mkey = self.__mkeys__[0]
        key='R'
        self.__Nl__ = len(self.__mpar__[mkey][key])
        self.__R__ = np.array([self.params['__%s_%s_%03d' % (mkey, key, i)].value for i in range(self.__Nl__)])
        key='Rsig'
        self.__Rsig__ = np.array([self.params['__%s_%s_%03d' % (mkey, key, i)].value for i in range(self.__Nl__)])
        key='Norm'
        self.__Norm__ = [self.params['__%s_%s_%03d' % (mkey, key, i)].value for i in range(self.__Nl__)]

    def y(self):
        """Evaluate the model intensity on self.x.

        Builds the multi-modal size distribution on a radius grid, averages
        the sphere form factor over it (calc_dist) and returns
        norm * (rhoc - rhosol)**2 * <F^2> + bkg.  When not fitting, also
        fills self.output_params with the distribution and per-mode curves.
        """
        self.update_params()
        rho=self.rhoc-self.rhosol
        # Choose a radius grid wide enough to cover every mode of the chosen
        # distribution type.
        if self.dist == 'Gaussian':
            rmin, rmax = max(0.0001, np.min(self.__R__-5*self.__Rsig__)),np.max(self.__R__+5*self.__Rsig__)
            r=np.linspace(rmin,rmax,self.N)
        elif self.dist == 'LogNormal':
            rmin, rmax = max(0.0001, np.min(np.exp(np.log(self.__R__) - 5 * self.__Rsig__))), np.max(np.exp(np.log(self.__R__) + 5 * self.__Rsig__))
            r = np.logspace(np.log10(rmin), np.log10(rmax), self.N)
        else:
            # Weibull: heuristic upper bound -- TODO confirm the intended formula.
            maxr=np.max(self.__R__)
            rmin,rmax= 0.0,maxr+maxr*maxr**(1.0/np.max(self.__Rsig__))
            r = np.linspace(rmin,rmax, self.N)
        dist=np.zeros_like(r)
        tdist=[]
        # Accumulate each mode's weighted distribution; tdist keeps the
        # individual modes for per-mode diagnostics below.
        for i in range(self.__Nl__):
            if self.dist == 'Gaussian':
                gau=Gaussian.Gaussian(x = r, pos = self.__R__[i], wid = self.__Rsig__[i])
                gau.x=r
                tdist.append(self.__Norm__[i]*gau.y())
                dist = dist + tdist[i]
            elif self.dist == 'LogNormal':
                lgn=LogNormal.LogNormal(x = r, pos = self.__R__[i], wid = self.__Rsig__[i])
                lgn.x = r
                tdist.append(self.__Norm__[i]*lgn.y())
                dist = dist + tdist[i]
            else:
                # Weibull PDF with scale R and shape Rsig.
                twdist=(self.__Rsig__[i]/self.__R__[i])*(r/self.__R__[i])**(self.__Rsig__[i]-1.0)*np.exp(-(r/self.__R__[i])**self.__Rsig__[i])
                tdist.append(self.__Norm__[i]*twdist)
                dist = dist + tdist[i]
        sumdist = np.sum(dist)
        ffactor=calc_dist(self.x,r,dist,sumdist)
        I_total=self.norm * rho ** 2 * ffactor + self.bkg
        if not self.__fit__:
            # Diagnostics for interactive use (skipped during fitting).
            self.output_params['I_total'] = {'x': self.x,'y': I_total}
            self.output_params['Distribution'] = {'x': r, 'y': dist / sumdist}
            mean = np.sum(r * dist) / sumdist
            self.output_params['scaler_parameters']['Rmean'] = mean
            self.output_params['scaler_parameters']['Rwidth'] = np.sqrt(np.sum((r - mean) ** 2 * dist) / sumdist)
            for i in range(len(tdist)):
                self.output_params[self.__mpar__['Distributions']['Dist'][i]] = {'x': r, 'y': tdist[i] / sumdist}
                tffactor=calc_dist(self.x,r,tdist[i],sumdist)
                self.output_params['I_'+self.__mpar__['Distributions']['Dist'][i]]={'x':self.x,'y':self.norm*rho**2*tffactor+self.bkg}
        return I_total
# Quick smoke test: evaluate the default single-mode model on a q-grid.
if __name__=='__main__':
    x=np.linspace(0.001,1.0,500)
    fun=Sphere_MultiModal(x=x)
    print(fun.y())
|
python
|
from tkinter import*
from tkinter import ttk
from tkinter import messagebox
import random
from datetime import date
import time
import sqlite3
# Main window: fixed-size 1000x650, dark brown background.
root=Tk()
root.title("Cafe Management System")
root.geometry("1000x650")
root.resizable(width=False,height=False)
root.configure(bg="#220D0B")
# Banner images (must live next to the script).
logo = PhotoImage(file="logo.png")
rightcup = PhotoImage(file="rightcup.png")
leftcup = PhotoImage(file="leftcup.png")
#**************************************CheckButton variables*******************************************************
# One IntVar per menu item: 1 = item ticked, 0 = unticked.
menu1chkVar = IntVar()
menu2chkVar = IntVar()
menu3chkVar = IntVar()
menu4chkVar = IntVar()
menu5chkVar = IntVar()
menu6chkVar = IntVar()
menu7chkVar = IntVar()
menu8chkVar = IntVar()
menu9chkVar = IntVar()
menu10chkVar = IntVar()
menu11chkVar = IntVar()
menu12chkVar = IntVar()
#******************************Entry Variables******************************************************************
# Customer name plus one ordered-quantity IntVar per menu item.
nameVar=StringVar()
menu1Var = IntVar()
menu2Var = IntVar()
menu3Var = IntVar()
menu4Var = IntVar()
menu5Var = IntVar()
menu6Var = IntVar()
menu7Var = IntVar()
menu8Var = IntVar()
menu9Var = IntVar()
menu10Var = IntVar()
menu11Var = IntVar()
menu12Var = IntVar()
#************************************FUNCTIONS***********************************************************************
def exit():
    """Ask for confirmation and close the main window.

    NOTE: shadows the builtin exit(); kept because the Exit button's
    command references this name.
    """
    if messagebox.askyesno("Cafe Bloom","Are you sure you want to exit?"):
        root.destroy()
def CheckBtn_Value():
    """Sync each quantity entry with its checkbox.

    A ticked checkbox enables the matching quantity entry; an unticked one
    disables the entry and resets its quantity to zero.  Replaces twelve
    copy-pasted if/elif blocks with one data-driven loop.
    """
    rows = ((menu1chkVar, menu1Var, menu1Entry),
            (menu2chkVar, menu2Var, menu2Entry),
            (menu3chkVar, menu3Var, menu3Entry),
            (menu4chkVar, menu4Var, menu4Entry),
            (menu5chkVar, menu5Var, menu5Entry),
            (menu6chkVar, menu6Var, menu6Entry),
            (menu7chkVar, menu7Var, menu7Entry),
            (menu8chkVar, menu8Var, menu8Entry),
            (menu9chkVar, menu9Var, menu9Entry),
            (menu10chkVar, menu10Var, menu10Entry),
            (menu11chkVar, menu11Var, menu11Entry),
            (menu12chkVar, menu12Var, menu12Entry))
    for chk_var, qty_var, entry in rows:
        if chk_var.get() == 1:
            entry.configure(state=NORMAL)
        else:
            entry.configure(state=DISABLED)
            qty_var.set(0)  # IntVar: equivalent to the old set("0")
def Receipt():
    """Render an itemised bill into the receipt listbox.

    Reads the per-item quantity variables and the customer name; shows a
    warning if no name was entered.  Returns the total number of items
    ordered (AddData() records this as the "Orders" column).
    """
    m1 = menu1Var.get()
    m2 = menu2Var.get()
    m3 = menu3Var.get()
    m4 = menu4Var.get()
    m5 = menu5Var.get()
    m6 = menu6Var.get()
    m7 = menu7Var.get()
    m8 = menu8Var.get()
    m9 = menu9Var.get()
    m10 = menu10Var.get()
    m11 = menu11Var.get()
    m12 = menu12Var.get()
    totalQty = m1+m2+m3+m4+m5+m6+m7+m8+m9+m10+m11+m12
    if len(nameVar.get())==0:
        messagebox.showwarning("Cafe Bloom","Place Order & enter name to generate receipt...")
    else:
        today=date.today()
        timee = time.strftime("%H:%M")
        todayDate=today.strftime("%d/%m/%Y")
        receiptTxt.delete(0, END)
        # Pseudo-random bill number (not guaranteed unique).
        x = random.randint(10908, 500876)
        billNo = str(x)
        receiptTxt.insert(END," Cafe Bloom")
        receiptTxt.insert(END," Merced 307, Santiago 8320115 Chile")
        receiptTxt.insert(END," Phone:020-23088866")
        receiptTxt.insert(END,"------------------------------------------------------------")
        receiptTxt.insert(END," Name : "+nameVar.get())
        receiptTxt.insert(END,"Bill : "+billNo + " " +todayDate+" "+timee)
        receiptTxt.insert(END,"------------------------------------------------------------")
        receiptTxt.insert(END,"Item Qty. Cost of item")
        receiptTxt.insert(END,"------------------------------------------------------------")
        # One line per ordered item; unit prices match the menu labels.
        if m1>0:
            receiptTxt.insert(END,"Americano "+str(m1)+" "+str(m1*220))
        if m2>0:
            receiptTxt.insert(END,"Cappuccino "+str(m2)+" "+str(m2*200))
        if m3>0:
            receiptTxt.insert(END,"Espresso "+str(m3)+" "+str(m3*150))
        if m4>0:
            receiptTxt.insert(END,"Latte "+str(m4)+" "+str(m4*150))
        if m5>0:
            receiptTxt.insert(END,"Choclate "+str(m5)+" "+str(m5*140))
        if m6>0:
            receiptTxt.insert(END,"Mocha "+str(m6)+" "+str(m6*350))
        if m7>0:
            receiptTxt.insert(END,"Glace "+str(m7)+" "+str(m7*245))
        if m8>0:
            receiptTxt.insert(END,"Hot Choclate "+str(m8)+" "+str(m8*200))
        if m9>0:
            receiptTxt.insert(END,"Macchiato "+str(m9)+" "+str(m9*250))
        if m10>0:
            receiptTxt.insert(END,"Corto "+str(m10)+" "+str(m10*200))
        if m11>0:
            receiptTxt.insert(END,"Iced Coffee "+str(m11)+" "+str(m11*175))
        if m12>0:
            receiptTxt.insert(END,"Irish Coffee "+str(m12)+" "+str(m12*245))
        totalPrice=(220*m1)+(200*m2)+(150*m3)+(150*m4)+(140*m5)+(350*m6)\
            +(245*m7)+(200*m8)+(250*m9)+(200*m10)+(175*m11)+(245*m12)
        # BUGFIX: GST is 18% of the subtotal.  The old formula
        # (totalPrice*(18/100))/100 divided by 100 twice and charged 0.18%.
        gstAmt=totalPrice*18/100
        gstAmt=round(gstAmt,2)
        netPrice=int(totalPrice+gstAmt)
        receiptTxt.insert(END,"------------------------------------------------------------")
        receiptTxt.insert(END,"Total = "+str(totalPrice)+"/-")
        receiptTxt.insert(END,"GST = "+str(gstAmt))
        receiptTxt.insert(END,"Net Total = "+str(netPrice)+"/-")
        receiptTxt.insert(END,"------------------------------------------------------------")
    return (totalQty)
def Total():
    """Show only the totals (subtotal, GST, net) in the receipt listbox.

    Returns the net price (AddData() records this as the "Total" column).
    """
    m1 = menu1Var.get()
    m2 = menu2Var.get()
    m3 = menu3Var.get()
    m4 = menu4Var.get()
    m5 = menu5Var.get()
    m6 = menu6Var.get()
    m7 = menu7Var.get()
    m8 = menu8Var.get()
    m9 = menu9Var.get()
    m10 = menu10Var.get()
    m11 = menu11Var.get()
    m12 = menu12Var.get()
    totalPrice=(220*m1)+(200*m2)+(150*m3)+(150*m4)+(140*m5)+(350*m6)\
        +(245*m7)+(200*m8)+(250*m9)+(200*m10)+(175*m11)+(245*m12)
    # BUGFIX: GST is 18% of the subtotal.  The old formula
    # (totalPrice*(18/100))/100 divided by 100 twice and charged 0.18%.
    gstAmt=totalPrice*18/100
    gstAmt=round(gstAmt,2)
    netPrice=int(totalPrice+gstAmt)
    if len(nameVar.get())==0:
        messagebox.showwarning("Cafe Bloom","Place Order & enter name to generate total...")
    else:
        today=date.today()
        timee = time.strftime("%H:%M")
        todayDate=today.strftime("%d/%m/%Y")
        receiptTxt.delete(0, END)
        # Pseudo-random bill number (not guaranteed unique).
        x = random.randint(10908, 500876)
        billNo = str(x)
        receiptTxt.insert(END," Cafe Bloom")
        receiptTxt.insert(END," Merced 307, Santiago 8320115 Chile")
        receiptTxt.insert(END," "+"Phone:020-23088866")
        receiptTxt.insert(END,"-------------------------------------------------------------")
        receiptTxt.insert(END," Name : "+nameVar.get())
        receiptTxt.insert(END,"Bill : "+billNo + " " +todayDate+" "+timee)
        receiptTxt.insert(END,"-------------------------------------------------------------")
        receiptTxt.insert(END,"Total = "+str(totalPrice)+"/-")
        receiptTxt.insert(END,"GST = "+str(gstAmt))
        receiptTxt.insert(END,"Net Total = "+str(netPrice)+"/-")
    return (netPrice)
def reset():
    """Clear the whole order form: name, quantities, checkboxes, entries, receipt."""
    nameVar.set("")
    qty_vars = (menu1Var, menu2Var, menu3Var, menu4Var, menu5Var, menu6Var,
                menu7Var, menu8Var, menu9Var, menu10Var, menu11Var, menu12Var)
    chk_vars = (menu1chkVar, menu2chkVar, menu3chkVar, menu4chkVar,
                menu5chkVar, menu6chkVar, menu7chkVar, menu8chkVar,
                menu9chkVar, menu10chkVar, menu11chkVar, menu12chkVar)
    entries = (menu1Entry, menu2Entry, menu3Entry, menu4Entry, menu5Entry,
               menu6Entry, menu7Entry, menu8Entry, menu9Entry, menu10Entry,
               menu11Entry, menu12Entry)
    # Zero every quantity and untick every checkbox.
    for var in qty_vars:
        var.set(0)
    for var in chk_vars:
        var.set(0)
    # Quantity entries start out disabled until their checkbox is ticked.
    for entry in entries:
        entry.configure(state=DISABLED)
    receiptTxt.delete(0,END)
def AddData():
    """Persist the current order into the Cafe.db SQLite database."""
    if len(nameVar.get())==0:
        messagebox.showinfo("Cafe Bloom","Place Order to add data")
    else:
        ans=messagebox.askyesno("Cafe Bloom","Do you want to add data in database?")
        name=nameVar.get()
        # NOTE(review): Total()/Receipt() run even when the user answers "no",
        # and both redraw the receipt listbox as a side effect.  Receipt()
        # returns the item COUNT, which is what lands in the Orders column.
        total=Total()
        orders=Receipt()
        today=date.today()
        timee = time.strftime("%H:%M")
        dateT=today.strftime("%d/%m/%Y")
        if ans:
            cont=sqlite3.connect("Cafe.db")
            cur=cont.cursor()
            cur.execute("CREATE TABLE IF NOT EXISTS CafeData(id INTEGER PRIMARY KEY,Name text, \
Orders text,Total text,Time text,Date text)")
            # Parameterised INSERT -- safe against SQL injection.
            cur.execute("INSERT INTO CafeData VALUES(NULL,?,?,?,?,?)",(name,orders,total,timee,dateT))
            cont.commit()
            cont.close()
            messagebox.showinfo("Cafe Bloom","Data added successfully!!")
            reset()
def DisplayData():
    """Open a window listing every order stored in Cafe.db."""
    # NOTE(review): this creates a SECOND Tk root and calls mainloop() again;
    # a Toplevel window would be the conventional choice -- confirm before
    # changing, since the current code does display correctly.
    newroot = Tk()
    newroot.title("Cafe Bloom Records")
    newroot.geometry("450x300")
    y = Scrollbar(newroot,orient=VERTICAL)
    y.pack(side=RIGHT,fill=Y)
    dataview = ttk.Treeview(newroot,height=15,columns=("Column1","Column2","Column3",\
        "Column4","Column5"),yscrollcommand=y.set)
    dataview.column("#0",width=40,minwidth=30,anchor="c")
    dataview.column("Column1",width=80,minwidth=30,anchor="c")
    dataview.column("Column2",width=50,minwidth=30,anchor="c")
    dataview.column("Column3",width=60,minwidth=30,anchor="c")
    dataview.column("Column4",width=80,minwidth=30,anchor="c")
    dataview.column("Column5",width=70,minwidth=30,anchor="c")
    dataview.heading("#0",text="No.")
    dataview.heading("Column1",text="Name")
    dataview.heading("Column2",text="Orders")
    dataview.heading("Column3",text="Total")
    dataview.heading("Column4",text="Time")
    dataview.heading("Column5",text="Date")
    dataview.pack(side=TOP,fill=X)
    y.config(command=dataview.yview)
    # NOTE(review): assumes the CafeData table already exists (AddData creates
    # it); clicking Display before any order was saved raises
    # sqlite3.OperationalError.
    cont=sqlite3.connect("Cafe.db")
    cur=cont.cursor()
    cur.execute("SELECT * FROM CafeData")
    data=cur.fetchall()
    for i in data:
        dataview.insert("","end",text=str(i[0]),values=(str(i[1]),str(i[2]),str(i[3])+"/-",str(i[4]),str(i[5])))
    newroot.mainloop()
#************************************MENUCARD FRAME,HEADING AND LABELS******************************************************************
# Static layout: everything is absolutely positioned with place().
mainFrame = Frame(root,width=1000,height=650,bd=6,relief=SUNKEN,bg="#220D0B").place(x=0,y=0)
logoImage = Label(mainFrame,image=logo,bd=4,relief=SUNKEN).place(x=220,y=10)
rightcupImage = Label(mainFrame,image=rightcup,bd=2,relief=SUNKEN).place(x=745,y=13)
leftcupImage = Label(mainFrame,image=leftcup,bd=2,relief=SUNKEN).place(x=135,y=13)
menucardFrame = Frame(mainFrame,width=600,height=540,bd=6,relief=SUNKEN,bg="#bf8040",highlightthickness=2,highlightbackground="#ffcc00",highlightcolor="#ffcc00")
menucardFrame.place(x=0,y=100)
menuLbl = Label(mainFrame,text="Bloom MenuCard",font="msserif 20 bold",bg="#bf8040").place(x=0,y=110)
#**********************************************Checkbuttons******************************************************************
# Each checkbox toggles its quantity entry via CheckBtn_Value.
menu1Chk = Checkbutton(mainFrame,text="Americano",variable=menu1chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=160)
menu2Chk = Checkbutton(mainFrame,text="Cappuccino",variable=menu2chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=200)
menu3Chk = Checkbutton(mainFrame,text="Espresso",variable=menu3chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=240)
menu4Chk = Checkbutton(mainFrame,text="Latte",variable=menu4chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=280)
menu5Chk = Checkbutton(mainFrame,text="Choclate",variable=menu5chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=320)
menu6Chk = Checkbutton(mainFrame,text="Mocha",variable=menu6chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=360)
menu7Chk = Checkbutton(mainFrame,text="Glace",variable=menu7chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=400)
menu8Chk = Checkbutton(mainFrame,text="Hot Choclate",variable=menu8chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=440)
menu9Chk = Checkbutton(mainFrame,text="Macchiato",variable=menu9chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=480)
menu10Chk = Checkbutton(mainFrame,text="Corto",variable=menu10chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=520)
menu11Chk = Checkbutton(mainFrame,text="Iced Coffee",variable=menu11chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=560)
menu12Chk = Checkbutton(mainFrame,text="Irish Coffee",variable=menu12chkVar,font="msserif 17 bold",bg="#bf8040",onvalue=1,offvalue=0,command=CheckBtn_Value).place(x=30,y=600)
#************************************ENTRIES************************************************************************
# Quantity entries start DISABLED; CheckBtn_Value enables them on demand.
menu1Entry = Entry(mainFrame,textvariable=menu1Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu1Entry.place(x=500,y=160)
menu2Entry = Entry(mainFrame,textvar=menu2Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu2Entry.place(x=500,y=200)
menu3Entry = Entry(mainFrame,textvar=menu3Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu3Entry.place(x=500,y=240)
menu4Entry = Entry(mainFrame,textvar=menu4Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu4Entry.place(x=500,y=280)
menu5Entry = Entry(mainFrame,textvar=menu5Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu5Entry.place(x=500,y=320)
menu6Entry = Entry(mainFrame,textvar=menu6Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu6Entry.place(x=500,y=360)
menu7Entry = Entry(mainFrame,textvar=menu7Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu7Entry.place(x=500,y=400)
menu8Entry = Entry(mainFrame,textvar=menu8Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu8Entry.place(x=500,y=440)
menu9Entry = Entry(mainFrame,textvar=menu9Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu9Entry.place(x=500,y=480)
menu10Entry = Entry(mainFrame,textvar=menu10Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu10Entry.place(x=500,y=520)
menu11Entry = Entry(mainFrame,textvar=menu11Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu11Entry.place(x=500,y=560)
menu12Entry = Entry(mainFrame,textvar=menu12Var,width=4,bg="#bf8040",font="verdana 16 bold",bd=4,relief=RAISED,state=DISABLED)
menu12Entry.place(x=500,y=600)
#*************************************PRICE LABELS******************************************************************
# Display-only labels; the actual prices live in Receipt()/Total().
menu1Price = Label(mainFrame,text="---------------------220/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=160)
menu2Price = Label(mainFrame,text="---------------------200/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=200)
menu3Price = Label(mainFrame,text="---------------------150/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=240)
menu4Price = Label(mainFrame,text="---------------------150/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=280)
menu5Price = Label(mainFrame,text="---------------------140/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=320)
menu6Price = Label(mainFrame,text="---------------------350/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=360)
menu7Price = Label(mainFrame,text="---------------------245/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=400)
menu8Price = Label(mainFrame,text="-------------------200/-",font="msserif 16 bold italic",bg="#bf8040").place(x=250,y=440)
menu9Price = Label(mainFrame,text="---------------------250/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=480)
menu10Price = Label(mainFrame,text="---------------------200/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=520)
menu11Price = Label(mainFrame,text="---------------------175/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=560)
menu12Price = Label(mainFrame,text="---------------------245/-",font="msserif 16 bold italic",bg="#bf8040").place(x=230,y=600)
#************************************RECEIPT FRAME,LABEL and TEXT*******************************************************************
receiptFrame = Frame(mainFrame,width=385,height=40,bd=6,relief=SUNKEN,bg="#bf8040",highlightthickness=2,highlightbackground="#ffcc00",highlightcolor="#ffcc00")
receiptFrame.place(x=603,y=135)
nameLbl = Label(mainFrame,text="Name : ",font="msserif 16 bold",fg="white",bg="#220D0B").place(x=605,y=102)
nameEntry = Entry(mainFrame,textvar=nameVar,width=15,font="verdana 13 bold",bd=2,relief=RAISED)
nameEntry.place(x=705,y=102)
yscrollbar = Scrollbar(receiptFrame,orient=VERTICAL)
yscrollbar.pack(side=RIGHT,fill=Y)
xscrollbar = Scrollbar(receiptFrame,orient=HORIZONTAL)
xscrollbar.pack(side=BOTTOM,fill=X)
# The "receipt" is a Listbox: one inserted string per printed line.
receiptTxt = Listbox(receiptFrame,height=17,width=40,yscrollcommand=yscrollbar.set,\
    font="verdana 10 bold",xscrollcommand=xscrollbar.set)
receiptTxt.pack()
yscrollbar.config(command=receiptTxt.yview)
xscrollbar.config(command=receiptTxt.xview)
#**********************************BUTTONS****************************************************************************
totalBtn = Button(mainFrame,text="Total",width=11,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=Total)
totalBtn.place(x=603,y=480)
receiptBtn = Button(mainFrame,text="Receipt",width=10,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=Receipt)
receiptBtn.place(x=805,y=480)
resetBtn = Button(mainFrame,text="Reset",width=11,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=reset)
resetBtn.place(x=603,y=530)
exitBtn = Button(mainFrame,text="Exit",width=10,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=exit)
exitBtn.place(x=805,y=530)
addDataBtn = Button(mainFrame,text="Add",width=11,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=AddData)
addDataBtn.place(x=603,y=580)
displayDataBtn = Button(mainFrame,text="Display",width=10,font="verdana 16 bold",bg="#bf8040",bd=4,relief=RAISED,\
    highlightthickness=3,highlightbackground="#ffcc00",highlightcolor="#ffcc00",command=DisplayData)
displayDataBtn.place(x=805,y=580)
#*********************************************************************************************************************
root.mainloop()
|
python
|
def get_version():
    """Return the package version string (semantic versioning)."""
    version = "1.0.0"
    return version
|
python
|
import pygame
import time
import random
pygame.init()
# Palette (RGB tuples).
white = (255,255,255)
black = (0,0,0)
red =(200,0,0)
light_red = (255,0,0)
yellow = (200,200,0)
light_yellow = (255,255,0)
green = (34,177,76)
light_green = (0,255,0)
# Window dimensions in pixels.
display_width = 800
display_height = 600
clock = pygame.time.Clock()
gameDisplay = pygame.display.set_mode((display_width,display_height))
pygame.display.set_caption("3d")
# Fonts at three sizes (small/medium/large).
smallfont = pygame.font.SysFont("comicsansms", 25)
medfont = pygame.font.SysFont("comicsansms", 50)
largefont = pygame.font.SysFont("comicsansms", 85)
# Frame-rate cap used by clock.tick() in the game loop.
FPS = 30
def square(startPoint, fullSize):
    """Draw a white wireframe square with highlighted corner nodes.

    startPoint -- [x, y] of the top-left corner
    fullSize   -- edge length in pixels
    """
    x0, y0 = startPoint[0], startPoint[1]
    top_left = [x0, y0]
    top_right = [x0 + fullSize, y0]
    bottom_left = [x0, y0 + fullSize]
    bottom_right = [x0 + fullSize, y0 + fullSize]
    # Edges, in the original top/bottom/left/right draw order.
    for a, b in ((top_left, top_right),
                 (bottom_left, bottom_right),
                 (top_left, bottom_left),
                 (top_right, bottom_right)):
        pygame.draw.line(gameDisplay, white, a, b)
    # Corner markers.
    for node in (top_left, top_right, bottom_left, bottom_right):
        pygame.draw.circle(gameDisplay, light_green, node, 5)
def gameLoop():
    """Main loop: move a wireframe square left/right with the arrow keys."""
    location = [300,200]   # top-left corner of the square
    size = 200             # edge length in pixels
    current_move = 0       # horizontal velocity, pixels per frame
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                quit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    current_move = -5
                if event.key == pygame.K_RIGHT:
                    current_move = 5
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                    current_move = 0
                # NOTE(review): UP/DOWN never set current_move, so this branch
                # only redundantly resets it -- presumably vertical motion was
                # planned; confirm.
                if event.key == pygame.K_UP or event.key == pygame.K_DOWN:
                    current_move = 0
        gameDisplay.fill(black)
        location[0] += current_move
        square(location, size)
        pygame.display.update()
        clock.tick(FPS)

# Start the game immediately when the script runs.
gameLoop()
|
python
|
# Problem: https://www.hackerrank.com/challenges/apple-and-orange/problem
# Score: 10.0

# House span [s, t]; apple tree at a, orange tree at b; m apples, n oranges.
s, t = map(int, input().rstrip().split())
a, b = map(int, input().rstrip().split())
m, n = map(int, input().rstrip().split())  # counts are implied by the lists below

apples = [int(v) for v in input().rstrip().split()]
oranges = [int(v) for v in input().rstrip().split()]

# A fruit landing at tree + offset hits the house when s <= position <= t.
print(sum(1 for d in apples if s <= a + d <= t))
print(sum(1 for d in oranges if s <= b + d <= t))
|
python
|
from flask import Flask, jsonify, request
from Chem_Faiss import pipeline
import Chem_Faiss

app = Flask(__name__)

# Load the pre-built similarity-search index and the reference molecule set
# once, at import time.
searcher = pipeline()
searcher.load_pipeline('sample')
mols = Chem_Faiss.load_sdf('molecules.sdf')

@app.route('/query', methods = ['GET','POST'])
def query():
    """Return the 2 nearest molecules (as SMILES) to the JSON body's 'SMILES' key."""
    d = request.get_json()
    s = d['SMILES']
    # Debug traces -- consider switching to logging in production.
    print(s)
    print(request.form)
    I = searcher.make_query_smiles(q = s, k = 2)
    smiles = Chem_Faiss.idx_to_smiles(mols, I)
    return jsonify({
        'result' : smiles
    })

# Blocking call: starts Flask's development server.
app.run()
|
python
|
from typing import Optional
from _custom_constants import *
import miscellaneous
import random
# defs is a package which claims to export all constants and some JavaScript objects, but in reality does
# nothing. This is useful mainly when using an editor like PyCharm, so that it 'knows' that things like Object, Creep,
# Game, etc. do exist.
from defs import *
# These are currently required for Transcrypt in order to use the following names in JavaScript.
# Without the 'noalias' pragma, each of the following would be translated into something like 'py_Infinity' or
# 'py_keys' in the output file.
__pragma__('noalias', 'name')
__pragma__('noalias', 'undefined')
__pragma__('noalias', 'Infinity')
__pragma__('noalias', 'keys')
__pragma__('noalias', 'get')
__pragma__('noalias', 'set')
__pragma__('noalias', 'type')
__pragma__('noalias', 'update')
"""
원래 메인에 있던 방별 메모리 갱신을 편의상 이곳으로 옮김.
"""
# Pull the memory of every visible room in one pass.
# Pre-scans resources, structure status, friend/foe classification, etc.
# (Comments translated from Korean.)
def init_memory():
    """
    Memory entries that should be extracted:
    1. Everything in the room:
        - structures
    :return:
    """
    for room_name in Object.keys(Game.rooms):
        chambro = Game.rooms[room_name]
        # NOTE(review): most of the locals below are gathered but never used
        # and room_info stays empty -- this function looks like work in
        # progress; confirm intended contents before relying on the return.
        all_structures = chambro.find(FIND_STRUCTURES)
        room_creeps = chambro.find(FIND_MY_CREEPS)
        malsana_amikoj = _.filter(room_creeps, lambda c: c.hits < c.hitsMax)
        hostile_constructions = chambro.find(FIND_HOSTILE_CONSTRUCTION_SITES)
        my_constructions = chambro.find(FIND_MY_CONSTRUCTION_SITES)
        dropped_all = chambro.find(FIND_DROPPED_RESOURCES)
        tombs = chambro.find(FIND_TOMBSTONES)
        ruins = chambro.find(FIND_RUINS)
        # Treat non-empty ruins and tombstones as pick-up targets too.
        if ruins:
            for r in ruins:
                if _.sum(r.store) > 0:
                    dropped_all.append(r)
        if tombs:
            for t in tombs:
                if _.sum(t.store) > 0:
                    dropped_all.append(t)
        # Filtering seems to mutate the list... hence re-fetched here.
        foreign_creeps = chambro.find(FIND_HOSTILE_CREEPS)
        nukes = chambro.find(FIND_NUKES)
        # [[all hostiles], [hostile NPCs], [hostile players], [allies]]
        friends_and_foes = miscellaneous.filter_friend_foe(foreign_creeps)
        # init. list
        hostile_creeps = friends_and_foes[0]
        hostile_human = friends_and_foes[2]
        allied_creeps = friends_and_foes[3]
        # Initialise.
        terminal_capacity = 0
        # Minimum energy to keep in this room's terminal.
        # todo temporary value -- change later
        if chambro.controller:
            if chambro.terminal and chambro.controller.level < 8:
                terminal_capacity = 1000
            else:
                terminal_capacity = 10000
        # Everything should eventually be collected in here.
        room_info = {}
    return room_info
# 본진 관련
def refresh_base_stats(chambro: Room, all_structures, fix_rating,
                       min_wall: Optional[StructureRampart, StructureWall], spawns):
    """
    Refresh the per-room memory status.

    Handles the slowly-changing facts about a room: source/mineral ids,
    keeper lairs, and (for owned rooms) tower/link/container/lab lists,
    repair thresholds and link/container role classification.
    :param chambro: the room object
    :param all_structures: all structures in the room
    :param fix_rating: repair rating (unit) for the room
    :param min_wall: the wall/rampart with the lowest hits in the room
    :param spawns: all spawns in the room
    :return:
    """
    creeps_memory = []
    if Memory.creeps:
        for c in Object.keys(Memory.creeps):
            creeps_memory.append(Memory.creeps[c])
    distance_to_controller = 5
    # Refresh the room's structure/source status.
    structure_cpu = Game.cpu.getUsed()
    # If the room is not mine but carries owner-style memory, wipe it.
    # todo: collect the basic facts about the room.
    if chambro.controller and not chambro.controller.my and chambro.memory[options]:
        chambro.memory = {}
    # Record the sources and minerals present in the room.
    if not chambro.memory[resources] or chambro.memory.options and chambro.memory.options.reset:
        room_sources = chambro.find(FIND_SOURCES)
        room_minerals = chambro.find(FIND_MINERALS)
        chambro.memory[resources] = {energy: [], minerals: []}
        for rs in room_sources:
            chambro.memory[resources][energy].append(rs.id)
        for rm in room_minerals:
            chambro.memory[resources][minerals].append(rm.id)
        del room_sources
    # Check whether this room contains source keeper lairs.
    if not chambro.memory[STRUCTURE_KEEPER_LAIR]:
        chambro.memory[STRUCTURE_KEEPER_LAIR] = []
        room_str = chambro.find(FIND_STRUCTURES)
        for s in room_str:
            if s.structureType == STRUCTURE_KEEPER_LAIR:
                # bugfix: previously appended to chambro.memory[keeper],
                # which is never initialized; use the key initialized above.
                chambro.memory[STRUCTURE_KEEPER_LAIR].append(s.id)
    # Is this one of my own rooms?
    if chambro.controller and chambro.controller.my:
        # (timing for this section is measured via structure_cpu above)
        # Initialize the structure lists.
        if not chambro.memory[STRUCTURE_TOWER] or chambro.memory.options.reset:
            chambro.memory[STRUCTURE_TOWER] = []
        if not chambro.memory[STRUCTURE_LINK] or chambro.memory.options.reset:
            chambro.memory[STRUCTURE_LINK] = {}
        if not chambro.memory[STRUCTURE_CONTAINER] or chambro.memory.options.reset:
            chambro.memory[STRUCTURE_CONTAINER] = []
        if not chambro.memory[STRUCTURE_LAB] or chambro.memory.options.reset:
            chambro.memory[STRUCTURE_LAB] = []
        # When level 8 is reached, previously assigned roles (upgrading etc.)
        # must be reset; room_lvl exists to detect that transition.
        if not chambro.memory[room_lvl] or chambro.memory.options.reset:
            chambro.memory[room_lvl] = 1
        # Remember the previous level for the comparison below.
        past_lvl = chambro.memory[room_lvl]
        chambro.memory[room_lvl] = chambro.controller.level
        # Shorthand for the room's storage object.
        room_storage = chambro.storage
        # If storage is low on energy while labs are set to be filled, stop.
        if chambro.memory[options].fill_labs and room_storage and room_storage.store.energy < 2000:
            chambro.memory[options].fill_labs = 0
        # Conversely, if storage is full and labs are not being filled, start.
        if not chambro.memory[options].fill_labs and room_storage \
                and room_storage.storeCapacity - _.sum(room_storage.store) < chambro.memory[options][max_energy]:
            chambro.memory[options].fill_labs = 1
        # If the storage is full and repair capacity remains, raise the
        # repair level one step.
        # max_energy semantics: this triggers when the free space left in
        # storage drops below max_energy (the name is historical).
        if room_storage \
                and room_storage.store.getCapacity() \
                - room_storage.store.getUsedCapacity() < chambro.memory[options][max_energy] \
                and (not min_wall or min_wall.hits > chambro.memory[options][repair] * fix_rating) \
                and chambro.memory[options][repair] < 300 \
                and chambro.controller.level == 8:
            chambro.memory[options][repair] += 1
        # No wall needs repairing: refresh the last-checked timestamp.
        elif not min_wall or min_wall.hits > chambro.memory[options][repair] * fix_rating:
            chambro.memory[options][stop_fixer] = Game.time
        # If the weakest wall dropped far below the repair level, lower the
        # repair level to match.
        elif min_wall.hits // fix_rating < chambro.memory[options][repair] - 1:
            chambro.memory[options][repair] = min_wall.hits // fix_rating + 1
            # Also reset the fixer count to a single fixer.
            chambro.memory[options][stop_fixer] = Game.time - 400
        # Below level 8 the fixer counter must not exceed 1000 ticks;
        # upgrading is more important.
        elif chambro.controller.level < 8 and Game.time - chambro.memory[options][stop_fixer] >= 1000:
            chambro.memory[options][stop_fixer] = Game.time - 500
        # Full re-scans every tick are wasteful; only rebuild when counts differ.
        # Count the towers.
        str_towers = _.filter(all_structures, lambda s: s.structureType == STRUCTURE_TOWER)
        if not len(str_towers) == len(chambro.memory[STRUCTURE_TOWER]):
            chambro.memory[STRUCTURE_TOWER] = []
            for stt in str_towers:
                chambro.memory[STRUCTURE_TOWER].push(stt.id)
        # Add links; same principle as above.
        # todo: distance-based role checks (upgrade target etc.) belong here only!
        str_links = _.filter(all_structures, lambda s: s.structureType == STRUCTURE_LINK)
        if not len(str_links) == len(chambro.memory[STRUCTURE_LINK]) or not past_lvl == chambro.memory[room_lvl]:
            chambro.memory[STRUCTURE_LINK] = {}
            range_required = 5
            # A link does not send when a controller/spawn/storage lies
            # within range_required of it.
            storage_points = _.filter(all_structures, lambda s: s.structureType == STRUCTURE_STORAGE
                                      or s.structureType == STRUCTURE_SPAWN
                                      or s.structureType == STRUCTURE_TERMINAL)
            # There are two kinds of links: senders and receivers.
            for stl in str_links:
                # 0 means a sending link.
                _store = 0
                # 1 means the link feeds the upgraders.
                _upgrade = 0
                # Closest of storage_points to the link.
                closest_storage = stl.pos.findClosestByPath(storage_points, {ignoreCreeps: True})
                # Path length from the link to that closest storage point.
                path_to_closest_storage = stl.pos.findPathTo(closest_storage, {ignoreCreeps: True})
                # Path from the link to the controller.
                path_to_controller = stl.pos.findPathTo(chambro.controller, {'ignoreCreeps': True, 'range': 3})
                # If either distance qualifies, treat it as a store link.
                if len(path_to_closest_storage) <= range_required or len(path_to_controller) <= range_required:
                    _store = 1
                # At room level 8 a link that is only near the controller is
                # no longer used for storing.
                if chambro.controller.level == 8 and _store and not len(path_to_closest_storage) <= range_required:
                    _store = 0
                # For store links below level 8, check whether the link is an
                # upgrader feeder (controller closer than storage).
                if _store and not chambro.controller.level == 8:
                    if len(path_to_controller) < len(path_to_closest_storage):
                        _upgrade = 1
                # Register the link.
                chambro.memory[STRUCTURE_LINK][stl.id] \
                    = {'id': stl.id, for_upgrade: _upgrade, for_store: _store, received_time: 1}
            random.shuffle(chambro.memory[STRUCTURE_LINK])
        # Containers.
        str_cont = _.filter(all_structures, lambda s: s.structureType == STRUCTURE_CONTAINER)
        # if not len(str_cont) == len(chambro.memory[STRUCTURE_CONTAINER]):
        if True:
            chambro.memory[STRUCTURE_CONTAINER] = []
            # Containers come in two kinds:
            # harvest buffers (for_harvest) and upgrader feeders (for_upgrade).
            # The kind decides whether to fill it and how full it may get
            # before adding a new hauler.
            # A harvest buffer simply sits near (within 4 of) a source;
            # carriers are of course the opposite.
            # An upgrader feeder sits near the controller.
            for stc in str_cont:
                # Harvest buffer? 1 if yes.
                # 0 means it is surplus once room upgrading is done.
                _harvest = 0
                # Upgrader feeder?
                _upgrade = 0
                room_sources = []
                for e in chambro.memory[resources][energy]:
                    room_sources.append(Game.getObjectById(e))
                for e in chambro.memory[resources][minerals]:
                    room_sources.append(Game.getObjectById(e))
                for rs in room_sources:
                    # Is a source within 4 tiles of the container?
                    if len(stc.pos.findPathTo(rs, {'ignoreCreeps': True})) <= 4:
                        # Then this container is a harvest buffer.
                        _harvest = 1
                        break
                # Carriers also affect _harvest: any container stored in a
                # carrier's haul destinations counts as a harvest buffer too.
                carrier_mem = _.filter(creeps_memory, lambda c: c.role == 'carrier' and len(c.haul_destos))
                for c in carrier_mem:
                    for td in c.haul_destos:
                        if stc.id == td.id:
                            _harvest = 1
                            break
                    if _harvest == 1:
                        break
                # Now check for the upgrader-feeder role. Room below level 8?
                if chambro.controller.level < 8:
                    # Is the container closer to the controller than to any
                    # spawn or storage, and within distance_to_controller?
                    # Distance between the container and the controller.
                    controller_dist = \
                        len(stc.pos.findPathTo(chambro.controller, {'ignoreCreeps': True, 'range': 3}))
                    # Closest spawn to the container.
                    closest_spawn = stc.pos.findClosestByPath(spawns, {'ignoreCreeps': True})
                    # Distance to that closest spawn.
                    closest_spawn_dist = len(stc.pos.findPathTo(closest_spawn, {'ignoreCreeps': True}))
                    # Measure the storage distance too, if one exists.
                    closest_storage_dist = 1000
                    if room_storage:
                        closest_storage_dist = len(stc.pos.findPathTo(room_storage, {'ignoreCreeps': True}))
                    # Classify as upgrader feeder when within
                    # distance_to_controller of the controller and closer to
                    # it than to spawn/storage; ignored until storage exists.
                    if room_storage and controller_dist <= distance_to_controller and \
                            controller_dist < closest_storage_dist and controller_dist < closest_spawn_dist:
                        _upgrade = 1
                        print('x{}y{}에 {}, 업글컨테이너로 분류'.format(stc.pos.x, stc.pos.y, stc.id))
                chambro.memory[STRUCTURE_CONTAINER] \
                    .append({'id': stc.id, for_upgrade: _upgrade, for_harvest: _harvest})
            random.shuffle(chambro.memory[STRUCTURE_CONTAINER])
        # todo: labs.
        # Labs are untouched until level 8 and only scanned once all 10 exist.
        # (A commented-out draft that classified labs into working labs,
        # product-receiving labs and boost-storage labs was removed here;
        # reimplement when lab logic is actually used.)
    # Reaching here means the room is not mine.
    else:
        # Things needed for a non-owned room:
        # 1. resource locations - already recorded above
        # 2. container locations
        pass
    if Memory.debug or chambro.controller and chambro.controller.my and chambro.memory.options.reset:
        print('{}방 메모리에 건물현황 갱신하는데 {}CPU 소모'
              .format(chambro.name, round(Game.cpu.getUsed() - structure_cpu, 2)))
    chambro.memory.options.reset = 0
|
python
|
import os
import sys

# Make the repository root importable BEFORE loading the CM modules below —
# previously the imports ran first and would fail when the package was not
# already on sys.path.
path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if path not in sys.path:
    sys.path.append(path)

from CM.CM_TUW40.f2_investment import dh_demand
from CM.CM_TUW40.f3_coherent_areas import distribuition_costs
def main(P, OFP):
    """Run the two calculation-module steps in sequence.

    :param P: parameter/input object, passed through to both steps
    :param OFP: output file path(s) object, passed through to both steps
    """
    # f2: calculate pixel based values
    dh_demand(P, OFP)
    # f3: Determination of coherent areas based on the distribution grid cost
    # ceiling and available capital for investment.
    distribuition_costs(P, OFP)
|
python
|
import os
import requests
import time
import random
import jieba
import matplotlib.pyplot as plt
from fake_useragent import UserAgent
from wordcloud import WordCloud, ImageColorGenerator
def get_response(user_agent, proxy_list, product_id, page):
    """Fetch one page of JD product comments and return the parsed JSON.

    :param user_agent: User-Agent header string
    :param proxy_list: list of proxy addresses; currently unused (proxy
        support below is kept commented out for reference)
    :param product_id: JD product id
    :param page: zero-based comment page index
    :return: decoded JSON response (dict)
    """
    url = 'https://sclub.jd.com/comment/productPageComments.action'
    data = {
        'productId': product_id,
        'score': '0',
        'sortType': '5',
        'page': page,
        'pageSize': '10',
        'isShadowSku': '0',
        'rid': '0',
        'fold': '1',
    }
    headers = {
        # JD checks the Referer against the product page.
        'Referer': 'https://item.jd.com/{}.html'.format(product_id),
        'User-Agent': user_agent,
    }
    # Proxy support (disabled): pick a random proxy per request.
    # proxies = {
    #     'http': 'http://{}'.format(random.choice(proxy_list)),
    #     'https': 'https://{}'.format(random.choice(proxy_list)),
    # }
    # response = requests.get(url, params=data, proxies=proxies, headers=headers, timeout=5)
    # Explicit timeout so a stalled connection cannot hang the crawl forever.
    response = requests.get(url, params=data, headers=headers, timeout=10)
    return response.json()
def spider_jd_comments(user_agent):
    """Crawl the first 10 comment pages of a JD product into a text file.

    :param user_agent: User-Agent header string forwarded to get_response
    :return: path of the output file (one comment per line)
    """
    product_id = 50704019883  # product id
    file_path = '{}_comments.txt'.format(product_id)
    # Start from a clean file on every run.
    if os.path.exists(file_path):
        os.remove(file_path)
    # Proxy pool (disabled).
    proxy_list = []
    # for i in range(20):
    #     proxy = requests.get('http://127.0.0.1:5555/random').text
    #     if proxy not in proxy_list:
    #         proxy_list.append(proxy)
    # Crawl the first 10 comment pages.
    for page in range(10):
        ret = get_response(user_agent, proxy_list, product_id, page)
        # Throttled/blocked responses may lack 'comments' — skip the page
        # instead of raising KeyError.
        content_list = ret.get('comments', []) if ret else []
        with open(file_path, mode='a', encoding='utf-8') as fp:
            for content in content_list:
                fp.write(content['content'].strip() + '\n')
                print(content['content'].strip())
        # Random delay to stay under anti-scraping limits.
        time.sleep(random.random() * 3)
    return file_path
def get_content(file_path):
    """Read the comment file and return its jieba-segmented text.

    Each non-blank line is tokenized in precise mode and the tokens are
    joined with spaces; the per-line results are concatenated.
    """
    pieces = []
    with open(file_path, mode='r', encoding='utf-8') as fp:
        for line in fp.readlines():
            if not line.strip():
                continue
            # jieba precise mode
            pieces.append(' '.join(jieba.cut(line, cut_all=False)))
    return ''.join(pieces)
def build_word_cloud(content):
    """Build a word cloud from *content*, shaped and colored by the
    background image."""
    # load the mask/background image
    mask_img = plt.imread('./wawa.jpg')
    cloud = WordCloud(
        background_color='white',
        mask=mask_img,
        font_path='./simhei.ttf'
    )
    # generate the cloud text, then recolor it using colors sampled
    # from the background image
    word_cloud = cloud.generate(content)
    word_cloud.recolor(color_func=ImageColorGenerator(mask_img))
    return word_cloud
def show_cloud(file_path):
    """Display the word cloud built from the comments file."""
    cloud = build_word_cloud(get_content(file_path))
    # plt.imshow(cloud, interpolation="bilinear")
    plt.imshow(cloud)
    plt.axis('off')
    plt.show()
def main():
    """Crawl the comments with a fixed desktop User-Agent, then render
    and display the word cloud."""
    # user_agent = UserAgent()  # random-UA alternative
    ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36'
    comments_file = spider_jd_comments(ua)
    # build and show the word cloud
    show_cloud(comments_file)


if __name__ == '__main__':
    main()
|
python
|
####
####
####
#### Python Pocket Primer -
#### Exercises for chapter two
####
####
####
import sys
def wordPlay(list):
    """Print a dict mapping each word that starts or ends with a vowel to
    its number of occurrences in the input sentence.

    :param list: space-separated string of words (parameter name kept for
        backward compatibility; note it shadows the builtin ``list``)
    """
    vowels = {'a', 'e', 'i', 'o', 'u'}
    words = list.split(' ')
    # Words whose first or last letter is a vowel. Duplicates are kept so
    # the printed dict preserves the original insertion order; the `w and`
    # guard avoids an IndexError on empty words (e.g. double spaces).
    v_words = [w for w in words if w and (w[0] in vowels or w[-1] in vowels)]
    print({w: words.count(w) for w in v_words})
def wordCount(word):
    """Print every non-empty prefix of each word shorter than 5 letters."""
    for token in word.split(' '):
        if len(token) >= 5:
            continue
        for end in range(1, len(token) + 1):
            print(token[:end])


wordCount('hello my name something or other')
|
python
|
from .paac import PAAC
from .base_runner import BaseRunner
from .single_runner import SingleRunner
from .paac_runner import PAACRunner
from .eval_runner import EvalRunner

# Public API: export every name imported above — previously PAAC and
# EvalRunner were imported but missing from __all__, so star-imports
# silently dropped them.
__all__ = ["PAAC", "BaseRunner", "SingleRunner", "PAACRunner", "EvalRunner"]
|
python
|
#!/usr/bin/env python
"""
create tilespecs from TEMCA metadata file
"""
import json
import os
import numpy
import renderapi
from asap.module.render_module import (
StackOutputModule, RenderModuleException)
from asap.dataimport.schemas import (GenerateEMTileSpecsOutput,
GenerateEMTileSpecsParameters)
from asap.utilities import uri_utils
# Example input for running this module directly; mirrors the fields
# validated by GenerateEMTileSpecsParameters (render connection info,
# TEMCA metadata file, target stack and options).
example_input = {
    "render": {
        "host": "em-131fs",
        "port": 8080,
        "owner": "russelt",
        "project": "RENDERAPI_TEST",
        "client_scripts": (
            "/allen/programs/celltypes/workgroups/"
            "em-connectomics/russelt/render_mc.old/render-ws-java-client/"
            "src/main/scripts")},
    "metafile": "/allen/programs/celltypes/workgroups/em-connectomics/data/workflow_test_sqmm/001050/0/_metadata_20170829130146_295434_5LC_0064_01_redo_001050_0_.json",
    "stack": "TEST_IMPORT_FROMMD",
    "overwrite_zlayer": True,
    "pool_size": 10,
    "close_stack": True,
    "z_index": 1
}
class GenerateEMTileSpecsModule(StackOutputModule):
    """Module that converts a TEMCA metadata file into render TileSpecs
    and writes them to a render stack."""
    # schema classes validating the module's input/output dictionaries
    default_schema = GenerateEMTileSpecsParameters
    default_output_schema = GenerateEMTileSpecsOutput
    @staticmethod
    def image_coords_from_stage(stage_coords, resX, resY, rotation):
        """Map stage coordinates to integer image coordinates: scale each
        axis by its resolution, then rotate by *rotation* (radians)."""
        cr = numpy.cos(rotation)
        sr = numpy.sin(rotation)
        x = stage_coords[0] / resX
        y = stage_coords[1] / resY
        return (int(x * cr + y * sr),
                int(-x * sr + y * cr))
    def tileId_from_basename(self, fname):
        """Build a tileId from the image file's basename and the first
        configured z value."""
        return '{bname}.{z}'.format(
            bname=os.path.splitext(os.path.basename(fname))[0],
            z=str(float(self.zValues[0])))
    @staticmethod
    def sectionId_from_z(z):
        """Return the canonical sectionId for a z value (float as string)."""
        return str(float(z))
    def ts_from_imgdata(self, imgdata, imgprefix, x, y,
                        minint=0, maxint=255, maskUrl=None,
                        width=3840, height=3840, z=None, sectionId=None,
                        scopeId=None, cameraId=None, pixelsize=None):
        """Build a renderapi TileSpec for a single image record.

        *imgdata* is a dict with 'img_path' and 'img_meta' (raster_pos,
        stage_pos, angle); *imgprefix* is the URI prefix joined with the
        image path; *x*, *y* give the affine translation in pixels.
        """
        tileId = self.tileId_from_basename(imgdata['img_path'])
        sectionId = (self.sectionId_from_z(z) if sectionId is None
                     else sectionId)
        raw_tforms = [renderapi.transform.AffineModel(B0=x, B1=y)]
        imageUrl = uri_utils.uri_join(imgprefix, imgdata['img_path'])
        ip = renderapi.image_pyramid.ImagePyramid()
        # level-0 mipmap holds the full-resolution image (+ optional mask)
        ip[0] = renderapi.image_pyramid.MipMap(imageUrl=imageUrl,
                                               maskUrl=maskUrl)
        return renderapi.tilespec.TileSpec(
            tileId=tileId, z=z,
            width=width, height=height,
            minint=minint, maxint=maxint,
            tforms=raw_tforms,
            imagePyramid=ip,
            sectionId=sectionId, scopeId=scopeId, cameraId=cameraId,
            imageCol=imgdata['img_meta']['raster_pos'][0],
            imageRow=imgdata['img_meta']['raster_pos'][1],
            stageX=imgdata['img_meta']['stage_pos'][0],
            stageY=imgdata['img_meta']['stage_pos'][1],
            rotation=imgdata['img_meta']['angle'], pixelsize=pixelsize)
    def run(self):
        """Read the metadata file, build one TileSpec per image (translated
        so the minimum image coordinate becomes the origin) and output the
        specs to the configured stack."""
        meta = json.loads(uri_utils.uri_readbytes(self.args['metafile_uri']))
        roidata = meta[0]['metadata']
        imgdata = meta[1]['data']
        img_coords = {img['img_path']: self.image_coords_from_stage(
            img['img_meta']['stage_pos'],
            img['img_meta']['pixel_size_x_move'],
            img['img_meta']['pixel_size_y_move'],
            numpy.radians(img['img_meta']['angle'])) for img in imgdata}
        if not imgdata:
            raise RenderModuleException(
                "No relevant image metadata found for metadata at {}".format(
                    self.args['metafile_uri']))
        minX, minY = numpy.min(numpy.array(list(img_coords.values())), axis=0)
        # assume isotropic pixels
        pixelsize = roidata['calibration']['highmag']['x_nm_per_pix']
        # image prefix defaults to the directory of the metadata file
        imgdir = self.args.get(
            'image_prefix',
            uri_utils.uri_prefix(self.args['metafile_uri']))
        tspecs = [
            self.ts_from_imgdata(
                img, imgdir,
                img_coords[img['img_path']][0] - minX,
                img_coords[img['img_path']][1] - minY,
                minint=self.args['minimum_intensity'],
                maxint=self.args['maximum_intensity'],
                width=roidata['camera_info']['width'],
                height=roidata['camera_info']['height'],
                z=self.zValues[0], sectionId=self.args.get('sectionId'),
                scopeId=roidata['temca_id'],
                cameraId=roidata['camera_info']['camera_id'],
                pixelsize=pixelsize,
                maskUrl=self.args['maskUrl_uri']) for img in imgdata]
        self.output_tilespecs_to_stack(tspecs)
        try:
            self.output({'stack': self.output_stack})
        except AttributeError as e:
            # output attribute may be absent; log instead of failing the run
            self.logger.error(e)
if __name__ == "__main__":
mod = GenerateEMTileSpecsModule()
mod.run()
|
python
|
# -*- coding: utf-8 -*-
import operator
import six
from sage.arith.misc import GCD
from sage.combinat.q_analogues import q_int
from sage.functions.generalized import sgn
from sage.functions.log import log
from sage.functions.other import ceil
from sage.functions.other import floor
from sage.functions.other import sqrt
from sage.functions.trig import cos
from sage.matrix.constructor import Matrix
from sage.rings.finite_rings.integer_mod_ring import Integers
from sage.rings.integer import Integer
from sage.rings.infinity import Infinity
from sage.sets.real_set import RealSet
from sage.symbolic.constants import pi
from sage.symbolic.expression import Expression
from sage.symbolic.relation import solve as _solve
from sage.symbolic.ring import SR
from .array3d import Array3D
from .assoc_scheme import ASParameters
from .assoc_scheme import PolyASParameters
from .aux import InfeasibleError
from .coefflist import CoefficientList
from .find import find
from .nonex import checkConditions
from .nonex import classicalFamilies
from .nonex import families
from .nonex import sporadic
from .partition import PartitionGraph
from .util import checklist
from .util import checkNonneg
from .util import checkPos
from .util import checkPrimePower
from .util import eigenvalue_interval
from .util import full_simplify
from .util import hard_ceiling
from .util import hard_floor
from .util import integralize
from .util import is_algebraic_integer
from .util import is_constant
from .util import is_squareSum
from .util import pair_keep
from .util import pair_swap
from .util import rewriteExp
from .util import subs
from .util import symbol
from .util import variables
# Intersection arrays ((b0, ..., b_{d-1}), (c1, ..., c_d)) of some known
# small distance-regular graphs, used for matching elsewhere.
PENTAGON = ((2, 1), (1, 1))
PETERSEN = ((3, 2), (1, 1))
TRIANGULAR7_COMPL = ((10, 6), (1, 6))
ICOSAHEDRON = ((5, 2, 1), (1, 2, 5))
DORO5 = ((10, 6, 4), (1, 2, 5))
GOSSET = ((27, 10, 1), (1, 10, 27))
CONWAY_SMITH = ((10, 6, 4, 1), (1, 2, 6, 10))
# Registry of feasibility checks for DRGParameters, extending the
# PolyASParameters checklist via the ``check`` decorator.
check_DRGParameters = []
check = checklist(check_DRGParameters, PolyASParameters._checklist)
class DRGParameters(PolyASParameters):
    """
    A class for parameters of a distance-regular graph
    and checking their feasibility.
    """
    # Terminology/configuration used by the PolyASParameters machinery for
    # the primal (P-polynomial) and dual (Q-polynomial) structures.
    ANTIPODAL = "antipodal quotient"
    ARRAY = "intersection array"
    BIPARTITE = "bipartite half"
    DUAL_INTEGRAL = False
    DUAL_MATRIX = "Q"
    DUAL_PARAMETER = "Krein parameter"
    DUAL_PARTS = "eigenspaces"
    DUAL_SIZES = "multiplicities"
    DUAL_SYMBOL = "q"
    MATRIX = "P"
    METRIC = True
    OBJECT = "distance-regular graph"
    OBJECT_LATEX = "distance-regular graph"
    PARAMETER = "intersection number"
    PART = "relation"
    PARTS = "relations"
    PART_SCHEME = "distance-%s graph"
    PTR = pair_keep
    QTR = pair_swap
    SIZE = "valency"
    SIZES = "valencies"
    SYMBOL = "p"
    _checklist = check_DRGParameters
    def __init__(self, b, c=None, alpha=None, beta=None,
                 complement=None, order=None):
        """
        Object constructor.
        Takes two iterables of the same length ``d`` as input,
        representing the intersection array
        ``{b[0], b[1], ..., b[d-1]; c[1], c[2], ..., c[d]}``.
        The basic checks on integrality and nonnegativity
        of the intersection array are performed.
        If three parameters are given,
        they are understood as the valency and numbers of common neighbours
        of two adjacent and two nonadjacent vertices, respectively,
        in a strongly regular graph.
        If four parameters are given,
        they are understood as the classical parameters.
        """
        self._init_storage()
        if isinstance(b, ASParameters):
            # construct from an existing association scheme
            o = b.is_pPolynomial()
            assert o, "scheme not P-polynomial"
            self._.d = b._.d
            if order is None:
                order = o[0]
            else:
                order = self._reorder(order)
            assert order in o, "scheme not P-polynomial for given order"
            PolyASParameters.__init__(self, b, order=order)
            self._check_intersectionArray()
            if isinstance(b, DRGParameters):
                return
        else:
            if alpha is not None:
                if beta is not None:
                    # four parameters: classical parameters (d, b, alpha, beta)
                    self._.d = Integer(b)
                    q = c
                    b = [(q_int(self._.d, q) - q_int(i, q)) *
                         (beta - alpha * q_int(i, q)) for i in range(self._.d)]
                    c = [q_int(i, q) * (1 + alpha * q_int(i-1, q))
                         for i in range(1, self._.d + 1)]
                elif b - c == 1:
                    # valency = lambda + 1: complete graph (diameter 1)
                    self._.d = Integer(1)
                    b, c = (b, ), (1, )
                else:
                    # strongly regular graph from (valency, lambda, mu)
                    self._.d = Integer(2)
                    b, c = (b, b-c-1), (1, alpha)
            else:
                self._.d = Integer(len(b))
            PolyASParameters.__init__(self, b, c)
            self._check_intersectionArray()
        self._.k = tuple(self._init_multiplicities())
        self._.p = Array3D(self._.d + 1)
        self._compute_parameters(self._.p, self._.k)
        self._compute_imprimitivity()
        if not isinstance(b, ASParameters):
            self.check_handshake()
        self._compute_complement(complement)
    def _check_intersectionArray(self):
        """
        Check the basic restrictions on the intersection array.
        """
        # b must be non-ascending and c non-descending
        assert all(checkNonneg(self._.b[i] - self._.b[i+1])
                   for i in range(self._.d)), \
            "b sequence not non-ascending"
        assert all(checkNonneg(self._.c[i+1] - self._.c[i])
                   for i in range(self._.d)), \
            "c sequence not non-descending"
        if any(self._.b[j] < self._.c[i]
               for i in range(self._.d+1) for j in range(self._.d-i+1)):
            raise InfeasibleError("b[j] < c[i] with i+j <= d",
                                  ("BCN", "Proposition 4.1.6.(ii)"))
    def _check_parameter(self, h, i, j, v, integral=True,
                         name=None, sym=None):
        """
        Check for the feasibility
        of an intersection number or Krein parameter.
        The parameter is checked for nonnegativity,
        and, if requested, also for integrality.
        """
        return PolyASParameters._check_parameter(self, h, i, j, v,
                                                 integral=integral,
                                                 name=name, sym=sym)
    def _complement(self):
        """
        Return the parameters of the complement of a strongly regular graph.
        """
        return PolyASParameters._complement(self, self._.k, self._.p)
    def _compute_kreinParameters(self, expand=False, factor=False,
                                 simplify=False):
        """
        Compute the Krein parameters.
        """
        # multiplicities are needed before the dual parameters
        if not self._has("m"):
            self.multiplicities(expand=expand, factor=factor,
                                simplify=simplify)
        if not self._has("q"):
            q = Array3D(self._.d + 1)
            self._compute_dualParameters(q, self._.k, self._.m, self.PTR)
            self._.q = q
    def _compute_kTable(self, expand=False, factor=False, simplify=False):
        """
        Compute the valencies of the relations.
        Does nothing, as they are already computed
        for distance-regular graphs.
        """
        pass
    def _compute_localEigenvalues(self):
        """
        Return the largest and smallest nontrivial eigenvalues
        together with their indices
        and the bounds for the nontrivial eigenvalues of the local graph.
        """
        # "nontrivial" means distinct from the valency k[1]
        th1, i = max((th, h) for h, th in enumerate(self._.theta)
                     if th != self._.k[1])
        thd, j = min((th, h) for h, th in enumerate(self._.theta)
                     if th != self._.k[1])
        bm = -1 - self._.b[1]/(th1+1)
        bp = -1 - self._.b[1]/(thd+1)
        return (th1, i, thd, j, bm, bp)
    def _compute_multiplicities(self, expand=False, factor=False,
                                simplify=False):
        """
        Compute the multiplicities of the eigenspaces.
        """
        if not self._has("m"):
            self._.m = self._compute_sizes(self._.k, expand=expand,
                                           factor=factor, simplify=simplify)
    def _compute_pTable(self, expand=False, factor=False,
                        simplify=False):
        """
        Compute the intersection numbers.
        Does nothing, as they are already computed
        for distance-regular graphs.
        """
        pass
    def _copy_cosineSequences(self, p):
        """
        Obtain the cosine sequences from the eigenmatrix.
        """
        PolyASParameters._copy_cosineSequences(self, p.eigenmatrix())
    @staticmethod
    def _get_class():
        """
        Return the principal class of the object.
        """
        return DRGParameters
    def _init_array(self, b, c):
        """
        Initialize the intersection array while checking for integrality.
        """
        # c[0] = 0 and b[d] = 0 hold by definition
        try:
            self._.c = (Integer(0), ) + tuple(map(integralize, c))
        except TypeError:
            raise InfeasibleError("c sequence not integral")
        try:
            self._.b = tuple(map(integralize, b)) + (Integer(0), )
        except TypeError:
            raise InfeasibleError("b sequence not integral")
    def _is_trivial(self):
        """
        Check whether the distance-regular graph is trivial
        for the purposes of feasibility checking.
        Returns ``True`` if the graph has diameter one or valency two.
        """
        return PolyASParameters._is_trivial(self) or self._.k[1] == 2
    @staticmethod
    def _subconstituent_name(h):
        """
        Return a properly formatted ordinal for the given subconstituent.
        """
        if h == 1:
            return "local graph"
        else:
            return PolyASParameters._subconstituent_name(h)
    def _subs(self, exp, p, seen):
        """
        Substitute the given subexpressions in the parameters.
        """
        p, new = PolyASParameters._subs(self, exp, p, seen)
        if new:
            # propagate the Krein parameters to the substituted copy
            if self._has("q") and not p._has("q"):
                p._.q = self._.q.subs(*exp)
                p._check_parameters(p._.q, integral=self.DUAL_INTEGRAL,
                                    name=self.DUAL_PARAMETER,
                                    sym=self.DUAL_SYMBOL)
        return p
    def complementaryGraph(self):
        """
        Return the parameters of the complement of a strongly regular graph.
        """
        # only defined for diameter 2 with b[0] > c[2]
        assert self._.d == 2 and checkPos(self._.b[0] - self._.c[2]), \
            "the complement is not distance-regular"
        return self._.complement
    def distancePartition(self, h=0):
        """
        Return the diagram of the distance partition
        corresponding to a vertex (if h = 0)
        or two vertices at distance h.
        """
        return PartitionGraph(self, h)
    def eigenvalues(self, expand=False, factor=False, simplify=False):
        """
        Compute and return the eigenvalues of the graph.
        """
        return self._compute_eigenvalues(self._.p, expand=expand,
                                         factor=factor, simplify=simplify)
    def genPoly_parameters(self, expand=False, factor=False, simplify=False):
        """
        Determine the parameters of the generalized polygon
        whose collinearity graph has matching parameters.
        """
        try:
            t = rewriteExp(self._.c[self._.d] - 1, expand=expand,
                           factor=factor, simplify=simplify)
            s = rewriteExp(integralize(self._.b[0] / self._.c[self._.d]),
                           expand=expand, factor=factor, simplify=simplify)
            st = s * t
            # the middle of the array must satisfy b[i] = st and c[i] = 1
            if any(c != 1 or b != st
                   for b, c in zip(self._.b[1:-1], self._.c[1:-1])):
                raise TypeError
            return (2*self._.d, s, t)
        except TypeError:
            return (False, None, None)
    def guaranteed_clique_order(self):
        """
        Return the smallest feasible order for a maximal clique in the graph.
        """
        if not self._has("maxCliques"):
            self.check_combinatorial()
        if self._.maxCliques:
            return self._.a[1] + 2
        s = Integer(3 if checkPos(self._.a[1]) else 2)
        a = self._.c[2] - 1
        b = self._.c[2] + 2*self._.a[1] + 1
        D = b**2 - 8*a*self._.k[1]
        if D > 0:
            r = hard_ceiling((b - sqrt(D))/(2*a))
            if not is_constant(r):
                r += 1
            if (b + sqrt(D))/(2*a) > r:
                t = self._.a[1] + 2 - (r-2)*a
                if t > s:
                    return t
        return s
    def has_edges(self, h, i1, j1, i2, j2):
        """
        Determine if there can be edges between sets of vertices
        at distances (i1, j1) and (i2, j2) from two vertices at distance h
        using the currently known triple intersection numbers.
        """
        if j1 is None:
            return abs(i1 - i2) <= 1
        assert all(x >= 0 and x <= self._.d for x in [h, i1, j1, i2, j2]), \
            "distance not in feasible range"
        if abs(i1 - i2) > 1 or abs(j1 - j2) > 1:
            return False
        for t, d in (((h, i1, j1), (i2, j2, 1)), ((h, i2, j2), (i1, j1, 1))):
            if any(x == 0 for x in self.triple_generator(t, d)):
                return False
        return True
    def is_bilinearForms(self):
        """
        Check whether the graph can be a bilinear forms graph
        of diameter at least 2.
        """
        # candidate prime powers q with q*(q+1) = c[2]
        s = symbol("__s")
        for q in sorted([s.subs(ss) for ss in
                         _solve(s*(s+1) == self._.c[2], s)], reverse=True):
            if not checkPrimePower(q):
                continue
            beta = self._.b[0] * (q-1) / (q**self._.d - 1)
            try:
                integralize(log(integralize(beta + 1), q))
            except TypeError:
                continue
            if self.is_classicalWithParameters(q, q-1, beta):
                return True
        return False
    def is_classical(self):
        """
        Check whether the graph is classical,
        and return all sets of classical parameters if it is.
        """
        if not self._has("classical"):
            clas = []
            # candidate values for the classical parameter b
            bs = set()
            if self._.d == 2:
                e = self._.c[2] - self._.a[1] - 2
                d = sqrt(4*self._.b[1] + e**2)
                bs.add((e+d)/2)
                bs.add((e-d)/2)
            elif all(self._.a[i] == self._.a[1] * self._.c[i]
                     for i in range(2, self._.d+1)):
                bs.add(self._.c[2] - 1)
                bs.add(-self._.a[1] - 1)
            elif self._.d >= 3:
                d = self._.a[1] * self._.c[3] - self._.a[3]
                if d != 0:
                    bs.add((self._.a[2]*self._.c[3]
                            - self._.c[2]*self._.a[3]) / d)
            for b in bs:
                if b in [0, -1]:
                    continue
                alpha = self._.c[2] / (b+1) - 1
                beta = self._.k[1] / q_int(self._.d, b)
                if all(self._.b[i] ==
                       (q_int(self._.d, b) - q_int(i, b)) *
                       (beta - alpha * q_int(i, b)) and
                       self._.c[i+1] ==
                       q_int(i+1, b) * (1 + alpha * q_int(i, b))
                       for i in range(self._.d)):
                    clas.append((self._.d, b, alpha, beta))
            self._.classical = False if len(clas) == 0 else clas
        return self._.classical
    def is_classicalWithParameters(self, b, alpha, beta):
        """
        Check whether the graph can have the specified classical parameters.
        """
        p = DRGParameters(self._.d, b, alpha, beta)
        return len(_solve([SR(l) == r for l, r in
                           zip(self._.b + self._.c, p._.b + p._.c)],
                          self._.vars)) > 0
    def is_dualPolar2Aodd(self):
        """
        Check whether the graph can be a dual polar graph ^2A_{2d-1}(-b)
        of diameter at least 2.
        """
        if self._.d < 2:
            return False
        q = self._.c[2] - 1
        if not checkPrimePower(q):
            return False
        beta = self._.b[0] * (q-1) / (q**self._.d - 1)
        return q == beta**2 and self.is_classicalWithParameters(q, 0, beta)
    def is_grassmann(self):
        """
        Check whether the graph can be a Grassmann graph
        of diameter at least 2.
        """
        if self._.d < 2:
            return False
        s = sqrt(self._.c[2])
        for q in sorted([-1+s, -1-s], reverse=True):
            if not checkPrimePower(q):
                continue
            beta = self._.b[0] * (q-1) / (q**self._.d - 1)
            try:
                integralize(log(integralize(q + beta*(q-1)), q))
            except TypeError:
                continue
            if self.is_classicalWithParameters(q, q, beta):
                return True
        return False
    def is_halfCube(self):
        """
        Check whether the graph can be a halved cube.
        """
        # two candidate b-sequences are tried against the same c-sequence
        b1 = [SR(x) == (self._.d-i) * (2*(self._.d-i) - 1)
              for i, x in enumerate(self._.b[:-1])]
        b2 = [SR(x) == (self._.d-i) * (2*(self._.d-i) + 1)
              for i, x in enumerate(self._.b[:-1])]
        c = [SR(x) == (i+1) * (2*i + 1) for i, x in enumerate(self._.c[1:])]
        return len(_solve(b1 + c, self._.vars)) > 0 or \
            len(_solve(b2 + c, self._.vars)) > 0
    def is_hamming(self):
        """
        Check whether the graph can be a Hamming (or Doob) graph.
        """
        z = symbol()
        return len(_solve([SR(x) == (self._.d-i) * z
                           for i, x in enumerate(self._.b[:-1])] +
                          [SR(x) == i+1 for i, x in enumerate(self._.c[1:])],
                          self._.vars + (z, ))) > 0
    def is_hermitean(self):
        """
        Check whether the graph can be a Hermitean forms graph
        of diameter at least 2.
        """
        # candidate q are negated prime powers with q*(q+1) = c[2]
        s = symbol("__s")
        for q in sorted([s.subs(ss) for ss in
                         _solve(s*(s+1) == self._.c[2], s)]):
            if not checkPrimePower(-q):
                continue
            beta = self._.b[0] * (q-1) / (q**self._.d - 1)
            if beta+1 == -q**self._.d and \
                    self.is_classicalWithParameters(q, q-1, beta):
                return True
        return False
    def is_johnson(self):
        """
        Check whether the graph can be a Johnson graph.
        """
        z = symbol()
        return len(_solve([SR(x) == (self._.d-i) * (self._.d - z - i)
                           for i, x in enumerate(self._.b[:-1])] +
                          [SR(x) == (i+1)**2 for i, x
                           in enumerate(self._.c[1:])],
                          self._.vars + (z, ))) > 0
    def is_locallyPetersen(self):
        """
        Check whether the graph can be locally Petersen.
        """
        # matches one of the three known locally Petersen arrays
        return self.match(TRIANGULAR7_COMPL, DORO5, CONWAY_SMITH)
    def is_weng_feasible(self):
        """
        Check whether the graph can be a member
        of a feasible family of classical graphs
        appearing in a classification from Weng99.
        """
        if self._.d < 2:
            return False
        s = sqrt(2 * self._.c[2])
        for q in sorted([-1-s, -1+s]):
            if not checkPrimePower(-q):
                continue
            beta = self._.b[0] * (q-1) / (q**self._.d - 1)
            if beta == -(1 + q**self._.d)/2 and \
                    self.is_classicalWithParameters(q, (q-1)/2, beta):
                return True
        return False
def localEigenvalue_range(self, compute=False, b=None,
                          return_refs=False):
    """
    Return the range of possible eigenvalues of a local graph.

    If ``compute`` is set to ``True``,
    then the relevant triple intersection numbers will be computed.

    If ``b`` is given, it is used as the pair ``(bm, bp)`` of bounds
    on the nontrivial local eigenvalues instead of computing them.
    If ``return_refs`` is ``True``, the list of literature references
    used to narrow the range is returned alongside the interval.

    Raises ``InfeasibleError`` when the derived range is empty or
    otherwise inconsistent.
    """
    refs = []
    out = lambda ii: (ii, refs) if return_refs else ii
    a = self._.a[1]
    # Degenerate cases: a[1] = 0 forces local eigenvalue 0 only;
    # a[1] = 1 or diameter 1 forces eigenvalues -1 and a[1].
    if a == 0:
        return out(RealSet([0, 0]))
    elif a == 1 or self._.d == 1:
        return out(RealSet([-1, -1]) + RealSet([a, a]))
    if not self._has("theta"):
        self.eigenvalues()
    assert all(is_constant(th) for th in self._.theta), \
        "eigenvalues not constant"
    check_local = b is None
    if check_local:
        _, _, _, _, bm, bp = self._compute_localEigenvalues()
    else:
        bm, bp = b
    try:
        loc = self.localGraph(compute=compute, check_local=check_local)
        if isinstance(loc, DRGParameters):
            # The local graph is known to be distance-regular:
            # its nontrivial eigenvalues must all lie in [bm, bp].
            interval = sum((RealSet([th, th]) for th in loc.eigenvalues()
                            if th != a), RealSet())
            if interval.inf() < bm or interval.sup() > bp:
                raise InfeasibleError("local eigenvalues "
                                      "not in allowed range",
                                      ("BCN", "Thm. 4.4.3."))
        else:
            raise IndexError
    except IndexError:
        # Local graph unknown: start from the generic interval.
        interval = eigenvalue_interval(bm, bp) & RealSet([-a, a])
    orig = interval
    ll = -Infinity
    uu = Infinity
    if self._.d >= 3 and self.is_qPolynomial():
        # Narrow the interval using nonnegativity of the
        # Terwilliger polynomial for each Q-polynomial ordering.
        x = SR.symbol("__x")
        s = None
        for q_order in self._.qPolynomial_ordering:
            for i in range(2, self._.d):
                p = self.terwilligerPolynomial(x, i=i, q_order=q_order)
                sign = sgn(p.coefficient(x**4))
                if s is None:
                    s = sign
                elif s == sign:
                    # Same leading sign yields no new information.
                    continue
                tint = RealSet()
                # Convert the solution of p >= 0 into a real set.
                for sol in _solve(p >= 0, x):
                    l = u = None
                    for eq in sol:
                        op = eq.operator()
                        if op is operator.eq:
                            l = u = eq.rhs()
                        elif op is operator.ge:
                            l = eq.rhs()
                        elif op is operator.le:
                            u = eq.rhs()
                    tint += eigenvalue_interval(l, u)
                interval &= tint
                ll = max(ll, tint.inf())
                uu = min(uu, tint.sup())
                if s != sign:
                    break
        if interval != orig:
            refs.append(("GavrilyukKoolen16", "Thm. 4.2."))
    bcn443 = (bm > -a and bm > ll) or (bp < a and bp < uu)
    if bcn443:
        refs.insert(0, ("BCN", "Thm. 4.4.3."))
    # If everything below a[1] lies at or below -1,
    # clip to -1 and keep a[1] itself.
    if (interval - RealSet([a, a])).sup() <= -1:
        interval -= RealSet.unbounded_below_open(-1)
        interval += RealSet([a, a])
    if interval.inf() > -1:
        raise InfeasibleError("invalid eigenvalues for local graph",
                              refs)
    if interval.cardinality() == 2:
        # Exactly two admissible eigenvalues: per the error messages
        # below, the graph then has maximal cliques, so a[1]+1 must
        # divide k and b+ must reach a[1].
        if self._.b[0] % (a+1) != 0:
            raise InfeasibleError("graph with maximal cliques "
                                  "but a[1]+1 does not divide k", refs)
        if bp < a:
            if not bcn443:
                refs.insert(0, ("BCN", "Thm. 4.4.3."))
            raise InfeasibleError(
                "graph with maximal cliques but b+ < a[1]", refs)
    ll = uu = None
    for ii in interval:
        l, u = ii.lower(), ii.upper()
        for e, c in {l: ii.lower_closed(), u: ii.upper_closed()}.items():
            if c:
                # Remove attained endpoints whose algebraic conjugates
                # do not all lie inside the interval.
                roots = [r for r, _ in SR(e).minpoly().roots(SR)]
                if not all(r in interval for r in roots):
                    interval -= sum((RealSet([r, r]) for r in roots
                                     if r.is_real()), RealSet())
        if l == u:
            continue
        # Track the overall bounds of the nondegenerate components.
        if ll is None:
            ll, uu = l, u
        else:
            ll, uu = min(ll, l), max(uu, u)
    if ll is not None:
        l = floor(ll)
        u = ceil(uu)
        if u - l <= 4 and uu - ll < 4:
            # Narrow range: only values of the form m + 2*cos(2*j*pi/k)
            # can survive (cf. BrouwerKoolen99).
            keep = RealSet()
            m = l + 2
            if u - l <= 3:
                uu = m
            k = 3
            while m + 2*cos(2*pi/k) <= uu:
                t = floor((k-1)/2)
                if k % 2 == 1 and m + 2*cos(2*t*pi/k) < ll:
                    break
                roots = []
                for j in range(1, t+1):
                    if GCD(j, k) == 1:
                        r = m + 2*cos(2*j*pi/k)
                        if r not in interval:
                            break
                        roots.append(r)
                else:
                    # All conjugates admissible: keep the whole orbit.
                    keep += sum((RealSet([r, r]) for r in roots),
                                RealSet())
                k += 1
            interval -= RealSet((l, u))
            interval += keep
            refs.append(("BrouwerKoolen99", "cf. Thm. 7.1."))
    return out(interval)
def localGraph(self, compute=False, check_local=True):
    """
    Return parameters of the local graph
    if it is known to be distance-regular.
    If ``compute`` is set to ``True``,
    then the relevant triple intersection numbers will be computed.
    """
    # The local graph is exactly the first subconstituent.
    kwargs = {"compute": compute, "check_local": check_local}
    return self.subconstituent(1, **kwargs)
def maximalCliquePolynomial(self, var='x'):
    """
    Return the maximal clique polynomial of a strongly regular graph.

    ``var`` may be a variable name or a symbolic variable.
    The polynomial is returned in expanded form.
    """
    assert self._.d == 2, "diameter must be 2"
    if not self._has("theta"):
        self.eigenvalues()
    # m is the negation of the smallest eigenvalue.
    m = -min(self._.theta, key=lambda x: CoefficientList(x, self._.vars))
    x = SR.symbol(var) if isinstance(var, six.string_types) else var
    M = ((x + m - 3) * (self._.k[1] - x + 1)
         - 2 * (x - 1) * (self._.a[1] - x + 2))**2 - \
        (self._.k[1] - x + 1)**2 * (x + m - 1) * (x - (m-1) * (4*m-1))
    return M.expand()
def merge(self, *args, **kargs):
    """
    Return parameters of a graph obtained
    by merging specified relations.
    """
    # Delegate to the generic implementation, supplying this scheme's
    # valencies and intersection numbers.
    return PolyASParameters.merge(self, self._.k, self._.p,
                                  *args, **kargs)
def reorderEigenspaces(self, *order):
    """
    Specify a new order for the eigenspaces.

    Equivalent to reordering the eigenvalues.
    """
    self.reorderEigenvalues(*order)
def reorderEigenvalues(self, *order):
    """
    Specify a new order for the eigenvalues and return it.
    """
    # Reorder the eigenvalues, then keep the eigenspaces in sync.
    order = PolyASParameters.reorderEigenvalues(self, *order)
    PolyASParameters.reorderEigenspaces(self, *order)
    return self._.theta
def reorderParameters(self, *order):
    """
    Specify a new order for the parameters and return them.

    The scheme must be P-polynomial for the requested order.
    """
    order = self._reorder(order)
    assert order in self.is_pPolynomial(), \
        "scheme not P-polynomial for the given order"
    # Reorder relations first, then the parameters themselves.
    PolyASParameters.reorderRelations(self, *order)
    PolyASParameters.reorderParameters(self, self._.p, *order)
    return self.parameterArray()
def reorderRelations(self, *order):
    """
    Specify a new order for the relations.

    Equivalent to reordering the parameters.
    """
    self.reorderParameters(*order)
def show_distancePartitions(self, **options):
    """
    Show all distance partitions.

    Any keyword options are forwarded to each partition's
    ``show`` method.
    """
    for dist in range(self._.d + 1):
        partition = self.distancePartition(dist)
        partition.show(**options)
def subconstituent(self, h, compute=False, check_local=True):
    """
    Return parameters of the h-th subconstituent
    if it is known to form an association scheme.
    If the resulting scheme is P-polynomial,
    the parameters are returned as such.
    If ``compute`` is set to ``True``,
    then the relevant triple intersection numbers will be computed.
    """
    if h == 1:
        # These checks may identify the local graph as a side effect,
        # filling in self._.subconstituents[1].
        if self._.subconstituents[h] is None:
            self.check_2graph()
        if self._.subconstituents[h] is None and check_local:
            self.check_localEigenvalues(check_range=False)
    if self._.subconstituents[h] is None:
        subc, rels = PolyASParameters.subconstituent(self, h,
                                                     compute=compute,
                                                     return_rels=True)
        # Convert to DRG parameters when the natural ordering
        # of relations is P-polynomial.
        if subc is not None and len(rels) > 1 and rels[1] == 1 \
                and subc.is_pPolynomial() \
                and tuple(range(subc._.d+1)) \
                in subc._.pPolynomial_ordering:
            self._.subconstituents[h] = DRGParameters(
                subc, order=tuple(range(subc._.d+1)))
    return self._.subconstituents[h]
def subs(self, *exp, **kargs):
    """
    Substitute the given subexpressions in the parameters.

    A new parameter set is built by substituting into the
    intersection array; the optional ``seen`` keyword carries
    the cache of already substituted objects.
    """
    return self._subs(exp,
                      DRGParameters(*[[subs(x, *exp) for x in l] for l
                                      in self.intersectionArray()]),
                      kargs.get("seen", {}))
def valency(self):
    """
    Return the valency of the graph.

    This is the intersection number b[0].
    """
    degree = self._.b[0]
    return degree
@check(1)
def check_2graph(self):
    """
    For a strongly regular or Taylor graph,
    check whether a regular 2-graph can be derived.

    Raises ``InfeasibleError`` for a Taylor graph with a[1] > 0 odd
    or a cover of K_n with n odd.
    """
    if self._.d == 2 and \
            self._.n == 2*(2*self._.b[0] - self._.a[1] - self._.c[2]):
        # Strongly regular case: add the 2-graph derivation.
        mu = self._.b[0] - self._.c[2]
        if checkPos(mu):
            self.add_subscheme(DRGParameters((2*mu, self._.b[1]),
                                             (Integer(1), mu)),
                               "2-graph derivation")
    elif self._.d == 3 and self._.antipodal and \
            self._.r == 2 and self._.a[1] > 0:
        # Taylor graph case: derive the local graph.
        try:
            mu = integralize(self._.a[1] / 2)
            n = integralize(self._.n / 4)
        except TypeError:
            raise InfeasibleError("Taylor graph with a[1] > 0 odd "
                                  "or cover of K_n with n odd",
                                  ("BCN", "Thm. 1.5.3."))
        self._.subconstituents[1] = \
            self.add_subscheme(DRGParameters((self._.a[1], n - mu - 1),
                                             (Integer(1), mu)),
                               "local graph")
@check(1)
def check_classical(self):
    """
    Check whether the graph has classical parameters for which
    nonexistence has been shown as a part of an infinite family.

    Raises ``InfeasibleError`` when the parameters match a family
    known to be nonexistent.
    """
    if self._.d >= 3:
        # Bounds x <= y derived from the quadratic relating
        # a[1], c[2] and b[0].
        s = symbol("__s")
        sols = sorted([s.subs(ss) for ss in
                       _solve((s+1)*(self._.a[1]+1)
                              - s*(s+1)*(self._.c[2]-1)/2
                              == self._.b[0], s)])
        x = hard_ceiling(sols[0], Integer(0))
        y = hard_floor(sols[-1], Integer(-1))
        try:
            # Metsch's characterization of Grassmann graphs;
            # applicable only when c[2] = (q+1)^2 for integral q.
            q = integralize(sqrt(self._.c[2]) - 1)
            r = hard_floor(((self._.a[1] + 1)
                            - (self._.b[0] - self._.b[2]) / (q+2))
                           / (q+1) + 1)
            if q == 0:
                t = r
            else:
                t = hard_floor(
                    ((self._.a[1] + 1)/(self._.c[2] - 1) + 1) / 2)
            if q >= 2 and y >= 2 and x <= y and x <= r and x <= t \
                    and not self.is_grassmann():
                raise InfeasibleError("not a Grassmann graph",
                                      ("Metsch95", "Thm. 2.3."))
        except TypeError:
            # c[2] is not of the required form -- test not applicable.
            pass
    clas = self.is_classical()
    if not clas:
        return
    # Match against the table of known infeasible classical families.
    for cl, (cond, ref) in classicalFamilies.items():
        if isinstance(cl[0], Expression):
            diam = cl[0] == self._.d
            cl = tuple(subs(exp, diam) for exp in cl)
        else:
            diam = None
        vars = tuple(set(sum(map(variables, cl), ())))
        for c in clas:
            sols = _solve([SR(l) == r for l, r in zip(c, cl)], vars)
            if all(isinstance(e, Expression) for e in sols):
                continue
            if diam is not None:
                sols = [s + [diam] for s in sols]
            if any(checkConditions(cond, sol) for sol in sols):
                raise InfeasibleError(refs=ref)
    if self._.d >= 3 and self._.a[1] == 0 and self._.a[2] > 0 and \
            self._.c[2] > 2:
        raise InfeasibleError("classical with a[1] = 0, "
                              "a[2] > 0 and c[2] > 2",
                              ("PanWeng09", "Thm. 2.1."))
    if self._.d >= 4 and self._.a[1] > 0 and self._.c[2] > 1 and \
            any(b < 0 for d, b, alpha, beta in clas) and \
            not self.is_dualPolar2Aodd() and not self.is_hermitean() \
            and not self.is_weng_feasible():
        raise InfeasibleError("classical with b < 0",
                              ("Weng99", "Thm. 10.3."))
    if self._.d < 3:
        return
    for d, b, alpha, beta in clas:
        try:
            b = integralize(b)
        except TypeError:
            continue
        if not (is_constant(alpha) and is_constant(beta)):
            continue
        if alpha == b and ((b == 6 and d >= 7) or
                           (b >= 10 and d >= 6 and
                            not checkPrimePower(b))) \
                and beta + 1 == (b**(d+1) - 1) / (b - 1):
            raise InfeasibleError("not a Grassmann graph",
                                  ("GavrilyukKoolen18", "Thm. 1.2."))
        if x <= y and alpha >= 1 and alpha == b - 1 \
                and y >= (b**d-1)/(b-1):
            t = hard_floor((1 + self._.a[1] + b**2 * (b**2 + b + 1))
                           / (b**3 + b**2 + 2*b - 1))
            if x <= t and (d != 3 or b != 2 or
                           (x <= 7 and y >= 7 and t >= 7)) and \
                    not self.is_bilinearForms():
                raise InfeasibleError("not a bilinear forms graph",
                                      ("Metsch99", "Prop. 2.2."))
@check(1)
def check_combinatorial(self):
    """
    Check for various combinatorial conditions.

    Sets ``self._.maxCliques`` to indicate whether the graph is
    known to contain maximal cliques, and raises ``InfeasibleError``
    when any of the checked conditions is violated.
    """
    self._.maxCliques = self._.a[1] == 0
    if checkPos(self._.b[0] - 2):
        if self._.b[1] == 1 and \
                (self._.d != 2 or self._.c[2] != self._.b[0]):
            raise InfeasibleError("b1 = 1 and not a cycle "
                                  "or cocktail party graph")
        for i in range(2, self._.d):
            if checkPos(self._.b[i] - 1):
                continue
            # Godsil's diameter bound for indices with b[i] = 1.
            if self._.d >= 3*i or \
                    any(self._.c[j] > 1 or self._.a[j] >= self._.c[i+j]
                        for j in range(1, self._.d - i + 1)) or \
                    (self._.d >= 2*i and self._.c[2*i] == 1) or \
                    any(self._.a[j] > 0 for j
                        in range(1, self._.d - 2*i + 1)) or \
                    (i < self._.d and
                     (self._.c[2] - 1)*self._.a[i+1] + self._.a[1]
                     > self._.a[i]):
                raise InfeasibleError("Godsil's diameter bound "
                                      "not reached",
                                      ("BCN", "Lem. 5.3.1."))
    if self._.d >= 3 and self._.c[2] > 1 and \
            3*self._.c[2] > 2*self._.c[3] and \
            (self._.d != 3 or self._.b[2] + self._.c[2] > self._.c[3]):
        raise InfeasibleError("intersection number c[3] too small",
                              ("BCN", "Thm. 5.4.1."))
    for i in range(2, self._.d):
        if self._.b[i] != self._.b[1]:
            break
        if self._.c[i] != 1:
            raise InfeasibleError("impossible arrangement of lines",
                                  ("BCN", "Thm. 5.4.4."))
    if self._.a[1] > 0 and \
            any(self._.a[1] + 1 > 2*self._.a[i] or
                ((i < self._.d-1 or self._.a[self._.d] > 0 or
                  (self._.d > 2 and self._.b[self._.d-1] > 1)) and
                 self._.a[1] + 1 > self._.a[i] + self._.a[i+1]) or
                self._.a[1] + 2 > self._.b[i] + self._.c[i+1]
                for i in range(1, self._.d)):
        raise InfeasibleError("counting argument",
                              ("BCN", "Prop. 5.5.1."))
    if self._.d >= 4 and set(self._.a[1:4]) == {0} and \
            self._.c[2:5] == (1, 2, 3):
        # Graphs containing Pappus subgraphs.
        try:
            integralize(self._.b[1] * self._.b[2] * self._.b[3] / 4)
            integralize(self._.n * self._.k[4] / 36)
        except TypeError:
            raise InfeasibleError("handshake lemma not satisfied "
                                  "for Pappus subgraphs", "Koolen92")
    if self._.d >= 2:
        if self._.a[1] == 0 and any(2*self._.a[i] > self._.k[i]
                                    for i in range(2, self._.d+1)):
            raise InfeasibleError(u"Turán's theorem",
                                  ("BCN", "Prop. 5.6.4."))
        # Lambeck's counting argument over all admissible triples.
        for h in range(1, self._.d + 1):
            for i in range(self._.d + 1):
                for j in range(abs(h-i), min(self._.d, h+i) + 1):
                    if self._.p[h, i, j] > 0:
                        # Neighbouring intersection numbers,
                        # taken as 0 outside the valid index range.
                        ppm = self._.p[h, i+1, j-1] \
                            if i < self._.d and j > 0 else 0
                        ppz = self._.p[h, i+1, j] if i < self._.d else 0
                        ppp = self._.p[h, i+1, j+1] \
                            if i < self._.d and j < self._.d else 0
                        pzm = self._.p[h, i, j-1] if j > 0 else 0
                        pzp = self._.p[h, i, j+1] if j < self._.d else 0
                        pmm = self._.p[h, i-1, j-1] \
                            if i > 0 and j > 0 else 0
                        pmz = self._.p[h, i-1, j] if i > 0 else 0
                        pmp = self._.p[h, i-1, j+1] \
                            if i > 0 and j < self._.d else 0
                        if ppm + ppz + ppp < self._.b[i] or \
                                pzm + self._.p[h, i, j] + pzp \
                                < self._.a[i] + 1 or \
                                pmm + pmz + pmp < self._.c[i]:
                            raise InfeasibleError("counting argument",
                                                  "Lambeck93")
        if not self._.antipodal:
            ka = self._.k[self._.d] * self._.a[self._.d]
            kka = self._.k[self._.d] * \
                (self._.k[self._.d] - self._.a[self._.d] - 1)
            try:
                if (self._.k[1] > ka and self._.k[1] > kka) or \
                        (self._.k[2] > kka and
                         (self._.k[1] > ka or
                          self._.k[1] > self._.a[self._.d] *
                          (self._.a[1] + 2 - self._.a[self._.d])) and
                         (self._.b[self._.d-1] > 1 or
                          not (self._.a[1] + 1 == self._.a[self._.d]) or
                          integralize(self._.k[1]/self._.a[self._.d])
                          > self._.k[self._.d])):
                    raise TypeError
            except TypeError:
                raise InfeasibleError("valency of last relation too small",
                                      ("BCN", "Prop. 5.6.1."))
            if self._.d >= 3 and self._.k[1] == \
                    self._.k[self._.d] * (self._.k[self._.d] - 1) and \
                    self._.k[self._.d] > self._.a[self._.d] + 1:
                raise InfeasibleError("valency of last relation too small",
                                      ("BCN", "Prop. 5.6.3."))
        # Cases implying the existence of maximal cliques.
        c2one = self._.c[2] == 1
        case3 = self._.b[self._.d-1] == 1 and \
            self._.a[self._.d] == self._.a[1] + 1
        case4 = False
        if self._.p[2, self._.d, self._.d] == 0:
            try:
                ad1 = self._.a[self._.d] + 1
                bad1 = self._.b[self._.d-1] - ad1
                integralize(self._.k[self._.d] / ad1)
                if self._.a[self._.d] > self._.a[1] + 1 or bad1 > 0 or \
                        self._.b[self._.d-1] > self._.c[2] or \
                        (bad1 == 0 and self._.a[self._.d] > 0) \
                        or (self._.b[self._.d-1] > 1 and
                            ad1 > self._.a[1]):
                    raise TypeError
                case4 = self._.b[self._.d-1] <= 1 and \
                    self._.a[self._.d] > 0
            except TypeError:
                raise InfeasibleError("p[2,d,d] = 0",
                                      ("BCN", "Prop. 5.7.1."))
        if c2one or case3 or case4 or self._.a[1] == 1 or \
                (self._.c[2] == 2 and
                 self._.a[1]*(self._.a[1]+3)/2 > self._.k[1]) or \
                any(self._.b[i] > 1 and self._.c[i] == self._.b[1]
                    for i in range(2, self._.d+1)):
            if case3:
                try:
                    integralize(self._.k[self._.d] / (self._.a[1]+2))
                except TypeError:
                    raise InfeasibleError("last relation a union "
                                          "of cliques, a[1]+2 does not "
                                          "divide k[d]",
                                          ("BCN", "Prop. 4.3.2.(iii)"))
            try:
                kl = integralize(self._.k[1] / (self._.a[1]+1))
                vkll = integralize(self._.n*kl / (self._.a[1]+2))
            except TypeError:
                raise InfeasibleError("handshake lemma not satisfied "
                                      "for maximal cliques")
            if self._.a[1] * self._.c[2] > self._.a[2] or \
                    (c2one and
                     1 + self._.b[1]*(self._.b[1]+1) *
                     (self._.a[1]+2)/(1 + self._.a[1]) > vkll):
                raise InfeasibleError("graph with maximal cliques",
                                      ("BCN", "Prop. 4.3.3."))
            self._.maxCliques = True
@check(1)
def check_conference(self):
    """
    Check whether a conference graph can exist.

    A conference graph must have order a sum of two squares
    with residue 1 (mod 4).
    """
    if self._.d == 2 and all(isinstance(x, Integer)
                             for x in self._.b + self._.c) and \
            self._.b[1] == self._.c[2] and \
            self._.b[0] == 2*self._.b[1] and \
            (self._.n % 4 != 1 or not is_squareSum(self._.n)):
        raise InfeasibleError("conference graph must have order a sum "
                              "of two squares with residue 1 (mod 4)")
@check(1)
def check_geodeticEmbedding(self):
    """
    For a graph with intersection array {2b, b, 1; 1, 1, 2b},
    check whether there exists an embedding
    into a geodetic graph of diameter 2.
    """
    # For b[0] > 4 no such embedding exists (BCN Prop. 1.17.3).
    if self._.d == 3 and self._.b[0] == self._.c[3] and \
            self._.b[2] == 1 and self._.c[2] == 1 and \
            self._.b[0] == 2*self._.b[1] and self._.b[0] > 4:
        raise InfeasibleError("no embedding into a geodetic graph "
                              "of diameter 2", ("BCN", "Prop. 1.17.3."))
@check(1)
def check_2design(self):
    """
    For a graph with intersection array
    {r*mu+1, (r-1)*mu, 1; 1, mu, r*mu+1},
    check whether a corresponding 2-design exists.
    """
    if self._.d == 3 and self._.antipodal \
            and isinstance(self._.r, Integer) \
            and isinstance(self._.b[0], Integer) \
            and self._.b[0] - 1 == self._.b[1] + self._.c[2]:
        ok = True
        if self._.r % 2 == 0:
            ok = is_squareSum(self._.b[0])
        elif self._.b[0] % 2 == 0:
            # Quadratic-residue conditions modulo r and b[0].
            r = Integer(self._.r if self._.r % 4 == 1 else -self._.r)
            ok = Integer(self._.b[0]).is_square() or r.is_square() or \
                (Integers(self._.r)(self._.b[0]).is_square() and
                 Integers(self._.b[0])(r).is_square())
        if not ok:
            raise InfeasibleError("no corresponding 2-design",
                                  ("BCN", "Prop. 1.10.5."))
@check(1)
def check_hadamard(self):
    """
    For a graph with intersection array {2c, 2c-1, c, 1; 1, c, 2c-1, 2c},
    with c > 1, check whether c is even.
    """
    if self._.d == 4 and self._.b[0] > 2 and self._.bipartite \
            and self._.antipodal and self._.r == 2:
        try:
            integralize(self._.c[2]/2)
        except TypeError:
            raise InfeasibleError("Hadamard graph with odd c[2]",
                                  ("BCN", "Cor. 1.8.2."))
@check(1)
def check_antipodal(self):
    """
    For an antipodal cover of even diameter at least 4,
    check whether its quotient satisfies necessary conditions
    for the existence of a cover.
    """
    if self._.antipodal and self._.d >= 4 and self._.d % 2 == 0:
        q = self.antipodalQuotient()
        try:
            # The cover index r must divide this sum of intersection
            # numbers of the quotient.
            integralize(sum(q._.p[q._.d, i, q._.d-i]
                            for i in range(1, q._.d))
                        / self._.r)
            if self._.d == 4 and self._.c[2] == 1:
                kl = q._.b[0] / (q._.a[1] + 1)
                if self._.r > kl:
                    raise TypeError
                integralize(q._.n*kl / (q._.a[1]+2))
        except TypeError:
            raise InfeasibleError("quotient cannot have covers "
                                  "of even diameter",
                                  ("BCN", "Prop. 4.2.7."))
@check(1)
def check_genPoly(self):
    """
    For a graph with parameters of a generalized polygon,
    check whether its parameters satisfy the restrictions.
    """
    g, s, t = self.genPoly_parameters()
    if g == 4 and s > 1 and t > 1:
        # Pseudo-generalized quadrangle restriction (GKMP20).
        tf = 8*t/3 + 1
        if is_constant(tf):
            tf = floor(tf)
        if not checkNonneg(t * tf - s):
            raise InfeasibleError("infeasible parameters "
                                  "for pseudo-generalized quadrangle",
                                  "GKMP20")
    if not self._has("maxCliques"):
        self.check_combinatorial()
    if not self._.maxCliques:
        return
    if g:
        try:
            st = integralize(s*t)
            st2 = 2*st
        except TypeError:
            st = st2 = Integer(1)
        # Admissible girths and parameter bounds (BCN Thm. 6.5.1).
        if g not in [2, 4, 6, 8, 12] or \
                (s > 1 and t > 1 and
                 (g == 12 or
                  (g == 8 and (not st2.is_square() or
                               s > t**2 or t > s**2)) or
                  (g == 6 and (not st.is_square()
                               or s > t**3 or t > s**3)) or
                  (g == 4 and (s > t**2 or t > s**2)))):
            raise InfeasibleError("no corresponding generalized polygon",
                                  ("BCN", "Thm. 6.5.1."))
        if g == 4:
            try:
                integralize(s*t*(s+1)*(t+1) / (s+t))
            except TypeError:
                raise InfeasibleError("infeasible parameters "
                                      "for generalized quadrangle",
                                      ("PayneThas", "1.2.2."))
        elif g == 6 and 1 in [s, t]:
            m = next(x for x in [s, t] if x != 1)
            if isinstance(m, Integer) and m % 4 in [1, 2] and \
                    not is_squareSum(m):
                raise InfeasibleError("Bruck-Ryser theorem",
                                      ("BCN", "Thm. 1.10.4."))
    if self._.antipodal and self._.d == 3 and \
            self._.b[0] == (self._.r - 1) * (self._.c[2] + 1):
        # Cover corresponding to a generalized quadrangle
        # with a spread.
        s = self._.r - 1
        t = self._.c[2] + 1
        if s > t**2 or t > s**2:
            raise InfeasibleError("no corresponding "
                                  "generalized quadrangle",
                                  ("BCN", "Thm. 6.5.1."))
        if s > t * (t-1):
            raise InfeasibleError("no spread in corresponding "
                                  "generalized quadrangle",
                                  [("BCN", "Prop. 12.5.2."),
                                   ("PayneThas", "1.8.3.")])
        try:
            integralize(s*t*(s+1)*(t+1) / (s+t))
        except TypeError:
            raise InfeasibleError("infeasible parameters "
                                  "for generalized quadrangle",
                                  ("PayneThas", "1.2.2."))
@check(1)
def check_clawBound(self):
    """
    Check the claw bound for strongly regular graphs.
    """
    if not self._has("theta"):
        self.eigenvalues()
    if self._.d == 2:
        # s <= r are the two nontrivial eigenvalues.
        s, r = sorted(self._.theta[1:],
                      key=lambda x: CoefficientList(x, self._.vars))
        if self._.c[2] not in [s*s, s*(s+1)] and \
                2*(r+1) > s*(s+1)*(self._.c[2]+1):
            raise InfeasibleError("claw bound exceeded",
                                  "BrouwerVanLint84")
@check(1)
def check_maximalClique(self):
    """
    For a strongly regular graph,
    check whether the range of possible sizes of maximal cliques
    is nonempty.

    Raises ``InfeasibleError`` when no feasible maximal clique
    size exists (GKP21).
    """
    if self._.d != 2:
        return
    # Ensure eigenvalues are available before reading self._.theta;
    # the sibling checks (e.g. check_clawBound) perform the same
    # guard, and without it this check would fail when invoked first.
    if not self._has("theta"):
        self.eigenvalues()
    # m is the negation of the smallest eigenvalue.
    m = -min(self._.theta, key=lambda x: CoefficientList(x, self._.vars))
    b = self._.c[2] - m*(m-1)
    if checkNonneg(-b):
        return
    x = SR.symbol("__x")
    M = self.maximalCliquePolynomial(x)
    # c and d bound the feasible clique sizes from below and above.
    c = self.guaranteed_clique_order()
    d = floor(1 + self._.k[1] / m)
    if c > self._.c[2]**2 / b - m + 1 and \
            M.subs(x == c) < 0 and M.subs(x == d) < 0:
        raise InfeasibleError("no feasible maximal clique size", "GKP21")
@check(1)
def check_terwilliger(self):
    """
    Check whether the graph is a Terwilliger graph
    and whether existence conditions are satisfied in this case,
    or if the Terwilliger diameter bound is satisfied otherwise.
    """
    if not self._has("theta"):
        self.eigenvalues()
    # "Small" graphs in the sense of BCN Cor. 1.16.6.
    small = (self._.d == 2 and 50 * self._.c[2] > self._.n) or \
        (self._.d >= 3 and 50 * (self._.c[2] - 1) > self._.b[0])
    if self._.d >= 2 and isinstance(self._.b[0], Integer) and \
            isinstance(self._.a[1], Integer) and \
            isinstance(self._.c[2], Integer):
        if all(is_constant(th) for th in self._.theta):
            th = min(self._.theta)
        else:
            th = None
        if self._.b[0] == 10 and self._.a[1] == 3 and \
                (self._.c[2] == 2 or self._.b[2] > self._.c[2]):
            s = 4
        elif th is not None and self._.a[1] != 2 and \
                -1 - self._.b[1]/(th+1) < self._.a[1]:
            s = ceil(self._.b[0] / self._.a[1])
        else:
            s = ceil(self._.b[0] / (self._.a[1] + 1))
        v = 2*(s*(self._.a[1] + 1) - self._.b[0]) / \
            (s*(s-1)) + 1 - self._.c[2]
        if v > 0:
            raise InfeasibleError("coclique bound exceeded",
                                  ("KoolenPark10", "Thm. 3."))
        elif v == 0:
            # v = 0 corresponds to the Terwilliger graph case.
            if small and not self.is_locallyPetersen() and \
                    not self.match(PENTAGON, PETERSEN, ICOSAHEDRON):
                raise InfeasibleError("too small for a "
                                      "Terwilliger graph",
                                      ("BCN", "Cor. 1.16.6."))
            return
    aab = self._.a[1]*(self._.a[1]-1) / self._.b[1]
    aabc = self._.c[2]-1 > aab
    if self._.c[2] >= 2 and (small or aabc or
                             (self._.d >= 3 and self._.c[3] > 1
                              and 2*self._.c[2] > self._.c[3])):
        if aabc and aab < self._.b[2] - self._.b[1] + self._.a[1] + 1:
            raise InfeasibleError("quadrangle per claw bound "
                                  "exceeded", ("BCN", "Thm. 5.2.1.(ii)"))
        elif any(self._.c[i] + self._.a[1] + self._.b[i+1] + 2
                 > self._.b[i] + self._.c[i+1]
                 for i in range(self._.d)):
            raise InfeasibleError("Terwilliger's diameter bound "
                                  "not reached", ("BCN", "Thm. 5.2.1."))
@check(1)
def check_secondEigenvalue(self):
    """
    For a graph with the second eigenvalue equal to b[1]-1,
    check whether it belongs to the characterization.
    """
    if not self._has("theta"):
        self.eigenvalues()
    if (self._.b[1] - 1) in self._.theta:
        # The characterization comprises Hamming, locally Petersen,
        # Johnson and halved cube graphs, the Gosset graph, and some
        # small/degenerate cases excluded by the first condition.
        if (self._.d != 2 or all(th != -2 for th in self._.theta)
                or (self._.b[1] != 1 and self._.n > 28)) and \
                self._.c[2] != 1 and \
                not (self.is_hamming() or
                     self.is_locallyPetersen() or
                     self.is_johnson() or
                     self.is_halfCube() or
                     self.match(GOSSET)):
            raise InfeasibleError("theta[1] = b[1]-1, "
                                  "not in characterization",
                                  ("BCN", "Thm. 4.4.11."))
@check(1)
def check_localEigenvalues(self, compute=False, check_range=True):
    """
    For a graph of diameter at least 3 with a[1] > 2,
    check whether the eigenvalues of the local graph
    are in the allowed range.
    If ``compute`` is set to ``True``,
    then the relevant triple intersection numbers will be computed.
    If ``check_range`` is ``False``,
    the detailed eigenvalue-range analysis is skipped.
    """
    if not self._has("m"):
        self.multiplicities()
    if self._.d < 3 or self._.a[1] <= 2 or self.match(ICOSAHEDRON) or \
            not all(is_constant(th) for th in self._.theta
                    if th != self._.k[1]):
        return
    th1, i, thd, j, bm, bp = self._compute_localEigenvalues()
    if (bm > -2 and self._.c[2] != 1) or bp < 1:
        raise InfeasibleError("local eigenvalues not in allowed range",
                              ("BCN", "Thm. 4.4.3."))
    if not self._.bipartite:
        mu = self._.a[1] + bp*bm
        # bd: bound on local eigenvalues; fb: fundamental bound.
        bd = self._.k[1] * mu - \
            (self._.a[1] - bp) * (self._.a[1] - bm)
        fb = self._.k[1] * self._.a[1] * self._.b[1] + \
            (th1 * (self._.a[1] + 1) + self._.k[1]) * \
            (thd * (self._.a[1] + 1) + self._.k[1])
        if bd > 0:
            raise InfeasibleError("bound on local eigenvalues "
                                  "exceeded", u"JurišićKoolen00")
        if fb < 0:
            raise InfeasibleError("fundamental bound exceeded", "JKT00")
        elif bd == 0 or fb == 0:
            # Equality implies a strongly regular local graph.
            try:
                integralize(self._.c[2]*mu/2)
                if self._.c[2] < mu + 1:
                    raise TypeError
            except TypeError:
                raise InfeasibleError("local graph strongly regular",
                                      u"JurišićKoolen00")
            if self._.d == 4 and self._.antipodal:
                try:
                    bm = integralize(bm)
                    bp = integralize(bp)
                    integralize((bp - bm) / self._.r)
                    if bp < 1 or bm > -2:
                        raise TypeError
                except TypeError:
                    raise InfeasibleError("locally strongly regular "
                                          "antipodal graph with d=4",
                                          u"JurišićKoolen00")
            self._.subconstituents[1] = self.add_subscheme(
                DRGParameters((self._.a[1], -(bp+1)*(bm+1)),
                              (Integer(1), mu)), "local graph")

    def checkMul(h):
        # Return a (reason, reference) pair if the multiplicity m[h]
        # violates one of the known lower bounds, or None otherwise.
        if self._.antipodal and self._.omega[h, self._.d] != 1 and \
                self._.m[h] < self._.k[1] + self._.r - 2:
            return ("m[%d] < k+r-2" % h, "GodsilHensel92")
        elif self._.a[self._.d] == 0 and \
                1 not in [self._.omega[h, 2],
                          self._.omega[h, self._.d]] \
                and self._.m[h] < \
                self._.k[1] + self._.b[self._.d-1] - 1:
            return ("m[%d] < k+b[d-1]-1" % h, "GodsilKoolen95")
        elif self._.m[h] < self._.k[1]:
            return ("m[%d] < k" % h, ("BCN", "Thm. 4.4.4."))
        else:
            return None

    d = {h: checkMul(h) for h in range(1, self._.d+1)}
    # Violations are only allowed for the eigenvalue indices i and j.
    s = {h for h, v in d.items() if v is not None}
    if not s.issubset([i, j]):
        m, k = min((self._.m[h], h) for h in s if h not in [i, j])
        reason, ref = d[k]
        raise InfeasibleError(reason, ref)
    r = []
    for h in s:
        t = self._.b[1] / (self._.theta[h] + 1)
        try:
            integralize(t)
        except TypeError:
            r.append(t)
    if len(r) != 0:
        # Nonintegral values must form a pair of algebraic conjugates.
        p = next(iter(r)).minpoly()
        if len(r) == 1 or p.degree() != 2 or \
                len({t.minpoly() for t in r}) == 2 or \
                not is_algebraic_integer(p):
            m, k = min((self._.m[h], h) for h in s)
            reason, ref = d[k]
            raise InfeasibleError(reason + ", b[1]/(theta[1]+1) and "
                                  "b[1]/(theta[d]+1) not integers "
                                  "or algebraic conjugates", ref)
    if not check_range:
        return
    rng, refs = self.localEigenvalue_range(compute=compute,
                                           b=(bm, bp),
                                           return_refs=True)
    c = rng.cardinality()
    if rng.sup() <= bp or self._.subconstituents[1] is not None or \
            not isinstance(c, Integer):
        return
    # Finitely many candidate local eigenvalues: try to find valid
    # multiplicities for the local graph's spectrum.
    ths = {SR.symbol("__m%d" % i): ii.lower()
           for i, ii in enumerate(rng) if ii.lower() != self._.a[1]}
    exps = {m: (0, self._.k[1] - 1) for m in ths}
    # Standard constraints: multiplicities sum to k-1, eigenvalues sum
    # to -a[1] (trace 0), squares sum to (k - a[1]) * a[1].
    conds = [sum(m for m in ths) == self._.k[1] - 1,
             sum(a*m for m, a in ths.items()) == -self._.a[1],
             sum(a**2 * m for m, a in ths.items())
             == (self._.k[1] - self._.a[1]) * self._.a[1]]
    lvl = 0
    reason = None
    ref = None
    for sol in find(exps, ths.keys(), conds):
        sp = {ths[eq.lhs()]: eq.rhs()
              for eq in sol if eq.rhs() != 0}
        lsp = len(sp)
        if lsp <= 3:
            # Check integrality of closed walk counts (vanDam95).
            q = max(p ** (e+1 if p == 2 else e)
                    for p, e in Integer(self._.k[1]).factor())
            thi = {th: th**2 * m for th, m in sp.items()}
            thi[self._.a[1]] = self._.a[1]**2
            for i in range(3, q+1):
                for th in thi:
                    thi[th] *= th
                tr = sum(thi.values())
                if Integer(tr) % \
                        Integer(self._.k[1] * (1 + i % 2)) != 0:
                    if lvl < 1:
                        lvl = 1
                        reason = "local graph has nonintegral " \
                            "number of closed %d-walks " \
                            "through a vertex" % i
                        ref = "vanDam95"
                    break
                if i == 4:
                    xi = Integer(tr / self._.k[1] -
                                 self._.a[1] * (2*self._.a[1] - 1))
                    if xi % 2 != 0:
                        if lvl < 2:
                            lvl = 2
                            reason = "local graph has nonintegral " \
                                "number of quadrangles " \
                                "through a vertex"
                            ref = "vanDam95"
                        break
                    if lsp == 2 and xi % Integer(self._.a[1]) != 0:
                        if lvl < 3:
                            lvl = 3
                            reason = "local graph has nonintegral " \
                                "number of quadrangles " \
                                "through an edge"
                            ref = ("vanDam95", "Lem. 3.1.")
                        break
            else:
                # A feasible multiplicity assignment was found.
                return
        else:
            return
    if reason is None:
        reason = "no solution for the multiplicities " \
            "of the eigenvalues of the local graph"
    else:
        refs.append(ref)
    raise InfeasibleError(reason, refs)
# Aliases mapping distance-regular-graph terminology onto the
# generic association-scheme methods.
antipodalQuotient = PolyASParameters.antipodalSubscheme
bipartiteHalf = PolyASParameters.bipartiteSubscheme
diameter = PolyASParameters.classes
distanceGraphs = PolyASParameters.partSchemes
intersectionArray = PolyASParameters.parameterArray
mergeClasses = merge
substitute = subs
|
python
|
import sys
import os
import subprocess
from smt.sampling_methods import LHS
import numpy as np
from scipy import stats
from surmise.emulation import emulator
from dt import cross_section, s_factor
# Reduced mass in the deuteron channel.
MU_D = 1124.6473494927284

# Relative uncertainty for the "better" samples (command-line arg 1).
rel_unc = float(sys.argv[1])

# Indices (into DEFAULT_VALUES) of the parameters being varied.
indices = np.array([0, 1, 2])

# Energy grid and the corresponding momenta, p = sqrt(2*mu*E).
energies = np.linspace(0.010, 0.200, 10)
momenta = np.sqrt(2*MU_D*energies)

n = 1  # output dimension
num_pars = indices.size

# Default values.
AD = 6.0
AN = 4.0
UE = 0.0
A = 0.0
GD2 = 3.0
GN2 = 0.5
ER = 0.070
DEFAULT_VALUES = np.array([ER, GD2, GN2, AD, AN, UE, A])

# Sampling bounds for the varied parameters; rows align with `indices`.
BOUNDS = np.array([[0.010, 0.120],
                   [1, 5],
                   [0.001, 0.5]])

NTRAIN = 250  # number of training points
NTEST = 50  # number of testing points
class RMatrixModel:
    """Evaluate the R-matrix S factor for a subset of parameters,
    keeping the remaining parameters fixed at DEFAULT_VALUES."""

    def __init__(self, parameter_indices):
        # Indices into DEFAULT_VALUES that `theta` overrides.
        self.parameter_indices = parameter_indices

    def s_factor(self, energy, theta):
        # NOTE(review): theta[0] is deliberately passed twice as two
        # separate arguments of dt.s_factor — confirm against the
        # signature of dt.s_factor.
        return s_factor(energy, theta[0], theta[0], *theta[1:])

    def evaluate(self, energy, theta):
        # Overlay the sampled parameters onto the defaults, then
        # evaluate the S factor with the full parameter vector.
        thetap = np.copy(DEFAULT_VALUES)
        thetap[self.parameter_indices] = theta
        return self.s_factor(energy, thetap)
# Model over the selected parameters and their sampling bounds.
model = RMatrixModel(indices)
bounds = BOUNDS[indices, :]

# Set up Latin hypercube sampling to generate the training/testing space.
generator = LHS(xlimits=bounds)
# Convenience function for generating a matrix of data.
def generate_data(m):
    '''
    Generates a matrix of data. Organized according to:
    momentum | theta_0 | theta_1 | theta_2 | y
    '''
    theta_space = generator(m)
    rows = []
    for k in momenta:
        energy = 0.5*k**2/MU_D
        for theta in theta_space:
            rows.append([k, *theta, model.evaluate(energy, theta)])
    return np.array(rows)
# Generating data.
# Rows: momentum | theta_0 | theta_1 | theta_2 | S factor.
train = generate_data(NTRAIN)
test = generate_data(NTEST)
# Add more samples near the true values.
def my_truncnorm(mu, sigma, lower, upper):
    """Truncated normal with mean ``mu`` and width ``sigma``,
    restricted to the interval [``lower``, ``upper``].

    scipy's truncnorm takes its truncation bounds in standardized
    units, so convert before constructing the distribution.
    """
    a = (lower - mu) / sigma
    b = (upper - mu) / sigma
    return stats.truncnorm(a, b, loc=mu, scale=sigma)
theta_true = np.loadtxt('datfiles/theta_true.txt')[:3]
# Truncated normals centered on the true parameter values, truncated
# below at 0 and above at the upper sampling bound.
dists = [my_truncnorm(mu, rel_unc*mu, 0, bounds[i, -1]) for (i, mu) in enumerate(theta_true)]
better_samples = np.array([[d.rvs() for d in dists] for _ in range(NTRAIN)])
train_better = np.array([
    [k, *theta, model.evaluate(0.5*k**2/MU_D, theta)] for k in momenta for theta in better_samples
])
np.savetxt(rf'datfiles/better_training_data_{rel_unc:.2f}.txt', train_better, header='''
Momentum (MeV) | E_r (MeV) | gamma_d^2 (MeV) | gamma_n^2 (MeV) | S factor (MeV b)
''')
# Testing samples aren't actually any different.
np.savetxt(rf'datfiles/better_testing_data_{rel_unc:.2f}.txt', test, header='''
Momentum (MeV) | E_r (MeV) | gamma_d^2 (MeV) | gamma_n^2 (MeV) | S factor (MeV b)
''')
|
python
|
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
# (c) 2013-2015 Zedge Inc.
#
# Author: Muhammad A. Norozi
# ([email protected])
import yaml
def get_options(config_file):
    """Load and return the options from the given YAML config file.

    Uses ``yaml.safe_load`` instead of ``yaml.load``: the config is
    plain data, and the full loader can construct arbitrary Python
    objects from untrusted input.  The file is opened with a context
    manager so the handle is closed deterministically (the original
    ``yaml.load(open(...))`` leaked it until garbage collection).
    """
    with open(config_file) as f:
        return yaml.safe_load(f)
if __name__ == '__main__':
    from pprint import PrettyPrinter
    pp = PrettyPrinter()
    cfg = get_options('zedge_config.yaml')
    pp.pprint(cfg)
    image_classifiers = cfg['image_classifiers']
    for c in image_classifiers:
        # print() with a single argument behaves the same on Python 2
        # and 3; the former `print c` statement is a SyntaxError on 3.
        print(c)
|
python
|
from tkinter import *
from tkinter import messagebox
from tkinter import filedialog
from tkinter.ttk import Button, Style
from tkinter import ttk
import binascii
import os
import json
import sys
import base64
import datetime
import pprint
import copy
from core.client_core import ClientCore as Core
from transaction.transactions import Transaction
from transaction.transactions import CoinbaseTransaction
from transaction.transactions import TransactionInput
from transaction.transactions import TransactionOutput
from transaction.utxo_manager import UTXOManager as UTXM
from utils.key_manager import KeyManager
from utils.rsa_util import RSAUtil
from p2p.message_manager import (
MSG_NEW_TRANSACTION,
MSG_ENHANCED,
)
class SimpleBC_Gui(Frame):
def __init__(self, parent, my_port, c_host, c_port):
    """Initialize the client and build the GUI.

    parent: the Tk root window; my_port: own listening port;
    c_host, c_port: address of the core node to connect to.
    """
    Frame.__init__(self, parent)
    self.parent = parent
    # Route the window-close button through quit() so the client
    # core is shut down cleanly.
    self.parent.protocol('WM_DELETE_WINDOW', self.quit)
    self.coin_balance = StringVar(self.parent, '0')
    self.status_message = StringVar(self.parent, 'Ready')
    self.c_core = None
    self.initApp(my_port, c_host, c_port)
    self.setupGUI()
def quit(self, event=None):
    """
    Quit the application: shut down the client core
    and destroy the root window.
    """
    self.c_core.shutdown()
    self.parent.destroy()
def initApp(self, my_port, c_host, c_port):
    """
    Perform all necessary initialization,
    including connecting to the ClientCore.
    """
    print('SimpleBitcoin client is now activating ...: ')
    self.km = KeyManager()
    self.um = UTXM(self.km.my_address())
    self.rsa_util = RSAUtil()
    self.c_core = Core(my_port, c_host, c_port, self.update_callback)
    self.c_core.start()
    # For testing only (a real client would never do this):
    # seed the wallet with three coinbase transactions.
    t1 = CoinbaseTransaction(self.km.my_address())
    t2 = CoinbaseTransaction(self.km.my_address())
    t3 = CoinbaseTransaction(self.km.my_address())
    transactions = []
    transactions.append(t1.to_dict())
    transactions.append(t2.to_dict())
    transactions.append(t3.to_dict())
    self.um.extract_utxos(transactions)
    self.update_balance()
def display_info(self, title, info):
"""
ダイアログボックスを使ったメッセージの表示
"""
f = Tk()
label = Label(f, text=title)
label.pack()
info_area = Text(f, width=70, height=50)
info_area.insert(INSERT, info)
info_area.pack()
def update_callback(self):
print('update_callback was called!')
s_transactions = self.c_core.get_stored_transactions_from_bc()
print(s_transactions)
self.um.extract_utxos(s_transactions)
self.update_balance()
def update_status(self, info):
"""
画面下部のステータス表示内容を変更する
"""
self.status_message.set(info)
def update_balance(self):
"""
総額表示の内容を最新状態に合わせて変更する
"""
bal = str(self.um.my_balance)
self.coin_balance.set(bal)
def create_menu(self):
"""
メニューバーに表示するメニューを定義する
"""
top = self.winfo_toplevel()
self.menuBar = Menu(top)
top['menu'] = self.menuBar
self.subMenu = Menu(self.menuBar, tearoff=0)
self.menuBar.add_cascade(label='Menu', menu=self.subMenu)
self.subMenu.add_command(label='Show My Address', command=self.show_my_address)
self.subMenu.add_command(label='Load my Keys', command=self.show_input_dialog_for_key_loading)
self.subMenu.add_command(label='Update Blockchain', command=self.update_block_chain)
self.subMenu.add_separator()
self.subMenu.add_command(label='Quit', command=self.quit)
self.subMenu2 = Menu(self.menuBar, tearoff=0)
self.menuBar.add_cascade(label='Settings', menu=self.subMenu2)
self.subMenu2.add_command(label='Renew my Keys', command=self.renew_my_keypairs)
self.subMenu3 = Menu(self.menuBar, tearoff=0)
self.menuBar.add_cascade(label='Advance', menu=self.subMenu3)
self.subMenu3.add_command(label='Show Blockchain', command=self.show_my_block_chain)
def show_my_address(self):
f = Tk()
label = Label(f, text='My Address')
label.pack()
key_info = Text(f, width=70, height=10)
my_address = self.km.my_address()
key_info.insert(INSERT, my_address)
key_info.pack()
def show_input_dialog_for_key_loading(self):
def load_my_keys():
# ファイル選択ダイアログの表示
f2 = Tk()
f2.withdraw()
fTyp = [('','*.pem')]
iDir = os.path.abspath(os.path.dirname(__file__))
messagebox.showinfo('Load key pair','please choose your key file')
f_name = filedialog.askopenfilename(filetypes = fTyp,initialdir = iDir)
try:
file = open(f_name)
data = file.read()
target = binascii.unhexlify(data)
# TODO: 本来は鍵ペアのファイルが不正などの異常系処理を考えるべき
self.km.import_key_pair(target, p_phrase.get())
except Exception as e:
print(e)
finally:
# TODO: 所有コインの再確認処理を入れる必要あり
file.close()
f.destroy()
f2.destroy()
self.um = UTXM(self.km.my_address())
self.um.my_balance = 0
self.update_balance()
f = Tk()
label0 = Label(f, text='Please enter pass phrase for your key pair')
frame1 = ttk.Frame(f)
label1 = ttk.Label(frame1, text='Pass Phrase:')
p_phrase = StringVar()
entry1 = ttk.Entry(frame1, textvariable=p_phrase)
button1 = ttk.Button(frame1, text='Load', command=load_my_keys)
label0.grid(row=0,column=0,sticky=(N,E,S,W))
frame1.grid(row=1,column=0,sticky=(N,E,S,W))
label1.grid(row=2,column=0,sticky=E)
entry1.grid(row=2,column=1,sticky=W)
button1.grid(row=3,column=1,sticky=W)
def update_block_chain(self):
self.c_core.send_req_full_chain_to_my_core_node()
def renew_my_keypairs(self):
"""
利用する鍵ペアを更新する。
"""
def save_my_pem():
self.km = KeyManager()
my_pem = self.km.export_key_pair(p_phrase)
my_pem_hex = binascii.hexlify(my_pem).decode('ascii')
# とりあえずファイル名は固定
path = 'my_key_pair.pem'
f1 = open(path,'a')
f1.write(my_pem_hex)
f1.close()
f.destroy()
self.um = UTXM(self.km.my_address())
self.um.my_balance = 0
self.update_balance()
f = Tk()
f.title('New Key Gene')
label0 = Label(f, text='Please enter pass phrase for your new key pair')
frame1 = ttk.Frame(f)
label1 = ttk.Label(frame1, text='Pass Phrase:')
p_phrase = StringVar()
entry1 = ttk.Entry(frame1, textvariable=p_phrase)
button1 = ttk.Button(frame1, text='Generate', command=save_my_pem)
label0.grid(row=0,column=0,sticky=(N,E,S,W))
frame1.grid(row=1,column=0,sticky=(N,E,S,W))
label1.grid(row=2,column=0,sticky=E)
entry1.grid(row=2,column=1,sticky=W)
button1.grid(row=3,column=1,sticky=W)
def show_my_block_chain(self):
"""
自分が保持しているブロックチェーンの中身を確認する
"""
mychain = self.c_core.get_my_blockchain()
if mychain is not None:
mychain_str = pprint.pformat(mychain, indent=2)
self.display_info('Current Blockchain', mychain_str)
else:
self.display_info('Warning', 'Currently Blockchain is empty...')
def setupGUI(self):
"""
画面に必要なパーツを並べる
"""
self.parent.bind('<Control-q>', self.quit)
self.parent.title('SimpleBitcoin GUI')
self.pack(fill=BOTH, expand=1)
self.create_menu()
lf = LabelFrame(self, text='Current Balance')
lf.pack(side=TOP, fill='both', expand='yes', padx=7, pady=7)
lf2 = LabelFrame(self, text='')
lf2.pack(side=BOTTOM, fill='both', expand='yes', padx=7, pady=7)
#所持コインの総額表示領域のラベル
self.balance = Label(lf, textvariable=self.coin_balance, font='Helvetica 20')
self.balance.pack()
#受信者となる相手の公開鍵
self.label = Label(lf2, text='Recipient Address:')
self.label.grid(row=0, pady=5)
self.recipient_pubkey = Entry(lf2, bd=2)
self.recipient_pubkey.grid(row=0, column=1, pady=5)
# 送金額
self.label2 = Label(lf2, text='Amount to pay :')
self.label2.grid(row=1, pady=5)
self.amountBox = Entry(lf2, bd=2)
self.amountBox.grid(row=1, column=1, pady=5, sticky='NSEW')
# 手数料
self.label3 = Label(lf2, text='Fee (Optional) :')
self.label3.grid(row=2, pady=5)
self.feeBox = Entry(lf2, bd=2)
self.feeBox.grid(row=2, column=1, pady=5, sticky='NSEW')
# 間隔の開け方がよくわからんので空文字で場所確保
self.label4 = Label(lf2, text='')
self.label4.grid(row=5, pady=5)
# 送金実行ボタン
self.sendBtn = Button(lf2, text='\nSend Coin(s)\n', command=self.sendCoins)
self.sendBtn.grid(row=6, column=1, sticky='NSEW')
# 下部に表示するステータスバー
stbar = Label(self.winfo_toplevel(), textvariable=self.status_message, bd=1, relief=SUNKEN, anchor=W)
stbar.pack(side=BOTTOM, fill=X)
# 送金実行ボタン押下時の処理実体
def sendCoins(self):
sendAtp = self.amountBox.get()
recipientKey = self.recipient_pubkey.get()
sendFee = self.feeBox.get()
utxo_len = len(self.um.utxo_txs)
if not sendAtp:
messagebox.showwarning('Warning', 'Please enter the Amount to pay.')
return
elif len(recipientKey) <= 1:
messagebox.showwarning('Warning', 'Please enter the Recipient Address.')
return
else:
result = messagebox.askyesno('Confirmation', 'Sending {} SimpleBitcoins to :\n {}'.format(sendAtp, recipientKey))
if not sendFee:
sendFee = 0
if result:
if 0 < utxo_len:
print('Sending {} SimpleBitcoins to reciever:\n {}'.format(sendAtp, recipientKey))
else:
messagebox.showwarning('Short of Coin.', 'Not enough coin to be sent...')
return
utxo, idx = self.um.get_utxo_tx(0)
t = Transaction(
[TransactionInput(utxo, idx)],
[TransactionOutput(recipientKey, int(sendAtp))]
)
counter = 1
# TransactionInputが送信額を超えるまで繰り返して取得しTransactionとして完成させる
if type(sendFee) is not str:
sendFee = int(sendFee)
while t.is_enough_inputs(sendFee) is not True:
new_utxo, new_idx = self.um.get_utxo_tx(counter)
t.inputs.append(TransactionInput(new_utxo, new_idx))
counter += 1
if counter > utxo_len:
messagebox.showwarning('Short of Coin.', 'Not enough coin to be sent...')
break
# 正常なTransactionが生成できた時だけ秘密鍵で署名を実行する
if t.is_enough_inputs(sendFee) is True:
# まずお釣り用Transactionを作る
change = t.compute_change(sendFee)
t.outputs.append(TransactionOutput(self.km.my_address(), change))
to_be_signed = json.dumps(t.to_dict(), sort_keys=True)
signed = self.km.compute_digital_signature(to_be_signed)
new_tx = json.loads(to_be_signed)
new_tx['signature'] = signed
# TransactionをP2P Networkに送信
tx_strings = json.dumps(new_tx)
self.c_core.send_message_to_my_core_node(MSG_NEW_TRANSACTION, tx_strings)
print('signed new_tx:', tx_strings)
# 実験的にお釣り分の勘定のため新しく生成したTransactionをUTXOとして追加しておくが
# 本来はブロックチェーンの更新に合わせて再計算した方が適切
self.um.put_utxo_tx(t.to_dict())
to_be_deleted = 0
del_list = []
while to_be_deleted < counter:
del_tx = self.um.get_utxo_tx(to_be_deleted)
del_list.append(del_tx)
to_be_deleted += 1
for dx in del_list:
self.um.remove_utxo_tx(dx)
self.amountBox.delete(0,END)
self.feeBox.delete(0,END)
self.recipient_pubkey.delete(0,END)
self.update_balance()
def main(my_port, c_host, c_port):
    """Build the wallet GUI on a fresh Tk root and enter the event loop."""
    root_window = Tk()
    gui = SimpleBC_Gui(root_window, my_port, c_host, c_port)
    root_window.mainloop()
if __name__ == '__main__':
    # Usage: Wallet_App.py <my_port> <core_node_ip_address> <core_node_port_num>
    args = sys.argv
    if len(args) == 4:
        my_port = int(args[1])
        c_host = args[2]
        c_port = int(args[3])
    else:
        print('Param Error')
        print('$ Wallet_App.py <my_port> <core_node_ip_address> <core_node_port_num>')
        # sys.exit() is the reliable way to abort a script with an error status;
        # quit() is the interactive helper injected by the `site` module and is
        # not guaranteed to exist (e.g. under `python -S`).
        sys.exit(1)
    main(my_port, c_host, c_port)
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import numpy as np
from .AxiElement import AxiElement
from .Index import Index
from .tools import path_length, transform
# Mostly rom Axi by Michael Fogleman
# https://github.com/fogleman/axi/blob/master/axi/spatial.py
class Path(AxiElement):
    """A drawable path: a list of polylines, each a list of (x, y) points.

    Supports construction from another Path/AxiElement, a raw point list, or
    an SVG path-description string, and provides length queries, pen-travel
    optimizations (sorting/joining), geometric fitting helpers, and
    SVG / G-code serialization.
    """
    def __init__(self, path=None, **kwargs):
        AxiElement.__init__(self, **kwargs);
        # self.head_width = kwargs.pop('head_width', 0.2)
        # self.id = kwargs.pop('id', None)
        if path is None:
            self.path = []
        elif isinstance(path, Path):
            self.path = path.path
        elif isinstance(path, AxiElement):
            self.path = path.getPath()
        elif isinstance(path, str):
            # SVG path-description string, e.g. "M0 0 L10 10 Z"
            self.path = []
            self.setFromString(path)
        else:
            self.path = path
        # Caches for the length/down_length properties (computed lazily)
        self._length = None
        self._down_length = None
    def __len__(self):
        # Number of polylines, not total number of points.
        return len(self.path)
    def __iter__(self):
        self._index = 0
        return self
    def __next__(self):
        if self._index < len(self.path):
            result = self[ self._index ]
            self._index += 1
            return result
        else:
            raise StopIteration
    def next(self):
        # Python 2 iterator-protocol compatibility.
        return self.__next__()
    def __getitem__(self, index):
        from .Polyline import Polyline
        if type(index) is int:
            # Wrap the raw point list in a Polyline carrying this path's transform.
            return Polyline( self.path[index], translate=self.translate, scale=self.scale, rotate=self.rotate, head_width=self.head_width )
        else:
            return None
    @property
    def length(self):
        """Total travel length: pen-down drawing plus pen-up moves between polylines."""
        if self._length is None:
            length = self.down_length
            for p0, p1 in zip(self.path, self.path[1:]):
                x0, y0 = p0[-1]
                x1, y1 = p1[0]
                length += math.hypot(x1 - x0, y1 - y0)
            self._length = length
        return self._length
    @property
    def up_length(self):
        """Pen-up travel length between consecutive polylines."""
        return self.length - self.down_length
    @property
    def down_length(self):
        """Pen-down (drawing) length summed over all polylines."""
        if self._down_length is None:
            self._down_length = path_length(self.path)
        return self._down_length
    @property
    def width(self):
        return self.bounds.width
    @property
    def height(self):
        return self.bounds.height
    def add(self, other):
        """Append another Path / Polyline / AxiElement / point-list to this path."""
        from .Polyline import Polyline
        if isinstance(other, Path):
            self.path.extend( other.path )
        elif isinstance(other, Polyline):
            points = other.getPoints()
            if len(points) > 1:
                self.path.append( points )
        elif isinstance(other, AxiElement):
            self.path.extend( other.getPath() )
        elif isinstance(other, list):
            self.path.append( other )
        else:
            raise Exception("Error, don't know what to do with: ", other)
    def setFromString(self, path_string, **kwargs):
        """Parse an SVG path-description string and append the resulting polylines."""
        # From Andy Port
        # https://github.com/mathandy/svgpathtools/blob/master/svgpathtools/parser.py#L35
        import re
        COMMANDS = set('MmZzLlHhVvCcSsQqTtAa')
        UPPERCASE = set('MZLHVCSQTA')
        COMMAND_RE = re.compile("([MmZzLlHhVvCcSsQqTtAa])")
        FLOAT_RE = re.compile("[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?")
        def tokenize_path(pathdef):
            # Yield command letters and numeric tokens in source order.
            for x in COMMAND_RE.split(pathdef):
                if x in COMMANDS:
                    yield x
                for token in FLOAT_RE.findall(x):
                    yield token
        # In the SVG specs, initial movetos are absolute, even if
        # specified as 'm'. This is the default behavior here as well.
        # But if you pass in a current_pos variable, the initial moveto
        # will be relative to that current_pos. This is useful.
        elements = list(tokenize_path(path_string))
        # Reverse for easy use of .pop()
        elements.reverse()
        current_pos = np.array([0.0, 0.0])
        start_pos = np.array([0.0, 0.0])
        command = None
        last_control = []
        from .Polyline import Polyline
        poly = Polyline(fill=self.fill, stroke_width=self.stroke_width)
        while elements:
            if elements[-1] in COMMANDS:
                # New command.
                last_command = command # Used by S and T
                command = elements.pop()
                absolute = command in UPPERCASE
                command = command.upper()
            else:
                # If this element starts with numbers, it is an implicit command
                # and we don't change the command. Check that it's allowed:
                if command is None:
                    raise ValueError("Unallowed implicit command in %s, position %s" % (
                        path_string, len(path_string.split()) - len(elements)))
            if command == 'M':
                # Moveto command.
                pos = np.array([float(elements.pop()), float(elements.pop())])
                if absolute:
                    current_pos = pos
                else:
                    current_pos += pos
                # when M is called, reset start_pos
                if poly.size() > 0:
                    self.add( poly.getPath() )
                    poly = Polyline(fill=self.fill, stroke_width=self.stroke_width)
                # This behavior of Z is defined in svg spec:
                # http://www.w3.org/TR/SVG/paths.html#PathDataClosePathCommand
                start_pos = current_pos
                # Implicit moveto commands are treated as lineto commands.
                # So we set command to lineto here, in case there are
                # further implicit commands after this moveto.
                command = 'L'
                poly.lineTo( current_pos )
            elif command == 'Z':
                # Close path
                # if not (current_pos.all() == start_pos.all()):
                #     poly.lineTo( start_pos )
                poly.setClose( True )
                poly = poly.getSimplify()
                self.add( poly.getPath() )
                poly = Polyline(fill=self.fill, stroke_width=self.stroke_width)
                current_pos = start_pos
                command = None
            elif command == 'L':
                pos = np.array([float(elements.pop()), float(elements.pop())])
                if not absolute:
                    pos += current_pos
                poly.lineTo( pos )
                current_pos = pos
            elif command == 'H':
                # Horizontal lineto: only an x coordinate is given.
                pos = np.array([float(elements.pop()), current_pos[1]])
                if not absolute:
                    pos += np.array([ current_pos[0], 0 ])
                poly.lineTo( pos )
                current_pos = pos
            elif command == 'V':
                # Vertical lineto: only a y coordinate is given.
                pos = np.array([current_pos[0], float(elements.pop())])
                if not absolute:
                    pos += np.array([ 0, current_pos[1] ])
                poly.lineTo( pos )
                current_pos = pos
            elif command == 'C':
                # Cubic Bezier curve.
                control1 = np.array([float(elements.pop()), float(elements.pop())])
                control2 = np.array([float(elements.pop()), float(elements.pop())])
                end = np.array([float(elements.pop()), float(elements.pop())])
                if not absolute:
                    control1 += current_pos
                    control2 += current_pos
                    end += current_pos
                poly.cubicBezierTo( control1, control2, end )
                current_pos = end
                last_control = control2[:]
            elif command == 'S':
                # Smooth curve. First control point is the "reflection" of
                # the second control point in the previous path.
                if last_command not in 'CS':
                    # If there is no previous command or if the previous command
                    # was not an C, c, S or s, assume the first control point is
                    # coincident with the current point.
                    control1 = current_pos
                else:
                    # The first control point is assumed to be the reflection of
                    # the second control point on the previous command relative
                    # to the current point.
                    control1 = current_pos + current_pos - last_control
                control2 = np.array([float(elements.pop()), float(elements.pop())])
                end = np.array([float(elements.pop()), float(elements.pop())])
                if not absolute:
                    control2 += current_pos
                    end += current_pos
                poly.cubicBezierTo( control1, control2, end )
                current_pos = end
                last_control = control2
            # elif command == 'Q':
            #     control = float(elements.pop()) + float(elements.pop()) * 1j
            #     end = float(elements.pop()) + float(elements.pop()) * 1j
            #     if not absolute:
            #         control += current_pos
            #         end += current_pos
            #     segments.append(QuadraticBezier(current_pos, control, end))
            #     current_pos = end
            # elif command == 'T':
            #     # Smooth curve. Control point is the "reflection" of
            #     # the second control point in the previous path.
            #     if last_command not in 'QT':
            #         # If there is no previous command or if the previous command
            #         # was not an Q, q, T or t, assume the first control point is
            #         # coincident with the current point.
            #         control = current_pos
            #     else:
            #         # The control point is assumed to be the reflection of
            #         # the control point on the previous command relative
            #         # to the current point.
            #         control = current_pos + current_pos - segments[-1].control
            #     end = float(elements.pop()) + float(elements.pop()) * 1j
            #     if not absolute:
            #         end += current_pos
            #     segments.append(QuadraticBezier(current_pos, control, end))
            #     current_pos = end
            elif command == 'A':
                # Elliptical arc.
                radius = np.array([float(elements.pop()), float(elements.pop())])
                rotation = float(elements.pop())
                arc = float(elements.pop())
                sweep = float(elements.pop())
                end = np.array([float(elements.pop()), float(elements.pop())])
                if not absolute:
                    end += current_pos
                poly.arcTo( end, radius, large_arc=arc, sweep=sweep, rotate=rotation )
                current_pos = end
            else:
                print('Command', command, 'in not implemented' )
        # Flush any trailing open polyline.
        if poly.size() > 0:
            poly = poly.getSimplify()
            self.add( poly.getPath() )
    def getPoints(self):
        """Return all points of all polylines as one flat list of (x, y) tuples."""
        return [(x, y) for points in self.path for x, y in points]
    def getConvexHull(self):
        """Return the convex hull of all points as a Polyline (requires Shapely)."""
        try:
            from .Polyline import Polyline
            from shapely import geometry
        except ImportError:
            geometry = None
        if geometry is None:
            raise Exception('Polyline.getConvexHull() requires Shapely')
        polygon = geometry.Polygon( self.getPoints() )
        return Polyline( polygon.convex_hull.exterior.coords, head_width=self.head_width )
    def getTexture(self, width, height, **kwargs):
        """Return a Texture with each polyline encoded as NaN-separated normalized coords."""
        resolution = kwargs.pop('resolution', None)
        from .Texture import Texture
        from .Polyline import Polyline
        texture = Texture(width=width, height=height, **kwargs)
        for points in self.path:
            if resolution:
                poly = Polyline(points)
                poly = poly.getResampledBySpacing(resolution)
                points = poly.getPoints()
            N = len(points)
            x = np.zeros(int(N)+1)
            y = np.zeros(int(N)+1)
            x.fill(np.nan)
            y.fill(np.nan)
            for i in range(N):
                X, Y = points[i]
                x[i] = X / float(texture.width)
                y[i] = Y / float(texture.height)
            # Trailing NaN marks the end of this polyline in the texture stream.
            x[N] = np.nan
            y[N] = np.nan
            texture.add( (x.flatten('F'), y.flatten('F')) )
        return texture
    def getSorted(self, reversable=True):
        """Return a Path with polylines greedily reordered to minimize pen-up travel."""
        if len(self.path) < 2:
            return self
        path = self.path[:]
        first = path[0]
        path.remove(first)
        result = [first]
        points = []
        # Index each polyline by both endpoints; reversed entries allow
        # drawing a polyline backwards when that is closer.
        for path in path:
            x1, y1 = path[0]
            x2, y2 = path[-1]
            points.append((x1, y1, path, False))
            if reversable:
                points.append((x2, y2, path, True))
        if len(points) <= 2:
            return self
        index = Index( chain=points )
        while index.size > 0:
            # Pick the polyline whose endpoint is nearest to the last drawn point.
            x, y, path, reverse = index.nearest(result[-1][-1])
            x1, y1 = path[0]
            x2, y2 = path[-1]
            index.remove((x1, y1, path, False))
            if reversable:
                index.remove((x2, y2, path, True))
            if reverse:
                result.append(list(reversed(path)))
            else:
                result.append(path)
        return Path( result, head_width=self.head_width )
    def getJoined(self, tolerance = None, boundary = None):
        """Return a Path with consecutive polylines merged when their ends are close.

        If *boundary* (a Shapely geometry) is given, polylines are joined only
        when the connecting pen stroke stays inside that boundary.
        """
        try:
            from shapely import geometry
        except ImportError:
            geometry = None
        if boundary != None and geometry is None:
            print('Path.joined() will not work with boundary bacause needs Shapely')
            boundary = None
        if len(self.path) < 2:
            return self
        if tolerance is None:
            tolerance = self.head_width
        result = [list(self.path[0])]
        for path in self.path[1:]:
            x1, y1 = result[-1][-1]
            x2, y2 = path[0]
            join = False
            if boundary != None:
                walk_path = geometry.LineString( [result[-1][-1], path[0]] )
                walk_cut = walk_path.buffer( self.head_width * 0.5 )
                join = walk_cut.within(boundary) # and walk_path.length < max_walk
            else:
                join = math.hypot(x2 - x1, y2 - y1) <= tolerance
            if join:
                result[-1].extend(path)
            else:
                result.append(list(path))
        return Path(result)
    def getSimplify(self, tolerance = None):
        """Return a Path with each polyline simplified (near-collinear points removed)."""
        from .Polyline import Polyline
        result = Path()
        for points in self.path:
            if len(points) > 1:
                result.add( Polyline( points ).getSimplify(tolerance) )
        return result
    def getResampledBySpacing(self, spacing, **kwargs):
        """Return a Path with each polyline resampled at roughly even *spacing*."""
        from .Polyline import Polyline
        result = Path()
        for points in self.path:
            if len(points) > 1:
                result.add( Polyline( points, **kwargs ).getResampledBySpacing(spacing) )
        return result
    def getTransformed(self, func):
        """Return a Path with *func(x, y)* applied to every point."""
        return Path([[func(x, y) for x, y in points] for points in self.path])
    def getMoved(self, x, y, ax, ay):
        """Return a translated Path placing anchor (ax, ay, in 0..1 of the bbox) at (x, y)."""
        bbox = self.bounds
        x1, y1, x2, y2 = bbox.limits
        dx = x1 + (x2 - x1) * ax - x
        dy = y1 + (y2 - y1) * ay - y
        return self.getTranslated(-dx, -dy)
    def getCentered(self, width, height):
        """Return a Path centered inside a width x height area."""
        return self.getMoved(width / 2, height / 2, 0.5, 0.5)
    def getRotatedToFit(self, width, height, step=5):
        """Return a rotated+centered Path fitting width x height, or None if impossible."""
        for angle in range(0, 180, step):
            path = self.getRotated(angle)
            if path.width <= width and path.height <= height:
                return path.getCentered(width, height)
        return None
    def getScaledToFit(self, width, height, padding=0):
        """Return a uniformly scaled, centered Path fitting width x height."""
        width -= padding * 2
        height -= padding * 2
        scale = min(width / self.width, height / self.height)
        return self.getScaled(scale, scale).getCentered(width, height)
    def getRotateAndScaleToFit(self, width, height, padding=0, step=1):
        """Return the rotation+scale combination maximizing size within width x height."""
        values = []
        width -= padding * 2
        height -= padding * 2
        # Fit is evaluated on the convex hull, which is cheaper than the full path.
        hull = self.getConvexHull()
        for angle in range(0, 180, step):
            d = hull.getRotated(angle)
            scale = min(width / d.bounds.width, height / d.bounds.height)
            values.append((scale, angle))
        scale, angle = max(values)
        return self.getRotated(angle).getScaled(scale, scale).getCentered(width, height)
    def getSVGElementString(self):
        """Return this path serialized as an SVG <path> element string."""
        path_str = ''
        if len(self.path) == 0:
            return path_str
        # for points in self.path:
        #     path_str += 'M' + ' L'.join('{0} {1}'.format(x,y) for x,y in points)
        if self.isTranformed:
            # Bake the transform into the emitted coordinates.
            for points in self.path:
                first = True
                for point in points:
                    p = transform(point, translate=self.translate, scale=self.scale, rotate=self.rotate)
                    if first:
                        first = False
                        path_str += 'M%0.1f %0.1f' % (p[0], p[1])
                    else:
                        path_str += 'L%0.1f %0.1f' % (p[0], p[1])
        else:
            for points in self.path:
                path_str += 'M' + ' L'.join('{0} {1}'.format(x,y) for x,y in points)
        svg_str = '<path '
        if self.id != None:
            svg_str += 'id="' + self.id + '" '
        svg_str += 'd="' + path_str + '" '
        svg_str += 'fill="none" stroke="black" stroke-width="'+str(self.head_width) + '" '
        svg_str += '/>\n'
        return svg_str
    def getGCodeString(self, **kwargs):
        """Return this path serialized as G-code for a pen plotter / CNC head."""
        head_up_height = kwargs.pop('head_up_height', 3)
        head_down_height = kwargs.pop('head_down_height', -0.5)
        head_up_speed = kwargs.pop('head_up_speed', 800)
        head_down_speed = kwargs.pop('head_down_speed', 500)
        move_speed = kwargs.pop('move_speed', 300)
        # bed_max_x = kwargs.pop('bed_max_x', 200)
        # bed_max_y = kwargs.pop('bed_max_y', 200)
        transformed = self.isTranformed
        gcode_str = ''
        for points in self.path:
            # Raise the head, travel to the polyline start, then lower the head.
            gcode_str += "G0 Z%0.1f F" % (head_up_height) + str(head_up_speed) + "\n"
            p = points[0][:]
            if transformed:
                p = transform(points[0], translate=self.translate, scale=self.scale, rotate=self.rotate)
                gcode_str += "G0 X%0.1f Y%0.1f\n" % (p[0], p[1])
            else:
                gcode_str += "G0 X%0.1f Y%0.1f\n" % (p[0], p[1])
            gcode_str += "G1 Z%0.1f F" % (head_down_height) + str(head_down_speed) +"\n"
            first = True
            for point in points[1:]:
                # if x > 0 and x < bed_max_x and y > 0 and y < bed_max_y:
                p = point[:]
                if transformed:
                    p = transform(p, translate=self.translate, scale=self.scale, rotate=self.rotate)
                gcode_str += "G1 X%0.1f Y%0.1f" % (p[0], p[1])
                if first:
                    # Set the feed rate once per polyline, on the first draw move.
                    gcode_str += " F" + str(move_speed)
                    first = False
                gcode_str += '\n'
            gcode_str += "G0 Z%0.1f\n" % (head_up_height)
        return gcode_str
|
python
|
from django.conf.urls import include, url
from django.contrib.auth import views as auth_views

from . import views
# URL routes for this app. String dotted-path view arguments to url() were
# deprecated in Django 1.8 and removed in 1.10; pass the view callables
# imported at the top of the file instead.
# NOTE(review): the login/logout patterns have no leading '^' anchor, so any
# URL *ending* in accounts/login/ matches — kept as-is, confirm if intended.
urlpatterns = [
    url(r'accounts/login/$',
        auth_views.login,
        name='login'),
    url(r'accounts/logout/$',
        auth_views.logout,
        name='logout'),
    url(r'^$', views.IndexView.as_view(), name='index'),
]
|
python
|
from setuptools import setup, find_packages
# Long description shown on PyPI.
long_description_text = '''
Pula is a python library that is meant to encompass the many various functions that the ordinary user
finds themselves needing in the different projects they are working on. These functions can span from
simple is_number functions all the way to file functions such as getting all lines of text from a directory
of code / text files.
By putting all of these functions into one library, we hope to clean up other users code and make solving
their problems easier as well as more efficient.
'''
# Package metadata; build with e.g. `python setup.py sdist bdist_wheel`.
setup(name='pula',
      version='1.0.0.dev4',
      # Fixed typo in the user-visible summary: "Convienent" -> "Convenient".
      description='Convenient add-on library for Python, providing frequently used functions.',
      long_description= long_description_text,
      url='https://github.com/dkfulp/pula',
      author='dkfulp',
      author_email='[email protected]',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
      ],
      license = 'MIT',
      keywords='convenient scripting tools',
      packages=['pula'],
      #install_requires=[]
      )
|
python
|
# Copyright 2019-2021 by Peter Cock, The James Hutton Institute.
# All rights reserved.
# This file is part of the THAPBI Phytophthora ITS1 Classifier Tool (PICT),
# and is released under the "MIT License Agreement". Please see the LICENSE
# file that should have been included as part of this package.
"""Explore conflicts at species and genus level."""
import sys
from sqlalchemy.orm import aliased
from sqlalchemy.orm import contains_eager
from .db_orm import connect_to_db
from .db_orm import MarkerDef
from .db_orm import MarkerSeq
from .db_orm import SeqSource
from .db_orm import Taxonomy
from .utils import genus_species_name
def main(db_url, output_filename, debug=False):
    """Implement the ``thapbi_pict conflicts`` subcommand.

    Reports unique sequences recorded in the DB under more than one marker,
    genus, or species. The return value (used as the process exit code) is
    the number of marker-level plus genus-level conflicts, giving a non-zero
    Unix status whenever such conflicts exist. A marker conflict (one
    sequence under several marker names, e.g. both COI and ITS1) most likely
    indicates an error in DB construction; genus conflicts are undesirable;
    species conflicts are reported but not counted, as for some markers they
    are sadly common and not considered errors.
    """
    # "-" means write the report to stdout rather than to a file.
    if output_filename == "-":
        out_handle = sys.stdout
    else:
        out_handle = open(output_filename, "w")
    # Connect to the DB and open a session.
    session = connect_to_db(db_url, echo=False)()
    # Join so the marker and taxonomy tables are eagerly loaded per sequence.
    cur_tax = aliased(Taxonomy)
    marker_seq = aliased(MarkerSeq)
    marker_def = aliased(MarkerDef)
    view = (
        session.query(SeqSource)
        .join(marker_seq, SeqSource.marker_seq)
        .join(marker_def, SeqSource.marker_definition)
        .join(cur_tax, SeqSource.taxonomy)
        .options(contains_eager(SeqSource.marker_seq, alias=marker_seq))
        .options(contains_eager(SeqSource.marker_definition, alias=marker_def))
        .options(contains_eager(SeqSource.taxonomy, alias=cur_tax))
    )
    md5_to_seq = {}
    md5_to_marker = {}
    md5_to_genus = {}
    md5_to_species = {}
    for seq_source in view:
        md5 = seq_source.marker_seq.md5
        genus = seq_source.taxonomy.genus
        md5_to_seq[md5] = seq_source.marker_seq.sequence
        md5_to_marker.setdefault(md5, set()).add(seq_source.marker_definition.name)
        if genus:
            md5_to_genus.setdefault(md5, set()).add(genus)
            if seq_source.taxonomy.species:
                md5_to_species.setdefault(md5, set()).add(
                    genus_species_name(genus, seq_source.taxonomy.species)
                )
    sys.stderr.write(f"Loaded taxonomy for {len(md5_to_seq)} sequences from DB\n")
    marker_conflicts = 0
    genus_conflicts = 0
    out_handle.write("#MD5\tLevel\tConflicts\n")
    for md5, markers in sorted(md5_to_marker.items()):
        if len(markers) > 1:
            out_handle.write(f"{md5}\tmarker\t{';'.join(sorted(markers))}\n")
            marker_conflicts += 1
    for md5, genera in sorted(md5_to_genus.items()):
        if len(genera) > 1:
            out_handle.write(f"{md5}\tgenus\t{';'.join(sorted(genera))}\n")
            genus_conflicts += 1
    for md5, species in sorted(md5_to_species.items()):
        if len(species) > 1:
            out_handle.write(f"{md5}\tspecies\t{';'.join(sorted(species))}\n")
    if output_filename != "-":
        out_handle.close()
    if debug:
        sys.stderr.write(f"{marker_conflicts} marker level conflicts\n")
        sys.stderr.write(f"{genus_conflicts} genus level conflicts\n")
    return marker_conflicts + genus_conflicts  # non-zero error
|
python
|
# Generated by Django 2.0 on 2019-03-12 20:51
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
    # Auto-generated by Django 2.0 (makemigrations): adds a nullable
    # 'project' StreamField (list of project StructBlocks with name,
    # menu_title and project_url) and a 'project_title' CharField to
    # PortfolioPage. Do not edit by hand except to squash.
    dependencies = [
        ('portfolio', '0016_auto_20190312_1954'),
    ]
    operations = [
        migrations.AddField(
            model_name='portfoliopage',
            name='project',
            field=wagtail.core.fields.StreamField((('project', wagtail.core.blocks.StructBlock((('name', wagtail.core.blocks.CharBlock(classname='full title')), ('menu_title', wagtail.core.blocks.CharBlock(classname='full title')), ('project_url', wagtail.core.blocks.CharBlock(classname='full title'))))),), blank=True, null=True),
        ),
        migrations.AddField(
            model_name='portfoliopage',
            name='project_title',
            field=models.CharField(blank=True, max_length=150),
        ),
    ]
|
python
|
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import defaultdict
from sqlalchemy.dialects.postgresql.base import PGDialect
from sqlalchemy.sql import sqltypes
from sqlalchemy import util, sql
from sqlalchemy.engine import reflection
from .base import BaseDialect, MixedBinary
# Override the PG dialect's LargeBinary handling with MixedBinary so that
# values coming back from the JDBC driver are normalized correctly.
colspecs = util.update_copy(
    PGDialect.colspecs, {sqltypes.LargeBinary: MixedBinary,},
)
class PGJDBCDialect(BaseDialect, PGDialect):
    """SQLAlchemy dialect for PostgreSQL accessed through a JDBC driver."""
    jdbc_db_name = "postgresql"
    jdbc_driver_name = "org.postgresql.Driver"
    colspecs = colspecs
    def initialize(self, connection):
        # No JDBC-specific initialization needed beyond the PG defaults.
        super(PGJDBCDialect, self).initialize(connection)
    def create_connect_args(self, url):
        """Convert a SQLAlchemy URL into (args, kwargs) for the JDBC DB-API connect."""
        if url is None:
            return
        # dialects expect jdbc url in the form of
        # "jdbc:postgresql://example.com:1521/db"
        # if sqlalchemy create_engine() url is passed as
        # "postgresql://scott:[email protected]/db"
        # it is parsed wrong
        # restore original url
        s: str = str(url)
        # get jdbc url
        jdbc_url: str = s.split("//", 1)[-1]
        # add driver information
        if not jdbc_url.startswith("jdbc"):
            jdbc_url = f"jdbc:{self.jdbc_db_name}://{jdbc_url}"
        kwargs = {
            "jclassname": self.jdbc_driver_name,
            "url": jdbc_url,
            # pass driver args via JVM System settings
            "driver_args": []
        }
        return ((), kwargs)
    @reflection.cache
    def get_unique_constraints(
        self, connection, table_name, schema=None, **kw
    ):
        """Reflect UNIQUE constraints for *table_name* straight from the PG catalogs.

        Overridden because JDBC result rows may expose PG arrays (conkey) as
        Java arrays; see the getArray() handling below.
        """
        table_oid = self.get_table_oid(
            connection, table_name, schema, info_cache=kw.get("info_cache")
        )
        UNIQUE_SQL = """
            SELECT
                cons.conname as name,
                cons.conkey as key,
                a.attnum as col_num,
                a.attname as col_name
            FROM
                pg_catalog.pg_constraint cons
                join pg_attribute a
                on cons.conrelid = a.attrelid AND
                    a.attnum = ANY(cons.conkey)
            WHERE
                cons.conrelid = :table_oid AND
                cons.contype = 'u'
        """
        t = sql.text(UNIQUE_SQL).columns(col_name=sqltypes.Unicode)
        c = connection.execute(t, table_oid=table_oid)
        uniques = defaultdict(lambda: defaultdict(dict))
        for row in c.fetchall():
            uc = uniques[row.name]
            # JDBC may return the conkey array as a java.sql.Array; unwrap it.
            uc["key"] = (
                row.key.getArray() if hasattr(row.key, "getArray") else row.key
            )
            uc["cols"][row.col_num] = row.col_name
        return [
            {"name": name, "column_names": [uc["cols"][i] for i in uc["key"]]}
            for name, uc in uniques.items()
        ]
dialect = PGJDBCDialect
|
python
|
#!/usr/bin/env python
# Copyright (c) 2014-2017 Max Beloborodko.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__author__ = '[email protected]'
from abc import ABCMeta, abstractmethod, abstractproperty
class ISoup(object):
    """
    Class to work with UI objects.

    Abstract interface implemented per-platform; concrete subclasses supply
    the mouse/keyboard devices and window/object lookup.
    """
    # NOTE(review): '__metaclass__' is Python 2 syntax; on Python 3 this
    # attribute is ignored, so the abstract methods below would not be
    # enforced — confirm the intended Python version for this codebase.
    __metaclass__ = ABCMeta
    @abstractproperty
    def mouse(self):
        """
        Instance of IMouse implementation.
        """
    @abstractproperty
    def keyboard(self):
        """
        Instance of IKeyboard implementation.
        """
    @abstractmethod
    def get_object_by_coordinates(self, x, y):
        """
        Gets object by coordinates.
        :param int x: x coordinate.
        :param int y: y coordinate.
        :rtype: uisoup.interfaces.i_element.IElement
        :return: object that was found by given coordinates.
        """
    @abstractmethod
    def is_window_exists(self, obj_handle):
        """
        Verifies is window exists.
        :param str | int obj_handle: window name (string) or window
            handler (int) otherwise Desktop Window will be checked.
        :rtype: bool
        :return: True if window exists otherwise False.
        """
    @abstractmethod
    def get_window(self, obj_handle=None):
        """
        Gets window.
        :param str | int obj_handle: window name (string) or window
            handler (int) otherwise Desktop Window will be checked.
        :rtype: uisoup.interfaces.i_element.IElement
        :return: window object.
        """
    @abstractmethod
    def get_visible_window_list(self):
        """
        Gets list of visible windows.
        :rtype: list[uisoup.interfaces.i_element.IElement]
        :return: list of visible windows.
        """
    @abstractmethod
    def get_visible_object_list(self, window_name):
        """
        Gets list of visible objects for specified window.
        :param str window_name: window name.
        :rtype: list[uisoup.interfaces.i_element.IElement]
        :return: list of visible windows.
        """
|
python
|
"""
#####################################################################
Copyright (C) 1999-2015, Michele Cappellari
E-mail: michele.cappellari_at_physics.ox.ac.uk
For details on the method see:
Cappellari M., 2002, MNRAS, 333, 400
Updated versions of the software are available from my web page
http://purl.org/cappellari/software
If you have found this software useful for your
research, I would appreciate an acknowledgment to use of
`the MGE fitting method and software by Cappellari (2002)'.
This software is provided as is without any warranty whatsoever.
Permission to use, for non-commercial purposes is granted.
Permission to modify for personal or internal use is granted,
provided this copyright and disclaimer are included unchanged
at the beginning of the file. All other rights are reserved.
#####################################################################
NAME:
SECTORS_PHOTOMETRY
AUTHOR:
Michele Cappellari, Astrophysics Sub-department, University of Oxford, UK
PURPOSE:
Perform photometry of a galaxy image along sectors equally spaced in
angle. This routine assumes four-fold symmetry, so measurements in
the four quadrants are averaged together. This routine is useful to
generate the input photometry required by the MGE fitting routine
MGE_FIT_SECTORS.
EXPLANATION:
Further information on SECTORS_PHOTOMETRY is available in
Cappellari M., 2002, MNRAS, 333, 400
http://adsabs.harvard.edu/abs/2002MNRAS.333..400C
CALLING SEQUENCE:
s = sectors_photometry(img, eps, theta, xpeak, ypeak, badpixels=None,
n_sectors=19, minlevel=0, plot=False)
INPUTS:
IMG = The galaxy image as a 2D array.
EPS = The galaxy "average" ellipticity Eps = 1 - b/a = 1 - q'.
Photometry will be measured along elliptical annuli with
constant axial ellipticity EPS. The four quantities
(EPS, ANG, XC, YC) can be measured with the routine FIND_GALAXY.
ANG = Position angle measured counterclockwise from
the image Y axis, to the galaxy major axis.
XC = X coordinate of the galaxy center in pixels.
YC = Y coordinate of the galaxy center in pixels.
OUTPUTS (attributes of the sectors_photometry class):
RADIUS = Vector containing the radius of the surface brightness
measurements, taken from the galaxy center. This is given
in units of PIXELS (!).
ANGLE = Vector containing the polar angle of the surface brightness
measurements, taken from the galaxy major axis.
COUNTS = Vector containing the actual surface brightness measurements
in COUNTS (!) at the polar coordinates specified by the vectors
Radius and Angle. These three vectors have the same
number of elements.
OPTIONAL INPUT KEYWORDS:
N_SECTORS - Number of the sectors, equally spaced in eccentric anomaly,
from the galaxy major axis to the minor axis (one quadrant).
(default: 19 to cover the whole image with 5 degrees width).
SECTOR_WIDTH - Scalar giving the width of the sectors
in degrees (default: 5 degrees)
BADPIXELS - Boolean image mask with the same dimension as IMG.
True values are masked and ignored in the photometry.
MINLEVEL - Scalar giving the minimum COUNTS level to include
in the photometry. The measurement along one profile
will stop when the counts first go below this level.
EXAMPLE:
See mge_fit_example.py
MODIFICATION HISTORY:
V1.0.0: First implementation for the NGC2681 photometric modeling.
Michele Cappellari, ESO Garching, 27 september 1999
V2.0.0: Major revisions, to use it with MGE_FIT_SECTORS.
Leiden, January 2000, MC
V2.1.0: Further updates, Padova, August 2000, MC
V2.1.1: Added compilation options, MC, Leiden 20 May 2002
V2.1.2: Allow for N_SECTORS=1 to get a single profile centered at
a given PA. Use biweight mean instead of sigma-clipped mean.
MC, Leiden, 30 April 2004
V2.1.3: Reduced amount of verbose output. MC, Leiden, 24 October 2004
V2.1.4: Replaced LOGRANGE keyword in example with the new MAGRANGE.
MC, Leiden, 1 May 2005
V2.1.5: Forces image to be positive when computing weighted radius
to prevent possible negative radii at large radii. Thanks to
Michael Williams for reporting the problem and the fix.
MC, Oxford, 16 February 2009
V3.0.0: Translated from IDL into Python. MC, Aspen Airport, 8 February 2014
V3.0.1: Support both Python 2.6/2.7 and Python 3.x. MC, Oxford, 25 May 2014
V3.1.0: Improved image visualization of sampled photometric grid.
Sample angles uniformly in eccentric anomaly rather than polar angle.
Removed Scipy dependency. MC, Oxford, 23 September 2014
V3.1.1: Show badpixels as empty in checkboard plot.
Define input badpixels as a boolean mask. MC, Oxford, 30 May 2015
"""
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
#----------------------------------------------------------------------------
def _biweight_mean(y, itmax=10):
"""
Biweight estimate of the location (mean).
Implements the approach described in
"Understanding Robust and Exploratory Data Analysis"
Hoaglin, Mosteller, Tukey ed., 1983
"""
y = np.ravel(y)
c = 6.
fracmin = 0.03*np.sqrt(0.5/(y.size - 1))
y0 = np.median(y)
mad = np.median(np.abs(y - y0))
for it in range(itmax):
u2 = ((y - y0)/(c*mad))**2
u2 = u2.clip(0, 1)
w = (1 - u2)**2
y0 += np.sum(w*(y - y0))/np.sum(w)
mad_old = mad
mad = np.median(np.abs(y - y0))
frac = np.abs(mad_old - mad)/mad
if frac < fracmin:
break
return y0
#----------------------------------------------------------------------------
def _coordinates(q, pos_ang, xc, yc, s):
ang = np.radians(90 - pos_ang) # x-axis is major axis
x, y = np.ogrid[:s[0], :s[1]] - np.array([xc, yc])
x, y = x*np.cos(ang) - y*np.sin(ang), x*np.sin(ang) + y*np.cos(ang)
x2, y2 = x**2, y**2
rad = np.sqrt(x2 + y2) # Radius
rell = np.sqrt(x2 + y2/q**2) # Elliptical radius
ecc = np.arctan2(np.abs(y/q), np.abs(x)) # Eccentric anomaly [0, pi/2]
return rad, rell, ecc
#----------------------------------------------------------------------------
class sectors_photometry(object):
    """
    Measure the photometry of a galaxy image along sectors equally spaced
    in eccentric anomaly between the major and minor axes.

    Results are stored as attributes:
      radius -- radii of the measurements in pixels, from the centre
      angle  -- polar angle of each measurement from the major axis (deg)
      counts -- surface brightness at the corresponding (radius, angle)
    """
    def __init__(self, img, eps, ang, xc, yc, badpixels=None,
                 n_sectors=19, minlevel=0, plot=False):
        """
        This routine performs photometry along sectors linearly spaced
        in eccentric anomaly between the major and minor axis of a galaxy.
        In output it returns the three vectors RADIUS, ANGLE, CNT,
        containing the photometric measurements in polar coordinates.
        """
        xc, yc = int(round(xc)), int(round(yc))  # centre pixel indices
        s = img.shape
        q = 1 - eps  # axial ratio b/a of the measurement annuli
        minlevel = max(minlevel, 0)
        rad, rell, ecc = _coordinates(q, ang, xc, yc, s)
        rad[xc, yc] = 0.38  # Average radius within the central pixel
        rell[xc, yc] = 0.38
        if plot:
            # Boolean checkerboard filled in by _profile for diagnostics.
            self.grid = np.zeros_like(img, dtype=bool)
        # Sample radii with 24 isophotes per decade: factor 1.1 spacing.
        # Sample eccentric anomaly with n_sectors from 0-pi/2
        rell = np.round(24.2*np.log10(rell)).astype(int)
        ecc = np.round(2*(n_sectors - 1)/np.pi*ecc).astype(int)
        if badpixels is not None:
            if badpixels.dtype != bool:
                raise ValueError("BADPIXELS must be a boolean array")
            if badpixels.shape == img.shape:
                ecc[badpixels] = -1  # Negative flag value
            else:
                raise ValueError('BADPIXELS and IMG must have the same shape')
        # NOTE(review): all three names are bound to the *same* empty list
        # here, but each is immediately rebound by np.append below, so no
        # aliasing survives the loop.
        self.radius = self.counts = self.angle = []
        eccGrid = np.linspace(0, np.pi/2, n_sectors)  # Eccentric anomaly
        angGrid = np.degrees(np.arctan(np.tan(eccGrid)*q))  # Polar angle
        for k, angj in enumerate(angGrid):
            radj, cntj = self._profile(
                img, xc, yc, rad, rell, ecc, k, plot, minlevel)
            self.radius = np.append(self.radius, radj)
            self.counts = np.append(self.counts, cntj)
            self.angle = np.append(self.angle, radj*0 + angj)
        if plot:
            plt.imshow(np.log(img.clip(img[xc, yc]/1e4)), cmap='hot',
                       origin='lower', interpolation='none')
            if badpixels is not None:
                self.grid[badpixels] = 0  # show bad pixels as empty
            plt.imshow(self.grid, cmap='binary', alpha=0.3,
                       origin='lower', interpolation='none')
            plt.xlabel("pixels")
            plt.ylabel("pixels")
    def _profile(self, data, xc, yc, rad, rell, ecc, k, plot, minlevel):
        """
        Extract the radial profile of sector ``k``.

        Returns (radius, cnt): luminosity-weighted radii (pixels) and mean
        counts per discrete elliptical-radius level, sorted by radius.
        The profile stops at the first level whose counts fall below
        ``minlevel``.
        """
        if ecc[xc, yc] != -1:
            ecc[xc, yc] = k  # Always include central pixel unless bad
        sector = np.flatnonzero(ecc == k)  # flat indices of this sector
        irad = rell.flat[sector]
        levels = np.unique(irad)  # get unique levels within sector
        cnt = np.empty(levels.size)
        radius = np.empty(levels.size)
        for j, lev in enumerate(levels):
            sub = sector[irad == lev]
            if sub.size > 10:  # Evaluate a biweight mean
                cnt[j] = _biweight_mean(data.flat[sub])
            else:
                cnt[j] = np.mean(data.flat[sub])  # Usual mean
            if plot:
                # Alternate the checkerboard phase between sectors.
                self.grid.flat[sub] = (lev + k % 2) % 2
            if cnt[j] < minlevel:  # drop last value
                cnt = cnt[:j]
                radius = radius[:j]
                break
            # Luminosity-weighted average radius in pixels
            flx = data.flat[sub].clip(0)
            radius[j] = np.sum(rad.flat[sub]*flx)/np.sum(flx)
        j = np.argsort(radius)
        cnt = cnt[j]
        radius = radius[j]
        return radius, cnt
#----------------------------------------------------------------------------
|
python
|
"""Simple script for converting SVG files to PDF files."""
import argparse
import glob
import os
import subprocess
import sys
import textwrap
def main():
    """Entry point: convert every SVG under the source directory to PDF.

    :returns: 0 on success, 1 if any error occurred (message on stderr).
    """
    try:
        options = _parse_args()
        for svg_path in find_svgs(options.source_dir):
            convert_svg_to_pdf(svg_path)
    except Exception as ex:
        print("Error: {0}".format(ex), file=sys.stderr)
        return 1
    return 0
def find_svgs(directory):
    """Recursively collect all SVG files below *directory*.

    :param directory: The directory to search for SVGs (files directly in
        it are included too).
    :returns: An enumerable of SVG file names.
    """
    pattern = os.path.join(directory, "**", "*.svg")
    return glob.glob(pattern, recursive=True)
def convert_svg_to_pdf(svg_name):
    """Convert the provided SVG to a PDF file using inkscape.

    The PDF keeps the same basename as the SVG; the original SVG is kept
    and an existing PDF with the same name is overwritten.

    NOTE(review): ``-z``/``-f``/``--export-pdf`` are the Inkscape 0.x CLI
    flags; Inkscape 1.x replaced them with ``--export-filename`` --
    confirm which Inkscape version is targeted.

    :param svg_name: The name of the SVG file to convert.
    """
    print("Converting: {}".format(svg_name))
    base, _ = os.path.splitext(svg_name)
    pdf_name = base + ".pdf"
    command = ['inkscape', '-z',
               '-f', svg_name,
               '--export-pdf={}'.format(pdf_name)]
    subprocess.run(command, check=True)
def _parse_args():
description = textwrap.dedent("""\
Converts SVG files to PDF files using inkscape.
The PDF file keeps the same basename as the SVG. The original SVGs are kept
and any existing PDFs with the same name are overwritten.
""")
epilog = textwrap.dedent("""\
see also:
* inkscape
""")
parser = argparse.ArgumentParser(
description=description,
epilog=epilog,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('source_dir', metavar="SOURCE_DIR",
help="Source directory to search for SVG files to convert.")
return parser.parse_args()
# Script entry point: the process exit status mirrors main()'s return value.
if __name__ == '__main__':
    sys.exit(main())
|
python
|
#
# Dynamic Routing Between Capsules
# https://arxiv.org/pdf/1710.09829.pdf
#
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from torchvision import datasets, transforms
import torch.nn.functional as F
class Conv1(nn.Module):
    """First convolutional layer of CapsNet: a 9x9 convolution (stride 1)
    from ``channels`` input planes to 256 feature maps, followed by ReLU.
    """

    def __init__(self, channels):
        super(Conv1, self).__init__()
        self.conv = nn.Conv2d(
            in_channels=channels,
            out_channels=256,
            kernel_size=9,
            stride=1,
            bias=True,
        )
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        # x: [batch_size, channels, 28, 28] -> h: [batch_size, 256, 20, 20]
        return self.relu(self.conv(x))
|
python
|
# Generated by Django 3.0.7 on 2020-06-19 00:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration (Django 3.0.7).

    Creates the custom ``User`` model with auth fields, social-login
    ``provider`` choices, and M2M links to ``auth.Group`` and
    ``auth.Permission``.  Avoid hand-editing field definitions here --
    create a follow-up migration instead.
    """
    initial = True
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('username', models.CharField(db_index=True, max_length=150, unique=True)),
                ('email', models.EmailField(db_index=True, max_length=150, unique=True)),
                ('surname', models.CharField(blank=True, db_index=True, max_length=50, null=True)),
                ('first_name', models.CharField(blank=True, db_index=True, max_length=50, null=True)),
                ('last_name', models.CharField(blank=True, db_index=True, max_length=50, null=True)),
                ('mobile_number', models.CharField(blank=True, max_length=15, null=True)),
                ('date_of_birth', models.DateField(blank=True, null=True)),
                ('provider', models.CharField(choices=[('EMAIL', 'EMAIL'), ('GOOGLE', 'GOOGLE'), ('FACEBOOK', 'FACEBOOK'), ('TWITTER', 'TWITTER'), ('GITHUB', 'GITHUB'), ('APPLE', 'APPLE'), ('PHONE', 'PHONE')], default='EMAIL', max_length=20)),
                ('is_superuser', models.BooleanField(default=False)),
                ('is_staff', models.BooleanField(default=False)),
                ('is_verified', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'ordering': ('created_at', 'updated_at', 'username'),
                'abstract': False,
            },
        ),
    ]
|
python
|
from time import *
from base import *
def convert():
    """Prompt for a numeric value and hand it to the unit-selection menu."""
    banner = "\n" + f"""\
{CRed}Valeur à convertir{CEnd}
"""
    print(banner)
    value = float(input(cmdline))
    volumesChoices(value)
def volumesChoices(quantity):
    """Interactive volume-unit conversion menu.

    Asks for a source unit and a target unit, then prints ``quantity``
    converted from the source to the target unit.

    Bug fixes over the original elif chain:
      * the retry call after picking the same unit twice passed no
        argument, raising TypeError;
      * the cL <-> m³ conversions used inverted factors
        (1 m³ = 100000 cL, not 1/100000).

    :param quantity: numeric value to convert.
    """
    # Conversion factors of each menu choice to the common base unit cm³.
    to_cm3 = {"1": 1.0, "2": 1e+6, "3": 1e+15, "4": 10.0, "5": 1000.0}
    labels = {"1": "cm³", "2": "m³", "3": "km³", "4": "cL", "5": "L"}

    def ask(title):
        # Print one unit menu and return the raw user choice.
        print("\n" + f"""\
{CRed}{title}
\n
{CBlue}[1]{CEnd} {CBeige}Centimètres³{CEnd} {CBlue}[5]{CEnd} {CBeige}Litres{CEnd}
{CBlue}[2]{CEnd} {CBeige}Mètres³{CEnd}
{CBlue}[3]{CEnd} {CBeige}Kilomètres³{CEnd}
{CBlue}[4]{CEnd} {CBeige}Centilitres{CEnd}
{CBlue}[e]{CEnd} {CBeige}exit{CEnd}
""")
        return input(cmdline)

    reponse1 = ask("Unité de volumes de base")
    if reponse1 == "e":
        cls()
        exit()

    reponse2 = ask("Unité de volumes à obtenir")
    if reponse2 == "e":
        exit()

    if reponse1 == reponse2:
        print("Il est impossible de convertir une unité par elle même !")
        sleep(1)
        cls()
        # BUG FIX: the original retried with volumesChoices() and no
        # argument, which raised TypeError.
        volumesChoices(quantity)
    elif reponse1 in to_cm3 and reponse2 in to_cm3:
        f1, f2 = to_cm3[reponse1], to_cm3[reponse2]
        # Multiply or divide by the exact power-of-ten ratio, mirroring
        # the single-operation arithmetic of the original code.
        if f1 >= f2:
            result = quantity * (f1 / f2)
        else:
            result = quantity / (f2 / f1)
        print(f"{CBlue}{result}{CBeige}{labels[reponse2]}{CEnd}")
|
python
|
"""
BERT/RoBERTa layers from the huggingface implementation
(https://github.com/huggingface/transformers)
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from apex.normalization.fused_layer_norm import\
FusedLayerNorm as BertLayerNorm
from .modeling_utils import prune_linear_layer
import math
import copy
import sys
def gelu(x):
    """Exact (erf-based) GELU activation, as originally implemented in the
    Google BERT repo.  OpenAI GPT uses the tanh approximation instead,
    which gives slightly different results (see ``gelu_new``):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi)
    * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    cdf = 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
    return x * cdf
def gelu_new(x):
    """Tanh approximation of GELU currently used in the Google BERT repo
    (identical to OpenAI GPT).  Also see https://arxiv.org/abs/1606.08415
    """
    inner = math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))
    return 0.5 * x * (1 + torch.tanh(inner))
def swish(x):
    """Swish activation: ``x * sigmoid(x)``."""
    return torch.sigmoid(x) * x
# Registry mapping activation-function names (as found in model configs,
# e.g. ``config.hidden_act``) to their implementations.
ACT2FN = {
    "gelu": gelu,
    "relu": torch.nn.functional.relu,
    "swish": swish, "gelu_new": gelu_new}
class MLPLayer(nn.Module):
    """Two-layer MLP head: Linear(in, 2*in) -> GELU -> LayerNorm ->
    Linear(2*in, out)."""

    def __init__(self, in_hsz, out_hsz):
        super(MLPLayer, self).__init__()
        hidden = in_hsz * 2
        self.linear_1 = nn.Linear(in_hsz, hidden)
        self.LayerNorm = BertLayerNorm(hidden, eps=1e-5)
        self.linear_2 = nn.Linear(hidden, out_hsz)
        self.act = gelu

    def forward(self, x):
        """(..., in_hsz) -> (..., out_hsz)"""
        hidden = self.LayerNorm(self.act(self.linear_1(x)))
        return self.linear_2(hidden)
class GELU(nn.Module):
    """Module wrapper around the functional ``gelu`` activation."""

    def forward(self, input_):
        return gelu(input_)
class LinearLayer(nn.Module):
    """linear layer configurable with layer normalization, dropout, ReLU."""

    def __init__(self, in_hsz, out_hsz, layer_norm=True,
                 dropout=0.1, relu=True):
        super(LinearLayer, self).__init__()
        self.relu = relu
        self.layer_norm = layer_norm
        if layer_norm:
            self.LayerNorm = BertLayerNorm(in_hsz, eps=1e-5)
        self.net = nn.Sequential(nn.Dropout(dropout),
                                 nn.Linear(in_hsz, out_hsz))

    def forward(self, x):
        """(N, L, D) -> (N, L, out_hsz)"""
        out = self.LayerNorm(x) if self.layer_norm else x
        out = self.net(out)
        if self.relu:
            out = F.relu(out, inplace=True)
        return out  # (N, L, D)
class BertSelfAttention(nn.Module):
    """Multi-head scaled dot-product self-attention (BERT-style)."""

    def __init__(self, config):
        super(BertSelfAttention, self).__init__()
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                "The hidden size (%d) is not a multiple of "
                "the number of attention heads (%d)" % (
                    config.hidden_size, config.num_attention_heads))
        self.output_attentions = config.output_attentions
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(
            config.hidden_size / config.num_attention_heads)
        self.all_head_size = (self.num_attention_heads
                              * self.attention_head_size)
        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    def transpose_for_scores(self, x):
        """(N, L, all_head) -> (N, heads, L, head_size)."""
        split_shape = x.size()[:-1] + (
            self.num_attention_heads, self.attention_head_size)
        return x.view(*split_shape).permute(0, 2, 1, 3)

    def forward(self, hidden_states, attention_mask=None, head_mask=None):
        q = self.transpose_for_scores(self.query(hidden_states))
        k = self.transpose_for_scores(self.key(hidden_states))
        v = self.transpose_for_scores(self.value(hidden_states))

        # Raw attention logits: query . key, scaled by sqrt(d_head).
        scores = torch.matmul(q, k.transpose(-1, -2))
        scores = scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Additive mask, precomputed in BertModel's forward().
            scores = scores + attention_mask

        # Normalize to probabilities; dropout on the probabilities drops
        # whole tokens to attend to, as in the original Transformer paper.
        probs = nn.Softmax(dim=-1)(scores)
        probs = self.dropout(probs)
        if head_mask is not None:
            probs = probs * head_mask

        context = torch.matmul(probs, v).permute(0, 2, 1, 3).contiguous()
        context = context.view(
            *(context.size()[:-2] + (self.all_head_size,)))

        if self.output_attentions:
            return (context, probs)
        return (context,)
class BertSelfOutput(nn.Module):
    """Projection + dropout + residual LayerNorm after self-attention."""

    def __init__(self, config):
        super(BertSelfOutput, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(
            config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class BertAttention(nn.Module):
    """Self-attention sub-block: BertSelfAttention followed by the residual
    output projection, with support for pruning attention heads."""
    def __init__(self, config):
        super(BertAttention, self).__init__()
        self.self = BertSelfAttention(config)
        self.output = BertSelfOutput(config)
        # Head indices (in the original numbering) that were removed.
        self.pruned_heads = set()
    def prune_heads(self, heads):
        """Remove the given attention heads from this layer in place.

        :param heads: iterable of head indices in the *original*
            numbering; heads already pruned are silently skipped.
        """
        if len(heads) == 0:
            return
        mask = torch.ones(
            self.self.num_attention_heads, self.self.attention_head_size)
        # Convert to set and remove already pruned heads
        heads = set(heads) - self.pruned_heads
        for head in heads:
            # Compute how many pruned heads are
            # before the head and move the index accordingly
            head = head - sum(1 if h < head else 0 for h in self.pruned_heads)
            mask[head] = 0
        mask = mask.view(-1).contiguous().eq(1)
        index = torch.arange(len(mask))[mask].long()
        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(
            heads)
        self.self.all_head_size =\
            self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)
    def forward(self, input_tensor, attention_mask=None, head_mask=None):
        """Run self-attention and the residual output projection."""
        self_outputs = self.self(input_tensor, attention_mask, head_mask)
        attention_output = self.output(self_outputs[0], input_tensor)
        # add attentions if we output them
        outputs = (attention_output,) + self_outputs[1:]
        return outputs
class BertIntermediate(nn.Module):
    """Position-wise feed-forward expansion: Linear -> activation."""

    def __init__(self, config):
        super(BertIntermediate, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # ``hidden_act`` is either an activation name (looked up in ACT2FN)
        # or a callable; the ``unicode`` test only evaluates on Python 2
        # thanks to short-circuiting.
        is_name = isinstance(config.hidden_act, str) or (
            sys.version_info[0] == 2 and
            isinstance(config.hidden_act, unicode))
        if is_name:
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))
class BertOutput(nn.Module):
    """Feed-forward output projection + dropout + residual LayerNorm."""

    def __init__(self, config):
        super(BertOutput, self).__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(
            config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class BertLayer(nn.Module):
    """One transformer encoder block: self-attention then feed-forward."""

    def __init__(self, config):
        super(BertLayer, self).__init__()
        self.attention = BertAttention(config)
        self.intermediate = BertIntermediate(config)
        self.output = BertOutput(config)

    def forward(self, hidden_states, attention_mask=None, head_mask=None):
        attn_outputs = self.attention(
            hidden_states, attention_mask, head_mask)
        ffn_hidden = self.intermediate(attn_outputs[0])
        layer_output = self.output(ffn_hidden, attn_outputs[0])
        # Forward the attention probabilities when the submodule emits them.
        return (layer_output,) + attn_outputs[1:]
class BertPooler(nn.Module):
    """Pool a sequence by projecting the hidden state of its first
    ([CLS]) token through a dense layer with tanh."""

    def __init__(self, config):
        super(BertPooler, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        cls_state = hidden_states[:, 0]  # first token of each sequence
        return self.activation(self.dense(cls_state))
class BertEncoder(nn.Module):
    """Stack of BertLayer blocks.

    Unlike the upstream HuggingFace encoder, ``forward`` builds the
    extended additive attention mask itself from the (N, L) mask.
    """

    def __init__(self, config):
        super(BertEncoder, self).__init__()
        self.output_attentions = config.output_attentions
        self.output_hidden_states = config.output_hidden_states
        self.layer = nn.ModuleList(
            [BertLayer(config) for _ in range(config.num_hidden_layers)])

    def forward(self, hidden_states, attention_mask=None, head_mask=None):
        # (N, L) -> (N, 1, 1, L) additive mask: 0 where attended,
        # -10000 where masked; dtype matched for fp16 compatibility.
        ext_mask = attention_mask.unsqueeze(1).unsqueeze(2)
        ext_mask = ext_mask.to(dtype=next(self.parameters()).dtype)
        ext_mask = (1.0 - ext_mask) * -10000.0

        all_hidden_states = ()
        all_attentions = ()
        for layer_module in self.layer:
            if self.output_hidden_states:
                all_hidden_states += (hidden_states,)
            layer_outputs = layer_module(hidden_states, ext_mask, None)
            hidden_states = layer_outputs[0]
            if self.output_attentions:
                all_attentions += (layer_outputs[1],)

        # Add last layer
        if self.output_hidden_states:
            all_hidden_states += (hidden_states,)

        outputs = (hidden_states,)
        if self.output_hidden_states:
            outputs += (all_hidden_states,)
        if self.output_attentions:
            outputs += (all_attentions,)
        # last-layer hidden state, (all hidden states), (all attentions)
        return outputs
class BertLMPredictionHead(nn.Module):
    """Masked-LM head: transform + LayerNorm, then decode to vocabulary
    logits with weights tied to the input embeddings (plus a separate
    output-only bias per token)."""

    def __init__(self, config, bert_model_embedding_weights):
        super(BertLMPredictionHead, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        if isinstance(config.hidden_act, str):
            self.transform_act_fn = ACT2FN[config.hidden_act]
        else:
            self.transform_act_fn = config.hidden_act
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-5)
        vocab_size = bert_model_embedding_weights.size(0)
        emb_dim = bert_model_embedding_weights.size(1)
        # Weight tying: the decoder shares the embedding matrix; the bias
        # is a separate per-token parameter.
        self.decoder = nn.Linear(emb_dim, vocab_size, bias=False)
        self.decoder.weight = bert_model_embedding_weights
        self.bias = nn.Parameter(torch.zeros(vocab_size))

    def forward(self, hidden_states):
        transformed = self.transform_act_fn(self.dense(hidden_states))
        transformed = self.LayerNorm(transformed)
        return self.decoder(transformed) + self.bias
def clones(module, n):
    """Produce n identical (deep-copied) layers as a ModuleList."""
    copies = (copy.deepcopy(module) for _ in range(n))
    return nn.ModuleList(copies)
|
python
|
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import gettext_lazy as _
from core.admin_filters import UserIsActiveFilter
from .models import CustomUser, UserProfileDriver, \
UserProfileStaff
from .services import UserDeleteService, UserDTO
from .events import UserEventsEmitter
from .slack_notification import SlackNotificationUser
class ProfileDriverModelForm(forms.ModelForm):
    """Admin form exposing every field of the driver profile."""
    class Meta:
        model = UserProfileDriver
        fields = "__all__"
class UserProfileDriverInline(admin.StackedInline):
    """Stacked inline so the driver profile is edited on the user page."""
    model = UserProfileDriver
    form = ProfileDriverModelForm
class ProfileStaffModelForm(forms.ModelForm):
    """Admin form exposing every field of the staff profile."""
    class Meta:
        model = UserProfileStaff
        fields = "__all__"
class UserProfileStaffInline(admin.StackedInline):
    """Stacked inline so the staff profile is edited on the user page."""
    model = UserProfileStaff
    form = ProfileStaffModelForm
class CustomUserAdmin(UserAdmin):
    """Admin for CustomUser with profile inlines and moderation actions.

    Hard deletion is disabled; users are removed via the ``unregister``
    and ``ban_permanently`` actions, which also emit events and send
    Slack notifications.
    """

    actions = ['unregister', 'ban_permanently']
    search_fields = ('username', 'phonenumber', '=uuid')
    fieldsets = (
        (None, {"fields": ("username", "password")}),
        (_("Personal info"), {"fields": ("email", "phonenumber", "type")}),
        (
            _("Permissions"),
            {
                "fields": (
                    "is_active",
                    "is_staff",
                    "is_superuser",
                    "groups",
                    "user_permissions",
                )
            },
        ),
        (_("Important dates"), {"fields": ("last_login", "date_joined")}),
    )
    list_display = (
        "username", "uuid", "email", "phonenumber", "is_active", "banned",
        "date_joined", "is_staff", 'type',
    )
    # BUG FIX: ``list_filter`` was assigned twice; the first assignment
    # ("is_staff", "is_superuser", "is_active", "groups") was dead code
    # immediately overwritten by the value below, so it is removed.
    list_filter = ['banned', UserIsActiveFilter, 'type']
    inlines = [UserProfileDriverInline, UserProfileStaffInline]

    def save_model(self, request, obj, form, change):
        """Save the user and invalidate their cached profile."""
        super(CustomUserAdmin, self).save_model(request, obj, form, change)
        # Imported lazily to avoid an import cycle with user.utils_user.
        from user.utils_user import delete_user_profile_cache
        delete_user_profile_cache(obj.uuid)

    def has_delete_permission(self, request, obj=None):
        """Disable hard deletes in the admin; use the actions instead."""
        return False

    @admin.action(description='Unregister User')
    def unregister(self, request, queryset):
        """Soft-delete the selected users and notify downstream systems."""
        for user in queryset:
            user_dto = UserDTO(user=user)
            UserDeleteService(user_dto).unregister()
            # event emitter
            user_data = {"uuid": user.uuid.hex}
            UserEventsEmitter().user_deactivated(user_data)
            # slack notification ("deacivated" is the method's actual,
            # misspelled, name on SlackNotificationUser)
            SlackNotificationUser().deacivated(user)

    @admin.action(description='Ban user')
    def ban_permanently(self, request, queryset):
        """Permanently ban the selected users and notify downstream systems."""
        for user in queryset:
            user_dto = UserDTO(user=user)
            UserDeleteService(user_dto).ban()
            # event emitter
            user_data = {"uuid": user.uuid.hex}
            UserEventsEmitter().user_banned(user_data)
            SlackNotificationUser().banned(user)


admin.site.register(CustomUser, CustomUserAdmin)
|
python
|
from django.urls import path
from . import views
urlpatterns = [
    # Create an event attached to a case.
    path("sprawa-<int:case_pk>/", views.EventCreateView.as_view(), name="add"),
    # Edit an existing event.
    path("wydarzenie-<int:pk>", views.EventUpdateView.as_view(), name="edit"),
    # Month view of the calendar ("<year>-<month>" in the URL).
    path(
        "<int:year>-<int:month>",
        views.CalendarEventView.as_view(month_format="%m"),
        name="calendar",
    ),
    # NOTE(review): this name duplicates the month view's "calendar";
    # reverse("calendar") resolution then depends on argument matching --
    # confirm the duplication is intentional.
    path("", views.CalendarListView.as_view(), name="calendar"),
    # iCal export of the calendar.
    path("ical/", views.ICalendarView.as_view(), name="calendar_ical"),
]
app_name = "poradnia.events"
|
python
|
# Generated from JavaParser.g4 by ANTLR 4.5.3
# encoding: utf-8
from antlr4 import *
from io import StringIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3q")
buf.write("\u0568\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\t")
buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t")
buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4")
buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4")
buf.write("h\th\4i\ti\3\2\5\2\u00d4\n\2\3\2\7\2\u00d7\n\2\f\2\16")
buf.write("\2\u00da\13\2\3\2\7\2\u00dd\n\2\f\2\16\2\u00e0\13\2\3")
buf.write("\2\3\2\3\3\7\3\u00e5\n\3\f\3\16\3\u00e8\13\3\3\3\3\3\3")
buf.write("\3\3\3\3\4\3\4\5\4\u00f0\n\4\3\4\3\4\3\4\5\4\u00f5\n\4")
buf.write("\3\4\3\4\3\5\7\5\u00fa\n\5\f\5\16\5\u00fd\13\5\3\5\3\5")
buf.write("\3\5\3\5\5\5\u0103\n\5\3\5\5\5\u0106\n\5\3\6\3\6\3\6\3")
buf.write("\6\3\6\5\6\u010d\n\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5")
buf.write("\7\u0117\n\7\3\b\3\b\5\b\u011b\n\b\3\t\3\t\3\t\5\t\u0120")
buf.write("\n\t\3\t\3\t\5\t\u0124\n\t\3\t\3\t\5\t\u0128\n\t\3\t\3")
buf.write("\t\3\n\3\n\3\n\3\n\7\n\u0130\n\n\f\n\16\n\u0133\13\n\3")
buf.write("\n\3\n\3\13\7\13\u0138\n\13\f\13\16\13\u013b\13\13\3\13")
buf.write("\3\13\3\13\5\13\u0140\n\13\3\f\3\f\3\f\7\f\u0145\n\f\f")
buf.write("\f\16\f\u0148\13\f\3\r\3\r\3\r\3\r\5\r\u014e\n\r\3\r\3")
buf.write("\r\5\r\u0152\n\r\3\r\5\r\u0155\n\r\3\r\5\r\u0158\n\r\3")
buf.write("\r\3\r\3\16\3\16\3\16\7\16\u015f\n\16\f\16\16\16\u0162")
buf.write("\13\16\3\17\7\17\u0165\n\17\f\17\16\17\u0168\13\17\3\17")
buf.write("\3\17\5\17\u016c\n\17\3\17\5\17\u016f\n\17\3\20\3\20\7")
buf.write("\20\u0173\n\20\f\20\16\20\u0176\13\20\3\21\3\21\3\21\5")
buf.write("\21\u017b\n\21\3\21\3\21\5\21\u017f\n\21\3\21\3\21\3\22")
buf.write("\3\22\7\22\u0185\n\22\f\22\16\22\u0188\13\22\3\22\3\22")
buf.write("\3\23\3\23\7\23\u018e\n\23\f\23\16\23\u0191\13\23\3\23")
buf.write("\3\23\3\24\3\24\5\24\u0197\n\24\3\24\3\24\7\24\u019b\n")
buf.write("\24\f\24\16\24\u019e\13\24\3\24\5\24\u01a1\n\24\3\25\3")
buf.write("\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\5\25\u01ac\n\25")
buf.write("\3\26\3\26\3\26\3\26\3\26\7\26\u01b3\n\26\f\26\16\26\u01b6")
buf.write("\13\26\3\26\3\26\5\26\u01ba\n\26\3\26\3\26\3\27\3\27\5")
buf.write("\27\u01c0\n\27\3\30\3\30\5\30\u01c4\n\30\3\31\3\31\3\31")
buf.write("\3\32\3\32\3\32\3\33\3\33\3\33\3\33\5\33\u01d0\n\33\3")
buf.write("\33\3\33\3\34\3\34\3\34\3\34\3\35\7\35\u01d9\n\35\f\35")
buf.write("\16\35\u01dc\13\35\3\35\3\35\5\35\u01e0\n\35\3\36\3\36")
buf.write("\3\36\3\36\3\36\3\36\3\36\5\36\u01e9\n\36\3\37\3\37\3")
buf.write("\37\3\37\7\37\u01ef\n\37\f\37\16\37\u01f2\13\37\3\37\3")
buf.write("\37\3 \3 \3 \7 \u01f9\n \f \16 \u01fc\13 \3 \3 \3 \3!")
buf.write("\7!\u0202\n!\f!\16!\u0205\13!\3!\3!\3!\7!\u020a\n!\f!")
buf.write("\16!\u020d\13!\3!\3!\5!\u0211\n!\3!\3!\3!\3!\7!\u0217")
buf.write("\n!\f!\16!\u021a\13!\3!\3!\5!\u021e\n!\3!\3!\3\"\3\"\3")
buf.write("\"\3\"\3\"\3\"\5\"\u0228\n\"\3#\3#\3#\3$\3$\3$\7$\u0230")
buf.write("\n$\f$\16$\u0233\13$\3%\3%\3%\5%\u0238\n%\3&\3&\3&\7&")
buf.write("\u023d\n&\f&\16&\u0240\13&\3\'\3\'\5\'\u0244\n\'\3(\3")
buf.write("(\3(\3(\7(\u024a\n(\f(\16(\u024d\13(\3(\5(\u0250\n(\5")
buf.write("(\u0252\n(\3(\3(\3)\3)\5)\u0258\n)\3)\3)\3)\5)\u025d\n")
buf.write(")\7)\u025f\n)\f)\16)\u0262\13)\3*\3*\3*\3*\5*\u0268\n")
buf.write("*\5*\u026a\n*\3+\3+\3+\7+\u026f\n+\f+\16+\u0272\13+\3")
buf.write(",\3,\5,\u0276\n,\3,\3,\3-\3-\3-\7-\u027d\n-\f-\16-\u0280")
buf.write("\13-\3-\3-\5-\u0284\n-\3-\5-\u0287\n-\3.\7.\u028a\n.\f")
buf.write(".\16.\u028d\13.\3.\3.\3.\3/\7/\u0293\n/\f/\16/\u0296\13")
buf.write("/\3/\3/\3/\3/\3\60\3\60\3\60\7\60\u029f\n\60\f\60\16\60")
buf.write("\u02a2\13\60\3\61\3\61\3\61\3\61\3\61\3\61\5\61\u02aa")
buf.write("\n\61\3\62\3\62\3\63\3\63\3\64\3\64\3\64\3\64\3\64\5\64")
buf.write("\u02b5\n\64\3\64\5\64\u02b8\n\64\3\65\3\65\3\65\7\65\u02bd")
buf.write("\n\65\f\65\16\65\u02c0\13\65\3\66\3\66\3\66\3\66\3\67")
buf.write("\3\67\3\67\5\67\u02c9\n\67\38\38\38\38\78\u02cf\n8\f8")
buf.write("\168\u02d2\138\58\u02d4\n8\38\58\u02d7\n8\38\38\39\39")
buf.write("\39\39\39\3:\3:\7:\u02e2\n:\f:\16:\u02e5\13:\3:\3:\3;")
buf.write("\7;\u02ea\n;\f;\16;\u02ed\13;\3;\3;\5;\u02f1\n;\3<\3<")
buf.write("\3<\3<\3<\3<\5<\u02f9\n<\3<\3<\5<\u02fd\n<\3<\3<\5<\u0301")
buf.write("\n<\3<\3<\5<\u0305\n<\5<\u0307\n<\3=\3=\5=\u030b\n=\3")
buf.write(">\3>\3>\3>\5>\u0311\n>\3?\3?\3@\3@\3@\3A\3A\7A\u031a\n")
buf.write("A\fA\16A\u031d\13A\3A\3A\3B\3B\3B\3B\3B\5B\u0326\nB\3")
buf.write("C\7C\u0329\nC\fC\16C\u032c\13C\3C\3C\3C\3D\7D\u0332\n")
buf.write("D\fD\16D\u0335\13D\3D\3D\5D\u0339\nD\3D\5D\u033c\nD\3")
buf.write("E\3E\3E\3E\3E\5E\u0343\nE\3E\3E\3E\3E\3E\3E\3E\5E\u034c")
buf.write("\nE\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3")
buf.write("E\3E\3E\6E\u0361\nE\rE\16E\u0362\3E\5E\u0366\nE\3E\5E")
buf.write("\u0369\nE\3E\3E\3E\3E\7E\u036f\nE\fE\16E\u0372\13E\3E")
buf.write("\5E\u0375\nE\3E\3E\3E\3E\7E\u037b\nE\fE\16E\u037e\13E")
buf.write("\3E\7E\u0381\nE\fE\16E\u0384\13E\3E\3E\3E\3E\3E\3E\3E")
buf.write("\3E\5E\u038e\nE\3E\3E\3E\3E\3E\3E\3E\5E\u0397\nE\3E\3")
buf.write("E\3E\5E\u039c\nE\3E\3E\3E\3E\3E\3E\3E\3E\5E\u03a6\nE\3")
buf.write("F\3F\3F\7F\u03ab\nF\fF\16F\u03ae\13F\3F\3F\3F\3F\3F\3")
buf.write("G\3G\3G\7G\u03b8\nG\fG\16G\u03bb\13G\3H\3H\3H\3I\3I\3")
buf.write("I\5I\u03c3\nI\3I\3I\3J\3J\3J\7J\u03ca\nJ\fJ\16J\u03cd")
buf.write("\13J\3K\7K\u03d0\nK\fK\16K\u03d3\13K\3K\3K\3K\3K\3K\3")
buf.write("L\6L\u03db\nL\rL\16L\u03dc\3L\6L\u03e0\nL\rL\16L\u03e1")
buf.write("\3M\3M\3M\5M\u03e7\nM\3M\3M\3M\5M\u03ec\nM\3N\3N\5N\u03f0")
buf.write("\nN\3N\3N\5N\u03f4\nN\3N\3N\5N\u03f8\nN\5N\u03fa\nN\3")
buf.write("O\3O\5O\u03fe\nO\3P\7P\u0401\nP\fP\16P\u0404\13P\3P\3")
buf.write("P\3P\3P\3P\3Q\3Q\3Q\3Q\3R\3R\3R\7R\u0412\nR\fR\16R\u0415")
buf.write("\13R\3S\3S\3S\5S\u041a\nS\3S\3S\3S\3S\5S\u0420\nS\3S\3")
buf.write("S\3S\3S\5S\u0426\nS\3S\5S\u0429\nS\3T\3T\3T\3T\3T\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\5T\u043d\nT\3T\3T\5")
buf.write("T\u0441\nT\3T\3T\3T\5T\u0446\nT\3T\3T\5T\u044a\nT\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\5T\u045a\nT\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3")
buf.write("T\5T\u0482\nT\3T\3T\3T\3T\5T\u0488\nT\3T\3T\3T\3T\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3T\3T\5T\u0497\nT\3T\7T\u049a\nT\fT\16")
buf.write("T\u049d\13T\3U\3U\3U\3U\3V\3V\3V\5V\u04a6\nV\3V\3V\3V")
buf.write("\3V\3V\7V\u04ad\nV\fV\16V\u04b0\13V\3V\5V\u04b3\nV\3W")
buf.write("\3W\5W\u04b7\nW\3X\3X\3X\3X\3X\3X\3X\3X\3X\3X\3X\3X\3")
buf.write("X\3X\3X\3X\5X\u04c9\nX\5X\u04cb\nX\3Y\3Y\3Y\5Y\u04d0\n")
buf.write("Y\3Y\7Y\u04d3\nY\fY\16Y\u04d6\13Y\3Y\3Y\5Y\u04da\nY\3")
buf.write("Z\3Z\3Z\3Z\3Z\3Z\3Z\5Z\u04e3\nZ\5Z\u04e5\nZ\3[\3[\5[\u04e9")
buf.write("\n[\3[\3[\3[\5[\u04ee\n[\7[\u04f0\n[\f[\16[\u04f3\13[")
buf.write("\3[\5[\u04f6\n[\3\\\3\\\5\\\u04fa\n\\\3\\\3\\\3]\3]\3")
buf.write("]\3]\7]\u0502\n]\f]\16]\u0505\13]\3]\3]\3]\3]\3]\3]\3")
buf.write("]\7]\u050e\n]\f]\16]\u0511\13]\3]\3]\7]\u0515\n]\f]\16")
buf.write("]\u0518\13]\5]\u051a\n]\3^\3^\5^\u051e\n^\3_\3_\3_\3`")
buf.write("\3`\3`\5`\u0526\n`\3a\3a\3a\5a\u052b\na\3b\3b\3b\3b\3")
buf.write("c\3c\3c\7c\u0534\nc\fc\16c\u0537\13c\3d\5d\u053a\nd\3")
buf.write("d\3d\5d\u053e\nd\3d\3d\7d\u0542\nd\fd\16d\u0545\13d\3")
buf.write("e\3e\3f\3f\3f\3f\7f\u054d\nf\ff\16f\u0550\13f\3f\3f\3")
buf.write("g\3g\3g\3g\5g\u0558\ng\5g\u055a\ng\3h\3h\3h\3h\5h\u0560")
buf.write("\nh\3i\3i\5i\u0564\ni\3i\3i\3i\2\3\u00a6j\2\4\6\b\n\f")
buf.write("\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@")
buf.write("BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
buf.write("\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096\u0098")
buf.write("\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa")
buf.write("\u00ac\u00ae\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc")
buf.write("\u00be\u00c0\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce")
buf.write("\u00d0\2\16\4\2\23\23**\3\2\658\3\29:\3\2UX\3\2KL\4\2")
buf.write("YZ^^\3\2WX\4\2IJPQ\4\2OORR\4\2HH_i\3\2UV\n\2\5\5\7\7\n")
buf.write("\n\20\20\26\26\35\35\37\37\'\'\u0601\2\u00d3\3\2\2\2\4")
buf.write("\u00e6\3\2\2\2\6\u00ed\3\2\2\2\b\u0105\3\2\2\2\n\u010c")
buf.write("\3\2\2\2\f\u0116\3\2\2\2\16\u011a\3\2\2\2\20\u011c\3\2")
buf.write("\2\2\22\u012b\3\2\2\2\24\u0139\3\2\2\2\26\u0141\3\2\2")
buf.write("\2\30\u0149\3\2\2\2\32\u015b\3\2\2\2\34\u0166\3\2\2\2")
buf.write("\36\u0170\3\2\2\2 \u0177\3\2\2\2\"\u0182\3\2\2\2$\u018b")
buf.write("\3\2\2\2&\u01a0\3\2\2\2(\u01ab\3\2\2\2*\u01ad\3\2\2\2")
buf.write(",\u01bf\3\2\2\2.\u01c3\3\2\2\2\60\u01c5\3\2\2\2\62\u01c8")
buf.write("\3\2\2\2\64\u01cb\3\2\2\2\66\u01d3\3\2\2\28\u01df\3\2")
buf.write("\2\2:\u01e8\3\2\2\2<\u01ea\3\2\2\2>\u01f5\3\2\2\2@\u0203")
buf.write("\3\2\2\2B\u0227\3\2\2\2D\u0229\3\2\2\2F\u022c\3\2\2\2")
buf.write("H\u0234\3\2\2\2J\u0239\3\2\2\2L\u0243\3\2\2\2N\u0245\3")
buf.write("\2\2\2P\u0255\3\2\2\2R\u0269\3\2\2\2T\u026b\3\2\2\2V\u0273")
buf.write("\3\2\2\2X\u0286\3\2\2\2Z\u028b\3\2\2\2\\\u0294\3\2\2\2")
buf.write("^\u029b\3\2\2\2`\u02a9\3\2\2\2b\u02ab\3\2\2\2d\u02ad\3")
buf.write("\2\2\2f\u02af\3\2\2\2h\u02b9\3\2\2\2j\u02c1\3\2\2\2l\u02c8")
buf.write("\3\2\2\2n\u02ca\3\2\2\2p\u02da\3\2\2\2r\u02df\3\2\2\2")
buf.write("t\u02f0\3\2\2\2v\u0306\3\2\2\2x\u030a\3\2\2\2z\u030c\3")
buf.write("\2\2\2|\u0312\3\2\2\2~\u0314\3\2\2\2\u0080\u0317\3\2\2")
buf.write("\2\u0082\u0325\3\2\2\2\u0084\u032a\3\2\2\2\u0086\u033b")
buf.write("\3\2\2\2\u0088\u03a5\3\2\2\2\u008a\u03a7\3\2\2\2\u008c")
buf.write("\u03b4\3\2\2\2\u008e\u03bc\3\2\2\2\u0090\u03bf\3\2\2\2")
buf.write("\u0092\u03c6\3\2\2\2\u0094\u03d1\3\2\2\2\u0096\u03da\3")
buf.write("\2\2\2\u0098\u03eb\3\2\2\2\u009a\u03f9\3\2\2\2\u009c\u03fd")
buf.write("\3\2\2\2\u009e\u0402\3\2\2\2\u00a0\u040a\3\2\2\2\u00a2")
buf.write("\u040e\3\2\2\2\u00a4\u0428\3\2\2\2\u00a6\u0449\3\2\2\2")
buf.write("\u00a8\u049e\3\2\2\2\u00aa\u04b2\3\2\2\2\u00ac\u04b6\3")
buf.write("\2\2\2\u00ae\u04ca\3\2\2\2\u00b0\u04cf\3\2\2\2\u00b2\u04e4")
buf.write("\3\2\2\2\u00b4\u04f5\3\2\2\2\u00b6\u04f7\3\2\2\2\u00b8")
buf.write("\u04fd\3\2\2\2\u00ba\u051b\3\2\2\2\u00bc\u051f\3\2\2\2")
buf.write("\u00be\u0525\3\2\2\2\u00c0\u052a\3\2\2\2\u00c2\u052c\3")
buf.write("\2\2\2\u00c4\u0530\3\2\2\2\u00c6\u0539\3\2\2\2\u00c8\u0546")
buf.write("\3\2\2\2\u00ca\u0548\3\2\2\2\u00cc\u0559\3\2\2\2\u00ce")
buf.write("\u055f\3\2\2\2\u00d0\u0561\3\2\2\2\u00d2\u00d4\5\4\3\2")
buf.write("\u00d3\u00d2\3\2\2\2\u00d3\u00d4\3\2\2\2\u00d4\u00d8\3")
buf.write("\2\2\2\u00d5\u00d7\5\6\4\2\u00d6\u00d5\3\2\2\2\u00d7\u00da")
buf.write("\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9")
buf.write("\u00de\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00dd\5\b\5\2")
buf.write("\u00dc\u00db\3\2\2\2\u00dd\u00e0\3\2\2\2\u00de\u00dc\3")
buf.write("\2\2\2\u00de\u00df\3\2\2\2\u00df\u00e1\3\2\2\2\u00e0\u00de")
buf.write("\3\2\2\2\u00e1\u00e2\7\2\2\3\u00e2\3\3\2\2\2\u00e3\u00e5")
buf.write("\5f\64\2\u00e4\u00e3\3\2\2\2\u00e5\u00e8\3\2\2\2\u00e6")
buf.write("\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7\u00e9\3\2\2\2")
buf.write("\u00e8\u00e6\3\2\2\2\u00e9\u00ea\7\"\2\2\u00ea\u00eb\5")
buf.write("^\60\2\u00eb\u00ec\7E\2\2\u00ec\5\3\2\2\2\u00ed\u00ef")
buf.write("\7\33\2\2\u00ee\u00f0\7(\2\2\u00ef\u00ee\3\2\2\2\u00ef")
buf.write("\u00f0\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\u00f4\5^\60\2")
buf.write("\u00f2\u00f3\7G\2\2\u00f3\u00f5\7Y\2\2\u00f4\u00f2\3\2")
buf.write("\2\2\u00f4\u00f5\3\2\2\2\u00f5\u00f6\3\2\2\2\u00f6\u00f7")
buf.write("\7E\2\2\u00f7\7\3\2\2\2\u00f8\u00fa\5\f\7\2\u00f9\u00f8")
buf.write("\3\2\2\2\u00fa\u00fd\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fb")
buf.write("\u00fc\3\2\2\2\u00fc\u0102\3\2\2\2\u00fd\u00fb\3\2\2\2")
buf.write("\u00fe\u0103\5\20\t\2\u00ff\u0103\5\30\r\2\u0100\u0103")
buf.write("\5 \21\2\u0101\u0103\5p9\2\u0102\u00fe\3\2\2\2\u0102\u00ff")
buf.write("\3\2\2\2\u0102\u0100\3\2\2\2\u0102\u0101\3\2\2\2\u0103")
buf.write("\u0106\3\2\2\2\u0104\u0106\7E\2\2\u0105\u00fb\3\2\2\2")
buf.write("\u0105\u0104\3\2\2\2\u0106\t\3\2\2\2\u0107\u010d\5\f\7")
buf.write("\2\u0108\u010d\7 \2\2\u0109\u010d\7,\2\2\u010a\u010d\7")
buf.write("\60\2\2\u010b\u010d\7\63\2\2\u010c\u0107\3\2\2\2\u010c")
buf.write("\u0108\3\2\2\2\u010c\u0109\3\2\2\2\u010c\u010a\3\2\2\2")
buf.write("\u010c\u010b\3\2\2\2\u010d\13\3\2\2\2\u010e\u0117\5f\64")
buf.write("\2\u010f\u0117\7%\2\2\u0110\u0117\7$\2\2\u0111\u0117\7")
buf.write("#\2\2\u0112\u0117\7(\2\2\u0113\u0117\7\3\2\2\u0114\u0117")
buf.write("\7\24\2\2\u0115\u0117\7)\2\2\u0116\u010e\3\2\2\2\u0116")
buf.write("\u010f\3\2\2\2\u0116\u0110\3\2\2\2\u0116\u0111\3\2\2\2")
buf.write("\u0116\u0112\3\2\2\2\u0116\u0113\3\2\2\2\u0116\u0114\3")
buf.write("\2\2\2\u0116\u0115\3\2\2\2\u0117\r\3\2\2\2\u0118\u011b")
buf.write("\7\24\2\2\u0119\u011b\5f\64\2\u011a\u0118\3\2\2\2\u011a")
buf.write("\u0119\3\2\2\2\u011b\17\3\2\2\2\u011c\u011d\7\13\2\2\u011d")
buf.write("\u011f\7q\2\2\u011e\u0120\5\22\n\2\u011f\u011e\3\2\2\2")
buf.write("\u011f\u0120\3\2\2\2\u0120\u0123\3\2\2\2\u0121\u0122\7")
buf.write("\23\2\2\u0122\u0124\5\u00c6d\2\u0123\u0121\3\2\2\2\u0123")
buf.write("\u0124\3\2\2\2\u0124\u0127\3\2\2\2\u0125\u0126\7\32\2")
buf.write("\2\u0126\u0128\5\u00c4c\2\u0127\u0125\3\2\2\2\u0127\u0128")
buf.write("\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012a\5\"\22\2\u012a")
buf.write("\21\3\2\2\2\u012b\u012c\7J\2\2\u012c\u0131\5\24\13\2\u012d")
buf.write("\u012e\7F\2\2\u012e\u0130\5\24\13\2\u012f\u012d\3\2\2")
buf.write("\2\u0130\u0133\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132")
buf.write("\3\2\2\2\u0132\u0134\3\2\2\2\u0133\u0131\3\2\2\2\u0134")
buf.write("\u0135\7I\2\2\u0135\23\3\2\2\2\u0136\u0138\5f\64\2\u0137")
buf.write("\u0136\3\2\2\2\u0138\u013b\3\2\2\2\u0139\u0137\3\2\2\2")
buf.write("\u0139\u013a\3\2\2\2\u013a\u013c\3\2\2\2\u013b\u0139\3")
buf.write("\2\2\2\u013c\u013f\7q\2\2\u013d\u013e\7\23\2\2\u013e\u0140")
buf.write("\5\26\f\2\u013f\u013d\3\2\2\2\u013f\u0140\3\2\2\2\u0140")
buf.write("\25\3\2\2\2\u0141\u0146\5\u00c6d\2\u0142\u0143\7[\2\2")
buf.write("\u0143\u0145\5\u00c6d\2\u0144\u0142\3\2\2\2\u0145\u0148")
buf.write("\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2\u0147")
buf.write("\27\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u014a\7\22\2\2\u014a")
buf.write("\u014d\7q\2\2\u014b\u014c\7\32\2\2\u014c\u014e\5\u00c4")
buf.write("c\2\u014d\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u014f")
buf.write("\3\2\2\2\u014f\u0151\7A\2\2\u0150\u0152\5\32\16\2\u0151")
buf.write("\u0150\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154\3\2\2\2")
buf.write("\u0153\u0155\7F\2\2\u0154\u0153\3\2\2\2\u0154\u0155\3")
buf.write("\2\2\2\u0155\u0157\3\2\2\2\u0156\u0158\5\36\20\2\u0157")
buf.write("\u0156\3\2\2\2\u0157\u0158\3\2\2\2\u0158\u0159\3\2\2\2")
buf.write("\u0159\u015a\7B\2\2\u015a\31\3\2\2\2\u015b\u0160\5\34")
buf.write("\17\2\u015c\u015d\7F\2\2\u015d\u015f\5\34\17\2\u015e\u015c")
buf.write("\3\2\2\2\u015f\u0162\3\2\2\2\u0160\u015e\3\2\2\2\u0160")
buf.write("\u0161\3\2\2\2\u0161\33\3\2\2\2\u0162\u0160\3\2\2\2\u0163")
buf.write("\u0165\5f\64\2\u0164\u0163\3\2\2\2\u0165\u0168\3\2\2\2")
buf.write("\u0166\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0169\3")
buf.write("\2\2\2\u0168\u0166\3\2\2\2\u0169\u016b\7q\2\2\u016a\u016c")
buf.write("\5\u00d0i\2\u016b\u016a\3\2\2\2\u016b\u016c\3\2\2\2\u016c")
buf.write("\u016e\3\2\2\2\u016d\u016f\5\"\22\2\u016e\u016d\3\2\2")
buf.write("\2\u016e\u016f\3\2\2\2\u016f\35\3\2\2\2\u0170\u0174\7")
buf.write("E\2\2\u0171\u0173\5&\24\2\u0172\u0171\3\2\2\2\u0173\u0176")
buf.write("\3\2\2\2\u0174\u0172\3\2\2\2\u0174\u0175\3\2\2\2\u0175")
buf.write("\37\3\2\2\2\u0176\u0174\3\2\2\2\u0177\u0178\7\36\2\2\u0178")
buf.write("\u017a\7q\2\2\u0179\u017b\5\22\n\2\u017a\u0179\3\2\2\2")
buf.write("\u017a\u017b\3\2\2\2\u017b\u017e\3\2\2\2\u017c\u017d\7")
buf.write("\23\2\2\u017d\u017f\5\u00c4c\2\u017e\u017c\3\2\2\2\u017e")
buf.write("\u017f\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0181\5$\23\2")
buf.write("\u0181!\3\2\2\2\u0182\u0186\7A\2\2\u0183\u0185\5&\24\2")
buf.write("\u0184\u0183\3\2\2\2\u0185\u0188\3\2\2\2\u0186\u0184\3")
buf.write("\2\2\2\u0186\u0187\3\2\2\2\u0187\u0189\3\2\2\2\u0188\u0186")
buf.write("\3\2\2\2\u0189\u018a\7B\2\2\u018a#\3\2\2\2\u018b\u018f")
buf.write("\7A\2\2\u018c\u018e\58\35\2\u018d\u018c\3\2\2\2\u018e")
buf.write("\u0191\3\2\2\2\u018f\u018d\3\2\2\2\u018f\u0190\3\2\2\2")
buf.write("\u0190\u0192\3\2\2\2\u0191\u018f\3\2\2\2\u0192\u0193\7")
buf.write("B\2\2\u0193%\3\2\2\2\u0194\u01a1\7E\2\2\u0195\u0197\7")
buf.write("(\2\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197\u0198")
buf.write("\3\2\2\2\u0198\u01a1\5\u0080A\2\u0199\u019b\5\n\6\2\u019a")
buf.write("\u0199\3\2\2\2\u019b\u019e\3\2\2\2\u019c\u019a\3\2\2\2")
buf.write("\u019c\u019d\3\2\2\2\u019d\u019f\3\2\2\2\u019e\u019c\3")
buf.write("\2\2\2\u019f\u01a1\5(\25\2\u01a0\u0194\3\2\2\2\u01a0\u0196")
buf.write("\3\2\2\2\u01a0\u019c\3\2\2\2\u01a1\'\3\2\2\2\u01a2\u01ac")
buf.write("\5*\26\2\u01a3\u01ac\5\60\31\2\u01a4\u01ac\5\66\34\2\u01a5")
buf.write("\u01ac\5\64\33\2\u01a6\u01ac\5\62\32\2\u01a7\u01ac\5 ")
buf.write("\21\2\u01a8\u01ac\5p9\2\u01a9\u01ac\5\20\t\2\u01aa\u01ac")
buf.write("\5\30\r\2\u01ab\u01a2\3\2\2\2\u01ab\u01a3\3\2\2\2\u01ab")
buf.write("\u01a4\3\2\2\2\u01ab\u01a5\3\2\2\2\u01ab\u01a6\3\2\2\2")
buf.write("\u01ab\u01a7\3\2\2\2\u01ab\u01a8\3\2\2\2\u01ab\u01a9\3")
buf.write("\2\2\2\u01ab\u01aa\3\2\2\2\u01ac)\3\2\2\2\u01ad\u01ae")
buf.write("\5.\30\2\u01ae\u01af\7q\2\2\u01af\u01b4\5V,\2\u01b0\u01b1")
buf.write("\7C\2\2\u01b1\u01b3\7D\2\2\u01b2\u01b0\3\2\2\2\u01b3\u01b6")
buf.write("\3\2\2\2\u01b4\u01b2\3\2\2\2\u01b4\u01b5\3\2\2\2\u01b5")
buf.write("\u01b9\3\2\2\2\u01b6\u01b4\3\2\2\2\u01b7\u01b8\7/\2\2")
buf.write("\u01b8\u01ba\5T+\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2")
buf.write("\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01bc\5,\27\2\u01bc+\3")
buf.write("\2\2\2\u01bd\u01c0\5\u0080A\2\u01be\u01c0\7E\2\2\u01bf")
buf.write("\u01bd\3\2\2\2\u01bf\u01be\3\2\2\2\u01c0-\3\2\2\2\u01c1")
buf.write("\u01c4\5\u00c6d\2\u01c2\u01c4\7\62\2\2\u01c3\u01c1\3\2")
buf.write("\2\2\u01c3\u01c2\3\2\2\2\u01c4/\3\2\2\2\u01c5\u01c6\5")
buf.write("\22\n\2\u01c6\u01c7\5*\26\2\u01c7\61\3\2\2\2\u01c8\u01c9")
buf.write("\5\22\n\2\u01c9\u01ca\5\64\33\2\u01ca\63\3\2\2\2\u01cb")
buf.write("\u01cc\7q\2\2\u01cc\u01cf\5V,\2\u01cd\u01ce\7/\2\2\u01ce")
buf.write("\u01d0\5T+\2\u01cf\u01cd\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0")
buf.write("\u01d1\3\2\2\2\u01d1\u01d2\5\u0080A\2\u01d2\65\3\2\2\2")
buf.write("\u01d3\u01d4\5\u00c6d\2\u01d4\u01d5\5F$\2\u01d5\u01d6")
buf.write("\7E\2\2\u01d6\67\3\2\2\2\u01d7\u01d9\5\n\6\2\u01d8\u01d7")
buf.write("\3\2\2\2\u01d9\u01dc\3\2\2\2\u01da\u01d8\3\2\2\2\u01da")
buf.write("\u01db\3\2\2\2\u01db\u01dd\3\2\2\2\u01dc\u01da\3\2\2\2")
buf.write("\u01dd\u01e0\5:\36\2\u01de\u01e0\7E\2\2\u01df\u01da\3")
buf.write("\2\2\2\u01df\u01de\3\2\2\2\u01e09\3\2\2\2\u01e1\u01e9")
buf.write("\5<\37\2\u01e2\u01e9\5@!\2\u01e3\u01e9\5D#\2\u01e4\u01e9")
buf.write("\5 \21\2\u01e5\u01e9\5p9\2\u01e6\u01e9\5\20\t\2\u01e7")
buf.write("\u01e9\5\30\r\2\u01e8\u01e1\3\2\2\2\u01e8\u01e2\3\2\2")
buf.write("\2\u01e8\u01e3\3\2\2\2\u01e8\u01e4\3\2\2\2\u01e8\u01e5")
buf.write("\3\2\2\2\u01e8\u01e6\3\2\2\2\u01e8\u01e7\3\2\2\2\u01e9")
buf.write(";\3\2\2\2\u01ea\u01eb\5\u00c6d\2\u01eb\u01f0\5> \2\u01ec")
buf.write("\u01ed\7F\2\2\u01ed\u01ef\5> \2\u01ee\u01ec\3\2\2\2\u01ef")
buf.write("\u01f2\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2\2\2")
buf.write("\u01f1\u01f3\3\2\2\2\u01f2\u01f0\3\2\2\2\u01f3\u01f4\7")
buf.write("E\2\2\u01f4=\3\2\2\2\u01f5\u01fa\7q\2\2\u01f6\u01f7\7")
buf.write("C\2\2\u01f7\u01f9\7D\2\2\u01f8\u01f6\3\2\2\2\u01f9\u01fc")
buf.write("\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb")
buf.write("\u01fd\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01fe\7H\2\2")
buf.write("\u01fe\u01ff\5L\'\2\u01ff?\3\2\2\2\u0200\u0202\5B\"\2")
buf.write("\u0201\u0200\3\2\2\2\u0202\u0205\3\2\2\2\u0203\u0201\3")
buf.write("\2\2\2\u0203\u0204\3\2\2\2\u0204\u0210\3\2\2\2\u0205\u0203")
buf.write("\3\2\2\2\u0206\u0211\5.\30\2\u0207\u020b\5\22\n\2\u0208")
buf.write("\u020a\5f\64\2\u0209\u0208\3\2\2\2\u020a\u020d\3\2\2\2")
buf.write("\u020b\u0209\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u020e\3")
buf.write("\2\2\2\u020d\u020b\3\2\2\2\u020e\u020f\5.\30\2\u020f\u0211")
buf.write("\3\2\2\2\u0210\u0206\3\2\2\2\u0210\u0207\3\2\2\2\u0211")
buf.write("\u0212\3\2\2\2\u0212\u0213\7q\2\2\u0213\u0218\5V,\2\u0214")
buf.write("\u0215\7C\2\2\u0215\u0217\7D\2\2\u0216\u0214\3\2\2\2\u0217")
buf.write("\u021a\3\2\2\2\u0218\u0216\3\2\2\2\u0218\u0219\3\2\2\2")
buf.write("\u0219\u021d\3\2\2\2\u021a\u0218\3\2\2\2\u021b\u021c\7")
buf.write("/\2\2\u021c\u021e\5T+\2\u021d\u021b\3\2\2\2\u021d\u021e")
buf.write("\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0220\5,\27\2\u0220")
buf.write("A\3\2\2\2\u0221\u0228\5f\64\2\u0222\u0228\7%\2\2\u0223")
buf.write("\u0228\7\3\2\2\u0224\u0228\7\16\2\2\u0225\u0228\7(\2\2")
buf.write("\u0226\u0228\7)\2\2\u0227\u0221\3\2\2\2\u0227\u0222\3")
buf.write("\2\2\2\u0227\u0223\3\2\2\2\u0227\u0224\3\2\2\2\u0227\u0225")
buf.write("\3\2\2\2\u0227\u0226\3\2\2\2\u0228C\3\2\2\2\u0229\u022a")
buf.write("\5\22\n\2\u022a\u022b\5@!\2\u022bE\3\2\2\2\u022c\u0231")
buf.write("\5H%\2\u022d\u022e\7F\2\2\u022e\u0230\5H%\2\u022f\u022d")
buf.write("\3\2\2\2\u0230\u0233\3\2\2\2\u0231\u022f\3\2\2\2\u0231")
buf.write("\u0232\3\2\2\2\u0232G\3\2\2\2\u0233\u0231\3\2\2\2\u0234")
buf.write("\u0237\5J&\2\u0235\u0236\7H\2\2\u0236\u0238\5L\'\2\u0237")
buf.write("\u0235\3\2\2\2\u0237\u0238\3\2\2\2\u0238I\3\2\2\2\u0239")
buf.write("\u023e\7q\2\2\u023a\u023b\7C\2\2\u023b\u023d\7D\2\2\u023c")
buf.write("\u023a\3\2\2\2\u023d\u0240\3\2\2\2\u023e\u023c\3\2\2\2")
buf.write("\u023e\u023f\3\2\2\2\u023fK\3\2\2\2\u0240\u023e\3\2\2")
buf.write("\2\u0241\u0244\5N(\2\u0242\u0244\5\u00a6T\2\u0243\u0241")
buf.write("\3\2\2\2\u0243\u0242\3\2\2\2\u0244M\3\2\2\2\u0245\u0251")
buf.write("\7A\2\2\u0246\u024b\5L\'\2\u0247\u0248\7F\2\2\u0248\u024a")
buf.write("\5L\'\2\u0249\u0247\3\2\2\2\u024a\u024d\3\2\2\2\u024b")
buf.write("\u0249\3\2\2\2\u024b\u024c\3\2\2\2\u024c\u024f\3\2\2\2")
buf.write("\u024d\u024b\3\2\2\2\u024e\u0250\7F\2\2\u024f\u024e\3")
buf.write("\2\2\2\u024f\u0250\3\2\2\2\u0250\u0252\3\2\2\2\u0251\u0246")
buf.write("\3\2\2\2\u0251\u0252\3\2\2\2\u0252\u0253\3\2\2\2\u0253")
buf.write("\u0254\7B\2\2\u0254O\3\2\2\2\u0255\u0257\7q\2\2\u0256")
buf.write("\u0258\5\u00caf\2\u0257\u0256\3\2\2\2\u0257\u0258\3\2")
buf.write("\2\2\u0258\u0260\3\2\2\2\u0259\u025a\7G\2\2\u025a\u025c")
buf.write("\7q\2\2\u025b\u025d\5\u00caf\2\u025c\u025b\3\2\2\2\u025c")
buf.write("\u025d\3\2\2\2\u025d\u025f\3\2\2\2\u025e\u0259\3\2\2\2")
buf.write("\u025f\u0262\3\2\2\2\u0260\u025e\3\2\2\2\u0260\u0261\3")
buf.write("\2\2\2\u0261Q\3\2\2\2\u0262\u0260\3\2\2\2\u0263\u026a")
buf.write("\5\u00c6d\2\u0264\u0267\7M\2\2\u0265\u0266\t\2\2\2\u0266")
buf.write("\u0268\5\u00c6d\2\u0267\u0265\3\2\2\2\u0267\u0268\3\2")
buf.write("\2\2\u0268\u026a\3\2\2\2\u0269\u0263\3\2\2\2\u0269\u0264")
buf.write("\3\2\2\2\u026aS\3\2\2\2\u026b\u0270\5^\60\2\u026c\u026d")
buf.write("\7F\2\2\u026d\u026f\5^\60\2\u026e\u026c\3\2\2\2\u026f")
buf.write("\u0272\3\2\2\2\u0270\u026e\3\2\2\2\u0270\u0271\3\2\2\2")
buf.write("\u0271U\3\2\2\2\u0272\u0270\3\2\2\2\u0273\u0275\7?\2\2")
buf.write("\u0274\u0276\5X-\2\u0275\u0274\3\2\2\2\u0275\u0276\3\2")
buf.write("\2\2\u0276\u0277\3\2\2\2\u0277\u0278\7@\2\2\u0278W\3\2")
buf.write("\2\2\u0279\u027e\5Z.\2\u027a\u027b\7F\2\2\u027b\u027d")
buf.write("\5Z.\2\u027c\u027a\3\2\2\2\u027d\u0280\3\2\2\2\u027e\u027c")
buf.write("\3\2\2\2\u027e\u027f\3\2\2\2\u027f\u0283\3\2\2\2\u0280")
buf.write("\u027e\3\2\2\2\u0281\u0282\7F\2\2\u0282\u0284\5\\/\2\u0283")
buf.write("\u0281\3\2\2\2\u0283\u0284\3\2\2\2\u0284\u0287\3\2\2\2")
buf.write("\u0285\u0287\5\\/\2\u0286\u0279\3\2\2\2\u0286\u0285\3")
buf.write("\2\2\2\u0287Y\3\2\2\2\u0288\u028a\5\16\b\2\u0289\u0288")
buf.write("\3\2\2\2\u028a\u028d\3\2\2\2\u028b\u0289\3\2\2\2\u028b")
buf.write("\u028c\3\2\2\2\u028c\u028e\3\2\2\2\u028d\u028b\3\2\2\2")
buf.write("\u028e\u028f\5\u00c6d\2\u028f\u0290\5J&\2\u0290[\3\2\2")
buf.write("\2\u0291\u0293\5\16\b\2\u0292\u0291\3\2\2\2\u0293\u0296")
buf.write("\3\2\2\2\u0294\u0292\3\2\2\2\u0294\u0295\3\2\2\2\u0295")
buf.write("\u0297\3\2\2\2\u0296\u0294\3\2\2\2\u0297\u0298\5\u00c6")
buf.write("d\2\u0298\u0299\7m\2\2\u0299\u029a\5J&\2\u029a]\3\2\2")
buf.write("\2\u029b\u02a0\7q\2\2\u029c\u029d\7G\2\2\u029d\u029f\7")
buf.write("q\2\2\u029e\u029c\3\2\2\2\u029f\u02a2\3\2\2\2\u02a0\u029e")
buf.write("\3\2\2\2\u02a0\u02a1\3\2\2\2\u02a1_\3\2\2\2\u02a2\u02a0")
buf.write("\3\2\2\2\u02a3\u02aa\5b\62\2\u02a4\u02aa\5d\63\2\u02a5")
buf.write("\u02aa\7<\2\2\u02a6\u02aa\7=\2\2\u02a7\u02aa\7;\2\2\u02a8")
buf.write("\u02aa\7>\2\2\u02a9\u02a3\3\2\2\2\u02a9\u02a4\3\2\2\2")
buf.write("\u02a9\u02a5\3\2\2\2\u02a9\u02a6\3\2\2\2\u02a9\u02a7\3")
buf.write("\2\2\2\u02a9\u02a8\3\2\2\2\u02aaa\3\2\2\2\u02ab\u02ac")
buf.write("\t\3\2\2\u02acc\3\2\2\2\u02ad\u02ae\t\4\2\2\u02aee\3\2")
buf.write("\2\2\u02af\u02b0\7l\2\2\u02b0\u02b7\5^\60\2\u02b1\u02b4")
buf.write("\7?\2\2\u02b2\u02b5\5h\65\2\u02b3\u02b5\5l\67\2\u02b4")
buf.write("\u02b2\3\2\2\2\u02b4\u02b3\3\2\2\2\u02b4\u02b5\3\2\2\2")
buf.write("\u02b5\u02b6\3\2\2\2\u02b6\u02b8\7@\2\2\u02b7\u02b1\3")
buf.write("\2\2\2\u02b7\u02b8\3\2\2\2\u02b8g\3\2\2\2\u02b9\u02be")
buf.write("\5j\66\2\u02ba\u02bb\7F\2\2\u02bb\u02bd\5j\66\2\u02bc")
buf.write("\u02ba\3\2\2\2\u02bd\u02c0\3\2\2\2\u02be\u02bc\3\2\2\2")
buf.write("\u02be\u02bf\3\2\2\2\u02bfi\3\2\2\2\u02c0\u02be\3\2\2")
buf.write("\2\u02c1\u02c2\7q\2\2\u02c2\u02c3\7H\2\2\u02c3\u02c4\5")
buf.write("l\67\2\u02c4k\3\2\2\2\u02c5\u02c9\5\u00a6T\2\u02c6\u02c9")
buf.write("\5f\64\2\u02c7\u02c9\5n8\2\u02c8\u02c5\3\2\2\2\u02c8\u02c6")
buf.write("\3\2\2\2\u02c8\u02c7\3\2\2\2\u02c9m\3\2\2\2\u02ca\u02d3")
buf.write("\7A\2\2\u02cb\u02d0\5l\67\2\u02cc\u02cd\7F\2\2\u02cd\u02cf")
buf.write("\5l\67\2\u02ce\u02cc\3\2\2\2\u02cf\u02d2\3\2\2\2\u02d0")
buf.write("\u02ce\3\2\2\2\u02d0\u02d1\3\2\2\2\u02d1\u02d4\3\2\2\2")
buf.write("\u02d2\u02d0\3\2\2\2\u02d3\u02cb\3\2\2\2\u02d3\u02d4\3")
buf.write("\2\2\2\u02d4\u02d6\3\2\2\2\u02d5\u02d7\7F\2\2\u02d6\u02d5")
buf.write("\3\2\2\2\u02d6\u02d7\3\2\2\2\u02d7\u02d8\3\2\2\2\u02d8")
buf.write("\u02d9\7B\2\2\u02d9o\3\2\2\2\u02da\u02db\7l\2\2\u02db")
buf.write("\u02dc\7\36\2\2\u02dc\u02dd\7q\2\2\u02dd\u02de\5r:\2\u02de")
buf.write("q\3\2\2\2\u02df\u02e3\7A\2\2\u02e0\u02e2\5t;\2\u02e1\u02e0")
buf.write("\3\2\2\2\u02e2\u02e5\3\2\2\2\u02e3\u02e1\3\2\2\2\u02e3")
buf.write("\u02e4\3\2\2\2\u02e4\u02e6\3\2\2\2\u02e5\u02e3\3\2\2\2")
buf.write("\u02e6\u02e7\7B\2\2\u02e7s\3\2\2\2\u02e8\u02ea\5\n\6\2")
buf.write("\u02e9\u02e8\3\2\2\2\u02ea\u02ed\3\2\2\2\u02eb\u02e9\3")
buf.write("\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ee\3\2\2\2\u02ed\u02eb")
buf.write("\3\2\2\2\u02ee\u02f1\5v<\2\u02ef\u02f1\7E\2\2\u02f0\u02eb")
buf.write("\3\2\2\2\u02f0\u02ef\3\2\2\2\u02f1u\3\2\2\2\u02f2\u02f3")
buf.write("\5\u00c6d\2\u02f3\u02f4\5x=\2\u02f4\u02f5\7E\2\2\u02f5")
buf.write("\u0307\3\2\2\2\u02f6\u02f8\5\20\t\2\u02f7\u02f9\7E\2\2")
buf.write("\u02f8\u02f7\3\2\2\2\u02f8\u02f9\3\2\2\2\u02f9\u0307\3")
buf.write("\2\2\2\u02fa\u02fc\5 \21\2\u02fb\u02fd\7E\2\2\u02fc\u02fb")
buf.write("\3\2\2\2\u02fc\u02fd\3\2\2\2\u02fd\u0307\3\2\2\2\u02fe")
buf.write("\u0300\5\30\r\2\u02ff\u0301\7E\2\2\u0300\u02ff\3\2\2\2")
buf.write("\u0300\u0301\3\2\2\2\u0301\u0307\3\2\2\2\u0302\u0304\5")
buf.write("p9\2\u0303\u0305\7E\2\2\u0304\u0303\3\2\2\2\u0304\u0305")
buf.write("\3\2\2\2\u0305\u0307\3\2\2\2\u0306\u02f2\3\2\2\2\u0306")
buf.write("\u02f6\3\2\2\2\u0306\u02fa\3\2\2\2\u0306\u02fe\3\2\2\2")
buf.write("\u0306\u0302\3\2\2\2\u0307w\3\2\2\2\u0308\u030b\5z>\2")
buf.write("\u0309\u030b\5|?\2\u030a\u0308\3\2\2\2\u030a\u0309\3\2")
buf.write("\2\2\u030by\3\2\2\2\u030c\u030d\7q\2\2\u030d\u030e\7?")
buf.write("\2\2\u030e\u0310\7@\2\2\u030f\u0311\5~@\2\u0310\u030f")
buf.write("\3\2\2\2\u0310\u0311\3\2\2\2\u0311{\3\2\2\2\u0312\u0313")
buf.write("\5F$\2\u0313}\3\2\2\2\u0314\u0315\7\16\2\2\u0315\u0316")
buf.write("\5l\67\2\u0316\177\3\2\2\2\u0317\u031b\7A\2\2\u0318\u031a")
buf.write("\5\u0082B\2\u0319\u0318\3\2\2\2\u031a\u031d\3\2\2\2\u031b")
buf.write("\u0319\3\2\2\2\u031b\u031c\3\2\2\2\u031c\u031e\3\2\2\2")
buf.write("\u031d\u031b\3\2\2\2\u031e\u031f\7B\2\2\u031f\u0081\3")
buf.write("\2\2\2\u0320\u0321\5\u0084C\2\u0321\u0322\7E\2\2\u0322")
buf.write("\u0326\3\2\2\2\u0323\u0326\5\u0088E\2\u0324\u0326\5\u0086")
buf.write("D\2\u0325\u0320\3\2\2\2\u0325\u0323\3\2\2\2\u0325\u0324")
buf.write("\3\2\2\2\u0326\u0083\3\2\2\2\u0327\u0329\5\16\b\2\u0328")
buf.write("\u0327\3\2\2\2\u0329\u032c\3\2\2\2\u032a\u0328\3\2\2\2")
buf.write("\u032a\u032b\3\2\2\2\u032b\u032d\3\2\2\2\u032c\u032a\3")
buf.write("\2\2\2\u032d\u032e\5\u00c6d\2\u032e\u032f\5F$\2\u032f")
buf.write("\u0085\3\2\2\2\u0330\u0332\5\f\7\2\u0331\u0330\3\2\2\2")
buf.write("\u0332\u0335\3\2\2\2\u0333\u0331\3\2\2\2\u0333\u0334\3")
buf.write("\2\2\2\u0334\u0338\3\2\2\2\u0335\u0333\3\2\2\2\u0336\u0339")
buf.write("\5\20\t\2\u0337\u0339\5 \21\2\u0338\u0336\3\2\2\2\u0338")
buf.write("\u0337\3\2\2\2\u0339\u033c\3\2\2\2\u033a\u033c\7E\2\2")
buf.write("\u033b\u0333\3\2\2\2\u033b\u033a\3\2\2\2\u033c\u0087\3")
buf.write("\2\2\2\u033d\u03a6\5\u0080A\2\u033e\u033f\7\4\2\2\u033f")
buf.write("\u0342\5\u00a6T\2\u0340\u0341\7N\2\2\u0341\u0343\5\u00a6")
buf.write("T\2\u0342\u0340\3\2\2\2\u0342\u0343\3\2\2\2\u0343\u0344")
buf.write("\3\2\2\2\u0344\u0345\7E\2\2\u0345\u03a6\3\2\2\2\u0346")
buf.write("\u0347\7\30\2\2\u0347\u0348\5\u00a0Q\2\u0348\u034b\5\u0088")
buf.write("E\2\u0349\u034a\7\21\2\2\u034a\u034c\5\u0088E\2\u034b")
buf.write("\u0349\3\2\2\2\u034b\u034c\3\2\2\2\u034c\u03a6\3\2\2\2")
buf.write("\u034d\u034e\7\27\2\2\u034e\u034f\7?\2\2\u034f\u0350\5")
buf.write("\u009aN\2\u0350\u0351\7@\2\2\u0351\u0352\5\u0088E\2\u0352")
buf.write("\u03a6\3\2\2\2\u0353\u0354\7\64\2\2\u0354\u0355\5\u00a0")
buf.write("Q\2\u0355\u0356\5\u0088E\2\u0356\u03a6\3\2\2\2\u0357\u0358")
buf.write("\7\17\2\2\u0358\u0359\5\u0088E\2\u0359\u035a\7\64\2\2")
buf.write("\u035a\u035b\5\u00a0Q\2\u035b\u035c\7E\2\2\u035c\u03a6")
buf.write("\3\2\2\2\u035d\u035e\7\61\2\2\u035e\u0368\5\u0080A\2\u035f")
buf.write("\u0361\5\u008aF\2\u0360\u035f\3\2\2\2\u0361\u0362\3\2")
buf.write("\2\2\u0362\u0360\3\2\2\2\u0362\u0363\3\2\2\2\u0363\u0365")
buf.write("\3\2\2\2\u0364\u0366\5\u008eH\2\u0365\u0364\3\2\2\2\u0365")
buf.write("\u0366\3\2\2\2\u0366\u0369\3\2\2\2\u0367\u0369\5\u008e")
buf.write("H\2\u0368\u0360\3\2\2\2\u0368\u0367\3\2\2\2\u0369\u03a6")
buf.write("\3\2\2\2\u036a\u036b\7\61\2\2\u036b\u036c\5\u0090I\2\u036c")
buf.write("\u0370\5\u0080A\2\u036d\u036f\5\u008aF\2\u036e\u036d\3")
buf.write("\2\2\2\u036f\u0372\3\2\2\2\u0370\u036e\3\2\2\2\u0370\u0371")
buf.write("\3\2\2\2\u0371\u0374\3\2\2\2\u0372\u0370\3\2\2\2\u0373")
buf.write("\u0375\5\u008eH\2\u0374\u0373\3\2\2\2\u0374\u0375\3\2")
buf.write("\2\2\u0375\u03a6\3\2\2\2\u0376\u0377\7+\2\2\u0377\u0378")
buf.write("\5\u00a0Q\2\u0378\u037c\7A\2\2\u0379\u037b\5\u0096L\2")
buf.write("\u037a\u0379\3\2\2\2\u037b\u037e\3\2\2\2\u037c\u037a\3")
buf.write("\2\2\2\u037c\u037d\3\2\2\2\u037d\u0382\3\2\2\2\u037e\u037c")
buf.write("\3\2\2\2\u037f\u0381\5\u0098M\2\u0380\u037f\3\2\2\2\u0381")
buf.write("\u0384\3\2\2\2\u0382\u0380\3\2\2\2\u0382\u0383\3\2\2\2")
buf.write("\u0383\u0385\3\2\2\2\u0384\u0382\3\2\2\2\u0385\u0386\7")
buf.write("B\2\2\u0386\u03a6\3\2\2\2\u0387\u0388\7,\2\2\u0388\u0389")
buf.write("\5\u00a0Q\2\u0389\u038a\5\u0080A\2\u038a\u03a6\3\2\2\2")
buf.write("\u038b\u038d\7&\2\2\u038c\u038e\5\u00a6T\2\u038d\u038c")
buf.write("\3\2\2\2\u038d\u038e\3\2\2\2\u038e\u038f\3\2\2\2\u038f")
buf.write("\u03a6\7E\2\2\u0390\u0391\7.\2\2\u0391\u0392\5\u00a6T")
buf.write("\2\u0392\u0393\7E\2\2\u0393\u03a6\3\2\2\2\u0394\u0396")
buf.write("\7\6\2\2\u0395\u0397\7q\2\2\u0396\u0395\3\2\2\2\u0396")
buf.write("\u0397\3\2\2\2\u0397\u0398\3\2\2\2\u0398\u03a6\7E\2\2")
buf.write("\u0399\u039b\7\r\2\2\u039a\u039c\7q\2\2\u039b\u039a\3")
buf.write("\2\2\2\u039b\u039c\3\2\2\2\u039c\u039d\3\2\2\2\u039d\u03a6")
buf.write("\7E\2\2\u039e\u03a6\7E\2\2\u039f\u03a0\5\u00a6T\2\u03a0")
buf.write("\u03a1\7E\2\2\u03a1\u03a6\3\2\2\2\u03a2\u03a3\7q\2\2\u03a3")
buf.write("\u03a4\7N\2\2\u03a4\u03a6\5\u0088E\2\u03a5\u033d\3\2\2")
buf.write("\2\u03a5\u033e\3\2\2\2\u03a5\u0346\3\2\2\2\u03a5\u034d")
buf.write("\3\2\2\2\u03a5\u0353\3\2\2\2\u03a5\u0357\3\2\2\2\u03a5")
buf.write("\u035d\3\2\2\2\u03a5\u036a\3\2\2\2\u03a5\u0376\3\2\2\2")
buf.write("\u03a5\u0387\3\2\2\2\u03a5\u038b\3\2\2\2\u03a5\u0390\3")
buf.write("\2\2\2\u03a5\u0394\3\2\2\2\u03a5\u0399\3\2\2\2\u03a5\u039e")
buf.write("\3\2\2\2\u03a5\u039f\3\2\2\2\u03a5\u03a2\3\2\2\2\u03a6")
buf.write("\u0089\3\2\2\2\u03a7\u03a8\7\t\2\2\u03a8\u03ac\7?\2\2")
buf.write("\u03a9\u03ab\5\16\b\2\u03aa\u03a9\3\2\2\2\u03ab\u03ae")
buf.write("\3\2\2\2\u03ac\u03aa\3\2\2\2\u03ac\u03ad\3\2\2\2\u03ad")
buf.write("\u03af\3\2\2\2\u03ae\u03ac\3\2\2\2\u03af\u03b0\5\u008c")
buf.write("G\2\u03b0\u03b1\7q\2\2\u03b1\u03b2\7@\2\2\u03b2\u03b3")
buf.write("\5\u0080A\2\u03b3\u008b\3\2\2\2\u03b4\u03b9\5^\60\2\u03b5")
buf.write("\u03b6\7\\\2\2\u03b6\u03b8\5^\60\2\u03b7\u03b5\3\2\2\2")
buf.write("\u03b8\u03bb\3\2\2\2\u03b9\u03b7\3\2\2\2\u03b9\u03ba\3")
buf.write("\2\2\2\u03ba\u008d\3\2\2\2\u03bb\u03b9\3\2\2\2\u03bc\u03bd")
buf.write("\7\25\2\2\u03bd\u03be\5\u0080A\2\u03be\u008f\3\2\2\2\u03bf")
buf.write("\u03c0\7?\2\2\u03c0\u03c2\5\u0092J\2\u03c1\u03c3\7E\2")
buf.write("\2\u03c2\u03c1\3\2\2\2\u03c2\u03c3\3\2\2\2\u03c3\u03c4")
buf.write("\3\2\2\2\u03c4\u03c5\7@\2\2\u03c5\u0091\3\2\2\2\u03c6")
buf.write("\u03cb\5\u0094K\2\u03c7\u03c8\7E\2\2\u03c8\u03ca\5\u0094")
buf.write("K\2\u03c9\u03c7\3\2\2\2\u03ca\u03cd\3\2\2\2\u03cb\u03c9")
buf.write("\3\2\2\2\u03cb\u03cc\3\2\2\2\u03cc\u0093\3\2\2\2\u03cd")
buf.write("\u03cb\3\2\2\2\u03ce\u03d0\5\16\b\2\u03cf\u03ce\3\2\2")
buf.write("\2\u03d0\u03d3\3\2\2\2\u03d1\u03cf\3\2\2\2\u03d1\u03d2")
buf.write("\3\2\2\2\u03d2\u03d4\3\2\2\2\u03d3\u03d1\3\2\2\2\u03d4")
buf.write("\u03d5\5P)\2\u03d5\u03d6\5J&\2\u03d6\u03d7\7H\2\2\u03d7")
buf.write("\u03d8\5\u00a6T\2\u03d8\u0095\3\2\2\2\u03d9\u03db\5\u0098")
buf.write("M\2\u03da\u03d9\3\2\2\2\u03db\u03dc\3\2\2\2\u03dc\u03da")
buf.write("\3\2\2\2\u03dc\u03dd\3\2\2\2\u03dd\u03df\3\2\2\2\u03de")
buf.write("\u03e0\5\u0082B\2\u03df\u03de\3\2\2\2\u03e0\u03e1\3\2")
buf.write("\2\2\u03e1\u03df\3\2\2\2\u03e1\u03e2\3\2\2\2\u03e2\u0097")
buf.write("\3\2\2\2\u03e3\u03e6\7\b\2\2\u03e4\u03e7\5\u00a6T\2\u03e5")
buf.write("\u03e7\7q\2\2\u03e6\u03e4\3\2\2\2\u03e6\u03e5\3\2\2\2")
buf.write("\u03e7\u03e8\3\2\2\2\u03e8\u03ec\7N\2\2\u03e9\u03ea\7")
buf.write("\16\2\2\u03ea\u03ec\7N\2\2\u03eb\u03e3\3\2\2\2\u03eb\u03e9")
buf.write("\3\2\2\2\u03ec\u0099\3\2\2\2\u03ed\u03fa\5\u009eP\2\u03ee")
buf.write("\u03f0\5\u009cO\2\u03ef\u03ee\3\2\2\2\u03ef\u03f0\3\2")
buf.write("\2\2\u03f0\u03f1\3\2\2\2\u03f1\u03f3\7E\2\2\u03f2\u03f4")
buf.write("\5\u00a6T\2\u03f3\u03f2\3\2\2\2\u03f3\u03f4\3\2\2\2\u03f4")
buf.write("\u03f5\3\2\2\2\u03f5\u03f7\7E\2\2\u03f6\u03f8\5\u00a2")
buf.write("R\2\u03f7\u03f6\3\2\2\2\u03f7\u03f8\3\2\2\2\u03f8\u03fa")
buf.write("\3\2\2\2\u03f9\u03ed\3\2\2\2\u03f9\u03ef\3\2\2\2\u03fa")
buf.write("\u009b\3\2\2\2\u03fb\u03fe\5\u0084C\2\u03fc\u03fe\5\u00a2")
buf.write("R\2\u03fd\u03fb\3\2\2\2\u03fd\u03fc\3\2\2\2\u03fe\u009d")
buf.write("\3\2\2\2\u03ff\u0401\5\16\b\2\u0400\u03ff\3\2\2\2\u0401")
buf.write("\u0404\3\2\2\2\u0402\u0400\3\2\2\2\u0402\u0403\3\2\2\2")
buf.write("\u0403\u0405\3\2\2\2\u0404\u0402\3\2\2\2\u0405\u0406\5")
buf.write("\u00c6d\2\u0406\u0407\5J&\2\u0407\u0408\7N\2\2\u0408\u0409")
buf.write("\5\u00a6T\2\u0409\u009f\3\2\2\2\u040a\u040b\7?\2\2\u040b")
buf.write("\u040c\5\u00a6T\2\u040c\u040d\7@\2\2\u040d\u00a1\3\2\2")
buf.write("\2\u040e\u0413\5\u00a6T\2\u040f\u0410\7F\2\2\u0410\u0412")
buf.write("\5\u00a6T\2\u0411\u040f\3\2\2\2\u0412\u0415\3\2\2\2\u0413")
buf.write("\u0411\3\2\2\2\u0413\u0414\3\2\2\2\u0414\u00a3\3\2\2\2")
buf.write("\u0415\u0413\3\2\2\2\u0416\u0417\7q\2\2\u0417\u0419\7")
buf.write("?\2\2\u0418\u041a\5\u00a2R\2\u0419\u0418\3\2\2\2\u0419")
buf.write("\u041a\3\2\2\2\u041a\u041b\3\2\2\2\u041b\u0429\7@\2\2")
buf.write("\u041c\u041d\7-\2\2\u041d\u041f\7?\2\2\u041e\u0420\5\u00a2")
buf.write("R\2\u041f\u041e\3\2\2\2\u041f\u0420\3\2\2\2\u0420\u0421")
buf.write("\3\2\2\2\u0421\u0429\7@\2\2\u0422\u0423\7*\2\2\u0423\u0425")
buf.write("\7?\2\2\u0424\u0426\5\u00a2R\2\u0425\u0424\3\2\2\2\u0425")
buf.write("\u0426\3\2\2\2\u0426\u0427\3\2\2\2\u0427\u0429\7@\2\2")
buf.write("\u0428\u0416\3\2\2\2\u0428\u041c\3\2\2\2\u0428\u0422\3")
buf.write("\2\2\2\u0429\u00a5\3\2\2\2\u042a\u042b\bT\1\2\u042b\u044a")
buf.write("\5\u00aeX\2\u042c\u044a\5\u00a4S\2\u042d\u042e\7!\2\2")
buf.write("\u042e\u044a\5\u00b2Z\2\u042f\u0430\7?\2\2\u0430\u0431")
buf.write("\5\u00c6d\2\u0431\u0432\7@\2\2\u0432\u0433\5\u00a6T\27")
buf.write("\u0433\u044a\3\2\2\2\u0434\u0435\t\5\2\2\u0435\u044a\5")
buf.write("\u00a6T\25\u0436\u0437\t\6\2\2\u0437\u044a\5\u00a6T\24")
buf.write("\u0438\u044a\5\u00a8U\2\u0439\u043a\5\u00c6d\2\u043a\u0440")
buf.write("\7k\2\2\u043b\u043d\5\u00caf\2\u043c\u043b\3\2\2\2\u043c")
buf.write("\u043d\3\2\2\2\u043d\u043e\3\2\2\2\u043e\u0441\7q\2\2")
buf.write("\u043f\u0441\7!\2\2\u0440\u043c\3\2\2\2\u0440\u043f\3")
buf.write("\2\2\2\u0441\u044a\3\2\2\2\u0442\u0443\5\u00b0Y\2\u0443")
buf.write("\u0445\7k\2\2\u0444\u0446\5\u00caf\2\u0445\u0444\3\2\2")
buf.write("\2\u0445\u0446\3\2\2\2\u0446\u0447\3\2\2\2\u0447\u0448")
buf.write("\7!\2\2\u0448\u044a\3\2\2\2\u0449\u042a\3\2\2\2\u0449")
buf.write("\u042c\3\2\2\2\u0449\u042d\3\2\2\2\u0449\u042f\3\2\2\2")
buf.write("\u0449\u0434\3\2\2\2\u0449\u0436\3\2\2\2\u0449\u0438\3")
buf.write("\2\2\2\u0449\u0439\3\2\2\2\u0449\u0442\3\2\2\2\u044a\u049b")
buf.write("\3\2\2\2\u044b\u044c\f\23\2\2\u044c\u044d\t\7\2\2\u044d")
buf.write("\u049a\5\u00a6T\24\u044e\u044f\f\22\2\2\u044f\u0450\t")
buf.write("\b\2\2\u0450\u049a\5\u00a6T\23\u0451\u0459\f\21\2\2\u0452")
buf.write("\u0453\7J\2\2\u0453\u045a\7J\2\2\u0454\u0455\7I\2\2\u0455")
buf.write("\u0456\7I\2\2\u0456\u045a\7I\2\2\u0457\u0458\7I\2\2\u0458")
buf.write("\u045a\7I\2\2\u0459\u0452\3\2\2\2\u0459\u0454\3\2\2\2")
buf.write("\u0459\u0457\3\2\2\2\u045a\u045b\3\2\2\2\u045b\u049a\5")
buf.write("\u00a6T\22\u045c\u045d\f\20\2\2\u045d\u045e\t\t\2\2\u045e")
buf.write("\u049a\5\u00a6T\21\u045f\u0460\f\16\2\2\u0460\u0461\t")
buf.write("\n\2\2\u0461\u049a\5\u00a6T\17\u0462\u0463\f\r\2\2\u0463")
buf.write("\u0464\7[\2\2\u0464\u049a\5\u00a6T\16\u0465\u0466\f\f")
buf.write("\2\2\u0466\u0467\7]\2\2\u0467\u049a\5\u00a6T\r\u0468\u0469")
buf.write("\f\13\2\2\u0469\u046a\7\\\2\2\u046a\u049a\5\u00a6T\f\u046b")
buf.write("\u046c\f\n\2\2\u046c\u046d\7S\2\2\u046d\u049a\5\u00a6")
buf.write("T\13\u046e\u046f\f\t\2\2\u046f\u0470\7T\2\2\u0470\u049a")
buf.write("\5\u00a6T\n\u0471\u0472\f\b\2\2\u0472\u0473\7M\2\2\u0473")
buf.write("\u0474\5\u00a6T\2\u0474\u0475\7N\2\2\u0475\u0476\5\u00a6")
buf.write("T\t\u0476\u049a\3\2\2\2\u0477\u0478\f\7\2\2\u0478\u0479")
buf.write("\t\13\2\2\u0479\u049a\5\u00a6T\7\u047a\u047b\f\33\2\2")
buf.write("\u047b\u0487\7G\2\2\u047c\u0488\7q\2\2\u047d\u0488\5\u00a4")
buf.write("S\2\u047e\u0488\7-\2\2\u047f\u0481\7!\2\2\u0480\u0482")
buf.write("\5\u00c2b\2\u0481\u0480\3\2\2\2\u0481\u0482\3\2\2\2\u0482")
buf.write("\u0483\3\2\2\2\u0483\u0488\5\u00b6\\\2\u0484\u0485\7*")
buf.write("\2\2\u0485\u0488\5\u00ccg\2\u0486\u0488\5\u00bc_\2\u0487")
buf.write("\u047c\3\2\2\2\u0487\u047d\3\2\2\2\u0487\u047e\3\2\2\2")
buf.write("\u0487\u047f\3\2\2\2\u0487\u0484\3\2\2\2\u0487\u0486\3")
buf.write("\2\2\2\u0488\u049a\3\2\2\2\u0489\u048a\f\32\2\2\u048a")
buf.write("\u048b\7C\2\2\u048b\u048c\5\u00a6T\2\u048c\u048d\7D\2")
buf.write("\2\u048d\u049a\3\2\2\2\u048e\u048f\f\26\2\2\u048f\u049a")
buf.write("\t\f\2\2\u0490\u0491\f\17\2\2\u0491\u0492\7\34\2\2\u0492")
buf.write("\u049a\5\u00c6d\2\u0493\u0494\f\5\2\2\u0494\u0496\7k\2")
buf.write("\2\u0495\u0497\5\u00caf\2\u0496\u0495\3\2\2\2\u0496\u0497")
buf.write("\3\2\2\2\u0497\u0498\3\2\2\2\u0498\u049a\7q\2\2\u0499")
buf.write("\u044b\3\2\2\2\u0499\u044e\3\2\2\2\u0499\u0451\3\2\2\2")
buf.write("\u0499\u045c\3\2\2\2\u0499\u045f\3\2\2\2\u0499\u0462\3")
buf.write("\2\2\2\u0499\u0465\3\2\2\2\u0499\u0468\3\2\2\2\u0499\u046b")
buf.write("\3\2\2\2\u0499\u046e\3\2\2\2\u0499\u0471\3\2\2\2\u0499")
buf.write("\u0477\3\2\2\2\u0499\u047a\3\2\2\2\u0499\u0489\3\2\2\2")
buf.write("\u0499\u048e\3\2\2\2\u0499\u0490\3\2\2\2\u0499\u0493\3")
buf.write("\2\2\2\u049a\u049d\3\2\2\2\u049b\u0499\3\2\2\2\u049b\u049c")
buf.write("\3\2\2\2\u049c\u00a7\3\2\2\2\u049d\u049b\3\2\2\2\u049e")
buf.write("\u049f\5\u00aaV\2\u049f\u04a0\7j\2\2\u04a0\u04a1\5\u00ac")
buf.write("W\2\u04a1\u00a9\3\2\2\2\u04a2\u04b3\7q\2\2\u04a3\u04a5")
buf.write("\7?\2\2\u04a4\u04a6\5X-\2\u04a5\u04a4\3\2\2\2\u04a5\u04a6")
buf.write("\3\2\2\2\u04a6\u04a7\3\2\2\2\u04a7\u04b3\7@\2\2\u04a8")
buf.write("\u04a9\7?\2\2\u04a9\u04ae\7q\2\2\u04aa\u04ab\7F\2\2\u04ab")
buf.write("\u04ad\7q\2\2\u04ac\u04aa\3\2\2\2\u04ad\u04b0\3\2\2\2")
buf.write("\u04ae\u04ac\3\2\2\2\u04ae\u04af\3\2\2\2\u04af\u04b1\3")
buf.write("\2\2\2\u04b0\u04ae\3\2\2\2\u04b1\u04b3\7@\2\2\u04b2\u04a2")
buf.write("\3\2\2\2\u04b2\u04a3\3\2\2\2\u04b2\u04a8\3\2\2\2\u04b3")
buf.write("\u00ab\3\2\2\2\u04b4\u04b7\5\u00a6T\2\u04b5\u04b7\5\u0080")
buf.write("A\2\u04b6\u04b4\3\2\2\2\u04b6\u04b5\3\2\2\2\u04b7\u00ad")
buf.write("\3\2\2\2\u04b8\u04b9\7?\2\2\u04b9\u04ba\5\u00a6T\2\u04ba")
buf.write("\u04bb\7@\2\2\u04bb\u04cb\3\2\2\2\u04bc\u04cb\7-\2\2\u04bd")
buf.write("\u04cb\7*\2\2\u04be\u04cb\5`\61\2\u04bf\u04cb\7q\2\2\u04c0")
buf.write("\u04c1\5.\30\2\u04c1\u04c2\7G\2\2\u04c2\u04c3\7\13\2\2")
buf.write("\u04c3\u04cb\3\2\2\2\u04c4\u04c8\5\u00c2b\2\u04c5\u04c9")
buf.write("\5\u00ceh\2\u04c6\u04c7\7-\2\2\u04c7\u04c9\5\u00d0i\2")
buf.write("\u04c8\u04c5\3\2\2\2\u04c8\u04c6\3\2\2\2\u04c9\u04cb\3")
buf.write("\2\2\2\u04ca\u04b8\3\2\2\2\u04ca\u04bc\3\2\2\2\u04ca\u04bd")
buf.write("\3\2\2\2\u04ca\u04be\3\2\2\2\u04ca\u04bf\3\2\2\2\u04ca")
buf.write("\u04c0\3\2\2\2\u04ca\u04c4\3\2\2\2\u04cb\u00af\3\2\2\2")
buf.write("\u04cc\u04cd\5P)\2\u04cd\u04ce\7G\2\2\u04ce\u04d0\3\2")
buf.write("\2\2\u04cf\u04cc\3\2\2\2\u04cf\u04d0\3\2\2\2\u04d0\u04d4")
buf.write("\3\2\2\2\u04d1\u04d3\5f\64\2\u04d2\u04d1\3\2\2\2\u04d3")
buf.write("\u04d6\3\2\2\2\u04d4\u04d2\3\2\2\2\u04d4\u04d5\3\2\2\2")
buf.write("\u04d5\u04d7\3\2\2\2\u04d6\u04d4\3\2\2\2\u04d7\u04d9\7")
buf.write("q\2\2\u04d8\u04da\5\u00caf\2\u04d9\u04d8\3\2\2\2\u04d9")
buf.write("\u04da\3\2\2\2\u04da\u00b1\3\2\2\2\u04db\u04dc\5\u00c2")
buf.write("b\2\u04dc\u04dd\5\u00b4[\2\u04dd\u04de\5\u00ba^\2\u04de")
buf.write("\u04e5\3\2\2\2\u04df\u04e2\5\u00b4[\2\u04e0\u04e3\5\u00b8")
buf.write("]\2\u04e1\u04e3\5\u00ba^\2\u04e2\u04e0\3\2\2\2\u04e2\u04e1")
buf.write("\3\2\2\2\u04e3\u04e5\3\2\2\2\u04e4\u04db\3\2\2\2\u04e4")
buf.write("\u04df\3\2\2\2\u04e5\u00b3\3\2\2\2\u04e6\u04e8\7q\2\2")
buf.write("\u04e7\u04e9\5\u00be`\2\u04e8\u04e7\3\2\2\2\u04e8\u04e9")
buf.write("\3\2\2\2\u04e9\u04f1\3\2\2\2\u04ea\u04eb\7G\2\2\u04eb")
buf.write("\u04ed\7q\2\2\u04ec\u04ee\5\u00be`\2\u04ed\u04ec\3\2\2")
buf.write("\2\u04ed\u04ee\3\2\2\2\u04ee\u04f0\3\2\2\2\u04ef\u04ea")
buf.write("\3\2\2\2\u04f0\u04f3\3\2\2\2\u04f1\u04ef\3\2\2\2\u04f1")
buf.write("\u04f2\3\2\2\2\u04f2\u04f6\3\2\2\2\u04f3\u04f1\3\2\2\2")
buf.write("\u04f4\u04f6\5\u00c8e\2\u04f5\u04e6\3\2\2\2\u04f5\u04f4")
buf.write("\3\2\2\2\u04f6\u00b5\3\2\2\2\u04f7\u04f9\7q\2\2\u04f8")
buf.write("\u04fa\5\u00c0a\2\u04f9\u04f8\3\2\2\2\u04f9\u04fa\3\2")
buf.write("\2\2\u04fa\u04fb\3\2\2\2\u04fb\u04fc\5\u00ba^\2\u04fc")
buf.write("\u00b7\3\2\2\2\u04fd\u0519\7C\2\2\u04fe\u0503\7D\2\2\u04ff")
buf.write("\u0500\7C\2\2\u0500\u0502\7D\2\2\u0501\u04ff\3\2\2\2\u0502")
buf.write("\u0505\3\2\2\2\u0503\u0501\3\2\2\2\u0503\u0504\3\2\2\2")
buf.write("\u0504\u0506\3\2\2\2\u0505\u0503\3\2\2\2\u0506\u051a\5")
buf.write("N(\2\u0507\u0508\5\u00a6T\2\u0508\u050f\7D\2\2\u0509\u050a")
buf.write("\7C\2\2\u050a\u050b\5\u00a6T\2\u050b\u050c\7D\2\2\u050c")
buf.write("\u050e\3\2\2\2\u050d\u0509\3\2\2\2\u050e\u0511\3\2\2\2")
buf.write("\u050f\u050d\3\2\2\2\u050f\u0510\3\2\2\2\u0510\u0516\3")
buf.write("\2\2\2\u0511\u050f\3\2\2\2\u0512\u0513\7C\2\2\u0513\u0515")
buf.write("\7D\2\2\u0514\u0512\3\2\2\2\u0515\u0518\3\2\2\2\u0516")
buf.write("\u0514\3\2\2\2\u0516\u0517\3\2\2\2\u0517\u051a\3\2\2\2")
buf.write("\u0518\u0516\3\2\2\2\u0519\u04fe\3\2\2\2\u0519\u0507\3")
buf.write("\2\2\2\u051a\u00b9\3\2\2\2\u051b\u051d\5\u00d0i\2\u051c")
buf.write("\u051e\5\"\22\2\u051d\u051c\3\2\2\2\u051d\u051e\3\2\2")
buf.write("\2\u051e\u00bb\3\2\2\2\u051f\u0520\5\u00c2b\2\u0520\u0521")
buf.write("\5\u00ceh\2\u0521\u00bd\3\2\2\2\u0522\u0523\7J\2\2\u0523")
buf.write("\u0526\7I\2\2\u0524\u0526\5\u00caf\2\u0525\u0522\3\2\2")
buf.write("\2\u0525\u0524\3\2\2\2\u0526\u00bf\3\2\2\2\u0527\u0528")
buf.write("\7J\2\2\u0528\u052b\7I\2\2\u0529\u052b\5\u00c2b\2\u052a")
buf.write("\u0527\3\2\2\2\u052a\u0529\3\2\2\2\u052b\u00c1\3\2\2\2")
buf.write("\u052c\u052d\7J\2\2\u052d\u052e\5\u00c4c\2\u052e\u052f")
buf.write("\7I\2\2\u052f\u00c3\3\2\2\2\u0530\u0535\5\u00c6d\2\u0531")
buf.write("\u0532\7F\2\2\u0532\u0534\5\u00c6d\2\u0533\u0531\3\2\2")
buf.write("\2\u0534\u0537\3\2\2\2\u0535\u0533\3\2\2\2\u0535\u0536")
buf.write("\3\2\2\2\u0536\u00c5\3\2\2\2\u0537\u0535\3\2\2\2\u0538")
buf.write("\u053a\5f\64\2\u0539\u0538\3\2\2\2\u0539\u053a\3\2\2\2")
buf.write("\u053a\u053d\3\2\2\2\u053b\u053e\5P)\2\u053c\u053e\5\u00c8")
buf.write("e\2\u053d\u053b\3\2\2\2\u053d\u053c\3\2\2\2\u053e\u0543")
buf.write("\3\2\2\2\u053f\u0540\7C\2\2\u0540\u0542\7D\2\2\u0541\u053f")
buf.write("\3\2\2\2\u0542\u0545\3\2\2\2\u0543\u0541\3\2\2\2\u0543")
buf.write("\u0544\3\2\2\2\u0544\u00c7\3\2\2\2\u0545\u0543\3\2\2\2")
buf.write("\u0546\u0547\t\r\2\2\u0547\u00c9\3\2\2\2\u0548\u0549\7")
buf.write("J\2\2\u0549\u054e\5R*\2\u054a\u054b\7F\2\2\u054b\u054d")
buf.write("\5R*\2\u054c\u054a\3\2\2\2\u054d\u0550\3\2\2\2\u054e\u054c")
buf.write("\3\2\2\2\u054e\u054f\3\2\2\2\u054f\u0551\3\2\2\2\u0550")
buf.write("\u054e\3\2\2\2\u0551\u0552\7I\2\2\u0552\u00cb\3\2\2\2")
buf.write("\u0553\u055a\5\u00d0i\2\u0554\u0555\7G\2\2\u0555\u0557")
buf.write("\7q\2\2\u0556\u0558\5\u00d0i\2\u0557\u0556\3\2\2\2\u0557")
buf.write("\u0558\3\2\2\2\u0558\u055a\3\2\2\2\u0559\u0553\3\2\2\2")
buf.write("\u0559\u0554\3\2\2\2\u055a\u00cd\3\2\2\2\u055b\u055c\7")
buf.write("*\2\2\u055c\u0560\5\u00ccg\2\u055d\u055e\7q\2\2\u055e")
buf.write("\u0560\5\u00d0i\2\u055f\u055b\3\2\2\2\u055f\u055d\3\2")
buf.write("\2\2\u0560\u00cf\3\2\2\2\u0561\u0563\7?\2\2\u0562\u0564")
buf.write("\5\u00a2R\2\u0563\u0562\3\2\2\2\u0563\u0564\3\2\2\2\u0564")
buf.write("\u0565\3\2\2\2\u0565\u0566\7@\2\2\u0566\u00d1\3\2\2\2")
buf.write("\u00ad\u00d3\u00d8\u00de\u00e6\u00ef\u00f4\u00fb\u0102")
buf.write("\u0105\u010c\u0116\u011a\u011f\u0123\u0127\u0131\u0139")
buf.write("\u013f\u0146\u014d\u0151\u0154\u0157\u0160\u0166\u016b")
buf.write("\u016e\u0174\u017a\u017e\u0186\u018f\u0196\u019c\u01a0")
buf.write("\u01ab\u01b4\u01b9\u01bf\u01c3\u01cf\u01da\u01df\u01e8")
buf.write("\u01f0\u01fa\u0203\u020b\u0210\u0218\u021d\u0227\u0231")
buf.write("\u0237\u023e\u0243\u024b\u024f\u0251\u0257\u025c\u0260")
buf.write("\u0267\u0269\u0270\u0275\u027e\u0283\u0286\u028b\u0294")
buf.write("\u02a0\u02a9\u02b4\u02b7\u02be\u02c8\u02d0\u02d3\u02d6")
buf.write("\u02e3\u02eb\u02f0\u02f8\u02fc\u0300\u0304\u0306\u030a")
buf.write("\u0310\u031b\u0325\u032a\u0333\u0338\u033b\u0342\u034b")
buf.write("\u0362\u0365\u0368\u0370\u0374\u037c\u0382\u038d\u0396")
buf.write("\u039b\u03a5\u03ac\u03b9\u03c2\u03cb\u03d1\u03dc\u03e1")
buf.write("\u03e6\u03eb\u03ef\u03f3\u03f7\u03f9\u03fd\u0402\u0413")
buf.write("\u0419\u041f\u0425\u0428\u043c\u0440\u0445\u0449\u0459")
buf.write("\u0481\u0487\u0496\u0499\u049b\u04a5\u04ae\u04b2\u04b6")
buf.write("\u04c8\u04ca\u04cf\u04d4\u04d9\u04e2\u04e4\u04e8\u04ed")
buf.write("\u04f1\u04f5\u04f9\u0503\u050f\u0516\u0519\u051d\u0525")
buf.write("\u052a\u0535\u0539\u053d\u0543\u054e\u0557\u0559\u055f")
buf.write("\u0563")
return buf.getvalue()
class JavaParser ( Parser ):
grammarFileName = "JavaParser.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'abstract'", "'assert'", "'boolean'",
"'break'", "'byte'", "'case'", "'catch'", "'char'",
"'class'", "'const'", "'continue'", "'default'", "'do'",
"'double'", "'else'", "'enum'", "'extends'", "'final'",
"'finally'", "'float'", "'for'", "'if'", "'goto'",
"'implements'", "'import'", "'instanceof'", "'int'",
"'interface'", "'long'", "'native'", "'new'", "'package'",
"'private'", "'protected'", "'public'", "'return'",
"'short'", "'static'", "'strictfp'", "'super'", "'switch'",
"'synchronized'", "'this'", "'throw'", "'throws'",
"'transient'", "'try'", "'void'", "'volatile'", "'while'",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "'null'", "'('", "')'", "'{'", "'}'",
"'['", "']'", "';'", "','", "'.'", "'='", "'>'", "'<'",
"'!'", "'~'", "'?'", "':'", "'=='", "'<='", "'>='",
"'!='", "'&&'", "'||'", "'++'", "'--'", "'+'", "'-'",
"'*'", "'/'", "'&'", "'|'", "'^'", "'%'", "'+='", "'-='",
"'*='", "'/='", "'&='", "'|='", "'^='", "'%='", "'<<='",
"'>>='", "'>>>='", "'->'", "'::'", "'@'", "'...'" ]
symbolicNames = [ "<INVALID>", "ABSTRACT", "ASSERT", "BOOLEAN", "BREAK",
"BYTE", "CASE", "CATCH", "CHAR", "CLASS", "CONST",
"CONTINUE", "DEFAULT", "DO", "DOUBLE", "ELSE", "ENUM",
"EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR", "IF",
"GOTO", "IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT",
"INTERFACE", "LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE",
"PROTECTED", "PUBLIC", "RETURN", "SHORT", "STATIC",
"STRICTFP", "SUPER", "SWITCH", "SYNCHRONIZED", "THIS",
"THROW", "THROWS", "TRANSIENT", "TRY", "VOID", "VOLATILE",
"WHILE", "DECIMAL_LITERAL", "HEX_LITERAL", "OCT_LITERAL",
"BINARY_LITERAL", "FLOAT_LITERAL", "HEX_FLOAT_LITERAL",
"BOOL_LITERAL", "CHAR_LITERAL", "STRING_LITERAL",
"NULL_LITERAL", "LPAREN", "RPAREN", "LBRACE", "RBRACE",
"LBRACK", "RBRACK", "SEMI", "COMMA", "DOT", "ASSIGN",
"GT", "LT", "BANG", "TILDE", "QUESTION", "COLON",
"EQUAL", "LE", "GE", "NOTEQUAL", "AND", "OR", "INC",
"DEC", "ADD", "SUB", "MUL", "DIV", "BITAND", "BITOR",
"CARET", "MOD", "ADD_ASSIGN", "SUB_ASSIGN", "MUL_ASSIGN",
"DIV_ASSIGN", "AND_ASSIGN", "OR_ASSIGN", "XOR_ASSIGN",
"MOD_ASSIGN", "LSHIFT_ASSIGN", "RSHIFT_ASSIGN", "URSHIFT_ASSIGN",
"ARROW", "COLONCOLON", "AT", "ELLIPSIS", "WS", "COMMENT",
"LINE_COMMENT", "IDENTIFIER" ]
RULE_compilationUnit = 0
RULE_packageDeclaration = 1
RULE_importDeclaration = 2
RULE_typeDeclaration = 3
RULE_modifier = 4
RULE_classOrInterfaceModifier = 5
RULE_variableModifier = 6
RULE_classDeclaration = 7
RULE_typeParameters = 8
RULE_typeParameter = 9
RULE_typeBound = 10
RULE_enumDeclaration = 11
RULE_enumConstants = 12
RULE_enumConstant = 13
RULE_enumBodyDeclarations = 14
RULE_interfaceDeclaration = 15
RULE_classBody = 16
RULE_interfaceBody = 17
RULE_classBodyDeclaration = 18
RULE_memberDeclaration = 19
RULE_methodDeclaration = 20
RULE_methodBody = 21
RULE_typeTypeOrVoid = 22
RULE_genericMethodDeclaration = 23
RULE_genericConstructorDeclaration = 24
RULE_constructorDeclaration = 25
RULE_fieldDeclaration = 26
RULE_interfaceBodyDeclaration = 27
RULE_interfaceMemberDeclaration = 28
RULE_constDeclaration = 29
RULE_constantDeclarator = 30
RULE_interfaceMethodDeclaration = 31
RULE_interfaceMethodModifier = 32
RULE_genericInterfaceMethodDeclaration = 33
RULE_variableDeclarators = 34
RULE_variableDeclarator = 35
RULE_variableDeclaratorId = 36
RULE_variableInitializer = 37
RULE_arrayInitializer = 38
RULE_classOrInterfaceType = 39
RULE_typeArgument = 40
RULE_qualifiedNameList = 41
RULE_formalParameters = 42
RULE_formalParameterList = 43
RULE_formalParameter = 44
RULE_lastFormalParameter = 45
RULE_qualifiedName = 46
RULE_literal = 47
RULE_integerLiteral = 48
RULE_floatLiteral = 49
RULE_annotation = 50
RULE_elementValuePairs = 51
RULE_elementValuePair = 52
RULE_elementValue = 53
RULE_elementValueArrayInitializer = 54
RULE_annotationTypeDeclaration = 55
RULE_annotationTypeBody = 56
RULE_annotationTypeElementDeclaration = 57
RULE_annotationTypeElementRest = 58
RULE_annotationMethodOrConstantRest = 59
RULE_annotationMethodRest = 60
RULE_annotationConstantRest = 61
RULE_defaultValue = 62
RULE_block = 63
RULE_blockStatement = 64
RULE_localVariableDeclaration = 65
RULE_localTypeDeclaration = 66
RULE_statement = 67
RULE_catchClause = 68
RULE_catchType = 69
RULE_finallyBlock = 70
RULE_resourceSpecification = 71
RULE_resources = 72
RULE_resource = 73
RULE_switchBlockStatementGroup = 74
RULE_switchLabel = 75
RULE_forControl = 76
RULE_forInit = 77
RULE_enhancedForControl = 78
RULE_parExpression = 79
RULE_expressionList = 80
RULE_methodCall = 81
RULE_expression = 82
RULE_lambdaExpression = 83
RULE_lambdaParameters = 84
RULE_lambdaBody = 85
RULE_primary = 86
RULE_classType = 87
RULE_creator = 88
RULE_createdName = 89
RULE_innerCreator = 90
RULE_arrayCreatorRest = 91
RULE_classCreatorRest = 92
RULE_explicitGenericInvocation = 93
RULE_typeArgumentsOrDiamond = 94
RULE_nonWildcardTypeArgumentsOrDiamond = 95
RULE_nonWildcardTypeArguments = 96
RULE_typeList = 97
RULE_typeType = 98
RULE_primitiveType = 99
RULE_typeArguments = 100
RULE_superSuffix = 101
RULE_explicitGenericInvocationSuffix = 102
RULE_arguments = 103
ruleNames = [ "compilationUnit", "packageDeclaration", "importDeclaration",
"typeDeclaration", "modifier", "classOrInterfaceModifier",
"variableModifier", "classDeclaration", "typeParameters",
"typeParameter", "typeBound", "enumDeclaration", "enumConstants",
"enumConstant", "enumBodyDeclarations", "interfaceDeclaration",
"classBody", "interfaceBody", "classBodyDeclaration",
"memberDeclaration", "methodDeclaration", "methodBody",
"typeTypeOrVoid", "genericMethodDeclaration", "genericConstructorDeclaration",
"constructorDeclaration", "fieldDeclaration", "interfaceBodyDeclaration",
"interfaceMemberDeclaration", "constDeclaration", "constantDeclarator",
"interfaceMethodDeclaration", "interfaceMethodModifier",
"genericInterfaceMethodDeclaration", "variableDeclarators",
"variableDeclarator", "variableDeclaratorId", "variableInitializer",
"arrayInitializer", "classOrInterfaceType", "typeArgument",
"qualifiedNameList", "formalParameters", "formalParameterList",
"formalParameter", "lastFormalParameter", "qualifiedName",
"literal", "integerLiteral", "floatLiteral", "annotation",
"elementValuePairs", "elementValuePair", "elementValue",
"elementValueArrayInitializer", "annotationTypeDeclaration",
"annotationTypeBody", "annotationTypeElementDeclaration",
"annotationTypeElementRest", "annotationMethodOrConstantRest",
"annotationMethodRest", "annotationConstantRest", "defaultValue",
"block", "blockStatement", "localVariableDeclaration",
"localTypeDeclaration", "statement", "catchClause", "catchType",
"finallyBlock", "resourceSpecification", "resources",
"resource", "switchBlockStatementGroup", "switchLabel",
"forControl", "forInit", "enhancedForControl", "parExpression",
"expressionList", "methodCall", "expression", "lambdaExpression",
"lambdaParameters", "lambdaBody", "primary", "classType",
"creator", "createdName", "innerCreator", "arrayCreatorRest",
"classCreatorRest", "explicitGenericInvocation", "typeArgumentsOrDiamond",
"nonWildcardTypeArgumentsOrDiamond", "nonWildcardTypeArguments",
"typeList", "typeType", "primitiveType", "typeArguments",
"superSuffix", "explicitGenericInvocationSuffix", "arguments" ]
EOF = Token.EOF
ABSTRACT=1
ASSERT=2
BOOLEAN=3
BREAK=4
BYTE=5
CASE=6
CATCH=7
CHAR=8
CLASS=9
CONST=10
CONTINUE=11
DEFAULT=12
DO=13
DOUBLE=14
ELSE=15
ENUM=16
EXTENDS=17
FINAL=18
FINALLY=19
FLOAT=20
FOR=21
IF=22
GOTO=23
IMPLEMENTS=24
IMPORT=25
INSTANCEOF=26
INT=27
INTERFACE=28
LONG=29
NATIVE=30
NEW=31
PACKAGE=32
PRIVATE=33
PROTECTED=34
PUBLIC=35
RETURN=36
SHORT=37
STATIC=38
STRICTFP=39
SUPER=40
SWITCH=41
SYNCHRONIZED=42
THIS=43
THROW=44
THROWS=45
TRANSIENT=46
TRY=47
VOID=48
VOLATILE=49
WHILE=50
DECIMAL_LITERAL=51
HEX_LITERAL=52
OCT_LITERAL=53
BINARY_LITERAL=54
FLOAT_LITERAL=55
HEX_FLOAT_LITERAL=56
BOOL_LITERAL=57
CHAR_LITERAL=58
STRING_LITERAL=59
NULL_LITERAL=60
LPAREN=61
RPAREN=62
LBRACE=63
RBRACE=64
LBRACK=65
RBRACK=66
SEMI=67
COMMA=68
DOT=69
ASSIGN=70
GT=71
LT=72
BANG=73
TILDE=74
QUESTION=75
COLON=76
EQUAL=77
LE=78
GE=79
NOTEQUAL=80
AND=81
OR=82
INC=83
DEC=84
ADD=85
SUB=86
MUL=87
DIV=88
BITAND=89
BITOR=90
CARET=91
MOD=92
ADD_ASSIGN=93
SUB_ASSIGN=94
MUL_ASSIGN=95
DIV_ASSIGN=96
AND_ASSIGN=97
OR_ASSIGN=98
XOR_ASSIGN=99
MOD_ASSIGN=100
LSHIFT_ASSIGN=101
RSHIFT_ASSIGN=102
URSHIFT_ASSIGN=103
ARROW=104
COLONCOLON=105
AT=106
ELLIPSIS=107
WS=108
COMMENT=109
LINE_COMMENT=110
IDENTIFIER=111
    def __init__(self, input:TokenStream):
        """Create a parser over *input* and wire up the shared ATN simulator."""
        super().__init__(input)
        # Verify the installed ANTLR runtime is compatible with the version
        # this parser was generated for.
        self.checkVersion("4.5.3")
        # All instances share the class-level ATN, DFAs and context cache.
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None
class CompilationUnitContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def EOF(self):
return self.getToken(JavaParser.EOF, 0)
def packageDeclaration(self):
return self.getTypedRuleContext(JavaParser.PackageDeclarationContext,0)
def importDeclaration(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.ImportDeclarationContext)
else:
return self.getTypedRuleContext(JavaParser.ImportDeclarationContext,i)
def typeDeclaration(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.TypeDeclarationContext)
else:
return self.getTypedRuleContext(JavaParser.TypeDeclarationContext,i)
def getRuleIndex(self):
return JavaParser.RULE_compilationUnit
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCompilationUnit" ):
listener.enterCompilationUnit(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCompilationUnit" ):
listener.exitCompilationUnit(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitCompilationUnit" ):
return visitor.visitCompilationUnit(self)
else:
return visitor.visitChildren(self)
    def compilationUnit(self):
        """Parse rule `compilationUnit`:
        packageDeclaration? importDeclaration* typeDeclaration* EOF
        """
        localctx = JavaParser.CompilationUnitContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_compilationUnit)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            # Optional packageDeclaration, resolved by adaptive prediction
            # (decision 0 of the serialized ATN).
            self.state = 209
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,0,self._ctx)
            if la_ == 1:
                self.state = 208
                self.packageDeclaration()

            # importDeclaration*  — loop while lookahead is IMPORT.
            self.state = 214
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==JavaParser.IMPORT:
                self.state = 211
                self.importDeclaration()
                self.state = 216
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            # typeDeclaration*  — loop while lookahead is in the bitset of
            # tokens that can start a type declaration (modifiers, CLASS,
            # ENUM, INTERFACE, ...), or is SEMI / AT.
            self.state = 220
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.CLASS) | (1 << JavaParser.ENUM) | (1 << JavaParser.FINAL) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP))) != 0) or _la==JavaParser.SEMI or _la==JavaParser.AT:
                self.state = 217
                self.typeDeclaration()
                self.state = 222
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            self.state = 223
            self.match(JavaParser.EOF)
        except RecognitionException as re:
            # Standard generated recovery: record, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class PackageDeclarationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def PACKAGE(self):
return self.getToken(JavaParser.PACKAGE, 0)
def qualifiedName(self):
return self.getTypedRuleContext(JavaParser.QualifiedNameContext,0)
def annotation(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.AnnotationContext)
else:
return self.getTypedRuleContext(JavaParser.AnnotationContext,i)
def getRuleIndex(self):
return JavaParser.RULE_packageDeclaration
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPackageDeclaration" ):
listener.enterPackageDeclaration(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPackageDeclaration" ):
listener.exitPackageDeclaration(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitPackageDeclaration" ):
return visitor.visitPackageDeclaration(self)
else:
return visitor.visitChildren(self)
    def packageDeclaration(self):
        """Parse rule `packageDeclaration`:
        annotation* PACKAGE qualifiedName SEMI
        """
        localctx = JavaParser.PackageDeclarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_packageDeclaration)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            # annotation*  — annotations start with the AT token.
            self.state = 228
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==JavaParser.AT:
                self.state = 225
                self.annotation()
                self.state = 230
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            self.state = 231
            self.match(JavaParser.PACKAGE)
            self.state = 232
            self.qualifiedName()
            self.state = 233
            self.match(JavaParser.SEMI)
        except RecognitionException as re:
            # Standard generated recovery: record, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ImportDeclarationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IMPORT(self):
return self.getToken(JavaParser.IMPORT, 0)
def qualifiedName(self):
return self.getTypedRuleContext(JavaParser.QualifiedNameContext,0)
def STATIC(self):
return self.getToken(JavaParser.STATIC, 0)
def getRuleIndex(self):
return JavaParser.RULE_importDeclaration
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterImportDeclaration" ):
listener.enterImportDeclaration(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitImportDeclaration" ):
listener.exitImportDeclaration(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitImportDeclaration" ):
return visitor.visitImportDeclaration(self)
else:
return visitor.visitChildren(self)
    def importDeclaration(self):
        """Parse rule `importDeclaration`:
        IMPORT STATIC? qualifiedName (DOT MUL)? SEMI
        """
        localctx = JavaParser.ImportDeclarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_importDeclaration)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 235
            self.match(JavaParser.IMPORT)
            # Optional STATIC (import static ...).
            self.state = 237
            _la = self._input.LA(1)
            if _la==JavaParser.STATIC:
                self.state = 236
                self.match(JavaParser.STATIC)

            self.state = 239
            self.qualifiedName()
            # Optional trailing '.*' for on-demand imports.
            self.state = 242
            _la = self._input.LA(1)
            if _la==JavaParser.DOT:
                self.state = 240
                self.match(JavaParser.DOT)
                self.state = 241
                self.match(JavaParser.MUL)

            self.state = 244
            self.match(JavaParser.SEMI)
        except RecognitionException as re:
            # Standard generated recovery: record, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class TypeDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the typeDeclaration grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ClassDeclarationContext, 0)

    def enumDeclaration(self):
        return self.getTypedRuleContext(JavaParser.EnumDeclarationContext, 0)

    def interfaceDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceDeclarationContext, 0)

    def annotationTypeDeclaration(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeDeclarationContext, 0)

    def classOrInterfaceModifier(self, i:int=None):
        # i is None -> all modifier children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ClassOrInterfaceModifierContext)
        return self.getTypedRuleContext(JavaParser.ClassOrInterfaceModifierContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_typeDeclaration

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeDeclaration", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeDeclaration", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeDeclaration", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def typeDeclaration(self):
    """Parse rule: typeDeclaration
         : classOrInterfaceModifier*
           (classDeclaration | enumDeclaration | interfaceDeclaration | annotationTypeDeclaration)
         | SEMI ;
    """
    localctx = JavaParser.TypeDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 6, self.RULE_typeDeclaration)
    try:
        self.state = 259
        token = self._input.LA(1)
        if token in [JavaParser.ABSTRACT, JavaParser.CLASS, JavaParser.ENUM, JavaParser.FINAL, JavaParser.INTERFACE, JavaParser.PRIVATE, JavaParser.PROTECTED, JavaParser.PUBLIC, JavaParser.STATIC, JavaParser.STRICTFP, JavaParser.AT]:
            self.enterOuterAlt(localctx, 1)
            self.state = 249
            self._errHandler.sync(self)
            # adaptive decision 6 drives the modifier* loop (alt 1 = one more modifier)
            _alt = self._interp.adaptivePredict(self._input,6,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 246
                    self.classOrInterfaceModifier()
                self.state = 251
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,6,self._ctx)

            self.state = 256
            # dispatch on the token that starts the declaration proper
            token = self._input.LA(1)
            if token in [JavaParser.CLASS]:
                self.state = 252
                self.classDeclaration()
            elif token in [JavaParser.ENUM]:
                self.state = 253
                self.enumDeclaration()
            elif token in [JavaParser.INTERFACE]:
                self.state = 254
                self.interfaceDeclaration()
            elif token in [JavaParser.AT]:
                self.state = 255
                self.annotationTypeDeclaration()
            else:
                raise NoViableAltException(self)

        elif token in [JavaParser.SEMI]:
            # alternative 2: a stray ';' at type level
            self.enterOuterAlt(localctx, 2)
            self.state = 258
            self.match(JavaParser.SEMI)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ModifierContext(ParserRuleContext):
    """Parse-tree node produced by the modifier grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classOrInterfaceModifier(self):
        return self.getTypedRuleContext(JavaParser.ClassOrInterfaceModifierContext, 0)

    def NATIVE(self):
        return self.getToken(JavaParser.NATIVE, 0)

    def SYNCHRONIZED(self):
        return self.getToken(JavaParser.SYNCHRONIZED, 0)

    def TRANSIENT(self):
        return self.getToken(JavaParser.TRANSIENT, 0)

    def VOLATILE(self):
        return self.getToken(JavaParser.VOLATILE, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_modifier

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterModifier", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitModifier", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitModifier", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def modifier(self):
    """Parse rule: modifier
         : classOrInterfaceModifier | NATIVE | SYNCHRONIZED | TRANSIENT | VOLATILE ;
    """
    localctx = JavaParser.ModifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_modifier)
    try:
        self.state = 266
        # one-token lookahead selects the alternative
        token = self._input.LA(1)
        if token in [JavaParser.ABSTRACT, JavaParser.FINAL, JavaParser.PRIVATE, JavaParser.PROTECTED, JavaParser.PUBLIC, JavaParser.STATIC, JavaParser.STRICTFP, JavaParser.AT]:
            self.enterOuterAlt(localctx, 1)
            self.state = 261
            self.classOrInterfaceModifier()
        elif token in [JavaParser.NATIVE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 262
            self.match(JavaParser.NATIVE)
        elif token in [JavaParser.SYNCHRONIZED]:
            self.enterOuterAlt(localctx, 3)
            self.state = 263
            self.match(JavaParser.SYNCHRONIZED)
        elif token in [JavaParser.TRANSIENT]:
            self.enterOuterAlt(localctx, 4)
            self.state = 264
            self.match(JavaParser.TRANSIENT)
        elif token in [JavaParser.VOLATILE]:
            self.enterOuterAlt(localctx, 5)
            self.state = 265
            self.match(JavaParser.VOLATILE)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassOrInterfaceModifierContext(ParserRuleContext):
    """Parse-tree node produced by the classOrInterfaceModifier grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def annotation(self):
        return self.getTypedRuleContext(JavaParser.AnnotationContext, 0)

    def PUBLIC(self):
        return self.getToken(JavaParser.PUBLIC, 0)

    def PROTECTED(self):
        return self.getToken(JavaParser.PROTECTED, 0)

    def PRIVATE(self):
        return self.getToken(JavaParser.PRIVATE, 0)

    def STATIC(self):
        return self.getToken(JavaParser.STATIC, 0)

    def ABSTRACT(self):
        return self.getToken(JavaParser.ABSTRACT, 0)

    def FINAL(self):
        return self.getToken(JavaParser.FINAL, 0)

    def STRICTFP(self):
        return self.getToken(JavaParser.STRICTFP, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_classOrInterfaceModifier

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassOrInterfaceModifier", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassOrInterfaceModifier", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassOrInterfaceModifier", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def classOrInterfaceModifier(self):
    """Parse rule: classOrInterfaceModifier
         : annotation | PUBLIC | PROTECTED | PRIVATE | STATIC | ABSTRACT | FINAL | STRICTFP ;
    """
    localctx = JavaParser.ClassOrInterfaceModifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_classOrInterfaceModifier)
    try:
        self.state = 276
        # one-token lookahead selects the alternative
        token = self._input.LA(1)
        if token in [JavaParser.AT]:
            self.enterOuterAlt(localctx, 1)
            self.state = 268
            self.annotation()
        elif token in [JavaParser.PUBLIC]:
            self.enterOuterAlt(localctx, 2)
            self.state = 269
            self.match(JavaParser.PUBLIC)
        elif token in [JavaParser.PROTECTED]:
            self.enterOuterAlt(localctx, 3)
            self.state = 270
            self.match(JavaParser.PROTECTED)
        elif token in [JavaParser.PRIVATE]:
            self.enterOuterAlt(localctx, 4)
            self.state = 271
            self.match(JavaParser.PRIVATE)
        elif token in [JavaParser.STATIC]:
            self.enterOuterAlt(localctx, 5)
            self.state = 272
            self.match(JavaParser.STATIC)
        elif token in [JavaParser.ABSTRACT]:
            self.enterOuterAlt(localctx, 6)
            self.state = 273
            self.match(JavaParser.ABSTRACT)
        elif token in [JavaParser.FINAL]:
            self.enterOuterAlt(localctx, 7)
            self.state = 274
            self.match(JavaParser.FINAL)
        elif token in [JavaParser.STRICTFP]:
            self.enterOuterAlt(localctx, 8)
            self.state = 275
            self.match(JavaParser.STRICTFP)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableModifierContext(ParserRuleContext):
    """Parse-tree node produced by the variableModifier grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def FINAL(self):
        return self.getToken(JavaParser.FINAL, 0)

    def annotation(self):
        return self.getTypedRuleContext(JavaParser.AnnotationContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_variableModifier

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterVariableModifier", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitVariableModifier", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitVariableModifier", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def variableModifier(self):
    """Parse rule: variableModifier : FINAL | annotation ;"""
    localctx = JavaParser.VariableModifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_variableModifier)
    try:
        self.state = 280
        # one-token lookahead selects the alternative
        token = self._input.LA(1)
        if token in [JavaParser.FINAL]:
            self.enterOuterAlt(localctx, 1)
            self.state = 278
            self.match(JavaParser.FINAL)
        elif token in [JavaParser.AT]:
            self.enterOuterAlt(localctx, 2)
            self.state = 279
            self.annotation()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the classDeclaration grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def CLASS(self):
        return self.getToken(JavaParser.CLASS, 0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def classBody(self):
        return self.getTypedRuleContext(JavaParser.ClassBodyContext, 0)

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext, 0)

    def EXTENDS(self):
        return self.getToken(JavaParser.EXTENDS, 0)

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext, 0)

    def IMPLEMENTS(self):
        return self.getToken(JavaParser.IMPLEMENTS, 0)

    def typeList(self):
        return self.getTypedRuleContext(JavaParser.TypeListContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_classDeclaration

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassDeclaration", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassDeclaration", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassDeclaration", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def classDeclaration(self):
    """Parse rule: classDeclaration
         : CLASS IDENTIFIER typeParameters? (EXTENDS typeType)? (IMPLEMENTS typeList)? classBody ;
    """
    localctx = JavaParser.ClassDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 14, self.RULE_classDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 282
        self.match(JavaParser.CLASS)
        self.state = 283
        self.match(JavaParser.IDENTIFIER)
        self.state = 285
        _la = self._input.LA(1)
        if _la==JavaParser.LT:
            # optional generic type parameter list
            self.state = 284
            self.typeParameters()

        self.state = 289
        _la = self._input.LA(1)
        if _la==JavaParser.EXTENDS:
            # optional superclass clause
            self.state = 287
            self.match(JavaParser.EXTENDS)
            self.state = 288
            self.typeType()

        self.state = 293
        _la = self._input.LA(1)
        if _la==JavaParser.IMPLEMENTS:
            # optional implemented-interfaces clause
            self.state = 291
            self.match(JavaParser.IMPLEMENTS)
            self.state = 292
            self.typeList()

        self.state = 295
        self.classBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeParametersContext(ParserRuleContext):
    """Parse-tree node produced by the typeParameters grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeParameter(self, i:int=None):
        # i is None -> all typeParameter children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.TypeParameterContext)
        return self.getTypedRuleContext(JavaParser.TypeParameterContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_typeParameters

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeParameters", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeParameters", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeParameters", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def typeParameters(self):
    """Parse rule: typeParameters : LT typeParameter (COMMA typeParameter)* GT ;"""
    localctx = JavaParser.TypeParametersContext(self, self._ctx, self.state)
    self.enterRule(localctx, 16, self.RULE_typeParameters)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 297
        self.match(JavaParser.LT)
        self.state = 298
        self.typeParameter()
        self.state = 303
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # comma-separated tail: ',' typeParameter, repeated
        while _la==JavaParser.COMMA:
            self.state = 299
            self.match(JavaParser.COMMA)
            self.state = 300
            self.typeParameter()
            self.state = 305
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 306
        self.match(JavaParser.GT)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeParameterContext(ParserRuleContext):
    """Parse-tree node produced by the typeParameter grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def annotation(self, i:int=None):
        # i is None -> all annotation children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.AnnotationContext)
        return self.getTypedRuleContext(JavaParser.AnnotationContext, i)

    def EXTENDS(self):
        return self.getToken(JavaParser.EXTENDS, 0)

    def typeBound(self):
        return self.getTypedRuleContext(JavaParser.TypeBoundContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_typeParameter

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeParameter", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeParameter", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeParameter", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def typeParameter(self):
    """Parse rule: typeParameter : annotation* IDENTIFIER (EXTENDS typeBound)? ;"""
    localctx = JavaParser.TypeParameterContext(self, self._ctx, self.state)
    self.enterRule(localctx, 18, self.RULE_typeParameter)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 311
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # leading annotations, each introduced by '@'
        while _la==JavaParser.AT:
            self.state = 308
            self.annotation()
            self.state = 313
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 314
        self.match(JavaParser.IDENTIFIER)
        self.state = 317
        _la = self._input.LA(1)
        if _la==JavaParser.EXTENDS:
            # optional upper-bound clause
            self.state = 315
            self.match(JavaParser.EXTENDS)
            self.state = 316
            self.typeBound()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeBoundContext(ParserRuleContext):
    """Parse-tree node produced by the typeBound grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self, i:int=None):
        # i is None -> all typeType children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.TypeTypeContext)
        return self.getTypedRuleContext(JavaParser.TypeTypeContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_typeBound

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeBound", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeBound", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeBound", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def typeBound(self):
    """Parse rule: typeBound : typeType (BITAND typeType)* ;"""
    localctx = JavaParser.TypeBoundContext(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_typeBound)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 319
        self.typeType()
        self.state = 324
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # intersection-type tail: '&' typeType, repeated
        while _la==JavaParser.BITAND:
            self.state = 320
            self.match(JavaParser.BITAND)
            self.state = 321
            self.typeType()
            self.state = 326
            self._errHandler.sync(self)
            _la = self._input.LA(1)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the enumDeclaration grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ENUM(self):
        return self.getToken(JavaParser.ENUM, 0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def IMPLEMENTS(self):
        return self.getToken(JavaParser.IMPLEMENTS, 0)

    def typeList(self):
        return self.getTypedRuleContext(JavaParser.TypeListContext, 0)

    def enumConstants(self):
        return self.getTypedRuleContext(JavaParser.EnumConstantsContext, 0)

    def enumBodyDeclarations(self):
        return self.getTypedRuleContext(JavaParser.EnumBodyDeclarationsContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_enumDeclaration

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterEnumDeclaration", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitEnumDeclaration", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitEnumDeclaration", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def enumDeclaration(self):
    """Parse rule: enumDeclaration
         : ENUM IDENTIFIER (IMPLEMENTS typeList)?
           LBRACE enumConstants? COMMA? enumBodyDeclarations? RBRACE ;
    """
    localctx = JavaParser.EnumDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_enumDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 327
        self.match(JavaParser.ENUM)
        self.state = 328
        self.match(JavaParser.IDENTIFIER)
        self.state = 331
        _la = self._input.LA(1)
        if _la==JavaParser.IMPLEMENTS:
            # optional implemented-interfaces clause
            self.state = 329
            self.match(JavaParser.IMPLEMENTS)
            self.state = 330
            self.typeList()

        self.state = 333
        self.match(JavaParser.LBRACE)
        self.state = 335
        _la = self._input.LA(1)
        if _la==JavaParser.AT or _la==JavaParser.IDENTIFIER:
            # optional constant list (a constant starts with '@' or an identifier)
            self.state = 334
            self.enumConstants()

        self.state = 338
        _la = self._input.LA(1)
        if _la==JavaParser.COMMA:
            # optional trailing comma after the constants
            self.state = 337
            self.match(JavaParser.COMMA)

        self.state = 341
        _la = self._input.LA(1)
        if _la==JavaParser.SEMI:
            # optional body declarations, introduced by ';'
            self.state = 340
            self.enumBodyDeclarations()

        self.state = 343
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumConstantsContext(ParserRuleContext):
    """Parse-tree node produced by the enumConstants grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def enumConstant(self, i:int=None):
        # i is None -> all enumConstant children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.EnumConstantContext)
        return self.getTypedRuleContext(JavaParser.EnumConstantContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_enumConstants

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterEnumConstants", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitEnumConstants", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitEnumConstants", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def enumConstants(self):
    """Parse rule: enumConstants : enumConstant (COMMA enumConstant)* ;

    The tail uses adaptive decision 23 so a trailing comma (handled by the
    caller, enumDeclaration) is not consumed here.
    """
    localctx = JavaParser.EnumConstantsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_enumConstants)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 345
        self.enumConstant()
        self.state = 350
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,23,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 346
                self.match(JavaParser.COMMA)
                self.state = 347
                self.enumConstant()
            self.state = 352
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,23,self._ctx)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumConstantContext(ParserRuleContext):
    """Parse-tree node produced by the enumConstant grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def annotation(self, i:int=None):
        # i is None -> all annotation children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.AnnotationContext)
        return self.getTypedRuleContext(JavaParser.AnnotationContext, i)

    def arguments(self):
        return self.getTypedRuleContext(JavaParser.ArgumentsContext, 0)

    def classBody(self):
        return self.getTypedRuleContext(JavaParser.ClassBodyContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_enumConstant

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterEnumConstant", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitEnumConstant", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitEnumConstant", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def enumConstant(self):
    """Parse rule: enumConstant : annotation* IDENTIFIER arguments? classBody? ;"""
    localctx = JavaParser.EnumConstantContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_enumConstant)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 356
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # leading annotations, each introduced by '@'
        while _la==JavaParser.AT:
            self.state = 353
            self.annotation()
            self.state = 358
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 359
        self.match(JavaParser.IDENTIFIER)
        self.state = 361
        _la = self._input.LA(1)
        if _la==JavaParser.LPAREN:
            # optional constructor argument list
            self.state = 360
            self.arguments()

        self.state = 364
        _la = self._input.LA(1)
        if _la==JavaParser.LBRACE:
            # optional constant-specific class body
            self.state = 363
            self.classBody()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumBodyDeclarationsContext(ParserRuleContext):
    """Parse-tree node produced by the enumBodyDeclarations grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classBodyDeclaration(self, i:int=None):
        # i is None -> all classBodyDeclaration children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ClassBodyDeclarationContext)
        return self.getTypedRuleContext(JavaParser.ClassBodyDeclarationContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_enumBodyDeclarations

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterEnumBodyDeclarations", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitEnumBodyDeclarations", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitEnumBodyDeclarations", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def enumBodyDeclarations(self):
    """Parse rule: enumBodyDeclarations : SEMI classBodyDeclaration* ;

    The while-condition is a generated FOLLOW-set bitmask test over the tokens
    that can start a classBodyDeclaration.
    """
    localctx = JavaParser.EnumBodyDeclarationsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_enumBodyDeclarations)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 366
        self.match(JavaParser.SEMI)
        self.state = 370
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.ENUM) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NATIVE) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.TRANSIENT) | (1 << JavaParser.VOID) | (1 << JavaParser.VOLATILE) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.LT - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0):
            self.state = 367
            self.classBodyDeclaration()
            self.state = 372
            self._errHandler.sync(self)
            _la = self._input.LA(1)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the interfaceDeclaration grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def INTERFACE(self):
        return self.getToken(JavaParser.INTERFACE, 0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def interfaceBody(self):
        return self.getTypedRuleContext(JavaParser.InterfaceBodyContext, 0)

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext, 0)

    def EXTENDS(self):
        return self.getToken(JavaParser.EXTENDS, 0)

    def typeList(self):
        return self.getTypedRuleContext(JavaParser.TypeListContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceDeclaration

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterInterfaceDeclaration", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitInterfaceDeclaration", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitInterfaceDeclaration", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def interfaceDeclaration(self):
    """Parse rule: interfaceDeclaration
         : INTERFACE IDENTIFIER typeParameters? (EXTENDS typeList)? interfaceBody ;
    """
    localctx = JavaParser.InterfaceDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_interfaceDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 373
        self.match(JavaParser.INTERFACE)
        self.state = 374
        self.match(JavaParser.IDENTIFIER)
        self.state = 376
        _la = self._input.LA(1)
        if _la==JavaParser.LT:
            # optional generic type parameter list
            self.state = 375
            self.typeParameters()

        self.state = 380
        _la = self._input.LA(1)
        if _la==JavaParser.EXTENDS:
            # optional extended-interfaces clause
            self.state = 378
            self.match(JavaParser.EXTENDS)
            self.state = 379
            self.typeList()

        self.state = 382
        self.interfaceBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassBodyContext(ParserRuleContext):
    """Parse-tree node produced by the classBody grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classBodyDeclaration(self, i:int=None):
        # i is None -> all classBodyDeclaration children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ClassBodyDeclarationContext)
        return self.getTypedRuleContext(JavaParser.ClassBodyDeclarationContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_classBody

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassBody", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassBody", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassBody", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def classBody(self):
    """Parse rule: classBody : LBRACE classBodyDeclaration* RBRACE ;

    The while-condition is a generated FOLLOW-set bitmask test over the tokens
    that can start a classBodyDeclaration.
    """
    localctx = JavaParser.ClassBodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_classBody)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 384
        self.match(JavaParser.LBRACE)
        self.state = 388
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.ENUM) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NATIVE) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.TRANSIENT) | (1 << JavaParser.VOID) | (1 << JavaParser.VOLATILE) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.LT - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0):
            self.state = 385
            self.classBodyDeclaration()
            self.state = 390
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 391
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceBodyContext(ParserRuleContext):
    """Parse-tree node produced by the interfaceBody grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def interfaceBodyDeclaration(self, i:int=None):
        # i is None -> all interfaceBodyDeclaration children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.InterfaceBodyDeclarationContext)
        return self.getTypedRuleContext(JavaParser.InterfaceBodyDeclarationContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceBody

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterInterfaceBody", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitInterfaceBody", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitInterfaceBody", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def interfaceBody(self):
    """Parse rule: interfaceBody : LBRACE interfaceBodyDeclaration* RBRACE ;

    The while-condition is a generated FOLLOW-set bitmask test over the tokens
    that can start an interfaceBodyDeclaration (note it includes DEFAULT).
    """
    localctx = JavaParser.InterfaceBodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 34, self.RULE_interfaceBody)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 393
        self.match(JavaParser.LBRACE)
        self.state = 397
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.DEFAULT) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.ENUM) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NATIVE) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.TRANSIENT) | (1 << JavaParser.VOID) | (1 << JavaParser.VOLATILE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.LT - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0):
            self.state = 394
            self.interfaceBodyDeclaration()
            self.state = 399
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 400
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassBodyDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the classBodyDeclaration grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def block(self):
        return self.getTypedRuleContext(JavaParser.BlockContext, 0)

    def STATIC(self):
        return self.getToken(JavaParser.STATIC, 0)

    def memberDeclaration(self):
        return self.getTypedRuleContext(JavaParser.MemberDeclarationContext, 0)

    def modifier(self, i:int=None):
        # i is None -> all modifier children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ModifierContext)
        return self.getTypedRuleContext(JavaParser.ModifierContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_classBodyDeclaration

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassBodyDeclaration", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassBodyDeclaration", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassBodyDeclaration", None)
        return visit(self) if visit is not None else visitor.visitChildren(self)
def classBodyDeclaration(self):
    """Parse one classBodyDeclaration: ';' | STATIC? block | modifier* memberDeclaration."""
    localctx = JavaParser.ClassBodyDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 36, self.RULE_classBodyDeclaration)
    self._la = 0 # Token type
    try:
        self.state = 414
        self._errHandler.sync(self);
        # ATN decision 34 picks the alternative.
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            # Alt 1: a lone ';'.
            self.enterOuterAlt(localctx, 1)
            self.state = 402
            self.match(JavaParser.SEMI)
            pass
        elif la_ == 2:
            # Alt 2: optional STATIC, then an initializer block.
            self.enterOuterAlt(localctx, 2)
            self.state = 404
            _la = self._input.LA(1)
            if _la==JavaParser.STATIC:
                self.state = 403
                self.match(JavaParser.STATIC)

            self.state = 406
            self.block()
            pass
        elif la_ == 3:
            # Alt 3: modifier* memberDeclaration.
            self.enterOuterAlt(localctx, 3)
            self.state = 410
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,33,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 407
                    self.modifier()
                self.state = 412
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,33,self._ctx)

            self.state = 413
            self.memberDeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MemberDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'memberDeclaration' rule (one of nine member kinds)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def methodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.MethodDeclarationContext,0)

    def genericMethodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.GenericMethodDeclarationContext,0)

    def fieldDeclaration(self):
        return self.getTypedRuleContext(JavaParser.FieldDeclarationContext,0)

    def constructorDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ConstructorDeclarationContext,0)

    def genericConstructorDeclaration(self):
        return self.getTypedRuleContext(JavaParser.GenericConstructorDeclarationContext,0)

    def interfaceDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceDeclarationContext,0)

    def annotationTypeDeclaration(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeDeclarationContext,0)

    def classDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ClassDeclarationContext,0)

    def enumDeclaration(self):
        return self.getTypedRuleContext(JavaParser.EnumDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_memberDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMemberDeclaration" ):
            listener.enterMemberDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMemberDeclaration" ):
            listener.exitMemberDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitMemberDeclaration" ):
            return visitor.visitMemberDeclaration(self)
        else:
            return visitor.visitChildren(self)
def memberDeclaration(self):
    """Parse memberDeclaration: dispatch (ATN decision 35) to one of nine member kinds
    (method, generic method, field, constructor, generic constructor, interface,
    annotation type, class, enum).
    """
    localctx = JavaParser.MemberDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_memberDeclaration)
    try:
        self.state = 425
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,35,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 416
            self.methodDeclaration()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 417
            self.genericMethodDeclaration()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 418
            self.fieldDeclaration()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 419
            self.constructorDeclaration()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 420
            self.genericConstructorDeclaration()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 421
            self.interfaceDeclaration()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 422
            self.annotationTypeDeclaration()
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 423
            self.classDeclaration()
            pass
        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 424
            self.enumDeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MethodDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'methodDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeTypeOrVoid(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeOrVoidContext,0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def formalParameters(self):
        return self.getTypedRuleContext(JavaParser.FormalParametersContext,0)

    def methodBody(self):
        return self.getTypedRuleContext(JavaParser.MethodBodyContext,0)

    def THROWS(self):
        # Present only when the method declares a throws clause.
        return self.getToken(JavaParser.THROWS, 0)

    def qualifiedNameList(self):
        return self.getTypedRuleContext(JavaParser.QualifiedNameListContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_methodDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMethodDeclaration" ):
            listener.enterMethodDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMethodDeclaration" ):
            listener.exitMethodDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitMethodDeclaration" ):
            return visitor.visitMethodDeclaration(self)
        else:
            return visitor.visitChildren(self)
def methodDeclaration(self):
    """Parse methodDeclaration:
    typeTypeOrVoid IDENTIFIER formalParameters ('[' ']')* (THROWS qualifiedNameList)? methodBody
    """
    localctx = JavaParser.MethodDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_methodDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 427
        self.typeTypeOrVoid()
        self.state = 428
        self.match(JavaParser.IDENTIFIER)
        self.state = 429
        self.formalParameters()
        self.state = 434
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Legacy C-style array dimensions after the parameter list: ('[' ']')*
        while _la==JavaParser.LBRACK:
            self.state = 430
            self.match(JavaParser.LBRACK)
            self.state = 431
            self.match(JavaParser.RBRACK)
            self.state = 436
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 439
        _la = self._input.LA(1)
        if _la==JavaParser.THROWS:
            # Optional throws clause.
            self.state = 437
            self.match(JavaParser.THROWS)
            self.state = 438
            self.qualifiedNameList()

        self.state = 441
        self.methodBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MethodBodyContext(ParserRuleContext):
    """Parse-tree node for the 'methodBody' rule (a block, or ';' when absent)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def block(self):
        return self.getTypedRuleContext(JavaParser.BlockContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_methodBody

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMethodBody" ):
            listener.enterMethodBody(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMethodBody" ):
            listener.exitMethodBody(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitMethodBody" ):
            return visitor.visitMethodBody(self)
        else:
            return visitor.visitChildren(self)
def methodBody(self):
    """Parse methodBody: block | ';'."""
    localctx = JavaParser.MethodBodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_methodBody)
    try:
        self.state = 445
        # Branch on one token of lookahead.
        token = self._input.LA(1)
        if token in [JavaParser.LBRACE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 443
            self.block()
        elif token in [JavaParser.SEMI]:
            # Bodyless method (e.g. abstract/native) ends with ';'.
            self.enterOuterAlt(localctx, 2)
            self.state = 444
            self.match(JavaParser.SEMI)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeTypeOrVoidContext(ParserRuleContext):
    """Parse-tree node for the 'typeTypeOrVoid' rule (a type, or the VOID keyword)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)

    def VOID(self):
        return self.getToken(JavaParser.VOID, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_typeTypeOrVoid

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypeTypeOrVoid" ):
            listener.enterTypeTypeOrVoid(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypeTypeOrVoid" ):
            listener.exitTypeTypeOrVoid(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitTypeTypeOrVoid" ):
            return visitor.visitTypeTypeOrVoid(self)
        else:
            return visitor.visitChildren(self)
def typeTypeOrVoid(self):
    """Parse typeTypeOrVoid: typeType | VOID."""
    localctx = JavaParser.TypeTypeOrVoidContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_typeTypeOrVoid)
    try:
        self.state = 449
        # Branch on one token of lookahead: any type-start token vs. VOID.
        token = self._input.LA(1)
        if token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT, JavaParser.AT, JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 447
            self.typeType()
        elif token in [JavaParser.VOID]:
            self.enterOuterAlt(localctx, 2)
            self.state = 448
            self.match(JavaParser.VOID)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class GenericMethodDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'genericMethodDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext,0)

    def methodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.MethodDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_genericMethodDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterGenericMethodDeclaration" ):
            listener.enterGenericMethodDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGenericMethodDeclaration" ):
            listener.exitGenericMethodDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitGenericMethodDeclaration" ):
            return visitor.visitGenericMethodDeclaration(self)
        else:
            return visitor.visitChildren(self)
def genericMethodDeclaration(self):
    """Parse genericMethodDeclaration: typeParameters methodDeclaration."""
    localctx = JavaParser.GenericMethodDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_genericMethodDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 451
        self.typeParameters()
        self.state = 452
        self.methodDeclaration()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class GenericConstructorDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'genericConstructorDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext,0)

    def constructorDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ConstructorDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_genericConstructorDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterGenericConstructorDeclaration" ):
            listener.enterGenericConstructorDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGenericConstructorDeclaration" ):
            listener.exitGenericConstructorDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitGenericConstructorDeclaration" ):
            return visitor.visitGenericConstructorDeclaration(self)
        else:
            return visitor.visitChildren(self)
def genericConstructorDeclaration(self):
    """Parse genericConstructorDeclaration: typeParameters constructorDeclaration."""
    localctx = JavaParser.GenericConstructorDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_genericConstructorDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 454
        self.typeParameters()
        self.state = 455
        self.constructorDeclaration()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ConstructorDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'constructorDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Labeled sub-rule: the constructor's body block, set during parsing.
        self.constructorBody = None # BlockContext

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def formalParameters(self):
        return self.getTypedRuleContext(JavaParser.FormalParametersContext,0)

    def block(self):
        return self.getTypedRuleContext(JavaParser.BlockContext,0)

    def THROWS(self):
        return self.getToken(JavaParser.THROWS, 0)

    def qualifiedNameList(self):
        return self.getTypedRuleContext(JavaParser.QualifiedNameListContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_constructorDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterConstructorDeclaration" ):
            listener.enterConstructorDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitConstructorDeclaration" ):
            listener.exitConstructorDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitConstructorDeclaration" ):
            return visitor.visitConstructorDeclaration(self)
        else:
            return visitor.visitChildren(self)
def constructorDeclaration(self):
    """Parse constructorDeclaration:
    IDENTIFIER formalParameters (THROWS qualifiedNameList)? constructorBody=block
    """
    localctx = JavaParser.ConstructorDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_constructorDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 457
        self.match(JavaParser.IDENTIFIER)
        self.state = 458
        self.formalParameters()
        self.state = 461
        _la = self._input.LA(1)
        if _la==JavaParser.THROWS:
            # Optional throws clause.
            self.state = 459
            self.match(JavaParser.THROWS)
            self.state = 460
            self.qualifiedNameList()

        self.state = 463
        # Store the body under its grammar label.
        localctx.constructorBody = self.block()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FieldDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'fieldDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)

    def variableDeclarators(self):
        return self.getTypedRuleContext(JavaParser.VariableDeclaratorsContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_fieldDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFieldDeclaration" ):
            listener.enterFieldDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFieldDeclaration" ):
            listener.exitFieldDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitFieldDeclaration" ):
            return visitor.visitFieldDeclaration(self)
        else:
            return visitor.visitChildren(self)
def fieldDeclaration(self):
    """Parse fieldDeclaration: typeType variableDeclarators ';'."""
    localctx = JavaParser.FieldDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_fieldDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 465
        self.typeType()
        self.state = 466
        self.variableDeclarators()
        self.state = 467
        self.match(JavaParser.SEMI)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceBodyDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'interfaceBodyDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def interfaceMemberDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceMemberDeclarationContext,0)

    def modifier(self, i:int=None):
        # i is None -> list of all modifier children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ModifierContext)
        else:
            return self.getTypedRuleContext(JavaParser.ModifierContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceBodyDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInterfaceBodyDeclaration" ):
            listener.enterInterfaceBodyDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInterfaceBodyDeclaration" ):
            listener.exitInterfaceBodyDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInterfaceBodyDeclaration" ):
            return visitor.visitInterfaceBodyDeclaration(self)
        else:
            return visitor.visitChildren(self)
def interfaceBodyDeclaration(self):
    """Parse interfaceBodyDeclaration: modifier* interfaceMemberDeclaration | ';'."""
    localctx = JavaParser.InterfaceBodyDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_interfaceBodyDeclaration)
    try:
        self.state = 477
        # Branch on one token of lookahead: any member-start token vs. ';'.
        token = self._input.LA(1)
        if token in [JavaParser.ABSTRACT, JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.CLASS, JavaParser.DEFAULT, JavaParser.DOUBLE, JavaParser.ENUM, JavaParser.FINAL, JavaParser.FLOAT, JavaParser.INT, JavaParser.INTERFACE, JavaParser.LONG, JavaParser.NATIVE, JavaParser.PRIVATE, JavaParser.PROTECTED, JavaParser.PUBLIC, JavaParser.SHORT, JavaParser.STATIC, JavaParser.STRICTFP, JavaParser.SYNCHRONIZED, JavaParser.TRANSIENT, JavaParser.VOID, JavaParser.VOLATILE, JavaParser.LT, JavaParser.AT, JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 472
            self._errHandler.sync(self)
            # Zero or more modifiers (ATN decision 41), then the member.
            _alt = self._interp.adaptivePredict(self._input,41,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 469
                    self.modifier()
                self.state = 474
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,41,self._ctx)

            self.state = 475
            self.interfaceMemberDeclaration()
        elif token in [JavaParser.SEMI]:
            self.enterOuterAlt(localctx, 2)
            self.state = 476
            self.match(JavaParser.SEMI)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceMemberDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'interfaceMemberDeclaration' rule (one of seven member kinds)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def constDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ConstDeclarationContext,0)

    def interfaceMethodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceMethodDeclarationContext,0)

    def genericInterfaceMethodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.GenericInterfaceMethodDeclarationContext,0)

    def interfaceDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceDeclarationContext,0)

    def annotationTypeDeclaration(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeDeclarationContext,0)

    def classDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ClassDeclarationContext,0)

    def enumDeclaration(self):
        return self.getTypedRuleContext(JavaParser.EnumDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceMemberDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInterfaceMemberDeclaration" ):
            listener.enterInterfaceMemberDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInterfaceMemberDeclaration" ):
            listener.exitInterfaceMemberDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInterfaceMemberDeclaration" ):
            return visitor.visitInterfaceMemberDeclaration(self)
        else:
            return visitor.visitChildren(self)
def interfaceMemberDeclaration(self):
    """Parse interfaceMemberDeclaration: dispatch (ATN decision 43) to one of seven
    member kinds (constant, method, generic method, interface, annotation type,
    class, enum).
    """
    localctx = JavaParser.InterfaceMemberDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_interfaceMemberDeclaration)
    try:
        self.state = 486
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,43,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 479
            self.constDeclaration()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 480
            self.interfaceMethodDeclaration()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 481
            self.genericInterfaceMethodDeclaration()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 482
            self.interfaceDeclaration()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 483
            self.annotationTypeDeclaration()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 484
            self.classDeclaration()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 485
            self.enumDeclaration()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ConstDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'constDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)

    def constantDeclarator(self, i:int=None):
        # i is None -> list of all constantDeclarator children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ConstantDeclaratorContext)
        else:
            return self.getTypedRuleContext(JavaParser.ConstantDeclaratorContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_constDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterConstDeclaration" ):
            listener.enterConstDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitConstDeclaration" ):
            listener.exitConstDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitConstDeclaration" ):
            return visitor.visitConstDeclaration(self)
        else:
            return visitor.visitChildren(self)
def constDeclaration(self):
    """Parse constDeclaration: typeType constantDeclarator (',' constantDeclarator)* ';'."""
    localctx = JavaParser.ConstDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_constDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 488
        self.typeType()
        self.state = 489
        self.constantDeclarator()
        self.state = 494
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Additional comma-separated declarators.
        while _la==JavaParser.COMMA:
            self.state = 490
            self.match(JavaParser.COMMA)
            self.state = 491
            self.constantDeclarator()
            self.state = 496
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 497
        self.match(JavaParser.SEMI)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ConstantDeclaratorContext(ParserRuleContext):
    """Parse-tree node for the 'constantDeclarator' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def variableInitializer(self):
        return self.getTypedRuleContext(JavaParser.VariableInitializerContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_constantDeclarator

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterConstantDeclarator" ):
            listener.enterConstantDeclarator(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitConstantDeclarator" ):
            listener.exitConstantDeclarator(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitConstantDeclarator" ):
            return visitor.visitConstantDeclarator(self)
        else:
            return visitor.visitChildren(self)
def constantDeclarator(self):
    """Parse constantDeclarator: IDENTIFIER ('[' ']')* '=' variableInitializer."""
    localctx = JavaParser.ConstantDeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_constantDeclarator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 499
        self.match(JavaParser.IDENTIFIER)
        self.state = 504
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional C-style array dimensions: ('[' ']')*
        while _la==JavaParser.LBRACK:
            self.state = 500
            self.match(JavaParser.LBRACK)
            self.state = 501
            self.match(JavaParser.RBRACK)
            self.state = 506
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 507
        # Interface constants must be initialized; '=' is mandatory here.
        self.match(JavaParser.ASSIGN)
        self.state = 508
        self.variableInitializer()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceMethodDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'interfaceMethodDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def formalParameters(self):
        return self.getTypedRuleContext(JavaParser.FormalParametersContext,0)

    def methodBody(self):
        return self.getTypedRuleContext(JavaParser.MethodBodyContext,0)

    def typeTypeOrVoid(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeOrVoidContext,0)

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext,0)

    def interfaceMethodModifier(self, i:int=None):
        # i is None -> list of all modifier children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.InterfaceMethodModifierContext)
        else:
            return self.getTypedRuleContext(JavaParser.InterfaceMethodModifierContext,i)

    def THROWS(self):
        return self.getToken(JavaParser.THROWS, 0)

    def qualifiedNameList(self):
        return self.getTypedRuleContext(JavaParser.QualifiedNameListContext,0)

    def annotation(self, i:int=None):
        # Annotations between the type parameters and the return type.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.AnnotationContext)
        else:
            return self.getTypedRuleContext(JavaParser.AnnotationContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceMethodDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInterfaceMethodDeclaration" ):
            listener.enterInterfaceMethodDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInterfaceMethodDeclaration" ):
            listener.exitInterfaceMethodDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInterfaceMethodDeclaration" ):
            return visitor.visitInterfaceMethodDeclaration(self)
        else:
            return visitor.visitChildren(self)
def interfaceMethodDeclaration(self):
    """Parse interfaceMethodDeclaration:
    interfaceMethodModifier* (typeTypeOrVoid | typeParameters annotation* typeTypeOrVoid)
    IDENTIFIER formalParameters ('[' ']')* (THROWS qualifiedNameList)? methodBody
    """
    localctx = JavaParser.InterfaceMethodDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_interfaceMethodDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 513
        self._errHandler.sync(self)
        # Zero or more interface-method modifiers (ATN decision 46).
        _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 510
                self.interfaceMethodModifier()
            self.state = 515
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,46,self._ctx)

        self.state = 526
        # Return type: plain type/void, or generic form with type parameters
        # and optional annotations before the return type.
        token = self._input.LA(1)
        if token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT, JavaParser.VOID, JavaParser.AT, JavaParser.IDENTIFIER]:
            self.state = 516
            self.typeTypeOrVoid()
        elif token in [JavaParser.LT]:
            self.state = 517
            self.typeParameters()
            self.state = 521
            self._errHandler.sync(self)
            # Zero or more annotations (ATN decision 47) before the return type.
            _alt = self._interp.adaptivePredict(self._input,47,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 518
                    self.annotation()
                self.state = 523
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,47,self._ctx)

            self.state = 524
            self.typeTypeOrVoid()
        else:
            raise NoViableAltException(self)

        self.state = 528
        self.match(JavaParser.IDENTIFIER)
        self.state = 529
        self.formalParameters()
        self.state = 534
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Legacy C-style array dimensions after the parameter list: ('[' ']')*
        while _la==JavaParser.LBRACK:
            self.state = 530
            self.match(JavaParser.LBRACK)
            self.state = 531
            self.match(JavaParser.RBRACK)
            self.state = 536
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 539
        _la = self._input.LA(1)
        if _la==JavaParser.THROWS:
            # Optional throws clause.
            self.state = 537
            self.match(JavaParser.THROWS)
            self.state = 538
            self.qualifiedNameList()

        self.state = 541
        self.methodBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InterfaceMethodModifierContext(ParserRuleContext):
    """Parse-tree node for the 'interfaceMethodModifier' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def annotation(self):
        return self.getTypedRuleContext(JavaParser.AnnotationContext,0)

    def PUBLIC(self):
        return self.getToken(JavaParser.PUBLIC, 0)

    def ABSTRACT(self):
        return self.getToken(JavaParser.ABSTRACT, 0)

    def DEFAULT(self):
        return self.getToken(JavaParser.DEFAULT, 0)

    def STATIC(self):
        return self.getToken(JavaParser.STATIC, 0)

    def STRICTFP(self):
        return self.getToken(JavaParser.STRICTFP, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_interfaceMethodModifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInterfaceMethodModifier" ):
            listener.enterInterfaceMethodModifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInterfaceMethodModifier" ):
            listener.exitInterfaceMethodModifier(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitInterfaceMethodModifier" ):
            return visitor.visitInterfaceMethodModifier(self)
        else:
            return visitor.visitChildren(self)
def interfaceMethodModifier(self):
    """Parse interfaceMethodModifier: annotation | PUBLIC | ABSTRACT | DEFAULT | STATIC | STRICTFP."""
    localctx = JavaParser.InterfaceMethodModifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_interfaceMethodModifier)
    try:
        self.state = 549
        # Branch on one token of lookahead.
        token = self._input.LA(1)
        if token in [JavaParser.AT]:
            self.enterOuterAlt(localctx, 1)
            self.state = 543
            self.annotation()
        elif token in [JavaParser.PUBLIC]:
            self.enterOuterAlt(localctx, 2)
            self.state = 544
            self.match(JavaParser.PUBLIC)
        elif token in [JavaParser.ABSTRACT]:
            self.enterOuterAlt(localctx, 3)
            self.state = 545
            self.match(JavaParser.ABSTRACT)
        elif token in [JavaParser.DEFAULT]:
            self.enterOuterAlt(localctx, 4)
            self.state = 546
            self.match(JavaParser.DEFAULT)
        elif token in [JavaParser.STATIC]:
            self.enterOuterAlt(localctx, 5)
            self.state = 547
            self.match(JavaParser.STATIC)
        elif token in [JavaParser.STRICTFP]:
            self.enterOuterAlt(localctx, 6)
            self.state = 548
            self.match(JavaParser.STRICTFP)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class GenericInterfaceMethodDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'genericInterfaceMethodDeclaration' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeParameters(self):
        return self.getTypedRuleContext(JavaParser.TypeParametersContext,0)

    def interfaceMethodDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceMethodDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_genericInterfaceMethodDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterGenericInterfaceMethodDeclaration" ):
            listener.enterGenericInterfaceMethodDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitGenericInterfaceMethodDeclaration" ):
            listener.exitGenericInterfaceMethodDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitGenericInterfaceMethodDeclaration" ):
            return visitor.visitGenericInterfaceMethodDeclaration(self)
        else:
            return visitor.visitChildren(self)
def genericInterfaceMethodDeclaration(self):
    """Parse genericInterfaceMethodDeclaration: typeParameters interfaceMethodDeclaration."""
    localctx = JavaParser.GenericInterfaceMethodDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_genericInterfaceMethodDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 551
        self.typeParameters()
        self.state = 552
        self.interfaceMethodDeclaration()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableDeclaratorsContext(ParserRuleContext):
    """Parse-tree node for the 'variableDeclarators' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def variableDeclarator(self, i:int=None):
        # i is None -> list of all variableDeclarator children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.VariableDeclaratorContext)
        else:
            return self.getTypedRuleContext(JavaParser.VariableDeclaratorContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_variableDeclarators

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterVariableDeclarators" ):
            listener.enterVariableDeclarators(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitVariableDeclarators" ):
            listener.exitVariableDeclarators(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitVariableDeclarators" ):
            return visitor.visitVariableDeclarators(self)
        else:
            return visitor.visitChildren(self)
    def variableDeclarators(self):
        """Rule: variableDeclarators : variableDeclarator (',' variableDeclarator)* ;"""
        localctx = JavaParser.VariableDeclaratorsContext(self, self._ctx, self.state)
        self.enterRule(localctx, 68, self.RULE_variableDeclarators)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 554
            self.variableDeclarator()
            self.state = 559
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # (',' variableDeclarator)* — loop while the lookahead is a comma.
            while _la==JavaParser.COMMA:
                self.state = 555
                self.match(JavaParser.COMMA)
                self.state = 556
                self.variableDeclarator()
                self.state = 561
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class VariableDeclaratorContext(ParserRuleContext):
        """Parse-tree node for the 'variableDeclarator' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def variableDeclaratorId(self):
            return self.getTypedRuleContext(JavaParser.VariableDeclaratorIdContext,0)
        def variableInitializer(self):
            # Present only when the declarator has an '=' initializer.
            return self.getTypedRuleContext(JavaParser.VariableInitializerContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_variableDeclarator
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterVariableDeclarator" ):
                listener.enterVariableDeclarator(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitVariableDeclarator" ):
                listener.exitVariableDeclarator(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitVariableDeclarator" ):
                return visitor.visitVariableDeclarator(self)
            else:
                return visitor.visitChildren(self)
    def variableDeclarator(self):
        """Rule: variableDeclarator : variableDeclaratorId ('=' variableInitializer)? ;"""
        localctx = JavaParser.VariableDeclaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 70, self.RULE_variableDeclarator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 562
            self.variableDeclaratorId()
            self.state = 565
            _la = self._input.LA(1)
            # Optional initializer: '=' variableInitializer.
            if _la==JavaParser.ASSIGN:
                self.state = 563
                self.match(JavaParser.ASSIGN)
                self.state = 564
                self.variableInitializer()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class VariableDeclaratorIdContext(ParserRuleContext):
        """Parse-tree node for the 'variableDeclaratorId' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def IDENTIFIER(self):
            return self.getToken(JavaParser.IDENTIFIER, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_variableDeclaratorId
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterVariableDeclaratorId" ):
                listener.enterVariableDeclaratorId(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitVariableDeclaratorId" ):
                listener.exitVariableDeclaratorId(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitVariableDeclaratorId" ):
                return visitor.visitVariableDeclaratorId(self)
            else:
                return visitor.visitChildren(self)
    def variableDeclaratorId(self):
        """Rule: variableDeclaratorId : IDENTIFIER ('[' ']')* ;"""
        localctx = JavaParser.VariableDeclaratorIdContext(self, self._ctx, self.state)
        self.enterRule(localctx, 72, self.RULE_variableDeclaratorId)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 567
            self.match(JavaParser.IDENTIFIER)
            self.state = 572
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # ('[' ']')* — consume array-dimension bracket pairs.
            while _la==JavaParser.LBRACK:
                self.state = 568
                self.match(JavaParser.LBRACK)
                self.state = 569
                self.match(JavaParser.RBRACK)
                self.state = 574
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class VariableInitializerContext(ParserRuleContext):
        """Parse-tree node for the 'variableInitializer' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def arrayInitializer(self):
            return self.getTypedRuleContext(JavaParser.ArrayInitializerContext,0)
        def expression(self):
            return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_variableInitializer
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterVariableInitializer" ):
                listener.enterVariableInitializer(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitVariableInitializer" ):
                listener.exitVariableInitializer(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitVariableInitializer" ):
                return visitor.visitVariableInitializer(self)
            else:
                return visitor.visitChildren(self)
    def variableInitializer(self):
        """Rule: variableInitializer : arrayInitializer | expression ;"""
        localctx = JavaParser.VariableInitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 74, self.RULE_variableInitializer)
        try:
            self.state = 577
            # Alternative chosen on one token of lookahead: '{' starts an
            # array initializer, anything that can start an expression takes alt 2.
            token = self._input.LA(1)
            if token in [JavaParser.LBRACE]:
                self.enterOuterAlt(localctx, 1)
                self.state = 575
                self.arrayInitializer()
            elif token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.NEW, JavaParser.SHORT, JavaParser.SUPER, JavaParser.THIS, JavaParser.VOID, JavaParser.DECIMAL_LITERAL, JavaParser.HEX_LITERAL, JavaParser.OCT_LITERAL, JavaParser.BINARY_LITERAL, JavaParser.FLOAT_LITERAL, JavaParser.HEX_FLOAT_LITERAL, JavaParser.BOOL_LITERAL, JavaParser.CHAR_LITERAL, JavaParser.STRING_LITERAL, JavaParser.NULL_LITERAL, JavaParser.LPAREN, JavaParser.LT, JavaParser.BANG, JavaParser.TILDE, JavaParser.INC, JavaParser.DEC, JavaParser.ADD, JavaParser.SUB, JavaParser.AT, JavaParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 2)
                self.state = 576
                self.expression(0)
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ArrayInitializerContext(ParserRuleContext):
        """Parse-tree node for the 'arrayInitializer' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def variableInitializer(self, i:int=None):
            # i is None -> all element initializers; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.VariableInitializerContext)
            else:
                return self.getTypedRuleContext(JavaParser.VariableInitializerContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_arrayInitializer
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterArrayInitializer" ):
                listener.enterArrayInitializer(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitArrayInitializer" ):
                listener.exitArrayInitializer(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitArrayInitializer" ):
                return visitor.visitArrayInitializer(self)
            else:
                return visitor.visitChildren(self)
    def arrayInitializer(self):
        """Rule: arrayInitializer : '{' (variableInitializer (',' variableInitializer)* (',')?)? '}' ;"""
        localctx = JavaParser.ArrayInitializerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 76, self.RULE_arrayInitializer)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 579
            self.match(JavaParser.LBRACE)
            self.state = 591
            _la = self._input.LA(1)
            # Generated bitset test: is the lookahead in FIRST(variableInitializer)?
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                self.state = 580
                self.variableInitializer()
                self.state = 585
                self._errHandler.sync(self)
                # adaptivePredict distinguishes ',' starting another element
                # from the optional trailing comma handled below.
                _alt = self._interp.adaptivePredict(self._input,56,self._ctx)
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 581
                        self.match(JavaParser.COMMA)
                        self.state = 582
                        self.variableInitializer()
                    self.state = 587
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,56,self._ctx)
                self.state = 589
                _la = self._input.LA(1)
                # Optional trailing comma before '}'.
                if _la==JavaParser.COMMA:
                    self.state = 588
                    self.match(JavaParser.COMMA)
            self.state = 593
            self.match(JavaParser.RBRACE)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ClassOrInterfaceTypeContext(ParserRuleContext):
        """Parse-tree node for the 'classOrInterfaceType' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def IDENTIFIER(self, i:int=None):
            # i is None -> all name-segment tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(JavaParser.IDENTIFIER)
            else:
                return self.getToken(JavaParser.IDENTIFIER, i)
        def typeArguments(self, i:int=None):
            # Each dotted segment may carry its own type-argument list.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.TypeArgumentsContext)
            else:
                return self.getTypedRuleContext(JavaParser.TypeArgumentsContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_classOrInterfaceType
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterClassOrInterfaceType" ):
                listener.enterClassOrInterfaceType(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitClassOrInterfaceType" ):
                listener.exitClassOrInterfaceType(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitClassOrInterfaceType" ):
                return visitor.visitClassOrInterfaceType(self)
            else:
                return visitor.visitChildren(self)
    def classOrInterfaceType(self):
        """Rule: classOrInterfaceType : IDENTIFIER typeArguments? ('.' IDENTIFIER typeArguments?)* ;"""
        localctx = JavaParser.ClassOrInterfaceTypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 78, self.RULE_classOrInterfaceType)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 595
            self.match(JavaParser.IDENTIFIER)
            self.state = 597
            self._errHandler.sync(self);
            # Optional typeArguments on the first segment (decision 59).
            la_ = self._interp.adaptivePredict(self._input,59,self._ctx)
            if la_ == 1:
                self.state = 596
                self.typeArguments()
            self.state = 606
            self._errHandler.sync(self)
            # ('.' IDENTIFIER typeArguments?)* — decision 61 chooses whether to
            # continue the dotted name.
            _alt = self._interp.adaptivePredict(self._input,61,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 599
                    self.match(JavaParser.DOT)
                    self.state = 600
                    self.match(JavaParser.IDENTIFIER)
                    self.state = 602
                    self._errHandler.sync(self);
                    la_ = self._interp.adaptivePredict(self._input,60,self._ctx)
                    if la_ == 1:
                        self.state = 601
                        self.typeArguments()
                self.state = 608
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,61,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class TypeArgumentContext(ParserRuleContext):
        """Parse-tree node for the 'typeArgument' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def typeType(self):
            return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)
        def EXTENDS(self):
            # Present only for upper-bounded wildcards ('? extends T').
            return self.getToken(JavaParser.EXTENDS, 0)
        def SUPER(self):
            # Present only for lower-bounded wildcards ('? super T').
            return self.getToken(JavaParser.SUPER, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_typeArgument
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterTypeArgument" ):
                listener.enterTypeArgument(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitTypeArgument" ):
                listener.exitTypeArgument(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitTypeArgument" ):
                return visitor.visitTypeArgument(self)
            else:
                return visitor.visitChildren(self)
    def typeArgument(self):
        """Rule: typeArgument : typeType | '?' (('extends' | 'super') typeType)? ;"""
        localctx = JavaParser.TypeArgumentContext(self, self._ctx, self.state)
        self.enterRule(localctx, 80, self.RULE_typeArgument)
        self._la = 0 # Token type
        try:
            self.state = 615
            token = self._input.LA(1)
            # Alt 1: a concrete type (primitive keyword, annotation, or name).
            if token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT, JavaParser.AT, JavaParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 1)
                self.state = 609
                self.typeType()
            # Alt 2: wildcard '?', optionally bounded.
            elif token in [JavaParser.QUESTION]:
                self.enterOuterAlt(localctx, 2)
                self.state = 610
                self.match(JavaParser.QUESTION)
                self.state = 613
                _la = self._input.LA(1)
                if _la==JavaParser.EXTENDS or _la==JavaParser.SUPER:
                    self.state = 611
                    _la = self._input.LA(1)
                    # Token-set match: accept either bound keyword.
                    if not(_la==JavaParser.EXTENDS or _la==JavaParser.SUPER):
                        self._errHandler.recoverInline(self)
                    else:
                        self.consume()
                    self.state = 612
                    self.typeType()
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class QualifiedNameListContext(ParserRuleContext):
        """Parse-tree node for the 'qualifiedNameList' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def qualifiedName(self, i:int=None):
            # i is None -> all names in the list; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.QualifiedNameContext)
            else:
                return self.getTypedRuleContext(JavaParser.QualifiedNameContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_qualifiedNameList
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterQualifiedNameList" ):
                listener.enterQualifiedNameList(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitQualifiedNameList" ):
                listener.exitQualifiedNameList(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitQualifiedNameList" ):
                return visitor.visitQualifiedNameList(self)
            else:
                return visitor.visitChildren(self)
    def qualifiedNameList(self):
        """Rule: qualifiedNameList : qualifiedName (',' qualifiedName)* ;"""
        localctx = JavaParser.QualifiedNameListContext(self, self._ctx, self.state)
        self.enterRule(localctx, 82, self.RULE_qualifiedNameList)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 617
            self.qualifiedName()
            self.state = 622
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # (',' qualifiedName)* — loop while the lookahead is a comma.
            while _la==JavaParser.COMMA:
                self.state = 618
                self.match(JavaParser.COMMA)
                self.state = 619
                self.qualifiedName()
                self.state = 624
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class FormalParametersContext(ParserRuleContext):
        """Parse-tree node for the 'formalParameters' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def formalParameterList(self):
            # None when the parameter list is empty: '()'.
            return self.getTypedRuleContext(JavaParser.FormalParameterListContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_formalParameters
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFormalParameters" ):
                listener.enterFormalParameters(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFormalParameters" ):
                listener.exitFormalParameters(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitFormalParameters" ):
                return visitor.visitFormalParameters(self)
            else:
                return visitor.visitChildren(self)
    def formalParameters(self):
        """Rule: formalParameters : '(' formalParameterList? ')' ;"""
        localctx = JavaParser.FormalParametersContext(self, self._ctx, self.state)
        self.enterRule(localctx, 84, self.RULE_formalParameters)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 625
            self.match(JavaParser.LPAREN)
            self.state = 627
            _la = self._input.LA(1)
            # Generated bitset test: is the lookahead in FIRST(formalParameterList)?
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.SHORT))) != 0) or _la==JavaParser.AT or _la==JavaParser.IDENTIFIER:
                self.state = 626
                self.formalParameterList()
            self.state = 629
            self.match(JavaParser.RPAREN)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class FormalParameterListContext(ParserRuleContext):
        """Parse-tree node for the 'formalParameterList' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def formalParameter(self, i:int=None):
            # i is None -> all fixed parameters; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.FormalParameterContext)
            else:
                return self.getTypedRuleContext(JavaParser.FormalParameterContext,i)
        def lastFormalParameter(self):
            # The trailing varargs parameter, if any.
            return self.getTypedRuleContext(JavaParser.LastFormalParameterContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_formalParameterList
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFormalParameterList" ):
                listener.enterFormalParameterList(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFormalParameterList" ):
                listener.exitFormalParameterList(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitFormalParameterList" ):
                return visitor.visitFormalParameterList(self)
            else:
                return visitor.visitChildren(self)
    def formalParameterList(self):
        """Rule: formalParameterList
               : formalParameter (',' formalParameter)* (',' lastFormalParameter)?
               | lastFormalParameter ;
        """
        localctx = JavaParser.FormalParameterListContext(self, self._ctx, self.state)
        self.enterRule(localctx, 86, self.RULE_formalParameterList)
        self._la = 0 # Token type
        try:
            self.state = 644
            self._errHandler.sync(self);
            # Decision 68 picks between a fixed-parameter list (alt 1) and a
            # lone varargs parameter (alt 2).
            la_ = self._interp.adaptivePredict(self._input,68,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 631
                self.formalParameter()
                self.state = 636
                self._errHandler.sync(self)
                # Decision 66: continue with ',' formalParameter, or stop so the
                # optional ',' lastFormalParameter tail below can match.
                _alt = self._interp.adaptivePredict(self._input,66,self._ctx)
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 632
                        self.match(JavaParser.COMMA)
                        self.state = 633
                        self.formalParameter()
                    self.state = 638
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,66,self._ctx)
                self.state = 641
                _la = self._input.LA(1)
                if _la==JavaParser.COMMA:
                    self.state = 639
                    self.match(JavaParser.COMMA)
                    self.state = 640
                    self.lastFormalParameter()
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 643
                self.lastFormalParameter()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class FormalParameterContext(ParserRuleContext):
        """Parse-tree node for the 'formalParameter' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def typeType(self):
            return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)
        def variableDeclaratorId(self):
            return self.getTypedRuleContext(JavaParser.VariableDeclaratorIdContext,0)
        def variableModifier(self, i:int=None):
            # Leading modifiers (e.g. 'final', annotations); i is None -> all.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
            else:
                return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_formalParameter
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFormalParameter" ):
                listener.enterFormalParameter(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFormalParameter" ):
                listener.exitFormalParameter(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitFormalParameter" ):
                return visitor.visitFormalParameter(self)
            else:
                return visitor.visitChildren(self)
    def formalParameter(self):
        """Rule: formalParameter : variableModifier* typeType variableDeclaratorId ;"""
        localctx = JavaParser.FormalParameterContext(self, self._ctx, self.state)
        self.enterRule(localctx, 88, self.RULE_formalParameter)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 649
            self._errHandler.sync(self)
            # variableModifier* — decision 69 drives the closure loop.
            _alt = self._interp.adaptivePredict(self._input,69,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 646
                    self.variableModifier()
                self.state = 651
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,69,self._ctx)
            self.state = 652
            self.typeType()
            self.state = 653
            self.variableDeclaratorId()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class LastFormalParameterContext(ParserRuleContext):
        """Parse-tree node for the 'lastFormalParameter' (varargs) rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def typeType(self):
            return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)
        def variableDeclaratorId(self):
            return self.getTypedRuleContext(JavaParser.VariableDeclaratorIdContext,0)
        def variableModifier(self, i:int=None):
            # Leading modifiers; i is None -> all of them.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
            else:
                return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_lastFormalParameter
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterLastFormalParameter" ):
                listener.enterLastFormalParameter(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitLastFormalParameter" ):
                listener.exitLastFormalParameter(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitLastFormalParameter" ):
                return visitor.visitLastFormalParameter(self)
            else:
                return visitor.visitChildren(self)
    def lastFormalParameter(self):
        """Rule: lastFormalParameter : variableModifier* typeType '...' variableDeclaratorId ;"""
        localctx = JavaParser.LastFormalParameterContext(self, self._ctx, self.state)
        self.enterRule(localctx, 90, self.RULE_lastFormalParameter)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 658
            self._errHandler.sync(self)
            # variableModifier* — decision 70 drives the closure loop.
            _alt = self._interp.adaptivePredict(self._input,70,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 655
                    self.variableModifier()
                self.state = 660
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,70,self._ctx)
            self.state = 661
            self.typeType()
            self.state = 662
            # '...' marks the varargs parameter.
            self.match(JavaParser.ELLIPSIS)
            self.state = 663
            self.variableDeclaratorId()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class QualifiedNameContext(ParserRuleContext):
        """Parse-tree node for the 'qualifiedName' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def IDENTIFIER(self, i:int=None):
            # i is None -> all dotted-name segments; otherwise the i-th one.
            if i is None:
                return self.getTokens(JavaParser.IDENTIFIER)
            else:
                return self.getToken(JavaParser.IDENTIFIER, i)
        def getRuleIndex(self):
            return JavaParser.RULE_qualifiedName
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterQualifiedName" ):
                listener.enterQualifiedName(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitQualifiedName" ):
                listener.exitQualifiedName(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitQualifiedName" ):
                return visitor.visitQualifiedName(self)
            else:
                return visitor.visitChildren(self)
    def qualifiedName(self):
        """Rule: qualifiedName : IDENTIFIER ('.' IDENTIFIER)* ;"""
        localctx = JavaParser.QualifiedNameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 92, self.RULE_qualifiedName)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 665
            self.match(JavaParser.IDENTIFIER)
            self.state = 670
            self._errHandler.sync(self)
            # ('.' IDENTIFIER)* — decision 71 decides whether the name continues.
            _alt = self._interp.adaptivePredict(self._input,71,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 666
                    self.match(JavaParser.DOT)
                    self.state = 667
                    self.match(JavaParser.IDENTIFIER)
                self.state = 672
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,71,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class LiteralContext(ParserRuleContext):
        """Parse-tree node for the 'literal' rule (ANTLR-generated).

        Exactly one of the accessors below returns a non-None child,
        depending on which literal alternative was matched.
        """
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def integerLiteral(self):
            return self.getTypedRuleContext(JavaParser.IntegerLiteralContext,0)
        def floatLiteral(self):
            return self.getTypedRuleContext(JavaParser.FloatLiteralContext,0)
        def CHAR_LITERAL(self):
            return self.getToken(JavaParser.CHAR_LITERAL, 0)
        def STRING_LITERAL(self):
            return self.getToken(JavaParser.STRING_LITERAL, 0)
        def BOOL_LITERAL(self):
            return self.getToken(JavaParser.BOOL_LITERAL, 0)
        def NULL_LITERAL(self):
            return self.getToken(JavaParser.NULL_LITERAL, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_literal
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterLiteral" ):
                listener.enterLiteral(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitLiteral" ):
                listener.exitLiteral(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitLiteral" ):
                return visitor.visitLiteral(self)
            else:
                return visitor.visitChildren(self)
    def literal(self):
        """Rule: literal
               : integerLiteral | floatLiteral | CHAR_LITERAL | STRING_LITERAL
               | BOOL_LITERAL | NULL_LITERAL ;
        """
        localctx = JavaParser.LiteralContext(self, self._ctx, self.state)
        self.enterRule(localctx, 94, self.RULE_literal)
        try:
            self.state = 679
            # Alternative chosen directly from one token of lookahead.
            token = self._input.LA(1)
            if token in [JavaParser.DECIMAL_LITERAL, JavaParser.HEX_LITERAL, JavaParser.OCT_LITERAL, JavaParser.BINARY_LITERAL]:
                self.enterOuterAlt(localctx, 1)
                self.state = 673
                self.integerLiteral()
            elif token in [JavaParser.FLOAT_LITERAL, JavaParser.HEX_FLOAT_LITERAL]:
                self.enterOuterAlt(localctx, 2)
                self.state = 674
                self.floatLiteral()
            elif token in [JavaParser.CHAR_LITERAL]:
                self.enterOuterAlt(localctx, 3)
                self.state = 675
                self.match(JavaParser.CHAR_LITERAL)
            elif token in [JavaParser.STRING_LITERAL]:
                self.enterOuterAlt(localctx, 4)
                self.state = 676
                self.match(JavaParser.STRING_LITERAL)
            elif token in [JavaParser.BOOL_LITERAL]:
                self.enterOuterAlt(localctx, 5)
                self.state = 677
                self.match(JavaParser.BOOL_LITERAL)
            elif token in [JavaParser.NULL_LITERAL]:
                self.enterOuterAlt(localctx, 6)
                self.state = 678
                self.match(JavaParser.NULL_LITERAL)
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class IntegerLiteralContext(ParserRuleContext):
        """Parse-tree node for the 'integerLiteral' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def DECIMAL_LITERAL(self):
            return self.getToken(JavaParser.DECIMAL_LITERAL, 0)
        def HEX_LITERAL(self):
            return self.getToken(JavaParser.HEX_LITERAL, 0)
        def OCT_LITERAL(self):
            return self.getToken(JavaParser.OCT_LITERAL, 0)
        def BINARY_LITERAL(self):
            return self.getToken(JavaParser.BINARY_LITERAL, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_integerLiteral
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterIntegerLiteral" ):
                listener.enterIntegerLiteral(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitIntegerLiteral" ):
                listener.exitIntegerLiteral(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitIntegerLiteral" ):
                return visitor.visitIntegerLiteral(self)
            else:
                return visitor.visitChildren(self)
    def integerLiteral(self):
        """Rule: integerLiteral : DECIMAL_LITERAL | HEX_LITERAL | OCT_LITERAL | BINARY_LITERAL ;"""
        localctx = JavaParser.IntegerLiteralContext(self, self._ctx, self.state)
        self.enterRule(localctx, 96, self.RULE_integerLiteral)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 681
            _la = self._input.LA(1)
            # Token-set match via generated bitset; recover inline on mismatch.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class FloatLiteralContext(ParserRuleContext):
        """Parse-tree node for the 'floatLiteral' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def FLOAT_LITERAL(self):
            return self.getToken(JavaParser.FLOAT_LITERAL, 0)
        def HEX_FLOAT_LITERAL(self):
            return self.getToken(JavaParser.HEX_FLOAT_LITERAL, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_floatLiteral
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFloatLiteral" ):
                listener.enterFloatLiteral(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFloatLiteral" ):
                listener.exitFloatLiteral(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitFloatLiteral" ):
                return visitor.visitFloatLiteral(self)
            else:
                return visitor.visitChildren(self)
    def floatLiteral(self):
        """Rule: floatLiteral : FLOAT_LITERAL | HEX_FLOAT_LITERAL ;"""
        localctx = JavaParser.FloatLiteralContext(self, self._ctx, self.state)
        self.enterRule(localctx, 98, self.RULE_floatLiteral)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 683
            _la = self._input.LA(1)
            # Token-set match: accept either float-literal token kind.
            if not(_la==JavaParser.FLOAT_LITERAL or _la==JavaParser.HEX_FLOAT_LITERAL):
                self._errHandler.recoverInline(self)
            else:
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class AnnotationContext(ParserRuleContext):
        """Parse-tree node for the 'annotation' rule (ANTLR-generated)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def qualifiedName(self):
            # The annotation type name following '@'.
            return self.getTypedRuleContext(JavaParser.QualifiedNameContext,0)
        def elementValuePairs(self):
            # Present for '@Name(k1=v1, k2=v2)' form.
            return self.getTypedRuleContext(JavaParser.ElementValuePairsContext,0)
        def elementValue(self):
            # Present for the single-value '@Name(v)' form.
            return self.getTypedRuleContext(JavaParser.ElementValueContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_annotation
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterAnnotation" ):
                listener.enterAnnotation(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitAnnotation" ):
                listener.exitAnnotation(self)
        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitAnnotation" ):
                return visitor.visitAnnotation(self)
            else:
                return visitor.visitChildren(self)
    def annotation(self):
        """Rule: annotation : '@' qualifiedName ('(' (elementValuePairs | elementValue)? ')')? ;"""
        localctx = JavaParser.AnnotationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 100, self.RULE_annotation)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 685
            self.match(JavaParser.AT)
            self.state = 686
            self.qualifiedName()
            self.state = 693
            _la = self._input.LA(1)
            # Optional argument list in parentheses.
            if _la==JavaParser.LPAREN:
                self.state = 687
                self.match(JavaParser.LPAREN)
                self.state = 690
                self._errHandler.sync(self);
                # Decision 73: named pairs (alt 1), a single value (alt 2),
                # or neither for the empty '()' form.
                la_ = self._interp.adaptivePredict(self._input,73,self._ctx)
                if la_ == 1:
                    self.state = 688
                    self.elementValuePairs()
                elif la_ == 2:
                    self.state = 689
                    self.elementValue()
                self.state = 692
                self.match(JavaParser.RPAREN)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ElementValuePairsContext(ParserRuleContext):
    """Parse-tree node for the `elementValuePairs` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def elementValuePair(self, i:int=None):
        # With i=None return all pair children; otherwise the i-th pair.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ElementValuePairContext)
        else:
            return self.getTypedRuleContext(JavaParser.ElementValuePairContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_elementValuePairs

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElementValuePairs" ):
            listener.enterElementValuePairs(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElementValuePairs" ):
            listener.exitElementValuePairs(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitElementValuePairs" ):
            return visitor.visitElementValuePairs(self)
        else:
            return visitor.visitChildren(self)
def elementValuePairs(self):
    """Parse rule: elementValuePairs : elementValuePair (',' elementValuePair)* .

    ANTLR-generated state-machine code; do not hand-edit the state numbers.
    """
    localctx = JavaParser.ElementValuePairsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 102, self.RULE_elementValuePairs)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 695
        self.elementValuePair()
        self.state = 700
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Consume ',' elementValuePair as long as a comma follows.
        while _la==JavaParser.COMMA:
            self.state = 696
            self.match(JavaParser.COMMA)
            self.state = 697
            self.elementValuePair()
            self.state = 702
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElementValuePairContext(ParserRuleContext):
    """Parse-tree node for the `elementValuePair` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        # The pair's key (left-hand side of '=').
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def elementValue(self):
        # The pair's value (right-hand side of '=').
        return self.getTypedRuleContext(JavaParser.ElementValueContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_elementValuePair

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElementValuePair" ):
            listener.enterElementValuePair(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElementValuePair" ):
            listener.exitElementValuePair(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitElementValuePair" ):
            return visitor.visitElementValuePair(self)
        else:
            return visitor.visitChildren(self)
def elementValuePair(self):
    """Parse rule: elementValuePair : IDENTIFIER '=' elementValue . (ANTLR-generated)"""
    localctx = JavaParser.ElementValuePairContext(self, self._ctx, self.state)
    self.enterRule(localctx, 104, self.RULE_elementValuePair)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 703
        self.match(JavaParser.IDENTIFIER)
        self.state = 704
        self.match(JavaParser.ASSIGN)
        self.state = 705
        self.elementValue()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElementValueContext(ParserRuleContext):
    """Parse-tree node for the `elementValue` grammar rule (ANTLR-generated).

    Exactly one of the three child accessors below is non-None, depending on
    which alternative was parsed.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self):
        return self.getTypedRuleContext(JavaParser.ExpressionContext,0)

    def annotation(self):
        return self.getTypedRuleContext(JavaParser.AnnotationContext,0)

    def elementValueArrayInitializer(self):
        return self.getTypedRuleContext(JavaParser.ElementValueArrayInitializerContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_elementValue

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElementValue" ):
            listener.enterElementValue(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElementValue" ):
            listener.exitElementValue(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitElementValue" ):
            return visitor.visitElementValue(self)
        else:
            return visitor.visitChildren(self)
def elementValue(self):
    """Parse rule: elementValue : expression | annotation | elementValueArrayInitializer .

    ANTLR-generated; adaptive decision 76 selects among the alternatives.
    """
    localctx = JavaParser.ElementValueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 106, self.RULE_elementValue)
    try:
        self.state = 710
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,76,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 707
            self.expression(0)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 708
            self.annotation()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 709
            self.elementValueArrayInitializer()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElementValueArrayInitializerContext(ParserRuleContext):
    """Parse-tree node for the `elementValueArrayInitializer` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def elementValue(self, i:int=None):
        # With i=None return all element values; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ElementValueContext)
        else:
            return self.getTypedRuleContext(JavaParser.ElementValueContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_elementValueArrayInitializer

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElementValueArrayInitializer" ):
            listener.enterElementValueArrayInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElementValueArrayInitializer" ):
            listener.exitElementValueArrayInitializer(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitElementValueArrayInitializer" ):
            return visitor.visitElementValueArrayInitializer(self)
        else:
            return visitor.visitChildren(self)
def elementValueArrayInitializer(self):
    """Parse rule: elementValueArrayInitializer : '{' (elementValue (',' elementValue)*)? ','? '}' .

    ANTLR-generated; the long bitmask condition is the token-set membership
    test for "a token that can start an elementValue".
    """
    localctx = JavaParser.ElementValueArrayInitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 108, self.RULE_elementValueArrayInitializer)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 712
        self.match(JavaParser.LBRACE)
        self.state = 721
        _la = self._input.LA(1)
        # Optional element list: only entered if the next token can begin an elementValue.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
            self.state = 713
            self.elementValue()
            self.state = 718
            self._errHandler.sync(self)
            # Decision 77 distinguishes a separator comma from the optional trailing comma.
            _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 714
                    self.match(JavaParser.COMMA)
                    self.state = 715
                    self.elementValue()
                self.state = 720
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
        self.state = 724
        _la = self._input.LA(1)
        # Optional trailing comma before the closing brace.
        if _la==JavaParser.COMMA:
            self.state = 723
            self.match(JavaParser.COMMA)
        self.state = 726
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationTypeDeclarationContext(ParserRuleContext):
    """Parse-tree node for the `annotationTypeDeclaration` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def INTERFACE(self):
        return self.getToken(JavaParser.INTERFACE, 0)

    def IDENTIFIER(self):
        # The declared annotation type's name.
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def annotationTypeBody(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeBodyContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationTypeDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationTypeDeclaration" ):
            listener.enterAnnotationTypeDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationTypeDeclaration" ):
            listener.exitAnnotationTypeDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationTypeDeclaration" ):
            return visitor.visitAnnotationTypeDeclaration(self)
        else:
            return visitor.visitChildren(self)
def annotationTypeDeclaration(self):
    """Parse rule: annotationTypeDeclaration : '@' INTERFACE IDENTIFIER annotationTypeBody . (ANTLR-generated)"""
    localctx = JavaParser.AnnotationTypeDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 110, self.RULE_annotationTypeDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 728
        self.match(JavaParser.AT)
        self.state = 729
        self.match(JavaParser.INTERFACE)
        self.state = 730
        self.match(JavaParser.IDENTIFIER)
        self.state = 731
        self.annotationTypeBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationTypeBodyContext(ParserRuleContext):
    """Parse-tree node for the `annotationTypeBody` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def annotationTypeElementDeclaration(self, i:int=None):
        # With i=None return all member declarations; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.AnnotationTypeElementDeclarationContext)
        else:
            return self.getTypedRuleContext(JavaParser.AnnotationTypeElementDeclarationContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationTypeBody

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationTypeBody" ):
            listener.enterAnnotationTypeBody(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationTypeBody" ):
            listener.exitAnnotationTypeBody(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationTypeBody" ):
            return visitor.visitAnnotationTypeBody(self)
        else:
            return visitor.visitChildren(self)
def annotationTypeBody(self):
    """Parse rule: annotationTypeBody : '{' annotationTypeElementDeclaration* '}' .

    ANTLR-generated; the bitmask condition tests whether the lookahead token
    can start an annotationTypeElementDeclaration.
    """
    localctx = JavaParser.AnnotationTypeBodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 112, self.RULE_annotationTypeBody)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 733
        self.match(JavaParser.LBRACE)
        self.state = 737
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop over member declarations until a token outside the start set ('}').
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.ENUM) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NATIVE) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.TRANSIENT) | (1 << JavaParser.VOLATILE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0):
            self.state = 734
            self.annotationTypeElementDeclaration()
            self.state = 739
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 740
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationTypeElementDeclarationContext(ParserRuleContext):
    """Parse-tree node for the `annotationTypeElementDeclaration` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def annotationTypeElementRest(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeElementRestContext,0)

    def modifier(self, i:int=None):
        # With i=None return all leading modifiers; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ModifierContext)
        else:
            return self.getTypedRuleContext(JavaParser.ModifierContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationTypeElementDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationTypeElementDeclaration" ):
            listener.enterAnnotationTypeElementDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationTypeElementDeclaration" ):
            listener.exitAnnotationTypeElementDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationTypeElementDeclaration" ):
            return visitor.visitAnnotationTypeElementDeclaration(self)
        else:
            return visitor.visitChildren(self)
def annotationTypeElementDeclaration(self):
    """Parse rule: annotationTypeElementDeclaration : modifier* annotationTypeElementRest | ';' .

    ANTLR-generated; a bare ';' is accepted as an empty member declaration.
    """
    localctx = JavaParser.AnnotationTypeElementDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 114, self.RULE_annotationTypeElementDeclaration)
    try:
        self.state = 750
        token = self._input.LA(1)
        if token in [JavaParser.ABSTRACT, JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.CLASS, JavaParser.DOUBLE, JavaParser.ENUM, JavaParser.FINAL, JavaParser.FLOAT, JavaParser.INT, JavaParser.INTERFACE, JavaParser.LONG, JavaParser.NATIVE, JavaParser.PRIVATE, JavaParser.PROTECTED, JavaParser.PUBLIC, JavaParser.SHORT, JavaParser.STATIC, JavaParser.STRICTFP, JavaParser.SYNCHRONIZED, JavaParser.TRANSIENT, JavaParser.VOLATILE, JavaParser.AT, JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 745
            self._errHandler.sync(self)
            # Decision 81: greedily consume modifiers before the element rest.
            _alt = self._interp.adaptivePredict(self._input,81,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 742
                    self.modifier()
                self.state = 747
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,81,self._ctx)
            self.state = 748
            self.annotationTypeElementRest()
        elif token in [JavaParser.SEMI]:
            self.enterOuterAlt(localctx, 2)
            self.state = 749
            self.match(JavaParser.SEMI)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationTypeElementRestContext(ParserRuleContext):
    """Parse-tree node for the `annotationTypeElementRest` grammar rule (ANTLR-generated).

    Only the accessors matching the parsed alternative return non-None children.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)

    def annotationMethodOrConstantRest(self):
        return self.getTypedRuleContext(JavaParser.AnnotationMethodOrConstantRestContext,0)

    def classDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ClassDeclarationContext,0)

    def interfaceDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceDeclarationContext,0)

    def enumDeclaration(self):
        return self.getTypedRuleContext(JavaParser.EnumDeclarationContext,0)

    def annotationTypeDeclaration(self):
        return self.getTypedRuleContext(JavaParser.AnnotationTypeDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationTypeElementRest

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationTypeElementRest" ):
            listener.enterAnnotationTypeElementRest(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationTypeElementRest" ):
            listener.exitAnnotationTypeElementRest(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationTypeElementRest" ):
            return visitor.visitAnnotationTypeElementRest(self)
        else:
            return visitor.visitChildren(self)
def annotationTypeElementRest(self):
    """Parse rule: annotationTypeElementRest.

    Alternatives (selected by adaptive decision 87):
      1. typeType annotationMethodOrConstantRest ';'
      2. classDeclaration ';'?
      3. interfaceDeclaration ';'?
      4. enumDeclaration ';'?
      5. annotationTypeDeclaration ';'?
    Each nested decision (83-86) handles that alternative's optional ';'.
    ANTLR-generated; state numbers must not be altered.
    """
    localctx = JavaParser.AnnotationTypeElementRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 116, self.RULE_annotationTypeElementRest)
    try:
        self.state = 772
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,87,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 752
            self.typeType()
            self.state = 753
            self.annotationMethodOrConstantRest()
            self.state = 754
            self.match(JavaParser.SEMI)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 756
            self.classDeclaration()
            self.state = 758
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,83,self._ctx)
            if la_ == 1:
                self.state = 757
                self.match(JavaParser.SEMI)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 760
            self.interfaceDeclaration()
            self.state = 762
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,84,self._ctx)
            if la_ == 1:
                self.state = 761
                self.match(JavaParser.SEMI)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 764
            self.enumDeclaration()
            self.state = 766
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,85,self._ctx)
            if la_ == 1:
                self.state = 765
                self.match(JavaParser.SEMI)
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 768
            self.annotationTypeDeclaration()
            self.state = 770
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,86,self._ctx)
            if la_ == 1:
                self.state = 769
                self.match(JavaParser.SEMI)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationMethodOrConstantRestContext(ParserRuleContext):
    """Parse-tree node for the `annotationMethodOrConstantRest` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def annotationMethodRest(self):
        return self.getTypedRuleContext(JavaParser.AnnotationMethodRestContext,0)

    def annotationConstantRest(self):
        return self.getTypedRuleContext(JavaParser.AnnotationConstantRestContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationMethodOrConstantRest

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationMethodOrConstantRest" ):
            listener.enterAnnotationMethodOrConstantRest(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationMethodOrConstantRest" ):
            listener.exitAnnotationMethodOrConstantRest(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationMethodOrConstantRest" ):
            return visitor.visitAnnotationMethodOrConstantRest(self)
        else:
            return visitor.visitChildren(self)
def annotationMethodOrConstantRest(self):
    """Parse rule: annotationMethodOrConstantRest : annotationMethodRest | annotationConstantRest .

    ANTLR-generated; adaptive decision 88 selects the alternative.
    """
    localctx = JavaParser.AnnotationMethodOrConstantRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 118, self.RULE_annotationMethodOrConstantRest)
    try:
        self.state = 776
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,88,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 774
            self.annotationMethodRest()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 775
            self.annotationConstantRest()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationMethodRestContext(ParserRuleContext):
    """Parse-tree node for the `annotationMethodRest` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        # The annotation method's name.
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def defaultValue(self):
        # Optional `default ...` clause; None when absent.
        return self.getTypedRuleContext(JavaParser.DefaultValueContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationMethodRest

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationMethodRest" ):
            listener.enterAnnotationMethodRest(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationMethodRest" ):
            listener.exitAnnotationMethodRest(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationMethodRest" ):
            return visitor.visitAnnotationMethodRest(self)
        else:
            return visitor.visitChildren(self)
def annotationMethodRest(self):
    """Parse rule: annotationMethodRest : IDENTIFIER '(' ')' defaultValue? . (ANTLR-generated)"""
    localctx = JavaParser.AnnotationMethodRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 120, self.RULE_annotationMethodRest)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 778
        self.match(JavaParser.IDENTIFIER)
        self.state = 779
        self.match(JavaParser.LPAREN)
        self.state = 780
        self.match(JavaParser.RPAREN)
        self.state = 782
        _la = self._input.LA(1)
        # Optional `default` clause.
        if _la==JavaParser.DEFAULT:
            self.state = 781
            self.defaultValue()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AnnotationConstantRestContext(ParserRuleContext):
    """Parse-tree node for the `annotationConstantRest` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def variableDeclarators(self):
        return self.getTypedRuleContext(JavaParser.VariableDeclaratorsContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_annotationConstantRest

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAnnotationConstantRest" ):
            listener.enterAnnotationConstantRest(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAnnotationConstantRest" ):
            listener.exitAnnotationConstantRest(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAnnotationConstantRest" ):
            return visitor.visitAnnotationConstantRest(self)
        else:
            return visitor.visitChildren(self)
def annotationConstantRest(self):
    """Parse rule: annotationConstantRest : variableDeclarators . (ANTLR-generated)"""
    localctx = JavaParser.AnnotationConstantRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 122, self.RULE_annotationConstantRest)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 784
        self.variableDeclarators()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DefaultValueContext(ParserRuleContext):
    """Parse-tree node for the `defaultValue` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def DEFAULT(self):
        return self.getToken(JavaParser.DEFAULT, 0)

    def elementValue(self):
        return self.getTypedRuleContext(JavaParser.ElementValueContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_defaultValue

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDefaultValue" ):
            listener.enterDefaultValue(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDefaultValue" ):
            listener.exitDefaultValue(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitDefaultValue" ):
            return visitor.visitDefaultValue(self)
        else:
            return visitor.visitChildren(self)
def defaultValue(self):
    """Parse rule: defaultValue : DEFAULT elementValue . (ANTLR-generated)"""
    localctx = JavaParser.DefaultValueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 124, self.RULE_defaultValue)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 786
        self.match(JavaParser.DEFAULT)
        self.state = 787
        self.elementValue()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BlockContext(ParserRuleContext):
    """Parse-tree node for the `block` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def blockStatement(self, i:int=None):
        # With i=None return all statements in the block; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.BlockStatementContext)
        else:
            return self.getTypedRuleContext(JavaParser.BlockStatementContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_block

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBlock" ):
            listener.enterBlock(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBlock" ):
            listener.exitBlock(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBlock" ):
            return visitor.visitBlock(self)
        else:
            return visitor.visitChildren(self)
def block(self):
    """Parse rule: block : '{' blockStatement* '}' .

    ANTLR-generated; the bitmask condition tests whether the lookahead token
    can start a blockStatement.
    """
    localctx = JavaParser.BlockContext(self, self._ctx, self.state)
    self.enterRule(localctx, 126, self.RULE_block)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 789
        self.match(JavaParser.LBRACE)
        self.state = 793
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop over statements until a token outside the start set ('}').
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.ASSERT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BREAK) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.CONTINUE) | (1 << JavaParser.DO) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.FOR) | (1 << JavaParser.IF) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.RETURN) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SUPER) | (1 << JavaParser.SWITCH) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.THIS) | (1 << JavaParser.THROW) | (1 << JavaParser.TRY) | (1 << JavaParser.VOID) | (1 << JavaParser.WHILE) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.LT - 67)) | (1 << (JavaParser.BANG - 67)) | (1 << (JavaParser.TILDE - 67)) | (1 << (JavaParser.INC - 67)) | (1 << (JavaParser.DEC - 67)) | (1 << (JavaParser.ADD - 67)) | (1 << (JavaParser.SUB - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0):
            self.state = 790
            self.blockStatement()
            self.state = 795
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 796
        self.match(JavaParser.RBRACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BlockStatementContext(ParserRuleContext):
    """Parse-tree node for the `blockStatement` grammar rule (ANTLR-generated).

    Exactly one of the three child accessors below is non-None.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def localVariableDeclaration(self):
        return self.getTypedRuleContext(JavaParser.LocalVariableDeclarationContext,0)

    def statement(self):
        return self.getTypedRuleContext(JavaParser.StatementContext,0)

    def localTypeDeclaration(self):
        return self.getTypedRuleContext(JavaParser.LocalTypeDeclarationContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_blockStatement

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBlockStatement" ):
            listener.enterBlockStatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBlockStatement" ):
            listener.exitBlockStatement(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitBlockStatement" ):
            return visitor.visitBlockStatement(self)
        else:
            return visitor.visitChildren(self)
def blockStatement(self):
    """Parse rule: blockStatement : localVariableDeclaration ';' | statement | localTypeDeclaration .

    ANTLR-generated; adaptive decision 91 disambiguates declarations from
    statements (e.g. `A b;` vs `a.b();`).
    """
    localctx = JavaParser.BlockStatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 128, self.RULE_blockStatement)
    try:
        self.state = 803
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,91,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 798
            self.localVariableDeclaration()
            self.state = 799
            self.match(JavaParser.SEMI)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 801
            self.statement()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 802
            self.localTypeDeclaration()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LocalVariableDeclarationContext(ParserRuleContext):
    """Parse-tree node for the `localVariableDeclaration` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)

    def variableDeclarators(self):
        return self.getTypedRuleContext(JavaParser.VariableDeclaratorsContext,0)

    def variableModifier(self, i:int=None):
        # With i=None return all leading modifiers (e.g. `final`); otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
        else:
            return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_localVariableDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLocalVariableDeclaration" ):
            listener.enterLocalVariableDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLocalVariableDeclaration" ):
            listener.exitLocalVariableDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLocalVariableDeclaration" ):
            return visitor.visitLocalVariableDeclaration(self)
        else:
            return visitor.visitChildren(self)
def localVariableDeclaration(self):
    """Parse rule: localVariableDeclaration : variableModifier* typeType variableDeclarators .

    ANTLR-generated; decision 92 greedily consumes the modifier list.
    """
    localctx = JavaParser.LocalVariableDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 130, self.RULE_localVariableDeclaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 808
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,92,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 805
                self.variableModifier()
            self.state = 810
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,92,self._ctx)
        self.state = 811
        self.typeType()
        self.state = 812
        self.variableDeclarators()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LocalTypeDeclarationContext(ParserRuleContext):
    """Parse-tree node for the `localTypeDeclaration` grammar rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classDeclaration(self):
        return self.getTypedRuleContext(JavaParser.ClassDeclarationContext,0)

    def interfaceDeclaration(self):
        return self.getTypedRuleContext(JavaParser.InterfaceDeclarationContext,0)

    def classOrInterfaceModifier(self, i:int=None):
        # With i=None return all leading modifiers; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ClassOrInterfaceModifierContext)
        else:
            return self.getTypedRuleContext(JavaParser.ClassOrInterfaceModifierContext,i)

    def getRuleIndex(self):
        return JavaParser.RULE_localTypeDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLocalTypeDeclaration" ):
            listener.enterLocalTypeDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLocalTypeDeclaration" ):
            listener.exitLocalTypeDeclaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitLocalTypeDeclaration" ):
            return visitor.visitLocalTypeDeclaration(self)
        else:
            return visitor.visitChildren(self)
    def localTypeDeclaration(self):
        """Parse rule 132: localTypeDeclaration
        : classOrInterfaceModifier* (classDeclaration | interfaceDeclaration)
        | ';'
        """
        localctx = JavaParser.LocalTypeDeclarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 132, self.RULE_localTypeDeclaration)
        self._la = 0 # Token type
        try:
            self.state = 825
            token = self._input.LA(1)
            # Alt 1: a modifier list followed by a class or interface declaration.
            if token in [JavaParser.ABSTRACT, JavaParser.CLASS, JavaParser.FINAL, JavaParser.INTERFACE, JavaParser.PRIVATE, JavaParser.PROTECTED, JavaParser.PUBLIC, JavaParser.STATIC, JavaParser.STRICTFP, JavaParser.AT]:
                self.enterOuterAlt(localctx, 1)
                self.state = 817
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # classOrInterfaceModifier* — bitset tests tokens 0..63; AT is checked separately.
                while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.FINAL) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP))) != 0) or _la==JavaParser.AT:
                    self.state = 814
                    self.classOrInterfaceModifier()
                    self.state = 819
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                self.state = 822
                token = self._input.LA(1)
                if token in [JavaParser.CLASS]:
                    self.state = 820
                    self.classDeclaration()
                elif token in [JavaParser.INTERFACE]:
                    self.state = 821
                    self.interfaceDeclaration()
                else:
                    raise NoViableAltException(self)
            # Alt 2: an empty declaration (just ';').
            elif token in [JavaParser.SEMI]:
                self.enterOuterAlt(localctx, 2)
                self.state = 824
                self.match(JavaParser.SEMI)
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class StatementContext(ParserRuleContext):
        """Parse-tree node for the statement rule (all 17 Java statement forms:
        block, assert, if, for, while, do, try, switch, synchronized, return,
        throw, break, continue, empty, expression statement, labeled statement)."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            # Labeled grammar elements, filled in by the matching alternative.
            self.blockLabel = None # BlockContext
            self.statementExpression = None # ExpressionContext
            self.identifierLabel = None # Token
        def block(self):
            return self.getTypedRuleContext(JavaParser.BlockContext,0)
        def ASSERT(self):
            return self.getToken(JavaParser.ASSERT, 0)
        def expression(self, i:int=None):
            # With no index: all expression children; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.ExpressionContext)
            else:
                return self.getTypedRuleContext(JavaParser.ExpressionContext,i)
        def IF(self):
            return self.getToken(JavaParser.IF, 0)
        def parExpression(self):
            return self.getTypedRuleContext(JavaParser.ParExpressionContext,0)
        def statement(self, i:int=None):
            # Nested statements (e.g. if/else branches, loop bodies).
            if i is None:
                return self.getTypedRuleContexts(JavaParser.StatementContext)
            else:
                return self.getTypedRuleContext(JavaParser.StatementContext,i)
        def ELSE(self):
            return self.getToken(JavaParser.ELSE, 0)
        def FOR(self):
            return self.getToken(JavaParser.FOR, 0)
        def forControl(self):
            return self.getTypedRuleContext(JavaParser.ForControlContext,0)
        def WHILE(self):
            return self.getToken(JavaParser.WHILE, 0)
        def DO(self):
            return self.getToken(JavaParser.DO, 0)
        def TRY(self):
            return self.getToken(JavaParser.TRY, 0)
        def finallyBlock(self):
            return self.getTypedRuleContext(JavaParser.FinallyBlockContext,0)
        def catchClause(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(JavaParser.CatchClauseContext)
            else:
                return self.getTypedRuleContext(JavaParser.CatchClauseContext,i)
        def resourceSpecification(self):
            return self.getTypedRuleContext(JavaParser.ResourceSpecificationContext,0)
        def SWITCH(self):
            return self.getToken(JavaParser.SWITCH, 0)
        def switchBlockStatementGroup(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(JavaParser.SwitchBlockStatementGroupContext)
            else:
                return self.getTypedRuleContext(JavaParser.SwitchBlockStatementGroupContext,i)
        def switchLabel(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(JavaParser.SwitchLabelContext)
            else:
                return self.getTypedRuleContext(JavaParser.SwitchLabelContext,i)
        def SYNCHRONIZED(self):
            return self.getToken(JavaParser.SYNCHRONIZED, 0)
        def RETURN(self):
            return self.getToken(JavaParser.RETURN, 0)
        def THROW(self):
            return self.getToken(JavaParser.THROW, 0)
        def BREAK(self):
            return self.getToken(JavaParser.BREAK, 0)
        def IDENTIFIER(self):
            return self.getToken(JavaParser.IDENTIFIER, 0)
        def CONTINUE(self):
            return self.getToken(JavaParser.CONTINUE, 0)
        def SEMI(self):
            return self.getToken(JavaParser.SEMI, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_statement
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterStatement" ):
                listener.enterStatement(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitStatement" ):
                listener.exitStatement(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitStatement" ):
                return visitor.visitStatement(self)
            else:
                return visitor.visitChildren(self)
    def statement(self):
        """Parse rule 134: statement — 17 alternatives chosen up front by full
        adaptive LL(*) prediction (ATN decision 108), since many Java statements
        share prefixes (e.g. 'try' with and without resources, expression vs.
        labeled statement).
        """
        localctx = JavaParser.StatementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 134, self.RULE_statement)
        self._la = 0 # Token type
        try:
            self.state = 931
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,108,self._ctx)
            # Alt 1: '{' ... '}' block.
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 827
                localctx.blockLabel = self.block()
                pass
            # Alt 2: ASSERT expression (':' expression)? ';'
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 828
                self.match(JavaParser.ASSERT)
                self.state = 829
                self.expression(0)
                self.state = 832
                _la = self._input.LA(1)
                if _la==JavaParser.COLON:
                    self.state = 830
                    self.match(JavaParser.COLON)
                    self.state = 831
                    self.expression(0)
                self.state = 834
                self.match(JavaParser.SEMI)
                pass
            # Alt 3: IF parExpression statement (ELSE statement)?
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 836
                self.match(JavaParser.IF)
                self.state = 837
                self.parExpression()
                self.state = 838
                self.statement()
                self.state = 841
                self._errHandler.sync(self);
                # Decision 97 resolves the dangling-else: ELSE binds to the nearest IF.
                la_ = self._interp.adaptivePredict(self._input,97,self._ctx)
                if la_ == 1:
                    self.state = 839
                    self.match(JavaParser.ELSE)
                    self.state = 840
                    self.statement()
                pass
            # Alt 4: FOR '(' forControl ')' statement
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 843
                self.match(JavaParser.FOR)
                self.state = 844
                self.match(JavaParser.LPAREN)
                self.state = 845
                self.forControl()
                self.state = 846
                self.match(JavaParser.RPAREN)
                self.state = 847
                self.statement()
                pass
            # Alt 5: WHILE parExpression statement
            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 849
                self.match(JavaParser.WHILE)
                self.state = 850
                self.parExpression()
                self.state = 851
                self.statement()
                pass
            # Alt 6: DO statement WHILE parExpression ';'
            elif la_ == 6:
                self.enterOuterAlt(localctx, 6)
                self.state = 853
                self.match(JavaParser.DO)
                self.state = 854
                self.statement()
                self.state = 855
                self.match(JavaParser.WHILE)
                self.state = 856
                self.parExpression()
                self.state = 857
                self.match(JavaParser.SEMI)
                pass
            # Alt 7: TRY block (catchClause+ finallyBlock? | finallyBlock)
            elif la_ == 7:
                self.enterOuterAlt(localctx, 7)
                self.state = 859
                self.match(JavaParser.TRY)
                self.state = 860
                self.block()
                self.state = 870
                token = self._input.LA(1)
                if token in [JavaParser.CATCH]:
                    # One or more catch clauses, then an optional finally.
                    self.state = 862
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    while True:
                        self.state = 861
                        self.catchClause()
                        self.state = 864
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                        if not (_la==JavaParser.CATCH):
                            break
                    self.state = 867
                    _la = self._input.LA(1)
                    if _la==JavaParser.FINALLY:
                        self.state = 866
                        self.finallyBlock()
                elif token in [JavaParser.FINALLY]:
                    # No catches: a finally block alone is required.
                    self.state = 869
                    self.finallyBlock()
                else:
                    raise NoViableAltException(self)
                pass
            # Alt 8: try-with-resources: TRY resourceSpecification block catchClause* finallyBlock?
            elif la_ == 8:
                self.enterOuterAlt(localctx, 8)
                self.state = 872
                self.match(JavaParser.TRY)
                self.state = 873
                self.resourceSpecification()
                self.state = 874
                self.block()
                self.state = 878
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==JavaParser.CATCH:
                    self.state = 875
                    self.catchClause()
                    self.state = 880
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                self.state = 882
                _la = self._input.LA(1)
                if _la==JavaParser.FINALLY:
                    self.state = 881
                    self.finallyBlock()
                pass
            # Alt 9: SWITCH parExpression '{' switchBlockStatementGroup* switchLabel* '}'
            elif la_ == 9:
                self.enterOuterAlt(localctx, 9)
                self.state = 884
                self.match(JavaParser.SWITCH)
                self.state = 885
                self.parExpression()
                self.state = 886
                self.match(JavaParser.LBRACE)
                self.state = 890
                self._errHandler.sync(self)
                # Decision 103: groups are labels followed by statements; trailing
                # bare labels are collected by the next loop instead.
                _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 887
                        self.switchBlockStatementGroup()
                    self.state = 892
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
                self.state = 896
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==JavaParser.CASE or _la==JavaParser.DEFAULT:
                    self.state = 893
                    self.switchLabel()
                    self.state = 898
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                self.state = 899
                self.match(JavaParser.RBRACE)
                pass
            # Alt 10: SYNCHRONIZED parExpression block
            elif la_ == 10:
                self.enterOuterAlt(localctx, 10)
                self.state = 901
                self.match(JavaParser.SYNCHRONIZED)
                self.state = 902
                self.parExpression()
                self.state = 903
                self.block()
                pass
            # Alt 11: RETURN expression? ';'
            elif la_ == 11:
                self.enterOuterAlt(localctx, 11)
                self.state = 905
                self.match(JavaParser.RETURN)
                self.state = 907
                _la = self._input.LA(1)
                # Bitset test: does the lookahead start an expression? Split into
                # two 64-bit words (tokens 0..63, then offset by 72).
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 906
                    self.expression(0)
                self.state = 909
                self.match(JavaParser.SEMI)
                pass
            # Alt 12: THROW expression ';'
            elif la_ == 12:
                self.enterOuterAlt(localctx, 12)
                self.state = 910
                self.match(JavaParser.THROW)
                self.state = 911
                self.expression(0)
                self.state = 912
                self.match(JavaParser.SEMI)
                pass
            # Alt 13: BREAK IDENTIFIER? ';'
            elif la_ == 13:
                self.enterOuterAlt(localctx, 13)
                self.state = 914
                self.match(JavaParser.BREAK)
                self.state = 916
                _la = self._input.LA(1)
                if _la==JavaParser.IDENTIFIER:
                    self.state = 915
                    self.match(JavaParser.IDENTIFIER)
                self.state = 918
                self.match(JavaParser.SEMI)
                pass
            # Alt 14: CONTINUE IDENTIFIER? ';'
            elif la_ == 14:
                self.enterOuterAlt(localctx, 14)
                self.state = 919
                self.match(JavaParser.CONTINUE)
                self.state = 921
                _la = self._input.LA(1)
                if _la==JavaParser.IDENTIFIER:
                    self.state = 920
                    self.match(JavaParser.IDENTIFIER)
                self.state = 923
                self.match(JavaParser.SEMI)
                pass
            # Alt 15: the empty statement ';'
            elif la_ == 15:
                self.enterOuterAlt(localctx, 15)
                self.state = 924
                self.match(JavaParser.SEMI)
                pass
            # Alt 16: expression statement: expression ';'
            elif la_ == 16:
                self.enterOuterAlt(localctx, 16)
                self.state = 925
                localctx.statementExpression = self.expression(0)
                self.state = 926
                self.match(JavaParser.SEMI)
                pass
            # Alt 17: labeled statement: IDENTIFIER ':' statement
            elif la_ == 17:
                self.enterOuterAlt(localctx, 17)
                self.state = 928
                localctx.identifierLabel = self.match(JavaParser.IDENTIFIER)
                self.state = 929
                self.match(JavaParser.COLON)
                self.state = 930
                self.statement()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class CatchClauseContext(ParserRuleContext):
        """Parse-tree node for a catch clause:
        CATCH '(' variableModifier* catchType IDENTIFIER ')' block."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def CATCH(self):
            return self.getToken(JavaParser.CATCH, 0)
        def catchType(self):
            return self.getTypedRuleContext(JavaParser.CatchTypeContext,0)
        def IDENTIFIER(self):
            # The exception parameter name.
            return self.getToken(JavaParser.IDENTIFIER, 0)
        def block(self):
            return self.getTypedRuleContext(JavaParser.BlockContext,0)
        def variableModifier(self, i:int=None):
            # With no index: all modifier children; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
            else:
                return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_catchClause
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterCatchClause" ):
                listener.enterCatchClause(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitCatchClause" ):
                listener.exitCatchClause(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitCatchClause" ):
                return visitor.visitCatchClause(self)
            else:
                return visitor.visitChildren(self)
    def catchClause(self):
        """Parse rule 136: catchClause
        : CATCH '(' variableModifier* catchType IDENTIFIER ')' block
        """
        localctx = JavaParser.CatchClauseContext(self, self._ctx, self.state)
        self.enterRule(localctx, 136, self.RULE_catchClause)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 933
            self.match(JavaParser.CATCH)
            self.state = 934
            self.match(JavaParser.LPAREN)
            self.state = 938
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # variableModifier* — only FINAL or an annotation ('@') can start one.
            while _la==JavaParser.FINAL or _la==JavaParser.AT:
                self.state = 935
                self.variableModifier()
                self.state = 940
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 941
            self.catchType()
            self.state = 942
            self.match(JavaParser.IDENTIFIER)
            self.state = 943
            self.match(JavaParser.RPAREN)
            self.state = 944
            self.block()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class CatchTypeContext(ParserRuleContext):
        """Parse-tree node for a (possibly multi-) catch type:
        qualifiedName ('|' qualifiedName)*."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def qualifiedName(self, i:int=None):
            # With no index: all alternatives of the multi-catch; with index i: the i-th.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.QualifiedNameContext)
            else:
                return self.getTypedRuleContext(JavaParser.QualifiedNameContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_catchType
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterCatchType" ):
                listener.enterCatchType(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitCatchType" ):
                listener.exitCatchType(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitCatchType" ):
                return visitor.visitCatchType(self)
            else:
                return visitor.visitChildren(self)
    def catchType(self):
        """Parse rule 138: catchType : qualifiedName ('|' qualifiedName)*
        (Java 7 multi-catch).
        """
        localctx = JavaParser.CatchTypeContext(self, self._ctx, self.state)
        self.enterRule(localctx, 138, self.RULE_catchType)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 946
            self.qualifiedName()
            self.state = 951
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Additional '|'-separated exception types.
            while _la==JavaParser.BITOR:
                self.state = 947
                self.match(JavaParser.BITOR)
                self.state = 948
                self.qualifiedName()
                self.state = 953
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class FinallyBlockContext(ParserRuleContext):
        """Parse-tree node for a finally block: FINALLY block."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def FINALLY(self):
            return self.getToken(JavaParser.FINALLY, 0)
        def block(self):
            return self.getTypedRuleContext(JavaParser.BlockContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_finallyBlock
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFinallyBlock" ):
                listener.enterFinallyBlock(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFinallyBlock" ):
                listener.exitFinallyBlock(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitFinallyBlock" ):
                return visitor.visitFinallyBlock(self)
            else:
                return visitor.visitChildren(self)
    def finallyBlock(self):
        """Parse rule 140: finallyBlock : FINALLY block."""
        localctx = JavaParser.FinallyBlockContext(self, self._ctx, self.state)
        self.enterRule(localctx, 140, self.RULE_finallyBlock)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 954
            self.match(JavaParser.FINALLY)
            self.state = 955
            self.block()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ResourceSpecificationContext(ParserRuleContext):
        """Parse-tree node for a try-with-resources header:
        '(' resources ';'? ')'."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def resources(self):
            return self.getTypedRuleContext(JavaParser.ResourcesContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_resourceSpecification
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterResourceSpecification" ):
                listener.enterResourceSpecification(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitResourceSpecification" ):
                listener.exitResourceSpecification(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitResourceSpecification" ):
                return visitor.visitResourceSpecification(self)
            else:
                return visitor.visitChildren(self)
    def resourceSpecification(self):
        """Parse rule 142: resourceSpecification : '(' resources ';'? ')'
        (a trailing semicolon before ')' is legal Java).
        """
        localctx = JavaParser.ResourceSpecificationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 142, self.RULE_resourceSpecification)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 957
            self.match(JavaParser.LPAREN)
            self.state = 958
            self.resources()
            self.state = 960
            _la = self._input.LA(1)
            # Optional trailing ';' before the closing paren.
            if _la==JavaParser.SEMI:
                self.state = 959
                self.match(JavaParser.SEMI)
            self.state = 962
            self.match(JavaParser.RPAREN)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ResourcesContext(ParserRuleContext):
        """Parse-tree node for a ';'-separated list of try-with-resources
        declarations: resource (';' resource)*."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def resource(self, i:int=None):
            # With no index: all resource children; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.ResourceContext)
            else:
                return self.getTypedRuleContext(JavaParser.ResourceContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_resources
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterResources" ):
                listener.enterResources(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitResources" ):
                listener.exitResources(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitResources" ):
                return visitor.visitResources(self)
            else:
                return visitor.visitChildren(self)
    def resources(self):
        """Parse rule 144: resources : resource (';' resource)*.

        Decision 112 distinguishes a separator ';' followed by another resource
        from the optional trailing ';' consumed by resourceSpecification.
        """
        localctx = JavaParser.ResourcesContext(self, self._ctx, self.state)
        self.enterRule(localctx, 144, self.RULE_resources)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 964
            self.resource()
            self.state = 969
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,112,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 965
                    self.match(JavaParser.SEMI)
                    self.state = 966
                    self.resource()
                self.state = 971
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,112,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ResourceContext(ParserRuleContext):
        """Parse-tree node for a single try-with-resources declaration:
        variableModifier* classOrInterfaceType variableDeclaratorId '=' expression."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def classOrInterfaceType(self):
            return self.getTypedRuleContext(JavaParser.ClassOrInterfaceTypeContext,0)
        def variableDeclaratorId(self):
            return self.getTypedRuleContext(JavaParser.VariableDeclaratorIdContext,0)
        def expression(self):
            # The initializer expression after '='.
            return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
        def variableModifier(self, i:int=None):
            # With no index: all modifier children; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
            else:
                return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_resource
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterResource" ):
                listener.enterResource(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitResource" ):
                listener.exitResource(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitResource" ):
                return visitor.visitResource(self)
            else:
                return visitor.visitChildren(self)
    def resource(self):
        """Parse rule 146: resource
        : variableModifier* classOrInterfaceType variableDeclaratorId '=' expression
        """
        localctx = JavaParser.ResourceContext(self, self._ctx, self.state)
        self.enterRule(localctx, 146, self.RULE_resource)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 975
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # variableModifier* — only FINAL or an annotation ('@') can start one.
            while _la==JavaParser.FINAL or _la==JavaParser.AT:
                self.state = 972
                self.variableModifier()
                self.state = 977
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 978
            self.classOrInterfaceType()
            self.state = 979
            self.variableDeclaratorId()
            self.state = 980
            self.match(JavaParser.ASSIGN)
            self.state = 981
            self.expression(0)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class SwitchBlockStatementGroupContext(ParserRuleContext):
        """Parse-tree node for one group inside a switch body:
        switchLabel+ blockStatement+."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def switchLabel(self, i:int=None):
            # With no index: all labels of the group; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.SwitchLabelContext)
            else:
                return self.getTypedRuleContext(JavaParser.SwitchLabelContext,i)
        def blockStatement(self, i:int=None):
            # With no index: all statements of the group; with index i: the i-th one.
            if i is None:
                return self.getTypedRuleContexts(JavaParser.BlockStatementContext)
            else:
                return self.getTypedRuleContext(JavaParser.BlockStatementContext,i)
        def getRuleIndex(self):
            return JavaParser.RULE_switchBlockStatementGroup
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterSwitchBlockStatementGroup" ):
                listener.enterSwitchBlockStatementGroup(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitSwitchBlockStatementGroup" ):
                listener.exitSwitchBlockStatementGroup(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitSwitchBlockStatementGroup" ):
                return visitor.visitSwitchBlockStatementGroup(self)
            else:
                return visitor.visitChildren(self)
    def switchBlockStatementGroup(self):
        """Parse rule 148: switchBlockStatementGroup : switchLabel+ blockStatement+."""
        localctx = JavaParser.SwitchBlockStatementGroupContext(self, self._ctx, self.state)
        self.enterRule(localctx, 148, self.RULE_switchBlockStatementGroup)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 984
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One or more case/default labels.
            while True:
                self.state = 983
                self.switchLabel()
                self.state = 986
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (_la==JavaParser.CASE or _la==JavaParser.DEFAULT):
                    break
            self.state = 989
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One or more block statements; the bitset covers every token that can
            # start a blockStatement (two 64-bit words: 0..63, then offset by 67).
            while True:
                self.state = 988
                self.blockStatement()
                self.state = 991
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.ABSTRACT) | (1 << JavaParser.ASSERT) | (1 << JavaParser.BOOLEAN) | (1 << JavaParser.BREAK) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.CLASS) | (1 << JavaParser.CONTINUE) | (1 << JavaParser.DO) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.FOR) | (1 << JavaParser.IF) | (1 << JavaParser.INT) | (1 << JavaParser.INTERFACE) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.PRIVATE) | (1 << JavaParser.PROTECTED) | (1 << JavaParser.PUBLIC) | (1 << JavaParser.RETURN) | (1 << JavaParser.SHORT) | (1 << JavaParser.STATIC) | (1 << JavaParser.STRICTFP) | (1 << JavaParser.SUPER) | (1 << JavaParser.SWITCH) | (1 << JavaParser.SYNCHRONIZED) | (1 << JavaParser.THIS) | (1 << JavaParser.THROW) | (1 << JavaParser.TRY) | (1 << JavaParser.VOID) | (1 << JavaParser.WHILE) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN) | (1 << JavaParser.LBRACE))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (JavaParser.SEMI - 67)) | (1 << (JavaParser.LT - 67)) | (1 << (JavaParser.BANG - 67)) | (1 << (JavaParser.TILDE - 67)) | (1 << (JavaParser.INC - 67)) | (1 << (JavaParser.DEC - 67)) | (1 << (JavaParser.ADD - 67)) | (1 << (JavaParser.SUB - 67)) | (1 << (JavaParser.AT - 67)) | (1 << (JavaParser.IDENTIFIER - 67)))) != 0)):
                    break
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class SwitchLabelContext(ParserRuleContext):
        """Parse-tree node for a switch label:
        CASE (constantExpression | enumConstantName) ':' | DEFAULT ':'."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            # Labeled grammar elements; exactly one is set for a CASE label.
            self.constantExpression = None # ExpressionContext
            self.enumConstantName = None # Token
        def CASE(self):
            return self.getToken(JavaParser.CASE, 0)
        def expression(self):
            return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
        def IDENTIFIER(self):
            return self.getToken(JavaParser.IDENTIFIER, 0)
        def DEFAULT(self):
            return self.getToken(JavaParser.DEFAULT, 0)
        def getRuleIndex(self):
            return JavaParser.RULE_switchLabel
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterSwitchLabel" ):
                listener.enterSwitchLabel(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitSwitchLabel" ):
                listener.exitSwitchLabel(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitSwitchLabel" ):
                return visitor.visitSwitchLabel(self)
            else:
                return visitor.visitChildren(self)
    def switchLabel(self):
        """Parse rule 150: switchLabel
        : CASE (constantExpression=expression | enumConstantName=IDENTIFIER) ':'
        | DEFAULT ':'
        """
        localctx = JavaParser.SwitchLabelContext(self, self._ctx, self.state)
        self.enterRule(localctx, 150, self.RULE_switchLabel)
        try:
            self.state = 1001
            token = self._input.LA(1)
            if token in [JavaParser.CASE]:
                self.enterOuterAlt(localctx, 1)
                self.state = 993
                self.match(JavaParser.CASE)
                self.state = 996
                self._errHandler.sync(self);
                # Decision 116: a bare IDENTIFIER could be either a constant
                # expression or an enum constant name; prediction disambiguates.
                la_ = self._interp.adaptivePredict(self._input,116,self._ctx)
                if la_ == 1:
                    self.state = 994
                    localctx.constantExpression = self.expression(0)
                    pass
                elif la_ == 2:
                    self.state = 995
                    localctx.enumConstantName = self.match(JavaParser.IDENTIFIER)
                    pass
                self.state = 998
                self.match(JavaParser.COLON)
            elif token in [JavaParser.DEFAULT]:
                self.enterOuterAlt(localctx, 2)
                self.state = 999
                self.match(JavaParser.DEFAULT)
                self.state = 1000
                self.match(JavaParser.COLON)
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ForControlContext(ParserRuleContext):
        """Parse-tree node for the parenthesized part of a for statement:
        either an enhanced for-each control or the classic init;cond;update."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            # Labeled element: the update clause of a classic for, if present.
            self.forUpdate = None # ExpressionListContext
        def enhancedForControl(self):
            return self.getTypedRuleContext(JavaParser.EnhancedForControlContext,0)
        def forInit(self):
            return self.getTypedRuleContext(JavaParser.ForInitContext,0)
        def expression(self):
            # The loop condition of a classic for, if present.
            return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
        def expressionList(self):
            return self.getTypedRuleContext(JavaParser.ExpressionListContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_forControl
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterForControl" ):
                listener.enterForControl(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitForControl" ):
                listener.exitForControl(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitForControl" ):
                return visitor.visitForControl(self)
            else:
                return visitor.visitChildren(self)
    def forControl(self):
        """Parse rule 152: forControl
        : enhancedForControl
        | forInit? ';' expression? ';' forUpdate=expressionList?

        Decision 121 chooses between for-each and classic form; all three
        classic clauses are optional, each gated by an expression-start bitset.
        """
        localctx = JavaParser.ForControlContext(self, self._ctx, self.state)
        self.enterRule(localctx, 152, self.RULE_forControl)
        self._la = 0 # Token type
        try:
            self.state = 1015
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,121,self._ctx)
            # Alt 1: enhanced for-each control ("Type id : expr").
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 1003
                self.enhancedForControl()
                pass
            # Alt 2: classic init? ';' cond? ';' update? control.
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 1005
                _la = self._input.LA(1)
                # Optional forInit — bitset includes FINAL since a declaration may start with it.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1004
                    self.forInit()
                self.state = 1007
                self.match(JavaParser.SEMI)
                self.state = 1009
                _la = self._input.LA(1)
                # Optional loop condition expression.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1008
                    self.expression(0)
                self.state = 1011
                self.match(JavaParser.SEMI)
                self.state = 1013
                _la = self._input.LA(1)
                # Optional update expression list.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1012
                    localctx.forUpdate = self.expressionList()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ForInitContext(ParserRuleContext):
        """Parse-tree node for a classic for loop initializer:
        localVariableDeclaration | expressionList."""
        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def localVariableDeclaration(self):
            return self.getTypedRuleContext(JavaParser.LocalVariableDeclarationContext,0)
        def expressionList(self):
            return self.getTypedRuleContext(JavaParser.ExpressionListContext,0)
        def getRuleIndex(self):
            return JavaParser.RULE_forInit
        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterForInit" ):
                listener.enterForInit(self)
        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitForInit" ):
                listener.exitForInit(self)
        def accept(self, visitor:ParseTreeVisitor):
            # Visitor dispatch: falls back to generic child traversal.
            if hasattr( visitor, "visitForInit" ):
                return visitor.visitForInit(self)
            else:
                return visitor.visitChildren(self)
    def forInit(self):
        """Parse the `forInit` rule: localVariableDeclaration | expressionList.

        Generated by ANTLR; the `self.state = N` assignments are ATN state
        bookkeeping and must not be reordered.
        """
        localctx = JavaParser.ForInitContext(self, self._ctx, self.state)
        self.enterRule(localctx, 154, self.RULE_forInit)
        try:
            self.state = 1019
            self._errHandler.sync(self);
            # Adaptive LL(*) prediction chooses between the two alternatives.
            la_ = self._interp.adaptivePredict(self._input,122,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 1017
                self.localVariableDeclaration()
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 1018
                self.expressionList()
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class EnhancedForControlContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def typeType(self):
return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)
def variableDeclaratorId(self):
return self.getTypedRuleContext(JavaParser.VariableDeclaratorIdContext,0)
def expression(self):
return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
def variableModifier(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.VariableModifierContext)
else:
return self.getTypedRuleContext(JavaParser.VariableModifierContext,i)
def getRuleIndex(self):
return JavaParser.RULE_enhancedForControl
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEnhancedForControl" ):
listener.enterEnhancedForControl(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEnhancedForControl" ):
listener.exitEnhancedForControl(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitEnhancedForControl" ):
return visitor.visitEnhancedForControl(self)
else:
return visitor.visitChildren(self)
    def enhancedForControl(self):
        """Parse `enhancedForControl`: variableModifier* typeType variableDeclaratorId ':' expression.

        Generated by ANTLR; statement order follows the ATN and must be preserved.
        """
        localctx = JavaParser.EnhancedForControlContext(self, self._ctx, self.state)
        self.enterRule(localctx, 156, self.RULE_enhancedForControl)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1024
            self._errHandler.sync(self)
            # Greedy loop over any leading variable modifiers.
            _alt = self._interp.adaptivePredict(self._input,123,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 1021
                    self.variableModifier()
                self.state = 1026
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,123,self._ctx)

            self.state = 1027
            self.typeType()
            self.state = 1028
            self.variableDeclaratorId()
            self.state = 1029
            self.match(JavaParser.COLON)
            self.state = 1030
            self.expression(0)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ParExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self):
return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
def getRuleIndex(self):
return JavaParser.RULE_parExpression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParExpression" ):
listener.enterParExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParExpression" ):
listener.exitParExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitParExpression" ):
return visitor.visitParExpression(self)
else:
return visitor.visitChildren(self)
    def parExpression(self):
        """Parse the `parExpression` rule: '(' expression ')'.

        Generated by ANTLR; `self.state = N` assignments are ATN bookkeeping.
        """
        localctx = JavaParser.ParExpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 158, self.RULE_parExpression)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1032
            self.match(JavaParser.LPAREN)
            self.state = 1033
            self.expression(0)
            self.state = 1034
            self.match(JavaParser.RPAREN)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ExpressionListContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.ExpressionContext)
else:
return self.getTypedRuleContext(JavaParser.ExpressionContext,i)
def getRuleIndex(self):
return JavaParser.RULE_expressionList
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterExpressionList" ):
listener.enterExpressionList(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitExpressionList" ):
listener.exitExpressionList(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitExpressionList" ):
return visitor.visitExpressionList(self)
else:
return visitor.visitChildren(self)
    def expressionList(self):
        """Parse the `expressionList` rule: expression (',' expression)*.

        Generated by ANTLR; statement order follows the ATN and must be preserved.
        """
        localctx = JavaParser.ExpressionListContext(self, self._ctx, self.state)
        self.enterRule(localctx, 160, self.RULE_expressionList)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1036
            self.expression(0)
            self.state = 1041
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Consume comma-separated expressions while a COMMA follows.
            while _la==JavaParser.COMMA:
                self.state = 1037
                self.match(JavaParser.COMMA)
                self.state = 1038
                self.expression(0)
                self.state = 1043
                self._errHandler.sync(self)
                _la = self._input.LA(1)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class MethodCallContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(JavaParser.IDENTIFIER, 0)
def expressionList(self):
return self.getTypedRuleContext(JavaParser.ExpressionListContext,0)
def THIS(self):
return self.getToken(JavaParser.THIS, 0)
def SUPER(self):
return self.getToken(JavaParser.SUPER, 0)
def getRuleIndex(self):
return JavaParser.RULE_methodCall
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterMethodCall" ):
listener.enterMethodCall(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitMethodCall" ):
listener.exitMethodCall(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitMethodCall" ):
return visitor.visitMethodCall(self)
else:
return visitor.visitChildren(self)
    def methodCall(self):
        """Parse the `methodCall` rule: IDENTIFIER/this/super '(' expressionList? ')'.

        Generated by ANTLR; `self.state = N` assignments are ATN bookkeeping and
        must not be reordered.
        """
        localctx = JavaParser.MethodCallContext(self, self._ctx, self.state)
        self.enterRule(localctx, 162, self.RULE_methodCall)
        self._la = 0 # Token type
        try:
            self.state = 1062
            # Dispatch on the next token: plain call, this(...), or super(...).
            token = self._input.LA(1)
            if token in [JavaParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 1)
                self.state = 1044
                self.match(JavaParser.IDENTIFIER)
                self.state = 1045
                self.match(JavaParser.LPAREN)
                self.state = 1047
                _la = self._input.LA(1)
                # Bit-set membership test: parse the optional argument list only
                # when the lookahead token can begin an expression.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1046
                    self.expressionList()

                self.state = 1049
                self.match(JavaParser.RPAREN)

            elif token in [JavaParser.THIS]:
                self.enterOuterAlt(localctx, 2)
                self.state = 1050
                self.match(JavaParser.THIS)
                self.state = 1051
                self.match(JavaParser.LPAREN)
                self.state = 1053
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1052
                    self.expressionList()

                self.state = 1055
                self.match(JavaParser.RPAREN)

            elif token in [JavaParser.SUPER]:
                self.enterOuterAlt(localctx, 3)
                self.state = 1056
                self.match(JavaParser.SUPER)
                self.state = 1057
                self.match(JavaParser.LPAREN)
                self.state = 1059
                _la = self._input.LA(1)
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
                    self.state = 1058
                    self.expressionList()

                self.state = 1061
                self.match(JavaParser.RPAREN)

            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class ExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
self.prefix = None # Token
self.bop = None # Token
self.postfix = None # Token
def primary(self):
return self.getTypedRuleContext(JavaParser.PrimaryContext,0)
def methodCall(self):
return self.getTypedRuleContext(JavaParser.MethodCallContext,0)
def NEW(self):
return self.getToken(JavaParser.NEW, 0)
def creator(self):
return self.getTypedRuleContext(JavaParser.CreatorContext,0)
def typeType(self):
return self.getTypedRuleContext(JavaParser.TypeTypeContext,0)
def expression(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(JavaParser.ExpressionContext)
else:
return self.getTypedRuleContext(JavaParser.ExpressionContext,i)
def lambdaExpression(self):
return self.getTypedRuleContext(JavaParser.LambdaExpressionContext,0)
def IDENTIFIER(self):
return self.getToken(JavaParser.IDENTIFIER, 0)
def typeArguments(self):
return self.getTypedRuleContext(JavaParser.TypeArgumentsContext,0)
def classType(self):
return self.getTypedRuleContext(JavaParser.ClassTypeContext,0)
def THIS(self):
return self.getToken(JavaParser.THIS, 0)
def innerCreator(self):
return self.getTypedRuleContext(JavaParser.InnerCreatorContext,0)
def SUPER(self):
return self.getToken(JavaParser.SUPER, 0)
def superSuffix(self):
return self.getTypedRuleContext(JavaParser.SuperSuffixContext,0)
def explicitGenericInvocation(self):
return self.getTypedRuleContext(JavaParser.ExplicitGenericInvocationContext,0)
def nonWildcardTypeArguments(self):
return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsContext,0)
def INSTANCEOF(self):
return self.getToken(JavaParser.INSTANCEOF, 0)
def getRuleIndex(self):
return JavaParser.RULE_expression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterExpression" ):
listener.enterExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitExpression" ):
listener.exitExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitExpression" ):
return visitor.visitExpression(self)
else:
return visitor.visitChildren(self)
    def expression(self, _p:int=0):
        """Parse the left-recursive `expression` rule.

        ANTLR compiles left recursion into precedence climbing: a primary
        alternative is parsed first, then a loop folds in binary/postfix
        suffixes whose precedence (checked via `precpred`) is >= `_p`.
        Generated code: the `self.state = N` assignments and adaptivePredict
        decision numbers track the ATN and must not be reordered.
        """
        _parentctx = self._ctx
        _parentState = self.state
        localctx = JavaParser.ExpressionContext(self, self._ctx, _parentState)
        _prevctx = localctx
        _startState = 164
        self.enterRecursionRule(localctx, 164, self.RULE_expression, _p)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1095
            self._errHandler.sync(self);
            # Choose the primary (non-left-recursive) alternative.
            la_ = self._interp.adaptivePredict(self._input,132,self._ctx)
            if la_ == 1:
                self.state = 1065
                self.primary()
                pass

            elif la_ == 2:
                self.state = 1066
                self.methodCall()
                pass

            elif la_ == 3:
                # Object/array creation: NEW creator
                self.state = 1067
                self.match(JavaParser.NEW)
                self.state = 1068
                self.creator()
                pass

            elif la_ == 4:
                # Cast: '(' typeType ')' expression
                self.state = 1069
                self.match(JavaParser.LPAREN)
                self.state = 1070
                self.typeType()
                self.state = 1071
                self.match(JavaParser.RPAREN)
                self.state = 1072
                self.expression(21)
                pass

            elif la_ == 5:
                # Prefix ++/--/+/- expression
                self.state = 1074
                localctx.prefix = self._input.LT(1)
                _la = self._input.LA(1)
                if not(((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (JavaParser.INC - 83)) | (1 << (JavaParser.DEC - 83)) | (1 << (JavaParser.ADD - 83)) | (1 << (JavaParser.SUB - 83)))) != 0)):
                    localctx.prefix = self._errHandler.recoverInline(self)
                else:
                    self.consume()
                self.state = 1075
                self.expression(19)
                pass

            elif la_ == 6:
                # Prefix !/~ expression
                self.state = 1076
                localctx.prefix = self._input.LT(1)
                _la = self._input.LA(1)
                if not(_la==JavaParser.BANG or _la==JavaParser.TILDE):
                    localctx.prefix = self._errHandler.recoverInline(self)
                else:
                    self.consume()
                self.state = 1077
                self.expression(18)
                pass

            elif la_ == 7:
                self.state = 1078
                self.lambdaExpression()
                pass

            elif la_ == 8:
                # Method reference: typeType '::' (typeArguments? IDENTIFIER | NEW)
                self.state = 1079
                self.typeType()
                self.state = 1080
                self.match(JavaParser.COLONCOLON)
                self.state = 1086
                token = self._input.LA(1)
                if token in [JavaParser.LT, JavaParser.IDENTIFIER]:
                    self.state = 1082
                    _la = self._input.LA(1)
                    if _la==JavaParser.LT:
                        self.state = 1081
                        self.typeArguments()

                    self.state = 1084
                    self.match(JavaParser.IDENTIFIER)

                elif token in [JavaParser.NEW]:
                    self.state = 1085
                    self.match(JavaParser.NEW)

                else:
                    raise NoViableAltException(self)

                pass

            elif la_ == 9:
                # Constructor reference: classType '::' typeArguments? NEW
                self.state = 1088
                self.classType()
                self.state = 1089
                self.match(JavaParser.COLONCOLON)
                self.state = 1091
                _la = self._input.LA(1)
                if _la==JavaParser.LT:
                    self.state = 1090
                    self.typeArguments()

                self.state = 1093
                self.match(JavaParser.NEW)
                pass

            self._ctx.stop = self._input.LT(-1)
            self.state = 1177
            self._errHandler.sync(self)
            # Precedence-climbing loop: fold in binary operators and suffixes.
            _alt = self._interp.adaptivePredict(self._input,138,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    if self._parseListeners is not None:
                        self.triggerExitRuleEvent()
                    _prevctx = localctx
                    self.state = 1175
                    self._errHandler.sync(self);
                    la_ = self._interp.adaptivePredict(self._input,137,self._ctx)
                    if la_ == 1:
                        # Multiplicative: expression ('*'|'/'|'%') expression
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1097
                        if not self.precpred(self._ctx, 17):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 17)")
                        self.state = 1098
                        localctx.bop = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & ((1 << (JavaParser.MUL - 87)) | (1 << (JavaParser.DIV - 87)) | (1 << (JavaParser.MOD - 87)))) != 0)):
                            localctx.bop = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        self.state = 1099
                        self.expression(18)
                        pass

                    elif la_ == 2:
                        # Additive: expression ('+'|'-') expression
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1100
                        if not self.precpred(self._ctx, 16):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 16)")
                        self.state = 1101
                        localctx.bop = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==JavaParser.ADD or _la==JavaParser.SUB):
                            localctx.bop = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        self.state = 1102
                        self.expression(17)
                        pass

                    elif la_ == 3:
                        # Shift: '<' '<' | '>' '>' '>' | '>' '>' (split tokens so
                        # generics like List<List<T>> still lex correctly)
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1103
                        if not self.precpred(self._ctx, 15):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 15)")
                        self.state = 1111
                        self._errHandler.sync(self);
                        la_ = self._interp.adaptivePredict(self._input,133,self._ctx)
                        if la_ == 1:
                            self.state = 1104
                            self.match(JavaParser.LT)
                            self.state = 1105
                            self.match(JavaParser.LT)
                            pass

                        elif la_ == 2:
                            self.state = 1106
                            self.match(JavaParser.GT)
                            self.state = 1107
                            self.match(JavaParser.GT)
                            self.state = 1108
                            self.match(JavaParser.GT)
                            pass

                        elif la_ == 3:
                            self.state = 1109
                            self.match(JavaParser.GT)
                            self.state = 1110
                            self.match(JavaParser.GT)
                            pass

                        self.state = 1113
                        self.expression(16)
                        pass

                    elif la_ == 4:
                        # Relational: expression ('>'|'<'|'<='|'>=') expression
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1114
                        if not self.precpred(self._ctx, 14):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 14)")
                        self.state = 1115
                        localctx.bop = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(((((_la - 71)) & ~0x3f) == 0 and ((1 << (_la - 71)) & ((1 << (JavaParser.GT - 71)) | (1 << (JavaParser.LT - 71)) | (1 << (JavaParser.LE - 71)) | (1 << (JavaParser.GE - 71)))) != 0)):
                            localctx.bop = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        self.state = 1116
                        self.expression(15)
                        pass

                    elif la_ == 5:
                        # Equality: expression ('=='|'!=') expression
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1117
                        if not self.precpred(self._ctx, 12):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 12)")
                        self.state = 1118
                        localctx.bop = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==JavaParser.EQUAL or _la==JavaParser.NOTEQUAL):
                            localctx.bop = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        self.state = 1119
                        self.expression(13)
                        pass

                    elif la_ == 6:
                        # Bitwise AND
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1120
                        if not self.precpred(self._ctx, 11):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 11)")
                        self.state = 1121
                        localctx.bop = self.match(JavaParser.BITAND)
                        self.state = 1122
                        self.expression(12)
                        pass

                    elif la_ == 7:
                        # Bitwise XOR
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1123
                        if not self.precpred(self._ctx, 10):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 10)")
                        self.state = 1124
                        localctx.bop = self.match(JavaParser.CARET)
                        self.state = 1125
                        self.expression(11)
                        pass

                    elif la_ == 8:
                        # Bitwise OR
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1126
                        if not self.precpred(self._ctx, 9):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
                        self.state = 1127
                        localctx.bop = self.match(JavaParser.BITOR)
                        self.state = 1128
                        self.expression(10)
                        pass

                    elif la_ == 9:
                        # Logical AND
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1129
                        if not self.precpred(self._ctx, 8):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
                        self.state = 1130
                        localctx.bop = self.match(JavaParser.AND)
                        self.state = 1131
                        self.expression(9)
                        pass

                    elif la_ == 10:
                        # Logical OR
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1132
                        if not self.precpred(self._ctx, 7):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 7)")
                        self.state = 1133
                        localctx.bop = self.match(JavaParser.OR)
                        self.state = 1134
                        self.expression(8)
                        pass

                    elif la_ == 11:
                        # Ternary: expression '?' expression ':' expression
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1135
                        if not self.precpred(self._ctx, 6):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 6)")
                        self.state = 1136
                        localctx.bop = self.match(JavaParser.QUESTION)
                        self.state = 1137
                        self.expression(0)
                        self.state = 1138
                        self.match(JavaParser.COLON)
                        self.state = 1139
                        self.expression(7)
                        pass

                    elif la_ == 12:
                        # Assignment operators (right-associative: recurse with same precedence)
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1141
                        if not self.precpred(self._ctx, 5):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
                        self.state = 1142
                        localctx.bop = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(((((_la - 70)) & ~0x3f) == 0 and ((1 << (_la - 70)) & ((1 << (JavaParser.ASSIGN - 70)) | (1 << (JavaParser.ADD_ASSIGN - 70)) | (1 << (JavaParser.SUB_ASSIGN - 70)) | (1 << (JavaParser.MUL_ASSIGN - 70)) | (1 << (JavaParser.DIV_ASSIGN - 70)) | (1 << (JavaParser.AND_ASSIGN - 70)) | (1 << (JavaParser.OR_ASSIGN - 70)) | (1 << (JavaParser.XOR_ASSIGN - 70)) | (1 << (JavaParser.MOD_ASSIGN - 70)) | (1 << (JavaParser.LSHIFT_ASSIGN - 70)) | (1 << (JavaParser.RSHIFT_ASSIGN - 70)) | (1 << (JavaParser.URSHIFT_ASSIGN - 70)))) != 0)):
                            localctx.bop = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        self.state = 1143
                        self.expression(5)
                        pass

                    elif la_ == 13:
                        # Member access: expression '.' (IDENTIFIER | methodCall | this | new ... | super ... | explicitGenericInvocation)
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1144
                        if not self.precpred(self._ctx, 25):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 25)")
                        self.state = 1145
                        localctx.bop = self.match(JavaParser.DOT)
                        self.state = 1157
                        self._errHandler.sync(self);
                        la_ = self._interp.adaptivePredict(self._input,135,self._ctx)
                        if la_ == 1:
                            self.state = 1146
                            self.match(JavaParser.IDENTIFIER)
                            pass

                        elif la_ == 2:
                            self.state = 1147
                            self.methodCall()
                            pass

                        elif la_ == 3:
                            self.state = 1148
                            self.match(JavaParser.THIS)
                            pass

                        elif la_ == 4:
                            self.state = 1149
                            self.match(JavaParser.NEW)
                            self.state = 1151
                            _la = self._input.LA(1)
                            if _la==JavaParser.LT:
                                self.state = 1150
                                self.nonWildcardTypeArguments()

                            self.state = 1153
                            self.innerCreator()
                            pass

                        elif la_ == 5:
                            self.state = 1154
                            self.match(JavaParser.SUPER)
                            self.state = 1155
                            self.superSuffix()
                            pass

                        elif la_ == 6:
                            self.state = 1156
                            self.explicitGenericInvocation()
                            pass

                        pass

                    elif la_ == 14:
                        # Array access: expression '[' expression ']'
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1159
                        if not self.precpred(self._ctx, 24):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 24)")
                        self.state = 1160
                        self.match(JavaParser.LBRACK)
                        self.state = 1161
                        self.expression(0)
                        self.state = 1162
                        self.match(JavaParser.RBRACK)
                        pass

                    elif la_ == 15:
                        # Postfix ++/--
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1164
                        if not self.precpred(self._ctx, 20):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 20)")
                        self.state = 1165
                        localctx.postfix = self._input.LT(1)
                        _la = self._input.LA(1)
                        if not(_la==JavaParser.INC or _la==JavaParser.DEC):
                            localctx.postfix = self._errHandler.recoverInline(self)
                        else:
                            self.consume()
                        pass

                    elif la_ == 16:
                        # instanceof check
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1166
                        if not self.precpred(self._ctx, 13):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 13)")
                        self.state = 1167
                        localctx.bop = self.match(JavaParser.INSTANCEOF)
                        self.state = 1168
                        self.typeType()
                        pass

                    elif la_ == 17:
                        # Method reference on an expression: expression '::' typeArguments? IDENTIFIER
                        localctx = JavaParser.ExpressionContext(self, _parentctx, _parentState)
                        self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
                        self.state = 1169
                        if not self.precpred(self._ctx, 3):
                            from antlr4.error.Errors import FailedPredicateException
                            raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                        self.state = 1170
                        self.match(JavaParser.COLONCOLON)
                        self.state = 1172
                        _la = self._input.LA(1)
                        if _la==JavaParser.LT:
                            self.state = 1171
                            self.typeArguments()

                        self.state = 1174
                        self.match(JavaParser.IDENTIFIER)
                        pass

                self.state = 1179
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,138,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.unrollRecursionContexts(_parentctx)
        return localctx
class LambdaExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def lambdaParameters(self):
return self.getTypedRuleContext(JavaParser.LambdaParametersContext,0)
def lambdaBody(self):
return self.getTypedRuleContext(JavaParser.LambdaBodyContext,0)
def getRuleIndex(self):
return JavaParser.RULE_lambdaExpression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLambdaExpression" ):
listener.enterLambdaExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLambdaExpression" ):
listener.exitLambdaExpression(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitLambdaExpression" ):
return visitor.visitLambdaExpression(self)
else:
return visitor.visitChildren(self)
    def lambdaExpression(self):
        """Parse the `lambdaExpression` rule: lambdaParameters '->' lambdaBody.

        Generated by ANTLR; `self.state = N` assignments are ATN bookkeeping.
        """
        localctx = JavaParser.LambdaExpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 166, self.RULE_lambdaExpression)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 1180
            self.lambdaParameters()
            self.state = 1181
            self.match(JavaParser.ARROW)
            self.state = 1182
            self.lambdaBody()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class LambdaParametersContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self, i:int=None):
if i is None:
return self.getTokens(JavaParser.IDENTIFIER)
else:
return self.getToken(JavaParser.IDENTIFIER, i)
def formalParameterList(self):
return self.getTypedRuleContext(JavaParser.FormalParameterListContext,0)
def getRuleIndex(self):
return JavaParser.RULE_lambdaParameters
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLambdaParameters" ):
listener.enterLambdaParameters(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLambdaParameters" ):
listener.exitLambdaParameters(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitLambdaParameters" ):
return visitor.visitLambdaParameters(self)
else:
return visitor.visitChildren(self)
    def lambdaParameters(self):
        """Parse `lambdaParameters`: IDENTIFIER | '(' formalParameterList? ')' | '(' IDENTIFIER (',' IDENTIFIER)* ')'.

        Generated by ANTLR; `self.state = N` assignments are ATN bookkeeping and
        must not be reordered.
        """
        localctx = JavaParser.LambdaParametersContext(self, self._ctx, self.state)
        self.enterRule(localctx, 168, self.RULE_lambdaParameters)
        self._la = 0 # Token type
        try:
            self.state = 1200
            self._errHandler.sync(self);
            # Adaptive prediction distinguishes the three parameter forms.
            la_ = self._interp.adaptivePredict(self._input,141,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 1184
                self.match(JavaParser.IDENTIFIER)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 1185
                self.match(JavaParser.LPAREN)
                self.state = 1187
                _la = self._input.LA(1)
                # Bit-set membership test: parse the optional parameter list only
                # when the lookahead token can begin a formal parameter.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FINAL) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.SHORT))) != 0) or _la==JavaParser.AT or _la==JavaParser.IDENTIFIER:
                    self.state = 1186
                    self.formalParameterList()

                self.state = 1189
                self.match(JavaParser.RPAREN)
                pass

            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 1190
                self.match(JavaParser.LPAREN)
                self.state = 1191
                self.match(JavaParser.IDENTIFIER)
                self.state = 1196
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==JavaParser.COMMA:
                    self.state = 1192
                    self.match(JavaParser.COMMA)
                    self.state = 1193
                    self.match(JavaParser.IDENTIFIER)
                    self.state = 1198
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 1199
                self.match(JavaParser.RPAREN)
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class LambdaBodyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self):
return self.getTypedRuleContext(JavaParser.ExpressionContext,0)
def block(self):
return self.getTypedRuleContext(JavaParser.BlockContext,0)
def getRuleIndex(self):
return JavaParser.RULE_lambdaBody
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLambdaBody" ):
listener.enterLambdaBody(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLambdaBody" ):
listener.exitLambdaBody(self)
def accept(self, visitor:ParseTreeVisitor):
if hasattr( visitor, "visitLambdaBody" ):
return visitor.visitLambdaBody(self)
else:
return visitor.visitChildren(self)
def lambdaBody(self):
    """Parse the `lambdaBody` rule: a single expression or a braced block.

    Generated by ANTLR: `self.state` values index into the parser ATN,
    so statement order must not be changed by hand.
    """
    localctx = JavaParser.LambdaBodyContext(self, self._ctx, self.state)
    self.enterRule(localctx, 170, self.RULE_lambdaBody)
    try:
        self.state = 1204
        # One token of lookahead selects the alternative.
        token = self._input.LA(1)
        if token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.NEW, JavaParser.SHORT, JavaParser.SUPER, JavaParser.THIS, JavaParser.VOID, JavaParser.DECIMAL_LITERAL, JavaParser.HEX_LITERAL, JavaParser.OCT_LITERAL, JavaParser.BINARY_LITERAL, JavaParser.FLOAT_LITERAL, JavaParser.HEX_FLOAT_LITERAL, JavaParser.BOOL_LITERAL, JavaParser.CHAR_LITERAL, JavaParser.STRING_LITERAL, JavaParser.NULL_LITERAL, JavaParser.LPAREN, JavaParser.LT, JavaParser.BANG, JavaParser.TILDE, JavaParser.INC, JavaParser.DEC, JavaParser.ADD, JavaParser.SUB, JavaParser.AT, JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1202
            self.expression(0)
        elif token in [JavaParser.LBRACE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1203
            self.block()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PrimaryContext(ParserRuleContext):
    """Parse-tree node for the `primary` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self):
        return self.getTypedRuleContext(JavaParser.ExpressionContext, 0)

    def THIS(self):
        return self.getToken(JavaParser.THIS, 0)

    def SUPER(self):
        return self.getToken(JavaParser.SUPER, 0)

    def literal(self):
        return self.getTypedRuleContext(JavaParser.LiteralContext, 0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def typeTypeOrVoid(self):
        return self.getTypedRuleContext(JavaParser.TypeTypeOrVoidContext, 0)

    def CLASS(self):
        return self.getToken(JavaParser.CLASS, 0)

    def nonWildcardTypeArguments(self):
        return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsContext, 0)

    def explicitGenericInvocationSuffix(self):
        return self.getTypedRuleContext(JavaParser.ExplicitGenericInvocationSuffixContext, 0)

    def arguments(self):
        return self.getTypedRuleContext(JavaParser.ArgumentsContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_primary

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterPrimary", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitPrimary", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitPrimary", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def primary(self):
    """Parse the `primary` rule: parenthesized expression, `this`, `super`,
    literal, identifier, `Type.class`, or an explicit generic invocation.

    Generated by ANTLR; the adaptive prediction (decision 144) picks the
    alternative, and `self.state` values index into the parser ATN.
    """
    localctx = JavaParser.PrimaryContext(self, self._ctx, self.state)
    self.enterRule(localctx, 172, self.RULE_primary)
    try:
        self.state = 1224
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,144,self._ctx)
        if la_ == 1:
            # '(' expression ')'
            self.enterOuterAlt(localctx, 1)
            self.state = 1206
            self.match(JavaParser.LPAREN)
            self.state = 1207
            self.expression(0)
            self.state = 1208
            self.match(JavaParser.RPAREN)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1210
            self.match(JavaParser.THIS)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1211
            self.match(JavaParser.SUPER)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1212
            self.literal()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 1213
            self.match(JavaParser.IDENTIFIER)
            pass
        elif la_ == 6:
            # typeTypeOrVoid '.' 'class'
            self.enterOuterAlt(localctx, 6)
            self.state = 1214
            self.typeTypeOrVoid()
            self.state = 1215
            self.match(JavaParser.DOT)
            self.state = 1216
            self.match(JavaParser.CLASS)
            pass
        elif la_ == 7:
            # nonWildcardTypeArguments (explicitGenericInvocationSuffix | 'this' arguments)
            self.enterOuterAlt(localctx, 7)
            self.state = 1218
            self.nonWildcardTypeArguments()
            self.state = 1222
            token = self._input.LA(1)
            if token in [JavaParser.SUPER, JavaParser.IDENTIFIER]:
                self.state = 1219
                self.explicitGenericInvocationSuffix()
            elif token in [JavaParser.THIS]:
                self.state = 1220
                self.match(JavaParser.THIS)
                self.state = 1221
                self.arguments()
            else:
                raise NoViableAltException(self)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassTypeContext(ParserRuleContext):
    """Parse-tree node for the `classType` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def classOrInterfaceType(self):
        return self.getTypedRuleContext(JavaParser.ClassOrInterfaceTypeContext, 0)

    def annotation(self, i:int=None):
        # i is None -> all annotation children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.AnnotationContext)
        return self.getTypedRuleContext(JavaParser.AnnotationContext, i)

    def typeArguments(self):
        return self.getTypedRuleContext(JavaParser.TypeArgumentsContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_classType

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassType", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassType", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassType", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def classType(self):
    """Parse the `classType` rule:
    (classOrInterfaceType '.')? annotation* IDENTIFIER typeArguments?

    Generated by ANTLR; ATN state numbers are load-bearing.
    """
    localctx = JavaParser.ClassTypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 174, self.RULE_classType)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1229
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,145,self._ctx)
        if la_ == 1:
            # Optional qualifying prefix: classOrInterfaceType '.'
            self.state = 1226
            self.classOrInterfaceType()
            self.state = 1227
            self.match(JavaParser.DOT)

        # annotation*
        self.state = 1234
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==JavaParser.AT:
            self.state = 1231
            self.annotation()
            self.state = 1236
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 1237
        self.match(JavaParser.IDENTIFIER)
        # typeArguments?
        self.state = 1239
        _la = self._input.LA(1)
        if _la==JavaParser.LT:
            self.state = 1238
            self.typeArguments()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CreatorContext(ParserRuleContext):
    """Parse-tree node for the `creator` rule (`new ...` object/array creation)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def nonWildcardTypeArguments(self):
        return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsContext, 0)

    def createdName(self):
        return self.getTypedRuleContext(JavaParser.CreatedNameContext, 0)

    def classCreatorRest(self):
        return self.getTypedRuleContext(JavaParser.ClassCreatorRestContext, 0)

    def arrayCreatorRest(self):
        return self.getTypedRuleContext(JavaParser.ArrayCreatorRestContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_creator

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterCreator", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitCreator", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitCreator", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def creator(self):
    """Parse the `creator` rule:
    nonWildcardTypeArguments createdName classCreatorRest
    | createdName (arrayCreatorRest | classCreatorRest)

    Generated by ANTLR; ATN state numbers are load-bearing.
    """
    localctx = JavaParser.CreatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 176, self.RULE_creator)
    try:
        self.state = 1250
        token = self._input.LA(1)
        if token in [JavaParser.LT]:
            # Explicit type arguments: <T> createdName classCreatorRest
            self.enterOuterAlt(localctx, 1)
            self.state = 1241
            self.nonWildcardTypeArguments()
            self.state = 1242
            self.createdName()
            self.state = 1243
            self.classCreatorRest()
        elif token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT, JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1245
            self.createdName()
            self.state = 1248
            # '[' starts an array creator, '(' a class instance creator.
            token = self._input.LA(1)
            if token in [JavaParser.LBRACK]:
                self.state = 1246
                self.arrayCreatorRest()
            elif token in [JavaParser.LPAREN]:
                self.state = 1247
                self.classCreatorRest()
            else:
                raise NoViableAltException(self)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CreatedNameContext(ParserRuleContext):
    """Parse-tree node for the `createdName` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self, i:int=None):
        # i is None -> all IDENTIFIER tokens; otherwise the i-th one.
        if i is None:
            return self.getTokens(JavaParser.IDENTIFIER)
        return self.getToken(JavaParser.IDENTIFIER, i)

    def typeArgumentsOrDiamond(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(JavaParser.TypeArgumentsOrDiamondContext)
        return self.getTypedRuleContext(JavaParser.TypeArgumentsOrDiamondContext, i)

    def primitiveType(self):
        return self.getTypedRuleContext(JavaParser.PrimitiveTypeContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_createdName

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterCreatedName", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitCreatedName", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitCreatedName", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def createdName(self):
    """Parse the `createdName` rule:
    IDENTIFIER typeArgumentsOrDiamond? ('.' IDENTIFIER typeArgumentsOrDiamond?)*
    | primitiveType

    Generated by ANTLR; ATN state numbers are load-bearing.
    """
    localctx = JavaParser.CreatedNameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 178, self.RULE_createdName)
    self._la = 0 # Token type
    try:
        self.state = 1267
        token = self._input.LA(1)
        if token in [JavaParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1252
            self.match(JavaParser.IDENTIFIER)
            # typeArgumentsOrDiamond?
            self.state = 1254
            _la = self._input.LA(1)
            if _la==JavaParser.LT:
                self.state = 1253
                self.typeArgumentsOrDiamond()

            # ('.' IDENTIFIER typeArgumentsOrDiamond?)*
            self.state = 1263
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==JavaParser.DOT:
                self.state = 1256
                self.match(JavaParser.DOT)
                self.state = 1257
                self.match(JavaParser.IDENTIFIER)
                self.state = 1259
                _la = self._input.LA(1)
                if _la==JavaParser.LT:
                    self.state = 1258
                    self.typeArgumentsOrDiamond()

                self.state = 1265
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        elif token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1266
            self.primitiveType()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InnerCreatorContext(ParserRuleContext):
    """Parse-tree node for the `innerCreator` rule (`outer.new Inner(...)`)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def classCreatorRest(self):
        return self.getTypedRuleContext(JavaParser.ClassCreatorRestContext, 0)

    def nonWildcardTypeArgumentsOrDiamond(self):
        return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsOrDiamondContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_innerCreator

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterInnerCreator", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitInnerCreator", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitInnerCreator", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def innerCreator(self):
    """Parse the `innerCreator` rule:
    IDENTIFIER nonWildcardTypeArgumentsOrDiamond? classCreatorRest

    Generated by ANTLR; ATN state numbers are load-bearing.
    """
    localctx = JavaParser.InnerCreatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 180, self.RULE_innerCreator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1269
        self.match(JavaParser.IDENTIFIER)
        # nonWildcardTypeArgumentsOrDiamond?
        self.state = 1271
        _la = self._input.LA(1)
        if _la==JavaParser.LT:
            self.state = 1270
            self.nonWildcardTypeArgumentsOrDiamond()

        self.state = 1273
        self.classCreatorRest()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArrayCreatorRestContext(ParserRuleContext):
    """Parse-tree node for the `arrayCreatorRest` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def arrayInitializer(self):
        return self.getTypedRuleContext(JavaParser.ArrayInitializerContext, 0)

    def expression(self, i:int=None):
        # i is None -> all expression children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.ExpressionContext)
        return self.getTypedRuleContext(JavaParser.ExpressionContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_arrayCreatorRest

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterArrayCreatorRest", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitArrayCreatorRest", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitArrayCreatorRest", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def arrayCreatorRest(self):
    """Parse the `arrayCreatorRest` rule:
    '[' ( ']' ('[' ']')* arrayInitializer
        | expression ']' ('[' expression ']')* ('[' ']')* )

    Generated by ANTLR; ATN state numbers and adaptive-prediction decision
    numbers (156, 157) are load-bearing.
    """
    localctx = JavaParser.ArrayCreatorRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 182, self.RULE_arrayCreatorRest)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1275
        self.match(JavaParser.LBRACK)
        self.state = 1303
        token = self._input.LA(1)
        if token in [JavaParser.RBRACK]:
            # '[]'... form with an explicit arrayInitializer.
            self.state = 1276
            self.match(JavaParser.RBRACK)
            self.state = 1281
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==JavaParser.LBRACK:
                self.state = 1277
                self.match(JavaParser.LBRACK)
                self.state = 1278
                self.match(JavaParser.RBRACK)
                self.state = 1283
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            self.state = 1284
            self.arrayInitializer()
        elif token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.NEW, JavaParser.SHORT, JavaParser.SUPER, JavaParser.THIS, JavaParser.VOID, JavaParser.DECIMAL_LITERAL, JavaParser.HEX_LITERAL, JavaParser.OCT_LITERAL, JavaParser.BINARY_LITERAL, JavaParser.FLOAT_LITERAL, JavaParser.HEX_FLOAT_LITERAL, JavaParser.BOOL_LITERAL, JavaParser.CHAR_LITERAL, JavaParser.STRING_LITERAL, JavaParser.NULL_LITERAL, JavaParser.LPAREN, JavaParser.LT, JavaParser.BANG, JavaParser.TILDE, JavaParser.INC, JavaParser.DEC, JavaParser.ADD, JavaParser.SUB, JavaParser.AT, JavaParser.IDENTIFIER]:
            # Sized form: '[' expression ']' ('[' expression ']')* ('[' ']')*
            self.state = 1285
            self.expression(0)
            self.state = 1286
            self.match(JavaParser.RBRACK)
            self.state = 1293
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,156,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 1287
                    self.match(JavaParser.LBRACK)
                    self.state = 1288
                    self.expression(0)
                    self.state = 1289
                    self.match(JavaParser.RBRACK)
                self.state = 1295
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,156,self._ctx)

            self.state = 1300
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,157,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 1296
                    self.match(JavaParser.LBRACK)
                    self.state = 1297
                    self.match(JavaParser.RBRACK)
                self.state = 1302
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,157,self._ctx)

        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ClassCreatorRestContext(ParserRuleContext):
    """Parse-tree node for the `classCreatorRest` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def arguments(self):
        return self.getTypedRuleContext(JavaParser.ArgumentsContext, 0)

    def classBody(self):
        return self.getTypedRuleContext(JavaParser.ClassBodyContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_classCreatorRest

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterClassCreatorRest", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitClassCreatorRest", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitClassCreatorRest", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def classCreatorRest(self):
    """Parse the `classCreatorRest` rule: arguments classBody?

    Generated by ANTLR; decision 159 predicts the optional anonymous
    class body.
    """
    localctx = JavaParser.ClassCreatorRestContext(self, self._ctx, self.state)
    self.enterRule(localctx, 184, self.RULE_classCreatorRest)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1305
        self.arguments()
        self.state = 1307
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,159,self._ctx)
        if la_ == 1:
            self.state = 1306
            self.classBody()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExplicitGenericInvocationContext(ParserRuleContext):
    """Parse-tree node for the `explicitGenericInvocation` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def nonWildcardTypeArguments(self):
        return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsContext, 0)

    def explicitGenericInvocationSuffix(self):
        return self.getTypedRuleContext(JavaParser.ExplicitGenericInvocationSuffixContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_explicitGenericInvocation

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterExplicitGenericInvocation", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitExplicitGenericInvocation", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitExplicitGenericInvocation", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def explicitGenericInvocation(self):
    """Parse the `explicitGenericInvocation` rule:
    nonWildcardTypeArguments explicitGenericInvocationSuffix
    """
    localctx = JavaParser.ExplicitGenericInvocationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 186, self.RULE_explicitGenericInvocation)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1309
        self.nonWildcardTypeArguments()
        self.state = 1310
        self.explicitGenericInvocationSuffix()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeArgumentsOrDiamondContext(ParserRuleContext):
    """Parse-tree node for the `typeArgumentsOrDiamond` rule (`<>` or `<T,...>`)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeArguments(self):
        return self.getTypedRuleContext(JavaParser.TypeArgumentsContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_typeArgumentsOrDiamond

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeArgumentsOrDiamond", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeArgumentsOrDiamond", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeArgumentsOrDiamond", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def typeArgumentsOrDiamond(self):
    """Parse the `typeArgumentsOrDiamond` rule: '<' '>' | typeArguments

    Generated by ANTLR; decision 160 separates the diamond from real
    type arguments.
    """
    localctx = JavaParser.TypeArgumentsOrDiamondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 188, self.RULE_typeArgumentsOrDiamond)
    try:
        self.state = 1315
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,160,self._ctx)
        if la_ == 1:
            # Diamond operator: '<>'
            self.enterOuterAlt(localctx, 1)
            self.state = 1312
            self.match(JavaParser.LT)
            self.state = 1313
            self.match(JavaParser.GT)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1314
            self.typeArguments()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NonWildcardTypeArgumentsOrDiamondContext(ParserRuleContext):
    """Parse-tree node for the `nonWildcardTypeArgumentsOrDiamond` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def nonWildcardTypeArguments(self):
        return self.getTypedRuleContext(JavaParser.NonWildcardTypeArgumentsContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_nonWildcardTypeArgumentsOrDiamond

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterNonWildcardTypeArgumentsOrDiamond", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitNonWildcardTypeArgumentsOrDiamond", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitNonWildcardTypeArgumentsOrDiamond", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def nonWildcardTypeArgumentsOrDiamond(self):
    """Parse the `nonWildcardTypeArgumentsOrDiamond` rule:
    '<' '>' | nonWildcardTypeArguments
    """
    localctx = JavaParser.NonWildcardTypeArgumentsOrDiamondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 190, self.RULE_nonWildcardTypeArgumentsOrDiamond)
    try:
        self.state = 1320
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,161,self._ctx)
        if la_ == 1:
            # Diamond operator: '<>'
            self.enterOuterAlt(localctx, 1)
            self.state = 1317
            self.match(JavaParser.LT)
            self.state = 1318
            self.match(JavaParser.GT)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1319
            self.nonWildcardTypeArguments()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NonWildcardTypeArgumentsContext(ParserRuleContext):
    """Parse-tree node for the `nonWildcardTypeArguments` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeList(self):
        return self.getTypedRuleContext(JavaParser.TypeListContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_nonWildcardTypeArguments

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterNonWildcardTypeArguments", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitNonWildcardTypeArguments", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitNonWildcardTypeArguments", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def nonWildcardTypeArguments(self):
    """Parse the `nonWildcardTypeArguments` rule: '<' typeList '>'"""
    localctx = JavaParser.NonWildcardTypeArgumentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 192, self.RULE_nonWildcardTypeArguments)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1322
        self.match(JavaParser.LT)
        self.state = 1323
        self.typeList()
        self.state = 1324
        self.match(JavaParser.GT)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeListContext(ParserRuleContext):
    """Parse-tree node for the `typeList` rule (comma-separated types)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeType(self, i:int=None):
        # i is None -> all typeType children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.TypeTypeContext)
        return self.getTypedRuleContext(JavaParser.TypeTypeContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_typeList

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeList", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeList", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeList", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def typeList(self):
    """Parse the `typeList` rule: typeType (',' typeType)*"""
    localctx = JavaParser.TypeListContext(self, self._ctx, self.state)
    self.enterRule(localctx, 194, self.RULE_typeList)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1326
        self.typeType()
        # (',' typeType)*
        self.state = 1331
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==JavaParser.COMMA:
            self.state = 1327
            self.match(JavaParser.COMMA)
            self.state = 1328
            self.typeType()
            self.state = 1333
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeTypeContext(ParserRuleContext):
    """Parse-tree node for the `typeType` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def classOrInterfaceType(self):
        return self.getTypedRuleContext(JavaParser.ClassOrInterfaceTypeContext, 0)

    def primitiveType(self):
        return self.getTypedRuleContext(JavaParser.PrimitiveTypeContext, 0)

    def annotation(self):
        return self.getTypedRuleContext(JavaParser.AnnotationContext, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_typeType

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeType", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeType", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeType", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def typeType(self):
    """Parse the `typeType` rule:
    annotation? (classOrInterfaceType | primitiveType) ('[' ']')*

    Generated by ANTLR; decision 165 predicts the array-dimension loop.
    """
    localctx = JavaParser.TypeTypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 196, self.RULE_typeType)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        # annotation?
        self.state = 1335
        _la = self._input.LA(1)
        if _la==JavaParser.AT:
            self.state = 1334
            self.annotation()

        self.state = 1339
        token = self._input.LA(1)
        if token in [JavaParser.IDENTIFIER]:
            self.state = 1337
            self.classOrInterfaceType()
        elif token in [JavaParser.BOOLEAN, JavaParser.BYTE, JavaParser.CHAR, JavaParser.DOUBLE, JavaParser.FLOAT, JavaParser.INT, JavaParser.LONG, JavaParser.SHORT]:
            self.state = 1338
            self.primitiveType()
        else:
            raise NoViableAltException(self)
        # ('[' ']')*
        self.state = 1345
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,165,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1341
                self.match(JavaParser.LBRACK)
                self.state = 1342
                self.match(JavaParser.RBRACK)
            self.state = 1347
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,165,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PrimitiveTypeContext(ParserRuleContext):
    """Parse-tree node for the `primitiveType` rule (Java's eight primitives)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def BOOLEAN(self):
        return self.getToken(JavaParser.BOOLEAN, 0)

    def CHAR(self):
        return self.getToken(JavaParser.CHAR, 0)

    def BYTE(self):
        return self.getToken(JavaParser.BYTE, 0)

    def SHORT(self):
        return self.getToken(JavaParser.SHORT, 0)

    def INT(self):
        return self.getToken(JavaParser.INT, 0)

    def LONG(self):
        return self.getToken(JavaParser.LONG, 0)

    def FLOAT(self):
        return self.getToken(JavaParser.FLOAT, 0)

    def DOUBLE(self):
        return self.getToken(JavaParser.DOUBLE, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_primitiveType

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterPrimitiveType", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitPrimitiveType", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitPrimitiveType", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def primitiveType(self):
    """Parse the `primitiveType` rule: one of the eight primitive-type keywords.

    The bitmask test checks membership in the primitive-keyword token set
    in a single comparison (all these token types are < 64).
    """
    localctx = JavaParser.PrimitiveTypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 198, self.RULE_primitiveType)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1348
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.SHORT))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeArgumentsContext(ParserRuleContext):
    """Parse-tree node for the `typeArguments` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeArgument(self, i:int=None):
        # i is None -> all typeArgument children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(JavaParser.TypeArgumentContext)
        return self.getTypedRuleContext(JavaParser.TypeArgumentContext, i)

    def getRuleIndex(self):
        return JavaParser.RULE_typeArguments

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterTypeArguments", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitTypeArguments", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitTypeArguments", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def typeArguments(self):
    """Parse the `typeArguments` rule: '<' typeArgument (',' typeArgument)* '>'"""
    localctx = JavaParser.TypeArgumentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 200, self.RULE_typeArguments)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1350
        self.match(JavaParser.LT)
        self.state = 1351
        self.typeArgument()
        # (',' typeArgument)*
        self.state = 1356
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==JavaParser.COMMA:
            self.state = 1352
            self.match(JavaParser.COMMA)
            self.state = 1353
            self.typeArgument()
            self.state = 1358
            self._errHandler.sync(self)
            _la = self._input.LA(1)

        self.state = 1359
        self.match(JavaParser.GT)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SuperSuffixContext(ParserRuleContext):
    """Parse-tree node for the `superSuffix` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def arguments(self):
        return self.getTypedRuleContext(JavaParser.ArgumentsContext, 0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def getRuleIndex(self):
        return JavaParser.RULE_superSuffix

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterSuperSuffix", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitSuperSuffix", None)
        if handler is not None:
            handler(self)

    def accept(self, visitor:ParseTreeVisitor):
        visit = getattr(visitor, "visitSuperSuffix", None)
        if visit is not None:
            return visit(self)
        return visitor.visitChildren(self)
def superSuffix(self):
    """Parse the `superSuffix` rule: arguments | '.' IDENTIFIER arguments?

    Generated by ANTLR; decision 167 predicts the optional trailing
    argument list after `.IDENTIFIER`.
    """
    localctx = JavaParser.SuperSuffixContext(self, self._ctx, self.state)
    self.enterRule(localctx, 202, self.RULE_superSuffix)
    try:
        self.state = 1367
        token = self._input.LA(1)
        if token in [JavaParser.LPAREN]:
            # super(...) constructor call arguments.
            self.enterOuterAlt(localctx, 1)
            self.state = 1361
            self.arguments()
        elif token in [JavaParser.DOT]:
            # super.member or super.method(...)
            self.enterOuterAlt(localctx, 2)
            self.state = 1362
            self.match(JavaParser.DOT)
            self.state = 1363
            self.match(JavaParser.IDENTIFIER)
            self.state = 1365
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,167,self._ctx)
            if la_ == 1:
                self.state = 1364
                self.arguments()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExplicitGenericInvocationSuffixContext(ParserRuleContext):
    """Auto-generated ANTLR parse-tree context for `explicitGenericInvocationSuffix`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def SUPER(self):
        return self.getToken(JavaParser.SUPER, 0)

    def superSuffix(self):
        return self.getTypedRuleContext(JavaParser.SuperSuffixContext,0)

    def IDENTIFIER(self):
        return self.getToken(JavaParser.IDENTIFIER, 0)

    def arguments(self):
        return self.getTypedRuleContext(JavaParser.ArgumentsContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_explicitGenericInvocationSuffix

    def enterRule(self, listener:ParseTreeListener):
        # Guarded dispatch so partial listeners are supported.
        if hasattr( listener, "enterExplicitGenericInvocationSuffix" ):
            listener.enterExplicitGenericInvocationSuffix(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExplicitGenericInvocationSuffix" ):
            listener.exitExplicitGenericInvocationSuffix(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitExplicitGenericInvocationSuffix" ):
            return visitor.visitExplicitGenericInvocationSuffix(self)
        else:
            return visitor.visitChildren(self)
def explicitGenericInvocationSuffix(self):
    """Parse rule: explicitGenericInvocationSuffix : SUPER superSuffix | IDENTIFIER arguments ;"""
    localctx = JavaParser.ExplicitGenericInvocationSuffixContext(self, self._ctx, self.state)
    self.enterRule(localctx, 204, self.RULE_explicitGenericInvocationSuffix)
    try:
        self.state = 1373
        token = self._input.LA(1)
        if token in [JavaParser.SUPER]:
            # Alternative 1: `super` followed by a super-suffix.
            self.enterOuterAlt(localctx, 1)
            self.state = 1369
            self.match(JavaParser.SUPER)
            self.state = 1370
            self.superSuffix()
        elif token in [JavaParser.IDENTIFIER]:
            # Alternative 2: a named invocation with arguments.
            self.enterOuterAlt(localctx, 2)
            self.state = 1371
            self.match(JavaParser.IDENTIFIER)
            self.state = 1372
            self.arguments()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArgumentsContext(ParserRuleContext):
    """Auto-generated ANTLR parse-tree context for the `arguments` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expressionList(self):
        # Child expression-list context; absent for an empty argument list.
        return self.getTypedRuleContext(JavaParser.ExpressionListContext,0)

    def getRuleIndex(self):
        return JavaParser.RULE_arguments

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterArguments" ):
            listener.enterArguments(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitArguments" ):
            listener.exitArguments(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitArguments" ):
            return visitor.visitArguments(self)
        else:
            return visitor.visitChildren(self)
def arguments(self):
    """Parse rule: arguments : '(' expressionList? ')' ;"""
    localctx = JavaParser.ArgumentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 206, self.RULE_arguments)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1375
        self.match(JavaParser.LPAREN)
        self.state = 1377
        _la = self._input.LA(1)
        # Generated membership test for FIRST(expressionList): two 64-bit
        # token bitmasks decide whether an expression list is present.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << JavaParser.BOOLEAN) | (1 << JavaParser.BYTE) | (1 << JavaParser.CHAR) | (1 << JavaParser.DOUBLE) | (1 << JavaParser.FLOAT) | (1 << JavaParser.INT) | (1 << JavaParser.LONG) | (1 << JavaParser.NEW) | (1 << JavaParser.SHORT) | (1 << JavaParser.SUPER) | (1 << JavaParser.THIS) | (1 << JavaParser.VOID) | (1 << JavaParser.DECIMAL_LITERAL) | (1 << JavaParser.HEX_LITERAL) | (1 << JavaParser.OCT_LITERAL) | (1 << JavaParser.BINARY_LITERAL) | (1 << JavaParser.FLOAT_LITERAL) | (1 << JavaParser.HEX_FLOAT_LITERAL) | (1 << JavaParser.BOOL_LITERAL) | (1 << JavaParser.CHAR_LITERAL) | (1 << JavaParser.STRING_LITERAL) | (1 << JavaParser.NULL_LITERAL) | (1 << JavaParser.LPAREN))) != 0) or ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (JavaParser.LT - 72)) | (1 << (JavaParser.BANG - 72)) | (1 << (JavaParser.TILDE - 72)) | (1 << (JavaParser.INC - 72)) | (1 << (JavaParser.DEC - 72)) | (1 << (JavaParser.ADD - 72)) | (1 << (JavaParser.SUB - 72)) | (1 << (JavaParser.AT - 72)) | (1 << (JavaParser.IDENTIFIER - 72)))) != 0):
            self.state = 1376
            self.expressionList()
        self.state = 1379
        self.match(JavaParser.RPAREN)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    """Route a semantic-predicate evaluation to the handler for *ruleIndex*."""
    # Lazily build the rule-index -> handler table on first use.
    # (Rule 82 is the left-recursive expression rule.)
    if self._predicates == None:
        self._predicates = dict()
        self._predicates[82] = self.expression_sempred
    pred = self._predicates.get(ruleIndex, None)
    if pred is None:
        raise Exception("No predicate with index:" + str(ruleIndex))
    else:
        return pred(localctx, predIndex)
def expression_sempred(self, localctx:ExpressionContext, predIndex:int):
    """Evaluate the precedence predicate guarding one expression alternative."""
    # Each predicate index maps to a fixed precedence level in the
    # left-recursive expression rule; dispatch via a table rather than
    # a 17-branch if-chain.
    precedence_for_index = {
        0: 17, 1: 16, 2: 15, 3: 14,
        4: 12, 5: 11, 6: 10, 7: 9,
        8: 8, 9: 7, 10: 6, 11: 5,
        12: 25, 13: 24, 14: 20, 15: 13,
        16: 3,
    }
    level = precedence_for_index.get(predIndex)
    if level is not None:
        return self.precpred(self._ctx, level)
|
python
|
"""
tests module
"""
import os
import sys
import sure
ROOT_DIR = os.path.join(os.path.dirname(__file__), "../..")
sys.path.append(ROOT_DIR)
|
python
|
from .binonobj import BinONObj
from .ioutil import MustRead
class IntObj(BinONObj):
    """Variable-length codec for signed integers in the BinON format.

    The high bits of the first byte select the encoded width:
      0xxxxxxx -> 7 payload bits in 1 byte
      10xxxxxx -> 14 payload bits in 2 bytes
      110xxxxx -> 29 payload bits in 4 bytes
      1110xxxx -> 60 payload bits in 8 bytes
      0xf0     -> full 64-bit value in the following 8 bytes
      0xf1     -> arbitrary width: a UInt byte count, then that many bytes
    Signed values are stored in two's complement within the payload width.
    """

    kBaseType = 2  # BinON base type code for integers

    @classmethod
    def DecodeData(cls, inF, asObj=False):
        # First byte selects the width; m is the payload mask, n the total
        # number of bytes in the encoded field (including byte0 when it
        # carries payload bits).
        data = bytearray(MustRead(inF, 1))
        byte0 = data[0]
        if (byte0 & 0x80) == 0:
            m = 0x7f
            n = 1
        elif (byte0 & 0x40) == 0:
            m = 0x3fff
            n = 2
        elif (byte0 & 0x20) == 0:
            m = 0x1fffffff
            n = 4
        elif (byte0 & 0x10) == 0:
            m = 0x0fffffff_ffffffff
            n = 8
        elif (byte0 & 0x01) == 0:
            # 0xf0 escape: byte0 carries no payload; 8 data bytes follow.
            m = 0xffffffff_ffffffff
            n = 8
            data = bytearray()
        else:
            # 0xf1 escape: an explicit byte count follows, encoded as a UInt.
            # (UInt is defined later in this module; resolved at call time.)
            n = UInt.DecodeData(inF)
            m = (1 << (n << 3)) - 1
            data = bytearray()
        # Read whatever payload bytes have not been consumed yet.
        n -= len(data)
        if n > 0:
            data.extend(MustRead(inF, n))
        v = int.from_bytes(data, cls._kEndian) & m
        # Sign-extend: values in the upper half of the range are negative.
        m += 1
        if v >= m >> 1:
            v -= m
        return cls(v) if asObj else v

    @classmethod
    def _OptimalObj(cls, value, inList):
        # Non-negative values can use the more compact unsigned codec.
        # Zero is only promoted to UInt inside lists.
        isUInt = value > 0 or (inList and value == 0)
        if cls._OptimizeLog() and isUInt:
            print(
                cls._IndentStr() + "Optimized to: UInt",
                file=cls._OptimizeLog()
            )
        return UInt(value) if isUInt else cls(value)

    def __init__(self, value=0):
        super().__init__(value)

    def encodeData(self, outF):
        # Pick the narrowest form that holds the value. m carries the
        # width-tag bits to OR into the payload, n is the byte count, and i
        # is the tag width in bits (payload bits = 8*n - i).
        if -0x40 <= self.value < 0x40:
            m = 0x00
            n = 1
            i = 1
        elif -0x2000 <= self.value < 0x2000:
            m = 0x8000
            n = 2
            i = 2
        elif -0x10000000 <= self.value < 0x10000000:
            m = 0xC0000000
            n = 4
            i = 3
        elif -0x08000000_00000000 <= self.value < 0x08000000_00000000:
            m = 0xE0000000_00000000
            n = 8
            i = 4
        elif -0x80000000_00000000 <= self.value < 0x80000000_00000000:
            # 0xf0 escape: full 64-bit two's-complement payload.
            m = 0
            n = 8
            i = 0
            outF.write(b"\xf0")
        else:
            # 0xf1 escape: arbitrary width, length-prefixed as a UInt.
            m = 0
            # NOTE(review): this v is overwritten below before being read --
            # appears to be dead code; confirm before removing.
            v = -self.value - 1 if self.value < 0 else self.value
            # +8 leaves room for a sign bit, rounded up to whole bytes.
            n = self.value.bit_length() + 8 >> 3
            i = 0
            outF.write(b"\xf1")
            UInt(n).encodeData(outF)
        # Mask to the payload width (two's complement for negatives) and
        # merge in the width-tag bits.
        v = self.value & ((1 << ((n << 3) - i)) - 1)
        v |= m
        outF.write(v.to_bytes(n, self._kEndian))
class UInt(IntObj):
    """Variable-length codec for unsigned integers (same width-tag scheme as
    IntObj, but with no sign extension on decode)."""

    kSubtype = 2  # BinON subtype code distinguishing unsigned ints

    @classmethod
    def DecodeData(cls, inF, asObj=False):
        # First byte selects the width; m is the payload mask, n the total
        # byte count of the encoded field.
        data = bytearray(MustRead(inF, 1))
        byte0 = data[0]
        if (byte0 & 0x80) == 0:
            m = 0x7f
            n = 1
        elif (byte0 & 0x40) == 0:
            m = 0x3fff
            n = 2
        elif (byte0 & 0x20) == 0:
            m = 0x1fffffff
            n = 4
        elif (byte0 & 0x10) == 0:
            m = 0x0fffffff_ffffffff
            n = 8
        elif (byte0 & 0x01) == 0:
            # 0xf0 escape: byte0 carries no payload; 8 data bytes follow.
            m = 0xffffffff_ffffffff
            n = 8
            data = bytearray()
        else:
            # 0xf1 escape: the byte count itself is read recursively as a UInt.
            n = cls.DecodeData(inF)
            m = (1 << (n << 3)) - 1
            data = bytearray()
        # Read whatever payload bytes have not been consumed yet.
        n -= len(data)
        if n > 0:
            data.extend(MustRead(inF, n))
        v = int.from_bytes(data, cls._kEndian) & m
        return cls(v) if asObj else v

    def encodeData(self, outF):
        # Pick the narrowest form; m carries the width-tag bits to OR in.
        if self.value < 0x80:
            m = 0x00
            n = 1
        elif self.value < 0x4000:
            m = 0x8000
            n = 2
        elif self.value < 0x20000000:
            m = 0xC0000000
            n = 4
        elif self.value < 0x10000000_00000000:
            m = 0xE0000000_00000000
            n = 8
        elif self.value < 0x1_00000000_00000000:
            # 0xf0 escape: plain 64-bit payload.
            m = 0
            n = 8
            outF.write(b"\xf0")
        else:
            # 0xf1 escape: arbitrary width, length-prefixed with a UInt.
            m = 0
            n = (self.value.bit_length() + 7) >> 3
            outF.write(b"\xf1")
            UInt(n).encodeData(outF)
        outF.write((m | self.value).to_bytes(n, self._kEndian))
# Register IntObj as the codec for Python's int, with UInt as its subtype.
BinONObj._InitSubcls(IntObj, [UInt], [int])
|
python
|
from .genshin import get_user_stat

# Public API of this package.
__all__ = ["get_user_stat"]
|
python
|
from bokeh.core.properties import (
    Any, Bool, Dict, Either, Instance, List, Null, Nullable, String
)
from bokeh.models import ColumnDataSource, HTMLBox


class Perspective(HTMLBox):
    """Bokeh model wrapping the FINOS perspective-viewer web component.

    Nullable viewer options mirror the perspective-viewer JS API; `source`
    supplies the table data.
    """

    aggregates = Either(Dict(String, Any), Null())

    split_by = Either(List(String), Null())

    # Consistency fix: the original mixed the bare `Null` class with `Null()`
    # instances; every nullable property now uses `Null()` uniformly.
    columns = Either(List(Either(String, Null())), Null())

    expressions = Either(List(String), Null())

    editable = Nullable(Bool())

    filters = Either(List(Any), Null())

    plugin = String()

    plugin_config = Either(Dict(String, Any), Null())

    group_by = Either(List(String), Null())

    selectable = Nullable(Bool())

    schema = Dict(String, String)

    sort = Either(List(List(String)), Null())

    # ColumnDataSource holding the rows displayed by the viewer.
    source = Instance(ColumnDataSource)

    toggle_config = Bool(True)

    theme = String()

    # pylint: disable=line-too-long
    __javascript__ = [
        "https://unpkg.com/@finos/[email protected]/dist/umd/perspective.js",
        "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer.js",
        "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer-datagrid.js",
        "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer-d3fc.js",
    ]

    __js_skip__ = {
        "perspective": __javascript__,
    }

    __js_require__ = {
        "paths": {
            "perspective": "https://unpkg.com/@finos/[email protected]/dist/umd/perspective",
            "perspective-viewer": "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer",
            "perspective-viewer-datagrid": "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer-datagrid",
            "perspective-viewer-d3fc": "https://unpkg.com/@finos/[email protected]/dist/umd/perspective-viewer-d3fc",
        },
        "exports": {
            "perspective": "perspective",
            "perspective-viewer": "PerspectiveViewer",
            "perspective-viewer-datagrid": "PerspectiveViewerDatagrid",
            "perspective-viewer-d3fc": "PerspectiveViewerD3fc",
        },
    }

    __css__ = ["https://unpkg.com/@finos/[email protected]/dist/css/themes.css"]
|
python
|
from validator.rules import Base64
def test_base64_01():
    """Well-formed base64 strings must pass the check."""
    valid_samples = (
        "c2hPd1MgaSBMSWtFOg==",
        "U09VVEggUEFSSw==",
        "QkxBQ0sgTUlSUk9S",
        "RkFSR08=",
        "QnJlYUtJTkcgQmFkIA==",
    )
    for sample in valid_samples:
        assert Base64().check(sample)
def test_base64_02():
    """Malformed strings must be rejected by the check."""
    invalid_samples = (
        "hbsdf",
        "!@#",
        "bfjhsdf HGHG &^&&",
        "29i03r09j....",
        "olgak9999",
    )
    for sample in invalid_samples:
        assert not Base64().check(sample)
|
python
|
"""Free fall: compute the final velocity of an object dropped from a height.

Initial velocity is 0 m/s; from v^2 = u^2 + 2*g*h the final velocity is
v = sqrt(2*g*h).
"""
import math

# Acceleration due to gravity, in m/s^2.
GRAVITY = 9.8

# Ask the user for the drop height, in metres.
h = float(input("Enter the height = "))
v = math.sqrt(2 * GRAVITY * h)
print("Final Velocity = ", v)
|
python
|
"""
"""
import os
import shutil
from pathlib import Path
from typing import List, Optional
from TestSuite.conf_json import ConfJSON
from TestSuite.global_secrets import GlobalSecrets
from TestSuite.json_based import JSONBased
from TestSuite.pack import Pack
class Repo:
    """A class that mocks a content repo

    Note:
        Do not include the `self` parameter in the ``Args`` section.

    Args:
        tmpdir: A Path to the root of the repo

    Attributes:
        path: A path to the content pack.
        secrets: Exception error code.
        packs: A list of created packs
    """

    def __init__(self, tmpdir: Path):
        # Build the skeleton of a content repo: Packs/, Tests/, secrets,
        # conf.json, an empty content descriptor and an empty id_set.
        self.packs: List[Pack] = list()
        self._tmpdir = tmpdir
        self._packs_path = tmpdir / 'Packs'
        self._packs_path.mkdir()
        self.path = str(self._tmpdir)
        # Initiate ./Tests/ dir
        self._test_dir = tmpdir / 'Tests'
        self._test_dir.mkdir()
        # Secrets
        self.secrets = GlobalSecrets(self._test_dir)
        self.secrets.write_secrets()
        self.global_secrets_path = self.secrets.path
        # Conf.json
        self.conf = ConfJSON(self._test_dir, 'conf.json', '')
        self.conf.write_json()
        # Empty content descriptor at the repo root.
        self.content_descriptor = JSONBased(self._tmpdir, 'content-descriptor', '')
        self.content_descriptor.write_json({})
        # id_set with every entity category pre-declared but empty.
        self.id_set = JSONBased(self._test_dir, 'id_set', '')
        self.id_set.write_json({
            'scripts': [],
            'playbooks': [],
            'integrations': [],
            'TestPlaybooks': [],
            'Classifiers': [],
            'Dashboards': [],
            'IncidentFields': [],
            'IncidentTypes': [],
            'IndicatorFields': [],
            'IndicatorTypes': [],
            'Layouts': [],
            'Reports': [],
            'Widgets': [],
            'Mappers': [],
        })

    def __del__(self):
        # Best-effort cleanup of the on-disk repo when the object is collected.
        shutil.rmtree(self.path, ignore_errors=True)

    def setup_one_pack(self, name) -> Pack:
        """Sets up a new pack in the repo, and includes one per each content entity.

        Args:
            name (string): Name of the desired pack.

        Returns:
            Pack. The pack object created.
        """
        pack = self.create_pack(name)

        # One script and one integration with default YAML content.
        script = pack.create_script(f'{name}_script')
        script.create_default_script()
        script.yml.update({'commonfields': {'id': f'{name}_script'}})
        script.yml.update({'name': f'{name}_script'})
        script.yml.update({'display': f'{name}_script'})

        integration = pack.create_integration(f'{name}_integration')
        integration.create_default_integration()
        integration.yml.update({'commonfields': {'id': f'{name}_integration'}})
        integration.yml.update({'name': f'{name}_integration'})
        integration.yml.update({'display': f'{name}_integration'})
        integration_content = integration.yml.read_dict()
        integration_content['script']['commands'][0]['name'] = f'command_{name}_integration'
        integration.yml.write_dict(integration_content)

        # JSON-based entities: each gets an id/name plus the minimal extra
        # fields its schema expects.
        classifier = pack.create_classifier(f'{name}_classifier')
        classifier.write_json({'id': f'{name} - classifier'})
        classifier.update({'name': f'{name} - classifier'})
        classifier.update({'transformer': ''})
        classifier.update({'keyTypeMap': {}})
        classifier.update({'type': 'classification'})

        layout = pack.create_layout(f'{name}_layout')
        layout.write_json({'id': f'{name} - layout'})
        layout.update({'name': f'{name} - layout'})
        layout.update({'kind': ''})

        layoutcontainer = pack.create_layoutcontainer(f'{name}_layoutcontainer')
        layoutcontainer.write_json({'id': f'{name} - layoutcontainer'})
        layoutcontainer.update({'group': f'{name} - layoutcontainer'})
        layoutcontainer.update({'detailsV2': {}})

        mapper = pack.create_mapper(f'{name}_mapper')
        mapper.write_json({'id': f'{name} - mapper'})
        mapper.update({'name': f'{name} - mapper'})
        mapper.update({'mapping': {}})
        mapper.update({'type': 'mapping-incoming'})  # can also be mapping-outgoing, but this is the more common usage

        incident_type = pack.create_incident_type(f'{name}_incident-type')
        incident_type.write_json({'id': f'{name} - incident_type'})
        incident_type.update({'name': f'{name} - incident_type'})
        incident_type.update({'preProcessingScript': ''})
        incident_type.update({'color': 'test'})

        incident_field = pack.create_incident_field(f'{name}_incident-field')
        incident_field.write_json({'id': f'incident_{name} - incident_field'})
        incident_field.update({'name': f'incident_{name} - incident_field'})

        indicator_type = pack.create_indicator_type(f'{name}_indicator-type')
        indicator_type.write_json({'id': f'{name} - indicator_type'})
        indicator_type.update({'name': f'{name} - indicator_type'})
        indicator_type.update({'regex': ''})

        indicator_field = pack.create_indicator_field(f'{name}_indicator-field')
        indicator_field.write_json({'id': f'indicator_{name} - indicator_field'})
        indicator_field.update({'name': f'indicator_{name} - indicator_field'})

        dashboard = pack.create_dashboard(f'{name}_dashboard')
        dashboard.write_json({'id': f'{name} - dashboard'})
        dashboard.update({'name': f'{name} - dashboard'})
        dashboard.update({'layout': ''})

        report = pack.create_report(f'{name}_report')
        report.write_json({'id': f'{name} - report'})
        report.update({'name': f'{name} - report'})
        report.update({'orientation': ''})

        widget = pack.create_widget(f'{name}_widget')
        widget.write_json({'id': f'{name} - widget'})
        widget.update({'name': f'{name} - widget'})
        widget.update({'widgetType': ''})

        # Playbook and test playbook with default content.
        playbook = pack.create_playbook(f'{name}_playbook')
        playbook.create_default_playbook()
        playbook.yml.update({'id': f'{name}_playbook'})
        playbook.yml.update({'name': f'{name}_playbook'})

        test_playbook = pack.create_test_playbook(f'{name}_test_playbook')
        test_playbook.create_default_playbook()
        test_playbook.yml.update({'id': f'{name}_test_playbook'})
        test_playbook.yml.update({'name': f'{name}_test_playbook'})

        return pack

    def setup_content_repo(self, number_of_packs):
        """Creates a fully constructed content repository, where packs names will pack_<index>.

        Args:
            number_of_packs (int): Amount of packs to be created in the repo.
        """
        for i in range(number_of_packs):
            self.setup_one_pack(f'pack_{i}')

    def create_pack(self, name: Optional[str] = None):
        """Create an empty pack; auto-names it pack_<count> when no name is given."""
        if name is None:
            name = f'pack_{len(self.packs)}'
        pack = Pack(self._packs_path, name, repo=self)
        self.packs.append(pack)
        return pack

    def working_dir(self):
        """Return the repo root path as a string."""
        return self.path

    def make_dir(self, dir_name: str = ''):
        """Create (and return the path of) a directory under the repo root."""
        if not dir_name:
            dir_name = "NewDir"
        dir_path = os.path.join(self.path, dir_name)
        os.mkdir(dir_path)
        return dir_path

    def make_file(self, file_name: str, file_content: str):
        """Write *file_content* to a new file directly under the repo root."""
        file_path = os.path.join(self.path, file_name)
        with open(file_path, 'w') as f:
            f.write(file_content)
|
python
|
import urllib, json
import sys
from __builtin__ import raw_input
from termcolor import colored
import os
import glob
import webbrowser
def jumbo():
    """Print the colored ASCII-art banner (planet plus 'Planet Cryptoid' logo)."""
    print(colored(" .::.", "cyan"))
    print(colored(" .:' .:", "cyan"))
    print(colored(" ,MMM8&&&.", "magenta") + colored(":' .:'", "cyan"))
    print(colored(" MMMMM88&&&&", "magenta") + colored(" .:'", "cyan"))
    print(colored(" MMMMM88&&&&&&", "magenta") + colored(":'", "cyan"))
    print(colored(" MMMMM88&&&&&&", "magenta"))
    print(colored(" .:", "cyan") + colored("MMMMM88&&&&&&", "magenta"))
    print(colored(" .:' ", "cyan") + colored("MMMMM88&&&&", "magenta"))
    print(colored(" .:' .:", "cyan") + colored("'MMM8&&&'", "magenta"))
    print(colored(" :' .:'", "cyan"))
    print(colored(" '::' ", "cyan"))
    print("")
    print(colored("__________.__ __ _________ __ .__ .___", "cyan"))
    print(colored("\______ \ | _____ ____ _____/ |_ \_ ___ \_______ ___.__._______/ |_ ____ |__| __| _/", "cyan"))
    print(colored(" | ___/ | \__ \ / \_/ __ \ __\ / \ \/\_ __ < | |\____ \ __\/ _ \| |/ __ | ", "cyan"))
    print(colored(" | | | |__/ __ \| | \ ___/| | \ \____| | \/\___ || |_> > | ( <_> ) / /_/ | ", "magenta"))
    print(colored(" |____| |____(____ /___| /\___ >__| \______ /|__| / ____|| __/|__| \____/|__\____ | ", "magenta"))
    print(colored(" \/ \/ \/ \/ \/ |__| \/ ", "magenta"))
def nav():
    """Clear the terminal and print the banner plus the page header."""
    os.system("clear")
    jumbo()
    print(" ========================================================================================")
    print(" ==================== Welcome to Coinmarketcap.com rankings ======================")
    print(" ========================================================================================")
def foot():
    """Print the page footer with the version banner."""
    print(" ========================================================================================")
    print(" ==================== Version 2.0.0 (Limit 3 Accounts) =======================")
    print(" ========================================================================================")
def menu():
    """Show the main menu, read the user's choice and dispatch it."""
    nav()
    print(" ==================== 1.) Check Rankings 2.) Add Wallet ======================")
    print(" ==================== 3.) Check Wallet 4.) Exit ======================")
    print(" ==================== 5.) Visit Planet Cryptoid ======================")
    foot()
    rank = raw_input(" ")
    menu_controller(rank)
def menu_controller(rank):
    """Dispatch a main-menu selection to the matching action.

    Args:
        rank: Raw user input; expected to be a digit 1-5. Anything else
            (including non-numeric text) is treated as an invalid choice.
    """
    try:
        choice = int(rank)
    except ValueError:
        # Non-numeric input used to crash with ValueError; treat as invalid.
        not_cool()
        return
    if choice == 1:
        check_ranks()
    elif choice == 2:
        add_wallet()
    elif choice == 3:
        ask_wallet()
    elif choice == 4:
        os.system("clear")
        exit()
    elif choice == 5:
        # Open the project homepage in the default browser.
        url = "https://www.planetcryptoid.tech"
        webbrowser.open_new(url)
    else:
        not_cool()
def not_cool():
    """Tell the user their selection was invalid and return to the menu."""
    print("Not a valid option, try again?")
    raw_input(" ================== Enter anything to return to menu ====================")
    menu()
def wallet_controller(answer, wallet):
    """Open *wallet* for any valid slot choice (1-3); reject anything else.

    All three branches of the original performed the same action, so they
    are collapsed into a single membership test.
    """
    if int(answer) in (1, 2, 3):
        check_wallet(wallet)
    else:
        not_cool()
def ask_wallet():
    """List up to three saved wallets and open the one the user selects."""
    nav()
    print(" ================== Wallet's Available ====================")
    # Alpha version supports at most three wallets; ignore any extras.
    wallets = glob.glob('wallets/*.json')[:3]
    for slot, wallet in enumerate(wallets, 1):
        print(str(slot) + ".) " + dic2pretty(wallet))
    if not wallets:
        print(" ")
        print("No wallets to display (Add a wallet from the Menu)")
        print(" ")
        raw_input(" ================== Enter anything to return to menu ====================")
        menu()
    else:
        answer = raw_input(" ======== Wallet Select: ")
        try:
            choice = int(answer)
        except ValueError:
            not_cool()
            return
        # Only accept a slot that actually exists; the old code raised
        # IndexError when fewer than three wallets were present.
        if 1 <= choice <= len(wallets):
            wallet_controller(answer, wallets[choice - 1])
        else:
            not_cool()
def dic2pretty(wallet):
    """Turn a wallet path like 'wallets/foo.json' into the display name 'foo'."""
    # Drop the leading "wallets/" directory (8 characters), then everything
    # from the first dot onward.
    name = wallet[len("wallets/"):]
    return name.partition(".")[0]
def check_wallet(wallet):
    """Load the wallet JSON file and show its statistics screen."""
    file = wallet
    with open(file) as json_data:
        d = json.load(json_data)
        os.system("clear")
        wallet_stats(d, wallet)
def wallet_stats(d, wallet):
    """Print the holdings of *d* (parsed wallet JSON) priced in USD.

    Args:
        d: List with one dict holding 'btc'/'eth'/'xrp' balances as strings.
        wallet: Path to the wallet file (used only for the display name).
    """
    # NOTE(review): nav() already calls jumbo(), so the banner prints twice here.
    jumbo()
    nav()
    print(" ==================== Wallet: " + dic2pretty(wallet) + " ======================")
    print(" ==================== =Rank= ===Name=== ==Price== ==1h %== ======================")
    # Convert each balance to its USD value via the live API.
    data = calculate(str(d[0]['btc']), str(d[0]['eth']), str(d[0]['xrp']))
    btc_per = color(data[0]['btc_per'])
    eth_per = color(data[0]['eth_per'])
    xrp_per = color(data[0]['xrp_per'])
    print(" ==================== 1 Bitcoin $" + str(round(data[0]['btc_usd'], 2)) + " " + btc_per)
    print(" ==================== 2 Ethereum $" + str(round(data[0]['eth_usd'], 2)) + " " + eth_per)
    print(" ==================== 3 Ripple $" + str(round(data[0]['xrp_usd'], 2)) + " " + xrp_per)
    total = data[0]['btc_usd'] + data[0]['eth_usd'] + data[0]['xrp_usd']
    print(" ----------------------------------------------------------------------------------------")
    print(" ==================== Total Value: $" + str(round(total, 2)) + " ======================")
    print(" ")
    raw_input(" ================== Enter anything to return to menu ===================")
    menu()
def add_wallet():
    """Prompt for a wallet name plus coin balances and persist it as JSON."""
    nav()
    print(" ==================== Add Wallet Alpha(limited 3 coins) ======================")
    print(" ==================== Enter a Name for your Wallet ======================")
    name = raw_input("Wallet Name: ")
    btc = raw_input("Bitcoin Value : ")
    eth = raw_input("Ethereum Value : ")
    xrp = raw_input("Ripple Value : ")
    file = "wallets/" + name + ".json"
    print(name)
    data = [{"name": name, "btc": btc, "eth": eth, "xrp": xrp}]
    # Reuse the shared serializer instead of duplicating the json.dump logic.
    save_wallet(data, file)
    print(" ================== Wallet Created ====================")
    raw_input(" ================== Enter anything to return to menu ====================")
    menu()
def check_ranks():
    """Fetch the top-N coins from the CoinMarketCap API and print a ranking."""
    nav()
    print(" ================== How many Crypto's would you like listed?(1-879) ====================")
    ranks = raw_input(" ")
    url = "https://api.coinmarketcap.com/v1/ticker/?limit=" + str(ranks)
    response = urllib.urlopen(url)
    data = json.loads(response.read())
    # Slice to the requested count; this also guards against the API
    # returning fewer coins than asked for (the old index loop would
    # IndexError in that case). Also drops an unused btc_price local.
    for rank, coin in enumerate(data[:int(ranks)], 1):
        percent = float(coin['percent_change_1h'])
        print(str(rank) + "). " + coin['name'] + " | " + coin['price_usd'] + " | " + color(percent))
    raw_input(" ================== Enter anything to return to menu ====================")
    menu()
def calculate(btc, eth, xrp):
    """Convert BTC/ETH/XRP balances to USD using live CoinMarketCap prices.

    Args:
        btc, eth, xrp: Coin balances as numeric strings.

    Returns:
        One-element list with the USD values and 1-hour change percentages.
    """
    url = "https://api.coinmarketcap.com/v1/ticker/?limit=3"
    response = urllib.urlopen(url)
    data = json.loads(response.read())
    # NOTE(review): assumes the top-3 ranking is always BTC, ETH, XRP in that
    # order -- fragile if market caps shift; verify against the API.
    btc_usd = float(btc) * float(data[0]["price_usd"])
    eth_usd = float(eth) * float(data[1]["price_usd"])
    xrp_usd = float(xrp) * float(data[2]["price_usd"])
    rankz = [{"btc_usd": btc_usd, "eth_usd": eth_usd, "xrp_usd": xrp_usd,
              "btc_per": data[0]['percent_change_1h'], "eth_per": data[1]['percent_change_1h'],
              "xrp_per": data[2]['percent_change_1h']}]
    return rankz
def color(percent):
    """Return *percent* as a colored "%"-suffixed string: green if positive,
    red otherwise.

    Args:
        percent: 1-hour change; arrives as a float from check_ranks but as a
            raw API string from wallet_stats.
    """
    # Coerce to float before comparing: in the original (Python 2) code a
    # *string* percent always compared greater than 0, so negative changes
    # were wrongly colored green.
    if float(percent) > 0:
        percent_data = colored(str(percent) + "%", "green")
    else:
        percent_data = colored(str(percent) + "%", "red")
    return percent_data
def save_wallet(data, filename):
    """Serialize *data* to *filename* as JSON."""
    with open(filename, 'w') as fp:
        fp.write(json.dumps(data))
def main(argv):
    """Program entry point; *argv* is accepted but currently unused."""
    menu()


if __name__ == "__main__":
    main(sys.argv)
|
python
|
from pychology.behavior_trees import Action
from pychology.behavior_trees import Priorities
from pychology.behavior_trees import Chain
from pychology.behavior_trees import DoneOnPrecondition
from pychology.behavior_trees import FailOnPrecondition
import wecs
from wecs.panda3d.behavior_trees import DoneTimer
from wecs.panda3d.behavior_trees import IdleWhenDoneTree
def idle():
    """Build the idle behaviour tree: alternate 3-second turns right and left."""
    turn_right = DoneTimer(
        wecs.panda3d.behavior_trees.timeout(3.0),
        Action(wecs.panda3d.behavior_trees.turn(1.0)),
    )
    turn_left = DoneTimer(
        wecs.panda3d.behavior_trees.timeout(3.0),
        Action(wecs.panda3d.behavior_trees.turn(-1.0)),
    )
    return IdleWhenDoneTree(Chain(turn_right, turn_left))
def walk_to_entity():
    """Build the behaviour tree that walks a character toward an entity."""
    # Pointable targets: stop once within 1.5 units; fail out otherwise.
    approach_pointable = FailOnPrecondition(
        wecs.panda3d.behavior_trees.is_pointable,
        DoneOnPrecondition(
            wecs.panda3d.behavior_trees.distance_smaller(1.5),
            Action(wecs.panda3d.behavior_trees.walk_to_entity),
        ),
    )
    # Fallback: walk until effectively on top of the target (0.01 units).
    approach_exact = DoneOnPrecondition(
        wecs.panda3d.behavior_trees.distance_smaller(0.01),
        Action(wecs.panda3d.behavior_trees.walk_to_entity),
    )
    return IdleWhenDoneTree(Priorities(approach_pointable, approach_exact))
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.